Merge "envsetup: Add pygrep function to grep all python files"
diff --git a/Changes.md b/Changes.md
index cabbed6..8979e30 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,88 @@
# Build System Changes for Android.mk Writers
+## Stop referencing sysprop_library directly from cc modules
+
+For the migration to Bazel, we are no longer mapping sysprop_library targets
+to their generated `cc_library` counterparts when depending on them from a
+cc module. Instead, directly depend on the generated module by prefixing the
+module name with `lib`. For example, depending on the following module:
+
+```
+sysprop_library {
+ name: "foo",
+ srcs: ["foo.sysprop"],
+}
+```
+
+from a module named `bar` can be done like so:
+
+```
+cc_library {
+ name: "bar",
+ srcs: ["bar.cc"],
+ deps: ["libfoo"],
+}
+```
+
+Failure to do this will result in an error about a missing variant.
+
+## Gensrcs starts disallowing depfile property
+
+To migrate all gensrcs to Bazel, we are restricting the use of the `depfile`
+property, because Bazel requires the dependencies to be specified directly.
+
+To fix existing uses, remove `depfile` and specify all of the dependencies
+directly in the .bp file. For example:
+
+```
+gensrcs {
+ name: "framework-cppstream-protos",
+ tools: [
+ "aprotoc",
+ "protoc-gen-cppstream",
+ ],
+ cmd: "mkdir -p $(genDir)/$(in) " +
+ "&& $(location aprotoc) " +
+ " --plugin=$(location protoc-gen-cppstream) " +
+ " -I . " +
+ " $(in) ",
+ srcs: [
+ "bar.proto",
+ ],
+ output_extension: "srcjar",
+}
+```
+where `bar.proto` imports `external.proto`, would become
+
+```
+gensrcs {
+ name: "framework-cppstream-protos",
+ tools: [
+ "aprotoc",
+ "protoc-gen-cpptream",
+ ],
+ tool_files: [
+ "external.proto",
+ ],
+ cmd: "mkdir -p $(genDir)/$(in) " +
+ "&& $(location aprotoc) " +
+ " --plugin=$(location protoc-gen-cppstream) " +
+ " $(in) ",
+ srcs: [
+ "bar.proto",
+ ],
+ output_extension: "srcjar",
+}
+```
+as in https://android-review.googlesource.com/c/platform/frameworks/base/+/2125692/.
+
+`BUILD_BROKEN_DEPFILE` can be used to allowlist usage of `depfile` in `gensrcs`.
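+
+For example, a device can opt out temporarily in its board configuration (a
+sketch; the exact config file varies by device):
+
+```
+# BoardConfig.mk (hypothetical location)
+BUILD_BROKEN_DEPFILE := true
+```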
+
+If `depfile` is needed for generating javastream protos, `java_library` with `proto.type`
+set to `stream` is the alternative solution. See
+https://android-review.googlesource.com/c/platform/packages/modules/Permission/+/2118004/
+for an example.
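+
+A minimal sketch of that alternative (the module name here is hypothetical):
+
+```
+java_library {
+    name: "bar-javastream-protos", // hypothetical name
+    srcs: ["bar.proto"],
+    proto: {
+        type: "stream",
+    },
+}
+```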
+
## Genrule starts disallowing directory inputs
To better specify the inputs to the build, we are restricting use of directories
@@ -733,6 +816,38 @@
Clang is the default and only supported Android compiler, so there is no reason
for this option to exist.
+### Stop using clang property
+
+The `clang` property has been removed from Soong. To fix any build errors,
+remove the `clang` property from affected Android.bp files using bpmodify.
+
+
+```
+go run bpmodify.go -w -m=module_name -remove-property=true -property=clang filepath
+```
+
+`BUILD_BROKEN_CLANG_PROPERTY` can be used as a temporary workaround.
+
+
+### Stop using clang_cflags and clang_asflags
+
+`clang_cflags` and `clang_asflags` are deprecated.
+To fix any build errors, use bpmodify to either
+ - move the contents of clang_cflags/clang_asflags into cflags/asflags, or
+ - delete clang_cflags/clang_asflags as necessary.
+
+To move the contents:
+```
+go run bpmodify.go -w -m=module_name -move-property=true -property=clang_cflags -new-location=cflags filepath
+```
+
+To delete:
+```
+go run bpmodify.go -w -m=module_name -remove-property=true -property=clang_cflags filepath
+```
+
+`BUILD_BROKEN_CLANG_ASFLAGS` and `BUILD_BROKEN_CLANG_CFLAGS` can be used as temporary workarounds.
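+
+Like the other `BUILD_BROKEN_*` flags, these are board configuration
+variables, for example (a sketch):
+
+```
+# BoardConfig.mk (hypothetical location)
+BUILD_BROKEN_CLANG_CFLAGS := true
+BUILD_BROKEN_CLANG_ASFLAGS := true
+```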
+
### Other envsetup.sh variables {#other_envsetup_variables}
* ANDROID_TOOLCHAIN
@@ -745,6 +860,39 @@
the makefile system. If you need one of them, you'll have to set up your own
version.
+## Soong config variables
+
+### Soong config string variables must list all values they can be set to
+
+To facilitate the transition to Bazel, a soong_config_string_variable
+may only be set to a value listed in its `values` property, or to an empty string.
+Anything else is a build error.
+
+Example Android.bp:
+```
+soong_config_string_variable {
+ name: "my_string_variable",
+ values: [
+ "foo",
+ "bar",
+ ],
+}
+
+soong_config_module_type {
+ name: "my_cc_defaults",
+ module_type: "cc_defaults",
+ config_namespace: "my_namespace",
+ variables: ["my_string_variable"],
+ properties: [
+ "shared_libs",
+ "static_libs",
+ ],
+}
+```
+Product config:
+```
+$(call soong_config_set,my_namespace,my_string_variable,baz) # Will be an error as baz is not listed in my_string_variable's values.
+```
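+
+Setting a listed value (or the empty string) is accepted:
+
+```
+$(call soong_config_set,my_namespace,my_string_variable,foo) # OK: foo is listed in values.
+```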
[build/soong/Changes.md]: https://android.googlesource.com/platform/build/soong/+/master/Changes.md
[build/soong/docs/best_practices.md#headers]: https://android.googlesource.com/platform/build/soong/+/master/docs/best_practices.md#headers
diff --git a/OWNERS b/OWNERS
index 6e7c0ea..8a1cc34 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,3 +1,4 @@
include platform/build/soong:/OWNERS
-per-file finalize_branch_for_release.sh = smoreland@google.com
+# Finalization scripts
+per-file finalize* = smoreland@google.com, alexbuy@google.com
diff --git a/core/BUILD b/core/BUILD
new file mode 100644
index 0000000..3e69e62
--- /dev/null
+++ b/core/BUILD
@@ -0,0 +1,4 @@
+# Export tradefed templates for tests.
+exports_files(
+ glob(["*.xml"]),
+)
diff --git a/core/Makefile b/core/Makefile
index a96cfd5..955b360 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -7,6 +7,7 @@
SYSTEM_NOTICE_DEPS :=
VENDOR_NOTICE_DEPS :=
UNMOUNTED_NOTICE_DEPS :=
+UNMOUNTED_NOTICE_VENDOR_DEPS :=
ODM_NOTICE_DEPS :=
OEM_NOTICE_DEPS :=
PRODUCT_NOTICE_DEPS :=
@@ -599,8 +600,10 @@
$(if $(PACKAGES.$(p).EXTERNAL_KEY),\
$(call _apkcerts_write_line,$(PACKAGES.$(p).STEM),EXTERNAL,,$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@),\
$(call _apkcerts_write_line,$(PACKAGES.$(p).STEM),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@))))
- $(if $(filter true,$(PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA)),\
- $(call _apkcerts_write_line,$(notdir $(basename $(FSVERITY_APK_OUT))),$(FSVERITY_APK_KEY_PATH).x509.pem,$(FSVERITY_APK_KEY_PATH).pk8,,system,$@))
+ $(if $(filter true,$(PRODUCT_FSVERITY_GENERATE_METADATA)),\
+ $(call _apkcerts_write_line,BuildManifest,$(FSVERITY_APK_KEY_PATH).x509.pem,$(FSVERITY_APK_KEY_PATH).pk8,,system,$@) \
+ $(if $(filter true,$(BUILDING_SYSTEM_EXT_IMAGE)),\
+ $(call _apkcerts_write_line,BuildManifestSystemExt,$(FSVERITY_APK_KEY_PATH).x509.pem,$(FSVERITY_APK_KEY_PATH).pk8,,system_ext,$@)))
# In case value of PACKAGES is empty.
$(hide) touch $@
@@ -900,7 +903,7 @@
$(call declare-1p-container,$(INSTALLED_RAMDISK_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_RAMDISK_TARGET),$(INTERNAL_RAMDISK_FILE),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_RAMDISK_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_RAMDISK_TARGET)
.PHONY: ramdisk-nodeps
ramdisk-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
@@ -935,6 +938,7 @@
my_apex_extracted_boot_image := $(ALL_MODULES.$(my_installed_prebuilt_gki_apex).EXTRACTED_BOOT_IMAGE)
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
$(eval $(call copy-one-file,$(my_apex_extracted_boot_image),$(INSTALLED_BOOTIMAGE_TARGET)))
+ $(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
INTERNAL_PREBUILT_BOOTIMAGE := $(my_apex_extracted_boot_image)
@@ -963,20 +967,16 @@
$(if $(1),--partition_size $(1),--dynamic_partition_size)
endef
+ifndef BOARD_PREBUILT_BOOTIMAGE
+
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
INTERNAL_BOOTIMAGE_ARGS := \
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
-INTERNAL_INIT_BOOT_IMAGE_ARGS :=
-
# TODO(b/229701033): clean up BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK.
ifneq ($(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK),true)
- ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
- INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
- else
- INTERNAL_INIT_BOOT_IMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
- endif
+ ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
+ INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
endif
endif
@@ -988,15 +988,6 @@
INTERNAL_BOOTIMAGE_FILES := $(filter-out --%,$(INTERNAL_BOOTIMAGE_ARGS))
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-VERITY_KEYID := veritykeyid=id:`openssl x509 -in $(PRODUCT_VERITY_SIGNING_KEY).x509.pem -text \
- | grep keyid | sed 's/://g' | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]' | sed 's/keyid//g'`
-endif
-endif
-
-INTERNAL_KERNEL_CMDLINE := $(strip $(INTERNAL_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT) $(VERITY_KEYID))
-
# kernel cmdline/base/pagesize in boot.
# - If using GKI, use GENERIC_KERNEL_CMDLINE. Remove kernel base and pagesize because they are
# device-specific.
@@ -1102,40 +1093,17 @@
$(call pretty,"Target boot image: $@")
$(call build_boot_board_avb_enabled,$@)
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES) $(INTERNAL_GKI_CERTIFICATE_DEPS),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS)
@echo "make $@: ignoring dependencies"
$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
-else ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
-
-# $1: boot image target
-define build_boot_supports_boot_signer
- $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
- $(BOOT_SIGNER) /boot $@ $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)
- $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot))
-endef
-
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
- $(call pretty,"Target boot image: $@")
- $(call build_boot_supports_boot_signer,$@)
-
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
-$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
-
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
-
-.PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
- @echo "make $@: ignoring dependencies"
- $(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_boot_signer,$(b)))
-
-else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true
+else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # BOARD_AVB_ENABLE != true
# $1: boot image target
define build_boot_supports_vboot
@@ -1148,10 +1116,10 @@
$(call pretty,"Target boot image: $@")
$(call build_boot_supports_vboot,$@)
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(FUTILITY)
@@ -1170,10 +1138,10 @@
$(call pretty,"Target boot image: $@")
$(call build_boot_novboot,$@)
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG)
@@ -1184,7 +1152,10 @@
endif # BUILDING_BOOT_IMAGE
else # TARGET_NO_KERNEL == "true"
-ifdef BOARD_PREBUILT_BOOTIMAGE
+INSTALLED_BOOTIMAGE_TARGET :=
+endif # TARGET_NO_KERNEL
+
+else # BOARD_PREBUILT_BOOTIMAGE defined
INTERNAL_PREBUILT_BOOTIMAGE := $(BOARD_PREBUILT_BOOTIMAGE)
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
@@ -1197,19 +1168,17 @@
--partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_PREBUILT_BOOTIMAGE),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
else
$(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_BOOTIMAGE)
cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
endif # BOARD_AVB_ENABLE
-else # BOARD_PREBUILT_BOOTIMAGE not defined
-INSTALLED_BOOTIMAGE_TARGET :=
endif # BOARD_PREBUILT_BOOTIMAGE
-endif # TARGET_NO_KERNEL
+
endif # my_installed_prebuilt_gki_apex not defined
my_apex_extracted_boot_image :=
@@ -1222,6 +1191,8 @@
INSTALLED_INIT_BOOT_IMAGE_TARGET := $(PRODUCT_OUT)/init_boot.img
$(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_RAMDISK_TARGET)
+INTERNAL_INIT_BOOT_IMAGE_ARGS := --ramdisk $(INSTALLED_RAMDISK_TARGET)
+
ifdef BOARD_KERNEL_PAGESIZE
INTERNAL_INIT_BOOT_IMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
endif
@@ -1248,7 +1219,7 @@
$(call declare-1p-target,$(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
endif
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
else # BUILDING_INIT_BOOT_IMAGE is not true
@@ -1274,7 +1245,7 @@
$(call declare-1p-target,$(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
endif # BOARD_AVB_ENABLE
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
else # BOARD_PREBUILT_INIT_BOOT_IMAGE not defined
INSTALLED_INIT_BOOT_IMAGE_TARGET :=
@@ -1287,10 +1258,6 @@
INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_VENDOR_RAMDISK_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
- $(error vboot 1.0 does not support vendor_boot partition)
-endif
-
INTERNAL_VENDOR_RAMDISK_FILES := $(filter $(TARGET_VENDOR_RAMDISK_OUT)/%, \
$(ALL_DEFAULT_INSTALLED_MODULES))
@@ -1565,7 +1532,6 @@
# TARGET_OUT_NOTICE_FILES now that the notice files are gathered from
# the src subdirectory.
kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
-winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
# Some targets get included under $(PRODUCT_OUT) for debug symbols or other
# reasons--not to be flashed onto any device. Targets under these directories
@@ -1688,17 +1654,74 @@
ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
+need_vendor_notice:=false
+ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
+ need_vendor_notice:=true
+endif
+
+ifdef BUILDING_DEBUG_VENDOR_BOOT_IMAGE
+ need_vendor_notice:=true
+endif
+
+ifdef BUILDING_VENDOR_IMAGE
+ need_vendor_notice:=true
+endif
+
+ifeq (true,$(need_vendor_notice))
+ifneq (,$(installed_vendor_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
+endif
+endif
+
+need_vendor_notice:=
+
+ifdef BUILDING_ODM_IMAGE
+ifneq (,$(installed_odm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_PRODUCT_IMAGE
+ifneq (,$(installed_product_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+ifneq (,$(installed_system_ext_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_ext_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_VENDOR_DLKM_IMAGE
+ifneq (,$(installed_vendor_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_dlkm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_ODM_DLKM_IMAGE
+ifneq (,$(installed_odm_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_dlkm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+ifneq (,$(installed_system_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_dlkm_notice_xml_gz)
+endif
+endif
+
endif # TARGET_BUILD_APPS
-# The kernel isn't really a module, so to get its module file in there, we
-# make the target NOTICE files depend on this particular file too, which will
-# then be in the right directory for the find in combine-notice-files to work.
+# Presently, none of the prebuilts etc. comply with the policy of having a license text. Fake one here.
$(eval $(call copy-one-file,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,$(kernel_notice_file)))
-# No matter where it gets copied from, a copied linux kernel is licensed under "GPL 2.0 only"
-$(eval $(call declare-copy-files-license-metadata,,:kernel,SPDX-license-identifier-GPL-2.0-only,notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,))
+ifneq (,$(strip $(INSTALLED_KERNEL_TARGET)))
+$(call declare-license-metadata,$(INSTALLED_KERNEL_TARGET),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
+endif
-$(eval $(call copy-one-file,$(BUILD_SYSTEM)/WINPTHREADS_COPYING,$(winpthreads_notice_file)))
+# No matter where it gets copied from, a copied linux kernel is licensed under "GPL 2.0 only"
+$(eval $(call declare-copy-files-license-metadata,,:kernel,SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,kernel))
# #################################################################
@@ -1754,13 +1777,6 @@
INTERNAL_USERIMAGES_DEPS += $(MKSQUASHFSUSERIMG)
endif
-ifeq (true,$(PRODUCT_SUPPORTS_VERITY))
-INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_METADATA) $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
-ifeq (true,$(PRODUCT_SUPPORTS_VERITY_FEC))
-INTERNAL_USERIMAGES_DEPS += $(FEC)
-endif
-endif
-
ifeq ($(BOARD_AVB_ENABLE),true)
INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
endif
@@ -1777,14 +1793,6 @@
INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
-ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
-
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
- $(error vboot 1.0 doesn't support logical partition)
-endif
-
-endif # PRODUCT_USE_DYNAMIC_PARTITIONS
-
# $(1) the partition name (eg system)
# $(2) the image prop file
define add-common-flags-to-image-props
@@ -1894,11 +1902,6 @@
$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
$(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
$(if $(BOARD_FLASH_ERASE_BLOCK_SIZE), $(hide) echo "flash_erase_block_size=$(BOARD_FLASH_ERASE_BLOCK_SIZE)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_BOOT_SIGNER),$(hide) echo "boot_signer=$(PRODUCT_SUPPORTS_BOOT_SIGNER)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity=$(PRODUCT_SUPPORTS_VERITY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(notdir $(VERITY_SIGNER))" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY_FEC),$(hide) echo "verity_fec=$(PRODUCT_SUPPORTS_VERITY_FEC)" >> $(1))
$(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
$(if $(PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
$(if $(PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
@@ -1981,8 +1984,6 @@
$(hide) echo "avb_system_dlkm_rollback_index_location=$(BOARD_SYSTEM_SYSTEM_DLKM_ROLLBACK_INDEX_LOCATION)" >> $(1)))
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
$(hide) echo "recovery_as_boot=true" >> $(1))
-$(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
- $(hide) echo "system_root_image=true" >> $(1))
$(if $(filter true,$(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK)),\
$(hide) echo "gki_boot_image_without_ramdisk=true" >> $(1))
$(hide) echo "root_dir=$(TARGET_ROOT_OUT)" >> $(1)
@@ -2259,20 +2260,18 @@
# (BOARD_USES_FULL_RECOVERY_IMAGE = true);
# b) We build a single image that contains boot and recovery both - no recovery image to install
# (BOARD_USES_RECOVERY_AS_BOOT = true);
-# c) We mount the system image as / and therefore do not have a ramdisk in boot.img
-# (BOARD_BUILD_SYSTEM_ROOT_IMAGE = true).
-# d) We include the recovery DTBO image within recovery - not needing the resource file as we
+# c) We include the recovery DTBO image within recovery - not needing the resource file as we
# do bsdiff because boot and recovery will contain different number of entries
# (BOARD_INCLUDE_RECOVERY_DTBO = true).
-# e) We include the recovery ACPIO image within recovery - not needing the resource file as we
+# d) We include the recovery ACPIO image within recovery - not needing the resource file as we
# do bsdiff because boot and recovery will contain different number of entries
# (BOARD_INCLUDE_RECOVERY_ACPIO = true).
-# f) We build a single image that contains vendor_boot and recovery both - no recovery image to
+# e) We build a single image that contains vendor_boot and recovery both - no recovery image to
# install
# (BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT = true).
ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT) \
- $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO) \
+ $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO) \
$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
# Named '.dat' so we don't attempt to use imgdiff for patching it.
RECOVERY_RESOURCE_ZIP := $(TARGET_OUT_VENDOR)/etc/recovery-resource.dat
@@ -2394,8 +2393,7 @@
# Use rsync because "cp -Rf" fails to overwrite broken symlinks on Mac.
rsync -a --exclude=sdcard $(IGNORE_RECOVERY_SEPOLICY) $(IGNORE_CACHE_LINK) $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
# Modifying ramdisk contents...
- $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),, \
- ln -sf /system/bin/init $(TARGET_RECOVERY_ROOT_OUT)/init)
+ ln -sf /system/bin/init $(TARGET_RECOVERY_ROOT_OUT)/init
# Removes $(TARGET_RECOVERY_ROOT_OUT)/init*.rc EXCEPT init.recovery*.rc.
find $(TARGET_RECOVERY_ROOT_OUT) -maxdepth 1 -name 'init*.rc' -type f -not -name "init.recovery.*.rc" | xargs rm -f
cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/ 2> /dev/null || true # Ignore error when the src file doesn't exist.
@@ -2428,12 +2426,6 @@
$(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
$(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
$(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1))
- $(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\
- $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
- $(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1),\
- $(BOOT_SIGNER) /recovery $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)\
- )\
- )
$(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
$(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
@@ -2446,9 +2438,6 @@
endef
recoveryimage-deps := $(MKBOOTIMG) $(recovery_ramdisk) $(recovery_kernel)
-ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER))
- recoveryimage-deps += $(BOOT_SIGNER)
-endif
ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
recoveryimage-deps += $(VBOOT_SIGNER)
endif
@@ -2475,10 +2464,10 @@
$(call pretty,"Target boot image from recovery: $@")
$(call build-recoveryimage-target, $@, $(PRODUCT_OUT)/$(subst .img,,$(subst boot,kernel,$(notdir $@))))
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(recoveryimage-deps),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
endif # BOARD_USES_RECOVERY_AS_BOOT
$(INSTALLED_RECOVERYIMAGE_TARGET): $(recoveryimage-deps)
@@ -2496,7 +2485,7 @@
$(call declare-1p-container,$(INSTALLED_RECOVERYIMAGE_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_RECOVERYIMAGE_TARGET),$(recoveryimage-deps),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_RECOVERYIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_RECOVERYIMAGE_TARGET)
.PHONY: recoveryimage-nodeps
recoveryimage-nodeps:
@@ -2582,7 +2571,7 @@
$(call declare-1p-container,$(INSTALLED_DEBUG_RAMDISK_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_DEBUG_RAMDISK_TARGET),$(INSTALLED_RAMDISK_TARGET),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_DEBUG_RAMDISK_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DEBUG_RAMDISK_TARGET)
.PHONY: ramdisk_debug-nodeps
ramdisk_debug-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
@@ -2646,10 +2635,10 @@
$(call pretty,"Target boot debug image: $@")
$(call build-debug-bootimage-target, $@)
-$(call declare-1p-container,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
$(call declare-container-license-deps,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(INSTALLED_BOOTIMAGE_TARGET),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
.PHONY: bootimage_debug-nodeps
bootimage_debug-nodeps: $(MKBOOTIMG) $(AVBTOOL)
@@ -2802,7 +2791,7 @@
$(call declare-1p-container,$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET),$(INTERNAL_TEST_HARNESS_RAMDISK_SRC_DEPS),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
.PHONY: ramdisk_test_harness-nodeps
ramdisk_test_harness-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
@@ -2851,7 +2840,7 @@
$(call declare-1p-container,$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET),$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET)
.PHONY: bootimage_test_harness-nodeps
bootimage_test_harness-nodeps: $(MKBOOTIMG) $(AVBTOOL)
@@ -2937,21 +2926,26 @@
endef
-# -----------------------------------------------------------------
-# system image
-
# FSVerity metadata generation
# Generate fsverity metadata files (.fsv_meta) and build manifest
-# (system/etc/security/fsverity/BuildManifest.apk) BEFORE filtering systemimage files below
-ifeq ($(PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA),true)
+# (<partition>/etc/security/fsverity/BuildManifest<suffix>.apk) BEFORE filtering systemimage,
+# vendorimage, odmimage, productimage files below.
+ifeq ($(PRODUCT_FSVERITY_GENERATE_METADATA),true)
-# Generate fsv_meta
-fsverity-metadata-targets := $(sort $(filter \
+fsverity-metadata-targets-patterns := \
$(TARGET_OUT)/framework/% \
$(TARGET_OUT)/etc/boot-image.prof \
$(TARGET_OUT)/etc/dirty-image-objects \
$(TARGET_OUT)/etc/preloaded-classes \
- $(TARGET_OUT)/etc/classpaths/%.pb, \
+ $(TARGET_OUT)/etc/classpaths/%.pb \
+
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+fsverity-metadata-targets-patterns += $(TARGET_OUT_SYSTEM_EXT)/framework/%
+endif
+
+# Generate fsv_meta
+fsverity-metadata-targets := $(sort $(filter \
+ $(fsverity-metadata-targets-patterns), \
$(ALL_DEFAULT_INSTALLED_MODULES)))
define fsverity-generate-metadata
@@ -2965,38 +2959,66 @@
$(foreach f,$(fsverity-metadata-targets),$(eval $(call fsverity-generate-metadata,$(f))))
ALL_DEFAULT_INSTALLED_MODULES += $(addsuffix .fsv_meta,$(fsverity-metadata-targets))
-# Generate BuildManifest.apk
FSVERITY_APK_KEY_PATH := $(DEFAULT_SYSTEM_DEV_CERTIFICATE)
-FSVERITY_APK_OUT := $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk
-FSVERITY_APK_MANIFEST_PATH := system/security/fsverity/AndroidManifest.xml
-$(FSVERITY_APK_OUT): PRIVATE_FSVERITY := $(HOST_OUT_EXECUTABLES)/fsverity
-$(FSVERITY_APK_OUT): PRIVATE_AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2
-$(FSVERITY_APK_OUT): PRIVATE_MIN_SDK_VERSION := $(DEFAULT_APP_TARGET_SDK)
-$(FSVERITY_APK_OUT): PRIVATE_VERSION_CODE := $(PLATFORM_SDK_VERSION)
-$(FSVERITY_APK_OUT): PRIVATE_VERSION_NAME := $(APPS_DEFAULT_VERSION_NAME)
-$(FSVERITY_APK_OUT): PRIVATE_APKSIGNER := $(HOST_OUT_EXECUTABLES)/apksigner
-$(FSVERITY_APK_OUT): PRIVATE_MANIFEST := $(FSVERITY_APK_MANIFEST_PATH)
-$(FSVERITY_APK_OUT): PRIVATE_FRAMEWORK_RES := $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
-$(FSVERITY_APK_OUT): PRIVATE_KEY := $(FSVERITY_APK_KEY_PATH)
-$(FSVERITY_APK_OUT): PRIVATE_INPUTS := $(fsverity-metadata-targets)
-$(FSVERITY_APK_OUT): $(HOST_OUT_EXECUTABLES)/fsverity_manifest_generator \
+FSVERITY_APK_MANIFEST_TEMPLATE_PATH := system/security/fsverity/AndroidManifest.xml
+
+# Generate and install BuildManifest<suffix>.apk for the given partition
+# $(1): path of the output APK
+# $(2): partition name
+define fsverity-generate-and-install-manifest-apk
+fsverity-metadata-targets-$(2) := $(filter $(PRODUCT_OUT)/$(2)/%,\
+ $(fsverity-metadata-targets))
+$(1): PRIVATE_FSVERITY := $(HOST_OUT_EXECUTABLES)/fsverity
+$(1): PRIVATE_AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2
+$(1): PRIVATE_MIN_SDK_VERSION := $(DEFAULT_APP_TARGET_SDK)
+$(1): PRIVATE_VERSION_CODE := $(PLATFORM_SDK_VERSION)
+$(1): PRIVATE_VERSION_NAME := $(APPS_DEFAULT_VERSION_NAME)
+$(1): PRIVATE_APKSIGNER := $(HOST_OUT_EXECUTABLES)/apksigner
+$(1): PRIVATE_MANIFEST := $(FSVERITY_APK_MANIFEST_TEMPLATE_PATH)
+$(1): PRIVATE_FRAMEWORK_RES := $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
+$(1): PRIVATE_KEY := $(FSVERITY_APK_KEY_PATH)
+$(1): PRIVATE_INPUTS := $$(fsverity-metadata-targets-$(2))
+$(1): PRIVATE_ASSETS := $(call intermediates-dir-for,ETC,build_manifest-$(2))/assets
+$(1): $(HOST_OUT_EXECUTABLES)/fsverity_manifest_generator \
$(HOST_OUT_EXECUTABLES)/fsverity $(HOST_OUT_EXECUTABLES)/aapt2 \
- $(HOST_OUT_EXECUTABLES)/apksigner $(FSVERITY_APK_MANIFEST_PATH) \
+ $(HOST_OUT_EXECUTABLES)/apksigner $(FSVERITY_APK_MANIFEST_TEMPLATE_PATH) \
$(FSVERITY_APK_KEY_PATH).x509.pem $(FSVERITY_APK_KEY_PATH).pk8 \
$(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk \
- $(fsverity-metadata-targets)
- $< --fsverity-path $(PRIVATE_FSVERITY) --aapt2-path $(PRIVATE_AAPT2) \
- --min-sdk-version $(PRIVATE_MIN_SDK_VERSION) \
- --version-code $(PRIVATE_VERSION_CODE) \
- --version-name $(PRIVATE_VERSION_NAME) \
- --apksigner-path $(PRIVATE_APKSIGNER) --apk-key-path $(PRIVATE_KEY) \
- --apk-manifest-path $(PRIVATE_MANIFEST) --framework-res $(PRIVATE_FRAMEWORK_RES) \
- --output $@ \
- --base-dir $(PRODUCT_OUT) $(PRIVATE_INPUTS)
+ $$(fsverity-metadata-targets-$(2))
+ rm -rf $$(PRIVATE_ASSETS)
+ mkdir -p $$(PRIVATE_ASSETS)
+ $$< --fsverity-path $$(PRIVATE_FSVERITY) \
+ --base-dir $$(PRODUCT_OUT) \
+ --output $$(PRIVATE_ASSETS)/build_manifest.pb \
+ $$(PRIVATE_INPUTS)
+ $$(PRIVATE_AAPT2) link -o $$@ \
+ -A $$(PRIVATE_ASSETS) \
+ -I $$(PRIVATE_FRAMEWORK_RES) \
+ --min-sdk-version $$(PRIVATE_MIN_SDK_VERSION) \
+ --version-code $$(PRIVATE_VERSION_CODE) \
+ --version-name $$(PRIVATE_VERSION_NAME) \
+ --manifest $$(PRIVATE_MANIFEST) \
+ --rename-manifest-package com.android.security.fsverity_metadata.$(2)
+ $$(PRIVATE_APKSIGNER) sign --in $$@ \
+ --cert $$(PRIVATE_KEY).x509.pem \
+ --key $$(PRIVATE_KEY).pk8
-ALL_DEFAULT_INSTALLED_MODULES += $(FSVERITY_APK_OUT)
+ALL_DEFAULT_INSTALLED_MODULES += $(1)
-endif # PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA
+endef # fsverity-generate-and-install-manifest-apk
+
+$(eval $(call fsverity-generate-and-install-manifest-apk, \
+ $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk,system))
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+ $(eval $(call fsverity-generate-and-install-manifest-apk, \
+ $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk,system_ext))
+endif
+
+endif # PRODUCT_FSVERITY_GENERATE_METADATA
+
+
+# -----------------------------------------------------------------
+# system image
INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
@@ -3122,7 +3144,7 @@
ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
-ifneq (,$(filter true, $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
+ifneq (,$(filter true,$(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
diff_tool := $(HOST_OUT_EXECUTABLES)/bsdiff
else
diff_tool := $(HOST_OUT_EXECUTABLES)/imgdiff
@@ -3214,7 +3236,7 @@
$(call declare-1p-container,$(INSTALLED_USERDATAIMAGE_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_USERDATAIMAGE_TARGET),$(INSTALLED_USERDATAIMAGE_TARGET_DEPS),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_USERDATAIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_USERDATAIMAGE_TARGET)
.PHONY: userdataimage-nodeps
userdataimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
@@ -3266,7 +3288,7 @@
$(call declare-1p-container,$(INSTALLED_BPTIMAGE_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_BPTIMAGE_TARGET),$(BOARD_BPT_INPUT_FILES),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BPTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BPTIMAGE_TARGET)
.PHONY: bptimage-nodeps
bptimage-nodeps:
@@ -3305,7 +3327,7 @@
$(call declare-1p-container,$(INSTALLED_CACHEIMAGE_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_CACHEIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_CACHEIMAGE_FILES),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_CACHEIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_CACHEIMAGE_TARGET)
.PHONY: cacheimage-nodeps
cacheimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
@@ -3858,6 +3880,11 @@
$(INSTALLED_FILES_FILE_SYSTEM_DLKM)
$(build-system_dlkmimage-target)
+SYSTEM_DLKM_NOTICE_DEPS += $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+
+$(call declare-1p-container,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) $(INSTALLED_FILES_FILE_SYSTEM_DLKM),$(PRODUCT_OUT)/:/)
+
.PHONY: system_dlkmimage-nodeps sdnod
system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-system_dlkmimage-target)
@@ -3886,7 +3913,7 @@
$(call declare-1p-container,$(INSTALLED_DTBOIMAGE_TARGET),)
$(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_TARGET),$(BOARD_PREBUILT_DTBOIMAGE),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_DTBOIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DTBOIMAGE_TARGET)
else
$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
cp $(BOARD_PREBUILT_DTBOIMAGE) $@
@@ -3897,33 +3924,21 @@
# -----------------------------------------------------------------
# Protected VM firmware image
ifeq ($(BOARD_USES_PVMFWIMAGE),true)
+
+.PHONY: pvmfwimage
+pvmfwimage: $(INSTALLED_PVMFWIMAGE_TARGET)
+
INSTALLED_PVMFWIMAGE_TARGET := $(PRODUCT_OUT)/pvmfw.img
INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET := $(PRODUCT_OUT)/pvmfw_embedded.avbpubkey
-INTERNAL_PREBUILT_PVMFWIMAGE := packages/modules/Virtualization/pvmfw/pvmfw.img
-INTERNAL_PVMFW_EMBEDDED_AVBKEY := external/avb/test/data/testkey_rsa4096_pub.bin
-
-ifdef BOARD_PREBUILT_PVMFWIMAGE
-PREBUILT_PVMFWIMAGE_TARGET := $(BOARD_PREBUILT_PVMFWIMAGE)
-else
-PREBUILT_PVMFWIMAGE_TARGET := $(INTERNAL_PREBUILT_PVMFWIMAGE)
-endif
-
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(INSTALLED_PVMFWIMAGE_TARGET): $(PREBUILT_PVMFWIMAGE_TARGET) $(AVBTOOL) $(BOARD_AVB_PVMFW_KEY_PATH)
- cp $< $@
- $(AVBTOOL) add_hash_footer \
- --image $@ \
- $(call get-partition-size-argument,$(BOARD_PVMFWIMAGE_PARTITION_SIZE)) \
- --partition_name pvmfw $(INTERNAL_AVB_PVMFW_SIGNING_ARGS) \
- $(BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS)
+INTERNAL_PVMFWIMAGE_FILES := $(call module-target-built-files,pvmfw_img)
+INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_embedded_key)
$(call declare-1p-container,$(INSTALLED_PVMFWIMAGE_TARGET),)
-$(call declare-container-license-deps,$(INSTALLED_PVMFWIMAGE_TARGET),$(PREBUILT_PVMFWIMAGE_TARGET),$(PRODUCT_OUT)/:/)
+$(call declare-container-license-deps,$(INSTALLED_PVMFWIMAGE_TARGET),$(INTERNAL_PVMFWIMAGE_FILES),$(PRODUCT_OUT)/:/)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_PVMFWIMAGE_TARGET)
-else
-$(eval $(call copy-one-file,$(PREBUILT_PVMFWIMAGE_TARGET),$(INSTALLED_PVMFWIMAGE_TARGET)))
-endif
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_PVMFWIMAGE_TARGET)
+
+$(eval $(call copy-one-file,$(INTERNAL_PVMFWIMAGE_FILES),$(INSTALLED_PVMFWIMAGE_TARGET)))
$(INSTALLED_PVMFWIMAGE_TARGET): $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET)
@@ -4330,18 +4345,6 @@
$(eval $(call check-and-set-custom-avb-chain-args,$(partition))))
endif
-# Add kernel cmdline descriptor for kernel to mount system.img as root with
-# dm-verity. This works when system.img is either chained or not-chained:
-# - chained: The --setup_as_rootfs_from_kernel option will add dm-verity kernel
-# cmdline descriptor to system.img
-# - not-chained: The --include_descriptors_from_image option for make_vbmeta_image
-# will include the kernel cmdline descriptor from system.img into vbmeta.img
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-ifeq ($(filter system, $(BOARD_SUPER_PARTITION_PARTITION_LIST)),)
-BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --setup_as_rootfs_from_kernel
-endif
-endif
-
BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS += --padding_size 4096
BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS += --padding_size 4096
@@ -4467,7 +4470,7 @@
$(call declare-1p-container,$(INSTALLED_VBMETA_VENDORIMAGE_TARGET),)
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_VBMETA_VENDORIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_VBMETA_VENDORIMAGE_TARGET)
endif
define build-vbmetaimage-target
@@ -4547,6 +4550,26 @@
intermediates := $(call intermediates-dir-for,PACKAGING,check_vintf_all)
check_vintf_all_deps :=
+APEX_OUT := $(PRODUCT_OUT)/apex
+# -----------------------------------------------------------------
+# Create apex-info-list.xml
+
+APEX_DIRS := \
+ $(TARGET_OUT)/apex/% \
+ $(TARGET_OUT_SYSTEM_EXT)/apex/% \
+ $(TARGET_OUT_VENDOR)/apex/% \
+ $(TARGET_OUT_ODM)/apex/% \
+ $(TARGET_OUT_PRODUCT)/apex/% \
+
+apex_vintf_files := $(sort $(filter $(APEX_DIRS), $(INTERNAL_ALLIMAGES_FILES)))
+APEX_INFO_FILE := $(APEX_OUT)/apex-info-list.xml
+
+$(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/dump_apex_info $(apex_vintf_files)
+ @echo "Creating apex-info-file in $(PRODUCT_OUT) "
+ $< --root_dir $(PRODUCT_OUT) --out_file $@
+
+apex_vintf_files :=
+
# The build system only writes VINTF metadata to */etc/vintf paths. Legacy paths aren't needed here
# because they are only used for prebuilt images.
check_vintf_common_srcs_patterns := \
@@ -4557,6 +4580,7 @@
$(TARGET_OUT_SYSTEM_EXT)/etc/vintf/% \
check_vintf_common_srcs := $(sort $(filter $(check_vintf_common_srcs_patterns),$(INTERNAL_ALLIMAGES_FILES)))
+check_vintf_common_srcs += $(APEX_INFO_FILE)
check_vintf_common_srcs_patterns :=
check_vintf_has_system :=
@@ -4581,7 +4605,10 @@
$(call declare-0p-target,$(check_vintf_system_log))
check_vintf_system_log :=
-vintffm_log := $(intermediates)/vintffm.log
+# -- Check framework manifest against frozen manifests for GSI targets. They need to be compatible.
+ifneq (true, $(BUILDING_VENDOR_IMAGE))
+ vintffm_log := $(intermediates)/vintffm.log
+endif
check_vintf_all_deps += $(vintffm_log)
$(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps)
@( $< --check --dirmap /system:$(TARGET_OUT) \
@@ -4736,10 +4763,12 @@
--dirmap /odm:$(TARGET_OUT_ODM) \
--dirmap /product:$(TARGET_OUT_PRODUCT) \
--dirmap /system_ext:$(TARGET_OUT_SYSTEM_EXT) \
+ --dirmap /apex:$(APEX_OUT) \
ifdef PRODUCT_SHIPPING_API_LEVEL
check_vintf_compatible_args += --property ro.product.first_api_level=$(PRODUCT_SHIPPING_API_LEVEL)
endif # PRODUCT_SHIPPING_API_LEVEL
+check_vintf_compatible_args += --apex-info-file $(APEX_INFO_FILE)
$(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_ARGS := $(check_vintf_compatible_args)
$(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_DEPS := $(check_vintf_compatible_deps)
@@ -4951,9 +4980,9 @@
mke2fs \
mke2fs.conf \
mkfs.erofs \
- mkf2fsuserimg.sh \
+ mkf2fsuserimg \
mksquashfs \
- mksquashfsimage.sh \
+ mksquashfsimage \
mkuserimg_mke2fs \
ota_extractor \
ota_from_target_files \
@@ -4982,8 +5011,11 @@
INTERNAL_OTATOOLS_MODULES += \
apexer \
apex_compression_tool \
+ blkid \
deapexer \
debugfs_static \
+ dump_apex_info \
+ fsck.erofs \
merge_zips \
resize2fs \
soong_zip \
@@ -5011,7 +5043,13 @@
INTERNAL_OTATOOLS_PACKAGE_FILES += \
$(sort $(shell find build/make/target/product/security -type f -name "*.x509.pem" -o \
- -name "*.pk8" -o -name verity_key))
+ -name "*.pk8"))
+
+ifneq (,$(wildcard packages/modules))
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
+ $(sort $(shell find packages/modules -type f -name "*.x509.pem" -o -name "*.pk8" -o -name \
+ "key.pem"))
+endif
ifneq (,$(wildcard device))
INTERNAL_OTATOOLS_PACKAGE_FILES += \
@@ -5029,8 +5067,8 @@
endif
INTERNAL_OTATOOLS_RELEASETOOLS := \
- $(sort $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o \
- \( -type f -o -type l \) -print))
+ $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o \
+ \( -type f -o -type l \) -print | sort)
BUILT_OTATOOLS_PACKAGE := $(PRODUCT_OUT)/otatools.zip
$(BUILT_OTATOOLS_PACKAGE): PRIVATE_ZIP_ROOT := $(call intermediates-dir-for,PACKAGING,otatools)/otatools
@@ -5676,10 +5714,8 @@
$(TARGET_ROOT_OUT),$(zip_root)/ROOT)
@# If we are using recovery as boot, this is already done when processing recovery.
ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
$(hide) $(call package_files-copy-root, \
$(TARGET_RAMDISK_OUT),$(zip_root)/BOOT/RAMDISK)
-endif
ifdef INSTALLED_KERNEL_TARGET
$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/
endif
@@ -5942,8 +5978,6 @@
$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
$(hide) cp $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) $(zip_root)/PREBUILT_IMAGES/
- $(hide) mkdir -p $(zip_root)/PVMFW
- $(hide) cp $(PREBUILT_PVMFWIMAGE_TARGET) $(zip_root)/PVMFW/
endif
ifdef BOARD_PREBUILT_BOOTLOADER
$(hide) mkdir -p $(zip_root)/IMAGES
@@ -5987,10 +6021,8 @@
endif
@# ROOT always contains the files for the root under normal boot.
$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
- @# BOOT/RAMDISK exists and contains the ramdisk for recovery if using BOARD_USES_RECOVERY_AS_BOOT.
+ @# BOOT/RAMDISK contains the first stage and recovery ramdisk.
$(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
-endif
ifdef BUILDING_INIT_BOOT_IMAGE
$(hide) $(call package_files-copy-root, $(TARGET_RAMDISK_OUT),$(zip_root)/INIT_BOOT/RAMDISK)
$(hide) $(call fs_config,$(zip_root)/INIT_BOOT/RAMDISK,) > $(zip_root)/META/init_boot_filesystem_config.txt
@@ -6001,10 +6033,6 @@
ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
$(call fs_config,$(zip_root)/VENDOR_BOOT/RAMDISK,) > $(zip_root)/META/vendor_boot_filesystem_config.txt
endif
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- @# BOOT/RAMDISK also exists and contains the first stage ramdisk if not using BOARD_BUILD_SYSTEM_ROOT_IMAGE.
- $(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
-endif
ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
$(hide) $(call fs_config,$(zip_root)/RECOVERY/RAMDISK,) > $(zip_root)/META/recovery_filesystem_config.txt
endif
@@ -6077,12 +6105,14 @@
# -----------------------------------------------------------------
# NDK Sysroot Package
NDK_SYSROOT_TARGET := $(PRODUCT_OUT)/ndk_sysroot.tar.bz2
+.PHONY: ndk_sysroot
+ndk_sysroot: $(NDK_SYSROOT_TARGET)
$(NDK_SYSROOT_TARGET): $(SOONG_OUT_DIR)/ndk.timestamp
@echo Package NDK sysroot...
$(hide) tar cjf $@ -C $(SOONG_OUT_DIR) ndk
ifeq ($(HOST_OS),linux)
-$(call dist-for-goals,sdk,$(NDK_SYSROOT_TARGET))
+$(call dist-for-goals,sdk ndk_sysroot,$(NDK_SYSROOT_TARGET))
endif
ifeq ($(build_ota_package),true)
@@ -6189,7 +6219,7 @@
# The mac build doesn't build dex2oat, so create the zip file only if the build OS is linux.
ifeq ($(BUILD_OS),linux)
ifneq ($(DEX2OAT),)
-dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN) $(AAPT2)
+dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN)
dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/dexdump
dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/oatdump
DEXPREOPT_TOOLS_ZIP := $(PRODUCT_OUT)/dexpreopt_tools.zip
@@ -6306,7 +6336,7 @@
ifeq (true,$(CLANG_COVERAGE))
LLVM_PROFDATA := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-profdata
LLVM_COV := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-cov
- LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib64/libc++.so.1
+ LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib/x86_64-unknown-linux-gnu/libc++.so.1
# Use llvm-profdata.zip for backwards compatibility with tradefed code.
LLVM_COVERAGE_TOOLS_ZIP := $(PRODUCT_OUT)/llvm-profdata.zip
@@ -6524,22 +6554,22 @@
endif
endif
-# If BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is set, super.img is built from images in the
-# $(PRODUCT_OUT) directory, and is built to $(PRODUCT_OUT)/super.img. Also, it will
-# be built for non-dist builds. This is useful for devices that uses super.img directly, e.g.
-# virtual devices.
-ifeq (true,$(BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT))
$(INSTALLED_SUPERIMAGE_TARGET): $(INSTALLED_SUPERIMAGE_DEPENDENCIES)
$(call pretty,"Target super fs image for debug: $@")
$(call build-superimage-target,$(INSTALLED_SUPERIMAGE_TARGET),\
$(call intermediates-dir-for,PACKAGING,superimage_debug)/misc_info.txt)
-droidcore-unbundled: $(INSTALLED_SUPERIMAGE_TARGET)
-
# For devices that uses super image directly, the superimage target points to the file in $(PRODUCT_OUT).
.PHONY: superimage
superimage: $(INSTALLED_SUPERIMAGE_TARGET)
+# If BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is set, super.img is built from images in the
+# $(PRODUCT_OUT) directory, and is built to $(PRODUCT_OUT)/super.img. Also, it will
+# be built for non-dist builds. This is useful for devices that uses super.img directly, e.g.
+# virtual devices.
+ifeq (true,$(BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT))
+droidcore-unbundled: $(INSTALLED_SUPERIMAGE_TARGET)
+
$(call dist-for-goals,dist_files,$(INSTALLED_MISC_INFO_TARGET):super_misc_info.txt)
endif # BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT
@@ -6791,8 +6821,6 @@
sdk_atree_files += $(atree_dir)/sdk.atree
endif
-include $(BUILD_SYSTEM)/sdk_font.mk
-
deps := \
$(OUT_DOCS)/offline-sdk-timestamp \
$(SDK_METADATA_FILES) \
@@ -6800,15 +6828,28 @@
$(ATREE_FILES) \
$(sdk_atree_files) \
$(HOST_OUT_EXECUTABLES)/atree \
- $(HOST_OUT_EXECUTABLES)/line_endings \
- $(SDK_FONT_DEPS)
+ $(HOST_OUT_EXECUTABLES)/line_endings
+
+# The name of the subdir within the platforms dir of the sdk. One of:
+# - android-<SDK_INT> (stable base dessert SDKs)
+# - android-<SDK_INT>-ext<EXT_INT> (stable extension SDKs)
+# - android-<CODENAME> (codename SDKs)
+sdk_platform_dir_name := $(strip \
+ $(if $(filter REL,$(PLATFORM_VERSION_CODENAME)), \
+ $(if $(filter $(PLATFORM_SDK_EXTENSION_VERSION),$(PLATFORM_BASE_SDK_EXTENSION_VERSION)), \
+ android-$(PLATFORM_SDK_VERSION), \
+ android-$(PLATFORM_SDK_VERSION)-ext$(PLATFORM_SDK_EXTENSION_VERSION) \
+ ), \
+ android-$(PLATFORM_VERSION_CODENAME) \
+ ) \
+)
INTERNAL_SDK_TARGET := $(sdk_dir)/$(sdk_name).zip
$(INTERNAL_SDK_TARGET): PRIVATE_NAME := $(sdk_name)
$(INTERNAL_SDK_TARGET): PRIVATE_DIR := $(sdk_dir)/$(sdk_name)
$(INTERNAL_SDK_TARGET): PRIVATE_DEP_FILE := $(sdk_dep_file)
$(INTERNAL_SDK_TARGET): PRIVATE_INPUT_FILES := $(sdk_atree_files)
-
+$(INTERNAL_SDK_TARGET): PRIVATE_PLATFORM_NAME := $(sdk_platform_dir_name)
# Set SDK_GNU_ERROR to non-empty to fail when a GNU target is built.
#
#SDK_GNU_ERROR := true
@@ -6824,7 +6865,6 @@
fi; \
done; \
if [ $$FAIL ]; then exit 1; fi
- $(hide) echo $(notdir $(SDK_FONT_DEPS)) | tr " " "\n" > $(SDK_FONT_TEMP)/fontsInSdk.txt
$(hide) ( \
ATREE_STRIP="$(HOST_STRIP) -x" \
$(HOST_OUT_EXECUTABLES)/atree \
@@ -6834,13 +6874,12 @@
-I $(PRODUCT_OUT) \
-I $(HOST_OUT) \
-I $(TARGET_COMMON_OUT_ROOT) \
- -v "PLATFORM_NAME=android-$(PLATFORM_VERSION)" \
+ -v "PLATFORM_NAME=$(PRIVATE_PLATFORM_NAME)" \
-v "OUT_DIR=$(OUT_DIR)" \
-v "HOST_OUT=$(HOST_OUT)" \
-v "TARGET_ARCH=$(TARGET_ARCH)" \
-v "TARGET_CPU_ABI=$(TARGET_CPU_ABI)" \
-v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
- -v "FONT_OUT=$(SDK_FONT_TEMP)" \
-o $(PRIVATE_DIR) && \
HOST_OUT_EXECUTABLES=$(HOST_OUT_EXECUTABLES) HOST_OS=$(HOST_OS) \
development/build/tools/sdk_clean.sh $(PRIVATE_DIR) && \
@@ -6920,14 +6959,26 @@
.PHONY: haiku
haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
$(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku,$(PRODUCT_OUT)/module-info.json)
-.PHONY: haiku-java
-haiku-java: $(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_TARGETS)
-$(call dist-for-goals,haiku-java,$(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES))
+.PHONY: haiku-java-device
+haiku-java-device: $(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_DEVICE_TARGETS)
+$(call dist-for-goals,haiku-java-device,$(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku-java-device,$(PRODUCT_OUT)/module-info.json)
+
+.PHONY: haiku-java-host
+haiku-java-host: $(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_HOST_TARGETS)
+$(call dist-for-goals,haiku-java-host,$(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku-java-host,$(PRODUCT_OUT)/module-info.json)
.PHONY: haiku-rust
haiku-rust: $(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_RUST_FUZZ_TARGETS)
$(call dist-for-goals,haiku-rust,$(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku-rust,$(PRODUCT_OUT)/module-info.json)
+
+# -----------------------------------------------------------------
+# Extract platform fonts used in Layoutlib
+include $(BUILD_SYSTEM)/layoutlib_fonts.mk
# -----------------------------------------------------------------
diff --git a/core/OWNERS b/core/OWNERS
index 980186c..d48ceab 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,6 +1,9 @@
per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
+# For global Proguard rules
+per-file proguard*.flags = jdduke@google.com
+
# For version updates
per-file version_defaults.mk = aseaton@google.com,lubomir@google.com,pscovanner@google.com,bkhalife@google.com,jainne@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 6dfb61e..7a2dea6 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -26,6 +26,7 @@
# Add variables to the namespace below:
+$(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
$(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
$(call add_soong_config_var,ANDROID,IS_TARGET_MIXED_SEPOLICY)
ifeq ($(IS_TARGET_MIXED_SEPOLICY),true)
@@ -33,7 +34,6 @@
endif
$(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
$(call add_soong_config_var,ANDROID,BOARD_USES_RECOVERY_AS_BOOT)
-$(call add_soong_config_var,ANDROID,BOARD_BUILD_SYSTEM_ROOT_IMAGE)
$(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
# Default behavior for the tree wrt building modules or using prebuilts. This
@@ -75,10 +75,14 @@
# are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
# default.
INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
+ bluetooth \
+ permission \
+ uwb \
wifi \
$(foreach m, $(INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES),\
- $(call soong_config_set,$(m)_module,source_build,$(MODULE_BUILD_FROM_SOURCE)))
+ $(if $(call soong_config_get,$(m)_module,source_build),,\
+ $(call soong_config_set,$(m)_module,source_build,$(MODULE_BUILD_FROM_SOURCE))))
# Apex build mode variables
ifdef APEX_BUILD_FOR_PRE_S_DEVICES
@@ -97,6 +101,32 @@
# TODO(b/203088572): Remove when Java optimizations enabled by default for
# SystemUI.
$(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
-# TODO(b/196084106): Remove when Java optimizations enabled by default for
-# system packages.
+
+ifdef PRODUCT_AVF_ENABLED
+$(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
+endif
+
+# Enable system_server optimizations by default unless explicitly set or if
+# there may be dependent runtime jars.
+# TODO(b/240588226): Remove the off-by-default exceptions after handling
+# system_server jars automatically w/ R8.
+ifeq (true,$(PRODUCT_BROKEN_SUBOPTIMAL_ORDER_OF_SYSTEM_SERVER_JARS))
+ # If system_server jar ordering is broken, don't assume services.jar can be
+ # safely optimized in isolation, as there may be dependent jars.
+ SYSTEM_OPTIMIZE_JAVA ?= false
+else ifneq (platform:services,$(lastword $(PRODUCT_SYSTEM_SERVER_JARS)))
+ # If services is not the final jar in the dependency ordering, don't assume
+ # it can be safely optimized in isolation, as there may be dependent jars.
+ SYSTEM_OPTIMIZE_JAVA ?= false
+else
+ SYSTEM_OPTIMIZE_JAVA ?= true
+endif
$(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
+
+# Check for SupplementalApi module.
+ifeq ($(wildcard packages/modules/SupplementalApi),)
+$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,false)
+else
+$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,true)
+endif
+
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index 79639a8..eb429cd 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -128,6 +128,9 @@
LOCAL_CERTIFICATE := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))$(LOCAL_CERTIFICATE)
endif
+ # NOTE(ruperts): Consider moving the logic below out of a conditional,
+ # to avoid the possibility of silently ignoring user settings.
+
PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
@@ -142,6 +145,8 @@
$(built_module): $(LOCAL_CERTIFICATE_LINEAGE)
$(built_module): PRIVATE_CERTIFICATE_LINEAGE := $(LOCAL_CERTIFICATE_LINEAGE)
+
+ $(built_module): PRIVATE_ROTATION_MIN_SDK_VERSION := $(LOCAL_ROTATION_MIN_SDK_VERSION)
endif
ifneq ($(LOCAL_MODULE_STEM),)
@@ -275,7 +280,7 @@
endif
my_src_dir := $(LOCAL_PATH)/$(my_src_dir)
-$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
+$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem | $(ZIPALIGN) $(ZIP2ZIP) $(SIGNAPK_JAR) $(SIGNAPK_JNI_LIBRARY_PATH)
$(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
$(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 355a22e..00f5f21 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -1012,7 +1012,11 @@
$(ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS) $(LOCAL_SYSTEM_SHARED_LIBRARIES)
ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES := \
- $(ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES) $(LOCAL_RUNTIME_LIBRARIES)
+ $(ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES) $(LOCAL_RUNTIME_LIBRARIES) \
+ $(LOCAL_JAVA_LIBRARIES)
+
+ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES := \
+ $(ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES)
ifdef LOCAL_TEST_DATA
# Export the list of targets that are handled as data inputs and required
@@ -1036,6 +1040,24 @@
$(filter-out $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS),$(my_supported_variant))
##########################################################################
+## When compiling against an API-imported module, use the API import stub
+## libraries.
+##########################################################################
+ifneq ($(LOCAL_USE_VNDK),)
+ ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ apiimport_postfix := .apiimport
+ ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ apiimport_postfix := .apiimport.product
+ else
+ apiimport_postfix := .apiimport.vendor
+ endif
+
+ my_required_modules := $(foreach l,$(my_required_modules), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+ endif
+endif
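# Illustration only: the suffix rewrite above with hypothetical library names;
# the same rewrite is reused for shared, system-shared, and header libraries
# in core/binary.mk and core/cc_prebuilt_internal.mk later in this change.
API_IMPORTED_SHARED_LIBRARIES := libfoo
apiimport_postfix := .apiimport.vendor
my_required_modules := libfoo libbar
my_required_modules := $(foreach l,$(my_required_modules), \
  $(if $(filter $(l),$(API_IMPORTED_SHARED_LIBRARIES)),$(l)$(apiimport_postfix),$(l)))
$(info $(my_required_modules))  # prints: libfoo.apiimport.vendor libbar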
+
+##########################################################################
## When compiling against the VNDK, add the .vendor or .product suffix to
## required modules.
##########################################################################
@@ -1121,6 +1143,9 @@
ifdef LOCAL_IS_UNIT_TEST
ALL_MODULES.$(my_register_name).IS_UNIT_TEST := $(LOCAL_IS_UNIT_TEST)
endif
+ifdef LOCAL_TEST_OPTIONS_TAGS
+ALL_MODULES.$(my_register_name).TEST_OPTIONS_TAGS := $(LOCAL_TEST_OPTIONS_TAGS)
+endif
test_config :=
INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/binary.mk b/core/binary.mk
index 3f32fa9..6320726 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -19,7 +19,11 @@
# supply that, for example, when building libc itself.
ifdef LOCAL_IS_HOST_MODULE
ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
+ ifdef USE_HOST_MUSL
+ my_system_shared_libraries := libc_musl
+ else
my_system_shared_libraries :=
+ endif
else
my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
endif
@@ -348,9 +352,11 @@
else # LOCAL_IS_HOST_MODULE
# Add -ldl, -lpthread, -lm and -lrt to host builds to match the default behavior of
# device builds
- my_ldlibs += -ldl -lpthread -lm
- ifneq ($(HOST_OS),darwin)
- my_ldlibs += -lrt
+ ifndef USE_HOST_MUSL
+ my_ldlibs += -ldl -lpthread -lm
+ ifneq ($(HOST_OS),darwin)
+ my_ldlibs += -lrt
+ endif
endif
endif
@@ -1145,6 +1151,28 @@
$(my_static_libraries),hwasan)
endif
+###################################################################
+## When compiling against an API-imported module, use the API import
+## stub libraries.
+###################################################################
+
+apiimport_postfix := .apiimport
+
+ifneq ($(LOCAL_USE_VNDK),)
+ ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ apiimport_postfix := .apiimport.product
+ else
+ apiimport_postfix := .apiimport.vendor
+ endif
+endif
+
+my_shared_libraries := $(foreach l,$(my_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+my_header_libraries := $(foreach l,$(my_header_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_HEADER_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+
###########################################################
## When compiling against the VNDK, use LL-NDK libraries
###########################################################
diff --git a/core/board_config.mk b/core/board_config.mk
index 8074225..70c91a8 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -174,6 +174,10 @@
_build_broken_var_list := \
+ BUILD_BROKEN_CLANG_PROPERTY \
+ BUILD_BROKEN_CLANG_ASFLAGS \
+ BUILD_BROKEN_CLANG_CFLAGS \
+ BUILD_BROKEN_DEPFILE \
BUILD_BROKEN_DUP_RULES \
BUILD_BROKEN_DUP_SYSPROP \
BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES \
@@ -282,6 +286,8 @@
$(if $(filter-out true false,$($(var))), \
$(error Valid values of $(var) are "true", "false", and "". Not "$($(var))")))
+include $(BUILD_SYSTEM)/board_config_wifi.mk
+
# Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified.
TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
@@ -399,12 +405,6 @@
endef
###########################################
-# Now we can substitute with the real value of TARGET_COPY_OUT_RAMDISK
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-TARGET_COPY_OUT_RAMDISK := $(TARGET_COPY_OUT_ROOT)
-endif
-
-###########################################
# Configure whether we're building the system image
BUILDING_SYSTEM_IMAGE := true
ifeq ($(PRODUCT_BUILD_SYSTEM_IMAGE),)
@@ -553,15 +553,8 @@
# Are we building a debug vendor_boot image
BUILDING_DEBUG_VENDOR_BOOT_IMAGE :=
-# Can't build vendor_boot-debug.img if BOARD_BUILD_SYSTEM_ROOT_IMAGE is true,
-# because building debug vendor_boot image requires a ramdisk.
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- ifeq ($(PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE),true)
- $(warning PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE is true, but so is BOARD_BUILD_SYSTEM_ROOT_IMAGE. \
- Skip building the debug vendor_boot image.)
- endif
# Can't build vendor_boot-debug.img if we're not building a ramdisk.
-else ifndef BUILDING_RAMDISK_IMAGE
+ifndef BUILDING_RAMDISK_IMAGE
ifeq ($(PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE),true)
$(warning PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE is true, but we're not building a ramdisk image. \
Skip building the debug vendor_boot image.)
@@ -598,15 +591,8 @@
# Are we building a debug boot image
BUILDING_DEBUG_BOOT_IMAGE :=
-# Can't build boot-debug.img if BOARD_BUILD_SYSTEM_ROOT_IMAGE is true,
-# because building debug boot image requires a ramdisk.
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- ifeq ($(PRODUCT_BUILD_DEBUG_BOOT_IMAGE),true)
- $(warning PRODUCT_BUILD_DEBUG_BOOT_IMAGE is true, but so is BOARD_BUILD_SYSTEM_ROOT_IMAGE. \
- Skip building the debug boot image.)
- endif
# Can't build boot-debug.img if we're not building a ramdisk.
-else ifndef BUILDING_RAMDISK_IMAGE
+ifndef BUILDING_RAMDISK_IMAGE
ifeq ($(PRODUCT_BUILD_DEBUG_BOOT_IMAGE),true)
$(warning PRODUCT_BUILD_DEBUG_BOOT_IMAGE is true, but we're not building a ramdisk image. \
Skip building the debug boot image.)
@@ -927,9 +913,6 @@
.KATI_READONLY := BUILDING_SYSTEM_DLKM_IMAGE
BOARD_USES_PVMFWIMAGE :=
-ifdef BOARD_PREBUILT_PVMFWIMAGE
- BOARD_USES_PVMFWIMAGE := true
-endif
ifeq ($(PRODUCT_BUILD_PVMFW_IMAGE),true)
BOARD_USES_PVMFWIMAGE := true
endif
@@ -939,9 +922,6 @@
ifeq ($(PRODUCT_BUILD_PVMFW_IMAGE),true)
BUILDING_PVMFW_IMAGE := true
endif
-ifdef BOARD_PREBUILT_PVMFWIMAGE
- BUILDING_PVMFW_IMAGE :=
-endif
.KATI_READONLY := BUILDING_PVMFW_IMAGE
###########################################
diff --git a/core/board_config_wifi.mk b/core/board_config_wifi.mk
new file mode 100644
index 0000000..ddeb0d7
--- /dev/null
+++ b/core/board_config_wifi.mk
@@ -0,0 +1,77 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# ###############################################################
+# This file adds the Wi-Fi board variables into the soong config namespace (`wifi`)
+# ###############################################################
+
+ifdef BOARD_WLAN_DEVICE
+ $(call soong_config_set,wifi,board_wlan_device,$(BOARD_WLAN_DEVICE))
+endif
+ifdef WIFI_DRIVER_MODULE_PATH
+ $(call soong_config_set,wifi,driver_module_path,$(WIFI_DRIVER_MODULE_PATH))
+endif
+ifdef WIFI_DRIVER_MODULE_ARG
+ $(call soong_config_set,wifi,driver_module_arg,$(WIFI_DRIVER_MODULE_ARG))
+endif
+ifdef WIFI_DRIVER_MODULE_NAME
+ $(call soong_config_set,wifi,driver_module_name,$(WIFI_DRIVER_MODULE_NAME))
+endif
+ifdef WIFI_DRIVER_FW_PATH_STA
+ $(call soong_config_set,wifi,driver_fw_path_sta,$(WIFI_DRIVER_FW_PATH_STA))
+endif
+ifdef WIFI_DRIVER_FW_PATH_AP
+ $(call soong_config_set,wifi,driver_fw_path_ap,$(WIFI_DRIVER_FW_PATH_AP))
+endif
+ifdef WIFI_DRIVER_FW_PATH_P2P
+ $(call soong_config_set,wifi,driver_fw_path_p2p,$(WIFI_DRIVER_FW_PATH_P2P))
+endif
+ifdef WIFI_DRIVER_FW_PATH_PARAM
+ $(call soong_config_set,wifi,driver_fw_path_param,$(WIFI_DRIVER_FW_PATH_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_CTRL_PARAM
+ $(call soong_config_set,wifi,driver_state_ctrl_param,$(WIFI_DRIVER_STATE_CTRL_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_ON
+ $(call soong_config_set,wifi,driver_state_on,$(WIFI_DRIVER_STATE_ON))
+endif
+ifdef WIFI_DRIVER_STATE_OFF
+ $(call soong_config_set,wifi,driver_state_off,$(WIFI_DRIVER_STATE_OFF))
+endif
+ifdef WIFI_MULTIPLE_VENDOR_HALS
+ $(call soong_config_set,wifi,multiple_vendor_hals,$(WIFI_MULTIPLE_VENDOR_HALS))
+endif
+ifneq ($(wildcard vendor/google/libraries/GoogleWifiConfigLib),)
+ $(call soong_config_set,wifi,google_wifi_config_lib,true)
+endif
+ifdef WIFI_HAL_INTERFACE_COMBINATIONS
+ $(call soong_config_set,wifi,hal_interface_combinations,$(WIFI_HAL_INTERFACE_COMBINATIONS))
+endif
+ifdef WIFI_HIDL_FEATURE_AWARE
+ $(call soong_config_set,wifi,hidl_feature_aware,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DUAL_INTERFACE
+ $(call soong_config_set,wifi,hidl_feature_dual_interface,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP
+ $(call soong_config_set,wifi,hidl_feature_disable_ap,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP_MAC_RANDOMIZATION
+ $(call soong_config_set,wifi,hidl_feature_disable_ap_mac_randomization,true)
+endif
+ifdef WIFI_AVOID_IFACE_RESET_MAC_CHANGE
+ $(call soong_config_set,wifi,avoid_iface_reset_mac_change,true)
+endif
\ No newline at end of file
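# Illustration only: a hypothetical BoardConfig.mk fragment feeding the guards
# above, and the values Soong then sees in the `wifi` namespace.
#   BOARD_WLAN_DEVICE := bcmdhd
#   WIFI_DRIVER_FW_PATH_STA := "/vendor/firmware/fw_sta.bin"
#   WIFI_HIDL_FEATURE_DUAL_INTERFACE := true
# yields wifi.board_wlan_device=bcmdhd and
# wifi.driver_fw_path_sta="/vendor/firmware/fw_sta.bin"; the feature flags
# (e.g. hidl_feature_dual_interface) are set to `true` whenever the board
# variable is defined, regardless of its value.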
diff --git a/core/cc_prebuilt_internal.mk b/core/cc_prebuilt_internal.mk
index e8e01d8..2de4115 100644
--- a/core/cc_prebuilt_internal.mk
+++ b/core/cc_prebuilt_internal.mk
@@ -139,6 +139,27 @@
# my_shared_libraries).
include $(BUILD_SYSTEM)/cxx_stl_setup.mk
+# When compiling against an API-imported module, use the API import stub libraries.
+apiimport_postfix := .apiimport
+
+ifneq ($(LOCAL_USE_VNDK),)
+ ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ apiimport_postfix := .apiimport.product
+ else
+ apiimport_postfix := .apiimport.vendor
+ endif
+endif
+
+ifdef my_shared_libraries
+my_shared_libraries := $(foreach l,$(my_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+endif #my_shared_libraries
+
+ifdef my_system_shared_libraries
+my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+endif #my_system_shared_libraries
+
ifdef my_shared_libraries
ifdef LOCAL_USE_VNDK
ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
diff --git a/core/clang/TARGET_riscv64.mk b/core/clang/TARGET_riscv64.mk
new file mode 100644
index 0000000..cfb5c7d
--- /dev/null
+++ b/core/clang/TARGET_riscv64.mk
@@ -0,0 +1,10 @@
+RS_TRIPLE := renderscript64-linux-android
+RS_TRIPLE_CFLAGS := -D__riscv64__
+RS_COMPAT_TRIPLE := riscv64-linux-android
+
+TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-riscv64-android.a
+TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-riscv64-android.a
+
+# Address sanitizer clang config
+ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
+ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
diff --git a/core/cleanspec.mk b/core/cleanspec.mk
index af28954..0232a17 100644
--- a/core/cleanspec.mk
+++ b/core/cleanspec.mk
@@ -58,6 +58,12 @@
#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+$(call add-clean-step, rm -rf $(OUT_DIR)/obj/ETC/build_manifest-vendor_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/obj/ETC/build_manifest-odm_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/obj/ETC/build_manifest-product_intermediates)
+$(call add-clean-step, rm -rf $(TARGET_OUT_VENDOR)/etc/security/fsverity)
+$(call add-clean-step, rm -rf $(TARGET_OUT_ODM)/etc/security/fsverity)
+$(call add-clean-step, rm -rf $(TARGET_OUT_PRODUCT)/etc/security/fsverity)
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index b5b371c..e325760 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -134,6 +134,7 @@
LOCAL_IS_HOST_MODULE:=
LOCAL_IS_RUNTIME_RESOURCE_OVERLAY:=
LOCAL_IS_UNIT_TEST:=
+LOCAL_TEST_OPTIONS_TAGS:=
LOCAL_JACK_CLASSPATH:=
LOCAL_JACK_COVERAGE_EXCLUDE_FILTER:=
LOCAL_JACK_COVERAGE_INCLUDE_FILTER:=
@@ -152,7 +153,6 @@
LOCAL_JAR_PROCESSOR_ARGS:=
LOCAL_JAVACFLAGS:=
LOCAL_JAVA_LANGUAGE_VERSION:=
-LOCAL_JAVA_LAYERS_FILE:=
LOCAL_JAVA_LIBRARIES:=
LOCAL_JAVA_RESOURCE_DIRS:=
LOCAL_JAVA_RESOURCE_FILES:=
diff --git a/core/combo/TARGET_linux-riscv64.mk b/core/combo/TARGET_linux-riscv64.mk
new file mode 100644
index 0000000..8f8fd3c
--- /dev/null
+++ b/core/combo/TARGET_linux-riscv64.mk
@@ -0,0 +1,40 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Configuration for Linux on riscv64 as a target.
+# Included by combo/select.mk
+
+# Provide a default variant.
+ifeq ($(strip $(TARGET_ARCH_VARIANT)),)
+TARGET_ARCH_VARIANT := riscv64
+endif
+
+# Include the arch-variant-specific configuration file.
+# Its role is to define various ARCH_X86_HAVE_XXX feature macros,
+# plus initial values for TARGET_GLOBAL_CFLAGS
+#
+TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_ARCH)/$(TARGET_ARCH_VARIANT).mk
+ifeq ($(strip $(wildcard $(TARGET_ARCH_SPECIFIC_MAKEFILE))),)
+$(error Unknown $(TARGET_ARCH) architecture version: $(TARGET_ARCH_VARIANT))
+endif
+
+include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
+
+TARGET_LINKER := /system/bin/linker64
diff --git a/core/combo/arch/riscv64/riscv64.mk b/core/combo/arch/riscv64/riscv64.mk
new file mode 100644
index 0000000..0505541
--- /dev/null
+++ b/core/combo/arch/riscv64/riscv64.mk
@@ -0,0 +1,2 @@
+# This file contains feature macro definitions specific to the
+# base 'riscv64' platform ABI.
diff --git a/core/config.mk b/core/config.mk
index c0dea95..9e09faf 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -161,6 +161,12 @@
$(KATI_obsolete_var PRODUCT_CHECK_ELF_FILES,Use BUILD_BROKEN_PREBUILT_ELF_FILES instead)
$(KATI_obsolete_var ALL_GENERATED_SOURCES,ALL_GENERATED_SOURCES is no longer used)
$(KATI_obsolete_var ALL_ORIGINAL_DYNAMIC_BINARIES,ALL_ORIGINAL_DYNAMIC_BINARIES is no longer used)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_VERITY,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_VERITY_FEC,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_BOOT_SIGNER,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_VERITY_SIGNING_KEY,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var BOARD_PREBUILT_PVMFWIMAGE,pvmfw.bin is now built in AOSP and custom versions are no longer supported)
+$(KATI_obsolete_var BOARD_BUILD_SYSTEM_ROOT_IMAGE)
# Used to force goals to build. Only use for conditionally defined goals.
.PHONY: FORCE
@@ -600,15 +606,15 @@
MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
MKEROFS := $(HOST_OUT_EXECUTABLES)/mkfs.erofs
-MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
-MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
+MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage
+MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg
SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
JARJAR := $(HOST_OUT_JAVA_LIBRARIES)/jarjar.jar
DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
FAT16COPY := build/make/tools/fat16copy.py
-CHECK_ELF_FILE := build/make/tools/check_elf_file.py
+CHECK_ELF_FILE := $(HOST_OUT_EXECUTABLES)/check_elf_file$(HOST_EXECUTABLE_SUFFIX)
LPMAKE := $(HOST_OUT_EXECUTABLES)/lpmake$(HOST_EXECUTABLE_SUFFIX)
ADD_IMG_TO_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/add_img_to_target_files$(HOST_EXECUTABLE_SUFFIX)
BUILD_IMAGE := $(HOST_OUT_EXECUTABLES)/build_image$(HOST_EXECUTABLE_SUFFIX)
@@ -629,10 +635,8 @@
VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata
BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
-BOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/boot_signer
FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
VBOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/vboot_signer
-FEC := $(HOST_OUT_EXECUTABLES)/fec
DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -711,27 +715,16 @@
BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
endif
-# If PRODUCT_USE_VNDK is true and BOARD_VNDK_VERSION is not defined yet,
-# BOARD_VNDK_VERSION will be set to "current" as default.
-# PRODUCT_USE_VNDK will be true in Android-P or later launching devices.
-PRODUCT_USE_VNDK := false
-ifneq ($(PRODUCT_USE_VNDK_OVERRIDE),)
- PRODUCT_USE_VNDK := $(PRODUCT_USE_VNDK_OVERRIDE)
-else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
- # No shipping level defined
-else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),27),true)
- PRODUCT_USE_VNDK := $(PRODUCT_FULL_TREBLE)
+# Starting in Android U, non-VNDK devices are not supported.
+# WARNING: DO NOT CHANGE. If you are downstream of AOSP and change this
+# without letting upstream know it is important to you, we may do cleanup
+# that breaks this significantly. Please let us know if you are changing this.
+ifndef BOARD_VNDK_VERSION
+# READ WARNING - DO NOT CHANGE
+BOARD_VNDK_VERSION := current
+# READ WARNING - DO NOT CHANGE
endif
-ifeq ($(PRODUCT_USE_VNDK),true)
- ifndef BOARD_VNDK_VERSION
- BOARD_VNDK_VERSION := current
- endif
-endif
-
-$(KATI_obsolete_var PRODUCT_USE_VNDK,Use BOARD_VNDK_VERSION instead)
-$(KATI_obsolete_var PRODUCT_USE_VNDK_OVERRIDE,Use BOARD_VNDK_VERSION instead)
-
ifdef PRODUCT_PRODUCT_VNDK_VERSION
ifndef BOARD_VNDK_VERSION
# VNDK for product partition is not available unless BOARD_VNDK_VERSION
@@ -803,6 +796,7 @@
else
MAINLINE_SEPOLICY_DEV_CERTIFICATES := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))
endif
+.KATI_READONLY := MAINLINE_SEPOLICY_DEV_CERTIFICATES
BUILD_NUMBER_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_number.txt)
BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
@@ -819,7 +813,7 @@
# is made which breaks compatibility with the previous platform sepolicy version,
# not just on every increase in PLATFORM_SDK_VERSION. The minor version should
# be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
-sepolicy_major_vers := 32
+sepolicy_major_vers := 33
sepolicy_minor_vers := 0
ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
@@ -880,9 +874,6 @@
endif
ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
- ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- $(error BOARD_BUILD_SYSTEM_ROOT_IMAGE cannot be true for devices with dynamic partitions)
- endif
ifneq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
$(error PRODUCT_USE_DYNAMIC_PARTITION_SIZE must be true for devices with dynamic partitions)
endif
@@ -973,16 +964,6 @@
$(eval .KATI_READONLY := BOARD_$(group)_PARTITION_LIST) \
)
-# BOARD_*_PARTITION_LIST: a list of the following tokens
-valid_super_partition_list := system vendor product system_ext odm vendor_dlkm odm_dlkm system_dlkm
-$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
- $(if $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)), \
- $(error BOARD_$(group)_PARTITION_LIST contains invalid partition name \
- $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)). \
- Valid names are $(valid_super_partition_list))))
-valid_super_partition_list :=
-
-
# Define BOARD_SUPER_PARTITION_PARTITION_LIST, the sum of all BOARD_*_PARTITION_LIST
ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
$(error BOARD_SUPER_PARTITION_PARTITION_LIST should not be defined, but computed from \
@@ -1073,14 +1054,6 @@
BOARD_PREBUILT_HIDDENAPI_DIR ?=
.KATI_READONLY := BOARD_PREBUILT_HIDDENAPI_DIR
-ifdef USE_HOST_MUSL
- ifneq (,$(or $(BUILD_BROKEN_USES_BUILD_HOST_EXECUTABLE),\
- $(BUILD_BROKEN_USES_BUILD_HOST_SHARED_LIBRARY),\
- $(BUILD_BROKEN_USES_BUILD_HOST_STATIC_LIBRARY)))
- $(error USE_HOST_MUSL can't be set when native host builds are enabled in Make with BUILD_BROKEN_USES_BUILD_HOST_*)
- endif
-endif
-
# ###############################################################
# Set up final options.
# ###############################################################
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index a0ff119..35c632c 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -161,17 +161,19 @@
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
endif
-# Also disable CFI if ASAN is enabled.
+# Also disable CFI and MTE if ASAN is enabled.
ifneq ($(filter address,$(my_sanitize)),)
my_sanitize := $(filter-out cfi,$(my_sanitize))
+ my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+ my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
endif
# Disable memtag for host targets. Host executables in AndroidMk files are
# deprecated, but some partners still have them floating around.
ifdef LOCAL_IS_HOST_MODULE
- my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
- my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+ my_sanitize := $(filter-out memtag_heap memtag_stack,$(my_sanitize))
+ my_sanitize_diag := $(filter-out memtag_heap memtag_stack,$(my_sanitize_diag))
endif
# Disable sanitizers which need the UBSan runtime for host targets.
@@ -205,10 +207,13 @@
ifneq ($(filter arm x86 x86_64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
my_sanitize := $(filter-out hwaddress,$(my_sanitize))
my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
+ my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
endif
ifneq ($(filter hwaddress,$(my_sanitize)),)
my_sanitize := $(filter-out address,$(my_sanitize))
+ my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+ my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
my_sanitize := $(filter-out thread,$(my_sanitize))
my_sanitize := $(filter-out cfi,$(my_sanitize))
endif
@@ -224,21 +229,27 @@
endif
endif
-ifneq ($(filter memtag_heap,$(my_sanitize)),)
- # Add memtag ELF note.
- ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
- ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
- my_whole_static_libraries += note_memtag_heap_sync
- else
- my_whole_static_libraries += note_memtag_heap_async
- endif
+ifneq ($(filter memtag_heap memtag_stack,$(my_sanitize)),)
+ ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
+ my_cflags += -fsanitize-memtag-mode=sync
+ my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+ else
+ my_cflags += -fsanitize-memtag-mode=async
endif
- # This is all that memtag_heap does - it is not an actual -fsanitize argument.
- # Remove it from the list.
+endif
+
+ifneq ($(filter memtag_heap,$(my_sanitize)),)
+ my_cflags += -fsanitize=memtag-heap
my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
endif
-my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+ifneq ($(filter memtag_stack,$(my_sanitize)),)
+ my_cflags += -fsanitize=memtag-stack
+ my_cflags += -march=armv8a+memtag
+ my_ldflags += -march=armv8a+memtag
+ my_asflags += -march=armv8a+memtag
+ my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+endif
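# Illustration only: resulting flags for two hypothetical modules.
#   LOCAL_SANITIZE := memtag_heap with LOCAL_SANITIZE_DIAG := memtag_heap
#     -> my_cflags += -fsanitize-memtag-mode=sync -fsanitize=memtag-heap
#   LOCAL_SANITIZE := memtag_stack with no diag entry
#     -> my_cflags += -fsanitize-memtag-mode=async -fsanitize=memtag-stack
#        plus -march=armv8a+memtag on cflags, ldflags, and asflags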
# TSAN is not supported on 32-bit architectures. For non-multilib cases, make
# its use an error. For multilib cases, don't use it for the 32-bit case.
diff --git a/core/definitions.mk b/core/definitions.mk
index 1754713..31f3028 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -570,7 +570,7 @@
## Target directory for license metadata files.
###########################################################
define license-metadata-dir
-$(call generated-sources-dir-for,META,lic,)
+$(call generated-sources-dir-for,META,lic,$(filter-out $(PRODUCT_OUT)%,$(1)))
endef
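# Illustration only (hypothetical paths): targets under $(PRODUCT_OUT) keep
# sharing the single metadata dir, while other targets now get a per-target
# directory:
#   $(call license-metadata-dir,$(PRODUCT_OUT)/system/framework/foo.jar)
#     -> $(call generated-sources-dir-for,META,lic,)
#   $(call license-metadata-dir,$(HOST_OUT)/bin/foo)
#     -> $(call generated-sources-dir-for,META,lic,$(HOST_OUT)/bin/foo)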
TARGETS_MISSING_LICENSE_METADATA:=
@@ -595,7 +595,7 @@
## license metadata.
###########################################################
define declare-copy-target-license-metadata
-$(strip $(if $(filter $(OUT_DIR)%,$(2)),$(eval _dir:=$(call license-metadata-dir))\
+$(strip $(if $(filter $(OUT_DIR)%,$(2)),$(eval _dir:=$(call license-metadata-dir,$(1)))\
$(eval _tgt:=$(strip $(1)))\
$(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic)))\
$(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
@@ -641,6 +641,7 @@
$(2): PRIVATE_IS_CONTAINER := $(ALL_MODULES.$(1).IS_CONTAINER)
$(2): PRIVATE_PACKAGE_NAME := $(strip $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME))
$(2): PRIVATE_INSTALL_MAP := $(_map)
+$(2): PRIVATE_MODULE_NAME := $(1)
$(2): PRIVATE_MODULE_TYPE := $(ALL_MODULES.$(1).MODULE_TYPE)
$(2): PRIVATE_MODULE_CLASS := $(ALL_MODULES.$(1).MODULE_CLASS)
$(2): PRIVATE_INSTALL_MAP := $(_map)
@@ -651,6 +652,7 @@
mkdir -p $$(dir $$@)
mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
$$(call dump-words-to-file,\
+ $$(addprefix -mn ,$$(PRIVATE_MODULE_NAME))\
$$(addprefix -mt ,$$(PRIVATE_MODULE_TYPE))\
$$(addprefix -mc ,$$(PRIVATE_MODULE_CLASS))\
$$(addprefix -k ,$$(PRIVATE_KINDS))\
@@ -675,7 +677,7 @@
## License metadata build rule for non-module target $(1)
###########################################################
define non-module-license-metadata-rule
-$(strip $(eval _dir := $(call license-metadata-dir)))
+$(strip $(eval _dir := $(call license-metadata-dir,$(1))))
$(strip $(eval _tgt := $(strip $(1))))
$(strip $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic))))
$(strip $(eval _deps := $(sort $(filter-out 0p: :,$(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)),$(ALL_TARGETS.$(call word-colon,1,$(d)).META_LIC):$(call wordlist-colon,2,9999,$(d)))))))
@@ -738,7 +740,7 @@
endef
define _copied-target-license-metadata-rule
-$(strip $(eval _dir := $(call license-metadata-dir)))
+$(strip $(eval _dir := $(call license-metadata-dir,$(1))))
$(strip $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(1).meta_lic))))
$(strip $(eval ALL_TARGETS.$(1).META_LIC:=$(_meta)))
$(strip $(eval _dep:=))
@@ -785,7 +787,7 @@
$(strip \
$(eval _tgt := $(subst //,/,$(strip $(1)))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
- $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_KINDS := $(strip $(2))) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS := $(strip $(3))) \
$(eval ALL_NON_MODULES.$(_tgt).NOTICES := $(strip $(4))) \
@@ -826,7 +828,7 @@
$(strip \
$(eval _tgt := $(subst //,/,$(strip $(1)))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
- $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_KINDS := $(strip $(2))) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS := $(strip $(3))) \
$(eval ALL_NON_MODULES.$(_tgt).NOTICES := $(strip $(4))) \
@@ -897,9 +899,9 @@
###########################################################
define declare-license-deps
$(strip \
- $(eval _tgt := $(strip $(1))) \
+ $(eval _tgt := $(subst //,/,$(strip $(1)))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
- $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).DEPENDENCIES := $(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES) $(2))) \
)
endef
@@ -914,9 +916,9 @@
###########################################################
define declare-container-license-deps
$(strip \
- $(eval _tgt := $(strip $(1))) \
+ $(eval _tgt := $(subst //,/,$(strip $(1)))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
- $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).DEPENDENCIES := $(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES) $(2))) \
$(eval ALL_NON_MODULES.$(_tgt).IS_CONTAINER := true) \
$(eval ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS := $(strip $(ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS) $(3))) \
@@ -957,7 +959,7 @@
$(strip $(eval _all := $(call all-license-metadata)))
.PHONY: reportallnoticelibrarynames
-reportallnoticelibrarynames: PRIVATE_LIST_FILE := $(call license-metadata-dir)/filelist
+reportallnoticelibrarynames: PRIVATE_LIST_FILE := $(call license-metadata-dir,COMMON)/filelist
reportallnoticelibrarynames: | $(COMPLIANCENOTICE_SHIPPEDLIBS)
reportallnoticelibrarynames: $(_all)
@echo Reporting notice library names for at least $$(words $(_all)) license metadata files
@@ -984,13 +986,12 @@
###########################################################
define build-license-metadata
$(strip \
- $(strip $(eval _dir := $(call license-metadata-dir))) \
$(foreach t,$(sort $(ALL_0P_TARGETS)), \
$(eval ALL_TARGETS.$(t).META_LIC := 0p) \
) \
+ $(foreach t,$(sort $(ALL_COPIED_TARGETS)),$(eval $(call copied-target-license-metadata-rule,$(t)))) \
$(foreach t,$(sort $(ALL_NON_MODULES)),$(eval $(call non-module-license-metadata-rule,$(t)))) \
$(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m)))) \
- $(foreach t,$(sort $(ALL_COPIED_TARGETS)),$(eval $(call copied-target-license-metadata-rule,$(t)))) \
$(eval $(call build-all-license-metadata-rule)))
endef
@@ -2120,6 +2121,7 @@
$(PRIVATE_HOST_GLOBAL_LDFLAGS) \
) \
$(PRIVATE_LDFLAGS) \
+ $(PRIVATE_CRTBEGIN) \
$(PRIVATE_ALL_OBJECTS) \
-Wl,--whole-archive \
$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
@@ -2128,8 +2130,10 @@
$(PRIVATE_ALL_STATIC_LIBRARIES) \
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
+ $(PRIVATE_LIBCRT_BUILTINS) \
$(PRIVATE_ALL_SHARED_LIBRARIES) \
-o $@ \
+ $(PRIVATE_CRTEND) \
$(PRIVATE_LDLIBS)
endef
endif
@@ -2263,6 +2267,7 @@
ifneq ($(HOST_CUSTOM_LD_COMMAND),true)
define transform-host-o-to-executable-inner
$(hide) $(PRIVATE_CXX_LINK) \
+ $(PRIVATE_CRTBEGIN) \
$(PRIVATE_ALL_OBJECTS) \
-Wl,--whole-archive \
$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
@@ -2271,6 +2276,7 @@
$(PRIVATE_ALL_STATIC_LIBRARIES) \
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
+ $(PRIVATE_LIBCRT_BUILTINS) \
$(PRIVATE_ALL_SHARED_LIBRARIES) \
$(foreach path,$(PRIVATE_RPATHS), \
-Wl,-rpath,\$$ORIGIN/$(path)) \
@@ -2279,6 +2285,7 @@
) \
$(PRIVATE_LDFLAGS) \
-o $@ \
+ $(PRIVATE_CRTEND) \
$(PRIVATE_LDLIBS)
endef
endif
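# Illustration only: with the additions above, the host link line is ordered
# roughly as
#   crtbegin.o <objects> <static libs> <compiler-rt builtins> <shared libs> -o out crtend.o <ldlibs>
# so the C runtime init/fini objects bracket user code and the builtins can
# satisfy references from the static libraries (used by the musl host builds
# configured in core/host_*_internal.mk later in this change).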
@@ -2603,8 +2610,6 @@
$(if $(PRIVATE_SRCJARS),\@$(PRIVATE_SRCJAR_LIST_FILE)) \
|| ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \
fi
-$(if $(PRIVATE_JAVA_LAYERS_FILE), $(hide) build/make/tools/java-layers.py \
- $(PRIVATE_JAVA_LAYERS_FILE) @$(PRIVATE_JAVA_SOURCE_LIST),)
$(if $(PRIVATE_JAR_EXCLUDE_FILES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
-name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
$(addprefix -o -name , $(wordlist 2, 999, $(PRIVATE_JAR_EXCLUDE_FILES))) \
@@ -2822,6 +2827,7 @@
$(hide) mv $(1) $(1).unsigned
$(hide) $(JAVA) -Djava.library.path=$$(dirname $(SIGNAPK_JNI_LIBRARY_PATH)) -jar $(SIGNAPK_JAR) \
$(if $(strip $(PRIVATE_CERTIFICATE_LINEAGE)), --lineage $(PRIVATE_CERTIFICATE_LINEAGE)) \
+ $(if $(strip $(PRIVATE_ROTATION_MIN_SDK_VERSION)), --rotation-min-sdk-version $(PRIVATE_ROTATION_MIN_SDK_VERSION)) \
$(PRIVATE_CERTIFICATE) $(PRIVATE_PRIVATE_KEY) \
$(PRIVATE_ADDITIONAL_CERTIFICATES) $(1).unsigned $(1).signed
$(hide) mv $(1).signed $(1)
@@ -2976,6 +2982,19 @@
$$(copy-file-to-target)
endef
+# Define a rule to copy a license metadata file. For use via $(eval).
+# $(1): source license metadata file
+# $(2): destination license metadata file
+# $(3): built targets
+# $(4): installed targets
+define copy-one-license-metadata-file
+$(2): PRIVATE_BUILT=$(3)
+$(2): PRIVATE_INSTALLED=$(4)
+$(2): $(1)
+ @echo "Copy: $$@"
+ $$(call copy-license-metadata-file-to-target,$$(PRIVATE_BUILT),$$(PRIVATE_INSTALLED))
+endef
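# Illustration only: a hypothetical call site, invoked via $(eval); the actual
# call for Soong modules appears in core/notice_files.mk later in this change.
#   $(eval $(call copy-one-license-metadata-file,\
#     $(LOCAL_SOONG_LICENSE_METADATA),$(module_license_metadata),\
#     $(ALL_MODULES.$(m).BUILT),$(ALL_MODULES.$(m).INSTALLED)))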
+
define copy-and-uncompress-dexs
$(2): $(1) $(ZIPALIGN) $(ZIP2ZIP)
@echo "Uncompress dexs in: $$@"
@@ -3024,7 +3043,7 @@
# $(2): destination file
define copy-init-script-file-checked
ifdef TARGET_BUILD_UNBUNDLED
-# TODO (b/185624993): Remove the chck on TARGET_BUILD_UNBUNDLED when host_init_verifier can run
+# TODO (b/185624993): Remove the check on TARGET_BUILD_UNBUNDLED when host_init_verifier can run
# without requiring the HIDL interface map.
$(2): $(1)
else ifneq ($(HOST_OS),darwin)
@@ -3163,6 +3182,17 @@
$(hide) cp "$<" "$@"
endef
+# Same as copy-file-to-target, but assume the file is a license metadata file,
+# and append `built:` entries from $(1) and `installed:` entries from $(2).
+define copy-license-metadata-file-to-target
+@mkdir -p $(dir $@)
+$(hide) rm -f $@
+$(hide) cp "$<" "$@" $(strip \
+ $(foreach b,$(1), && (grep -F 'built: "'"$(b)"'"' "$@" >/dev/null || echo 'built: "'"$(b)"'"' >>"$@")) \
+ $(foreach i,$(2), && (grep -F 'installed: "'"$(i)"'"' "$@" >/dev/null || echo 'installed: "'"$(i)"'"' >>"$@")) \
+)
+endef
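# Illustration only (hypothetical paths): after copying foo.meta_lic, the
# command above appends one line per target not already present, e.g.
#   built: "out/target/product/x/obj/JAVA_LIBRARIES/foo_intermediates/foo.jar"
#   installed: "out/target/product/x/system/framework/foo.jar"
# The `grep -F ... || echo ...` guards keep the appends idempotent across
# incremental builds.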
+
# The same as copy-file-to-target, but use the local
# cp command instead of acp.
define copy-file-to-target-with-cp
@@ -3529,11 +3559,11 @@
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval $(if $(strip $(module_license_metadata)),\
$$(foreach f,$$(my_compat_dist_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
- $$(eval my_test_data += $$(foreach f,$$(my_compat_dist_$(suite)), $$(call word-colon,2,$$(f)))) \
+ $$(eval my_test_data += $$(my_compat_dist_$(suite))) \
)) \
$(eval $(if $(strip $(module_license_metadata)),\
$$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
- $$(eval my_test_config += $$(foreach f,$$(my_compat_dist_config_$(suite)), $$(call word-colon,2,$$(f)))) \
+ $$(eval my_test_config += $$(my_compat_dist_config_$(suite))) \
)) \
$(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,\
$(eval ALL_COMPATIBILITY_SUITES += $(suite)) \
@@ -3811,6 +3841,10 @@
-include $(TOPDIR)vendor/*/build/core/definitions.mk
-include $(TOPDIR)device/*/build/core/definitions.mk
-include $(TOPDIR)product/*/build/core/definitions.mk
+# Also include the project-specific definitions.mk files.
+-include $(TOPDIR)vendor/*/*/build/core/definitions.mk
+-include $(TOPDIR)device/*/*/build/core/definitions.mk
+-include $(TOPDIR)product/*/*/build/core/definitions.mk
# broken:
# $(foreach file,$^,$(if $(findstring,.a,$(suffix $file)),-l$(file),$(file)))
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 593ad66..d7a00d0 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -62,6 +62,8 @@
boot_zip := $(PRODUCT_OUT)/boot.zip
bootclasspath_jars := $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
+
+# TODO: remove the system_server_jars usage from boot.zip and depend directly on the system_server.zip file.
system_server_jars := \
$(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),\
$(PRODUCT_OUT)/system/framework/$(call word-colon,2,$(m)).jar)
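# Illustration only: word-colon (from definitions.mk) selects a colon-separated
# field, so for the hypothetical entry `platform:services` the loop above maps
#   $(call word-colon,2,platform:services) -> services
# and yields $(PRODUCT_OUT)/system/framework/services.jar.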
@@ -79,5 +81,34 @@
$(call dist-for-goals, droidcore, $(boot_zip))
+# Build system_server.zip, which contains the APEX system server jars and the standalone system server jars.
+system_server_zip := $(PRODUCT_OUT)/system_server.zip
+apex_system_server_jars := \
+ $(foreach m,$(PRODUCT_APEX_SYSTEM_SERVER_JARS),\
+ $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+
+apex_standalone_system_server_jars := \
+ $(foreach m,$(PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS),\
+ $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+
+standalone_system_server_jars := \
+ $(foreach m,$(PRODUCT_STANDALONE_SYSTEM_SERVER_JARS),\
+ $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+
+$(system_server_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
+$(system_server_zip): PRIVATE_APEX_SYSTEM_SERVER_JARS := $(apex_system_server_jars)
+$(system_server_zip): PRIVATE_APEX_STANDALONE_SYSTEM_SERVER_JARS := $(apex_standalone_system_server_jars)
+$(system_server_zip): PRIVATE_STANDALONE_SYSTEM_SERVER_JARS := $(standalone_system_server_jars)
+$(system_server_zip): $(system_server_jars) $(apex_system_server_jars) $(apex_standalone_system_server_jars) $(standalone_system_server_jars) $(SOONG_ZIP)
+ @echo "Create system server package: $@"
+ rm -f $@
+ $(SOONG_ZIP) -o $@ \
+ -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS)) \
+ -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_APEX_SYSTEM_SERVER_JARS)) \
+ -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_APEX_STANDALONE_SYSTEM_SERVER_JARS)) \
+ -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_STANDALONE_SYSTEM_SERVER_JARS))
+
+$(call dist-for-goals, droidcore, $(system_server_zip))
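# Illustration only: soong_zip stores each -f path relative to the preceding
# -C root, so the resulting zip holds entries such as
# system/framework/services.jar and apex/com.android.foo/javalib/foo.jar
# (the APEX and jar names here are hypothetical).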
+
endif #PRODUCT_USES_DEFAULT_ART_CONFIG
endif #WITH_DEXPREOPT
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 216168b..b303b52 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -245,7 +245,7 @@
$(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs_args)
$(my_enforced_uses_libraries): PRIVATE_DEXPREOPT_CONFIGS := $(my_dexpreopt_config_args)
$(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(my_relax_check_arg)
- $(my_enforced_uses_libraries): $(AAPT)
+ $(my_enforced_uses_libraries): $(AAPT2)
$(my_enforced_uses_libraries): $(my_verify_script)
$(my_enforced_uses_libraries): $(my_dexpreopt_dep_configs)
$(my_enforced_uses_libraries): $(my_manifest_or_apk)
@@ -254,7 +254,7 @@
$(my_verify_script) \
--enforce-uses-libraries \
--enforce-uses-libraries-status $@ \
- --aapt $(AAPT) \
+ --aapt $(AAPT2) \
$(PRIVATE_USES_LIBRARIES) \
$(PRIVATE_OPTIONAL_USES_LIBRARIES) \
$(PRIVATE_DEXPREOPT_CONFIGS) \
diff --git a/core/envsetup.mk b/core/envsetup.mk
index d116aaf..7dd9b12 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -135,15 +135,17 @@
HOST_OS := darwin
endif
-HOST_OS_EXTRA := $(shell uname -rsm)
-ifeq ($(HOST_OS),linux)
- ifneq ($(wildcard /etc/os-release),)
- HOST_OS_EXTRA += $(shell source /etc/os-release; echo $$PRETTY_NAME)
+ifeq ($(CALLED_FROM_SETUP),true)
+ HOST_OS_EXTRA := $(shell uname -rsm)
+ ifeq ($(HOST_OS),linux)
+ ifneq ($(wildcard /etc/os-release),)
+ HOST_OS_EXTRA += $(shell source /etc/os-release; echo $$PRETTY_NAME)
+ endif
+ else ifeq ($(HOST_OS),darwin)
+ HOST_OS_EXTRA += $(shell sw_vers -productVersion)
endif
-else ifeq ($(HOST_OS),darwin)
- HOST_OS_EXTRA += $(shell sw_vers -productVersion)
+ HOST_OS_EXTRA := $(subst $(space),-,$(HOST_OS_EXTRA))
endif
-HOST_OS_EXTRA := $(subst $(space),-,$(HOST_OS_EXTRA))
# BUILD_OS is the real host doing the build.
BUILD_OS := $(HOST_OS)
@@ -323,7 +325,9 @@
# likely to be relevant to the product or board configuration.
# Soong config variables are dumped as $(call soong_config_set) calls
# instead of the raw variable values, because mk2rbc can't read the
-# raw ones.
+# raw ones. A final sed command on the output file removes leading
+# spaces, since stripping them in pure make code is awkward.
define dump-variables-rbc
$(eval _dump_variables_rbc_excluded := \
BUILD_NUMBER \
@@ -345,6 +349,7 @@
$(foreach ns,$(sort $(SOONG_CONFIG_NAMESPACES)),\
$(foreach v,$(sort $(SOONG_CONFIG_$(ns))),\
$$(call soong_config_set,$(ns),$(v),$(SOONG_CONFIG_$(ns)_$(v)))$(newline))))
+$(shell sed -i "s/^ *//g" $(1))
endef
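# Illustration only: a dumped namespace entry, after the sed cleanup, looks
# like (hypothetical values):
#   $(call soong_config_set,wifi,board_wlan_device,bcmdhd)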
# Read the product specs so we can get TARGET_DEVICE and other
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index 0cf62a4..2ff9ff2 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -39,6 +39,21 @@
endif
my_libdir :=
+my_crtbegin :=
+my_crtend :=
+my_libcrt_builtins :=
+ifdef USE_HOST_MUSL
+ my_crtbegin := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtbegin_dynamic)
+ my_crtend := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtend)
+ my_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
+ $(LOCAL_BUILT_MODULE): PRIVATE_LDFLAGS += -Wl,--no-dynamic-linker
+endif
+
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTBEGIN := $(my_crtbegin)
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTEND := $(my_crtend)
+$(LOCAL_BUILT_MODULE): PRIVATE_LIBCRT_BUILTINS := $(my_libcrt_builtins)
+$(LOCAL_BUILT_MODULE): $(my_crtbegin) $(my_crtend) $(my_libcrt_builtins)
+
$(LOCAL_BUILT_MODULE): $(all_objects) $(all_libraries) $(CLANG_CXX)
$(transform-host-o-to-executable)
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 0f95202..89aa53c 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -56,10 +56,6 @@
include $(BUILD_SYSTEM)/java_common.mk
-# The layers file allows you to enforce a layering between java packages.
-# Run build/make/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-
# List of dependencies for anything that needs all java sources in place
java_sources_deps := \
$(java_sources) \
@@ -72,7 +68,6 @@
# TODO(b/143658984): goma can't handle the --system argument to javac.
#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index da20874..ae8b798 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -36,6 +36,17 @@
my_host_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBPROFILE_RT)
$(LOCAL_BUILT_MODULE): PRIVATE_HOST_LIBPROFILE_RT := $(my_host_libprofile_rt)
+ifdef USE_HOST_MUSL
+ my_crtbegin := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtbegin_so)
+ my_crtend := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtend_so)
+ my_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
+endif
+
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTBEGIN := $(my_crtbegin)
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTEND := $(my_crtend)
+$(LOCAL_BUILT_MODULE): PRIVATE_LIBCRT_BUILTINS := $(my_libcrt_builtins)
+$(LOCAL_BUILT_MODULE): $(my_crtbegin) $(my_crtend) $(my_libcrt_builtins)
+
$(LOCAL_BUILT_MODULE): \
$(all_objects) \
$(all_libraries) \
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 289d16f..5491247 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -5,6 +5,7 @@
# my_prebuilt_jni_libs
# my_installed_module_stem (from configure_module_stem.mk)
# partition_tag (from base_rules.mk)
+# partition_lib_pairs
# my_prebuilt_src_file (from prebuilt_internal.mk)
#
# Output variables:
@@ -66,13 +67,32 @@
ifeq ($(filter address,$(SANITIZE_TARGET)),)
my_symlink_target_dir := $(patsubst $(PRODUCT_OUT)%,%,\
$(my_shared_library_path))
- $(foreach lib,$(my_jni_filenames),\
- $(call symlink-file, \
- $(my_shared_library_path)/$(lib), \
- $(my_symlink_target_dir)/$(lib), \
- $(my_app_lib_path)/$(lib)) \
- $(eval $$(LOCAL_INSTALLED_MODULE) : $$(my_app_lib_path)/$$(lib)) \
- $(eval ALL_MODULES.$(my_register_name).INSTALLED += $$(my_app_lib_path)/$$(lib)))
+
+ ifdef partition_lib_pairs
+ # Support cross-partition jni lib dependency for bp modules
+ # API domain check is done in Soong
+ $(foreach pl_pair,$(partition_lib_pairs),\
+ $(eval lib_name := $(call word-colon, 1, $(pl_pair)))\
+ $(eval lib_partition := $(call word-colon, 2, $(pl_pair)))\
+ $(eval shared_library_path := $(call get_non_asan_path,\
+ $($(my_2nd_arch_prefix)TARGET_OUT$(lib_partition)_SHARED_LIBRARIES)))\
+ $(call symlink-file,\
+ $(shared_library_path)/$(lib_name).so,\
+ $(my_symlink_target_dir)/$(lib_name).so,\
+ $(my_app_lib_path)/$(lib_name).so)\
+ $(eval $$(LOCAL_INSTALLED_MODULE) : $$(my_app_lib_path)/$$(lib_name).so)\
+ $(eval ALL_MODULES.$(my_register_name).INSTALLED += $$(my_app_lib_path)/$$(lib_name).so))
+
+ else
+ # Cross-partition JNI lib dependencies are currently not supported for mk modules.
+ $(foreach lib,$(my_jni_filenames),\
+ $(call symlink-file, \
+ $(my_shared_library_path)/$(lib), \
+ $(my_symlink_target_dir)/$(lib), \
+ $(my_app_lib_path)/$(lib)) \
+ $(eval $$(LOCAL_INSTALLED_MODULE) : $$(my_app_lib_path)/$$(lib)) \
+ $(eval ALL_MODULES.$(my_register_name).INSTALLED += $$(my_app_lib_path)/$$(lib)))
+ endif # partition_lib_pairs
endif
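# Illustration only: a hypothetical pair `libfoo:_VENDOR` splits into
# lib_name=libfoo and lib_partition=_VENDOR, so shared_library_path resolves
# through $(TARGET_OUT_VENDOR_SHARED_LIBRARIES) and the app receives a symlink
# $(my_app_lib_path)/libfoo.so instead of an embedded copy.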
# Clear jni_shared_libraries to not embed it into the apk.
diff --git a/core/java.mk b/core/java.mk
index 01951c0..b13ef4d 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -200,10 +200,6 @@
$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_stubs_jar)))
ALL_MODULES.$(my_register_name).STUBS := $(full_classes_stubs_jar)
-# The layers file allows you to enforce a layering between java packages.
-# Run build/make/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
$(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE)
# Compile the java files to a .jar file.
diff --git a/core/layoutlib_fonts.mk b/core/layoutlib_fonts.mk
new file mode 100644
index 0000000..d2a814f
--- /dev/null
+++ b/core/layoutlib_fonts.mk
@@ -0,0 +1,35 @@
+# Fonts for layoutlib
+
+FONT_TEMP := $(call intermediates-dir-for,PACKAGING,fonts,HOST,COMMON)
+
+# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
+font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
+font_config := $(addprefix $(FONT_TEMP)/, $(notdir $(font_config)))
+
+$(font_config): $(FONT_TEMP)/%.xml: \
+ frameworks/base/data/fonts/%.xml
+ $(hide) mkdir -p $(dir $@)
+ $(hide) cp -vf $< $@
+
+# List of fonts on the device that we want to ship. This is all .ttf, .ttc and .otf fonts.
+fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES))
+fonts_device := $(addprefix $(FONT_TEMP)/, $(notdir $(fonts_device)))
+
+# TODO: If the font file is a symlink, reuse the font renamed from the symlink
+# target.
+$(fonts_device): $(FONT_TEMP)/%: $(TARGET_OUT)/fonts/%
+ $(hide) mkdir -p $(dir $@)
+ $(hide) cp -vf $< $@
+
+# List of all dependencies - all fonts and configuration files.
+FONT_FILES := $(fonts_device) $(font_config)
+
+.PHONY: layoutlib layoutlib-tests
+layoutlib layoutlib-tests: $(FONT_FILES)
+
+$(call dist-for-goals, layoutlib, $(foreach m,$(FONT_FILES), $(m):layoutlib_native/fonts/$(notdir $(m))))
+
+FONT_TEMP :=
+font_config :=
+fonts_device :=
+FONT_FILES :=
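# Illustration only (hypothetical font name): the static pattern rules above
# copy $(TARGET_OUT)/fonts/Roboto-Regular.ttf to $(FONT_TEMP)/Roboto-Regular.ttf,
# and dist-for-goals then publishes it as layoutlib_native/fonts/Roboto-Regular.ttf
# for the layoutlib and layoutlib-tests goals.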
diff --git a/core/main.mk b/core/main.mk
index cdbc3ef..2e39601 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1846,30 +1846,28 @@
$(INSTALLED_FILES_JSON_ROOT) \
)
- ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- $(call dist-for-goals, droidcore-unbundled, \
- $(INSTALLED_FILES_FILE_RAMDISK) \
- $(INSTALLED_FILES_JSON_RAMDISK) \
- $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
- $(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
- $(INSTALLED_FILES_FILE_VENDOR_RAMDISK) \
- $(INSTALLED_FILES_JSON_VENDOR_RAMDISK) \
- $(INSTALLED_FILES_FILE_VENDOR_KERNEL_RAMDISK) \
- $(INSTALLED_FILES_JSON_VENDOR_KERNEL_RAMDISK) \
- $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
- $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
- $(INSTALLED_DEBUG_RAMDISK_TARGET) \
- $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
- $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
- $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_TEST_HARNESS_RAMDISK_TARGET) \
- $(INSTALLED_VENDOR_TEST_HARNESS_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_RAMDISK_TARGET) \
- $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
- $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \
- )
- endif
+ $(call dist-for-goals, droidcore-unbundled, \
+ $(INSTALLED_FILES_FILE_RAMDISK) \
+ $(INSTALLED_FILES_JSON_RAMDISK) \
+ $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_FILE_VENDOR_RAMDISK) \
+ $(INSTALLED_FILES_JSON_VENDOR_RAMDISK) \
+ $(INSTALLED_FILES_FILE_VENDOR_KERNEL_RAMDISK) \
+ $(INSTALLED_FILES_JSON_VENDOR_KERNEL_RAMDISK) \
+ $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
+ $(INSTALLED_DEBUG_RAMDISK_TARGET) \
+ $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
+ $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
+ $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_TEST_HARNESS_RAMDISK_TARGET) \
+ $(INSTALLED_VENDOR_TEST_HARNESS_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_RAMDISK_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
+ $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \
+ )
ifeq ($(PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST),true)
$(call dist-for-goals, droidcore-unbundled, $(INSTALLED_BOOTIMAGE_TARGET))
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index e436b2c..2b5ceee 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -25,6 +25,7 @@
cts \
custom_images \
dicttool_aosp \
+ docs \
eng \
oem_image \
online-system-api-sdk-docs \
diff --git a/core/node_fns.mk b/core/node_fns.mk
index 2243cd7..144eb8b 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -83,27 +83,17 @@
# If needle appears multiple times, only the first occurrence
# will survive.
#
-# How it works:
-#
-# - Stick everything in haystack into a single word,
-# with "|||" separating the words.
-# - Replace occurrances of "|||$(needle)|||" with "||| |||",
-# breaking haystack back into multiple words, with spaces
-# where needle appeared.
-# - Add needle between the first and second words of haystack.
-# - Replace "|||" with spaces, breaking haystack back into
-# individual words.
-#
define uniq-word
$(strip \
$(if $(filter-out 0 1,$(words $(filter $(2),$(1)))), \
- $(eval h := |||$(subst $(space),|||,$(strip $(1)))|||) \
- $(eval h := $(subst |||$(strip $(2))|||,|||$(space)|||,$(h))) \
- $(eval h := $(word 1,$(h)) $(2) $(wordlist 2,9999,$(h))) \
- $(subst |||,$(space),$(h)) \
- , \
- $(1) \
- ))
+ $(eval _uniq_word_seen :=) \
+ $(foreach w,$(1), \
+ $(if $(filter $(2),$(w)), \
+ $(if $(_uniq_word_seen),, \
+ $(w) \
+ $(eval _uniq_word_seen := true)), \
+ $(w))), \
+ $(1)))
endef
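# Illustration only, worked example of the rewrite above:
#   $(call uniq-word,a b a c a,a) -> a b c
# The needle survives only at its first position; other words, and lists where
# the needle appears at most once, pass through unchanged.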
INHERIT_TAG := @inherit:
diff --git a/core/notice_files.mk b/core/notice_files.mk
index cbfcaa4..a5852cc 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -11,6 +11,8 @@
ifneq (,$(strip $(LOCAL_LICENSE_PACKAGE_NAME)))
license_package_name:=$(strip $(LOCAL_LICENSE_PACKAGE_NAME))
+else
+license_package_name:=
endif
ifneq (,$(strip $(LOCAL_LICENSE_INSTALL_MAP)))
@@ -127,15 +129,19 @@
ifdef my_register_name
module_license_metadata := $(call local-meta-intermediates-dir)/$(my_register_name).meta_lic
- $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(foreach bi,$(LOCAL_SOONG_BUILT_INSTALLED),$(call word-colon,1,$(bi))) \
- $(my_test_data) $(my_test_config),\
+ $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(foreach bi,$(LOCAL_SOONG_BUILT_INSTALLED),$(call word-colon,1,$(bi))),\
$(eval ALL_TARGETS.$(target).META_LIC := $(module_license_metadata)))
+ $(foreach f,$(my_test_data) $(my_test_config),\
+ $(if $(strip $(ALL_TARGETS.$(call word-colon,1,$(f)).META_LIC)), \
+ $(call declare-copy-target-license-metadata,$(call word-colon,2,$(f)),$(call word-colon,1,$(f))), \
+ $(eval ALL_TARGETS.$(call word-colon,2,$(f)).META_LIC := $(module_license_metadata))))
+
ALL_MODULES.$(my_register_name).META_LIC := $(strip $(ALL_MODULES.$(my_register_name).META_LIC) $(module_license_metadata))
ifdef LOCAL_SOONG_LICENSE_METADATA
# Soong modules have already produced a license metadata file, copy it to where Make expects it.
- $(eval $(call copy-one-file, $(LOCAL_SOONG_LICENSE_METADATA), $(module_license_metadata)))
+ $(eval $(call copy-one-license-metadata-file, $(LOCAL_SOONG_LICENSE_METADATA), $(module_license_metadata),$(ALL_MODULES.$(my_register_name).BUILT),$(ALL_MODULES.$(my_register_name).INSTALLED)))
else
# Make modules don't have enough information to produce a license metadata rule until after fix-notice-deps
# has been called, store the necessary information until later.
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index d8d3c78..db7c422 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -5,7 +5,7 @@
ifneq (,$(SYSTEM_NOTICE_DEPS))
-SYSTEM_NOTICE_DEPS += $(UNMOUNTED_NOTICE_DEPS)
+SYSTEM_NOTICE_DEPS += $(UNMOUNTED_NOTICE_DEPS) $(UNMOUNTED_NOTICE_VENDOR_DEPS)
ifneq ($(PRODUCT_NOTICE_SPLIT),true)
$(eval $(call html-notice-rule,$(target_notice_file_html_gz),"System image",$(system_notice_file_message),$(SYSTEM_NOTICE_DEPS),$(SYSTEM_NOTICE_DEPS)))
@@ -23,7 +23,6 @@
$(call declare-0p-target,$(target_notice_file_xml_gz))
$(call declare-0p-target,$(installed_notice_html_or_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
endif
.PHONY: vendorlicense
@@ -31,22 +30,21 @@
ifneq (,$(VENDOR_NOTICE_DEPS))
-VENDOR_NOTICE_DEPS += $(UNMOUNTED_NOTICE_DEPS)
+VENDOR_NOTICE_DEPS += $(UNMOUNTED_NOTICE_VENDOR_DEPS)
$(eval $(call text-notice-rule,$(target_vendor_notice_file_txt),"Vendor image", \
"Notices for files contained in all filesystem images except system/system_ext/product/odm/vendor_dlkm/odm_dlkm in this directory:", \
- $(VENDOR_NOTICE_DEPS)))
+ $(VENDOR_NOTICE_DEPS),$(VENDOR_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_vendor_notice_file_xml_gz),"Vendor image", \
"Notices for files contained in all filesystem images except system/system_ext/product/odm/vendor_dlkm/odm_dlkm in this directory:", \
- $(VENDOR_NOTICE_DEPS)))
+ $(VENDOR_NOTICE_DEPS),$(VENDOR_NOTICE_DEPS)))
$(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_vendor_notice_file_xml_gz))
$(call declare-0p-target,$(installed_vendor_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
endif
.PHONY: odmlicense
@@ -55,18 +53,17 @@
ifneq (,$(ODM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_odm_notice_file_txt),"ODM filesystem image", \
"Notices for files contained in the odm filesystem image in this directory:", \
- $(ODM_NOTICE_DEPS)))
+ $(ODM_NOTICE_DEPS),$(ODM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_odm_notice_file_xml_gz),"ODM filesystem image", \
"Notices for files contained in the odm filesystem image in this directory:", \
- $(ODM_NOTICE_DEPS)))
+ $(ODM_NOTICE_DEPS),$(ODM_NOTICE_DEPS)))
$(installed_odm_notice_xml_gz): $(target_odm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_odm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_odm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_notice_xml_gz)
endif
.PHONY: oemlicense
@@ -78,18 +75,17 @@
ifneq (,$(PRODUCT_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_product_notice_file_txt),"Product image", \
"Notices for files contained in the product filesystem image in this directory:", \
- $(PRODUCT_NOTICE_DEPS)))
+ $(PRODUCT_NOTICE_DEPS),$(PRODUCT_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_product_notice_file_xml_gz),"Product image", \
"Notices for files contained in the product filesystem image in this directory:", \
- $(PRODUCT_NOTICE_DEPS)))
+ $(PRODUCT_NOTICE_DEPS),$(PRODUCT_NOTICE_DEPS)))
$(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_product_notice_file_xml_gz))
$(call declare-0p-target,$(installed_product_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_notice_xml_gz)
endif
.PHONY: systemextlicense
@@ -98,18 +94,17 @@
ifneq (,$(SYSTEM_EXT_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_system_ext_notice_file_txt),"System_ext image", \
"Notices for files contained in the system_ext filesystem image in this directory:", \
- $(SYSTEM_EXT_NOTICE_DEPS)))
+ $(SYSTEM_EXT_NOTICE_DEPS),$(SYSTEM_EXT_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_system_ext_notice_file_xml_gz),"System_ext image", \
"Notices for files contained in the system_ext filesystem image in this directory:", \
- $(SYSTEM_EXT_NOTICE_DEPS)))
+ $(SYSTEM_EXT_NOTICE_DEPS),$(SYSTEM_EXT_NOTICE_DEPS)))
$(installed_system_ext_notice_xml_gz): $(target_system_ext_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_system_ext_notice_file_xml_gz))
$(call declare-0p-target,$(installed_system_ext_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_ext_notice_xml_gz)
endif
.PHONY: vendor_dlkmlicense
@@ -118,18 +113,17 @@
ifneq (,$(VENDOR_DLKM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_vendor_dlkm_notice_file_txt),"Vendor_dlkm image", \
"Notices for files contained in the vendor_dlkm filesystem image in this directory:", \
- $(VENDOR_DLKM_NOTICE_DEPS)))
+ $(VENDOR_DLKM_NOTICE_DEPS),$(VENDOR_DLKM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_vendor_dlkm_notice_file_xml_gz),"Vendor_dlkm image", \
"Notices for files contained in the vendor_dlkm filesystem image in this directory:", \
- $(VENDOR_DLKM_NOTICE_DEPS)))
+ $(VENDOR_DLKM_NOTICE_DEPS),$(VENDOR_DLKM_NOTICE_DEPS)))
$(installed_vendor_dlkm_notice_xml_gz): $(target_vendor_dlkm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_vendor_dlkm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_vendor_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_dlkm_notice_xml_gz)
endif
.PHONY: odm_dlkmlicense
@@ -138,18 +132,17 @@
ifneq (,$(ODM_DLKM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_odm_dlkm_notice_file_txt),"ODM_dlkm filesystem image", \
"Notices for files contained in the odm_dlkm filesystem image in this directory:", \
- $(ODM_DLKM_NOTICE_DEPS)))
+ $(ODM_DLKM_NOTICE_DEPS),$(ODM_DLKM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_odm_dlkm_notice_file_xml_gz),"ODM_dlkm filesystem image", \
"Notices for files contained in the odm_dlkm filesystem image in this directory:", \
- $(ODM_DLMK_NOTICE_DEPS)))
+ $(ODM_DLKM_NOTICE_DEPS),$(ODM_DLKM_NOTICE_DEPS)))
$(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_odm_dlkm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_odm_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_dlkm_notice_xml_gz)
endif
.PHONY: system_dlkmlicense
@@ -158,18 +151,17 @@
ifneq (,$(SYSTEM_DLKM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_system_dlkm_notice_file_txt),"System_dlkm filesystem image", \
"Notices for files contained in the system_dlkm filesystem image in this directory:", \
- $(SYSTEM_DLKM_NOTICE_DEPS)))
+ $(SYSTEM_DLKM_NOTICE_DEPS),$(SYSTEM_DLKM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_system_dlkm_notice_file_xml_gz),"System_dlkm filesystem image", \
"Notices for files contained in the system_dlkm filesystem image in this directory:", \
- $(SYSTEM_DLMK_NOTICE_DEPS)))
+ $(SYSTEM_DLKM_NOTICE_DEPS),$(SYSTEM_DLKM_NOTICE_DEPS)))
$(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_system_dlkm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_system_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_dlkm_notice_xml_gz)
endif
endif # not TARGET_BUILD_APPS
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 8199ad2..c7a173b 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -481,6 +481,8 @@
$(LOCAL_BUILT_MODULE): $(LOCAL_CERTIFICATE_LINEAGE)
$(LOCAL_BUILT_MODULE): PRIVATE_CERTIFICATE_LINEAGE := $(LOCAL_CERTIFICATE_LINEAGE)
+$(LOCAL_BUILT_MODULE): PRIVATE_ROTATION_MIN_SDK_VERSION := $(LOCAL_ROTATION_MIN_SDK_VERSION)
+
# Set an actual_partition_tag (calculated in base_rules.mk) for the package.
PACKAGES.$(LOCAL_PACKAGE_NAME).PARTITION := $(actual_partition_tag)
diff --git a/core/product.mk b/core/product.mk
index 53fee1c..dcfdf1f 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -136,10 +136,7 @@
# PRODUCT_BOOT_JARS, so that device-specific jars go after common jars.
_product_list_vars += PRODUCT_BOOT_JARS_EXTRA
-_product_single_value_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
_product_single_value_vars += PRODUCT_SUPPORTS_VBOOT
-_product_single_value_vars += PRODUCT_SUPPORTS_VERITY
-_product_single_value_vars += PRODUCT_SUPPORTS_VERITY_FEC
_product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
# List of system_server classpath jars on the platform.
_product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
@@ -168,7 +165,6 @@
_product_list_vars += PRODUCT_LOADED_BY_PRIVILEGED_MODULES
_product_single_value_vars += PRODUCT_VBOOT_SIGNING_KEY
_product_single_value_vars += PRODUCT_VBOOT_SIGNING_SUBKEY
-_product_single_value_vars += PRODUCT_VERITY_SIGNING_KEY
_product_single_value_vars += PRODUCT_SYSTEM_VERITY_PARTITION
_product_single_value_vars += PRODUCT_VENDOR_VERITY_PARTITION
_product_single_value_vars += PRODUCT_PRODUCT_VERITY_PARTITION
@@ -360,20 +356,20 @@
# This option is only meant to be set by compliance GSI targets.
_product_single_value_vars += PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT
-# If set, metadata files for the following artifacts will be generated.
-# - system/framework/*.jar
-# - system/framework/oat/<arch>/*.{oat,vdex,art}
-# - system/etc/boot-image.prof
-# - system/etc/dirty-image-objects
-# One fsverity metadata container file per one input file will be generated in
-# system.img, with a suffix ".fsv_meta". e.g. a container file for
-# "/system/framework/foo.jar" will be "system/framework/foo.jar.fsv_meta".
-_product_single_value_vars += PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA
+# If set, fsverity metadata files will be generated for each file in the
+# allowlist, plus a manifest APK per partition. For example,
+# /system/framework/service.jar will come with service.jar.fsv_meta in the same
+# directory; the file information will also be included in
+# /system/etc/security/fsverity/BuildManifest.apk
+_product_single_value_vars += PRODUCT_FSVERITY_GENERATE_METADATA
# If true, sets the default for MODULE_BUILD_FROM_SOURCE. This overrides
# BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE but not an explicitly set value.
_product_single_value_vars += PRODUCT_MODULE_BUILD_FROM_SOURCE
+# If true, installs a full version of com.android.virt APEX.
+_product_single_value_vars += PRODUCT_AVF_ENABLED
+
.KATI_READONLY := _product_single_value_vars _product_list_vars
_product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
@@ -404,7 +400,7 @@
$(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
$(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
$(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
- $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
+ $(call dump-inherit,$(current_mk),$(1)) \
$(call dump-config-vals,$(current_mk),inherit))
endef
diff --git a/core/product_config.mk b/core/product_config.mk
index 540289a..e03ae2b 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -429,7 +429,7 @@
# Show a warning wall of text if non-compliance-GSI products set this option.
ifdef PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT
- ifeq (,$(filter gsi_arm gsi_arm64 gsi_x86 gsi_x86_64 gsi_car_arm64 gsi_car_x86_64,$(PRODUCT_NAME)))
+ ifeq (,$(filter gsi_arm gsi_arm64 gsi_x86 gsi_x86_64 gsi_car_arm64 gsi_car_x86_64 gsi_tv_arm gsi_tv_arm64,$(PRODUCT_NAME)))
$(warning PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT is set but \
PRODUCT_NAME ($(PRODUCT_NAME)) doesn't look like a GSI for compliance \
testing. This is a special configuration for compliance GSI, so do make \
@@ -473,6 +473,9 @@
ifneq (,$(call math_gt_or_eq,29,$(PRODUCT_SHIPPING_API_LEVEL)))
PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29)
endif
+ ifneq (,$(call math_gt_or_eq,33,$(PRODUCT_SHIPPING_API_LEVEL)))
+ PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33)
+ endif
endif
# If build command defines OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS,
diff --git a/core/proguard.flags b/core/proguard.flags
index aee5271..53f63d8 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -9,10 +9,15 @@
# Add this flag in your package's own configuration if it's needed.
#-flattenpackagehierarchy
-# Keep classes and methods that have the guava @VisibleForTesting annotation
--keep @**.VisibleForTesting class *
--keepclassmembers class * {
-@**.VisibleForTesting *;
+# Keep classes and methods that have @VisibleForTesting annotations, except in
+# intermediate libraries that export those annotations (e.g., androidx, guava).
+# This avoids keeping library-specific test code that isn't actually needed
+# for platform testing.
+# TODO(b/239961360): Migrate away from androidx.annotation.VisibleForTesting
+# and com.google.common.annotations.VisibleForTesting use in platform code.
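+# In the class filters below, a leading "!" excludes matching classes and the
+# list is evaluated in order, so the rules apply to every class except those
+# under androidx.** and com.google.common.**.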
+-keep @**.VisibleForTesting class !androidx.**,!com.google.common.**,*
+-keepclassmembers class !androidx.**,!com.google.common.**,* {
+ @**.VisibleForTesting *;
}
# Keep rule for members that are needed solely to keep alive downstream weak
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 30c2341..7e7b270 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -2,6 +2,14 @@
# that isn't explicitly part of the API
-dontskipnonpubliclibraryclasses -dontskipnonpubliclibraryclassmembers
+# Preserve line number information for debugging stack traces.
+-keepattributes SourceFile,LineNumberTable
+
+# Annotations are implemented as attributes, so we have to explicitly keep them.
+# Keep all runtime-visible annotations like RuntimeVisibleParameterAnnotations
+# and RuntimeVisibleTypeAnnotations, as well as associated defaults.
+-keepattributes RuntimeVisible*Annotation*,AnnotationDefault
+
# For enumeration classes, see http://proguard.sourceforge.net/manual/examples.html#enumerations
-keepclassmembers enum * {
public static **[] values();
@@ -48,7 +56,7 @@
# -keep class * extends android.app.BackupAgent
# Parcelable CREATORs must be kept for Parcelable functionality
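+# Using -keepclassmembers (rather than -keep) retains CREATOR only on classes
+# that survive shrinking, so entirely unreferenced Parcelables can be removed.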
--keep class * implements android.os.Parcelable {
+-keepclassmembers class * implements android.os.Parcelable {
public static final ** CREATOR;
}
@@ -70,9 +78,23 @@
# has a fallback, but again, don't use Futures.getChecked on Android regardless.
-dontwarn java.lang.ClassValue
+# Ignore missing annotation references for various support libraries.
+# While this is not ideal, it should be relatively safe given that
+# 1) runtime-visible annotations will still be kept, and 2) compile-time
+# annotations are stripped by R8 anyway.
+# Note: The ** prefix is used to accommodate jarjar repackaging.
+# TODO(b/242088131): Remove these exemptions after resolving transitive libs
+# dependencies that are provided to R8.
+-dontwarn **android**.annotation*.**
+-dontwarn **com.google.errorprone.annotations.**
+-dontwarn javax.annotation.**
+-dontwarn org.checkerframework.**
+-dontwarn org.jetbrains.annotations.**
+
# Less spammy.
-dontnote
# The lite proto runtime uses reflection to access fields based on the names in
-# the schema, keep all the fields.
--keepclassmembers class * extends com.google.protobuf.MessageLite { <fields>; }
+# the schema, keep all the fields. Wildcard is used to apply the rule to classes
+# that have been renamed with jarjar.
+-keepclassmembers class * extends **.protobuf.MessageLite { <fields>; }
diff --git a/core/rbe.mk b/core/rbe.mk
index 90328d3..65abde5 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -46,12 +46,6 @@
cxx_compare := false
endif
- ifdef RBE_CXX_COMPARE
- cxx_compare := $(RBE_CXX_COMPARE)
- else
- cxx_compare := "false"
- endif
-
ifdef RBE_JAVAC_EXEC_STRATEGY
javac_exec_strategy := $(RBE_JAVAC_EXEC_STRATEGY)
else
@@ -87,11 +81,11 @@
endif
ifdef RBE_R8
- R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+ R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=$(JAVA))
endif
ifdef RBE_D8
- D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+ D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=$(JAVA))
endif
rbe_dir :=
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml
index 483b957..56d2312 100644
--- a/core/robolectric_test_config_template.xml
+++ b/core/robolectric_test_config_template.xml
@@ -18,7 +18,7 @@
<option name="test-suite-tag" value="robolectric" />
<option name="test-suite-tag" value="robolectric-tests" />
- <option name="java-folder" value="prebuilts/jdk/jdk11/linux-x86/" />
+ <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" />
<option name="exclude-paths" value="java" />
<option name="use-robolectric-resources" value="true" />
@@ -26,5 +26,12 @@
<test class="com.android.tradefed.testtype.IsolatedHostTest" >
<option name="jar" value="{MODULE}.jar" />
+ <option name="java-flags" value="--add-modules=jdk.compiler"/>
+ <option name="java-flags" value="--add-opens=java.base/java.lang=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-opens=java.base/java.lang.reflect=ALL-UNNAMED"/>
+ <!-- b/238100560 -->
+ <option name="java-flags" value="--add-opens=java.base/jdk.internal.util.random=ALL-UNNAMED"/>
+ <!-- b/251387255 -->
+ <option name="java-flags" value="--add-opens=java.base/java.io=ALL-UNNAMED"/>
</test>
</configuration>
diff --git a/core/sdk_font.mk b/core/sdk_font.mk
deleted file mode 100644
index 1742925..0000000
--- a/core/sdk_font.mk
+++ /dev/null
@@ -1,66 +0,0 @@
-###############################################################################
-# Fonts shipped with the SDK need to be renamed for Java to handle them
-# properly. Hence, a special script is used to rename the fonts. We bundle all
-# the fonts that are shipped on a newer non-space-constrained device. However,
-# OpenType fonts used on these devices are not supported by Java. Their
-# replacements are added separately.
-###############################################################################
-
-
-# The script that renames the font.
-sdk_font_rename_script := frameworks/layoutlib/rename_font/build_font_single.py
-
-# Location of the fonttools library that the above script depends on.
-fonttools_lib := external/fonttools/Lib
-
-# A temporary location to store the renamed fonts. atree picks all files in
-# this directory and bundles it with the SDK.
-SDK_FONT_TEMP := $(call intermediates-dir-for,PACKAGING,sdk-fonts,HOST,COMMON)
-
-# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
-sdk_font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
-sdk_font_config := $(addprefix $(SDK_FONT_TEMP)/standard/, $(notdir $(sdk_font_config)))
-
-$(sdk_font_config): $(SDK_FONT_TEMP)/standard/%.xml: \
- frameworks/base/data/fonts/%.xml
- $(hide) mkdir -p $(dir $@)
- $(hide) cp -vf $< $@
-
-# List of fonts on the device that we want to ship. This is all .ttf fonts.
-sdk_fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf, $(INTERNAL_SYSTEMIMAGE_FILES))
-sdk_fonts_device := $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_fonts_device)))
-
-# Macro to rename the font.
-sdk_rename_font = PYTHONPATH=$$PYTHONPATH:$(fonttools_lib) $(sdk_font_rename_script) \
- $1 $2
-
-# TODO: If the font file is a symlink, reuse the font renamed from the symlink
-# target.
-$(sdk_fonts_device): $(SDK_FONT_TEMP)/%.ttf: $(TARGET_OUT)/fonts/%.ttf \
- $(sdk_font_rename_script)
- $(hide) mkdir -p $(dir $@)
- $(hide) $(call sdk_rename_font,$<,$@)
-
-# List of all dependencies - all fonts and configuration files.
-SDK_FONT_DEPS := $(sdk_fonts_device) $(sdk_font_config)
-
-# Define a macro to create rule for addititional fonts that we want to include
-# in the SDK.
-# $1 Output font name
-# $2 Source font path
-define sdk-extra-font-rule
-fontfullname := $$(SDK_FONT_TEMP)/$1
-ifeq ($$(filter $$(fontfullname),$$(sdk_fonts_device)),)
-SDK_FONT_DEPS += $$(fontfullname)
-$$(fontfullname): $2 $$(sdk_font_rename_script)
- $$(hide) mkdir -p $$(dir $$@)
- $$(hide) $$(call sdk_rename_font,$$<,$$@)
-endif
-fontfullname :=
-endef
-
-# These extra fonts are used as a replacement for OpenType fonts.
-$(eval $(call sdk-extra-font-rule,NanumGothic.ttf,external/naver-fonts/NanumGothic.ttf))
-$(eval $(call sdk-extra-font-rule,DroidSansFallback.ttf,frameworks/base/data/fonts/DroidSansFallbackFull.ttf))
-
-sdk-extra-font-rule :=
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index d771d22..786a755 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -165,12 +165,14 @@
ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
my_2nd_arch_prefix :=
LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
+ partition_lib_pairs := $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
endif
ifdef TARGET_2ND_ARCH
ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
+ partition_lib_pairs := $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
endif
endif
@@ -178,6 +180,7 @@
my_embed_jni :=
my_prebuilt_jni_libs :=
my_2nd_arch_prefix :=
+partition_lib_pairs :=
PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
ifndef LOCAL_CERTIFICATE
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 7b8f6df..b000df6 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -34,6 +34,7 @@
$(call add_json_str, Platform_preview_sdk_version, $(PLATFORM_PREVIEW_SDK_VERSION))
$(call add_json_str, Platform_base_os, $(PLATFORM_BASE_OS))
$(call add_json_str, Platform_version_last_stable, $(PLATFORM_VERSION_LAST_STABLE))
+$(call add_json_str, Platform_version_known_codenames, $(PLATFORM_VERSION_KNOWN_CODENAMES))
$(call add_json_str, Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
@@ -93,6 +94,7 @@
$(call add_json_list, AAPTPrebuiltDPI, $(PRODUCT_AAPT_PREBUILT_DPI))
$(call add_json_str, DefaultAppCertificate, $(PRODUCT_DEFAULT_DEV_CERTIFICATE))
+$(call add_json_str, MainlineSepolicyDevCertificates, $(MAINLINE_SEPOLICY_DEV_CERTIFICATES))
$(call add_json_str, AppsDefaultVersionName, $(APPS_DEFAULT_VERSION_NAME))
@@ -249,7 +251,7 @@
$(foreach namespace,$(SOONG_CONFIG_NAMESPACES),\
$(call add_json_map, $(namespace))\
$(foreach key,$(SOONG_CONFIG_$(namespace)),\
- $(call add_json_str,$(key),$(SOONG_CONFIG_$(namespace)_$(key))))\
+ $(call add_json_str,$(key),$(subst ",\",$(SOONG_CONFIG_$(namespace)_$(key)))))\
$(call end_json_map))
$(call end_json_map)
@@ -273,6 +275,10 @@
$(call add_json_str, ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
+$(call add_json_bool, BuildBrokenClangProperty, $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
+$(call add_json_bool, BuildBrokenClangAsFlags, $(filter true,$(BUILD_BROKEN_CLANG_ASFLAGS)))
+$(call add_json_bool, BuildBrokenClangCFlags, $(filter true,$(BUILD_BROKEN_CLANG_CFLAGS)))
+$(call add_json_bool, BuildBrokenDepfile, $(filter true,$(BUILD_BROKEN_DEPFILE)))
$(call add_json_bool, BuildBrokenEnforceSyspropOwner, $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
$(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
$(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
@@ -291,6 +297,8 @@
$(call add_json_bool, GenerateAidlNdkPlatformBackend, $(filter true,$(NEED_AIDL_NDK_PLATFORM_BACKEND)))
+$(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
+
$(call json_end)
$(file >$(SOONG_VARIABLES).tmp,$(json_contents))
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 61c07ba..b51818a 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -47,10 +47,18 @@
echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
)\
- $(if $(filter system vendor odm,$(1)),\
- echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST) " >> $(2);\
- echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
- echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ $(if $(filter true,$(ZYGOTE_FORCE_64)),\
+ $(if $(filter vendor,$(1)),\
+ echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist32=" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ )\
+ ,\
+ $(if $(filter system vendor odm,$(1)),\
+ echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST)" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ )\
)\
echo "ro.$(1).build.date=`$(DATE_FROM_FILE)`" >> $(2);\
echo "ro.$(1).build.date.utc=`$(DATE_FROM_FILE) +%s`" >> $(2);\
@@ -261,7 +269,6 @@
BUILD_USERNAME="$(BUILD_USERNAME)" \
BUILD_HOSTNAME="$(BUILD_HOSTNAME)" \
BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
- BOARD_BUILD_SYSTEM_ROOT_IMAGE="$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)" \
BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT="$(BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT)" \
PLATFORM_VERSION="$(PLATFORM_VERSION)" \
PLATFORM_DISPLAY_VERSION="$(PLATFORM_DISPLAY_VERSION)" \
@@ -282,6 +289,7 @@
TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
+ ZYGOTE_FORCE_64_BIT="$(ZYGOTE_FORCE_64_BIT)" \
bash $(BUILDINFO_SH) > $@
ifdef TARGET_SYSTEM_PROP
diff --git a/core/tasks/build_custom_images.mk b/core/tasks/build_custom_images.mk
index c9b07da..680ad11 100644
--- a/core/tasks/build_custom_images.mk
+++ b/core/tasks/build_custom_images.mk
@@ -62,8 +62,6 @@
CUSTOM_IMAGE_MODULES \
CUSTOM_IMAGE_COPY_FILES \
CUSTOM_IMAGE_SELINUX \
- CUSTOM_IMAGE_SUPPORT_VERITY \
- CUSTOM_IMAGE_SUPPORT_VERITY_FEC \
CUSTOM_IMAGE_VERITY_BLOCK_DEVICE \
CUSTOM_IMAGE_AVB_HASH_ENABLE \
CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS \
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index c282268..c8b1183 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -211,7 +211,7 @@
# 3 - Format of the report
define generate-coverage-report-cts
$(hide) mkdir -p $(dir $@)
- $(hide) $(PRIVATE_CTS_API_COVERAGE_EXE) -d $(PRIVATE_DEXDEPS_EXE) -a $(PRIVATE_API_XML_DESC) -n $(PRIVATE_NAPI_XML_DESC) -f $(3) -o $@ $(2)
+ $(hide) $(PRIVATE_CTS_API_COVERAGE_EXE) -j 8 -d $(PRIVATE_DEXDEPS_EXE) -a $(PRIVATE_API_XML_DESC) -n $(PRIVATE_NAPI_XML_DESC) -f $(3) -o $@ $(2)
@ echo $(1): file://$$(cd $(dir $@); pwd)/$(notdir $@)
endef
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index 5252394..5726ee2 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -42,16 +42,24 @@
# Copy kernel test modules to testcases directories
include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
-kernel_test_copy_pairs := \
- $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_host_out))
-copy_kernel_tests := $(call copy-many-files,$(kernel_test_copy_pairs))
+ltp_copy_pairs := \
+ $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_host_out))
+kselftest_copy_pairs := \
+ $(call target-native-copy-pairs,$(kernel_kselftest_modules),$(kernel_kselftest_host_out))
+copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs))
+copy_kselftest_tests := $(call copy-many-files,$(kselftest_copy_pairs))
-# PHONY target to be used to build and test `vts_kernel_tests` without building full vts
-.PHONY: vts_kernel_tests
-vts_kernel_tests: $(copy_kernel_tests)
+# PHONY targets to build and test `vts_kernel_ltp_tests` and `vts_kernel_kselftest_tests` without building full vts
+.PHONY: vts_kernel_ltp_tests
+vts_kernel_ltp_tests: $(copy_ltp_tests)
-$(general_tests_zip) : $(copy_kernel_tests)
-$(general_tests_zip) : PRIVATE_KERNEL_TEST_HOST_OUT := $(kernel_test_host_out)
+.PHONY: vts_kernel_kselftest_tests
+vts_kernel_kselftest_tests: $(copy_kselftest_tests)
+
+$(general_tests_zip) : $(copy_ltp_tests)
+$(general_tests_zip) : $(copy_kselftest_tests)
+$(general_tests_zip) : PRIVATE_KERNEL_LTP_HOST_OUT := $(kernel_ltp_host_out)
+$(general_tests_zip) : PRIVATE_KERNEL_KSELFTEST_HOST_OUT := $(kernel_kselftest_host_out)
$(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip)
$(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_host_shared_libs_zip)
$(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
@@ -64,7 +72,8 @@
rm -f $@ $(PRIVATE_general_tests_list_zip)
mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools
echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
- find $(PRIVATE_KERNEL_TEST_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
+ find $(PRIVATE_KERNEL_LTP_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
+ find $(PRIVATE_KERNEL_KSELFTEST_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 8097535..e83d408 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -24,10 +24,14 @@
'"classes_jar": [$(foreach w,$(sort $(ALL_MODULES.$(m).CLASSES_JAR)),"$(w)", )], ' \
'"test_mainline_modules": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES)),"$(w)", )], ' \
'"is_unit_test": "$(ALL_MODULES.$(m).IS_UNIT_TEST)", ' \
+ '"test_options_tags": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_OPTIONS_TAGS)),"$(w)", )], ' \
'"data": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_DATA)),"$(w)", )], ' \
'"runtime_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES)),"$(w)", )], ' \
+ '"static_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).LOCAL_STATIC_LIBRARIES)),"$(w)", )], ' \
'"data_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_DATA_BINS)),"$(w)", )], ' \
'"supported_variants": [$(foreach w,$(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS)),"$(w)", )], ' \
+ '"host_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET)),"$(w)", )], ' \
+ '"target_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST)),"$(w)", )], ' \
'},\n' \
) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
$(hide) echo '}' >> $@
@@ -37,3 +41,9 @@
$(call dist-for-goals, general-tests, $(MODULE_INFO_JSON))
$(call dist-for-goals, droidcore-unbundled, $(MODULE_INFO_JSON))
+
+# On every build, generate an all_modules.txt file to be used for autocompleting
+# the m command. After timing this using $(shell date +"%s.%3N"), it only adds
+# 0.01 seconds to the internal master build, and will only rerun on builds that
+# rerun kati.
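+# The file is read by the allmod() helper and the module-name completion
+# functions in envsetup.sh.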
+$(file >$(PRODUCT_OUT)/all_modules.txt,$(subst $(space),$(newline),$(ALL_MODULES)))
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index f9ae2c1..2626120 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -91,9 +91,6 @@
$(my_built_custom_image): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
$(my_built_custom_image): PRIVATE_PICKUP_FILES := $(my_pickup_files)
$(my_built_custom_image): PRIVATE_SELINUX := $(CUSTOM_IMAGE_SELINUX)
-$(my_built_custom_image): PRIVATE_SUPPORT_VERITY := $(CUSTOM_IMAGE_SUPPORT_VERITY)
-$(my_built_custom_image): PRIVATE_SUPPORT_VERITY_FEC := $(CUSTOM_IMAGE_SUPPORT_VERITY_FEC)
-$(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCT_VERITY_SIGNING_KEY)
$(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
$(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
$(my_built_custom_image): PRIVATE_AVB_AVBTOOL := $(AVBTOOL)
@@ -108,9 +105,6 @@
else ifneq (,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
$(error Cannot set both CUSTOM_IMAGE_AVB_HASH_ENABLE and CUSTOM_IMAGE_AVB_HASHTREE_ENABLE to true)
endif
-ifeq (true,$(CUSTOM_IMAGE_SUPPORT_VERITY_FEC))
- $(my_built_custom_image): $(FEC)
-endif
$(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) $(my_custom_image_modules_dep) \
$(CUSTOM_IMAGE_DICT_FILE)
@echo "Build image $@"
@@ -130,13 +124,6 @@
$(hide) echo "partition_size=$(PRIVATE_PARTITION_SIZE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(if $(PRIVATE_SELINUX),$(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
- $(if $(PRIVATE_SUPPORT_VERITY),\
- $(hide) echo "verity=$(PRIVATE_SUPPORT_VERITY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
- echo "verity_key=$(PRIVATE_VERITY_KEY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
- echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
- echo "verity_block_device=$(PRIVATE_VERITY_BLOCK_DEVICE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
- $(if $(PRIVATE_SUPPORT_VERITY_FEC),\
- $(hide) echo "verity_fec=$(PRIVATE_SUPPORT_VERITY_FEC)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
$(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
$(hide) echo "avb_avbtool=$(PRIVATE_AVB_AVBTOOL)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(if $(PRIVATE_AVB_KEY_PATH),\
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 4b8bd16..a5f162a 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -53,9 +53,20 @@
$(test_suite_jdk): $(SOONG_ZIP)
$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR)
-$(call declare-license-metadata,$(test_suite_jdk),SPDX-license-identifier-GPL-2.0-with-classpath-exception,restricted,\
+$(call declare-license-metadata,$(test_suite_jdk),SPDX-license-identifier-GPL-2.0-with-classpath-exception,permissive,\
$(test_suite_jdk_dir)/legal/java.base/LICENSE,JDK,prebuilts/jdk/$(notdir $(patsubst %/,%,$(dir $(test_suite_jdk_dir)))))
+# Copy license metadata
+$(call declare-copy-target-license-metadata,$(out_dir)/$(notdir $(test_suite_jdk)),$(test_suite_jdk))
+$(foreach t,$(test_tools) $(test_suite_prebuilt_tools),\
+ $(eval _dst := $(out_dir)/tools/$(notdir $(t)))\
+ $(if $(strip $(ALL_TARGETS.$(t).META_LIC)),\
+ $(call declare-copy-target-license-metadata,$(_dst),$(t)),\
+ $(warning $(t) has no license metadata)\
+ )\
+)
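+# Destination paths of the copied tools and JDK; used below when declaring
+# the license dependencies of the compatibility zip.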
+test_copied_tools := $(foreach t,$(test_tools) $(test_suite_prebuilt_tools), $(out_dir)/tools/$(notdir $(t))) $(out_dir)/$(notdir $(test_suite_jdk))
+
# Include host shared libraries
host_shared_libs := $(call copy-many-files, $(COMPATIBILITY.$(test_suite_name).HOST_SHARED_LIBRARY.FILES))
@@ -65,7 +76,7 @@
$(eval _src := $(call word-colon,1,$(p)))\
$(eval _dst := $(call word-colon,2,$(p)))\
$(if $(strip $(ALL_TARGETS.$(_src).META_LIC)),\
- $(eval ALL_TARGETS.$(_dst).META_LIC := $(ALL_TARGETS.$(_src).META_LIC)),\
+ $(call declare-copy-target-license-metadata,$(_dst),$(_src)),\
$(warning $(_src) has no license metadata for $(_dst))\
)\
)\
@@ -124,7 +135,7 @@
$(call declare-0p-target,$(compatibility_tests_list_zip),)
$(call declare-1p-container,$(compatibility_zip),)
-$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_suite_jdk), $(out_dir)/:/)
+$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_copied_tools), $(out_dir)/:/)
$(eval $(call html-notice-rule,$(test_suite_notice_html),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
$(eval $(call text-notice-rule,$(test_suite_notice_txt),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
diff --git a/core/tasks/tools/vts-kernel-tests.mk b/core/tasks/tools/vts-kernel-tests.mk
index 5fbb589..bd115c9 100644
--- a/core/tasks/tools/vts-kernel-tests.mk
+++ b/core/tasks/tools/vts-kernel-tests.mk
@@ -18,9 +18,12 @@
include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
# Copy kernel test modules to testcases directories
-kernel_test_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_tests
-kernel_test_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_tests
-kernel_test_modules := \
- $(kselftest_modules) \
+kernel_ltp_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_ltp_tests
+kernel_ltp_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_ltp_tests
+kernel_ltp_modules := \
ltp \
- $(ltp_packages)
\ No newline at end of file
+ $(ltp_packages)
+
+kernel_kselftest_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_kselftest_tests
+kernel_kselftest_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_kselftest_tests
+kernel_kselftest_modules := $(kselftest_modules)
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index 5e1b5d5..bd7652b 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -18,12 +18,15 @@
include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
-kernel_test_copy_pairs := \
- $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_vts_out))
+ltp_copy_pairs := \
+ $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_vts_out))
+kselftest_copy_pairs := \
+ $(call target-native-copy-pairs,$(kernel_kselftest_modules),$(kernel_kselftest_vts_out))
-copy_kernel_tests := $(call copy-many-files,$(kernel_test_copy_pairs))
+copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs))
+copy_kselftest_tests := $(call copy-many-files,$(kselftest_copy_pairs))
-test_suite_extra_deps := $(copy_kernel_tests)
+test_suite_extra_deps := $(copy_ltp_tests) $(copy_kselftest_tests)
include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index af7d1c0..60c0f67 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -42,24 +42,23 @@
DEFAULT_PLATFORM_VERSION := UP1A
.KATI_READONLY := DEFAULT_PLATFORM_VERSION
-MIN_PLATFORM_VERSION := TP1A
+MIN_PLATFORM_VERSION := UP1A
MAX_PLATFORM_VERSION := UP1A
# The last stable version name of the platform that was released. During
# development, this stays at that previous version, while the codename indicates
# further work based on the previous version.
-PLATFORM_VERSION_LAST_STABLE := 12
+PLATFORM_VERSION_LAST_STABLE := 13
.KATI_READONLY := PLATFORM_VERSION_LAST_STABLE
# These are the current development codenames, if the build is not a final
# release build. If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.TP1A := Tiramisu
PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
# This is the user-visible version. In a final release build it should
# be empty to use PLATFORM_VERSION as the user-visible version. For
# a preview release it can be set to a user-friendly value like `12 Preview 1`
-PLATFORM_DISPLAY_VERSION :=
+PLATFORM_DISPLAY_VERSION := 13
ifndef PLATFORM_SDK_VERSION
# This is the canonical definition of the SDK version, which defines
@@ -74,16 +73,16 @@
# When you increment the PLATFORM_SDK_VERSION please ensure you also
# clear out the following text file of all older PLATFORM_VERSION's:
# cts/tests/tests/os/assets/platform_versions.txt
- PLATFORM_SDK_VERSION := 32
+ PLATFORM_SDK_VERSION := 33
endif
.KATI_READONLY := PLATFORM_SDK_VERSION
# This is the sdk extension version of this tree.
-PLATFORM_SDK_EXTENSION_VERSION := 1
+PLATFORM_SDK_EXTENSION_VERSION := 3
.KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION
# This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
-PLATFORM_BASE_SDK_EXTENSION_VERSION := 1
+PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION)
.KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
# These are all known codenames.
@@ -104,9 +103,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2022-05-05
+ PLATFORM_SECURITY_PATCH := 2022-11-05
endif
-.KATI_READONLY := PLATFORM_SECURITY_PATCH
include $(BUILD_SYSTEM)/version_util.mk
-
diff --git a/envsetup.sh b/envsetup.sh
index 6e756ca..3709999 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -10,7 +10,8 @@
invocations of 'm' etc.
- tapas: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
Sets up the build environment for building unbundled apps (APKs).
-- banchan: banchan <module1> [<module2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
+- banchan: banchan <module1> [<module2> ...] [arm|x86|arm64|x86_64|arm64only|x86_64only] \
+ [eng|userdebug|user]
Sets up the build environment for building unbundled modules (APEXes).
- croot: Changes directory to the top of the tree, or a subdirectory thereof.
- m: Makes from the top of the tree.
@@ -38,6 +39,7 @@
- godir: Go to the directory containing a file.
- allmod: List all modules.
- gomod: Go to the directory containing a module.
+- bmod: Get the Bazel label of a Soong module if it is converted with bp2build.
- pathmod: Get the directory containing a module.
- outmod: Gets the location of a module's installed outputs with a certain extension.
- dirmods: Gets the modules defined in a given directory.
@@ -205,41 +207,6 @@
fi
# and in with the new
- local prebuiltdir=$(getprebuilt)
- local gccprebuiltdir=$(get_abs_build_var ANDROID_GCC_PREBUILTS)
-
- # defined in core/config.mk
- local targetgccversion=$(get_build_var TARGET_GCC_VERSION)
- local targetgccversion2=$(get_build_var 2ND_TARGET_GCC_VERSION)
- export TARGET_GCC_VERSION=$targetgccversion
-
- # The gcc toolchain does not exists for windows/cygwin. In this case, do not reference it.
- export ANDROID_TOOLCHAIN=
- export ANDROID_TOOLCHAIN_2ND_ARCH=
- local ARCH=$(get_build_var TARGET_ARCH)
- local toolchaindir toolchaindir2=
- case $ARCH in
- x86) toolchaindir=x86/x86_64-linux-android-$targetgccversion/bin
- ;;
- x86_64) toolchaindir=x86/x86_64-linux-android-$targetgccversion/bin
- ;;
- arm) toolchaindir=arm/arm-linux-androideabi-$targetgccversion/bin
- ;;
- arm64) toolchaindir=aarch64/aarch64-linux-android-$targetgccversion/bin;
- toolchaindir2=arm/arm-linux-androideabi-$targetgccversion2/bin
- ;;
- *)
- echo "Can't find toolchain for unknown architecture: $ARCH"
- toolchaindir=xxxxxxxxx
- ;;
- esac
- if [ -d "$gccprebuiltdir/$toolchaindir" ]; then
- export ANDROID_TOOLCHAIN=$gccprebuiltdir/$toolchaindir
- fi
-
- if [ "$toolchaindir2" -a -d "$gccprebuiltdir/$toolchaindir2" ]; then
- export ANDROID_TOOLCHAIN_2ND_ARCH=$gccprebuiltdir/$toolchaindir2
- fi
export ANDROID_DEV_SCRIPTS=$T/development/scripts:$T/prebuilts/devtools/tools
@@ -252,8 +219,7 @@
;;
esac
- ANDROID_BUILD_PATHS=$(get_build_var ANDROID_BUILD_PATHS):$ANDROID_TOOLCHAIN
- ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ANDROID_TOOLCHAIN_2ND_ARCH
+ ANDROID_BUILD_PATHS=$(get_build_var ANDROID_BUILD_PATHS)
ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ANDROID_DEV_SCRIPTS
# Append llvm binutils prebuilts path to ANDROID_BUILD_PATHS.
@@ -288,6 +254,9 @@
local ATEST_PATH="$T/prebuilts/asuite/atest/$os_arch"
ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ACLOUD_PATH:$AIDEGEN_PATH:$ATEST_PATH
+ # Build system
+ ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$T/build/bazel/bin
+
export ANDROID_BUILD_PATHS=$(tr -s : <<<"${ANDROID_BUILD_PATHS}:")
export PATH=$ANDROID_BUILD_PATHS$PATH
@@ -329,22 +298,6 @@
#export HOST_EXTRACFLAGS="-I "$T/system/kernel_headers/host_include
}
-function bazel()
-{
- if which bazel &>/dev/null; then
- >&2 echo "NOTE: bazel() function sourced from Android's envsetup.sh is being used instead of $(which bazel)"
- >&2 echo
- fi
-
- local T="$(gettop)"
- if [ ! "$T" ]; then
- >&2 echo "Couldn't locate the top of the Android tree. Try setting TOP. This bazel() function cannot be used outside of the AOSP directory."
- return
- fi
-
- "$T/tools/bazel" "$@"
-}
-
function printconfig()
{
local T=$(gettop)
@@ -361,8 +314,6 @@
set_sequence_number
export ANDROID_BUILD_TOP=$(gettop)
- # With this environment variable new GCC can apply colors to warnings/errors
- export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01'
}
function set_sequence_number()
@@ -399,6 +350,7 @@
packages/modules/adb/adb.bash
system/core/fastboot/fastboot.bash
tools/asuite/asuite.sh
+ prebuilts/bazel/common/bazel-complete.bash
)
# Completion can be disabled selectively to allow users to use non-standard completion.
# e.g.
@@ -420,6 +372,8 @@
if [ -z "$ZSH_VERSION" ]; then
# Doesn't work in zsh.
complete -o nospace -F _croot croot
+ # TODO(b/244559459): Support b autocompletion for zsh
+ complete -F _bazel__complete -o nospace b
fi
complete -F _lunch lunch
@@ -427,6 +381,7 @@
complete -F _complete_android_module_names gomod
complete -F _complete_android_module_names outmod
complete -F _complete_android_module_names installmod
+ complete -F _complete_android_module_names bmod
complete -F _complete_android_module_names m
}
@@ -456,10 +411,19 @@
{
local code
local results
+ # Lunch must be run from the top of the tree; resolving the top here first
+ # gives a clear error message instead of a FileNotFound.
+ local T=$(multitree_gettop)
+ if [ -n "$T" ]; then
+ "$T/orchestrator/build/orchestrator/core/lunch.py" "$@"
+ else
+ _multitree_lunch_error
+ return 1
+ fi
if $(echo "$1" | grep -q '^-') ; then
# Calls starting with a -- argument are passed directly and the function
# returns with the lunch.py exit code.
- build/build/make/orchestrator/core/lunch.py "$@"
+ "${T}/orchestrator/build/orchestrator/core/lunch.py" "$@"
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -470,7 +434,7 @@
fi
else
# All other calls go through the --lunch variant of lunch.py
- results=($(build/build/make/orchestrator/core/lunch.py --lunch "$@"))
+ results=($(${T}/orchestrator/build/orchestrator/core/lunch.py --lunch "$@"))
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -791,6 +755,10 @@
set_stuff_for_environment
[[ -n "${ANDROID_QUIET_BUILD:-}" ]] || printconfig
destroy_build_var_cache
+
+ if [[ -n "${CHECK_MU_CONFIG:-}" ]]; then
+ check_mu_config
+ fi
}
unset COMMON_LUNCH_CHOICES_CACHE
@@ -881,7 +849,7 @@
function banchan()
{
local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
- local product="$(echo $* | xargs -n 1 echo | \grep -E '^(.*_)?(arm|x86|arm64|x86_64)$' | xargs)"
+ local product="$(echo $* | xargs -n 1 echo | \grep -E '^(.*_)?(arm|x86|arm64|x86_64|arm64only|x86_64only)$' | xargs)"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|(.*_)?(arm|x86|arm64|x86_64))$' | xargs)"
@@ -910,6 +878,8 @@
x86) product=module_x86;;
arm64) product=module_arm64;;
x86_64) product=module_x86_64;;
+ arm64only) product=module_arm64only;;
+ x86_64only) product=module_x86_64only;;
esac
if [ -z "$variant" ]; then
variant=eng
@@ -960,7 +930,7 @@
# TODO: Merge into gettop as part of launching multitree
function multitree_gettop
{
- local TOPFILE=build/build/make/core/envsetup.mk
+ local TOPFILE=orchestrator/build/make/core/envsetup.mk
if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
# The following circumlocution ensures we remove symlinks from TOP.
(cd "$TOP"; PWD= /bin/pwd)
@@ -1057,7 +1027,7 @@
# Easy way to make system.img/etc writable
function syswrite() {
adb wait-for-device && adb root || return 1
- if [[ $(adb disable-verity | grep "reboot") ]]; then
+ if [[ $(adb disable-verity | grep -i "reboot") ]]; then
echo "rebooting"
adb reboot && adb wait-for-device && adb root || return 1
fi
@@ -1588,14 +1558,49 @@
fi
}
-# List all modules for the current device, as cached in module-info.json. If any build change is
-# made and it should be reflected in the output, you should run 'refreshmod' first.
+# List all modules for the current device, as cached in all_modules.txt. If any build change is
+# made and it should be reflected in the output, you should run `m nothing` first.
function allmod() {
- verifymodinfo || return 1
-
- python3 -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
+ cat $ANDROID_PRODUCT_OUT/all_modules.txt 2>/dev/null
}
+# Return the Bazel label of a Soong module if it is converted with bp2build.
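+# Example (module name and package path are illustrative):
+#   $ bmod libbase
+#   //system/libbase:libbase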
+function bmod()
+(
+ if [ $# -ne 1 ]; then
+ echo "usage: bmod <module>" >&2
+ return 1
+ fi
+
+ # We could run bp2build here, but it might trigger bp2build invalidation
+ # when used with `b` (e.g. --run_soong_tests) and/or add unnecessary waiting
+ # time overhead.
+ #
+ # For a snappy result, use the latest generated version in soong_injection,
+ # and ask users to run m bp2build if it doesn't exist.
+ converted_json="$(get_abs_build_var OUT_DIR)/soong/soong_injection/metrics/converted_modules_path_map.json"
+
+ if [ ! -f ${converted_json} ]; then
+ echo "bp2build files not found. Have you run 'm bp2build'?" >&2
+ return 1
+ fi
+
+ local target_label=$(python3 -c "import json
+module = '$1'
+converted_json='$converted_json'
+bp2build_converted_map = json.load(open(converted_json))
+if module not in bp2build_converted_map:
+ exit(1)
+print(bp2build_converted_map[module] + ':' + module)")
+
+ if [ -z "${target_label}" ]; then
+ echo "$1 is not converted to Bazel." >&2
+ return 1
+ else
+ echo "${target_label}"
+ fi
+)
+
# Get the path of a specific module in the android tree, as cached in module-info.json.
# If any build change is made, and it should be reflected in the output, you should run
# 'refreshmod' first. Note: This is the inverse of dirmods.
@@ -1735,7 +1740,7 @@
function _complete_android_module_names() {
local word=${COMP_WORDS[COMP_CWORD]}
- COMPREPLY=( $(QUIET_VERIFYMODINFO=true allmod | grep -E "^$word") )
+ COMPREPLY=( $(allmod | grep -E "^$word") )
}
# Print colored exit condition
@@ -1820,7 +1825,8 @@
function _trigger_build()
(
local -r bc="$1"; shift
- if T="$(gettop)"; then
+ local T=$(gettop)
+ if [ -n "$T" ]; then
_wrap_build "$T/build/soong/soong_ui.bash" --build-mode --${bc} --dir="$(pwd)" "$@"
else
>&2 echo "Couldn't locate the top of the tree. Try setting TOP."
@@ -1828,21 +1834,6 @@
fi
)
-# Convenience entry point (like m) to use Bazel in AOSP.
-function b()
-(
- # Generate BUILD, bzl files into the synthetic Bazel workspace (out/soong/workspace).
- _trigger_build "all-modules" bp2build USE_BAZEL_ANALYSIS= || return 1
- # Then, run Bazel using the synthetic workspace as the --package_path.
- if [[ -z "$@" ]]; then
- # If there are no args, show help.
- bazel help
- else
- # Else, always run with the bp2build configuration, which sets Bazel's package path to the synthetic workspace.
- bazel "$@" --config=bp2build
- fi
-)
-
function m()
(
_trigger_build "all-modules" "$@"
@@ -1880,8 +1871,9 @@
function multitree_build()
{
- if T="$(multitree_gettop)"; then
- "$T/build/build/orchestrator/core/orchestrator.py" "$@"
+ local T=$(multitree_gettop)
+ if [ -n "$T" ]; then
+ "$T/orchestrator/build/orchestrator/core/orchestrator.py" "$@"
else
_multitree_lunch_error
return 1
@@ -1993,13 +1985,7 @@
return
;;
esac
- if [[ -z "$OUT_DIR" ]]; then
- if [[ -z "$OUT_DIR_COMMON_BASE" ]]; then
- OUT_DIR=out
- else
- OUT_DIR=${OUT_DIR_COMMON_BASE}/${PWD##*/}
- fi
- fi
+ OUT_DIR="$(get_abs_build_var OUT_DIR)"
if [[ "$1" == "--regenerate" ]]; then
shift 1
NINJA_ARGS="-t commands $@" m
@@ -2010,6 +1996,13 @@
fi
}
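+# Builds avbtool on demand and runs it, e.g.:
+#   avbtool info_image --image $ANDROID_PRODUCT_OUT/vbmeta.img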
+function avbtool() {
+ if [[ ! -f "$ANDROID_SOONG_HOST_OUT"/bin/avbtool ]]; then
+ m avbtool
+ fi
+ "$ANDROID_SOONG_HOST_OUT"/bin/avbtool "$@"
+}
+
validate_current_shell
source_vendorsetup
addcompletions
diff --git a/finalize-aidl-vndk-sdk-resources.sh b/finalize-aidl-vndk-sdk-resources.sh
new file mode 100755
index 0000000..8e12c49
--- /dev/null
+++ b/finalize-aidl-vndk-sdk-resources.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_aidl_vndk_sdk_resources() {
+ local top="$(dirname "$0")"/../..
+
+ # default target to modify tree and build SDK
+ local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+ # This script is WIP and only finalizes part of the Android branch for release.
+ # The full process can be found at (INTERNAL) go/android-sdk-finalization.
+
+ # VNDK snapshot (TODO)
+ # SDK snapshots (TODO)
+ # Update references in the codebase to new API version (TODO)
+ # ...
+
+ AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
+
+ # Generate ABI dumps
+ ANDROID_BUILD_TOP="$top" \
+ out/host/linux-x86/bin/create_reference_dumps \
+ -p aosp_arm64 --build-variant user
+
+ echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
+ # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
+ $m check-vndk-list || \
+ { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
+ echo "DONE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF"
+
+ # Finalize resources
+ "$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
+ "$top/frameworks/base/core/res/res/values/public-staging.xml" \
+ "$top/frameworks/base/core/res/res/values/public-final.xml"
+
+ # SDK finalization
+ local sdk_codename='public static final int UPSIDE_DOWN_CAKE = CUR_DEVELOPMENT;'
+ local sdk_version='public static final int UPSIDE_DOWN_CAKE = 34;'
+ local sdk_build="$top/frameworks/base/core/java/android/os/Build.java"
+
+ sed -i "s%$sdk_codename%$sdk_version%g" $sdk_build
+
+ # Force update current.txt
+ $m clobber
+ $m update-api
+}
+
+finalize_aidl_vndk_sdk_resources
+
diff --git a/finalize-cleanup.sh b/finalize-cleanup.sh
new file mode 100755
index 0000000..c62a97c
--- /dev/null
+++ b/finalize-cleanup.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Brings local repository to a remote head state.
+
+# set -ex
+
+function finalize_revert_local_changes_main() {
+ local top="$(dirname "$0")"/../..
+
+ repo selfupdate
+
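+ # In every project: discard local edits, abort any in-progress revert, and
+ # drop the temporary fina-step1 branch.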
+ repo forall -c '\
+ git checkout . ; git revert --abort ; git clean -fdx ;\
+ git checkout @ ; git branch fina-step1 -D ; git reset --hard; \
+ repo start fina-step1 ; git checkout @ ; git b fina-step1 -D ;'
+}
+
+finalize_revert_local_changes_main
diff --git a/finalize-step-1.sh b/finalize-step-1.sh
new file mode 100755
index 0000000..1ca98d4
--- /dev/null
+++ b/finalize-step-1.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+# Automation for finalize_branch_for_release.sh.
+# Sets up local environment, runs the finalization script and submits the results.
+# WIP:
+# - does not submit, only sends to gerrit.
+
+# set -ex
+
+function revert_to_unfinalized_state() {
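+ # Reset each project, then revert every FINALIZATION_STEP_1_SCRIPT_COMMIT
+ # made since the latest FINALIZATION_STEP_1_BASELINE_COMMIT (or all such
+ # commits if no baseline commit exists).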
+ repo forall -c '\
+ git checkout . ; git revert --abort ; git clean -fdx ;\
+ git checkout @ ; git branch fina-step1 -D ; git reset --hard; \
+ repo start fina-step1 ; git checkout @ ; git b fina-step1 -D ;\
+ baselineHash="$(git log --format=%H --no-merges --max-count=1 --grep ^FINALIZATION_STEP_1_BASELINE_COMMIT)" ;\
+ if [[ $baselineHash ]]; then
+ previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT $baselineHash..HEAD | tr \n \040)" ;\
+ else
+ previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT | tr \n \040)" ;\
+ fi ; \
+ if [[ $previousHash ]]; then git revert --no-commit --strategy=ort --strategy-option=ours $previousHash ; fi ;'
+}
+
+function commit_changes() {
+ repo forall -c '\
+ if [[ $(git status --short) ]]; then
+ repo start fina-step1 ;
+ git add -A . ;
+ git commit -m FINALIZATION_STEP_1_SCRIPT_COMMIT -m WILL_BE_AUTOMATICALLY_REVERTED ;
+ repo upload --cbr --no-verify -t -y . ;
+ git clean -fdx ; git reset --hard ;
+ fi'
+}
+
+function finalize_step_1_main() {
+ local top="$(dirname "$0")"/../..
+
+ repo selfupdate
+
+ revert_to_unfinalized_state
+
+ # vndk etc finalization
+ source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
+
+ # move all changes to fina-step1 branch and commit with a robot message
+ commit_changes
+}
+
+finalize_step_1_main
diff --git a/finalize_branch_for_release.sh b/finalize_branch_for_release.sh
index 12b096f..9e9d6a1 100755
--- a/finalize_branch_for_release.sh
+++ b/finalize_branch_for_release.sh
@@ -8,23 +8,16 @@
# default target to modify tree and build SDK
local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
- # This script is WIP and only finalizes part of the Android branch for release.
- # The full process can be found at (INTERNAL) go/android-sdk-finalization.
+ # Build finalization artifacts.
+ source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
- # VNDK snapshot (TODO)
- # SDK snapshots (TODO)
- # Update references in the codebase to new API version (TODO)
- # ...
-
- AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api
-
- # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
- $m check-vndk-list || \
- { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
-
- # for now, we simulate the release state for AIDL, but in the future, we would want
- # to actually turn the branch into the REL state and test with that
- AIDL_FROZEN_REL=true $m nothing # test build
+ # This command tests:
+ # The release state for AIDL.
+ # ABI difference between user and userdebug builds.
+ # Resource/SDK finalization.
+ # In the future, we would want to actually turn the branch into the REL
+ # state and test with that.
+ AIDL_FROZEN_REL=true $m
# Build SDK (TODO)
# lunch sdk...
@@ -32,3 +25,4 @@
}
finalize_main
+
diff --git a/help.sh b/help.sh
index e51adc1..c405959 100755
--- a/help.sh
+++ b/help.sh
@@ -26,6 +26,8 @@
clean (aka clobber) equivalent to rm -rf out/
checkbuild Build every module defined in the source tree
droid Default target
+ sync Build everything in the default target except the images,
+ for use with adb sync.
nothing Do not build anything, just parse and validate the build structure
java Build all the java code in the source tree
diff --git a/orchestrator/README b/orchestrator/README
deleted file mode 100644
index 9a1e302..0000000
--- a/orchestrator/README
+++ /dev/null
@@ -1,8 +0,0 @@
-DEMO
-
-from the root of the workspace
-
-multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
-
-rm -rf out && multitree_build && echo "==== Files ====" && find out -type f
-
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
deleted file mode 100644
index d7abef7..0000000
--- a/orchestrator/core/api_assembly.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import json
-import os
-import sys
-
-import api_assembly_cc
-import ninja_tools
-
-
-ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
-
-def assemble_apis(context, inner_trees):
- # Find all of the contributions from the inner tree
- contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
-
- # Load and validate the contribution files
- # TODO: Check timestamps and skip unnecessary work
- contributions = []
- for tree_key, filenames in contribution_files_dict.items():
- for filename in filenames:
- json_data = load_contribution_file(context, filename)
- if not json_data:
- continue
- # TODO: Validate the configs, especially that the domains match what we asked for
- # from the lunch config.
- contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
-
- # Group contributions by language and API surface
- stub_libraries = collate_contributions(contributions)
-
- # Initialize the ninja file writer
- with open(context.out.api_ninja_file(), "w") as ninja_file:
- ninja = ninja_tools.Ninja(context, ninja_file)
-
- # Initialize the build file writer
- build_file = BuildFile() # TODO: parameters?
-
- # Iterate through all of the stub libraries and generate rules to assemble them
- # and Android.bp/BUILD files to make those available to inner trees.
- # TODO: Parallelize? Skip unnecessary work?
- for stub_library in stub_libraries:
- STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
-
- # TODO: Handle host_executables separately or as a StubLibrary language?
-
- # Finish writing the ninja file
- ninja.write()
-
-
-def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
- "Scan an inner_tree's out dir for the api contribution files."
- directory = inner_tree.out.api_contributions_dir()
- result = []
- with os.scandir(directory) as it:
- for dirent in it:
- if not dirent.is_file():
- continue
- if dirent.name.endswith(".json"):
- result.append(os.path.join(directory, dirent.name))
- return result
-
-
-def load_contribution_file(context, filename):
- "Load and return the API contribution at filename. On error report error and return None."
- with open(filename) as f:
- try:
- return json.load(f)
- except json.decoder.JSONDecodeError as ex:
- # TODO: Error reporting
- context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
- raise ex
-
-
-class StubLibraryContribution(object):
- def __init__(self, inner_tree, api_domain, library_contribution):
- self.inner_tree = inner_tree
- self.api_domain = api_domain
- self.library_contribution = library_contribution
-
-
-class StubLibrary(object):
- def __init__(self, language, api_surface, api_surface_version, name):
- self.language = language
- self.api_surface = api_surface
- self.api_surface_version = api_surface_version
- self.name = name
- self.contributions = []
-
- def add_contribution(self, contrib):
- self.contributions.append(contrib)
-
-
-def collate_contributions(contributions):
- """Take the list of parsed API contribution files, and group targets by API Surface, version,
- language and library name, and return a StubLibrary object for each of those.
- """
- grouped = {}
- for contribution in contributions:
- for language in STUB_LANGUAGE_HANDLERS.keys():
- for library in contribution.json_data.get(language, []):
- key = (language, contribution.json_data["name"],
- contribution.json_data["version"], library["name"])
- stub_library = grouped.get(key)
- if not stub_library:
- stub_library = StubLibrary(language, contribution.json_data["name"],
- contribution.json_data["version"], library["name"])
- grouped[key] = stub_library
- stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
- contribution.json_data["api_domain"], library))
- return list(grouped.values())
-
-
-def assemble_java_api_library(context, ninja, build_file, stub_library):
- print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name))
- for contrib in stub_library.contributions:
- print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
- # TODO: Implement me
-
-
-def assemble_resource_api_library(context, ninja, build_file, stub_library):
- print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name))
- for contrib in stub_library.contributions:
- print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
- # TODO: Implement me
-
-
-STUB_LANGUAGE_HANDLERS = {
- "cc_libraries": api_assembly_cc.assemble_cc_api_library,
- "java_libraries": assemble_java_api_library,
- "resource_libraries": assemble_resource_api_library,
-}
-
-
-class BuildFile(object):
- "Abstract generator for Android.bp files and BUILD files."
- pass
-
-
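
The `collate_contributions` function above buckets contribution entries by a
four-part key (language, API surface name, surface version, and library name)
so that several inner trees can contribute to one stub library. A minimal
sketch of that bucketing pattern; the dict keys in the sample data are
illustrative, not the real contribution-file schema:

```python
import collections

def group_by_key(items, key_fn):
    """Bucket items by key_fn, creating each bucket on first sight."""
    grouped = collections.defaultdict(list)
    for item in items:
        grouped[key_fn(item)].append(item)
    return grouped

contributions = [
    {"language": "cc_libraries", "surface": "publicapi", "version": 1, "lib": "libfoo"},
    {"language": "cc_libraries", "surface": "publicapi", "version": 1, "lib": "libfoo"},
    {"language": "java_libraries", "surface": "publicapi", "version": 1, "lib": "framework"},
]
buckets = group_by_key(
    contributions,
    lambda c: (c["language"], c["surface"], c["version"], c["lib"]))
assert len(buckets) == 2  # both libfoo entries feed a single stub library
```
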
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
deleted file mode 100644
index ca9b2a4..0000000
--- a/orchestrator/core/api_assembly_cc.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-def assemble_cc_api_library(context, ninja, build_file, stub_library):
- staging_dir = context.out.api_library_dir(stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name)
- work_dir = context.out.api_library_work_dir(stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name)
-
- # Generate rules to copy headers
- includes = []
- include_dir = os.path.join(staging_dir, "include")
- for contrib in stub_library.contributions:
- for headers in contrib.library_contribution["headers"]:
- root = headers["root"]
- for file in headers["files"]:
- # TODO: Deal with collisions of the same name from multiple contributions
- include = os.path.join(include_dir, file)
- ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
- includes.append(include)
-
- # Generate rule to run ndkstubgen
-
-
- # Generate rule to compile stubs to library
-
- # Generate phony rule to build the library
- # TODO: This name probably conflicts with something
- ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
- stub_library.name)), includes)
-
- # Generate build files
-
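
`assemble_cc_api_library` above stages each contributed header under an
`include/` subtree of the library's staging directory, preserving the header's
path relative to its declared root. The path math reduces to a one-liner
(example paths are hypothetical):

```python
import os

def staged_include(staging_dir, header_file):
    # Headers keep their root-relative subpath under <staging>/include.
    return os.path.join(staging_dir, "include", header_file)

print(staged_include("out/api_surfaces/publicapi/1/libc", "bits/stdio.h"))
# -> out/api_surfaces/publicapi/1/libc/include/bits/stdio.h
```
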
diff --git a/orchestrator/core/api_domain.py b/orchestrator/core/api_domain.py
deleted file mode 100644
index bb7306c..0000000
--- a/orchestrator/core/api_domain.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-class ApiDomain(object):
- def __init__(self, name, tree, product):
- # Product will be null for modules
- self.name = name
- self.tree = tree
- self.product = product
-
- def __str__(self):
- return "ApiDomain(name=\"%s\" tree.root=\"%s\" product=%s)" % (
- self.name, self.tree.root,
- "None" if self.product is None else "\"%s\"" % self.product)
-
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
deleted file mode 100644
index 03fe890..0000000
--- a/orchestrator/core/final_packaging.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-import sys
-
-import ninja_tools
-import ninja_syntax # Has to be after ninja_tools because of the path hack
-
-def final_packaging(context, inner_trees):
- """Pull together all of the previously defined rules into the final build stems."""
-
- with open(context.out.outer_ninja_file(), "w") as ninja_file:
- ninja = ninja_tools.Ninja(context, ninja_file)
-
- # Add the api surfaces file
- ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
-
- # For each inner tree
- for tree in inner_trees.keys():
- # TODO: Verify that inner_tree.ninja was generated
-
- # Read and verify file
- build_targets = read_build_targets_json(context, tree)
- if not build_targets:
- continue
-
- # Generate the ninja and build files for this inner tree
- generate_cross_domain_build_rules(context, ninja, tree, build_targets)
-
- # Finish writing the ninja file
- ninja.write()
-
-
-def read_build_targets_json(context, tree):
- """Read and validate the build_targets.json file for the given tree."""
- try:
- f = open(tree.out.build_targets_file())
- except FileNotFoundError:
- # It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
- return None
-
- data = None
- with f:
- try:
- data = json.load(f)
- except json.decoder.JSONDecodeError as ex:
- sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
- # TODO: Error reporting
- raise ex
-
- # TODO: Better error handling
- # TODO: Validate json schema
- return data
-
-
-def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
- "Generate the ninja and build files for the inner tree."
- # Include the inner tree's inner_tree.ninja
- ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
-
- # Generate module rules and files
- for module in build_targets.get("modules", []):
- generate_shared_module(context, ninja, tree, module)
-
- # Generate staging rules
- staging_dir = context.out.staging_dir()
- for staged in build_targets.get("staging", []):
- # TODO: Enforce that dest isn't in disallowed subdir of out or absolute
- dest = staged["dest"]
- dest = os.path.join(staging_dir, dest)
- if "src" in staged and "obj" in staged:
- context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
- ) # TODO: Filename and line if possible
- if "src" in staged:
- ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
- elif "obj" in staged:
- ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
- ninja.add_global_phony("staging", [dest])
-
- # Generate dist rules
- dist_dir = context.out.dist_dir()
- for disted in build_targets.get("dist", []):
- # TODO: Enforce that dest absolute
- dest = disted["dest"]
- dest = os.path.join(dist_dir, dest)
- ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
- ninja.add_global_phony("dist", [dest])
-
-
-def generate_shared_module(context, ninja, tree, module):
- """Generate ninja rules for the given build_targets.json defined module."""
- module_name = module["name"]
- module_type = module["type"]
- share_dir = context.out.module_share_dir(module_type, module_name)
- src_file = os.path.join(tree.root, module["file"])
-
- if module_type == "apex":
- ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
- # TODO: Generate build file
-
- else:
- # TODO: Better error handling
- raise Exception("Invalid module type: %s" % module)
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
deleted file mode 100644
index d348ee7..0000000
--- a/orchestrator/core/inner_tree.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-import textwrap
-
-class InnerTreeKey(object):
- """Trees are identified uniquely by their root and the TARGET_PRODUCT they will use to build.
- If a single tree uses two different products, then we won't make assumptions about
- them sharing _anything_.
- TODO: This is true for soong. It's more likely that bazel could do analysis for two
- products at the same time in a single tree, so there's an optimization there to do
- eventually."""
- def __init__(self, root, product):
- self.root = root
- self.product = product
-
- def __str__(self):
- return "TreeKey(root=%s product=%s)" % (enquote(self.root), enquote(self.product))
-
- def __hash__(self):
- return hash((self.root, self.product))
-
- def _cmp(self, other):
- if self.root < other.root:
- return -1
- if self.root > other.root:
- return 1
- if self.product == other.product:
- return 0
- if self.product is None:
- return -1
- if other.product is None:
- return 1
- if self.product < other.product:
- return -1
- return 1
-
- def __eq__(self, other):
- return self._cmp(other) == 0
-
- def __ne__(self, other):
- return self._cmp(other) != 0
-
- def __lt__(self, other):
- return self._cmp(other) < 0
-
- def __le__(self, other):
- return self._cmp(other) <= 0
-
- def __gt__(self, other):
- return self._cmp(other) > 0
-
- def __ge__(self, other):
- return self._cmp(other) >= 0
-
-
-class InnerTree(object):
- def __init__(self, context, root, product):
- """Initialize with the inner tree root (relative to the workspace root)"""
- self.root = root
- self.product = product
- self.domains = {}
- # TODO: Base directory on OUT_DIR
- out_root = context.out.inner_tree_dir(root)
- if product:
- out_root += "_" + product
- else:
- out_root += "_unbundled"
- self.out = OutDirLayout(out_root)
-
- def __str__(self):
- return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
- enquote(self.product),
- " ".join([enquote(d) for d in sorted(self.domains.keys())]))
-
- def invoke(self, args):
- """Call the inner tree command for this inner tree. Exits on failure."""
- # TODO: Build time tracing
-
- # Validate that there is a .inner_build command to run at the root of the tree
- # so we can print a good error message
- inner_build_tool = os.path.join(self.root, ".inner_build")
- if not os.access(inner_build_tool, os.X_OK):
- sys.stderr.write(("Unable to execute %s. Is there an inner tree or lunch combo"
- + " misconfiguration?\n") % inner_build_tool)
- sys.exit(1)
-
- # TODO: This is where we should set up the shared trees
-
- # Build the command
- cmd = [inner_build_tool, "--out_dir", self.out.root()]
- for domain_name in sorted(self.domains.keys()):
- cmd.append("--api_domain")
- cmd.append(domain_name)
- cmd += args
-
- # Run the command
- process = subprocess.run(cmd, shell=False)
-
- # TODO: Probably want better handling of inner tree failures
- if process.returncode:
- sys.stderr.write("Build error in inner tree: %s\nstopping multitree build.\n"
- % self.root)
- sys.exit(1)
-
-
-class InnerTrees(object):
- def __init__(self, trees, domains):
- self.trees = trees
- self.domains = domains
-
- def __str__(self):
- "Return a debugging dump of this object"
- return textwrap.dedent("""\
- InnerTrees {
- trees: [
- %(trees)s
- ]
- domains: [
- %(domains)s
- ]
- }""" % {
- "trees": "\n ".join(sorted([str(t) for t in self.trees.values()])),
- "domains": "\n ".join(sorted([str(d) for d in self.domains.values()])),
- })
-
-
- def for_each_tree(self, func, cookie=None):
- """Call func for each of the inner trees once for each product that will be built in it.
-
- The calls will be in a stable order.
-
- Return a map of the InnerTreeKey to any results returned from func().
- """
- result = {}
- for key in sorted(self.trees.keys()):
- result[key] = func(key, self.trees[key], cookie)
- return result
-
-
- def get(self, tree_key):
- """Get an inner tree for tree_key"""
- return self.trees.get(tree_key)
-
- def keys(self):
- "Get the keys for the inner trees in name order."
- return [self.trees[k] for k in sorted(self.trees.keys())]
-
-
-class OutDirLayout(object):
- """Encapsulates the logic about the layout of the inner tree out directories.
- See also context.OutDir for outer tree out dir contents."""
-
- def __init__(self, root):
- "Initialize with the root of the OUT_DIR for the inner tree."
- self._root = root
-
- def root(self):
- return self._root
-
- def tree_info_file(self):
- return os.path.join(self._root, "tree_info.json")
-
- def api_contributions_dir(self):
- return os.path.join(self._root, "api_contributions")
-
- def build_targets_file(self):
- return os.path.join(self._root, "build_targets.json")
-
- def main_ninja_file(self):
- return os.path.join(self._root, "inner_tree.ninja")
-
-
-def enquote(s):
- return "None" if s is None else "\"%s\"" % s
-
-
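
`InnerTreeKey._cmp` above defines a total order: keys sort by root first, then
by product, with a `None` product ordering before any product string. An
equivalent tuple-based sort key, shown for illustration:

```python
def tree_sort_key(root, product):
    # product=None sorts before any string; booleans order False < True.
    return (root, product is not None, product or "")

keys = [("b", None), ("a", "x"), ("a", None), ("a", "w")]
print(sorted(keys, key=lambda k: tree_sort_key(*k)))
# -> [('a', None), ('a', 'w'), ('a', 'x'), ('b', None)]
```
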
diff --git a/orchestrator/core/interrogate.py b/orchestrator/core/interrogate.py
deleted file mode 100644
index 9fe769e..0000000
--- a/orchestrator/core/interrogate.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-
-def interrogate_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["describe"])
-
- info_json_filename = inner_tree.out.tree_info_file()
-
- # TODO: Error handling
- with open(info_json_filename) as f:
- info_json = json.load(f)
-
- # TODO: Check orchestrator protocol
-
diff --git a/orchestrator/core/lunch.py b/orchestrator/core/lunch.py
deleted file mode 100755
index a648478..0000000
--- a/orchestrator/core/lunch.py
+++ /dev/null
@@ -1,408 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import glob
-import json
-import os
-import sys
-
-EXIT_STATUS_OK = 0
-EXIT_STATUS_ERROR = 1
-EXIT_STATUS_NEED_HELP = 2
-
-
-def find_dirs(path, name, ttl=6):
- """Search at most ttl directories deep inside path for a directory called name
- and yield directories that match."""
- # The dance with subdirs is so that we recurse in sorted order.
- subdirs = []
- with os.scandir(path) as it:
- for dirent in sorted(it, key=lambda x: x.name):
- try:
- if dirent.is_dir():
- if dirent.name == name:
- yield os.path.join(path, dirent.name)
- elif ttl > 0:
- subdirs.append(dirent.name)
- except OSError:
- # Consume filesystem errors, e.g. too many links, permission etc.
- pass
- for subdir in subdirs:
- yield from find_dirs(os.path.join(path, subdir), name, ttl-1)
-
-
-def walk_paths(path, matcher, ttl=10):
- """Do a traversal of all files under path yielding each file that matches
- matcher."""
- # First look for files, then recurse into directories as needed.
- # The dance with subdirs is so that we recurse in sorted order.
- subdirs = []
- with os.scandir(path) as it:
- for dirent in sorted(it, key=lambda x: x.name):
- try:
- if dirent.is_file():
- if matcher(dirent.name):
- yield os.path.join(path, dirent.name)
- if dirent.is_dir():
- if ttl > 0:
- subdirs.append(dirent.name)
- except OSError:
- # Consume filesystem errors, e.g. too many links, permission etc.
- pass
- for subdir in sorted(subdirs):
- yield from walk_paths(os.path.join(path, subdir), matcher, ttl-1)
-
-
-def find_file(path, filename):
- """Return a file called filename inside path, no more than ttl levels deep.
-
- Directories are searched alphabetically.
- """
- for f in walk_paths(path, lambda x: x == filename):
- return f
-
-
-def find_config_dirs(workspace_root):
- """Find the configuration files in the well known locations inside workspace_root
-
- <workspace_root>/build/build/orchestrator/multitree_combos
- (AOSP devices, such as cuttlefish)
-
- <workspace_root>/vendor/**/multitree_combos
- (specific to a vendor and not open sourced)
-
- <workspace_root>/device/**/multitree_combos
- (specific to a vendor and are open sourced)
-
- Directories are returned specifically in this order, so that aosp can't be
- overridden, but vendor overrides device.
- """
- # TODO: This is not looking in inner trees correctly.
-
- # TODO: When orchestrator is in its own git project remove the "make/" here
- yield os.path.join(workspace_root, "build/build/make/orchestrator/multitree_combos")
-
- dirs = ["vendor", "device"]
- for d in dirs:
- yield from find_dirs(os.path.join(workspace_root, d), "multitree_combos")
-
-
-def find_named_config(workspace_root, shortname):
- """Find the config with the given shortname inside workspace_root.
-
- Config directories are searched in the order described in find_config_dirs,
- and inside those directories, alphabetically."""
- filename = shortname + ".mcombo"
- for config_dir in find_config_dirs(workspace_root):
- found = find_file(config_dir, filename)
- if found:
- return found
- return None
-
-
-def parse_product_variant(s):
- """Split a PRODUCT-VARIANT name, or return None if it doesn't match that pattern."""
- split = s.split("-")
- if len(split) != 2:
- return None
- return split
-
-
-def choose_config_from_args(workspace_root, args):
- """Return the config file we should use for the given argument,
- or null if there's no file that matches that."""
- if len(args) == 1:
- # Prefer PRODUCT-VARIANT syntax: even if a file with this exact
- # name exists, treat the argument as PRODUCT-VARIANT first.
- pv = parse_product_variant(args[0])
- if pv:
- config = find_named_config(workspace_root, pv[0])
- if config:
- return (config, pv[1])
- return None, None
- # Look for a specifically named file
- if os.path.isfile(args[0]):
- return (args[0], args[1] if len(args) > 1 else None)
- # That file didn't exist, return that we didn't find it.
- return None, None
-
-
-class ConfigException(Exception):
- ERROR_IDENTIFY = "identify"
- ERROR_PARSE = "parse"
- ERROR_CYCLE = "cycle"
- ERROR_VALIDATE = "validate"
-
- def __init__(self, kind, message, locations=[], line=0):
- """Error thrown when loading and parsing configurations.
-
- Args:
- message: Error message to display to user
- locations: List of filenames of the include history. The 0-index one
- is the location where the actual error occurred.
- """
- if len(locations):
- s = locations[0]
- if line:
- s += ":"
- s += str(line)
- s += ": "
- else:
- s = ""
- s += message
- if len(locations):
- for loc in locations[1:]:
- s += "\n included from %s" % loc
- super().__init__(s)
- self.kind = kind
- self.message = message
- self.locations = locations
- self.line = line
-
-
-def load_config(filename):
- """Load a config, including processing the inherits fields.
-
- Raises:
- ConfigException on errors
- """
- def load_and_merge(fn, visited):
- with open(fn) as f:
- try:
- contents = json.load(f)
- except json.decoder.JSONDecodeError as ex:
- raise ConfigException(ConfigException.ERROR_PARSE, ex.msg, visited, ex.lineno)
- # Merge all the parents into one data, with first-wins policy
- inherited_data = {}
- for parent in contents.get("inherits", []):
- if parent in visited:
- raise ConfigException(ConfigException.ERROR_CYCLE, "Cycle detected in inherits",
- visited)
- deep_merge(inherited_data, load_and_merge(parent, [parent,] + visited))
- # Then merge inherited_data into contents, but what's already there will win.
- deep_merge(contents, inherited_data)
- contents.pop("inherits", None)
- return contents
- return load_and_merge(filename, [filename,])
-
-
-def deep_merge(merged, addition):
- """Merge all fields of addition into merged. Pre-existing fields win."""
- for k, v in addition.items():
- if k in merged:
- if isinstance(v, dict) and isinstance(merged[k], dict):
- deep_merge(merged[k], v)
- else:
- merged[k] = v
-
-
-def make_config_header(config_file, config, variant):
- def make_table(rows):
- maxcols = max([len(row) for row in rows])
- widths = [0] * maxcols
- for row in rows:
- for i in range(len(row)):
- widths[i] = max(widths[i], len(row[i]))
- text = []
- for row in rows:
- rowtext = []
- for i in range(len(row)):
- cell = row[i]
- rowtext.append(str(cell))
- rowtext.append(" " * (widths[i] - len(cell)))
- rowtext.append(" ")
- text.append("".join(rowtext))
- return "\n".join(text)
-
- trees = [("Component", "Path", "Product"),
- ("---------", "----", "-------")]
- entry = config.get("system", None)
- def add_config_tuple(trees, entry, name):
- if entry:
- trees.append((name, entry.get("tree"), entry.get("product", "")))
- add_config_tuple(trees, config.get("system"), "system")
- add_config_tuple(trees, config.get("vendor"), "vendor")
- for k, v in config.get("modules", {}).items():
- add_config_tuple(trees, v, k)
-
- return """========================================
-TARGET_BUILD_COMBO=%(TARGET_BUILD_COMBO)s
-TARGET_BUILD_VARIANT=%(TARGET_BUILD_VARIANT)s
-
-%(trees)s
-========================================\n""" % {
- "TARGET_BUILD_COMBO": config_file,
- "TARGET_BUILD_VARIANT": variant,
- "trees": make_table(trees),
- }
-
-
-def do_lunch(args):
- """Handle the lunch command."""
- # Check that we're at the top of a multitree workspace by seeing if this script exists.
- if not os.path.exists("build/build/make/orchestrator/core/lunch.py"):
- sys.stderr.write("ERROR: lunch.py must be run from the root of a multi-tree workspace\n")
- return EXIT_STATUS_ERROR
-
- # Choose the config file
- config_file, variant = choose_config_from_args(".", args)
-
- if config_file is None:
- sys.stderr.write("Can't find lunch combo file for: %s\n" % " ".join(args))
- return EXIT_STATUS_NEED_HELP
- if variant is None:
- sys.stderr.write("Can't find variant for: %s\n" % " ".join(args))
- return EXIT_STATUS_NEED_HELP
-
- # Parse the config file
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write(str(ex))
- return EXIT_STATUS_ERROR
-
- # Fail if the lunchable bit isn't set, because this isn't a usable config
- if not config.get("lunchable", False):
- sys.stderr.write("%s: Lunch config file (or inherited files) does not have the 'lunchable'"
- % config_file)
- sys.stderr.write(" flag set, which means it is probably not a complete lunch spec.\n")
-
- # All the validation has passed, so print the name of the file and the variant
- sys.stdout.write("%s\n" % config_file)
- sys.stdout.write("%s\n" % variant)
-
- # Write confirmation message to stderr
- sys.stderr.write(make_config_header(config_file, config, variant))
-
- return EXIT_STATUS_OK
-
-
-def find_all_combo_files(workspace_root):
- """Find all .mcombo files in the prescribed locations in the tree."""
- for dir in find_config_dirs(workspace_root):
- for file in walk_paths(dir, lambda x: x.endswith(".mcombo")):
- yield file
-
-
-def is_file_lunchable(config_file):
- """Parse config_file, flatten the inheritance, and return whether it can be
- used as a lunch target."""
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write("%s" % ex)
- return False
- return config.get("lunchable", False)
-
-
-def find_all_lunchable(workspace_root):
- """Find all mcombo files in the tree (rooted at workspace_root) that when
- parsed (and inheritance is flattened) have lunchable: true."""
- for f in [x for x in find_all_combo_files(workspace_root) if is_file_lunchable(x)]:
- yield f
-
-
-def load_current_config():
- """Load, validate and return the config as specified in TARGET_BUILD_COMBO. Throws
- ConfigException if there is a problem."""
-
- # Identify the config file
- config_file = os.environ.get("TARGET_BUILD_COMBO")
- if not config_file:
- raise ConfigException(ConfigException.ERROR_IDENTIFY,
- "TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.")
-
- # Parse the config file
- config = load_config(config_file)
-
- # Validate the config file
- if not config.get("lunchable", False):
- raise ConfigException(ConfigException.ERROR_VALIDATE,
- "Lunch config file (or inherited files) does not have the 'lunchable'"
- + " flag set, which means it is probably not a complete lunch spec.",
- [config_file,])
-
- # TODO: Validate that:
- # - there are no modules called system or vendor
- # - everything has all the required files
-
- variant = os.environ.get("TARGET_BUILD_VARIANT")
- if not variant:
- variant = "eng" # TODO: Is this the right default?
- # Validate variant is user, userdebug or eng
-
- return config_file, config, variant
-
-def do_list():
- """Handle the --list command."""
- for f in sorted(find_all_lunchable(".")):
- print(f)
-
-
-def do_print(args):
- """Handle the --print command."""
- # Parse args
- if len(args) == 0:
- config_file = os.environ.get("TARGET_BUILD_COMBO")
- if not config_file:
- sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch before building.\n")
- return EXIT_STATUS_NEED_HELP
- elif len(args) == 1:
- config_file = args[0]
- else:
- return EXIT_STATUS_NEED_HELP
-
- # Parse the config file
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write(str(ex))
- return EXIT_STATUS_ERROR
-
- # Print the config in json form
- json.dump(config, sys.stdout, indent=4)
-
- return EXIT_STATUS_OK
-
-
-def main(argv):
- if len(argv) < 2 or argv[1] == "-h" or argv[1] == "--help":
- return EXIT_STATUS_NEED_HELP
-
- if len(argv) == 2 and argv[1] == "--list":
- do_list()
- return EXIT_STATUS_OK
-
- if len(argv) == 2 and argv[1] == "--print":
- return do_print(argv[2:])
-
- if (len(argv) == 3 or len(argv) == 4) and argv[1] == "--lunch":
- return do_lunch(argv[2:])
-
- sys.stderr.write("Unknown lunch command: %s\n" % " ".join(argv[1:]))
- return EXIT_STATUS_NEED_HELP
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
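
The inherits handling in `load_config` above merges parent configs into the
child with a first-wins policy: values already present survive, and nested
dicts merge recursively. A standalone demonstration using the same
`deep_merge` helper that the deleted file defines:

```python
def deep_merge(merged, addition):
    """Merge all fields of addition into merged. Pre-existing fields win."""
    for k, v in addition.items():
        if k in merged:
            if isinstance(v, dict) and isinstance(merged[k], dict):
                deep_merge(merged[k], v)
        else:
            merged[k] = v

child = {"product": "a", "flags": {"x": 1}}
parent = {"product": "b", "flags": {"x": 2, "y": 3}, "extra": True}
deep_merge(child, parent)
print(child)  # {'product': 'a', 'flags': {'x': 1, 'y': 3}, 'extra': True}
```
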
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
deleted file mode 100644
index ab81d66..0000000
--- a/orchestrator/core/ninja_runner.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-import sys
-
-def run_ninja(context, targets):
- """Run ninja.
- """
-
- # Construct the command
- cmd = [
- context.tools.ninja(),
- "-f",
- context.out.outer_ninja_file(),
- ] + targets
-
- # Run the command
- process = subprocess.run(cmd, shell=False)
-
- # TODO: Probably want better handling of inner tree failures
- if process.returncode:
- sys.stderr.write("Build error in outer tree.\nstopping multitree build.\n")
- sys.exit(1)
-
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
deleted file mode 100644
index 16101ea..0000000
--- a/orchestrator/core/ninja_tools.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-
-# Workaround for python include path
-_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
-if _ninja_dir not in sys.path:
- sys.path.append(_ninja_dir)
-import ninja_writer
-from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
-
-
-class Ninja(ninja_writer.Writer):
- """Some higher level constructs on top of raw ninja writing.
- TODO: Not sure where these should be."""
- def __init__(self, context, file):
- super(Ninja, self).__init__(file)
- self._context = context
- self._did_copy_file = False
- self._phonies = {}
-
- def add_copy_file(self, copy_to, copy_from):
- if not self._did_copy_file:
- self._did_copy_file = True
- rule = Rule("copy_file")
- rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
- + " -f ${in} ${out}")
- self.add_rule(rule)
- build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
- implicits=[self._context.tools.acp()])
- build_action.add_variable("out_dir", os.path.dirname(copy_to))
- self.add_build_action(build_action)
-
- def add_global_phony(self, name, deps):
- """Add a phony target where there are multiple places that will want to add to
- the same phony. If you can, to save memory, use add_phony instead of this function."""
- if type(deps) not in (list, tuple):
- raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
- self._phonies.setdefault(name, []).extend(deps)
-
- def write(self):
- for phony, deps in self._phonies.items():
- self.add_phony(phony, deps)
- super(Ninja, self).write()
-
-
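
`add_global_phony` above defers phony emission so that multiple call sites can
contribute dependencies to the same target; `write()` then emits each phony
once. The bookkeeping reduces to this standalone sketch:

```python
phonies = {}

def add_global_phony(name, deps):
    # Deps for the same phony name accumulate across calls; the writer
    # later emits one phony rule per name with everything collected.
    if type(deps) not in (list, tuple):
        raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
    phonies.setdefault(name, []).extend(deps)

add_global_phony("staging", ["out/staging/a"])
add_global_phony("staging", ["out/staging/b"])
print(phonies)  # {'staging': ['out/staging/a', 'out/staging/b']}
```
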
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
deleted file mode 100755
index 508f73a..0000000
--- a/orchestrator/core/orchestrator.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-
-sys.dont_write_bytecode = True
-import api_assembly
-import api_domain
-import api_export
-import final_packaging
-import inner_tree
-import tree_analysis
-import interrogate
-import lunch
-import ninja_runner
-import utils
-
-EXIT_STATUS_OK = 0
-EXIT_STATUS_ERROR = 1
-
-API_DOMAIN_SYSTEM = "system"
-API_DOMAIN_VENDOR = "vendor"
-API_DOMAIN_MODULE = "module"
-
-def process_config(context, lunch_config):
- """Returns a InnerTrees object based on the configuration requested in the lunch config."""
- def add(domain_name, tree_root, product):
- tree_key = inner_tree.InnerTreeKey(tree_root, product)
- if tree_key in trees:
- tree = trees[tree_key]
- else:
- tree = inner_tree.InnerTree(context, tree_root, product)
- trees[tree_key] = tree
- domain = api_domain.ApiDomain(domain_name, tree, product)
- domains[domain_name] = domain
- tree.domains[domain_name] = domain
-
- trees = {}
- domains = {}
-
- system_entry = lunch_config.get("system")
- if system_entry:
- add(API_DOMAIN_SYSTEM, system_entry["tree"], system_entry["product"])
-
- vendor_entry = lunch_config.get("vendor")
- if vendor_entry:
- add(API_DOMAIN_VENDOR, vendor_entry["tree"], vendor_entry["product"])
-
- for module_name, module_entry in lunch_config.get("modules", {}).items():
- add(module_name, module_entry["tree"], None)
-
- return inner_tree.InnerTrees(trees, domains)
-
-
-def build():
- # Choose the out directory, set up error handling, etc.
- context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
-
- # Read the lunch config file
- try:
- config_file, config, variant = lunch.load_current_config()
- except lunch.ConfigException as ex:
- sys.stderr.write("%s\n" % ex)
- return EXIT_STATUS_ERROR
- sys.stdout.write(lunch.make_config_header(config_file, config, variant))
-
- # Construct the trees and domains dicts
- inner_trees = process_config(context, config)
-
- # 1. Interrogate the trees
- inner_trees.for_each_tree(interrogate.interrogate_tree)
- # TODO: Detect bazel-only mode
-
- # 2a. API Export
- inner_trees.for_each_tree(api_export.export_apis_from_tree)
-
- # 2b. API Surface Assembly
- api_assembly.assemble_apis(context, inner_trees)
-
- # 3a. Inner tree analysis
- tree_analysis.analyze_trees(context, inner_trees)
-
- # 3b. Final Packaging Rules
- final_packaging.final_packaging(context, inner_trees)
-
- # 4. Build Execution
- # TODO: Decide what we want the UX for selecting targets to be across
- # branches... since there are very likely to be conflicting soong short
- # names.
- print("Running ninja...")
- targets = ["staging", "system"]
- ninja_runner.run_ninja(context, targets)
-
- # Success!
- return EXIT_STATUS_OK
-
-def main(argv):
- return build()
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/test/configs/another/bad.mcombo b/orchestrator/core/test/configs/another/bad.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/another/bad.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/another/dir/a b/orchestrator/core/test/configs/another/dir/a
deleted file mode 100644
index 7898192..0000000
--- a/orchestrator/core/test/configs/another/dir/a
+++ /dev/null
@@ -1 +0,0 @@
-a
diff --git a/orchestrator/core/test/configs/b-eng b/orchestrator/core/test/configs/b-eng
deleted file mode 100644
index eceb3f3..0000000
--- a/orchestrator/core/test/configs/b-eng
+++ /dev/null
@@ -1 +0,0 @@
-INVALID FILE
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
deleted file mode 100644
index 8cc8370..0000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "lunchable": "true"
-}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
deleted file mode 100644
index f9805f2..0000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
+++ /dev/null
@@ -1 +0,0 @@
-not a combo file
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
deleted file mode 100644
index e69de29..0000000
--- a/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
+++ /dev/null
diff --git a/orchestrator/core/test/configs/parsing/cycles/1.mcombo b/orchestrator/core/test/configs/parsing/cycles/1.mcombo
deleted file mode 100644
index ab8fe33..0000000
--- a/orchestrator/core/test/configs/parsing/cycles/1.mcombo
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/2.mcombo"
- ]
-}
diff --git a/orchestrator/core/test/configs/parsing/cycles/2.mcombo b/orchestrator/core/test/configs/parsing/cycles/2.mcombo
deleted file mode 100644
index 2b774d0..0000000
--- a/orchestrator/core/test/configs/parsing/cycles/2.mcombo
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/3.mcombo"
- ]
-}
-
diff --git a/orchestrator/core/test/configs/parsing/cycles/3.mcombo b/orchestrator/core/test/configs/parsing/cycles/3.mcombo
deleted file mode 100644
index 41b629b..0000000
--- a/orchestrator/core/test/configs/parsing/cycles/3.mcombo
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/1.mcombo"
- ]
-}
-
diff --git a/orchestrator/core/test/configs/parsing/merge/1.mcombo b/orchestrator/core/test/configs/parsing/merge/1.mcombo
deleted file mode 100644
index a5a57d7..0000000
--- a/orchestrator/core/test/configs/parsing/merge/1.mcombo
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/merge/2.mcombo",
- "test/configs/parsing/merge/3.mcombo"
- ],
- "in_1": "1",
- "in_1_2": "1",
- "merged": {
- "merged_1": "1",
- "merged_1_2": "1"
- },
- "dict_1": { "a" : "b" }
-}
diff --git a/orchestrator/core/test/configs/parsing/merge/2.mcombo b/orchestrator/core/test/configs/parsing/merge/2.mcombo
deleted file mode 100644
index 00963e2..0000000
--- a/orchestrator/core/test/configs/parsing/merge/2.mcombo
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "in_1_2": "2",
- "in_2": "2",
- "in_2_3": "2",
- "merged": {
- "merged_1_2": "2",
- "merged_2": "2",
- "merged_2_3": "2"
- },
- "dict_2": { "a" : "b" }
-}
-
diff --git a/orchestrator/core/test/configs/parsing/merge/3.mcombo b/orchestrator/core/test/configs/parsing/merge/3.mcombo
deleted file mode 100644
index 5fc9d90..0000000
--- a/orchestrator/core/test/configs/parsing/merge/3.mcombo
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "in_3": "3",
- "in_2_3": "3",
- "merged": {
- "merged_3": "3",
- "merged_2_3": "3"
- },
- "dict_3": { "a" : "b" }
-}
-
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
deleted file mode 100644
index e69de29..0000000
--- a/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
+++ /dev/null
diff --git a/orchestrator/core/test_lunch.py b/orchestrator/core/test_lunch.py
deleted file mode 100755
index 2d85d05..0000000
--- a/orchestrator/core/test_lunch.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import unittest
-
-sys.dont_write_bytecode = True
-import lunch
-
-class TestStringMethods(unittest.TestCase):
-
- def test_find_dirs(self):
- self.assertEqual([x for x in lunch.find_dirs("test/configs", "multitree_combos")], [
- "test/configs/build/make/orchestrator/multitree_combos",
- "test/configs/device/aa/bb/multitree_combos",
- "test/configs/vendor/aa/bb/multitree_combos"])
-
- def test_find_file(self):
- # Finds the one in device first because this is searching from the root,
- # not using find_named_config.
- self.assertEqual(lunch.find_file("test/configs", "v.mcombo"),
- "test/configs/device/aa/bb/multitree_combos/v.mcombo")
-
- def test_find_config_dirs(self):
- self.assertEqual([x for x in lunch.find_config_dirs("test/configs")], [
- "test/configs/build/make/orchestrator/multitree_combos",
- "test/configs/vendor/aa/bb/multitree_combos",
- "test/configs/device/aa/bb/multitree_combos"])
-
- def test_find_named_config(self):
- # Inside build/orchestrator, overriding device and vendor
- self.assertEqual(lunch.find_named_config("test/configs", "b"),
- "test/configs/build/make/orchestrator/multitree_combos/b.mcombo")
-
- # Nested dir inside a combo dir
- self.assertEqual(lunch.find_named_config("test/configs", "nested"),
- "test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo")
-
- # Inside vendor, overriding device
- self.assertEqual(lunch.find_named_config("test/configs", "v"),
- "test/configs/vendor/aa/bb/multitree_combos/v.mcombo")
-
- # Inside device
- self.assertEqual(lunch.find_named_config("test/configs", "d"),
- "test/configs/device/aa/bb/multitree_combos/d.mcombo")
-
- # Make sure we don't look too deep (for performance)
- self.assertIsNone(lunch.find_named_config("test/configs", "too_deep"))
-
-
- def test_choose_config_file(self):
- # Empty string argument
- self.assertEqual(lunch.choose_config_from_args("test/configs", [""]),
- (None, None))
-
- # A PRODUCT-VARIANT name
- self.assertEqual(lunch.choose_config_from_args("test/configs", ["v-eng"]),
- ("test/configs/vendor/aa/bb/multitree_combos/v.mcombo", "eng"))
-
- # A PRODUCT-VARIANT name that conflicts with a file
- self.assertEqual(lunch.choose_config_from_args("test/configs", ["b-eng"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
-
- # A PRODUCT-VARIANT that doesn't exist
- self.assertEqual(lunch.choose_config_from_args("test/configs", ["z-user"]),
- (None, None))
-
- # An explicit file
- self.assertEqual(lunch.choose_config_from_args("test/configs",
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
-
- # An explicit file that doesn't exist
- self.assertEqual(lunch.choose_config_from_args("test/configs",
- ["test/configs/doesnt_exist.mcombo", "eng"]),
- (None, None))
-
- # An explicit file without a variant should fail
- self.assertEqual(lunch.choose_config_from_args("test/configs",
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", None))
-
-
- def test_config_cycles(self):
- # Test that we catch cycles
- with self.assertRaises(lunch.ConfigException) as context:
- lunch.load_config("test/configs/parsing/cycles/1.mcombo")
- self.assertEqual(context.exception.kind, lunch.ConfigException.ERROR_CYCLE)
-
- def test_config_merge(self):
- # Test the merge logic
- self.assertEqual(lunch.load_config("test/configs/parsing/merge/1.mcombo"), {
- "in_1": "1",
- "in_1_2": "1",
- "merged": {"merged_1": "1",
- "merged_1_2": "1",
- "merged_2": "2",
- "merged_2_3": "2",
- "merged_3": "3"},
- "dict_1": {"a": "b"},
- "in_2": "2",
- "in_2_3": "2",
- "dict_2": {"a": "b"},
- "in_3": "3",
- "dict_3": {"a": "b"}
- })
-
- def test_list(self):
- self.assertEqual(sorted(lunch.find_all_lunchable("test/configs")),
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"])
-
-if __name__ == "__main__":
- unittest.main()
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/tree_analysis.py b/orchestrator/core/tree_analysis.py
deleted file mode 100644
index 052cad6..0000000
--- a/orchestrator/core/tree_analysis.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def analyze_trees(context, inner_trees):
- inner_trees.for_each_tree(run_analysis)
-
-def run_analysis(tree_key, inner_tree, cookie):
- inner_tree.invoke(["analyze"])
-
-
-
-
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
deleted file mode 100644
index 41310e0..0000000
--- a/orchestrator/core/utils.py
+++ /dev/null
@@ -1,141 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import platform
-
-class Context(object):
- """Mockable container for global state."""
- def __init__(self, out_root, errors):
- self.out = OutDir(out_root)
- self.errors = errors
- self.tools = HostTools()
-
-class TestContext(Context):
- "Context for testing. The real Context is manually constructed in orchestrator.py."
-
- def __init__(self, test_work_dir, test_name):
- super(TestContext, self).__init__(os.path.join(test_work_dir, test_name),
- Errors(None))
-
-
-class OutDir(object):
- """Encapsulates the logic about the out directory at the outer-tree level.
- See also inner_tree.OutDirLayout for inner tree out dir contents."""
-
- def __init__(self, root):
- "Initialize with the root of the OUT_DIR for the outer tree."
- self._out_root = root
- self._intermediates = "intermediates"
-
- def root(self):
- return self._out_root
-
- def inner_tree_dir(self, tree_root):
- """Root directory for inner tree inside the out dir."""
- return os.path.join(self._out_root, "trees", tree_root)
-
- def api_ninja_file(self):
- """The ninja file that assembles API surfaces."""
- return os.path.join(self._out_root, "api_surfaces.ninja")
-
- def api_library_dir(self, surface, version, library):
- """Directory for all the contents of a library inside an API surface, including
- the build files. Any intermediates should go in api_library_work_dir."""
- return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
-
- def api_library_work_dir(self, surface, version, library):
- """Intermediates / scratch directory for library inside an API surface."""
- return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
- str(version), library)
-
- def outer_ninja_file(self):
- return os.path.join(self._out_root, "multitree.ninja")
-
- def module_share_dir(self, module_type, module_name):
- return os.path.join(self._out_root, "shared", module_type, module_name)
-
- def staging_dir(self):
- return os.path.join(self._out_root, "staging")
-
- def dist_dir(self):
- "The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
- return os.path.join(self._out_root, "dist")
-
-class Errors(object):
- """Class for reporting and tracking errors."""
- def __init__(self, stream):
- """Initialize Error reporter with a file-like object."""
- self._stream = stream
- self._all = []
-
- def error(self, message, file=None, line=None, col=None):
- """Record the error message."""
- s = ""
- if file:
- s += str(file)
- s += ":"
- if line:
- s += str(line)
- s += ":"
- if col:
- s += str(col)
- s += ":"
- if s:
- s += " "
- s += str(message)
- if s[-1] != "\n":
- s += "\n"
- self._all.append(s)
- if self._stream:
- self._stream.write(s)
-
- def had_error(self):
- """Return if there were any errors reported."""
- return len(self._all)
-
- def get_errors(self):
- """Get all errors that were reported."""
- return self._all
-
-
-class HostTools(object):
- def __init__(self):
- if platform.system() == "Linux":
- self._arch = "linux-x86"
- else:
- raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
-
- # Some of these are called a lot, so pre-compute the strings to save memory
- self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
- self._acp = os.path.join(self._prebuilts, "acp")
- self._ninja = os.path.join(self._prebuilts, "ninja")
-
- def acp(self):
- return self._acp
-
- def ninja(self):
- return self._ninja
-
-
-def choose_out_dir():
- """Get the root of the out dir, either from the environment or by picking
- a default."""
- result = os.environ.get("OUT_DIR")
- if result:
- return result
- else:
- return "out"
diff --git a/orchestrator/demo/buffet_helper.py b/orchestrator/demo/buffet_helper.py
deleted file mode 100644
index fa29aeb..0000000
--- a/orchestrator/demo/buffet_helper.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-import os
-import sys
-import yaml
-
-from hierarchy import parse_hierarchy
-
-
-def main():
- if len(sys.argv) != 2:
- print('usage: %s target' % sys.argv[0])
- exit(1)
-
- args = sys.argv[1].split('-')
- if len(args) != 2:
- print('target format: {target}-{variant}')
- exit(1)
-
- target, variant = args
-
- if variant not in ['eng', 'user', 'userdebug']:
- print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
- variant)
- exit(1)
-
- build_top = os.getenv('BUFFET_BUILD_TOP')
- if not build_top:
-        print('BUFFET_BUILD_TOP is not set; did you run envsetup.sh correctly?')
- exit(1)
-
- hierarchy_map = parse_hierarchy(build_top)
-
- if target not in hierarchy_map:
- raise RuntimeError(
- "unknown target '%s': couldn't find the target. Supported targets are: %s"
- % (target, list(hierarchy_map.keys())))
-
- hierarchy = [target]
- while hierarchy_map[hierarchy[-1]]:
- hierarchy.append(hierarchy_map[hierarchy[-1]])
-
- print('Target hierarchy for %s: %s' % (target, hierarchy))
-
-
-if __name__ == '__main__':
- main()
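
The loop in `main` above climbs the child-to-parent map until it reaches a root; a short sketch with a hypothetical hierarchy map (shaped like the one `parse_hierarchy` returns):

``` python
# Hypothetical child -> parent map, as produced by parse_hierarchy().
hierarchy_map = {
    "oriole": "aosp_oriole",
    "aosp_oriole": "armv8",
    "armv8": "aosp_arm64",
    "aosp_arm64": None,  # root: no parent
}

target = "oriole"
hierarchy = [target]
while hierarchy_map[hierarchy[-1]]:
    hierarchy.append(hierarchy_map[hierarchy[-1]])

print(hierarchy)  # ['oriole', 'aosp_oriole', 'armv8', 'aosp_arm64']
```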
diff --git a/orchestrator/demo/build_helper.py b/orchestrator/demo/build_helper.py
deleted file mode 100644
index c481f80..0000000
--- a/orchestrator/demo/build_helper.py
+++ /dev/null
@@ -1,367 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import copy
-import hierarchy
-import json
-import logging
-import filecmp
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import collect_metadata
-import utils
-
-BUILD_CMD_TO_ALL = (
- 'clean',
- 'installclean',
- 'update-meta',
-)
-BUILD_ALL_EXEMPTION = (
- 'art',
-)
-
-def get_supported_product(ctx, supported_products):
- hierarchy_map = hierarchy.parse_hierarchy(ctx.build_top())
- target = ctx.target_product()
-
- while target not in supported_products:
- if target not in hierarchy_map:
- return None
- target = hierarchy_map[target]
- return target
-
-
-def parse_goals(ctx, metadata, goals):
- """Parse goals and returns a map from each component to goals.
-
- e.g.
-
- "m main art timezone:foo timezone:bar" will return the following dict: {
- "main": {"all"},
- "art": {"all"},
- "timezone": {"foo", "bar"},
- }
- """
- # for now, goal should look like:
- # {component} or {component}:{subgoal}
-
- ret = collections.defaultdict(set)
-
- for goal in goals:
- # check if the command is for all components
- if goal in BUILD_CMD_TO_ALL:
- ret['all'].add(goal)
- continue
-
- # should be {component} or {component}:{subgoal}
- try:
- component, subgoal = goal.split(':') if ':' in goal else (goal, 'all')
- except ValueError:
- raise RuntimeError(
- 'unknown goal: %s: should be {component} or {component}:{subgoal}' %
- goal)
- if component not in metadata:
- raise RuntimeError('unknown goal: %s: component %s not found' %
- (goal, component))
- if not get_supported_product(ctx, metadata[component]['lunch_targets']):
- raise RuntimeError("can't find matching target. Supported targets are: " +
- str(metadata[component]['lunch_targets']))
-
- ret[component].add(subgoal)
-
- return ret
-
-
-def find_cycle(metadata):
- """ Finds a cyclic dependency among components.
-
- This is for debugging.
- """
- visited = set()
- parent_node = dict()
- in_stack = set()
-
- # Returns a cycle if one is found
-    def dfs(node):
-        nonlocal visited, parent_node, in_stack
-
-        visited.add(node)
-        in_stack.add(node)
-        if 'deps' not in metadata[node]:
-            in_stack.remove(node)
-            return None
-        for child in metadata[node]['deps']:
-            # We found a cycle (child ~> node) if child is still in the stack
-            if child in in_stack:
-                cycle = [node]
-                while cycle[-1] != child:
-                    cycle.append(parent_node[cycle[-1]])
-                return cycle
-
-            # Else, continue searching
-            if child in visited:
-                continue
-
-            parent_node[child] = node
-            result = dfs(child)
-            if result:
-                return result
-
-        in_stack.remove(node)
-        return None
-
- for component in metadata:
- if component in visited:
- continue
-
- result = dfs(component)
- if result:
- return result
-
- return None
-
-
-def topological_sort_components(metadata):
- """ Performs topological sort on components.
-
- If A depends on B, B appears first.
- """
- # If A depends on B, we want B to appear before A. But the graph in metadata
- # is represented as A -> B (B in metadata[A]['deps']). So we sort in the
- # reverse order, and then reverse the result again to get the desired order.
- indegree = collections.defaultdict(int)
- for component in metadata:
- if 'deps' not in metadata[component]:
- continue
- for dep in metadata[component]['deps']:
- indegree[dep] += 1
-
- component_queue = collections.deque()
- for component in metadata:
- if indegree[component] == 0:
- component_queue.append(component)
-
- result = []
- while component_queue:
- component = component_queue.popleft()
- result.append(component)
- if 'deps' not in metadata[component]:
- continue
- for dep in metadata[component]['deps']:
- indegree[dep] -= 1
- if indegree[dep] == 0:
- component_queue.append(dep)
-
- # If topological sort fails, there must be a cycle.
- if len(result) != len(metadata):
- cycle = find_cycle(metadata)
- raise RuntimeError('circular dependency found among metadata: %s' % cycle)
-
- return result[::-1]
-
-
-def add_dependency_goals(ctx, metadata, component, goals):
- """ Adds goals that given component depends on."""
- # For now, let's just add "all"
- # TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
- if 'deps' not in metadata[component]:
- return
-
- for dep in metadata[component]['deps']:
- goals[dep].add('all')
-
-
-def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
- """ Analyzes the dependency graph among components, adds build commands for
-
- dependencies, and then sorts the goals.
-
- Returns a list of tuples: (component_name, set of subgoals).
- Builds should be run in the list's order.
- """
- # TODO(inseob@): after topological sort, some components may be built in
- # parallel.
-
- topological_order = topological_sort_components(metadata)
- combined_goals = copy.deepcopy(parsed_goals)
-
- # Add build rules for each component's dependencies
- # We do this in reverse order, so it can be transitive.
- # e.g. if A depends on B and B depends on C, and we build A,
- # C should also be built, in addition to B.
- for component in topological_order[::-1]:
- if component in combined_goals:
- add_dependency_goals(ctx, metadata, component, combined_goals)
-
- ret = []
- for component in ['all'] + topological_order:
- if component in combined_goals:
- ret.append((component, combined_goals[component]))
-
- return ret
-
-
-def run_build(ctx, metadata, component, subgoals):
- build_cmd = metadata[component]['build_cmd']
- out_dir = metadata[component]['out_dir']
- default_goals = ''
- if 'default_goals' in metadata[component]:
- default_goals = metadata[component]['default_goals']
-
- if 'all' in subgoals:
- goal = default_goals
- else:
- goal = ' '.join(subgoals)
-
- build_vars = ''
- if 'update-meta' in subgoals:
- build_vars = 'TARGET_MULTITREE_UPDATE_META=true'
- # TODO(inseob@): shell escape
- cmd = [
- '/bin/bash', '-c',
- 'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
- (get_supported_product(ctx, metadata[component]['lunch_targets']),
- ctx.target_build_variant(), build_vars, build_cmd, goal)
- ]
- logging.debug('cwd: ' + metadata[component]['path'])
- logging.debug('running build: ' + str(cmd))
-
- subprocess.run(cmd, cwd=metadata[component]['path'], check=True)
-
-
-def run_build_all(ctx, metadata, subgoals):
- for component in metadata:
- if component in BUILD_ALL_EXEMPTION:
- continue
- run_build(ctx, metadata, component, subgoals)
-
-
-def find_components(metadata, predicate):
- for component in metadata:
- if predicate(component):
- yield component
-
-
-def import_filegroups(metadata, component, exporting_component, target_file_pairs):
- imported_filegroup_dir = os.path.join(metadata[component]['path'], 'imported', exporting_component)
-
- bp_content = ''
- for name, outpaths in target_file_pairs:
- bp_content += ('filegroup {{\n'
- ' name: "{fname}",\n'
- ' srcs: [\n'.format(fname=name))
- for outpath in outpaths:
- bp_content += ' "{outfile}",\n'.format(outfile=os.path.basename(outpath))
- bp_content += (' ],\n'
- '}\n')
-
- with tempfile.TemporaryDirectory() as tmp_dir:
- with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
- fout.write(bp_content)
- for _, outpaths in target_file_pairs:
- for outpath in outpaths:
- os.symlink(os.path.join(metadata[exporting_component]['path'], outpath),
- os.path.join(tmp_dir, os.path.basename(outpath)))
- cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
- if os.path.exists(imported_filegroup_dir) and len(
- cmp_result.left_only) + len(cmp_result.right_only) + len(
- cmp_result.diff_files) == 0:
- # Files are identical, it doesn't need to be written
- logging.info(
-                'imported files exist and the contents are identical: {} -> {}'
- .format(component, exporting_component))
- continue
- logging.info('creating symlinks for imported files: {} -> {}'.format(
- component, exporting_component))
- os.makedirs(imported_filegroup_dir, exist_ok=True)
- shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
- shutil.move(tmp_dir, imported_filegroup_dir)
-
-
-def prepare_build(metadata, component):
- imported_dir = os.path.join(metadata[component]['path'], 'imported')
- if utils.META_DEPS not in metadata[component]:
- if os.path.exists(imported_dir):
- logging.debug('remove {}'.format(imported_dir))
- shutil.rmtree(imported_dir)
- return
-
- imported_components = set()
- for exp_comp in metadata[component][utils.META_DEPS]:
- if utils.META_FILEGROUP in metadata[component][utils.META_DEPS][exp_comp]:
- filegroups = metadata[component][utils.META_DEPS][exp_comp][utils.META_FILEGROUP]
- target_file_pairs = []
- for name in filegroups:
- target_file_pairs.append((name, filegroups[name]))
- import_filegroups(metadata, component, exp_comp, target_file_pairs)
- imported_components.add(exp_comp)
-
- # Remove directories that are not generated this time.
- if os.path.exists(imported_dir):
- if len(imported_components) == 0:
- shutil.rmtree(imported_dir)
- else:
- for remove_target in set(os.listdir(imported_dir)) - imported_components:
- logging.info('remove unnecessary imported dir: {}'.format(remove_target))
- shutil.rmtree(os.path.join(imported_dir, remove_target))
-
-
-def main():
- utils.set_logging_config(logging.DEBUG)
- ctx = utils.get_build_context()
-
- logging.info('collecting metadata')
-
-
- goals = sys.argv[1:]
- if not goals:
- logging.debug('empty goals. defaults to main')
- goals = ['main']
-
- logging.debug('goals: ' + str(goals))
-
- # Force update the metadata for the 'update-meta' build
- metadata_collector = collect_metadata.MetadataCollector(
- ctx.components_top(), ctx.out_dir(),
- collect_metadata.COMPONENT_METADATA_DIR,
- collect_metadata.COMPONENT_METADATA_FILE,
- force_update='update-meta' in goals)
- metadata_collector.collect()
-
- metadata = metadata_collector.get_metadata()
- logging.debug('metadata: ' + str(metadata))
-
- parsed_goals = parse_goals(ctx, metadata, goals)
- logging.debug('parsed goals: ' + str(parsed_goals))
-
- sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
- logging.debug('sorted goals with deps: ' + str(sorted_goals))
-
- for component, subgoals in sorted_goals:
- if component == 'all':
- run_build_all(ctx, metadata, subgoals)
- continue
- prepare_build(metadata, component)
- run_build(ctx, metadata, component, subgoals)
-
-
-if __name__ == '__main__':
- main()
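
`topological_sort_components` above is Kahn's algorithm run on the reversed edges; a self-contained sketch on a toy metadata dict (component names are made up) showing why dependencies end up first:

``` python
import collections

# Toy metadata: "main" depends on "art" and "timezone"; "art" on "timezone".
metadata = {
    "main": {"deps": ["art", "timezone"]},
    "art": {"deps": ["timezone"]},
    "timezone": {},
}

indegree = collections.defaultdict(int)
for component in metadata:
    for dep in metadata[component].get("deps", []):
        indegree[dep] += 1

queue = collections.deque(c for c in metadata if indegree[c] == 0)
result = []
while queue:
    component = queue.popleft()
    result.append(component)
    for dep in metadata[component].get("deps", []):
        indegree[dep] -= 1
        if indegree[dep] == 0:
            queue.append(dep)

# Reverse so dependencies come first, matching the deleted helper.
print(result[::-1])  # ['timezone', 'art', 'main']
```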
diff --git a/orchestrator/demo/collect_metadata.py b/orchestrator/demo/collect_metadata.py
deleted file mode 100755
index 148167d..0000000
--- a/orchestrator/demo/collect_metadata.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import copy
-import json
-import logging
-import os
-import sys
-import yaml
-from collections import defaultdict
-from typing import (
- List,
- Set,
-)
-
-import utils
-
-# SKIP_COMPONENT_SEARCH = (
-# 'tools',
-# )
-COMPONENT_METADATA_DIR = '.repo'
-COMPONENT_METADATA_FILE = 'treeinfo.yaml'
-GENERATED_METADATA_FILE = 'metadata.json'
-COMBINED_METADATA_FILENAME = 'multitree_meta.json'
-
-
-class Dep(object):
- def __init__(self, name, component, deps_type):
- self.name = name
- self.component = component
- self.type = deps_type
- self.out_paths = list()
-
-
-class ExportedDep(Dep):
- def __init__(self, name, component, deps_type):
- super().__init__(name, component, deps_type)
-
- def setOutputPaths(self, output_paths: list):
- self.out_paths = output_paths
-
-
-class ImportedDep(Dep):
- required_type_map = {
- # import type: (required type, get imported module list)
- utils.META_FILEGROUP: (utils.META_MODULES, True),
- }
-
- def __init__(self, name, component, deps_type, import_map):
- super().__init__(name, component, deps_type)
- self.exported_deps: Set[ExportedDep] = set()
- self.imported_modules: List[str] = list()
- self.required_type = deps_type
- get_imported_module = False
- if deps_type in ImportedDep.required_type_map:
- self.required_type, get_imported_module = ImportedDep.required_type_map[deps_type]
- if get_imported_module:
- self.imported_modules = import_map[name]
- else:
- self.imported_modules.append(name)
-
- def verify_and_add(self, exported: ExportedDep):
- if self.required_type != exported.type:
- raise RuntimeError(
-                '{comp} component imports {module} for {imp_type} but it is exported as {exp_type}.'
- .format(comp=self.component, module=exported.name, imp_type=self.required_type, exp_type=exported.type))
- self.exported_deps.add(exported)
- self.out_paths.extend(exported.out_paths)
- # Remove duplicates. We may not use set() which is not JSON serializable
- self.out_paths = list(dict.fromkeys(self.out_paths))
-
-
-class MetadataCollector(object):
- """Visit all component directories and collect the metadata from them.
-
-Example of metadata:
-==========
-build_cmd: m # build command for this component. 'm' if omitted
-out_dir: out # out dir of this component. 'out' if omitted
-exports:
- libraries:
- - name: libopenjdkjvm
- - name: libopenjdkjvmd
- build_cmd: mma # build command for libopenjdkjvmd if specified
- out_dir: out/soong # out dir for libopenjdkjvmd if specified
- - name: libctstiagent
- APIs:
- - api1
- - api2
-imports:
- libraries:
- - lib1
- - lib2
- APIs:
- - import_api1
- - import_api2
-lunch_targets:
- - arm64
- - x86_64
-"""
-
- def __init__(self, component_top, out_dir, meta_dir, meta_file, force_update=False):
- if not os.path.exists(out_dir):
- os.makedirs(out_dir)
-
- self.__component_top = component_top
- self.__out_dir = out_dir
- self.__metadata_path = os.path.join(meta_dir, meta_file)
- self.__combined_metadata_path = os.path.join(self.__out_dir,
- COMBINED_METADATA_FILENAME)
- self.__force_update = force_update
-
- self.__metadata = dict()
- self.__map_exports = dict()
- self.__component_set = set()
-
- def collect(self):
- """ Read precomputed combined metadata from the json file.
-
- If any components have updated their metadata, update the metadata
- information and the json file.
- """
- timestamp = self.__restore_metadata()
- if timestamp and os.path.getmtime(__file__) > timestamp:
- logging.info('Update the metadata as the orchestrator has been changed')
- self.__force_update = True
- self.__collect_from_components(timestamp)
-
- def get_metadata(self):
- """ Returns collected metadata from all components"""
- if not self.__metadata:
- logging.warning('Metadata is empty')
- return copy.deepcopy(self.__metadata)
-
- def __collect_from_components(self, timestamp):
- """ Read metadata from all components
-
- If any components have newer metadata files or are removed, update the
- combined metadata.
- """
- metadata_updated = False
- for component in os.listdir(self.__component_top):
- # if component in SKIP_COMPONENT_SEARCH:
- # continue
- if self.__read_component_metadata(timestamp, component):
- metadata_updated = True
- if self.__read_generated_metadata(timestamp, component):
- metadata_updated = True
-
- deleted_components = set()
- for meta in self.__metadata:
- if meta not in self.__component_set:
- logging.info('Component {} is removed'.format(meta))
- deleted_components.add(meta)
- metadata_updated = True
- for meta in deleted_components:
- del self.__metadata[meta]
-
- if metadata_updated:
- self.__update_dependencies()
- self.__store_metadata()
- logging.info('Metadata updated')
-
- def __read_component_metadata(self, timestamp, component):
- """ Search for the metadata file from a component.
-
- If the metadata is modified, read the file and update the metadata.
- """
- component_path = os.path.join(self.__component_top, component)
- metadata_file = os.path.join(component_path, self.__metadata_path)
- logging.info(
- 'Reading a metadata file from {} component ...'.format(component))
- if not os.path.isfile(metadata_file):
- logging.warning('Metadata file {} not found!'.format(metadata_file))
- return False
-
- self.__component_set.add(component)
- if not self.__force_update and timestamp and timestamp > os.path.getmtime(metadata_file):
- logging.info('... yaml not changed. Skip')
- return False
-
- with open(metadata_file) as f:
- meta = yaml.load(f, Loader=yaml.SafeLoader)
-
- meta['path'] = component_path
- if utils.META_BUILDCMD not in meta:
- meta[utils.META_BUILDCMD] = utils.DEFAULT_BUILDCMD
- if utils.META_OUTDIR not in meta:
- meta[utils.META_OUTDIR] = utils.DEFAULT_OUTDIR
-
- if utils.META_IMPORTS not in meta:
- meta[utils.META_IMPORTS] = defaultdict(dict)
- if utils.META_EXPORTS not in meta:
- meta[utils.META_EXPORTS] = defaultdict(dict)
-
- self.__metadata[component] = meta
- return True
-
- def __read_generated_metadata(self, timestamp, component):
- """ Read a metadata gerated by 'update-meta' build command from the soong build system
-
- Soong generate the metadata that has the information of import/export module/files.
- Build orchestrator read the generated metadata to collect the dependency information.
-
- Generated metadata has the following format:
- {
- "Imported": {
- "FileGroups": {
- "<name_of_filegroup>": [
- "<exported_module_name>",
- ...
- ],
- ...
- }
- }
- "Exported": {
- "<exported_module_name>": [
- "<output_file_path>",
- ...
- ],
- ...
- }
- }
- """
- if component not in self.__component_set:
- # skip reading generated metadata if the component metadata file was missing
- return False
- component_out = os.path.join(self.__component_top, component, self.__metadata[component][utils.META_OUTDIR])
- generated_metadata_file = os.path.join(component_out, 'soong', 'multitree', GENERATED_METADATA_FILE)
- if not os.path.isfile(generated_metadata_file):
-            logging.info('... Soong did not generate the metadata file. Skip')
- return False
- if not self.__force_update and timestamp and timestamp > os.path.getmtime(generated_metadata_file):
- logging.info('... Soong generated metadata not changed. Skip')
- return False
-
- with open(generated_metadata_file, 'r') as gen_meta_json:
- try:
- gen_metadata = json.load(gen_meta_json)
- except json.decoder.JSONDecodeError:
- logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
- generated_metadata_file))
- return False
-
- if utils.SOONG_IMPORTED in gen_metadata:
- imported = gen_metadata[utils.SOONG_IMPORTED]
- if utils.SOONG_IMPORTED_FILEGROUPS in imported:
- self.__metadata[component][utils.META_IMPORTS][utils.META_FILEGROUP] = imported[utils.SOONG_IMPORTED_FILEGROUPS]
- if utils.SOONG_EXPORTED in gen_metadata:
- self.__metadata[component][utils.META_EXPORTS][utils.META_MODULES] = gen_metadata[utils.SOONG_EXPORTED]
-
- return True
-
- def __update_export_map(self):
- """ Read metadata of all components and update the export map
-
-        'libraries' and 'APIs' are special exported types that are provided manually
-        in the .yaml metadata files. These need to be replaced with the implementation
-        in the Soong-generated metadata.
-        The export type 'module' is generated by the Soong build system from the modules
-        with the 'export: true' property. This export type includes a dictionary with module
- names as keys and their output files as values. These output files will be used as
- prebuilt sources when generating the imported modules.
- """
- self.__map_exports = dict()
- for comp in self.__metadata:
- if utils.META_EXPORTS not in self.__metadata[comp]:
- continue
- exports = self.__metadata[comp][utils.META_EXPORTS]
-
- for export_type in exports:
- for module in exports[export_type]:
- if export_type == utils.META_LIBS:
- name = module[utils.META_LIB_NAME]
- else:
- name = module
-
- if name in self.__map_exports:
- raise RuntimeError(
- 'Exported libs conflict!!!: "{name}" in the {comp} component is already exported by the {prev} component.'
- .format(name=name, comp=comp, prev=self.__map_exports[name][utils.EXP_COMPONENT]))
- exported_deps = ExportedDep(name, comp, export_type)
- if export_type == utils.META_MODULES:
- exported_deps.setOutputPaths(exports[export_type][module])
- self.__map_exports[name] = exported_deps
-
- def __verify_and_add_dependencies(self, component):
- """ Search all imported items from the export_map.
-
- If any imported items are not provided by the other components, report
- an error.
- Otherwise, add the component dependency and update the exported information to the
- import maps.
- """
-        def verify_and_add_dependencies(imported_dep: ImportedDep):
-            deps = self.__metadata[component][utils.META_DEPS]
-            for module in imported_dep.imported_modules:
-                if module not in self.__map_exports:
-                    raise RuntimeError(
-                        'Imported item not found!!!: Imported module "{module}" in the {comp} component is not exported from any other components.'
-                        .format(module=module, comp=imported_dep.component))
-                imported_dep.verify_and_add(self.__map_exports[module])
-
-                # Record the dependency per exporting component; the modules of
-                # a filegroup import may come from different components.
-                exp_comp = self.__map_exports[module].component
-                if exp_comp not in deps:
-                    deps[exp_comp] = defaultdict(defaultdict)
-                deps[exp_comp][imported_dep.type][imported_dep.name] = imported_dep.out_paths
-
- self.__metadata[component][utils.META_DEPS] = defaultdict()
- imports = self.__metadata[component][utils.META_IMPORTS]
- for import_type in imports:
- for module in imports[import_type]:
- verify_and_add_dependencies(ImportedDep(module, component, import_type, imports[import_type]))
-
- def __check_imports(self):
- """ Search the export map to find the component to import libraries or APIs.
-
- Update the 'deps' field that includes the dependent components.
- """
- for component in self.__metadata:
- self.__verify_and_add_dependencies(component)
- if utils.META_DEPS in self.__metadata[component]:
- logging.debug('{comp} depends on {list} components'.format(
- comp=component, list=self.__metadata[component][utils.META_DEPS]))
-
- def __update_dependencies(self):
- """ Generate a dependency graph for the components
-
- Update __map_exports and the dependency graph with the maps.
- """
- self.__update_export_map()
- self.__check_imports()
-
- def __store_metadata(self):
- """ Store the __metadata dictionary as json format"""
- with open(self.__combined_metadata_path, 'w') as json_file:
- json.dump(self.__metadata, json_file, indent=2)
-
- def __restore_metadata(self):
- """ Read the stored json file and return the time stamps of the
-
- metadata file.
- """
- if not os.path.exists(self.__combined_metadata_path):
- return None
-
- with open(self.__combined_metadata_path, 'r') as json_file:
- try:
- self.__metadata = json.load(json_file)
- except json.decoder.JSONDecodeError:
- logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
- self.__combined_metadata_path))
- return None
-
- logging.info('Metadata restored from {}'.format(
- self.__combined_metadata_path))
- self.__update_export_map()
- return os.path.getmtime(self.__combined_metadata_path)
-
-
-def get_args():
-
- def check_dir(path):
- if os.path.exists(path) and os.path.isdir(path):
- return os.path.normpath(path)
- else:
- raise argparse.ArgumentTypeError('\"{}\" is not a directory'.format(path))
-
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--component-top',
- help='Scan all components under this directory.',
- default=os.path.join(os.path.dirname(__file__), '../../../components'),
- type=check_dir)
- parser.add_argument(
- '--meta-file',
- help='Name of the metadata file.',
- default=COMPONENT_METADATA_FILE,
- type=str)
- parser.add_argument(
- '--meta-dir',
- help='Each component has the metadata in this directory.',
- default=COMPONENT_METADATA_DIR,
- type=str)
- parser.add_argument(
- '--out-dir',
- help='Out dir for the outer tree. The orchestrator stores the collected metadata in this directory.',
- default=os.path.join(os.path.dirname(__file__), '../../../out'),
- type=os.path.normpath)
- parser.add_argument(
- '--force',
- '-f',
- action='store_true',
- help='Force to collect metadata',
- )
- parser.add_argument(
- '--verbose',
- '-v',
- help='Increase output verbosity, e.g. "-v", "-vv".',
- action='count',
- default=0)
- return parser.parse_args()
-
-
-def main():
- args = get_args()
- utils.set_logging_config(args.verbose)
-
- metadata_collector = MetadataCollector(args.component_top, args.out_dir,
- args.meta_dir, args.meta_file, args.force)
- metadata_collector.collect()
-
-
-if __name__ == '__main__':
- main()
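
`__read_component_metadata` above fills in defaults for `build_cmd` and `out_dir` when a component's `treeinfo.yaml` omits them; a small sketch (assuming PyYAML, which these scripts already use; the YAML content is hypothetical):

``` python
import yaml

raw = """
exports:
  libraries:
    - name: libhello1
lunch_targets:
  - aosp_arm64
"""

meta = yaml.load(raw, Loader=yaml.SafeLoader)
# Mirror the deleted defaults from utils.py.
meta.setdefault("build_cmd", "m")   # utils.DEFAULT_BUILDCMD
meta.setdefault("out_dir", "out")   # utils.DEFAULT_OUTDIR
print(meta["build_cmd"], meta["out_dir"])  # m out
```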
diff --git a/orchestrator/demo/envsetup.sh b/orchestrator/demo/envsetup.sh
deleted file mode 100644
index 902a37c..0000000
--- a/orchestrator/demo/envsetup.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-
-function buffet()
-{
- local product variant selection
- if [[ $# -ne 1 ]]; then
- echo "usage: buffet [target]" >&2
- return 1
- fi
-
- selection=$1
- product=${selection%%-*} # Trim everything after first dash
- variant=${selection#*-} # Trim everything up to first dash
-
- if [ -z "$product" ]
- then
- echo
- echo "Invalid lunch combo: $selection"
- return 1
- fi
-
- if [ -z "$variant" ]
- then
- if [[ "$product" =~ .*_(eng|user|userdebug) ]]
- then
- echo "Did you mean -${product/*_/}? (dash instead of underscore)"
- fi
- return 1
- fi
-
- BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py $1 || return 1
-
- export BUFFET_BUILD_TOP=$(pwd)
- export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
- export BUFFET_TARGET_PRODUCT=$product
- export BUFFET_TARGET_BUILD_VARIANT=$variant
- export BUFFET_TARGET_BUILD_TYPE=release
-}
-
-function m()
-{
- if [ -z "$BUFFET_BUILD_TOP" ]
- then
- echo "Run \"buffet [target]\" first"
- return 1
- fi
- python3 $BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py "$@"
-}
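
The `buffet` function above splits the `{product}-{variant}` combo with shell parameter expansion; the equivalent logic, sketched in Python for clarity (the combo string is hypothetical):

``` python
selection = "aosp_cf_arm64_phone-userdebug"  # hypothetical lunch combo

product = selection.split("-", 1)[0]   # ${selection%%-*}: trim from first dash
variant = selection.split("-", 1)[-1]  # ${selection#*-}: trim through first dash

# Note: like the shell version, a combo with no dash leaves
# variant == selection, so it is not caught by an emptiness check.
print(product, variant)  # aosp_cf_arm64_phone userdebug
```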
diff --git a/orchestrator/demo/hierarchy.py b/orchestrator/demo/hierarchy.py
deleted file mode 100644
index ae1825c..0000000
--- a/orchestrator/demo/hierarchy.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import yaml
-
-
-def parse_hierarchy(build_top):
- """Parse build hierarchy file from given build top directory, and returns a dict from child targets to parent targets.
-
- Example of hierarchy file:
- ==========
- aosp_arm64:
- - armv8
- - aosp_cf_arm64_phone
-
- armv8:
- - aosp_oriole
- - aosp_sunfish
-
- aosp_oriole:
- - oriole
-
- aosp_sunfish:
- - sunfish
-
- oriole:
- # leaf
-
- sunfish:
- # leaf
- ==========
-
- If we parse this yaml, we get a dict looking like:
-
- {
- "sunfish": "aosp_sunfish",
- "oriole": "aosp_oriole",
- "aosp_oriole": "armv8",
- "aosp_sunfish": "armv8",
- "armv8": "aosp_arm64",
- "aosp_cf_arm64_phone": "aosp_arm64",
- "aosp_arm64": None, # no parent
- }
- """
- metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
- if not os.path.isfile(metadata_path):
- raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)
-
- with open(metadata_path, 'r') as f:
- hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)
-
- hierarchy_map = dict()
-
- for parent_target, child_targets in hierarchy_yaml.items():
- if not child_targets:
- # leaf
- continue
- for child_target in child_targets:
- hierarchy_map[child_target] = parent_target
-
- for parent_target in hierarchy_yaml:
- # targets with no parent
- if parent_target not in hierarchy_map:
- hierarchy_map[parent_target] = None
-
- return hierarchy_map
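
The map built by `parse_hierarchy` above is just the inversion of the YAML's parent-to-children lists; a compact sketch using the docstring's example shape:

``` python
# Parent -> children, as loaded from hierarchy.yaml (leaves map to None).
hierarchy_yaml = {
    "aosp_arm64": ["armv8"],
    "armv8": ["aosp_oriole"],
    "aosp_oriole": ["oriole"],
    "oriole": None,  # leaf
}

hierarchy_map = {}
for parent, children in hierarchy_yaml.items():
    for child in children or []:
        hierarchy_map[child] = parent
for parent in hierarchy_yaml:
    hierarchy_map.setdefault(parent, None)  # roots have no parent

print(hierarchy_map["oriole"])      # aosp_oriole
print(hierarchy_map["aosp_arm64"])  # None
```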
diff --git a/orchestrator/demo/hierarchy.yaml b/orchestrator/demo/hierarchy.yaml
deleted file mode 100644
index cc6de4d..0000000
--- a/orchestrator/demo/hierarchy.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
-# hierarchy of targets
-
-aosp_arm64:
-- armv8
-- aosp_cf_arm64_phone
-
-armv8:
-- mainline_modules_arm64
-
-mainline_modules_arm64:
-- aosp_oriole
-- aosp_sunfish
-- aosp_raven
-
-aosp_oriole:
-- oriole
-
-aosp_sunfish:
-- sunfish
-
-aosp_raven:
-- raven
-
-oriole:
-# leaf
-
-sunfish:
-# leaf
-
-raven:
-# leaf
-
-aosp_cf_arm64_phone:
-- cf_arm64_phone
-
-cf_arm64_phone:
-# leaf
diff --git a/orchestrator/demo/utils.py b/orchestrator/demo/utils.py
deleted file mode 100644
index 5dbbe4a..0000000
--- a/orchestrator/demo/utils.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import logging
-import os
-
-# default build configuration for each component
-DEFAULT_BUILDCMD = 'm'
-DEFAULT_OUTDIR = 'out'
-
-# yaml fields
-META_BUILDCMD = 'build_cmd'
-META_OUTDIR = 'out_dir'
-META_EXPORTS = 'exports'
-META_IMPORTS = 'imports'
-META_TARGETS = 'lunch_targets'
-META_DEPS = 'deps'
-# fields under 'exports' and 'imports'
-META_LIBS = 'libraries'
-META_APIS = 'APIs'
-META_FILEGROUP = 'filegroup'
-META_MODULES = 'modules'
-# fields under 'libraries'
-META_LIB_NAME = 'name'
-
-# fields for generated metadata file
-SOONG_IMPORTED = 'Imported'
-SOONG_IMPORTED_FILEGROUPS = 'FileGroups'
-SOONG_EXPORTED = 'Exported'
-
-# export map items
-EXP_COMPONENT = 'component'
-EXP_TYPE = 'type'
-EXP_OUTPATHS = 'outpaths'
-
-class BuildContext:
-
- def __init__(self):
- self._build_top = os.getenv('BUFFET_BUILD_TOP')
- self._components_top = os.getenv('BUFFET_COMPONENTS_TOP')
- self._target_product = os.getenv('BUFFET_TARGET_PRODUCT')
- self._target_build_variant = os.getenv('BUFFET_TARGET_BUILD_VARIANT')
- self._target_build_type = os.getenv('BUFFET_TARGET_BUILD_TYPE')
- self._out_dir = os.path.join(self._build_top, 'out')
-
- if not self._build_top:
- raise RuntimeError("Can't find root. Did you run buffet?")
-
- def build_top(self):
- return self._build_top
-
- def components_top(self):
- return self._components_top
-
- def target_product(self):
- return self._target_product
-
- def target_build_variant(self):
- return self._target_build_variant
-
- def target_build_type(self):
- return self._target_build_type
-
- def out_dir(self):
- return self._out_dir
-
-
-def get_build_context():
- return BuildContext()
-
-
-def set_logging_config(verbose_level):
- verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
- verbosity = min(verbose_level, 2)
- logging.basicConfig(
- format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
- level=verbose_map[verbosity])
diff --git a/orchestrator/inner_build/common.py b/orchestrator/inner_build/common.py
deleted file mode 100644
index 382844b..0000000
--- a/orchestrator/inner_build/common.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import sys
-
-def _parse_arguments(argv):
-    """Return an argparse options object."""
-    argv = argv[1:]
- # Top-level parser
- parser = argparse.ArgumentParser(prog=".inner_build")
-
- parser.add_argument("--out_dir", action="store", required=True,
- help="root of the output directory for this inner tree's API contributions")
-
- parser.add_argument("--api_domain", action="append", required=True,
- help="which API domains are to be built in this inner tree")
-
- subparsers = parser.add_subparsers(required=True, dest="command",
- help="subcommands")
-
- # inner_build describe command
- describe_parser = subparsers.add_parser("describe",
- help="describe the capabilities of this inner tree's build system")
-
- # create the parser for the "b" command
- export_parser = subparsers.add_parser("export_api_contributions",
- help="export the API contributions of this inner tree")
-
- # create the parser for the "b" command
- export_parser = subparsers.add_parser("analyze",
- help="main build analysis for this inner tree")
-
- # Parse the arguments
- return parser.parse_args(argv)
-
-
-class Commands(object):
- def Run(self, argv):
- """Parse the command arguments and call the corresponding subcommand method on
- this object.
-
- Throws AttributeError if the method for the command wasn't found.
- """
- args = _parse_arguments(argv)
- return getattr(self, args.command)(args)
-
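
`Commands.Run` above dispatches the parsed subcommand name to a same-named method via `getattr`; a minimal sketch with a toy subclass (assuming `common.py` is on the import path; the paths are hypothetical):

``` python
import common

class EchoBuild(common.Commands):
    def describe(self, args):
        print("describe ->", args.out_dir, args.api_domain)

EchoBuild().Run([
    ".inner_build",           # argv[0] is stripped by _parse_arguments
    "--out_dir", "out/demo",  # hypothetical out dir
    "--api_domain", "system",
    "describe",
])
# describe -> out/demo ['system']
```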
diff --git a/orchestrator/inner_build/inner_build_demo.py b/orchestrator/inner_build/inner_build_demo.py
deleted file mode 100755
index 264739b..0000000
--- a/orchestrator/inner_build/inner_build_demo.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-import textwrap
-
-sys.dont_write_bytecode = True
-import common
-
-def mkdirs(path):
- try:
- os.makedirs(path)
- except FileExistsError:
- pass
-
-
-class InnerBuildSoong(common.Commands):
- def describe(self, args):
- mkdirs(args.out_dir)
-
- with open(os.path.join(args.out_dir, "tree_info.json"), "w") as f:
- f.write(textwrap.dedent("""\
- {
- "requires_ninja": true,
- "orchestrator_protocol_version": 1
- }"""))
-
- def export_api_contributions(self, args):
- contributions_dir = os.path.join(args.out_dir, "api_contributions")
- mkdirs(contributions_dir)
-
- if "system" in args.api_domain:
- with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
- # 'name: android' is android.jar
- f.write(textwrap.dedent("""\
- {
- "name": "api_a",
- "version": 1,
- "api_domain": "system",
- "cc_libraries": [
- {
- "name": "libhello1",
- "headers": [
- {
- "root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "files": [
- "hello1.h"
- ]
- }
- ],
- "api": [
- "build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
- ]
- }
- ]
- }"""))
-
- def analyze(self, args):
- if "system" in args.api_domain:
- # Nothing to export in this demo
- # Write a fake inner_tree.ninja; what the inner tree would have generated
- with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
-            # TODO: Note that this uses paths relative to the workspace not the inner tree
- # for demo purposes until we get the ninja chdir change in.
- f.write(textwrap.dedent("""\
- rule compile_c
- command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
- rule link_so
- command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
- build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
- out_dir = %(OUT_DIR)s/libhello1
- cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
- build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
- out_dir = %(OUT_DIR)s/libhello1
- build system: phony %(OUT_DIR)s/libhello1/libhello1.so
- """ % { "OUT_DIR": args.out_dir }))
- with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
- f.write(textwrap.dedent("""\
- {
- "staging": [
- {
- "dest": "staging/system/lib/libhello1.so",
- "obj": "libhello1/libhello1.so"
- }
- ]
- }""" % { "OUT_DIR": args.out_dir }))
-
-def main(argv):
- return InnerBuildSoong().Run(argv)
-
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
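
For reference, the demo above can be driven the way the orchestrator would drive any `.inner_build` endpoint; each subcommand writes its artifacts under `--out_dir` (the out dir here is hypothetical):

``` python
import inner_build_demo

args = ["inner_build_demo.py", "--out_dir", "out/inner_tree_1",
        "--api_domain", "system"]
inner_build_demo.main(args + ["describe"])                  # tree_info.json
inner_build_demo.main(args + ["export_api_contributions"])  # api_contributions/api_a-1.json
inner_build_demo.main(args + ["analyze"])                   # inner_tree.ninja, build_targets.json
```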
diff --git a/orchestrator/inner_build/inner_build_soong.py b/orchestrator/inner_build/inner_build_soong.py
deleted file mode 100755
index a653dcc..0000000
--- a/orchestrator/inner_build/inner_build_soong.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import sys
-
-sys.dont_write_bytecode = True
-import common
-
-class InnerBuildSoong(common.Commands):
- def describe(self, args):
- pass
-
-
- def export_api_contributions(self, args):
- pass
-
-
-def main(argv):
- return InnerBuildSoong().Run(argv)
-
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
diff --git a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo b/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
deleted file mode 100644
index 0790226..0000000
--- a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "master",
- "product": "aosp_cf_arm64_phone"
- },
- "vendor": {
- "tree": "master",
- "product": "aosp_cf_arm64_phone"
- },
- "modules": {
- "com.android.bionic": {
- "tree": "sc-mainline-prod"
- }
- }
-}
diff --git a/orchestrator/multitree_combos/test.mcombo b/orchestrator/multitree_combos/test.mcombo
deleted file mode 100644
index 3ad0717..0000000
--- a/orchestrator/multitree_combos/test.mcombo
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "inner_tree_system",
- "product": "system_lunch_product"
- },
- "vendor": {
- "tree": "inner_tree_vendor",
- "product": "vendor_lunch_product"
- },
- "modules": {
- "com.android.something": {
- "tree": "inner_tree_module"
- }
- }
-}
diff --git a/orchestrator/ninja/ninja_syntax.py b/orchestrator/ninja/ninja_syntax.py
deleted file mode 100644
index df97b68..0000000
--- a/orchestrator/ninja/ninja_syntax.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from abc import ABC, abstractmethod
-
-from collections.abc import Iterator
-from typing import List
-
-TAB = " "
-
-class Node(ABC):
- '''An abstract class that can be serialized to a ninja file
- All other ninja-serializable classes inherit from this class'''
-
- @abstractmethod
- def stream(self) -> Iterator[str]:
- pass
-
-class Variable(Node):
- '''A ninja variable that can be reused across build actions
- https://ninja-build.org/manual.html#_variables'''
-
- def __init__(self, name:str, value:str, indent=0):
- self.name = name
- self.value = value
- self.indent = indent
-
- def stream(self) -> Iterator[str]:
- indent = TAB * self.indent
- yield f"{indent}{self.name} = {self.value}"
-
-class RuleException(Exception):
- pass
-
-# Ninja rules recognize a limited set of variables
-# https://ninja-build.org/manual.html#ref_rule
-# Keep this list sorted
-RULE_VARIABLES = ["command",
- "depfile",
- "deps",
- "description",
- "dyndep",
- "generator",
- "msvc_deps_prefix",
- "restat",
- "rspfile",
- "rspfile_content"]
-
-class Rule(Node):
- '''A shorthand for a command line that can be reused
- https://ninja-build.org/manual.html#_rules'''
-
- def __init__(self, name:str):
- self.name = name
- self.variables = []
-
- def add_variable(self, name: str, value: str):
- if name not in RULE_VARIABLES:
- raise RuleException(f"{name} is not a recognized variable in a ninja rule")
-
- self.variables.append(Variable(name=name, value=value, indent=1))
-
- def stream(self) -> Iterator[str]:
- self._validate_rule()
-
- yield f"rule {self.name}"
- # Yield rule variables sorted by `name`
- for var in sorted(self.variables, key=lambda x: x.name):
- # variables yield a single item, next() is sufficient
- yield next(var.stream())
-
- def _validate_rule(self):
- # command is a required variable in a ninja rule
- self._assert_variable_is_not_empty(variable_name="command")
-
- def _assert_variable_is_not_empty(self, variable_name: str):
- if not any(var.name == variable_name for var in self.variables):
- raise RuleException(f"{variable_name} is required in a ninja rule")
-
-class BuildActionException(Exception):
- pass
-
-class BuildAction(Node):
- '''Describes the dependency edge between inputs and output
- https://ninja-build.org/manual.html#_build_statements'''
-
- def __init__(self, output: str, rule: str, inputs: List[str]=None, implicits: List[str]=None, order_only: List[str]=None):
- self.output = output
- self.rule = rule
- self.inputs = self._as_list(inputs)
- self.implicits = self._as_list(implicits)
- self.order_only = self._as_list(order_only)
- self.variables = []
-
- def add_variable(self, name: str, value: str):
- '''Variables limited to the scope of this build action'''
- self.variables.append(Variable(name=name, value=value, indent=1))
-
- def stream(self) -> Iterator[str]:
- self._validate()
-
- build_statement = f"build {self.output}: {self.rule}"
- if len(self.inputs) > 0:
- build_statement += " "
- build_statement += " ".join(self.inputs)
- if len(self.implicits) > 0:
- build_statement += " | "
- build_statement += " ".join(self.implicits)
- if len(self.order_only) > 0:
- build_statement += " || "
- build_statement += " ".join(self.order_only)
- yield build_statement
- # Yield variables sorted by `name`
- for var in sorted(self.variables, key=lambda x: x.name):
- # variables yield a single item, next() is sufficient
- yield next(var.stream())
-
- def _validate(self):
- if not self.output:
- raise BuildActionException("Output is required in a ninja build statement")
- if not self.rule:
- raise BuildActionException("Rule is required in a ninja build statement")
-
- def _as_list(self, list_like):
- if list_like is None:
- return []
- if isinstance(list_like, list):
- return list_like
- return [list_like]
-
-class Pool(Node):
- '''https://ninja-build.org/manual.html#ref_pool'''
-
- def __init__(self, name: str, depth: int):
- self.name = name
- self.depth = Variable(name="depth", value=depth, indent=1)
-
- def stream(self) -> Iterator[str]:
- yield f"pool {self.name}"
- yield next(self.depth.stream())
-
-class Subninja(Node):
-
- def __init__(self, subninja: str, chDir: str):
- self.subninja = subninja
- self.chDir = chDir
-
- # TODO(spandandas): Update the syntax when aosp/2064612 lands
- def stream(self) -> Iterator[str]:
- yield f"subninja {self.subninja}"
-
-class Line(Node):
- '''Generic class that can be used for comments/newlines/default_target etc'''
-
- def __init__(self, value:str):
- self.value = value
-
- def stream(self) -> Iterator[str]:
- yield self.value
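
A short sketch of composing the node classes above into ninja text (rule and file names are made up):

``` python
from ninja_syntax import Rule, BuildAction

cc = Rule(name="cc")
cc.add_variable("command", "gcc -c ${in} -o ${out}")

obj = BuildAction(output="hello.o", rule="cc", inputs=["hello.c"])

for node in (cc, obj):
    for line in node.stream():
        print(line)
# rule cc
#   command = gcc -c ${in} -o ${out}
# build hello.o: cc hello.c
```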
diff --git a/orchestrator/ninja/ninja_writer.py b/orchestrator/ninja/ninja_writer.py
deleted file mode 100644
index 9e80b4b..0000000
--- a/orchestrator/ninja/ninja_writer.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
-
-# TODO: Format the output according to a configurable width variable
-# This will ensure that the generated content fits on a screen and does not
-# require horizontal scrolling
-class Writer:
-
- def __init__(self, file):
- self.file = file
-        self.nodes = []  # list of Node objects
-
- def add_variable(self, variable: Variable):
- self.nodes.append(variable)
-
- def add_rule(self, rule: Rule):
- self.nodes.append(rule)
-
- def add_build_action(self, build_action: BuildAction):
- self.nodes.append(build_action)
-
- def add_pool(self, pool: Pool):
- self.nodes.append(pool)
-
- def add_comment(self, comment: str):
- self.nodes.append(Line(value=f"# {comment}"))
-
- def add_default(self, default: str):
- self.nodes.append(Line(value=f"default {default}"))
-
- def add_newline(self):
- self.nodes.append(Line(value=""))
-
- def add_subninja(self, subninja: Subninja):
- self.nodes.append(subninja)
-
- def add_phony(self, name, deps):
- build_action = BuildAction(name, "phony", inputs=deps)
- self.add_build_action(build_action)
-
- def write(self):
- for node in self.nodes:
- for line in node.stream():
- print(line, file=self.file)
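
`Writer` also carries small helpers (`add_comment`, `add_phony`, `add_default`) that the tests below don't exercise; a quick sketch (target names are made up):

``` python
import sys
from ninja_writer import Writer

writer = Writer(sys.stdout)
writer.add_comment("demo targets")
writer.add_phony("droid", ["system", "vendor"])
writer.add_default("droid")
writer.write()
# # demo targets
# build droid: phony system vendor
# default droid
```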
diff --git a/orchestrator/ninja/test_ninja_syntax.py b/orchestrator/ninja/test_ninja_syntax.py
deleted file mode 100644
index d922fd2..0000000
--- a/orchestrator/ninja/test_ninja_syntax.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from ninja_syntax import Variable, Rule, RuleException, BuildAction, BuildActionException, Pool
-
-class TestVariable(unittest.TestCase):
-
- def test_assignment(self):
- variable = Variable(name="key", value="value")
- self.assertEqual("key = value", next(variable.stream()))
- variable = Variable(name="key", value="value with spaces")
- self.assertEqual("key = value with spaces", next(variable.stream()))
- variable = Variable(name="key", value="$some_other_variable")
- self.assertEqual("key = $some_other_variable", next(variable.stream()))
-
- def test_indentation(self):
- variable = Variable(name="key", value="value", indent=0)
- self.assertEqual("key = value", next(variable.stream()))
- variable = Variable(name="key", value="value", indent=1)
- self.assertEqual(" key = value", next(variable.stream()))
-
-class TestRule(unittest.TestCase):
-
- def test_rulename_comes_first(self):
- rule = Rule(name="myrule")
- rule.add_variable("command", "/bin/bash echo")
- self.assertEqual("rule myrule", next(rule.stream()))
-
- def test_command_is_a_required_variable(self):
- rule = Rule(name="myrule")
- with self.assertRaises(RuleException):
- next(rule.stream())
-
- def test_bad_rule_variable(self):
- rule = Rule(name="myrule")
- with self.assertRaises(RuleException):
- rule.add_variable(name="unrecognize_rule_variable", value="value")
-
- def test_rule_variables_are_indented(self):
- rule = Rule(name="myrule")
- rule.add_variable("command", "/bin/bash echo")
- stream = rule.stream()
- self.assertEqual("rule myrule", next(stream)) # top-level rule should not be indented
- self.assertEqual(" command = /bin/bash echo", next(stream))
-
- def test_rule_variables_are_sorted(self):
- rule = Rule(name="myrule")
- rule.add_variable("description", "Adding description before command")
- rule.add_variable("command", "/bin/bash echo")
- stream = rule.stream()
- self.assertEqual("rule myrule", next(stream)) # rule always comes first
- self.assertEqual(" command = /bin/bash echo", next(stream))
- self.assertEqual(" description = Adding description before command", next(stream))
-
-class TestBuildAction(unittest.TestCase):
-
- def test_no_inputs(self):
- build = BuildAction(output="out", rule="phony")
- stream = build.stream()
- self.assertEqual("build out: phony", next(stream))
- # Empty output
- build = BuildAction(output="", rule="phony")
- with self.assertRaises(BuildActionException):
- next(build.stream())
- # Empty rule
- build = BuildAction(output="out", rule="")
- with self.assertRaises(BuildActionException):
- next(build.stream())
-
- def test_inputs(self):
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
- self.assertEqual("build out: cat input1 input2", next(build.stream()))
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"], implicits=["implicits1", "implicits2"], order_only=["order_only1", "order_only2"])
- self.assertEqual("build out: cat input1 input2 | implicits1 implicits2 || order_only1 order_only2", next(build.stream()))
-
- def test_variables(self):
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
- build.add_variable(name="myvar", value="myval")
- stream = build.stream()
- next(stream)
- self.assertEqual(" myvar = myval", next(stream))
-
-class TestPool(unittest.TestCase):
-
- def test_pool(self):
- pool = Pool(name="mypool", depth=10)
- stream = pool.stream()
- self.assertEqual("pool mypool", next(stream))
- self.assertEqual(" depth = 10", next(stream))
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/orchestrator/ninja/test_ninja_writer.py b/orchestrator/ninja/test_ninja_writer.py
deleted file mode 100644
index 703dd4d..0000000
--- a/orchestrator/ninja/test_ninja_writer.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from io import StringIO
-
-from ninja_writer import Writer
-from ninja_syntax import Variable, Rule, BuildAction
-
-class TestWriter(unittest.TestCase):
-
- def test_simple_writer(self):
- with StringIO() as f:
- writer = Writer(f)
- writer.add_variable(Variable(name="cflags", value="-Wall"))
- writer.add_newline()
- cc = Rule(name="cc")
- cc.add_variable(name="command", value="gcc $cflags -c $in -o $out")
- writer.add_rule(cc)
- writer.add_newline()
- build_action = BuildAction(output="foo.o", rule="cc", inputs=["foo.c"])
- writer.add_build_action(build_action)
- writer.write()
- self.assertEqual('''cflags = -Wall
-
-rule cc
- command = gcc $cflags -c $in -o $out
-
-build foo.o: cc foo.c
-''', f.getvalue())
-
- def test_comment(self):
- with StringIO() as f:
- writer = Writer(f)
- writer.add_comment("This is a comment in a ninja file")
- writer.write()
- self.assertEqual("# This is a comment in a ninja file\n", f.getvalue())
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/orchestrator/test_workspace/combo.mcombo b/orchestrator/test_workspace/combo.mcombo
deleted file mode 100644
index 8200dc0..0000000
--- a/orchestrator/test_workspace/combo.mcombo
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "product": "test_product1"
- },
- "vendor": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "product": "test_product2"
- },
- "modules": {
- "module_1": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
- }
- }
-}
-
diff --git a/orchestrator/test_workspace/inner_tree_1/.inner_build b/orchestrator/test_workspace/inner_tree_1/.inner_build
deleted file mode 120000
index d8f235f..0000000
--- a/orchestrator/test_workspace/inner_tree_1/.inner_build
+++ /dev/null
@@ -1 +0,0 @@
-../../inner_build/inner_build_demo.py
\ No newline at end of file
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
deleted file mode 100644
index 1415082..0000000
--- a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
+++ /dev/null
@@ -1,8 +0,0 @@
-#include <stdio.h>
-
-#include "hello1.h"
-
-void hello1(void) {
- printf("hello1");
-}
-
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
deleted file mode 100644
index 0309c1c..0000000
--- a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#pragma once
-
-extern "C" void hello1(void);
-
diff --git a/target/board/Android.mk b/target/board/Android.mk
index baa3d3a..21c0c10 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -19,8 +19,11 @@
ifndef board_info_txt
board_info_txt := $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)
endif
-$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) build/make/tools/check_radio_versions.py
- $(hide) build/make/tools/check_radio_versions.py $< $(BOARD_INFO_CHECK)
+CHECK_RADIO_VERSIONS := $(HOST_OUT_EXECUTABLES)/check_radio_versions$(HOST_EXECUTABLE_SUFFIX)
+$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) $(CHECK_RADIO_VERSIONS)
+ $(hide) $(CHECK_RADIO_VERSIONS) \
+ --board_info_txt $(board_info_txt) \
+ --board_info_check $(BOARD_INFO_CHECK)
$(call pretty,"Generated: ($@)")
ifdef board_info_txt
$(hide) grep -v '#' $< > $@
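Editor's note: the hunk above replaces a direct invocation of the checked-in Python script with a Soong-built host tool driven by named flags. Locating the binary via `$(HOST_OUT_EXECUTABLES)` plus `$(HOST_EXECUTABLE_SUFFIX)` and listing it as a prerequisite (so it is built before the recipe runs) is the usual idiom; a minimal sketch with a hypothetical tool and flags:

``` make
# Minimal sketch of the same idiom; "mytool", SOME_OUTPUT and the flags are
# hypothetical. Listing $(MY_TOOL) as a prerequisite forces it to be built.
MY_TOOL := $(HOST_OUT_EXECUTABLES)/mytool$(HOST_EXECUTABLE_SUFFIX)

$(SOME_OUTPUT): $(SOME_INPUT) $(MY_TOOL)
	$(hide) $(MY_TOOL) --input $< --output $@
```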
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
index 845225d..5acbe7f 100644
--- a/target/board/BoardConfigEmuCommon.mk
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -33,8 +33,8 @@
# emulator needs super.img
BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
- # 4G + 8M
- BOARD_SUPER_PARTITION_SIZE := 4303355904
+ # 8G + 8M
+ BOARD_SUPER_PARTITION_SIZE := 8598323200
BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
ifeq ($(QEMU_USE_SYSTEM_EXT_PARTITIONS),true)
@@ -56,8 +56,8 @@
vendor
endif
- # 4G
- BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE := 4294967296
+ # 8G
+ BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE := 8589934592
# in build environment to speed up make -j
ifeq ($(QEMU_DISABLE_AVB),true)
@@ -87,6 +87,5 @@
BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
BOARD_FLASH_BLOCK_SIZE := 512
-DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 53714a8..4d95b33 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -3,6 +3,8 @@
# Common compile-time definitions for GSI
# Builds upon the mainline config.
#
+# See device/generic/common/README.md for more details.
+#
include build/make/target/board/BoardConfigMainlineCommon.mk
@@ -17,6 +19,12 @@
TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
TARGET_USERIMAGES_SPARSE_EROFS_DISABLED := true
+# Enable the system_dlkm image to create a symlink in GSI, supporting
+# devices with a system_dlkm partition
+BOARD_USES_SYSTEM_DLKMIMAGE := true
+BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE := ext4
+TARGET_COPY_OUT_SYSTEM_DLKM := system_dlkm
+
# GSI also includes make_f2fs to support userdata partition in f2fs
# for some devices
TARGET_USERIMAGES_USE_F2FS := true
@@ -80,6 +88,3 @@
# Set up a vendor image so that PRODUCT_VENDOR_PROPERTIES does not affect GSI
BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-
-# Disable 64 bit mediadrmserver
-TARGET_ENABLE_MEDIADRM_64 :=
diff --git a/target/board/BoardConfigPixelCommon.mk b/target/board/BoardConfigPixelCommon.mk
index a970fec..22521b5 100644
--- a/target/board/BoardConfigPixelCommon.mk
+++ b/target/board/BoardConfigPixelCommon.mk
@@ -5,6 +5,7 @@
# Using sha256 for dm-verity partitions. b/156162446
# system, system_other, system_ext and product.
BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
BOARD_AVB_SYSTEM_OTHER_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 45ed3da..40be80e 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -52,6 +52,9 @@
TARGET_2ND_CPU_VARIANT := generic
endif
+# Include 64-bit mediaserver to support 64-bit-only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
include build/make/target/board/BoardConfigGsiCommon.mk
# Some vendors still haven't cleaned up all device-specific directories under
diff --git a/target/board/generic_riscv64/BoardConfig.mk b/target/board/generic_riscv64/BoardConfig.mk
new file mode 100644
index 0000000..906f7f0
--- /dev/null
+++ b/target/board/generic_riscv64/BoardConfig.mk
@@ -0,0 +1,28 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# riscv64 emulator-specific definitions
+TARGET_ARCH := riscv64
+TARGET_ARCH_VARIANT :=
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := riscv64
+
+# Include 64-bit mediaserver to support 64-bit-only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
+include build/make/target/board/BoardConfigGsiCommon.mk
+
+# Temporary hack while prebuilt modules are missing riscv64.
+ALLOW_MISSING_DEPENDENCIES := true
diff --git a/target/board/generic_riscv64/README.txt b/target/board/generic_riscv64/README.txt
new file mode 100644
index 0000000..9811982
--- /dev/null
+++ b/target/board/generic_riscv64/README.txt
@@ -0,0 +1,7 @@
+The "generic_riscv64" product defines a non-hardware-specific riscv64 target
+without a bootloader.
+
+It is also the target to build the generic kernel image (GKI).
+
+It is not a product "base class"; no other products inherit
+from it or use it in any way.
diff --git a/target/product/iorap_large_memory_config.mk b/target/board/generic_riscv64/device.mk
similarity index 90%
rename from target/product/iorap_large_memory_config.mk
rename to target/board/generic_riscv64/device.mk
index 0c6c89a..27a4175 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/board/generic_riscv64/device.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2020 The Android Open Source Project
+#
+# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/target/board/generic_riscv64/system_ext.prop b/target/board/generic_riscv64/system_ext.prop
new file mode 100644
index 0000000..42c4ef5
--- /dev/null
+++ b/target/board/generic_riscv64/system_ext.prop
@@ -0,0 +1,5 @@
+#
+# system.prop for generic riscv64 sdk
+#
+
+rild.libpath=/vendor/lib64/libreference-ril.so
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 93694f2..e7f2ae0 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -22,6 +22,9 @@
TARGET_2ND_ARCH := x86
TARGET_2ND_ARCH_VARIANT := x86_64
+# Include 64-bit mediaserver to support 64-bit-only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
include build/make/target/board/BoardConfigGsiCommon.mk
ifndef BUILDING_GSI
diff --git a/target/board/gsi_arm64/BoardConfig.mk b/target/board/gsi_arm64/BoardConfig.mk
index db6f3f0..db95082 100644
--- a/target/board/gsi_arm64/BoardConfig.mk
+++ b/target/board/gsi_arm64/BoardConfig.mk
@@ -27,6 +27,9 @@
TARGET_2ND_CPU_ABI2 := armeabi
TARGET_2ND_CPU_VARIANT := generic
+# Include 64-bit mediaserver to support 64-bit-only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
# TODO(b/111434759, b/111287060) SoC specific hacks
BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/persist:/persist
diff --git a/orchestrator/core/api_export.py b/target/board/module_arm64only/BoardConfig.mk
similarity index 77%
rename from orchestrator/core/api_export.py
rename to target/board/module_arm64only/BoardConfig.mk
index 2f26b02..3cabf05 100644
--- a/orchestrator/core/api_export.py
+++ b/target/board/module_arm64only/BoardConfig.mk
@@ -1,5 +1,3 @@
-#!/usr/bin/python3
-#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,8 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
-def export_apis_from_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["export_api_contributions"])
+include build/make/target/board/BoardConfigModuleCommon.mk
-
+TARGET_ARCH := arm64
+TARGET_ARCH_VARIANT := armv8-a
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := arm64-v8a
diff --git a/target/board/module_arm64only/README.md b/target/board/module_arm64only/README.md
new file mode 100644
index 0000000..0dd1699
--- /dev/null
+++ b/target/board/module_arm64only/README.md
@@ -0,0 +1,2 @@
+This device is suitable for an unbundled module targeted specifically to an
+arm64 device. 32-bit binaries will not be built.
diff --git a/target/product/iorap_large_memory_config.mk b/target/board/module_x86_64only/BoardConfig.mk
similarity index 80%
copy from target/product/iorap_large_memory_config.mk
copy to target/board/module_x86_64only/BoardConfig.mk
index 0c6c89a..b0676cb 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/board/module_x86_64only/BoardConfig.mk
@@ -12,3 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+
+include build/make/target/board/BoardConfigModuleCommon.mk
+
+TARGET_CPU_ABI := x86_64
+TARGET_ARCH := x86_64
+TARGET_ARCH_VARIANT := x86_64
diff --git a/target/board/module_x86_64only/README.md b/target/board/module_x86_64only/README.md
new file mode 100644
index 0000000..8fd7dc4
--- /dev/null
+++ b/target/board/module_x86_64only/README.md
@@ -0,0 +1,2 @@
+This device is suitable for an unbundled module targeted specifically to an
+x86_64 device. 32-bit binaries will not be built.
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 67b0b17..585630b 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -46,6 +46,7 @@
$(LOCAL_DIR)/aosp_64bitonly_x86_64.mk \
$(LOCAL_DIR)/aosp_arm64.mk \
$(LOCAL_DIR)/aosp_arm.mk \
+ $(LOCAL_DIR)/aosp_riscv64.mk \
$(LOCAL_DIR)/aosp_x86_64.mk \
$(LOCAL_DIR)/aosp_x86_arm.mk \
$(LOCAL_DIR)/aosp_x86.mk \
@@ -78,8 +79,10 @@
$(LOCAL_DIR)/mainline_sdk.mk \
$(LOCAL_DIR)/module_arm.mk \
$(LOCAL_DIR)/module_arm64.mk \
+ $(LOCAL_DIR)/module_arm64only.mk \
$(LOCAL_DIR)/module_x86.mk \
$(LOCAL_DIR)/module_x86_64.mk \
+ $(LOCAL_DIR)/module_x86_64only.mk \
COMMON_LUNCH_CHOICES := \
aosp_arm64-eng \
diff --git a/target/product/OWNERS b/target/product/OWNERS
index b3d8998..61f7d45 100644
--- a/target/product/OWNERS
+++ b/target/product/OWNERS
@@ -3,3 +3,8 @@
# GSI
per-file gsi_release.mk = file:/target/product/gsi/OWNERS
per-file developer_gsi_keys.mk = file:/target/product/gsi/OWNERS
+
+# Android Go
+per-file go_defaults.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_512.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_common.mk = gkaiser@google.com, rajekumar@google.com
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index 90acc17..5f200aa 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -49,7 +49,7 @@
#
# All components inherited here go to vendor image
#
-$(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
+$(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index 01897b7..ffc37a9 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -43,6 +43,9 @@
$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+# pKVM
+$(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
+
#
# All components inherited here go to product image
#
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
new file mode 100644
index 0000000..436ff97
--- /dev/null
+++ b/target/product/aosp_riscv64.mk
@@ -0,0 +1,80 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
+# The system image of aosp_riscv64-userdebug is a GSI for devices with:
+# - riscv64 user space
+# - 64-bit binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+# GSI for system/product & supports 64-bit apps only
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+#$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+TARGET_FLATTEN_APEX := false
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product-if-exists, device/generic/goldfish/riscv64-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_riscv64/device.mk)
+
+#
+# Special settings for GSI releasing
+#
+ifeq (aosp_riscv64,$(TARGET_PRODUCT))
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
+endif
+
+# TODO: this list should come via mainline_system.mk, but for now list
+# just the modules that work for riscv64.
+PRODUCT_PACKAGES := \
+ init.environ.rc \
+ init_first_stage \
+ init_system \
+ linker \
+ shell_and_utilities \
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
+PRODUCT_USES_DEFAULT_ART_CONFIG := false
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
+ root/init.zygote64.rc
+
+# This build configuration supports 64-bit apps only
+PRODUCT_NAME := aosp_riscv64
+PRODUCT_DEVICE := generic_riscv64
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on Riscv64
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index b3cfae4..d55866f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -45,6 +45,9 @@
$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+# pKVM
+$(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
+
#
# All components inherited here go to product image
#
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 2c2b5a9..e01bb8c 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -50,7 +50,9 @@
charger \
cmd \
com.android.adbd \
+ com.android.adservices \
com.android.appsearch \
+ com.android.btservices \
com.android.conscrypt \
com.android.cronet \
com.android.extservices \
@@ -60,15 +62,17 @@
com.android.media \
com.android.media.swcodec \
com.android.mediaprovider \
+ com.android.ondevicepersonalization \
com.android.os.statsd \
com.android.permission \
com.android.resolv \
com.android.neuralnetworks \
com.android.scheduling \
com.android.sdkext \
- com.android.sepolicy \
com.android.tethering \
com.android.tzdata \
+ com.android.uwb \
+ com.android.virt \
com.android.wifi \
ContactsProvider \
content \
@@ -218,6 +222,7 @@
mke2fs \
mkfs.erofs \
monkey \
+ mtectrl \
mtpd \
ndc \
netd \
@@ -235,6 +240,7 @@
pppd \
preinstalled-packages-platform.xml \
privapp-permissions-platform.xml \
+ prng_seeder \
racoon \
recovery-persist \
resize2fs \
@@ -271,7 +277,6 @@
traced \
traced_probes \
tune2fs \
- tzdatacheck \
uiautomator \
uinput \
uncrypt \
@@ -314,6 +319,11 @@
endif # EMMA_INSTRUMENT_STATIC
endif # EMMA_INSTRUMENT
+# For testing purposes
+ifeq ($(FORCE_AUDIO_SILENT), true)
+ PRODUCT_SYSTEM_PROPERTIES += ro.audio.silent=1
+endif
+
# Host tools to install
PRODUCT_HOST_PACKAGES += \
BugReport \
@@ -341,7 +351,6 @@
sqlite3 \
tinyplay \
tune2fs \
- tzdatacheck \
unwind_info \
unwind_reg_info \
unwind_symbols \
@@ -368,7 +377,6 @@
PRODUCT_PACKAGES_DEBUG := \
adb_keys \
arping \
- com.android.sepolicy.cert-debug.der \
dmuserd \
idlcli \
init-debug.rc \
@@ -380,7 +388,6 @@
procrank \
profcollectd \
profcollectctl \
- remount \
servicedispatcher \
showmap \
sqlite3 \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 5004b85..7fb785c 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -29,6 +29,11 @@
shell_and_utilities_recovery \
watchdogd.recovery \
+PRODUCT_VENDOR_PROPERTIES += \
+ ro.recovery.usb.vid?=18D1 \
+ ro.recovery.usb.adb.pid?=D001 \
+ ro.recovery.usb.fastboot.pid?=4EE0 \
+
# These had been pulled in via init_second_stage.recovery, but may not be needed.
PRODUCT_HOST_PACKAGES += \
e2fsdroid \
@@ -42,7 +47,6 @@
# Base modules and settings for the vendor partition.
PRODUCT_PACKAGES += \
android.hardware.cas@1.2-service \
- android.hardware.media.omx@1.0-service \
boringssl_self_test_vendor \
dumpsys_vendor \
fs_config_files_nonsystem \
@@ -69,6 +73,14 @@
selinux_policy_nonsystem \
shell_and_utilities_vendor \
+# OMX is not supported on 64-bit-only builds.
+# It is only supported when SHIPPING_API_LEVEL is less than or equal to 33.
+ifneq ($(TARGET_SUPPORTS_OMX_SERVICE),false)
+ PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33 += \
+ android.hardware.media.omx@1.0-service \
+
+endif
+
# Base module when shipping api level is less than or equal to 29
PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \
android.hardware.configstore@1.1-service \
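Editor's note: the new `ro.recovery.usb.*` entries use the `?=` form of a product property assignment, which (as I understand it) only takes effect when no other makefile assigns the property, so devices keep any USB IDs they already set. A hedged sketch of a device-side override, with made-up values:

``` make
# Hypothetical device.mk: a plain '=' assignment takes precedence over the
# '?=' defaults that base_vendor.mk now provides for the recovery USB IDs.
PRODUCT_VENDOR_PROPERTIES += \
    ro.recovery.usb.vid=18D2 \
    ro.recovery.usb.adb.pid=D002
```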
diff --git a/target/product/core_64_bit.mk b/target/product/core_64_bit.mk
index 322fa80..e0c4d53 100644
--- a/target/product/core_64_bit.mk
+++ b/target/product/core_64_bit.mk
@@ -23,11 +23,17 @@
# for 32-bit only.
# Copy the 64-bit primary, 32-bit secondary zygote startup script
-PRODUCT_COPY_FILES += system/core/rootdir/init.zygote64_32.rc:system/etc/init/hw/init.zygote64_32.rc
+PRODUCT_COPY_FILES += \
+ system/core/rootdir/init.zygote64.rc:system/etc/init/hw/init.zygote64.rc \
+ system/core/rootdir/init.zygote64_32.rc:system/etc/init/hw/init.zygote64_32.rc \
# Set the zygote property to select the 64-bit primary, 32-bit secondary script
# This line must be parsed before the one in core_minimal.mk
+ifeq ($(ZYGOTE_FORCE_64),true)
+PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64
+else
PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64_32
+endif
TARGET_SUPPORTS_32_BIT_APPS := true
TARGET_SUPPORTS_64_BIT_APPS := true
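Editor's note: `ZYGOTE_FORCE_64` is the new switch here; when true, the product gets `ro.zygote=zygote64` (the 64-bit-only zygote script copied above) instead of the 64/32 pair. A minimal sketch of opting in, assuming the flag is set before this file is parsed (the surrounding product makefile is hypothetical):

``` make
# Hypothetical product makefile: select the 64-bit-only zygote. The variable
# must be assigned before core_64_bit.mk is parsed so the ifeq above sees it.
ZYGOTE_FORCE_64 := true
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
```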
diff --git a/target/product/core_64_bit_only.mk b/target/product/core_64_bit_only.mk
index 061728f..fc2b8e5 100644
--- a/target/product/core_64_bit_only.mk
+++ b/target/product/core_64_bit_only.mk
@@ -31,3 +31,4 @@
TARGET_SUPPORTS_32_BIT_APPS := false
TARGET_SUPPORTS_64_BIT_APPS := true
+TARGET_SUPPORTS_OMX_SERVICE := false
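Editor's note: this flag feeds the guard added to base_vendor.mk earlier in this change, so 64-bit-only products drop the 32-bit OMX HAL service while everyone else keeps it. Putting the two pieces from this diff side by side:

``` make
# core_64_bit_only.mk (this hunk): opt out of OMX
TARGET_SUPPORTS_OMX_SERVICE := false

# base_vendor.mk (earlier hunk): package OMX only when the product did not opt out
ifneq ($(TARGET_SUPPORTS_OMX_SERVICE),false)
  PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33 += android.hardware.media.omx@1.0-service
endif
```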
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 5695803..20d2865 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -51,12 +51,16 @@
# Note: core-icu4j is moved back to PRODUCT_BOOT_JARS in product_config.mk at a later stage.
# Note: For modules available in Q, DO NOT add new entries here.
PRODUCT_APEX_BOOT_JARS := \
+ com.android.adservices:framework-adservices \
+ com.android.adservices:framework-sdksandbox \
com.android.appsearch:framework-appsearch \
+ com.android.btservices:framework-bluetooth \
com.android.conscrypt:conscrypt \
com.android.i18n:core-icu4j \
com.android.ipsec:android.net.ipsec.ike \
com.android.media:updatable-media \
com.android.mediaprovider:framework-mediaprovider \
+ com.android.ondevicepersonalization:framework-ondevicepersonalization \
com.android.os.statsd:framework-statsd \
com.android.permission:framework-permission \
com.android.permission:framework-permission-s \
@@ -65,12 +69,16 @@
com.android.tethering:framework-connectivity \
com.android.tethering:framework-connectivity-t \
com.android.tethering:framework-tethering \
- com.android.wifi:framework-wifi
+ com.android.uwb:framework-uwb \
+ com.android.virt:framework-virtualization \
+ com.android.wifi:framework-wifi \
# List of system_server classpath jars delivered via apex.
# Keep the list sorted by module names and then library names.
# Note: For modules available in Q, DO NOT add new entries here.
PRODUCT_APEX_SYSTEM_SERVER_JARS := \
+ com.android.adservices:service-adservices \
+ com.android.adservices:service-sdksandbox \
com.android.appsearch:service-appsearch \
com.android.art:service-art \
com.android.media:service-media-s \
@@ -90,9 +98,11 @@
# Keep the list sorted by module names and then library names.
# Note: For modules available in Q, DO NOT add new entries here.
PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS := \
+ com.android.btservices:service-bluetooth \
com.android.os.statsd:service-statsd \
com.android.scheduling:service-scheduling \
com.android.tethering:service-connectivity \
+ com.android.uwb:service-uwb \
com.android.wifi:service-wifi \
# Minimal configuration for running dex2oat (default argument values).
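Editor's note: each entry in these APEX jar lists follows the `<apex module name>:<jar module name>` convention visible above. A hedged example with a hypothetical module pair:

``` make
# Hypothetical: "com.android.example" is the APEX and "framework-example" is
# the boot jar it delivers; both halves must name real Soong modules.
PRODUCT_APEX_BOOT_JARS += \
    com.android.example:framework-example
```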
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
deleted file mode 100644
index 36da1f7..0000000
--- a/target/product/emulator.mk
+++ /dev/null
@@ -1,60 +0,0 @@
-#
-# Copyright (C) 2012 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# This file is included by other product makefiles to add all the
-# emulator-related modules to PRODUCT_PACKAGES.
-#
-
-# Device modules
-PRODUCT_PACKAGES += \
- CarrierConfig \
-
-# need this for gles libraries to load properly
-# after moving to /vendor/lib/
-PRODUCT_PACKAGES += \
- vndk-sp
-
-# WiFi: system side
-PRODUCT_PACKAGES += \
- ip \
- iw \
- wificond \
-
-
-PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
-
-PRODUCT_CHARACTERISTICS := emulator
-
-PRODUCT_FULL_TREBLE_OVERRIDE := true
-
-# goldfish vendor partition configurations
-$(call inherit-product-if-exists, device/generic/goldfish/vendor.mk)
-
-#watchdog triggers reboot because location service is not
-#responding, disable it for now.
-#still keep it on internal master as it is still working
-#once it is fixed in aosp, remove this block of comment.
-#PRODUCT_VENDOR_PROPERTIES += \
-#config.disable_location=true
-
-# enable Google-specific location features,
-# like NetworkLocationProvider and LocationCollector
-PRODUCT_SYSTEM_EXT_PROPERTIES += \
- ro.com.google.locationfeatures=1
-
-# disable setupwizard
-PRODUCT_SYSTEM_EXT_PROPERTIES += \
- ro.setupwizard.mode=DISABLED
diff --git a/target/product/full.mk b/target/product/full.mk
index adb54ab..945957f 100644
--- a/target/product/full.mk
+++ b/target/product/full.mk
@@ -19,8 +19,8 @@
# build quite specifically for the emulator, and might not be
# entirely appropriate to inherit from for on-device configurations.
-$(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator.mk)
+$(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic/device.mk)
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index a8e1e91..39c66da3 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -28,12 +28,6 @@
PhotoTable \
preinstalled-packages-platform-full-base.xml
-# Bluetooth:
-# audio.a2dp.default is a system module. Generic system image includes
-# audio.a2dp.default to support A2DP if board has the capability.
-PRODUCT_PACKAGES += \
- audio.a2dp.default
-
# Net:
# Vendors can use the platform-provided network configuration utilities (ip,
# iptables, etc.) to configure the Linux networking stack, but these utilities
diff --git a/target/product/full_x86.mk b/target/product/full_x86.mk
index 2f40c03..0f3be91 100644
--- a/target/product/full_x86.mk
+++ b/target/product/full_x86.mk
@@ -23,7 +23,7 @@
# that isn't a wifi connection. This will instruct init.rc to enable the
# network connection so that you can use it with ADB
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index f13c9db..1a639ef 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -52,11 +52,6 @@
cppreopts.sh \
otapreopt_script \
-# Bluetooth libraries
-PRODUCT_PACKAGES += \
- audio.a2dp.default \
- audio.hearing_aid.default \
-
# For ringtones that rely on forward lock encryption
PRODUCT_PACKAGES += libfwdlockengine
diff --git a/target/product/gsi/33.txt b/target/product/gsi/33.txt
new file mode 100644
index 0000000..03a143d
--- /dev/null
+++ b/target/product/gsi/33.txt
@@ -0,0 +1,254 @@
+LLNDK: libEGL.so
+LLNDK: libGLESv1_CM.so
+LLNDK: libGLESv2.so
+LLNDK: libGLESv3.so
+LLNDK: libRS.so
+LLNDK: libandroid_net.so
+LLNDK: libbinder_ndk.so
+LLNDK: libc.so
+LLNDK: libcgrouprc.so
+LLNDK: libdl.so
+LLNDK: libft2.so
+LLNDK: liblog.so
+LLNDK: libm.so
+LLNDK: libmediandk.so
+LLNDK: libnativewindow.so
+LLNDK: libneuralnetworks.so
+LLNDK: libselinux.so
+LLNDK: libsync.so
+LLNDK: libvndksupport.so
+LLNDK: libvulkan.so
+VNDK-SP: android.hardware.common-V2-ndk.so
+VNDK-SP: android.hardware.common.fmq-V1-ndk.so
+VNDK-SP: android.hardware.graphics.allocator-V1-ndk.so
+VNDK-SP: android.hardware.graphics.common-V3-ndk.so
+VNDK-SP: android.hardware.graphics.common@1.0.so
+VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.composer3-V1-ndk.so
+VNDK-SP: android.hardware.graphics.mapper@2.0.so
+VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.renderscript@1.0.so
+VNDK-SP: android.hidl.memory.token@1.0.so
+VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
+VNDK-SP: libRSCpuRef.so
+VNDK-SP: libRSDriver.so
+VNDK-SP: libRS_internal.so
+VNDK-SP: libbacktrace.so
+VNDK-SP: libbase.so
+VNDK-SP: libbcinfo.so
+VNDK-SP: libblas.so
+VNDK-SP: libc++.so
+VNDK-SP: libcompiler_rt.so
+VNDK-SP: libcutils.so
+VNDK-SP: libdmabufheap.so
+VNDK-SP: libgralloctypes.so
+VNDK-SP: libhardware.so
+VNDK-SP: libhidlbase.so
+VNDK-SP: libhidlmemory.so
+VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
+VNDK-SP: liblzma.so
+VNDK-SP: libprocessgroup.so
+VNDK-SP: libunwindstack.so
+VNDK-SP: libutils.so
+VNDK-SP: libutilscallstack.so
+VNDK-SP: libz.so
+VNDK-core: android.hardware.audio.common-V1-ndk.so
+VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.authsecret-V1-ndk.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk.so
+VNDK-core: android.hardware.bluetooth.audio-V2-ndk.so
+VNDK-core: android.hardware.camera.common-V1-ndk.so
+VNDK-core: android.hardware.camera.device-V1-ndk.so
+VNDK-core: android.hardware.camera.metadata-V1-ndk.so
+VNDK-core: android.hardware.camera.provider-V1-ndk.so
+VNDK-core: android.hardware.configstore-utils.so
+VNDK-core: android.hardware.configstore@1.0.so
+VNDK-core: android.hardware.configstore@1.1.so
+VNDK-core: android.hardware.confirmationui-support-lib.so
+VNDK-core: android.hardware.drm-V1-ndk.so
+VNDK-core: android.hardware.dumpstate-V1-ndk.so
+VNDK-core: android.hardware.gnss-V2-ndk.so
+VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.health-V1-ndk.so
+VNDK-core: android.hardware.health.storage-V1-ndk.so
+VNDK-core: android.hardware.identity-V4-ndk.so
+VNDK-core: android.hardware.ir-V1-ndk.so
+VNDK-core: android.hardware.keymaster-V3-ndk.so
+VNDK-core: android.hardware.light-V2-ndk.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.omx@1.0.so
+VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack-V1-ndk.so
+VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.nfc-V1-ndk.so
+VNDK-core: android.hardware.oemlock-V1-ndk.so
+VNDK-core: android.hardware.power-V3-ndk.so
+VNDK-core: android.hardware.power.stats-V1-ndk.so
+VNDK-core: android.hardware.radio-V1-ndk.so
+VNDK-core: android.hardware.radio.config-V1-ndk.so
+VNDK-core: android.hardware.radio.data-V1-ndk.so
+VNDK-core: android.hardware.radio.messaging-V1-ndk.so
+VNDK-core: android.hardware.radio.modem-V1-ndk.so
+VNDK-core: android.hardware.radio.network-V1-ndk.so
+VNDK-core: android.hardware.radio.sim-V1-ndk.so
+VNDK-core: android.hardware.radio.voice-V1-ndk.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk.so
+VNDK-core: android.hardware.security.dice-V1-ndk.so
+VNDK-core: android.hardware.security.keymint-V2-ndk.so
+VNDK-core: android.hardware.security.secureclock-V1-ndk.so
+VNDK-core: android.hardware.security.sharedsecret-V1-ndk.so
+VNDK-core: android.hardware.sensors-V1-ndk.so
+VNDK-core: android.hardware.soundtrigger3-V1-ndk.so
+VNDK-core: android.hardware.soundtrigger@2.0-core.so
+VNDK-core: android.hardware.soundtrigger@2.0.so
+VNDK-core: android.hardware.usb-V1-ndk.so
+VNDK-core: android.hardware.uwb-V1-ndk.so
+VNDK-core: android.hardware.vibrator-V2-ndk.so
+VNDK-core: android.hardware.weaver-V1-ndk.so
+VNDK-core: android.hardware.wifi.hostapd-V1-ndk.so
+VNDK-core: android.hardware.wifi.supplicant-V1-ndk.so
+VNDK-core: android.hidl.token@1.0-utils.so
+VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.media.audio.common.types-V1-ndk.so
+VNDK-core: android.media.soundtrigger.types-V1-ndk.so
+VNDK-core: android.system.keystore2-V2-ndk.so
+VNDK-core: android.system.suspend-V1-ndk.so
+VNDK-core: android.system.suspend@1.0.so
+VNDK-core: libaudioroute.so
+VNDK-core: libaudioutils.so
+VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
+VNDK-core: libcamera_metadata.so
+VNDK-core: libcap.so
+VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
+VNDK-core: libcrypto.so
+VNDK-core: libcrypto_utils.so
+VNDK-core: libcurl.so
+VNDK-core: libdiskconfig.so
+VNDK-core: libdumpstateutil.so
+VNDK-core: libevent.so
+VNDK-core: libexif.so
+VNDK-core: libexpat.so
+VNDK-core: libfmq.so
+VNDK-core: libgatekeeper.so
+VNDK-core: libgui.so
+VNDK-core: libhardware_legacy.so
+VNDK-core: libhidlallocatorutils.so
+VNDK-core: libjpeg.so
+VNDK-core: libldacBT_abr.so
+VNDK-core: libldacBT_enc.so
+VNDK-core: liblz4.so
+VNDK-core: libmedia_helper.so
+VNDK-core: libmedia_omx.so
+VNDK-core: libmemtrack.so
+VNDK-core: libminijail.so
+VNDK-core: libmkbootimg_abi_check.so
+VNDK-core: libnetutils.so
+VNDK-core: libnl.so
+VNDK-core: libpcre2.so
+VNDK-core: libpiex.so
+VNDK-core: libpng.so
+VNDK-core: libpower.so
+VNDK-core: libprocinfo.so
+VNDK-core: libradio_metadata.so
+VNDK-core: libspeexresampler.so
+VNDK-core: libsqlite.so
+VNDK-core: libssl.so
+VNDK-core: libstagefright_bufferpool@2.0.so
+VNDK-core: libstagefright_bufferqueue_helper.so
+VNDK-core: libstagefright_foundation.so
+VNDK-core: libstagefright_omx.so
+VNDK-core: libstagefright_omx_utils.so
+VNDK-core: libstagefright_xmlparser.so
+VNDK-core: libsysutils.so
+VNDK-core: libtinyalsa.so
+VNDK-core: libtinyxml2.so
+VNDK-core: libui.so
+VNDK-core: libusbhost.so
+VNDK-core: libwifi-system-iface.so
+VNDK-core: libxml2.so
+VNDK-core: libyuv.so
+VNDK-core: libziparchive.so
+VNDK-private: libbacktrace.so
+VNDK-private: libblas.so
+VNDK-private: libcompiler_rt.so
+VNDK-private: libft2.so
+VNDK-private: libgui.so
+VNDK-product: android.hardware.audio.common@2.0.so
+VNDK-product: android.hardware.configstore@1.0.so
+VNDK-product: android.hardware.configstore@1.1.so
+VNDK-product: android.hardware.graphics.allocator@2.0.so
+VNDK-product: android.hardware.graphics.allocator@3.0.so
+VNDK-product: android.hardware.graphics.allocator@4.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-product: android.hardware.graphics.common@1.0.so
+VNDK-product: android.hardware.graphics.common@1.1.so
+VNDK-product: android.hardware.graphics.common@1.2.so
+VNDK-product: android.hardware.graphics.mapper@2.0.so
+VNDK-product: android.hardware.graphics.mapper@2.1.so
+VNDK-product: android.hardware.graphics.mapper@3.0.so
+VNDK-product: android.hardware.graphics.mapper@4.0.so
+VNDK-product: android.hardware.media.bufferpool@2.0.so
+VNDK-product: android.hardware.media.omx@1.0.so
+VNDK-product: android.hardware.media@1.0.so
+VNDK-product: android.hardware.memtrack@1.0.so
+VNDK-product: android.hardware.renderscript@1.0.so
+VNDK-product: android.hardware.soundtrigger@2.0.so
+VNDK-product: android.hidl.memory.token@1.0.so
+VNDK-product: android.hidl.memory@1.0.so
+VNDK-product: android.hidl.safe_union@1.0.so
+VNDK-product: android.hidl.token@1.0.so
+VNDK-product: android.system.suspend@1.0.so
+VNDK-product: libaudioutils.so
+VNDK-product: libbacktrace.so
+VNDK-product: libbase.so
+VNDK-product: libc++.so
+VNDK-product: libcamera_metadata.so
+VNDK-product: libcap.so
+VNDK-product: libcompiler_rt.so
+VNDK-product: libcrypto.so
+VNDK-product: libcurl.so
+VNDK-product: libcutils.so
+VNDK-product: libevent.so
+VNDK-product: libexpat.so
+VNDK-product: libfmq.so
+VNDK-product: libhidlbase.so
+VNDK-product: libhidlmemory.so
+VNDK-product: libion.so
+VNDK-product: libjpeg.so
+VNDK-product: libjsoncpp.so
+VNDK-product: libldacBT_abr.so
+VNDK-product: libldacBT_enc.so
+VNDK-product: liblz4.so
+VNDK-product: liblzma.so
+VNDK-product: libminijail.so
+VNDK-product: libnl.so
+VNDK-product: libpcre2.so
+VNDK-product: libpiex.so
+VNDK-product: libpng.so
+VNDK-product: libprocessgroup.so
+VNDK-product: libprocinfo.so
+VNDK-product: libspeexresampler.so
+VNDK-product: libssl.so
+VNDK-product: libtinyalsa.so
+VNDK-product: libtinyxml2.so
+VNDK-product: libunwindstack.so
+VNDK-product: libutils.so
+VNDK-product: libutilscallstack.so
+VNDK-product: libwifi-system-iface.so
+VNDK-product: libxml2.so
+VNDK-product: libyuv.so
+VNDK-product: libz.so
+VNDK-product: libziparchive.so
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 85e551d..d02dc7a 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -185,6 +185,10 @@
$(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
$(VNDK_USING_CORE_VARIANT_LIBRARIES) \
com.android.vndk.current
+
+LOCAL_ADDITIONAL_DEPENDENCIES += $(call module-built-files,\
+ $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES)))
+
endif
include $(BUILD_PHONY_PACKAGE)
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index fd86d87..24005d9 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -7,6 +7,7 @@
LLNDK: libbinder_ndk.so
LLNDK: libc.so
LLNDK: libcgrouprc.so
+LLNDK: libcom.android.tethering.connectivity_native.so
LLNDK: libdl.so
LLNDK: libft2.so
LLNDK: liblog.so
@@ -20,23 +21,24 @@
LLNDK: libvulkan.so
VNDK-SP: android.hardware.common-V2-ndk.so
VNDK-SP: android.hardware.common.fmq-V1-ndk.so
-VNDK-SP: android.hardware.graphics.common-V2-ndk.so
+VNDK-SP: android.hardware.graphics.common-V3-ndk.so
VNDK-SP: android.hardware.graphics.common@1.0.so
VNDK-SP: android.hardware.graphics.common@1.1.so
VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.composer3-V1-ndk.so
VNDK-SP: android.hardware.graphics.mapper@2.0.so
VNDK-SP: android.hardware.graphics.mapper@2.1.so
VNDK-SP: android.hardware.graphics.mapper@3.0.so
VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.graphics.allocator-V1-ndk.so
VNDK-SP: android.hardware.renderscript@1.0.so
-VNDK-SP: android.hidl.safe_union@1.0.so
VNDK-SP: android.hidl.memory.token@1.0.so
VNDK-SP: android.hidl.memory@1.0-impl.so
VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
VNDK-SP: libRSCpuRef.so
VNDK-SP: libRSDriver.so
VNDK-SP: libRS_internal.so
-VNDK-SP: libbacktrace.so
VNDK-SP: libbase.so
VNDK-SP: libbcinfo.so
VNDK-SP: libblas.so
@@ -56,60 +58,25 @@
VNDK-SP: libutils.so
VNDK-SP: libutilscallstack.so
VNDK-SP: libz.so
-VNDK-core: android.hardware.audio.common-V1-ndk.so
VNDK-core: android.hardware.audio.common@2.0.so
-VNDK-core: android.hardware.authsecret-V1-ndk.so
-VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk.so
-VNDK-core: android.hardware.bluetooth.audio-V1-ndk.so
VNDK-core: android.hardware.configstore-utils.so
VNDK-core: android.hardware.configstore@1.0.so
VNDK-core: android.hardware.configstore@1.1.so
VNDK-core: android.hardware.confirmationui-support-lib.so
-VNDK-core: android.hardware.dumpstate-V1-ndk.so
-VNDK-core: android.hardware.gnss-V1-ndk.so
VNDK-core: android.hardware.graphics.allocator@2.0.so
VNDK-core: android.hardware.graphics.allocator@3.0.so
VNDK-core: android.hardware.graphics.allocator@4.0.so
VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
-VNDK-core: android.hardware.health-V1-ndk.so
-VNDK-core: android.hardware.health.storage-V1-ndk.so
-VNDK-core: android.hardware.identity-V3-ndk.so
-VNDK-core: android.hardware.keymaster-V3-ndk.so
-VNDK-core: android.hardware.light-V1-ndk.so
VNDK-core: android.hardware.media.bufferpool@2.0.so
VNDK-core: android.hardware.media.omx@1.0.so
VNDK-core: android.hardware.media@1.0.so
VNDK-core: android.hardware.memtrack-V1-ndk.so
VNDK-core: android.hardware.memtrack@1.0.so
-VNDK-core: android.hardware.nfc-V1-ndk.so
-VNDK-core: android.hardware.oemlock-V1-ndk.so
-VNDK-core: android.hardware.power-V2-ndk.so
-VNDK-core: android.hardware.power.stats-V1-ndk.so
-VNDK-core: android.hardware.radio-V1-ndk.so
-VNDK-core: android.hardware.radio.config-V1-ndk.so
-VNDK-core: android.hardware.radio.data-V1-ndk.so
-VNDK-core: android.hardware.radio.messaging-V1-ndk.so
-VNDK-core: android.hardware.radio.modem-V1-ndk.so
-VNDK-core: android.hardware.radio.network-V1-ndk.so
-VNDK-core: android.hardware.radio.sim-V1-ndk.so
-VNDK-core: android.hardware.radio.voice-V1-ndk.so
-VNDK-core: android.hardware.rebootescrow-V1-ndk.so
-VNDK-core: android.hardware.security.dice-V1-ndk.so
-VNDK-core: android.hardware.security.keymint-V1-ndk.so
-VNDK-core: android.hardware.security.secureclock-V1-ndk.so
-VNDK-core: android.hardware.security.sharedsecret-V1-ndk.so
VNDK-core: android.hardware.soundtrigger@2.0-core.so
VNDK-core: android.hardware.soundtrigger@2.0.so
-VNDK-core: android.hardware.usb-V1-ndk.so
-VNDK-core: android.hardware.vibrator-V2-ndk.so
-VNDK-core: android.hardware.weaver-V1-ndk.so
-VNDK-core: android.hardware.wifi.hostapd-V1-ndk.so
-VNDK-core: android.hardware.wifi.supplicant-V1-ndk.so
VNDK-core: android.hidl.token@1.0-utils.so
VNDK-core: android.hidl.token@1.0.so
-VNDK-core: android.media.audio.common.types-V1-ndk.so
-VNDK-core: android.system.keystore2-V1-ndk.so
VNDK-core: android.system.suspend-V1-ndk.so
VNDK-core: android.system.suspend@1.0.so
VNDK-core: libaudioroute.so
@@ -168,7 +135,6 @@
VNDK-core: libxml2.so
VNDK-core: libyuv.so
VNDK-core: libziparchive.so
-VNDK-private: libbacktrace.so
VNDK-private: libblas.so
VNDK-private: libcompiler_rt.so
VNDK-private: libft2.so
@@ -200,7 +166,6 @@
VNDK-product: android.hidl.token@1.0.so
VNDK-product: android.system.suspend@1.0.so
VNDK-product: libaudioutils.so
-VNDK-product: libbacktrace.so
VNDK-product: libbase.so
VNDK-product: libc++.so
VNDK-product: libcamera_metadata.so
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 74501cd..09d4bc9 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -23,6 +23,8 @@
# - Released GSI contains more VNDK packages to support old version vendors
# - etc.
#
+# See device/generic/common/README.md for more details.
+#
BUILDING_GSI := true
@@ -62,13 +64,18 @@
init.gsi.rc \
init.vndk-nodef.rc \
+# Overlay the GSI-specific SystemUI setting
+PRODUCT_PACKAGES += gsi_overlay_systemui
+PRODUCT_COPY_FILES += \
+ device/generic/common/overlays/overlay-config.xml:$(TARGET_COPY_OUT_SYSTEM_EXT)/overlay/config/config.xml
+
# Support additional VNDK snapshots
PRODUCT_EXTRA_VNDK_VERSIONS := \
- 28 \
29 \
30 \
31 \
32 \
+ 33 \
# Do not build non-GSI partition images.
PRODUCT_BUILD_CACHE_IMAGE := false
@@ -78,6 +85,7 @@
PRODUCT_BUILD_VENDOR_IMAGE := false
PRODUCT_BUILD_SUPER_PARTITION := false
PRODUCT_BUILD_SUPER_EMPTY_IMAGE := false
+PRODUCT_BUILD_SYSTEM_DLKM_IMAGE := false
PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST := true
# Always build modules from source
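Editor's note: the `PRODUCT_EXTRA_VNDK_VERSIONS` bump pairs with the new `target/product/gsi/33.txt` ABI list added earlier in this change; each listed version is expected to have a matching `<version>.txt` (and a VNDK snapshot) checked in. The resulting list:

``` make
# Sketch: 28 is dropped and 33 is added; 33 relies on the 33.txt list above.
PRODUCT_EXTRA_VNDK_VERSIONS := 29 30 31 32 33
```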
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
index 2199c57..8755ae6 100644
--- a/target/product/handheld_product.mk
+++ b/target/product/handheld_product.mk
@@ -30,7 +30,6 @@
Gallery2 \
LatinIME \
Music \
- OneTimeInitializer \
preinstalled-packages-platform-handheld-product.xml \
QuickSearchBox \
SettingsIntelligence \
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 3a59f6c..41233b2 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -34,7 +34,6 @@
PRODUCT_PACKAGES += \
BasicDreams \
BlockedNumberProvider \
- Bluetooth \
BluetoothMidiService \
BookmarkProvider \
BuiltInPrintService \
diff --git a/orchestrator/core/api_export.py b/target/product/module_arm64only.mk
similarity index 71%
copy from orchestrator/core/api_export.py
copy to target/product/module_arm64only.mk
index 2f26b02..4e8d53e 100644
--- a/orchestrator/core/api_export.py
+++ b/target/product/module_arm64only.mk
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
@@ -13,8 +12,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
-def export_apis_from_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["export_api_contributions"])
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
-
+PRODUCT_NAME := module_arm64only
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_arm64only
diff --git a/target/product/iorap_large_memory_config.mk b/target/product/module_x86_64only.mk
similarity index 65%
copy from target/product/iorap_large_memory_config.mk
copy to target/product/module_x86_64only.mk
index 0c6c89a..bca4541 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/product/module_x86_64only.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2020 The Android Open Source Project
+#
+# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,3 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+
+PRODUCT_NAME := module_x86_64only
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_x86_64only
diff --git a/target/product/iorap_large_memory_config.mk b/target/product/ramdisk_stub.mk
similarity index 76%
copy from target/product/iorap_large_memory_config.mk
copy to target/product/ramdisk_stub.mk
index 0c6c89a..2a0b752 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/product/ramdisk_stub.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2020 The Android Open Source Project
+#
+# Copyright 2022 The Android Open-Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,3 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+
+PRODUCT_COPY_FILES += \
+ build/make/target/product/ramdisk_stub.mk:$(TARGET_COPY_OUT_VENDOR_RAMDISK)/nonempty
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index b6560fc..a62cda7 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -157,3 +157,21 @@
dalvik.vm.madvise.vdexfile.size=104857600 \
dalvik.vm.madvise.odexfile.size=104857600 \
dalvik.vm.madvise.artfile.size=4294967295
+
+# Properties for the Unspecialized App Process Pool
+PRODUCT_SYSTEM_PROPERTIES += \
+ dalvik.vm.usap_pool_enabled?=false \
+ dalvik.vm.usap_refill_threshold?=1 \
+ dalvik.vm.usap_pool_size_max?=3 \
+ dalvik.vm.usap_pool_size_min?=1 \
+ dalvik.vm.usap_pool_refill_delay_ms?=3000
+
+# Allow dexopt files that are side-effects of already allowlisted files.
+# This is only necessary when ART is prebuilt.
+ifeq (false,$(ART_MODULE_BUILD_FROM_SOURCE))
+ PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
+ system/framework/%.art \
+ system/framework/%.oat \
+ system/framework/%.odex \
+ system/framework/%.vdex
+endif
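Editor's note: in the dexopt allowlist above, `%` appears to act as the usual make-style wildcard (the artifact path requirement check filters installed paths against these patterns), so one pattern per extension covers every framework jar. A sketch with a hypothetical extra pattern:

``` make
# Hypothetical: also allow app dexopt artifacts under oat/; '%' matches any
# substring, as in $(filter ...) patterns.
PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
    system/framework/oat/%.odex
```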
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index 6c88b44..888505b 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -45,7 +45,7 @@
#
# All components inherited here go to vendor image
#
-$(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
+$(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/emulator_arm/device.mk)
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index ad25a92..4bd8efc 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -1,43 +1,6 @@
LOCAL_PATH:= $(call my-dir)
#######################################
-# verity_key (installed to /, i.e. part of system.img)
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := verity_key
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_SRC_FILES := $(LOCAL_MODULE)
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
-
-# For devices using a separate ramdisk, we need a copy there to establish the chain of trust.
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-LOCAL_REQUIRED_MODULES := verity_key_ramdisk
-endif
-
-include $(BUILD_PREBUILT)
-
-#######################################
-# verity_key (installed to ramdisk)
-#
-# Enabling the target when using system-as-root would cause build failure, as TARGET_RAMDISK_OUT
-# points to the same location as TARGET_ROOT_OUT.
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- include $(CLEAR_VARS)
- LOCAL_MODULE := verity_key_ramdisk
- LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
- LOCAL_LICENSE_CONDITIONS := notice
- LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
- LOCAL_MODULE_CLASS := ETC
- LOCAL_SRC_FILES := verity_key
- LOCAL_MODULE_STEM := verity_key
- LOCAL_MODULE_PATH := $(TARGET_RAMDISK_OUT)
- include $(BUILD_PREBUILT)
-endif
-
-#######################################
# adb key, if configured via PRODUCT_ADB_KEYS
ifdef PRODUCT_ADB_KEYS
ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
diff --git a/target/product/security/bluetooth.pk8 b/target/product/security/bluetooth.pk8
new file mode 100644
index 0000000..c6ea434
--- /dev/null
+++ b/target/product/security/bluetooth.pk8
Binary files differ
diff --git a/target/product/security/bluetooth.x509.pem b/target/product/security/bluetooth.x509.pem
new file mode 100644
index 0000000..396d7c9
--- /dev/null
+++ b/target/product/security/bluetooth.x509.pem
@@ -0,0 +1,36 @@
+-----BEGIN CERTIFICATE-----
+MIIGOzCCBCOgAwIBAgIUEiZapaWZVSter06CJMf2kHi8PIswDQYJKoZIhvcNAQEL
+BQAwgasxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQH
+DA1Nb3VudGFpbiBWaWV3MRAwDgYDVQQKDAdBbmRyb2lkMRAwDgYDVQQLDAdBbmRy
+b2lkMScwJQYDVQQDDB5jb20uYW5kcm9pZC5ibHVldG9vdGguc2VydmljZXMxIjAg
+BgkqhkiG9w0BCQEWE2FuZHJvaWRAYW5kcm9pZC5jb20wIBcNMjIwMzE1MDAzNjAz
+WhgPNDc2MDAyMDkwMDM2MDNaMIGrMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2Fs
+aWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4GA1UECgwHQW5kcm9p
+ZDEQMA4GA1UECwwHQW5kcm9pZDEnMCUGA1UEAwweY29tLmFuZHJvaWQuYmx1ZXRv
+b3RoLnNlcnZpY2VzMSIwIAYJKoZIhvcNAQkBFhNhbmRyb2lkQGFuZHJvaWQuY29t
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAsVlq9pozUREGlb8u8Y0A
+fYwPs5OuavNx/EsX03aTjmAXUfSOMAewqzUXDIRjw8UQvOW63utaZ0go9osDPzNf
+VEftmGxW/AUC+HWGaLDQfCYO3ficPPOS7xpEhGZERNbnhvh5qX0NBt6mJygsfpOm
+RPThbi6Ig2Brxh1eqVYqRkTjhNFKD6gCd1PdMmUSF88xEYaZWvTkET89Zh38lLza
+2x/wfNZmCSAVurNw1Kf9NQfYsaGHwMsjrvTyhG93TTYXzRBFzAO2WlBiw6R0tQr8
+ZW5XCM9Yo6AS0KXiU0ZWwOXxhGdr38rNd7j9nZtpFwWmN1kgeb/vpEfq0Ylua9By
+uURnfJZu2K4TbFamuyjihItra2ZKOtFNPDeuggKMCkuZz6WU8FCoMEpnq5P2agxN
+OGAa7ynXdNzek98N3TGX8qtfEgCv6vyuM0gakJ6D9nM43nsCm1LkB/JA0CacWyRz
+ljaLL1C4S43azEOYyOOb94ITnkZCQGtH33kxzamyPLIZ37VF4+v6yTXySLBzOnhe
+Os5uBIDohVJuI838bLhZf8e5mIrnjiKwsmExXiQvgidbwvZKCz9n8YT4iUhWPx4F
+W+GPcivZsvsECcnJ2QURK1zhir5QuLS7ZbAth4kiEUxJ6ujF5jftE+L/ClK2LiY0
+2IXWRCct8J1hfJZZx8lm3PUCAwEAAaNTMFEwHQYDVR0OBBYEFO5CgtQzKbTEd/Q9
+rxK14a9BBwFZMB8GA1UdIwQYMBaAFO5CgtQzKbTEd/Q9rxK14a9BBwFZMA8GA1Ud
+EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIBAGrGS1zmaoARVq7qhoY+xzSc
+1I/Tzf6vG6aHBC+CcIoSM2oqr6TGH+ADHAY6jhu/qzv1ij3gtoInAkBtkWvYsCIV
+eISPj8Qomcd8EIeW77p+ArKzS4HY5m1c/O4D/5rkl6c0exFq4Pdw9V8xyM98QtLd
+oj4xzzXUTPOIwkROHkj8otcML28m/MC0l/4b+flHnPqKFuLBjhxi9b/ZfwaXfjkx
+TcXpM3nPH8zN7kaJpS1fPW1IJyxJYvT022uK+afpezTmyS/50aOncUGjDJRw8CcO
+B88O8lpizDD3tD7P6jVOpRRJS4SnkVErbIn1xdWER6ubhnnycH7UmDVIx+vNd/t6
+YDa377au8Za+LnbDPfV1+Og+RaJSEIjJgfYyqnjBxGdRGN21VbqJdRzo/eO4ZFd2
+mGVtMosVr0jw4O8r60o9oMMWBTbFpxOI929QdcV+X1Lz8A8BZz0faXfZ2Z9usctu
+W2FtZge3tsJ07z7kuhNdbnm2yQVfd0FqiJsapUjlhgcdFVoDWPuqOfWAoG31ble6
+eiNnxfjiCckPWyciIE6lw97nvavGjlUacH5qVG86hOWU7xyBgeQ0PH4e+Nxr50yU
+A0GMxni1gefZFG8qEPdNRuDT1QdqDGh/8Ea11GEUMXdAxk0UzqyAtLDr6MbwK6lV
+mqmeueFdogdjvQ3mXe94
+-----END CERTIFICATE-----
diff --git a/target/product/security/verity.pk8 b/target/product/security/verity.pk8
deleted file mode 100644
index bebf216..0000000
--- a/target/product/security/verity.pk8
+++ /dev/null
Binary files differ
diff --git a/target/product/security/verity.x509.pem b/target/product/security/verity.x509.pem
deleted file mode 100644
index 86399c3..0000000
--- a/target/product/security/verity.x509.pem
+++ /dev/null
@@ -1,24 +0,0 @@
------BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
-VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
-AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
-A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
-6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
-fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
-T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
-AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
-jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
-HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
-oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
-NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
-JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
-dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
-UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
-yttuAJAEAymk1mipd9+zp38=
------END CERTIFICATE-----
diff --git a/target/product/security/verity_key b/target/product/security/verity_key
deleted file mode 100644
index 31982d9..0000000
--- a/target/product/security/verity_key
+++ /dev/null
Binary files differ
diff --git a/target/product/verity.mk b/target/product/verity.mk
deleted file mode 100644
index 5f09283..0000000
--- a/target/product/verity.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (C) 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Provides dependencies necessary for verified boot.
-
-PRODUCT_SUPPORTS_BOOT_SIGNER := true
-PRODUCT_SUPPORTS_VERITY := true
-PRODUCT_SUPPORTS_VERITY_FEC := true
-
-# The dev key is used to sign boot and recovery images, and the verity
-# metadata table. Actual product deliverables will be re-signed by hand.
-# We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
-PRODUCT_VERITY_SIGNING_KEY := build/make/target/product/security/verity
-
-PRODUCT_PACKAGES += \
- verity_key
diff --git a/tests/b_tests.sh b/tests/b_tests.sh
new file mode 100755
index 0000000..45cb4f7
--- /dev/null
+++ b/tests/b_tests.sh
@@ -0,0 +1,36 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# These commands are expected to always return successfully
+
+trap 'exit 1' ERR
+
+source $(dirname $0)/../envsetup.sh
+
+# lunch is required to set up PATH to use b
+lunch aosp_arm64
+
+test_target=//build/bazel/scripts/difftool:difftool
+
+b build "$test_target"
+b build "$test_target" --run-soong-tests
+b build --run-soong-tests "$test_target"
+b --run-soong-tests build "$test_target"
+b cquery 'kind(test, //build/bazel/examples/android_app/...)' --config=android
+b run "$test_target" -- --help >/dev/null
+
+# Workflow tests for bmod
+bmod libm
+b run $(bmod fastboot) -- help
+b build $(bmod libm) $(bmod libcutils) --config=android
diff --git a/tests/envsetup_tests.sh b/tests/envsetup_tests.sh
index abdcd56..6b41766 100755
--- a/tests/envsetup_tests.sh
+++ b/tests/envsetup_tests.sh
@@ -1,37 +1,22 @@
#!/bin/bash -e
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
-source $(dirname $0)/../envsetup.sh
-
-unset TARGET_PRODUCT TARGET_BUILD_VARIANT TARGET_PLATFORM_VERSION
-
-function check_lunch
-(
- echo lunch $1
- set +e
- lunch $1 > /dev/null 2> /dev/null
- set -e
- [ "$TARGET_PRODUCT" = "$2" ] || ( echo "lunch $1: expected TARGET_PRODUCT='$2', got '$TARGET_PRODUCT'" && exit 1 )
- [ "$TARGET_BUILD_VARIANT" = "$3" ] || ( echo "lunch $1: expected TARGET_BUILD_VARIANT='$3', got '$TARGET_BUILD_VARIANT'" && exit 1 )
- [ "$TARGET_PLATFORM_VERSION" = "$4" ] || ( echo "lunch $1: expected TARGET_PLATFORM_VERSION='$4', got '$TARGET_PLATFORM_VERSION'" && exit 1 )
+tests=(
+ $(dirname $0)/lunch_tests.sh
)
-default_version=$(get_build_var DEFAULT_PLATFORM_VERSION)
-valid_version=PPR1
-
-# lunch tests
-check_lunch "aosp_arm64" "aosp_arm64" "eng" ""
-check_lunch "aosp_arm64-userdebug" "aosp_arm64" "userdebug" ""
-check_lunch "aosp_arm64-userdebug-$default_version" "aosp_arm64" "userdebug" "$default_version"
-check_lunch "aosp_arm64-userdebug-$valid_version" "aosp_arm64" "userdebug" "$valid_version"
-check_lunch "abc" "" "" ""
-check_lunch "aosp_arm64-abc" "" "" ""
-check_lunch "aosp_arm64-userdebug-abc" "" "" ""
-check_lunch "aosp_arm64-abc-$valid_version" "" "" ""
-check_lunch "abc-userdebug-$valid_version" "" "" ""
-check_lunch "-" "" "" ""
-check_lunch "--" "" "" ""
-check_lunch "-userdebug" "" "" ""
-check_lunch "-userdebug-" "" "" ""
-check_lunch "-userdebug-$valid_version" "" "" ""
-check_lunch "aosp_arm64-userdebug-$valid_version-" "" "" ""
-check_lunch "aosp_arm64-userdebug-$valid_version-abc" "" "" ""
+for test in $tests; do
+ bash -x $test
+done
diff --git a/tests/lunch_tests.sh b/tests/lunch_tests.sh
new file mode 100755
index 0000000..4285d13
--- /dev/null
+++ b/tests/lunch_tests.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source $(dirname $0)/../envsetup.sh
+
+unset TARGET_PRODUCT TARGET_BUILD_VARIANT TARGET_PLATFORM_VERSION
+
+function check_lunch
+(
+ echo lunch $1
+ set +e
+ lunch $1 > /dev/null 2> /dev/null
+ set -e
+ [ "$TARGET_PRODUCT" = "$2" ] || ( echo "lunch $1: expected TARGET_PRODUCT='$2', got '$TARGET_PRODUCT'" && exit 1 )
+ [ "$TARGET_BUILD_VARIANT" = "$3" ] || ( echo "lunch $1: expected TARGET_BUILD_VARIANT='$3', got '$TARGET_BUILD_VARIANT'" && exit 1 )
+ [ "$TARGET_PLATFORM_VERSION" = "$4" ] || ( echo "lunch $1: expected TARGET_PLATFORM_VERSION='$4', got '$TARGET_PLATFORM_VERSION'" && exit 1 )
+)
+
+default_version=$(get_build_var DEFAULT_PLATFORM_VERSION)
+
+# lunch tests
+check_lunch "aosp_arm64" "aosp_arm64" "eng" ""
+check_lunch "aosp_arm64-userdebug" "aosp_arm64" "userdebug" ""
+check_lunch "aosp_arm64-userdebug-$default_version" "aosp_arm64" "userdebug" "$default_version"
+check_lunch "abc" "" "" ""
+check_lunch "aosp_arm64-abc" "" "" ""
+check_lunch "aosp_arm64-userdebug-abc" "" "" ""
+check_lunch "aosp_arm64-abc-$default_version" "" "" ""
+check_lunch "abc-userdebug-$default_version" "" "" ""
+check_lunch "-" "" "" ""
+check_lunch "--" "" "" ""
+check_lunch "-userdebug" "" "" ""
+check_lunch "-userdebug-" "" "" ""
+check_lunch "-userdebug-$default_version" "" "" ""
+check_lunch "aosp_arm64-userdebug-$default_version-" "" "" ""
+check_lunch "aosp_arm64-userdebug-$default_version-abc" "" "" ""
diff --git a/orchestrator/core/api_export.py b/tests/roboleaf_tests.sh
old mode 100644
new mode 100755
similarity index 74%
copy from orchestrator/core/api_export.py
copy to tests/roboleaf_tests.sh
index 2f26b02..2d13766
--- a/orchestrator/core/api_export.py
+++ b/tests/roboleaf_tests.sh
@@ -1,12 +1,11 @@
-#!/usr/bin/python3
-#
+#!/bin/bash -e
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -14,7 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-def export_apis_from_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["export_api_contributions"])
+tests=(
+ $(dirname $0)/b_tests.sh
+)
-
+for test in $tests; do
+ bash -x $test
+done
diff --git a/tools/Android.bp b/tools/Android.bp
index 6601c60..1f0d406 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -49,3 +49,18 @@
out: ["kernel_release.txt"],
cmd: "$(location) --tools lz4:$(location lz4) --input $(in) --output-release > $(out)"
}
+
+cc_binary_host {
+ name: "build-runfiles",
+ srcs: ["build-runfiles.cc"],
+}
+
+python_binary_host {
+ name: "check_radio_versions",
+ srcs: ["check_radio_versions.py"],
+}
+
+python_binary_host {
+ name: "check_elf_file",
+ srcs: ["check_elf_file.py"],
+}
diff --git a/tools/BUILD.bazel b/tools/BUILD.bazel
index 3170820..0de178b 100644
--- a/tools/BUILD.bazel
+++ b/tools/BUILD.bazel
@@ -1,20 +1,27 @@
py_library(
- name="event_log_tags",
+ name = "event_log_tags",
srcs = ["event_log_tags.py"],
)
py_binary(
- name="java-event-log-tags",
- srcs=["java-event-log-tags.py"],
- deps=[":event_log_tags"],
- visibility = ["//visibility:public"],
+ name = "java-event-log-tags",
+ srcs = ["java-event-log-tags.py"],
python_version = "PY3",
+ visibility = ["//visibility:public"],
+ deps = [":event_log_tags"],
)
py_binary(
- name="merge-event-log-tags",
- srcs=["merge-event-log-tags.py"],
- deps=[":event_log_tags"],
- visibility = ["//visibility:public"],
+ name = "merge-event-log-tags",
+ srcs = ["merge-event-log-tags.py"],
python_version = "PY3",
+ visibility = ["//visibility:public"],
+ deps = [":event_log_tags"],
+)
+
+py_binary(
+ name = "check_elf_file",
+ srcs = ["check_elf_file.py"],
+ python_version = "PY3",
+ visibility = ["//visibility:public"],
)
diff --git a/tools/build-runfiles.cc b/tools/build-runfiles.cc
new file mode 100644
index 0000000..b6197f0
--- /dev/null
+++ b/tools/build-runfiles.cc
@@ -0,0 +1,426 @@
+// Copyright 2014 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// This program creates a "runfiles tree" from a "runfiles manifest".
+//
+// The command line arguments are an input manifest INPUT and an output
+// directory RUNFILES. First, the files in the RUNFILES directory are scanned
+// and any extraneous ones are removed. Second, any missing files are created.
+// Finally, a copy of the input manifest is written to RUNFILES/MANIFEST.
+//
+// The input manifest consists of lines, each containing a relative path within
+// the runfiles, a space, and an optional absolute path. If this second path
+// is present, a symlink is created pointing to it; otherwise an empty file is
+// created.
+//
+// Given the line
+// <workspace root>/output/path /real/path
+// we will create directories
+// RUNFILES/<workspace root>
+// RUNFILES/<workspace root>/output
+// a symlink
+// RUNFILES/<workspace root>/output/path -> /real/path
+// and the output manifest will contain a line
+// <workspace root>/output/path /real/path
+//
+// If --use_metadata is supplied, every other line is treated as opaque
+// metadata, and is ignored here.
+//
+// All output paths must be relative and generally (but not always) begin with
+// <workspace root>. No output path may be equal to another. No output path may
+// be a path prefix of another.
+
+#define _FILE_OFFSET_BITS 64
+
+#include <dirent.h>
+#include <err.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <map>
+#include <string>
+
+// program_invocation_short_name is not portable.
+static const char *argv0;
+
+const char *input_filename;
+const char *output_base_dir;
+
+enum FileType {
+ FILE_TYPE_REGULAR,
+ FILE_TYPE_DIRECTORY,
+ FILE_TYPE_SYMLINK
+};
+
+struct FileInfo {
+ FileType type;
+ std::string symlink_target;
+
+ bool operator==(const FileInfo &other) const {
+ return type == other.type && symlink_target == other.symlink_target;
+ }
+
+ bool operator!=(const FileInfo &other) const {
+ return !(*this == other);
+ }
+};
+
+typedef std::map<std::string, FileInfo> FileInfoMap;
+
+class RunfilesCreator {
+ public:
+ explicit RunfilesCreator(const std::string &output_base)
+ : output_base_(output_base),
+ output_filename_("MANIFEST"),
+ temp_filename_(output_filename_ + ".tmp") {
+ SetupOutputBase();
+ if (chdir(output_base_.c_str()) != 0) {
+ err(2, "chdir '%s'", output_base_.c_str());
+ }
+ }
+
+ void ReadManifest(const std::string &manifest_file, bool allow_relative,
+ bool use_metadata) {
+ FILE *outfile = fopen(temp_filename_.c_str(), "w");
+ if (!outfile) {
+ err(2, "opening '%s/%s' for writing", output_base_.c_str(),
+ temp_filename_.c_str());
+ }
+ FILE *infile = fopen(manifest_file.c_str(), "r");
+ if (!infile) {
+ err(2, "opening '%s' for reading", manifest_file.c_str());
+ }
+
+ // read input manifest
+ int lineno = 0;
+ char buf[3 * PATH_MAX];
+ while (fgets(buf, sizeof buf, infile)) {
+ // copy line to output manifest
+ if (fputs(buf, outfile) == EOF) {
+ err(2, "writing to '%s/%s'", output_base_.c_str(),
+ temp_filename_.c_str());
+ }
+
+ // parse line
+ ++lineno;
+ // Skip metadata lines. They are used solely for
+ // dependency checking.
+ if (use_metadata && lineno % 2 == 0) continue;
+
+ char *tok = strtok(buf, " \n");
+ if (tok == nullptr) {
+ continue;
+ } else if (*tok == '/') {
+ errx(2, "%s:%d: paths must not be absolute", input_filename, lineno);
+ }
+ std::string link(tok);
+
+ const char *target = strtok(nullptr, " \n");
+ if (target == nullptr) {
+ target = "";
+ } else if (strtok(nullptr, " \n") != nullptr) {
+ errx(2, "%s:%d: link or target filename contains space", input_filename, lineno);
+ } else if (!allow_relative && target[0] != '/') {
+ errx(2, "%s:%d: expected absolute path", input_filename, lineno);
+ }
+
+ FileInfo *info = &manifest_[link];
+ if (target[0] == '\0') {
+ // No target means an empty file.
+ info->type = FILE_TYPE_REGULAR;
+ } else {
+ info->type = FILE_TYPE_SYMLINK;
+ info->symlink_target = target;
+ }
+
+ FileInfo parent_info;
+ parent_info.type = FILE_TYPE_DIRECTORY;
+
+ while (true) {
+ int k = link.rfind('/');
+ if (k < 0) break;
+ link.erase(k, std::string::npos);
+ if (!manifest_.insert(std::make_pair(link, parent_info)).second) break;
+ }
+ }
+ if (fclose(outfile) != 0) {
+ err(2, "writing to '%s/%s'", output_base_.c_str(),
+ temp_filename_.c_str());
+ }
+ fclose(infile);
+
+ // Don't delete the temp manifest file.
+ manifest_[temp_filename_].type = FILE_TYPE_REGULAR;
+ }
+
+ void CreateRunfiles() {
+ if (unlink(output_filename_.c_str()) != 0 && errno != ENOENT) {
+ err(2, "removing previous file at '%s/%s'", output_base_.c_str(),
+ output_filename_.c_str());
+ }
+
+ ScanTreeAndPrune(".");
+ CreateFiles();
+
+ // rename output file into place
+ if (rename(temp_filename_.c_str(), output_filename_.c_str()) != 0) {
+ err(2, "renaming '%s/%s' to '%s/%s'",
+ output_base_.c_str(), temp_filename_.c_str(),
+ output_base_.c_str(), output_filename_.c_str());
+ }
+ }
+
+ private:
+ void SetupOutputBase() {
+ struct stat st;
+ if (stat(output_base_.c_str(), &st) != 0) {
+ // Technically, this will cause problems if the user's umask contains
+ // 0200, but we don't care. Anyone who does that deserves what's coming.
+ if (mkdir(output_base_.c_str(), 0777) != 0) {
+ err(2, "creating directory '%s'", output_base_.c_str());
+ }
+ } else {
+ EnsureDirReadAndWritePerms(output_base_);
+ }
+ }
+
+ void ScanTreeAndPrune(const std::string &path) {
+ // A note on non-empty files:
+ // We don't distinguish between empty and non-empty files. That is, if
+ // there's a file that has contents, we don't truncate it here, even though
+ // the manifest supports creation of empty files, only. Given that
+ // .runfiles are *supposed* to be immutable, this shouldn't be a problem.
+ EnsureDirReadAndWritePerms(path);
+
+ struct dirent *entry;
+ DIR *dh = opendir(path.c_str());
+ if (!dh) {
+ err(2, "opendir '%s'", path.c_str());
+ }
+
+ errno = 0;
+ const std::string prefix = (path == "." ? "" : path + "/");
+ while ((entry = readdir(dh)) != nullptr) {
+ if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) continue;
+
+ std::string entry_path = prefix + entry->d_name;
+ FileInfo actual_info;
+ actual_info.type = DentryToFileType(entry_path, entry);
+
+ if (actual_info.type == FILE_TYPE_SYMLINK) {
+ ReadLinkOrDie(entry_path, &actual_info.symlink_target);
+ }
+
+ FileInfoMap::iterator expected_it = manifest_.find(entry_path);
+ if (expected_it == manifest_.end() ||
+ expected_it->second != actual_info) {
+ DelTree(entry_path, actual_info.type);
+ } else {
+ manifest_.erase(expected_it);
+ if (actual_info.type == FILE_TYPE_DIRECTORY) {
+ ScanTreeAndPrune(entry_path);
+ }
+ }
+
+ errno = 0;
+ }
+ if (errno != 0) {
+ err(2, "reading directory '%s'", path.c_str());
+ }
+ closedir(dh);
+ }
+
+ void CreateFiles() {
+ for (FileInfoMap::const_iterator it = manifest_.begin();
+ it != manifest_.end(); ++it) {
+ const std::string &path = it->first;
+ switch (it->second.type) {
+ case FILE_TYPE_DIRECTORY:
+ if (mkdir(path.c_str(), 0777) != 0) {
+ err(2, "mkdir '%s'", path.c_str());
+ }
+ break;
+ case FILE_TYPE_REGULAR:
+ {
+ int fd = open(path.c_str(), O_CREAT|O_EXCL|O_WRONLY, 0555);
+ if (fd < 0) {
+ err(2, "creating empty file '%s'", path.c_str());
+ }
+ close(fd);
+ }
+ break;
+ case FILE_TYPE_SYMLINK:
+ {
+ const std::string& target = it->second.symlink_target;
+ if (symlink(target.c_str(), path.c_str()) != 0) {
+ err(2, "symlinking '%s' -> '%s'", path.c_str(), target.c_str());
+ }
+ }
+ break;
+ }
+ }
+ }
+
+ FileType DentryToFileType(const std::string &path, struct dirent *ent) {
+#ifdef _DIRENT_HAVE_D_TYPE
+ if (ent->d_type != DT_UNKNOWN) {
+ if (ent->d_type == DT_DIR) {
+ return FILE_TYPE_DIRECTORY;
+ } else if (ent->d_type == DT_LNK) {
+ return FILE_TYPE_SYMLINK;
+ } else {
+ return FILE_TYPE_REGULAR;
+ }
+ } else // NOLINT (the brace is in the next line)
+#endif
+ {
+ struct stat st;
+ LStatOrDie(path, &st);
+ if (S_ISDIR(st.st_mode)) {
+ return FILE_TYPE_DIRECTORY;
+ } else if (S_ISLNK(st.st_mode)) {
+ return FILE_TYPE_SYMLINK;
+ } else {
+ return FILE_TYPE_REGULAR;
+ }
+ }
+ }
+
+ void LStatOrDie(const std::string &path, struct stat *st) {
+ if (lstat(path.c_str(), st) != 0) {
+ err(2, "lstating file '%s'", path.c_str());
+ }
+ }
+
+ void StatOrDie(const std::string &path, struct stat *st) {
+ if (stat(path.c_str(), st) != 0) {
+ err(2, "stating file '%s'", path.c_str());
+ }
+ }
+
+ void ReadLinkOrDie(const std::string &path, std::string *output) {
+ char readlink_buffer[PATH_MAX];
+ int sz = readlink(path.c_str(), readlink_buffer, sizeof(readlink_buffer));
+ if (sz < 0) {
+ err(2, "reading symlink '%s'", path.c_str());
+ }
+    // readlink returns a string that is not null-terminated.
+ std::string(readlink_buffer, sz).swap(*output);
+ }
+
+ void EnsureDirReadAndWritePerms(const std::string &path) {
+ const int kMode = 0700;
+ struct stat st;
+ LStatOrDie(path, &st);
+ if ((st.st_mode & kMode) != kMode) {
+ int new_mode = st.st_mode | kMode;
+ if (chmod(path.c_str(), new_mode) != 0) {
+ err(2, "chmod '%s'", path.c_str());
+ }
+ }
+ }
+
+ bool DelTree(const std::string &path, FileType file_type) {
+ if (file_type != FILE_TYPE_DIRECTORY) {
+ if (unlink(path.c_str()) != 0) {
+ err(2, "unlinking '%s'", path.c_str());
+ return false;
+ }
+ return true;
+ }
+
+ EnsureDirReadAndWritePerms(path);
+
+ struct dirent *entry;
+ DIR *dh = opendir(path.c_str());
+ if (!dh) {
+ err(2, "opendir '%s'", path.c_str());
+ }
+ errno = 0;
+ while ((entry = readdir(dh)) != nullptr) {
+ if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) continue;
+ const std::string entry_path = path + '/' + entry->d_name;
+ FileType entry_file_type = DentryToFileType(entry_path, entry);
+ DelTree(entry_path, entry_file_type);
+ errno = 0;
+ }
+ if (errno != 0) {
+ err(2, "readdir '%s'", path.c_str());
+ }
+ closedir(dh);
+ if (rmdir(path.c_str()) != 0) {
+ err(2, "rmdir '%s'", path.c_str());
+ }
+ return true;
+ }
+
+ private:
+ std::string output_base_;
+ std::string output_filename_;
+ std::string temp_filename_;
+
+ FileInfoMap manifest_;
+};
+
+int main(int argc, char **argv) {
+ argv0 = argv[0];
+
+ argc--; argv++;
+ bool allow_relative = false;
+ bool use_metadata = false;
+
+ while (argc >= 1) {
+ if (strcmp(argv[0], "--allow_relative") == 0) {
+ allow_relative = true;
+ argc--; argv++;
+ } else if (strcmp(argv[0], "--use_metadata") == 0) {
+ use_metadata = true;
+ argc--; argv++;
+ } else {
+ break;
+ }
+ }
+
+ if (argc != 2) {
+ fprintf(stderr, "usage: %s "
+ "[--allow_relative] [--use_metadata] "
+ "INPUT RUNFILES\n",
+ argv0);
+ return 1;
+ }
+
+ input_filename = argv[0];
+ output_base_dir = argv[1];
+
+ std::string manifest_file = input_filename;
+ if (input_filename[0] != '/') {
+ char cwd_buf[PATH_MAX];
+ if (getcwd(cwd_buf, sizeof(cwd_buf)) == nullptr) {
+ err(2, "getcwd failed");
+ }
+ manifest_file = std::string(cwd_buf) + '/' + manifest_file;
+ }
+
+ RunfilesCreator runfiles_creator(output_base_dir);
+ runfiles_creator.ReadManifest(manifest_file, allow_relative, use_metadata);
+ runfiles_creator.CreateRunfiles();
+
+ return 0;
+}
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 536a381..c2e36df 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -30,9 +30,6 @@
echo "ro.build.host=$BUILD_HOSTNAME"
echo "ro.build.tags=$BUILD_VERSION_TAGS"
echo "ro.build.flavor=$TARGET_BUILD_FLAVOR"
-if [ -n "$BOARD_BUILD_SYSTEM_ROOT_IMAGE" ] ; then
- echo "ro.build.system_root_image=$BOARD_BUILD_SYSTEM_ROOT_IMAGE"
-fi
# These values are deprecated, use "ro.product.cpu.abilist"
# instead (see below).
diff --git a/tools/canoninja/go.mod b/tools/canoninja/go.mod
index c5a924e..9e668a5 100644
--- a/tools/canoninja/go.mod
+++ b/tools/canoninja/go.mod
@@ -1 +1,3 @@
module canoninja
+
+go 1.19
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 0b80226..51ec23b 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2019 The Android Open Source Project
#
@@ -196,11 +196,7 @@
def _read_llvm_readobj(cls, elf_file_path, header, llvm_readobj):
"""Run llvm-readobj and parse the output."""
cmd = [llvm_readobj, '--dynamic-table', '--dyn-symbols', elf_file_path]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, _ = proc.communicate()
- rc = proc.returncode
- if rc != 0:
- raise subprocess.CalledProcessError(rc, cmd, out)
+ out = subprocess.check_output(cmd, text=True)
lines = out.splitlines()
return cls._parse_llvm_readobj(elf_file_path, header, lines)
@@ -411,8 +407,7 @@
    # Check whether all DT_NEEDED entries are specified.
for lib in self._file_under_test.dt_needed:
if lib not in specified_sonames:
- self._error('DT_NEEDED "{}" is not specified in shared_libs.'
- .format(lib.decode('utf-8')))
+ self._error(f'DT_NEEDED "{lib}" is not specified in shared_libs.')
missing_shared_libs = True
if missing_shared_libs:
@@ -467,7 +462,7 @@
"""Check whether all undefined symbols are resolved to a definition."""
all_elf_files = [self._file_under_test] + self._shared_libs
missing_symbols = []
- for sym, imported_vers in self._file_under_test.imported.iteritems():
+ for sym, imported_vers in self._file_under_test.imported.items():
for imported_ver in imported_vers:
lib = self._find_symbol_from_libs(all_elf_files, sym, imported_ver)
if not lib:
@@ -475,16 +470,14 @@
if missing_symbols:
for sym, ver in sorted(missing_symbols):
- sym = sym.decode('utf-8')
if ver:
- sym += '@' + ver.decode('utf-8')
- self._error('Unresolved symbol: {}'.format(sym))
+ sym += '@' + ver
+ self._error(f'Unresolved symbol: {sym}')
self._note()
self._note('Some dependencies might be changed, thus the symbol(s) '
'above cannot be resolved.')
- self._note('Please re-build the prebuilt file: "{}".'
- .format(self._file_path))
+ self._note(f'Please re-build the prebuilt file: "{self._file_path}".')
self._note()
self._note('If this is a new prebuilt file and it is designed to have '
diff --git a/tools/check_radio_versions.py b/tools/check_radio_versions.py
index ebe621f..d1d50e6 100755
--- a/tools/check_radio_versions.py
+++ b/tools/check_radio_versions.py
@@ -22,11 +22,18 @@
except ImportError:
from sha import sha as sha1
-if len(sys.argv) < 2:
+import argparse
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--board_info_txt", nargs="?", required=True)
+parser.add_argument("--board_info_check", nargs="*", required=True)
+args = parser.parse_args()
+
+if not args.board_info_txt:
sys.exit(0)
build_info = {}
-f = open(sys.argv[1])
+f = open(args.board_info_txt)
for line in f:
line = line.strip()
if line.startswith("require"):
@@ -36,7 +43,7 @@
bad = False
-for item in sys.argv[2:]:
+for item in args.board_info_check:
key, fn = item.split(":", 1)
values = build_info.get(key, None)
@@ -52,8 +59,8 @@
try:
f = open(fn + ".sha1")
except IOError:
- if not bad: print
- print "*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key)
+ if not bad: print()
+ print("*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key))
bad = True
continue
for line in f:
@@ -63,17 +70,17 @@
versions[h] = v
if digest not in versions:
- if not bad: print
- print "*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn)
+ if not bad: print()
+ print("*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn))
bad = True
continue
if versions[digest] not in values:
- if not bad: print
- print "*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
- fn, versions[digest], key, sys.argv[1])
+ if not bad: print()
+ print("*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
+ fn, versions[digest], key, args.board_info_txt))
bad = True
if bad:
- print
+ print()
sys.exit(1)
diff --git a/tools/compare_fileslist.py b/tools/compare_fileslist.py
deleted file mode 100755
index 1f507d8..0000000
--- a/tools/compare_fileslist.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import cgi, os, string, sys
-
-def IsDifferent(row):
- val = None
- for v in row:
- if v:
- if not val:
- val = v
- else:
- if val != v:
- return True
- return False
-
-def main(argv):
- inputs = argv[1:]
- data = {}
- index = 0
- for input in inputs:
- f = file(input, "r")
- lines = f.readlines()
- f.close()
- lines = map(string.split, lines)
- lines = map(lambda (x,y): (y,int(x)), lines)
- for fn,sz in lines:
- if not data.has_key(fn):
- data[fn] = {}
- data[fn][index] = sz
- index = index + 1
- rows = []
- for fn,sizes in data.iteritems():
- row = [fn]
- for i in range(0,index):
- if sizes.has_key(i):
- row.append(sizes[i])
- else:
- row.append(None)
- rows.append(row)
- rows = sorted(rows, key=lambda x: x[0])
- print """<html>
- <head>
- <style type="text/css">
- .fn, .sz, .z, .d {
- padding-left: 10px;
- padding-right: 10px;
- }
- .sz, .z, .d {
- text-align: right;
- }
- .fn {
- background-color: #ffffdd;
- }
- .sz {
- background-color: #ffffcc;
- }
- .z {
- background-color: #ffcccc;
- }
- .d {
- background-color: #99ccff;
- }
- </style>
- </head>
- <body>
- """
- print "<table>"
- print "<tr>"
- for input in inputs:
- combo = input.split(os.path.sep)[1]
- print " <td class='fn'>%s</td>" % cgi.escape(combo)
- print "</tr>"
-
- for row in rows:
- print "<tr>"
- for sz in row[1:]:
- if not sz:
- print " <td class='z'> </td>"
- elif IsDifferent(row[1:]):
- print " <td class='d'>%d</td>" % sz
- else:
- print " <td class='sz'>%d</td>" % sz
- print " <td class='fn'>%s</td>" % cgi.escape(row[0])
- print "</tr>"
- print "</table>"
- print "</body></html>"
-
-if __name__ == '__main__':
- main(sys.argv)
-
-
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index 225f3a5..8e13f2f 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -18,6 +18,17 @@
}
blueprint_go_binary {
+ name: "compliance_checkmetadata",
+ srcs: ["cmd/checkmetadata/checkmetadata.go"],
+ deps: [
+ "compliance-module",
+ "projectmetadata-module",
+ "soong-response",
+ ],
+ testSrcs: ["cmd/checkmetadata/checkmetadata_test.go"],
+}
+
+blueprint_go_binary {
name: "compliance_checkshare",
srcs: ["cmd/checkshare/checkshare.go"],
deps: [
@@ -120,6 +131,17 @@
testSrcs: ["cmd/xmlnotice/xmlnotice_test.go"],
}
+blueprint_go_binary {
+ name: "compliance_sbom",
+ srcs: ["cmd/sbom/sbom.go"],
+ deps: [
+ "compliance-module",
+ "blueprint-deptools",
+ "soong-response",
+ ],
+ testSrcs: ["cmd/sbom/sbom_test.go"],
+}
+
bootstrap_go_package {
name: "compliance-module",
srcs: [
@@ -156,6 +178,8 @@
"test_util.go",
],
deps: [
+ "compliance-test-fs-module",
+ "projectmetadata-module",
"golang-protobuf-proto",
"golang-protobuf-encoding-prototext",
"license_metadata_proto",
diff --git a/tools/compliance/README.md b/tools/compliance/README.md
new file mode 100644
index 0000000..995d9ca
--- /dev/null
+++ b/tools/compliance/README.md
@@ -0,0 +1,101 @@
+# Compliance
+
+<!-- Much of this content also appears in doc.go.
+When changing this file, consider whether the change also applies to doc.go. -->
+
+Package compliance provides an approved means for reading, consuming, and
+analyzing license metadata graphs.
+
+Assuming the license metadata and dependencies are fully and accurately
+recorded in the build system, any discrepancy between the official policy for
+open source license compliance and this code is **a bug in this code.**
+
+## Naming
+
+All of the code that directly reflects a policy decision belongs in a file with
+a name beginning `policy_`. Changes to these files need to be authored or
+reviewed by someone in OSPO or whichever successor group governs policy.
+
+The files with names not beginning `policy_` describe data types and general,
+reusable algorithms.
+
+The source code for binary tools and utilities appears under the `cmd/`
+subdirectory. Other subdirectories contain reusable components that are not
+`compliance` per se.
+
+## Data Types
+
+A few principal types to understand are LicenseGraph, LicenseCondition, and
+ResolutionSet.
+
+### LicenseGraph
+
+A LicenseGraph is an immutable graph of the targets and dependencies reachable
+from a specific set of root targets. In general, the root targets will be the
+artifacts in a release or distribution. While conceptually immutable, parts of
+the graph may be loaded or evaluated lazily.
+
+Conceptually, the graph itself will always be a directed acyclic graph. One
+representation is a set of directed edges. Another is a set of nodes with
+directed edges to their dependencies.
+
+The edges have annotations, which can distinguish between build tools, runtime
+dependencies, and dependencies like 'contains' that make a derivative work.
+
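+As a rough sketch only (hypothetical names, not this package's real API), the
+graph shape described above might be modeled as:
+
+```go
+// Hypothetical shapes for illustration; the real types live in this package.
+package sketch
+
+// edge is one annotated dependency arc in the license graph.
+type edge struct {
+	target      string          // the depending target's .meta_lic file
+	dependency  string          // the dependency's .meta_lic file
+	annotations map[string]bool // e.g. "static", "dynamic", "toolchain"
+}
+
+// licenseGraph is the dependency graph reachable from a set of root targets.
+type licenseGraph struct {
+	rootFiles []string // root targets, e.g. the artifacts in a release
+	edges     []edge
+}
+```
+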
+### LicenseCondition
+
+A LicenseCondition is an immutable tuple pairing a condition name with an
+originating target. For example, per current policy, a static library licensed
+under an MIT license would pair a "notice" condition with the static library
+target, and a dynamic library licensed under GPL would pair a "restricted"
+condition with the dynamic library target.
+
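+Sketched the same way (hypothetical type, for illustration only):
+
+```go
+package sketch
+
+// licenseCondition pairs a condition name with its originating target.
+type licenseCondition struct {
+	name   string // e.g. "notice" or "restricted"
+	origin string // originating target, e.g. "lib/libmit.so.meta_lic"
+}
+```
+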
+### ResolutionSet
+
+A ResolutionSet is an immutable set of `AttachesTo`, `ActsOn`, `Resolves`
+tuples describing how license conditions apply to targets.
+
+`AttachesTo` is the trigger for acting. Distribution of the target invokes
+the policy.
+
+`ActsOn` is the target to share, give notice for, hide etc.
+
+`Resolves` is the set of conditions that the action resolves.
+
+For most condition types, `ActsOn` will be the target where the condition
+originated. For example, a notice condition policy means attribution or notice
+must be given for the target where the condition originates. Likewise, a
+proprietary condition policy means the privacy of the target where the
+condition originates must be respected. That is, the thing acted on is the
+origin.
+
+Restricted conditions are different. The infectious nature of restricted often
+means sharing code that is not the target where the restricted condition
+originates. Linking an MIT library to a GPL library implies a policy to share
+the MIT library despite the MIT license having no source sharing requirement.
+
+In this case, one or more resolution tuples will have the MIT-licensed module
+in `ActsOn` and the restricted condition originating at the GPL library module
+in `Resolves`. These tuples will have `AttachesTo` set to every target that
+depends on the GPL library, because shipping any of those targets triggers the
+policy to share the code.
+
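+A hedged sketch of the tuple and of the MIT/GPL example above (hypothetical
+names and paths, for illustration only):
+
+```go
+package sketch
+
+// resolution mirrors the AttachesTo/ActsOn/Resolves tuple described above.
+type resolution struct {
+	attachesTo string   // shipping this target triggers the policy
+	actsOn     string   // the target to share, give notice for, etc.
+	resolves   []string // names of the conditions the action resolves
+}
+
+// Shipping the binary implies sharing the MIT library because of the
+// restricted condition originating at the GPL library it links against.
+var example = resolution{
+	attachesTo: "bin/program.meta_lic",
+	actsOn:     "lib/libmit.so.meta_lic",
+	resolves:   []string{"restricted"},
+}
+```
+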
+## Processes
+
+### ReadLicenseGraph
+
+The principal means to ingest license metadata. Given the distribution targets,
+ReadLicenseGraph populates the LicenseGraph for those root targets.
+
+### NoticeIndex.IndexLicenseTexts
+
+IndexLicenseTexts reads, deduplicates and caches license texts for notice
+files. It also reads and caches project metadata for deriving library names.
+
+The algorithm for deriving library names has not been dictated by OSPO policy,
+but reflects a pragmatic attempt to comply with Android policy regarding
+unreleased product names, proprietary partner names, etc.
+
+### projectmetadata.Index.MetadataForProjects
+
+MetadataForProjects reads, deduplicates and caches project METADATA files used
+for notice library names, and various properties appearing in SBOMs.
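+
+### Putting it together
+
+A minimal, hedged walk-through combining the processes above. The root
+`.meta_lic` path and project directory are hypothetical, and the
+`ReadLicenseGraph` signature is assumed from this package's `cmd/` tools:
+
+```go
+package main
+
+import (
+	"fmt"
+	"os"
+
+	"android/soong/tools/compliance"
+	"android/soong/tools/compliance/projectmetadata"
+)
+
+func main() {
+	// Ingest license metadata reachable from one hypothetical root target.
+	lg, err := compliance.ReadLicenseGraph(compliance.FS, os.Stderr,
+		[]string{"out/soong/.intermediates/foo/foo.meta_lic"})
+	if err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+	_ = lg // analyze resolutions, notices, etc. from here
+
+	// Read and cache project METADATA files for notice library names.
+	ix := projectmetadata.NewIndex(compliance.FS)
+	pms, err := ix.MetadataForProjects("external/examplelib")
+	if err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+	fmt.Printf("parsed %d project metadata files\n", len(pms))
+}
+```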
diff --git a/tools/compliance/cmd/checkmetadata/checkmetadata.go b/tools/compliance/cmd/checkmetadata/checkmetadata.go
new file mode 100644
index 0000000..c6c84e4
--- /dev/null
+++ b/tools/compliance/cmd/checkmetadata/checkmetadata.go
@@ -0,0 +1,148 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "android/soong/response"
+ "android/soong/tools/compliance"
+ "android/soong/tools/compliance/projectmetadata"
+)
+
+var (
+ failNoneRequested = fmt.Errorf("\nNo projects requested")
+)
+
+func main() {
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} projectdir {projectdir...}
+
+Tries to open the METADATA.android or METADATA file in each projectdir,
+reporting any errors on stderr.
+
+Reports "FAIL" to stdout if any errors are found and exits with status 1.
+
+Otherwise, reports "PASS" and the number of project metadata files
+found, exiting with status 0.
+`, filepath.Base(os.Args[0]))
+ flags.PrintDefaults()
+ }
+
+ outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+
+ flags.Parse(expandedArgs)
+
+ // Must specify at least one root target.
+ if flags.NArg() == 0 {
+ flags.Usage()
+ os.Exit(2)
+ }
+
+ if len(*outputFile) == 0 {
+ flags.Usage()
+ fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+ os.Exit(2)
+ } else {
+ dir, err := filepath.Abs(filepath.Dir(*outputFile))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ fi, err := os.Stat(dir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+ os.Exit(1)
+ }
+ if !fi.IsDir() {
+ fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+ os.Exit(1)
+ }
+ }
+
+ var ofile io.Writer
+ ofile = os.Stdout
+ var obuf *bytes.Buffer
+ if *outputFile != "-" {
+ obuf = &bytes.Buffer{}
+ ofile = obuf
+ }
+
+ err := checkProjectMetadata(ofile, os.Stderr, compliance.FS, flags.Args()...)
+ if err != nil {
+ if err == failNoneRequested {
+ flags.Usage()
+ }
+ fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+ fmt.Fprintln(ofile, "FAIL")
+ os.Exit(1)
+ }
+ if *outputFile != "-" {
+ err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+ os.Exit(1)
+ }
+ }
+ os.Exit(0)
+}
+
+// checkProjectMetadata implements the checkmetadata utility.
+func checkProjectMetadata(stdout, stderr io.Writer, rootFS fs.FS, projects ...string) error {
+
+ if len(projects) < 1 {
+ return failNoneRequested
+ }
+
+ // Read the project metadata files from `projects`
+ ix := projectmetadata.NewIndex(rootFS)
+ pms, err := ix.MetadataForProjects(projects...)
+ if err != nil {
+ return fmt.Errorf("Unable to read project metadata file(s) %q from %q: %w\n", projects, os.Getenv("PWD"), err)
+ }
+
+ fmt.Fprintf(stdout, "PASS -- parsed %d project metadata files for %d projects\n", len(pms), len(projects))
+ return nil
+}
diff --git a/tools/compliance/cmd/checkmetadata/checkmetadata_test.go b/tools/compliance/cmd/checkmetadata/checkmetadata_test.go
new file mode 100644
index 0000000..cf2090b
--- /dev/null
+++ b/tools/compliance/cmd/checkmetadata/checkmetadata_test.go
@@ -0,0 +1,191 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "strings"
+ "testing"
+
+ "android/soong/tools/compliance"
+)
+
+func TestMain(m *testing.M) {
+ // Change into the parent directory before running the tests
+ // so they can find the testdata directory.
+ if err := os.Chdir(".."); err != nil {
+ fmt.Printf("failed to change to testdata directory: %s\n", err)
+ os.Exit(1)
+ }
+ os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+ tests := []struct {
+ name string
+ projects []string
+ expectedStdout string
+ }{
+ {
+ name: "1p",
+ projects: []string{"firstparty"},
+ expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+ },
+ {
+ name: "notice",
+ projects: []string{"notice"},
+ expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+ },
+ {
+ name: "1p+notice",
+ projects: []string{"firstparty", "notice"},
+ expectedStdout: "PASS -- parsed 2 project metadata files for 2 projects",
+ },
+ {
+ name: "reciprocal",
+ projects: []string{"reciprocal"},
+ expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+ },
+ {
+ name: "1p+notice+reciprocal",
+ projects: []string{"firstparty", "notice", "reciprocal"},
+ expectedStdout: "PASS -- parsed 3 project metadata files for 3 projects",
+ },
+ {
+ name: "restricted",
+ projects: []string{"restricted"},
+ expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+ },
+ {
+ name: "1p+notice+reciprocal+restricted",
+ projects: []string{
+ "firstparty",
+ "notice",
+ "reciprocal",
+ "restricted",
+ },
+ expectedStdout: "PASS -- parsed 4 project metadata files for 4 projects",
+ },
+ {
+ name: "proprietary",
+ projects: []string{"proprietary"},
+ expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+ },
+ {
+ name: "1p+notice+reciprocal+restricted+proprietary",
+ projects: []string{
+ "firstparty",
+ "notice",
+ "reciprocal",
+ "restricted",
+ "proprietary",
+ },
+ expectedStdout: "PASS -- parsed 5 project metadata files for 5 projects",
+ },
+ {
+ name: "missing1",
+ projects: []string{"regressgpl1"},
+ expectedStdout: "PASS -- parsed 0 project metadata files for 1 projects",
+ },
+ {
+ name: "1p+notice+reciprocal+restricted+proprietary+missing1",
+ projects: []string{
+ "firstparty",
+ "notice",
+ "reciprocal",
+ "restricted",
+ "proprietary",
+ "regressgpl1",
+ },
+ expectedStdout: "PASS -- parsed 5 project metadata files for 6 projects",
+ },
+ {
+ name: "missing2",
+ projects: []string{"regressgpl2"},
+ expectedStdout: "PASS -- parsed 0 project metadata files for 1 projects",
+ },
+ {
+ name: "1p+notice+reciprocal+restricted+proprietary+missing1+missing2",
+ projects: []string{
+ "firstparty",
+ "notice",
+ "reciprocal",
+ "restricted",
+ "proprietary",
+ "regressgpl1",
+ "regressgpl2",
+ },
+ expectedStdout: "PASS -- parsed 5 project metadata files for 7 projects",
+ },
+ {
+ name: "missing2+1p+notice+reciprocal+restricted+proprietary+missing1",
+ projects: []string{
+ "regressgpl2",
+ "firstparty",
+ "notice",
+ "reciprocal",
+ "restricted",
+ "proprietary",
+ "regressgpl1",
+ },
+ expectedStdout: "PASS -- parsed 5 project metadata files for 7 projects",
+ },
+ {
+ name: "missing2+1p+notice+missing1+reciprocal+restricted+proprietary",
+ projects: []string{
+ "regressgpl2",
+ "firstparty",
+ "notice",
+ "regressgpl1",
+ "reciprocal",
+ "restricted",
+ "proprietary",
+ },
+ expectedStdout: "PASS -- parsed 5 project metadata files for 7 projects",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ stdout := &bytes.Buffer{}
+ stderr := &bytes.Buffer{}
+
+ projects := make([]string, 0, len(tt.projects))
+ for _, project := range tt.projects {
+ projects = append(projects, "testdata/"+project)
+ }
+ err := checkProjectMetadata(stdout, stderr, compliance.GetFS(""), projects...)
+ if err != nil {
+ t.Fatalf("checkmetadata: error = %v, stderr = %v", err, stderr)
+ return
+ }
+ var actualStdout string
+ for _, s := range strings.Split(stdout.String(), "\n") {
+ ts := strings.TrimLeft(s, " \t")
+ if len(ts) < 1 {
+ continue
+ }
+ if len(actualStdout) > 0 {
+ t.Errorf("checkmetadata: unexpected multiple output lines %q, want %q", actualStdout+"\n"+ts, tt.expectedStdout)
+ }
+ actualStdout = ts
+ }
+ if actualStdout != tt.expectedStdout {
+ t.Errorf("checkmetadata: unexpected stdout %q, want %q", actualStdout, tt.expectedStdout)
+ }
+ })
+ }
+}
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice.go b/tools/compliance/cmd/htmlnotice/htmlnotice.go
index 1a49610..78371ee 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice.go
@@ -24,6 +24,7 @@
"io/fs"
"os"
"path/filepath"
+ "sort"
"strings"
"android/soong/response"
@@ -275,7 +276,8 @@
}
fmt.Fprintln(ctx.stdout, "</body></html>")
- *ctx.deps = ni.InputNoticeFiles()
+ *ctx.deps = ni.InputFiles()
+ sort.Strings(*ctx.deps)
return nil
}
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice_test.go b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
index b927018..8dc1197 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
@@ -78,7 +78,16 @@
usedBy{"highest.apex/lib/libb.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/highest.apex.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -106,7 +115,16 @@
usedBy{"highest.apex/lib/libb.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/highest.apex.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -124,7 +142,16 @@
usedBy{"highest.apex/lib/libb.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/highest.apex.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -154,7 +181,16 @@
usedBy{"highest.apex/lib/libb.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/highest.apex.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -170,7 +206,16 @@
usedBy{"container.zip/libb.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/container.zip.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -182,7 +227,13 @@
usedBy{"application"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/application.meta_lic",
+ "testdata/firstparty/bin/bin3.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -194,7 +245,12 @@
usedBy{"bin/bin1"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -206,7 +262,10 @@
usedBy{"lib/libd.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "notice",
@@ -231,6 +290,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/highest.apex.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
},
},
{
@@ -256,6 +322,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/container.zip.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
},
},
{
@@ -275,6 +348,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/application.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
},
},
{
@@ -296,6 +373,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
},
},
{
@@ -308,7 +388,10 @@
usedBy{"lib/libd.so"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
},
{
condition: "reciprocal",
@@ -333,6 +416,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/bin/bin2.meta_lic",
+ "testdata/reciprocal/highest.apex.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
},
},
{
@@ -358,6 +448,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/bin/bin2.meta_lic",
+ "testdata/reciprocal/container.zip.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
},
},
{
@@ -377,6 +474,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/application.meta_lic",
+ "testdata/reciprocal/bin/bin3.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
},
},
{
@@ -398,6 +499,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
},
},
{
@@ -410,7 +514,10 @@
usedBy{"lib/libd.so"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
+ },
},
{
condition: "restricted",
@@ -440,6 +547,13 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/bin/bin2.meta_lic",
+ "testdata/restricted/highest.apex.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
+ "testdata/restricted/lib/libd.so.meta_lic",
},
},
{
@@ -470,6 +584,13 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/bin/bin2.meta_lic",
+ "testdata/restricted/container.zip.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
+ "testdata/restricted/lib/libd.so.meta_lic",
},
},
{
@@ -489,6 +610,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/application.meta_lic",
+ "testdata/restricted/bin/bin3.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
},
},
{
@@ -513,6 +638,9 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
},
},
{
@@ -525,7 +653,10 @@
usedBy{"lib/libd.so"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/restricted/lib/libd.so.meta_lic",
+ },
},
{
condition: "proprietary",
@@ -555,6 +686,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/bin/bin2.meta_lic",
+ "testdata/proprietary/highest.apex.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
+ "testdata/proprietary/lib/libd.so.meta_lic",
"testdata/restricted/RESTRICTED_LICENSE",
},
},
@@ -586,6 +724,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/bin/bin2.meta_lic",
+ "testdata/proprietary/container.zip.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
+ "testdata/proprietary/lib/libd.so.meta_lic",
"testdata/restricted/RESTRICTED_LICENSE",
},
},
@@ -606,6 +751,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/application.meta_lic",
+ "testdata/proprietary/bin/bin3.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
},
},
{
@@ -627,6 +776,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
},
},
{
@@ -639,7 +791,10 @@
usedBy{"lib/libd.so"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/proprietary/lib/libd.so.meta_lic",
+ },
},
}
for _, tt := range tests {
diff --git a/tools/compliance/cmd/listshare/listshare.go b/tools/compliance/cmd/listshare/listshare.go
index 31bd1b2..4ca6457 100644
--- a/tools/compliance/cmd/listshare/listshare.go
+++ b/tools/compliance/cmd/listshare/listshare.go
@@ -149,6 +149,9 @@
// Group the resolutions by project.
presolution := make(map[string]compliance.LicenseConditionSet)
for _, target := range shareSource.AttachesTo() {
+ if shareSource.IsPureAggregate(target) && !target.LicenseConditions().MatchesAnySet(compliance.ImpliesShared) {
+ continue
+ }
rl := shareSource.Resolutions(target)
sort.Sort(rl)
for _, r := range rl {
diff --git a/tools/compliance/cmd/listshare/listshare_test.go b/tools/compliance/cmd/listshare/listshare_test.go
index c1e38be..fb61583 100644
--- a/tools/compliance/cmd/listshare/listshare_test.go
+++ b/tools/compliance/cmd/listshare/listshare_test.go
@@ -194,13 +194,6 @@
conditions: []string{"restricted"},
},
{
- project: "highest/apex",
- conditions: []string{
- "restricted",
- "restricted_allows_dynamic_linking",
- },
- },
- {
project: "static/binary",
conditions: []string{
"restricted_allows_dynamic_linking",
@@ -225,13 +218,6 @@
conditions: []string{"restricted"},
},
{
- project: "container/zip",
- conditions: []string{
- "restricted",
- "restricted_allows_dynamic_linking",
- },
- },
- {
project: "device/library",
conditions: []string{"restricted_allows_dynamic_linking"},
},
@@ -320,10 +306,6 @@
project: "dynamic/binary",
conditions: []string{"restricted"},
},
- {
- project: "highest/apex",
- conditions: []string{"restricted"},
- },
},
},
{
@@ -336,10 +318,6 @@
conditions: []string{"restricted"},
},
{
- project: "container/zip",
- conditions: []string{"restricted"},
- },
- {
project: "dynamic/binary",
conditions: []string{"restricted"},
},
@@ -381,10 +359,6 @@
project: "bin/threelibraries",
conditions: []string{"restricted"},
},
- {
- project: "container/zip",
- conditions: []string{"restricted"},
- },
},
},
{
@@ -397,10 +371,6 @@
conditions: []string{"restricted"},
},
{
- project: "container/zip",
- conditions: []string{"restricted"},
- },
- {
project: "lib/apache",
conditions: []string{"restricted"},
},
@@ -420,10 +390,6 @@
conditions: []string{"restricted"},
},
{
- project: "container/zip",
- conditions: []string{"restricted"},
- },
- {
project: "lib/apache",
conditions: []string{"restricted"},
},
@@ -447,10 +413,6 @@
conditions: []string{"restricted"},
},
{
- project: "container/zip",
- conditions: []string{"restricted"},
- },
- {
project: "lib/apache",
conditions: []string{"restricted"},
},
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
new file mode 100644
index 0000000..493d331
--- /dev/null
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -0,0 +1,422 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "time"
+
+ "android/soong/response"
+ "android/soong/tools/compliance"
+ "android/soong/tools/compliance/projectmetadata"
+
+ "github.com/google/blueprint/deptools"
+)
+
+var (
+ failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+ failNoLicenses = fmt.Errorf("No licenses found")
+)
+
+type context struct {
+ stdout io.Writer
+ stderr io.Writer
+ rootFS fs.FS
+ product string
+ stripPrefix []string
+ creationTime creationTimeGetter
+}
+
+func (ctx context) strip(installPath string) string {
+ for _, prefix := range ctx.stripPrefix {
+ if strings.HasPrefix(installPath, prefix) {
+ p := strings.TrimPrefix(installPath, prefix)
+ if len(p) == 0 {
+ p = ctx.product
+ }
+ if len(p) == 0 {
+ continue
+ }
+ return p
+ }
+ }
+ return installPath
+}
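+
+// For example, with -strip_prefix out/target/product/fictional/system/apex/
+// (the prefix the restricted tests below pass), strip removes that prefix from
+// matching install paths; if stripping leaves an empty string, the product
+// name is substituted, and if that is also empty the prefix is skipped.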
+
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+ var f multiString
+ flags.Var(&f, name, usage)
+ return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
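+
+// A hypothetical invocation such as
+//   sbom -strip_prefix out/a/ -strip_prefix out/b/ file.meta_lic
+// accumulates both prefixes into the multiString slice.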
+
+func main() {
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an SBOM in SPDX format.
+
+Options:
+`, filepath.Base(os.Args[0]))
+ flags.PrintDefaults()
+ }
+
+ outputFile := flags.String("o", "-", "Where to write the SBOM spdx file. (default stdout)")
+ depsFile := flags.String("d", "", "Where to write the deps file")
+ product := flags.String("product", "", "The name of the product for which the SBOM is generated.")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, e.g. the path to the root (multiple allowed)")
+
+ flags.Parse(expandedArgs)
+
+ // Must specify at least one root target.
+ if flags.NArg() == 0 {
+ flags.Usage()
+ os.Exit(2)
+ }
+
+ if len(*outputFile) == 0 {
+ flags.Usage()
+ fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+ os.Exit(2)
+ } else {
+ dir, err := filepath.Abs(filepath.Dir(*outputFile))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ fi, err := os.Stat(dir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+ os.Exit(1)
+ }
+ if !fi.IsDir() {
+ fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+ os.Exit(1)
+ }
+ }
+
+ var ofile io.Writer
+ ofile = os.Stdout
+ var obuf *bytes.Buffer
+ if *outputFile != "-" {
+ obuf = &bytes.Buffer{}
+ ofile = obuf
+ }
+
+ ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime}
+
+ deps, err := sbomGenerator(ctx, flags.Args()...)
+ if err != nil {
+ if err == failNoneRequested {
+ flags.Usage()
+ }
+ fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+ os.Exit(1)
+ }
+
+ if *outputFile != "-" {
+ err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ }
+
+ if *depsFile != "" {
+ err := deptools.WriteDepFile(*depsFile, *outputFile, deps)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write deps to %q: %s\n", *depsFile, err)
+ os.Exit(1)
+ }
+ }
+ os.Exit(0)
+}
+
+type creationTimeGetter func() time.Time
+
+// actualTime returns the current time in UTC.
+func actualTime() time.Time {
+ return time.Now().UTC()
+}
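+
+// Tests can substitute a fixed getter so the Created: field is deterministic;
+// sbom_test.go expects the Unix epoch (see spdxCreatedTimeTag).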
+
+// replaceSlashes replaces "/" with "-" so the result can be used in package and file SPDXIDs.
+func replaceSlashes(x string) string {
+ return strings.ReplaceAll(x, "/", "-")
+}
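+
+// For example, replaceSlashes("testdata/firstparty/bin/bin1.meta_lic") yields
+// "testdata-firstparty-bin-bin1.meta_lic", which is a valid SPDXID string.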
+
+// getPackageName returns the package name for a target node.
+func getPackageName(_ *context, tn *compliance.TargetNode) string {
+ return replaceSlashes(tn.Name())
+}
+
+// getDocumentName returns the document name for a target node, preferring the
+// product name, then the module name qualified by its project.
+func getDocumentName(ctx *context, tn *compliance.TargetNode, pm *projectmetadata.ProjectMetadata) string {
+ if len(ctx.product) > 0 {
+ return replaceSlashes(ctx.product)
+ }
+ if len(tn.ModuleName()) > 0 {
+ if pm != nil {
+ return replaceSlashes(pm.Name() + ":" + tn.ModuleName())
+ }
+ return replaceSlashes(tn.ModuleName())
+ }
+
+ // TODO: Replace tn.Name() with pm.Name() + parts of the target name
+ return replaceSlashes(tn.Name())
+}
+
+// getDownloadUrl returns the download URL if available (Git, SVN, etc.), or
+// NOASSERTION if no URL is available, none can be determined, or the result is ambiguous.
+func getDownloadUrl(_ *context, pm *projectmetadata.ProjectMetadata) string {
+ if pm == nil {
+ return "NOASSERTION"
+ }
+
+ urlsByTypeName := pm.UrlsByTypeName()
+ if urlsByTypeName == nil {
+ return "NOASSERTION"
+ }
+
+ url := urlsByTypeName.DownloadUrl()
+ if url == "" {
+ return "NOASSERTION"
+ }
+ return url
+}
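+
+// For example, a project whose metadata declares a download URL gets that URL
+// verbatim; a missing project, a missing URL map, or an empty download URL all
+// yield NOASSERTION, which is what the local testdata in sbom_test.go produces.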
+
+// getProjectMetadata returns the project metadata with the most complete
+// information for the target node.
+func getProjectMetadata(_ *context, pmix *projectmetadata.Index,
+ tn *compliance.TargetNode) (*projectmetadata.ProjectMetadata, error) {
+ pms, err := pmix.MetadataForProjects(tn.Projects()...)
+ if err != nil {
+ return nil, fmt.Errorf("Unable to read projects for %q: %w\n", tn, err)
+ }
+ if len(pms) == 0 {
+ return nil, nil
+ }
+
+ // Pick the project metadata that contains the most info needed for sbomGenerator.
+ score := -1
+ index := -1
+ for i := 0; i < len(pms); i++ {
+ tempScore := 0
+ if pms[i].Name() != "" {
+ tempScore += 1
+ }
+ if pms[i].Version() != "" {
+ tempScore += 1
+ }
+ if pms[i].UrlsByTypeName().DownloadUrl() != "" {
+ tempScore += 1
+ }
+
+ if tempScore == score {
+ if pms[i].Project() < pms[index].Project() {
+ index = i
+ }
+ } else if tempScore > score {
+ score = tempScore
+ index = i
+ }
+ }
+ return pms[index], nil
+}
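+
+// For example, metadata carrying a name, a version, and a download URL scores 3
+// and is preferred over metadata with only a name (score 1); equal scores fall
+// back to the lexicographically smaller project path.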
+
+// sbomGenerator implements the SPDX BOM utility.
+//
+// SBOM generation is part of recent US government regulation intended to
+// improve national cybersecurity and enhance software supply chain
+// transparency; see https://www.cisa.gov/sbom.
+//
+// sbomGenerator follows the SPDX specification (https://spdx.github.io/spdx-spec/)
+// and the internal Google SBOM style guide (http://goto.google.com/spdx-style-guide).
+func sbomGenerator(ctx *context, files ...string) ([]string, error) {
+ // Must be at least one root file.
+ if len(files) < 1 {
+ return nil, failNoneRequested
+ }
+
+ pmix := projectmetadata.NewIndex(ctx.rootFS)
+
+ lg, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+
+ if err != nil {
+ return nil, fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+ }
+
+ // Build the license references for the packages; each distinct license text gets a LicenseRef identifier.
+ licenses := make(map[string]string)
+ concludedLicenses := func(licenseTexts []string) string {
+ licenseRefs := make([]string, 0, len(licenseTexts))
+ for _, licenseText := range licenseTexts {
+ license := strings.SplitN(licenseText, ":", 2)[0]
+ if _, ok := licenses[license]; !ok {
+ licenseRef := "LicenseRef-" + replaceSlashes(license)
+ licenses[license] = licenseRef
+ }
+
+ licenseRefs = append(licenseRefs, licenses[license])
+ }
+ if len(licenseRefs) > 1 {
+ return "(" + strings.Join(licenseRefs, " AND ") + ")"
+ } else if len(licenseRefs) == 1 {
+ return licenseRefs[0]
+ }
+ return "NONE"
+ }
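+
+ // For example, a node whose license texts come from "a/LICENSE" and
+ // "b/LICENSE" gets the concluded expression
+ // "(LicenseRef-a-LICENSE AND LicenseRef-b-LICENSE)".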
+
+ isMainPackage := true
+ var mainPackage string
+ visitedNodes := make(map[*compliance.TargetNode]struct{})
+
+ // Perform a breadth-first, top-down walk of the license graph, emitting package information for each node.
+ compliance.WalkTopDownBreadthFirst(nil, lg,
+ func(lg *compliance.LicenseGraph, tn *compliance.TargetNode, path compliance.TargetEdgePath) bool {
+ if err != nil {
+ return false
+ }
+ var pm *projectmetadata.ProjectMetadata
+ pm, err = getProjectMetadata(ctx, pmix, tn)
+ if err != nil {
+ return false
+ }
+
+ if isMainPackage {
+ mainPackage = getDocumentName(ctx, tn, pm)
+ fmt.Fprintf(ctx.stdout, "SPDXVersion: SPDX-2.2\n")
+ fmt.Fprintf(ctx.stdout, "DataLicense: CC-1.0\n")
+ fmt.Fprintf(ctx.stdout, "DocumentName: %s\n", mainPackage)
+ fmt.Fprintf(ctx.stdout, "SPDXID: SPDXRef-DOCUMENT-%s\n", mainPackage)
+ fmt.Fprintf(ctx.stdout, "DocumentNamespace: Android\n")
+ fmt.Fprintf(ctx.stdout, "Creator: Organization: Google LLC\n")
+ fmt.Fprintf(ctx.stdout, "Created: %s\n", ctx.creationTime().Format("2006-01-02T15:04:05Z"))
+ isMainPackage = false
+ }
+
+ relationships := make([]string, 0, 1)
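+ // Collect this node's relationships and flush them via the deferred
+ // function after the package stanza below prints; the recover/re-panic
+ // guard skips the flush when the walk panics (e.g. on an unknown
+ // dependency type).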
+ defer func() {
+ if r := recover(); r != nil {
+ panic(r)
+ }
+ for _, relationship := range relationships {
+ fmt.Fprintln(ctx.stdout, relationship)
+ }
+ }()
+ if len(path) == 0 {
+ relationships = append(relationships,
+ fmt.Sprintf("Relationship: SPDXRef-DOCUMENT-%s DESCRIBES SPDXRef-Package-%s",
+ mainPackage, getPackageName(ctx, tn)))
+ } else {
+ // Check the parent edge's annotation to choose the SPDX relationship type.
+ parent := path[len(path)-1]
+ targetEdge := parent.Edge()
+ if targetEdge.IsRuntimeDependency() {
+ // Adding the dynamic-link annotation as a RUNTIME_DEPENDENCY_OF relationship
+ relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s RUNTIME_DEPENDENCY_OF SPDXRef-Package-%s", getPackageName(ctx, tn), getPackageName(ctx, targetEdge.Target())))
+
+ } else if targetEdge.IsDerivation() {
+ // Adding the derivation annotation as a CONTAINS relationship
+ relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s CONTAINS SPDXRef-Package-%s", getPackageName(ctx, targetEdge.Target()), getPackageName(ctx, tn)))
+
+ } else if targetEdge.IsBuildTool() {
+ // Adding the toolchain annotation as a BUILD_TOOL_OF relationship
+ relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s BUILD_TOOL_OF SPDXRef-Package-%s", getPackageName(ctx, tn), getPackageName(ctx, targetEdge.Target())))
+ } else {
+ panic(fmt.Errorf("Unknown dependency type: %v", targetEdge.Annotations()))
+ }
+ }
+
+ if _, alreadyVisited := visitedNodes[tn]; alreadyVisited {
+ return false
+ }
+ visitedNodes[tn] = struct{}{}
+ pkgName := getPackageName(ctx, tn)
+ fmt.Fprintf(ctx.stdout, "##### Package: %s\n", strings.Replace(pkgName, "-", "/", -2))
+ fmt.Fprintf(ctx.stdout, "PackageName: %s\n", pkgName)
+ if pm != nil && pm.Version() != "" {
+ fmt.Fprintf(ctx.stdout, "PackageVersion: %s\n", pm.Version())
+ }
+ fmt.Fprintf(ctx.stdout, "SPDXID: SPDXRef-Package-%s\n", pkgName)
+ fmt.Fprintf(ctx.stdout, "PackageDownloadLocation: %s\n", getDownloadUrl(ctx, pm))
+ fmt.Fprintf(ctx.stdout, "PackageLicenseConcluded: %s\n", concludedLicenses(tn.LicenseTexts()))
+ return true
+ })
+
+ fmt.Fprintf(ctx.stdout, "##### Non-standard license:\n")
+
+ licenseTexts := make([]string, 0, len(licenses))
+
+ for licenseText := range licenses {
+ licenseTexts = append(licenseTexts, licenseText)
+ }
+
+ sort.Strings(licenseTexts)
+
+ for _, licenseText := range licenseTexts {
+ fmt.Fprintf(ctx.stdout, "LicenseID: %s\n", licenses[licenseText])
+ // open the file
+ f, err := ctx.rootFS.Open(filepath.Clean(licenseText))
+ if err != nil {
+ return nil, fmt.Errorf("error opening license text file %q: %w", licenseText, err)
+ }
+
+ // read the file
+ text, err := io.ReadAll(f)
+ if err != nil {
+ return nil, fmt.Errorf("error reading license text file %q: %w", licenseText, err)
+ }
+ // emit the extracted license text between <text> tags
+ fmt.Fprintf(ctx.stdout, "ExtractedText: <text>%v</text>\n", string(text))
+ }
+
+ deps := licenseTexts
+ return deps, nil
+}
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
new file mode 100644
index 0000000..6b40a27
--- /dev/null
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -0,0 +1,1629 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "os"
+ "reflect"
+ "regexp"
+ "strings"
+ "testing"
+ "time"
+
+ "android/soong/tools/compliance"
+)
+
+var (
+ spdxVersionTag = regexp.MustCompile(`^\s*SPDXVersion: SPDX-2.2\s*$`)
+ spdxDataLicenseTag = regexp.MustCompile(`^\s*DataLicense: CC0-1.0\s*$`)
+ spdxDocumentNameTag = regexp.MustCompile(`^\s*DocumentName:\s*Android.*\s*$`)
+ spdxIDTag = regexp.MustCompile(`^\s*SPDXID:\s*SPDXRef-DOCUMENT-(.*)\s*$`)
+ spdxDocumentNameSpaceTag = regexp.MustCompile(`^\s*DocumentNamespace:\s*Android\s*$`)
+ spdxCreatorOrganizationTag = regexp.MustCompile(`^\s*Creator:\s*Organization:\s*Google LLC\s*$`)
+ spdxCreatedTimeTag = regexp.MustCompile(`^\s*Created: 1970-01-01T00:00:00Z\s*$`)
+ spdxPackageTag = regexp.MustCompile(`^\s*#####\s*Package:\s*(.*)\s*$`)
+ spdxPackageNameTag = regexp.MustCompile(`^\s*PackageName:\s*(.*)\s*$`)
+ spdxPkgIDTag = regexp.MustCompile(`^\s*SPDXID:\s*SPDXRef-Package-(.*)\s*$`)
+ spdxPkgDownloadLocationTag = regexp.MustCompile(`^\s*PackageDownloadLocation:\s*NOASSERTION\s*$`)
+ spdxPkgLicenseDeclaredTag = regexp.MustCompile(`^\s*PackageLicenseConcluded:\s*LicenseRef-(.*)\s*$`)
+ spdxRelationshipTag = regexp.MustCompile(`^\s*Relationship:\s*SPDXRef-(.*)\s*(DESCRIBES|CONTAINS|BUILD_TOOL_OF|RUNTIME_DEPENDENCY_OF)\s*SPDXRef-Package-(.*)\s*$`)
+ spdxLicenseTag = regexp.MustCompile(`^\s*##### Non-standard license:\s*$`)
+ spdxLicenseIDTag = regexp.MustCompile(`^\s*LicenseID: LicenseRef-(.*)\s*$`)
+ spdxExtractedTextTag = regexp.MustCompile(`^\s*ExtractedText:\s*<text>(.*)\s*$`)
+ spdxExtractedClosingTextTag = regexp.MustCompile(`^\s*</text>\s*$`)
+)
+
+func TestMain(m *testing.M) {
+ // Change into the parent directory before running the tests
+ // so they can find the testdata directory.
+ if err := os.Chdir(".."); err != nil {
+ fmt.Printf("failed to change to testdata directory: %s\n", err)
+ os.Exit(1)
+ }
+ os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+ tests := []struct {
+ condition string
+ name string
+ outDir string
+ roots []string
+ stripPrefix string
+ expectedOut []matcher
+ expectedDeps []string
+ }{
+ {
+ condition: "firstparty",
+ name: "apex",
+ roots: []string{"highest.apex.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/firstparty/highest.apex.meta_lic"},
+ packageName{"testdata/firstparty/highest.apex.meta_lic"},
+ spdxPkgID{"testdata/firstparty/highest.apex.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata-firstparty-highest.apex.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
+ packageName{"testdata/firstparty/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/bin/bin2.meta_lic"},
+ packageName{"testdata/firstparty/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/firstparty/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata-firstparty-bin-bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
+ packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata-firstparty-bin-bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/lib/libd.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ },
+ {
+ condition: "firstparty",
+ name: "application",
+ roots: []string{"application.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/firstparty/application.meta_lic"},
+ packageName{"testdata/firstparty/application.meta_lic"},
+ spdxPkgID{"testdata/firstparty/application.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/application.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/firstparty/bin/bin3.meta_lic"},
+ packageName{"testdata/firstparty/bin/bin3.meta_lic"},
+ spdxPkgID{"testdata/firstparty/bin/bin3.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/bin/bin3.meta_lic ", "testdata-firstparty-application.meta_lic", "BUILD_TOOL_OF"},
+ packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/application.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata-firstparty-application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ },
+ {
+ condition: "firstparty",
+ name: "container",
+ roots: []string{"container.zip.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/firstparty/container.zip.meta_lic"},
+ packageName{"testdata/firstparty/container.zip.meta_lic"},
+ spdxPkgID{"testdata/firstparty/container.zip.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/container.zip.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
+ packageName{"testdata/firstparty/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/bin/bin2.meta_lic"},
+ packageName{"testdata/firstparty/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/firstparty/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
+ packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/lib/libd.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ },
+ {
+ condition: "firstparty",
+ name: "binary",
+ roots: []string{"bin/bin1.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
+ packageName{"testdata/firstparty/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/bin/bin1.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
+ packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ },
+ {
+ condition: "firstparty",
+ name: "library",
+ roots: []string{"lib/libd.so.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
+ packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/lib/libd.so.meta_lic", "DESCRIBES"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ },
+ {
+ condition: "notice",
+ name: "apex",
+ roots: []string{"highest.apex.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/notice/highest.apex.meta_lic"},
+ packageName{"testdata/notice/highest.apex.meta_lic"},
+ spdxPkgID{"testdata/notice/highest.apex.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/notice/highest.apex.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/notice/bin/bin1.meta_lic"},
+ packageName{"testdata/notice/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/bin/bin2.meta_lic"},
+ packageName{"testdata/notice/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/notice/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+ packageName{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/libb.so.meta_lic"},
+ packageName{"testdata/notice/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/libc.a.meta_lic"},
+ packageName{"testdata/notice/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/notice/lib/libb.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/notice/lib/libd.so.meta_lic"},
+ packageName{"testdata/notice/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/lib/libd.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ },
+ },
+ {
+ condition: "notice",
+ name: "container",
+ roots: []string{"container.zip.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/notice/container.zip.meta_lic"},
+ packageName{"testdata/notice/container.zip.meta_lic"},
+ spdxPkgID{"testdata/notice/container.zip.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/notice/container.zip.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/notice/bin/bin1.meta_lic"},
+ packageName{"testdata/notice/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/bin/bin2.meta_lic"},
+ packageName{"testdata/notice/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/notice/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+ packageName{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/libb.so.meta_lic"},
+ packageName{"testdata/notice/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/libc.a.meta_lic"},
+ packageName{"testdata/notice/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/notice/lib/libb.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/notice/lib/libd.so.meta_lic"},
+ packageName{"testdata/notice/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/lib/libd.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ },
+ },
+ {
+ condition: "notice",
+ name: "application",
+ roots: []string{"application.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/notice/application.meta_lic"},
+ packageName{"testdata/notice/application.meta_lic"},
+ spdxPkgID{"testdata/notice/application.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata-notice-application.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/notice/bin/bin3.meta_lic"},
+ packageName{"testdata/notice/bin/bin3.meta_lic"},
+ spdxPkgID{"testdata/notice/bin/bin3.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata-notice-bin-bin3.meta_lic ", "testdata/notice/application.meta_lic", "BUILD_TOOL_OF"},
+ packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+ packageName{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/application.meta_lic ", "testdata-notice-lib-liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/libb.so.meta_lic"},
+ packageName{"testdata/notice/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata-notice-lib-libb.so.meta_lic ", "testdata/notice/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ },
+ },
+ {
+ condition: "notice",
+ name: "binary",
+ roots: []string{"bin/bin1.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/notice/bin/bin1.meta_lic"},
+ packageName{"testdata/notice/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/notice/bin/bin1.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+ packageName{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/notice/lib/libc.a.meta_lic"},
+ packageName{"testdata/notice/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ },
+ },
+ {
+ condition: "notice",
+ name: "library",
+ roots: []string{"lib/libd.so.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/notice/lib/libd.so.meta_lic"},
+ packageName{"testdata/notice/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/notice/lib/libd.so.meta_lic", "DESCRIBES"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ },
+ {
+ condition: "reciprocal",
+ name: "apex",
+ roots: []string{"highest.apex.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/reciprocal/highest.apex.meta_lic"},
+ packageName{"testdata/reciprocal/highest.apex.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/highest.apex.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/highest.apex.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
+ packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata-reciprocal-bin-bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/bin/bin2.meta_lic"},
+ packageName{"testdata/reciprocal/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata-reciprocal-bin-bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata/reciprocal/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/lib/libd.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxExtractedText{"$$$Reciprocal License$$$"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/RECIPROCAL_LICENSE",
+ },
+ },
+ {
+ condition: "reciprocal",
+ name: "container",
+ roots: []string{"container.zip.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/reciprocal/container.zip.meta_lic"},
+ packageName{"testdata/reciprocal/container.zip.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/container.zip.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/container.zip.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
+ packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata-reciprocal-bin-bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/bin/bin2.meta_lic"},
+ packageName{"testdata/reciprocal/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata-reciprocal-bin-bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata/reciprocal/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/lib/libd.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxExtractedText{"$$$Reciprocal License$$$"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/RECIPROCAL_LICENSE",
+ },
+ },
+ {
+ condition: "reciprocal",
+ name: "application",
+ roots: []string{"application.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/reciprocal/application.meta_lic"},
+ packageName{"testdata/reciprocal/application.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/application.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/application.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/reciprocal/bin/bin3.meta_lic"},
+ packageName{"testdata/reciprocal/bin/bin3.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/bin/bin3.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata-reciprocal-bin-bin3.meta_lic ", "testdata/reciprocal/application.meta_lic", "BUILD_TOOL_OF"},
+ packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/application.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxExtractedText{"$$$Reciprocal License$$$"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/RECIPROCAL_LICENSE",
+ },
+ },
+ {
+ condition: "reciprocal",
+ name: "binary",
+ roots: []string{"bin/bin1.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
+ packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/bin/bin1.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxExtractedText{"$$$Reciprocal License$$$"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/reciprocal/RECIPROCAL_LICENSE",
+ },
+ },
+ {
+ condition: "reciprocal",
+ name: "library",
+ roots: []string{"lib/libd.so.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/lib/libd.so.meta_lic", "DESCRIBES"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ },
+ },
+ {
+ condition: "restricted",
+ name: "apex",
+ roots: []string{"highest.apex.meta_lic"},
+ stripPrefix: "out/target/product/fictional/system/apex/",
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/restricted/highest.apex.meta_lic"},
+ packageName{"testdata/restricted/highest.apex.meta_lic"},
+ spdxPkgID{"testdata/restricted/highest.apex.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/highest.apex.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/restricted/bin/bin1.meta_lic"},
+ packageName{"testdata/restricted/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/bin/bin2.meta_lic"},
+ packageName{"testdata/restricted/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/restricted/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
+ packageName{"testdata/restricted/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/lib/libb.so.meta_lic"},
+ packageName{"testdata/restricted/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
+ packageName{"testdata/restricted/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/restricted/lib/libb.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
+ packageName{"testdata/restricted/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/lib/libd.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxExtractedText{"$$$Reciprocal License$$$"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxExtractedText{"###Restricted License###"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/restricted/RESTRICTED_LICENSE",
+ },
+ },
+ {
+ condition: "restricted",
+ name: "container",
+ roots: []string{"container.zip.meta_lic"},
+ stripPrefix: "out/target/product/fictional/system/apex/",
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/restricted/container.zip.meta_lic"},
+ packageName{"testdata/restricted/container.zip.meta_lic"},
+ spdxPkgID{"testdata/restricted/container.zip.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/container.zip.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/restricted/bin/bin1.meta_lic"},
+ packageName{"testdata/restricted/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/bin/bin2.meta_lic"},
+ packageName{"testdata/restricted/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/restricted/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
+ packageName{"testdata/restricted/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/lib/libb.so.meta_lic"},
+ packageName{"testdata/restricted/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
+ packageName{"testdata/restricted/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/restricted/lib/libb.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
+ packageName{"testdata/restricted/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/lib/libd.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxExtractedText{"$$$Reciprocal License$$$"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxExtractedText{"###Restricted License###"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/restricted/RESTRICTED_LICENSE",
+ },
+ },
+ {
+ condition: "restricted",
+ name: "binary",
+ roots: []string{"bin/bin1.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/restricted/bin/bin1.meta_lic"},
+ packageName{"testdata/restricted/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/bin/bin1.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
+ packageName{"testdata/restricted/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
+ packageName{"testdata/restricted/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+ spdxExtractedText{"$$$Reciprocal License$$$"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxExtractedText{"###Restricted License###"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/restricted/RESTRICTED_LICENSE",
+ },
+ },
+ {
+ condition: "restricted",
+ name: "library",
+ roots: []string{"lib/libd.so.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
+ packageName{"testdata/restricted/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/lib/libd.so.meta_lic", "DESCRIBES"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ },
+ {
+ condition: "proprietary",
+ name: "apex",
+ roots: []string{"highest.apex.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/proprietary/highest.apex.meta_lic"},
+ packageName{"testdata/proprietary/highest.apex.meta_lic"},
+ spdxPkgID{"testdata/proprietary/highest.apex.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/highest.apex.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
+ packageName{"testdata/proprietary/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/bin/bin2.meta_lic"},
+ packageName{"testdata/proprietary/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/proprietary/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
+ packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata-proprietary-lib-libb.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata-proprietary-lib-libd.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxExtractedText{"@@@Proprietary License@@@"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxExtractedText{"###Restricted License###"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/restricted/RESTRICTED_LICENSE",
+ },
+ },
+ {
+ condition: "proprietary",
+ name: "container",
+ roots: []string{"container.zip.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/proprietary/container.zip.meta_lic"},
+ packageName{"testdata/proprietary/container.zip.meta_lic"},
+ spdxPkgID{"testdata/proprietary/container.zip.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/container.zip.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
+ packageName{"testdata/proprietary/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/bin/bin1.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/bin/bin2.meta_lic"},
+ packageName{"testdata/proprietary/bin/bin2.meta_lic"},
+ spdxPkgID{"testdata/proprietary/bin/bin2.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/lib/libb.so.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
+ packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxRelationship{"Package-testdata-proprietary-lib-libb.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"Package-testdata-proprietary-lib-libd.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxExtractedText{"@@@Proprietary License@@@"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxExtractedText{"###Restricted License###"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/restricted/RESTRICTED_LICENSE",
+ },
+ },
+ {
+ condition: "proprietary",
+ name: "application",
+ roots: []string{"application.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/proprietary/application.meta_lic"},
+ packageName{"testdata/proprietary/application.meta_lic"},
+ spdxPkgID{"testdata/proprietary/application.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/application.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/proprietary/bin/bin3.meta_lic"},
+ packageName{"testdata/proprietary/bin/bin3.meta_lic"},
+ spdxPkgID{"testdata/proprietary/bin/bin3.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/bin/bin3.meta_lic ", "testdata/proprietary/application.meta_lic", "BUILD_TOOL_OF"},
+ packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/application.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/lib/libb.so.meta_lic ", "testdata/proprietary/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxExtractedText{"@@@Proprietary License@@@"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+ spdxExtractedText{"###Restricted License###"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/restricted/RESTRICTED_LICENSE",
+ },
+ },
+ {
+ condition: "proprietary",
+ name: "binary",
+ roots: []string{"bin/bin1.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
+ packageName{"testdata/proprietary/bin/bin1.meta_lic"},
+ spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/bin/bin1.meta_lic", "DESCRIBES"},
+ packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+ packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
+ packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+ spdxExtractedText{"&&&First Party License&&&"},
+ spdxExtractedClosingText{},
+ spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+ spdxExtractedText{"@@@Proprietary License@@@"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/proprietary/PROPRIETARY_LICENSE",
+ },
+ },
+ {
+ condition: "proprietary",
+ name: "library",
+ roots: []string{"lib/libd.so.meta_lic"},
+ expectedOut: []matcher{
+ spdxVersion{},
+ spdxDataLicense{},
+ spdxDocumentName{"Android"},
+ spdxID{"Android"},
+ spdxDocumentNameSpace{},
+ spdxCreatorOrganization{},
+ spdxCreatedTime{},
+ packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
+ packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
+ spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
+ spdxPkgDownloadLocation{"NOASSERTION"},
+ spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+ spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/lib/libd.so.meta_lic", "DESCRIBES"},
+ spdxLicense{},
+ spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+ spdxExtractedText{"%%%Notice License%%%"},
+ spdxExtractedClosingText{},
+ },
+ expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ },
+ }
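+ // Run each test case: generate the SBOM for the case's root files,
+ // compare the output line-by-line against the expected matchers, and
+ // verify the returned license-text dependencies.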
+ for _, tt := range tests {
+ t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+ stdout := &bytes.Buffer{}
+ stderr := &bytes.Buffer{}
+
+ rootFiles := make([]string, 0, len(tt.roots))
+ for _, r := range tt.roots {
+ rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+ }
+
+ ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "Android", []string{tt.stripPrefix}, fakeTime}
+
+ deps, err := sbomGenerator(&ctx, rootFiles...)
+ if err != nil {
+ t.Fatalf("sbom: error = %v, stderr = %v", err, stderr)
+ return
+ }
+ if stderr.Len() > 0 {
+ t.Errorf("sbom: gotStderr = %v, want none", stderr)
+ }
+
+ t.Logf("got stdout: %s", stdout.String())
+
+ t.Logf("want stdout: %s", matcherList(tt.expectedOut).String())
+
+ out := bufio.NewScanner(stdout)
+ lineno := 0
+ for out.Scan() {
+ line := out.Text()
+ if strings.TrimLeft(line, " ") == "" {
+ continue
+ }
+ if len(tt.expectedOut) <= lineno {
+ t.Errorf("sbom: unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
+ } else if !tt.expectedOut[lineno].isMatch(line) {
+ t.Errorf("sbom: unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno])
+ }
+ lineno++
+ }
+ for ; lineno < len(tt.expectedOut); lineno++ {
+ t.Errorf("bom: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno])
+ }
+
+ t.Logf("got deps: %q", deps)
+
+ t.Logf("want deps: %q", tt.expectedDeps)
+
+ if g, w := deps, tt.expectedDeps; !reflect.DeepEqual(g, w) {
+ t.Errorf("unexpected deps, wanted:\n%s\ngot:\n%s\n",
+ strings.Join(w, "\n"), strings.Join(g, "\n"))
+ }
+ })
+ }
+}
+
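+ // matcher checks a single line of generated SPDX output. Each
+ // implementation below recognizes one tag type, and String() renders
+ // the canonical form of the line it expects, for use in test logs.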
+type matcher interface {
+ isMatch(line string) bool
+ String() string
+}
+
+type packageTag struct {
+ name string
+}
+
+func (m packageTag) isMatch(line string) bool {
+ groups := spdxPackageTag.FindStringSubmatch(line)
+ if len(groups) != 2 {
+ return false
+ }
+ return groups[1] == m.name
+}
+
+func (m packageTag) String() string {
+ return "##### Package: " + m.name
+}
+
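+ // packageName matches a "PackageName:" line. replaceSlashes (defined
+ // alongside the generator) is assumed to map '/' to '-' so that file
+ // paths form valid SPDX identifiers.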
+type packageName struct {
+ name string
+}
+
+func (m packageName) isMatch(line string) bool {
+ groups := spdxPackageNameTag.FindStringSubmatch(line)
+ if len(groups) != 2 {
+ return false
+ }
+ return groups[1] == replaceSlashes(m.name)
+}
+
+func (m packageName) String() string {
+ return "PackageName: " + replaceSlashes(m.name)
+}
+
+type spdxID struct {
+ name string
+}
+
+func (m spdxID) isMatch(line string) bool {
+ groups := spdxIDTag.FindStringSubmatch(line)
+ if len(groups) != 2 {
+ return false
+ }
+ return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxID) String() string {
+ return "SPDXID: SPDXRef-DOCUMENT-" + replaceSlashes(m.name)
+}
+
+type spdxPkgID struct {
+ name string
+}
+
+func (m spdxPkgID) isMatch(line string) bool {
+ groups := spdxPkgIDTag.FindStringSubmatch(line)
+ if len(groups) != 2 {
+ return false
+ }
+ return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxPkgID) String() string {
+ return "SPDXID: SPDXRef-Package-" + replaceSlashes(m.name)
+}
+
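+ // The fixed-value matchers below (version, data license, creator, and
+ // creation time) only check that the corresponding tag is present;
+ // their String() methods show the expected literal line.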
+type spdxVersion struct{}
+
+func (m spdxVersion) isMatch(line string) bool {
+ return spdxVersionTag.MatchString(line)
+}
+
+func (m spdxVersion) String() string {
+ return "SPDXVersion: SPDX-2.2"
+}
+
+type spdxDataLicense struct{}
+
+func (m spdxDataLicense) isMatch(line string) bool {
+ return spdxDataLicenseTag.MatchString(line)
+}
+
+func (m spdxDataLicense) String() string {
+ return "DataLicense: CC-1.0"
+}
+
+type spdxDocumentName struct {
+ name string
+}
+
+func (m spdxDocumentName) isMatch(line string) bool {
+ return spdxDocumentNameTag.MatchString(line)
+}
+
+func (m spdxDocumentName) String() string {
+ return "DocumentName: " + m.name
+}
+
+type spdxDocumentNameSpace struct {
+ name string
+}
+
+func (m spdxDocumentNameSpace) isMatch(line string) bool {
+ return spdxDocumentNameSpaceTag.MatchString(line)
+}
+
+func (m spdxDocumentNameSpace) String() string {
+ return "DocumentNameSpace: Android"
+}
+
+type spdxCreatorOrganization struct{}
+
+func (m spdxCreatorOrganization) isMatch(line string) bool {
+ return spdxCreatorOrganizationTag.MatchString(line)
+}
+
+func (m spdxCreatorOrganization) String() string {
+ return "Creator: Organization: Google LLC"
+}
+
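+ // fakeTime pins the SBOM creation time to the Unix epoch so that the
+ // Created tag in the generated output is deterministic across runs.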
+func fakeTime() time.Time {
+ return time.UnixMicro(0).UTC()
+}
+
+type spdxCreatedTime struct{}
+
+func (m spdxCreatedTime) isMatch(line string) bool {
+ return spdxCreatedTimeTag.MatchString(line)
+}
+
+func (m spdxCreatedTime) String() string {
+ return "Created: 1970-01-01T00:00:00Z"
+}
+
+type spdxPkgDownloadLocation struct {
+ name string
+}
+
+func (m spdxPkgDownloadLocation) isMatch(line string) bool {
+ return spdxPkgDownloadLocationTag.MatchString(line)
+}
+
+func (m spdxPkgDownloadLocation) String() string {
+ return "PackageDownloadLocation: " + m.name
+}
+
+type spdxPkgLicenseDeclared struct {
+ name string
+}
+
+func (m spdxPkgLicenseDeclared) isMatch(line string) bool {
+ groups := spdxPkgLicenseDeclaredTag.FindStringSubmatch(line)
+ if len(groups) != 2 {
+ return false
+ }
+ return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxPkgLicenseDeclared) String() string {
+ return "PackageLicenseConcluded: LicenseRef-" + m.name
+}
+
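+ // spdxRelationship matches a "Relationship:" line connecting two SPDX
+ // elements, e.g. DESCRIBES, CONTAINS, or RUNTIME_DEPENDENCY_OF.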
+type spdxRelationship struct {
+ pkg1 string
+ pkg2 string
+ relation string
+}
+
+func (m spdxRelationship) isMatch(line string) bool {
+ groups := spdxRelationshipTag.FindStringSubmatch(line)
+ if len(groups) != 4 {
+ return false
+ }
+ return groups[1] == replaceSlashes(m.pkg1) && groups[2] == m.relation && groups[3] == replaceSlashes(m.pkg2)
+}
+
+func (m spdxRelationship) String() string {
+ return "Relationship: SPDXRef-" + replaceSlashes(m.pkg1) + " " + m.relation + " SPDXRef-Package-" + replaceSlashes(m.pkg2)
+}
+
+type spdxLicense struct{}
+
+func (m spdxLicense) isMatch(line string) bool {
+ return spdxLicenseTag.MatchString(line)
+}
+
+func (m spdxLicense) String() string {
+ return "##### Non-standard license:"
+}
+
+type spdxLicenseID struct {
+ name string
+}
+
+func (m spdxLicenseID) isMatch(line string) bool {
+ groups := spdxLicenseIDTag.FindStringSubmatch(line)
+ if len(groups) != 2 {
+ return false
+ }
+ return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxLicenseID) String() string {
+ return "LicenseID: LicenseRef-" + m.name
+}
+
+type spdxExtractedText struct {
+ name string
+}
+
+func (m spdxExtractedText) isMatch(line string) bool {
+ groups := spdxExtractedTextTag.FindStringSubmatch(line)
+ if len(groups) != 2 {
+ return false
+ }
+ return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxExtractedText) String() string {
+ return "ExtractedText: <text>" + m.name
+}
+
+type spdxExtractedClosingText struct{}
+
+func (m spdxExtractedClosingText) isMatch(line string) bool {
+ return spdxExtractedClosingTextTag.MatchString(line)
+}
+
+func (m spdxExtractedClosingText) String() string {
+ return "</text>"
+}
+
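+ // matcherList renders the complete expected output, one line per
+ // matcher, for logging alongside the actual output.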
+type matcherList []matcher
+
+func (l matcherList) String() string {
+ var sb strings.Builder
+ for _, m := range l {
+ s := m.String()
+ fmt.Fprintf(&sb, "%s\n", s)
+ }
+ return sb.String()
+}
diff --git a/tools/compliance/cmd/testdata/firstparty/METADATA b/tools/compliance/cmd/testdata/firstparty/METADATA
new file mode 100644
index 0000000..62b4481
--- /dev/null
+++ b/tools/compliance/cmd/testdata/firstparty/METADATA
@@ -0,0 +1,6 @@
+# Comments are allowed
+name: "1ptd"
+description: "First Party Test Data"
+third_party {
+ version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/notice/METADATA b/tools/compliance/cmd/testdata/notice/METADATA
new file mode 100644
index 0000000..302dfeb
--- /dev/null
+++ b/tools/compliance/cmd/testdata/notice/METADATA
@@ -0,0 +1,6 @@
+# Comments are allowed
+name: "noticetd"
+description: "Notice Test Data"
+third_party {
+ version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/proprietary/METADATA b/tools/compliance/cmd/testdata/proprietary/METADATA
new file mode 100644
index 0000000..72cc54a
--- /dev/null
+++ b/tools/compliance/cmd/testdata/proprietary/METADATA
@@ -0,0 +1 @@
+# comments are allowed
diff --git a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
index 7ef14e9..a7c3d01 100644
--- a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
@@ -2,7 +2,7 @@
module_classes: "EXECUTABLES"
projects: "standalone/binary"
license_kinds: "SPDX-license-identifier-LGPL-2.0"
-license_conditions: "restricted"
+license_conditions: "restricted_allows_dynamic_linking"
license_texts: "testdata/restricted/RESTRICTED_LICENSE"
is_container: false
built: "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
diff --git a/tools/compliance/cmd/testdata/reciprocal/METADATA b/tools/compliance/cmd/testdata/reciprocal/METADATA
new file mode 100644
index 0000000..50cc2ef
--- /dev/null
+++ b/tools/compliance/cmd/testdata/reciprocal/METADATA
@@ -0,0 +1,5 @@
+# Comments are allowed
+description: "Reciprocal Test Data"
+third_party {
+ version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/restricted/METADATA b/tools/compliance/cmd/testdata/restricted/METADATA
new file mode 100644
index 0000000..6bcf83f
--- /dev/null
+++ b/tools/compliance/cmd/testdata/restricted/METADATA
@@ -0,0 +1,6 @@
+name {
+ id: 1
+}
+third_party {
+ version: 2
+}
diff --git a/tools/compliance/cmd/testdata/restricted/METADATA.android b/tools/compliance/cmd/testdata/restricted/METADATA.android
new file mode 100644
index 0000000..1142499
--- /dev/null
+++ b/tools/compliance/cmd/testdata/restricted/METADATA.android
@@ -0,0 +1,6 @@
+# Comments are allowed
+name: "testdata"
+description: "Restricted Test Data"
+third_party {
+ version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
index 7ef14e9..a7c3d01 100644
--- a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
@@ -2,7 +2,7 @@
module_classes: "EXECUTABLES"
projects: "standalone/binary"
license_kinds: "SPDX-license-identifier-LGPL-2.0"
-license_conditions: "restricted"
+license_conditions: "restricted_allows_dynamic_linking"
license_texts: "testdata/restricted/RESTRICTED_LICENSE"
is_container: false
built: "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
diff --git a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
index a505d4a..101ca19 100644
--- a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
@@ -1,7 +1,7 @@
package_name: "Device"
projects: "device/library"
license_kinds: "SPDX-license-identifier-LGPL-2.0"
-license_conditions: "restricted"
+license_conditions: "restricted_allows_dynamic_linking"
license_texts: "testdata/restricted/RESTRICTED_LICENSE"
is_container: false
built: "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
diff --git a/tools/compliance/cmd/textnotice/textnotice.go b/tools/compliance/cmd/textnotice/textnotice.go
index 9beaf58..450290c 100644
--- a/tools/compliance/cmd/textnotice/textnotice.go
+++ b/tools/compliance/cmd/textnotice/textnotice.go
@@ -23,6 +23,7 @@
"io/fs"
"os"
"path/filepath"
+ "sort"
"strings"
"android/soong/response"
@@ -230,7 +231,8 @@
fmt.Fprintln(ctx.stdout)
}
- *ctx.deps = ni.InputNoticeFiles()
+ *ctx.deps = ni.InputFiles()
+ sort.Strings(*ctx.deps)
return nil
}
diff --git a/tools/compliance/cmd/textnotice/textnotice_test.go b/tools/compliance/cmd/textnotice/textnotice_test.go
index e661a44..a902313 100644
--- a/tools/compliance/cmd/textnotice/textnotice_test.go
+++ b/tools/compliance/cmd/textnotice/textnotice_test.go
@@ -65,7 +65,16 @@
usedBy{"highest.apex/lib/libb.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/highest.apex.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -81,7 +90,16 @@
usedBy{"container.zip/libb.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/container.zip.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -93,7 +111,13 @@
usedBy{"application"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/application.meta_lic",
+ "testdata/firstparty/bin/bin3.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -105,7 +129,12 @@
usedBy{"bin/bin1"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -117,7 +146,10 @@
usedBy{"lib/libd.so"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "notice",
@@ -142,6 +174,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/highest.apex.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
},
},
{
@@ -167,6 +206,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/container.zip.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
},
},
{
@@ -186,6 +232,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/application.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
},
},
{
@@ -207,6 +257,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
},
},
{
@@ -219,7 +272,10 @@
usedBy{"lib/libd.so"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
},
{
condition: "reciprocal",
@@ -244,6 +300,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/bin/bin2.meta_lic",
+ "testdata/reciprocal/highest.apex.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
},
},
{
@@ -269,6 +332,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/bin/bin2.meta_lic",
+ "testdata/reciprocal/container.zip.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
},
},
{
@@ -288,6 +358,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/application.meta_lic",
+ "testdata/reciprocal/bin/bin3.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
},
},
{
@@ -309,6 +383,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
},
},
{
@@ -323,6 +400,7 @@
},
expectedDeps: []string{
"testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
},
},
{
@@ -353,6 +431,13 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/bin/bin2.meta_lic",
+ "testdata/restricted/highest.apex.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
+ "testdata/restricted/lib/libd.so.meta_lic",
},
},
{
@@ -383,6 +468,13 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/bin/bin2.meta_lic",
+ "testdata/restricted/container.zip.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
+ "testdata/restricted/lib/libd.so.meta_lic",
},
},
{
@@ -402,6 +494,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/application.meta_lic",
+ "testdata/restricted/bin/bin3.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
},
},
{
@@ -426,6 +522,9 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
},
},
{
@@ -438,7 +537,10 @@
usedBy{"lib/libd.so"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/restricted/lib/libd.so.meta_lic",
+ },
},
{
condition: "proprietary",
@@ -468,6 +570,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/bin/bin2.meta_lic",
+ "testdata/proprietary/highest.apex.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
+ "testdata/proprietary/lib/libd.so.meta_lic",
"testdata/restricted/RESTRICTED_LICENSE",
},
},
@@ -499,6 +608,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/bin/bin2.meta_lic",
+ "testdata/proprietary/container.zip.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
+ "testdata/proprietary/lib/libd.so.meta_lic",
"testdata/restricted/RESTRICTED_LICENSE",
},
},
@@ -519,6 +635,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/application.meta_lic",
+ "testdata/proprietary/bin/bin3.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
},
},
{
@@ -540,6 +660,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
},
},
{
@@ -552,7 +675,10 @@
usedBy{"lib/libd.so"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/proprietary/lib/libd.so.meta_lic",
+ },
},
}
for _, tt := range tests {
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice.go b/tools/compliance/cmd/xmlnotice/xmlnotice.go
index 2097b7c..c3f8e4c 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice.go
@@ -24,6 +24,7 @@
"io/fs"
"os"
"path/filepath"
+ "sort"
"strings"
"android/soong/response"
@@ -238,7 +239,8 @@
}
fmt.Fprintln(ctx.stdout, "</licenses>")
- *ctx.deps = ni.InputNoticeFiles()
+ *ctx.deps = ni.InputFiles()
+ sort.Strings(*ctx.deps)
return nil
}
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice_test.go b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
index 731e783..551006f 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
@@ -65,7 +65,16 @@
target{"highest.apex/lib/libb.so", "Android"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/highest.apex.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -79,7 +88,16 @@
target{"container.zip/libb.so", "Android"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/bin/bin2.meta_lic",
+ "testdata/firstparty/container.zip.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -89,7 +107,13 @@
target{"application", "Android"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/application.meta_lic",
+ "testdata/firstparty/bin/bin3.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libb.so.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -99,7 +123,12 @@
target{"bin/bin1", "Android"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/bin/bin1.meta_lic",
+ "testdata/firstparty/lib/liba.so.meta_lic",
+ "testdata/firstparty/lib/libc.a.meta_lic",
+ },
},
{
condition: "firstparty",
@@ -109,7 +138,10 @@
target{"lib/libd.so", "Android"},
firstParty{},
},
- expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+ expectedDeps: []string{
+ "testdata/firstparty/FIRST_PARTY_LICENSE",
+ "testdata/firstparty/lib/libd.so.meta_lic",
+ },
},
{
condition: "notice",
@@ -129,6 +161,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/highest.apex.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
},
},
{
@@ -149,6 +188,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/container.zip.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
},
},
{
@@ -164,6 +210,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/application.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
},
},
{
@@ -180,6 +230,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
},
},
{
@@ -190,7 +243,10 @@
target{"lib/libd.so", "External"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
},
{
condition: "reciprocal",
@@ -210,6 +266,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/bin/bin2.meta_lic",
+ "testdata/reciprocal/highest.apex.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
},
},
{
@@ -230,6 +293,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/bin/bin2.meta_lic",
+ "testdata/reciprocal/container.zip.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
},
},
{
@@ -245,6 +315,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/application.meta_lic",
+ "testdata/reciprocal/bin/bin3.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libb.so.meta_lic",
},
},
{
@@ -261,6 +335,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
+ "testdata/reciprocal/bin/bin1.meta_lic",
+ "testdata/reciprocal/lib/liba.so.meta_lic",
+ "testdata/reciprocal/lib/libc.a.meta_lic",
},
},
{
@@ -271,7 +348,10 @@
target{"lib/libd.so", "External"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/reciprocal/lib/libd.so.meta_lic",
+ },
},
{
condition: "restricted",
@@ -294,6 +374,13 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/bin/bin2.meta_lic",
+ "testdata/restricted/highest.apex.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
+ "testdata/restricted/lib/libd.so.meta_lic",
},
},
{
@@ -317,6 +404,13 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/bin/bin2.meta_lic",
+ "testdata/restricted/container.zip.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
+ "testdata/restricted/lib/libd.so.meta_lic",
},
},
{
@@ -332,6 +426,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/application.meta_lic",
+ "testdata/restricted/bin/bin3.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libb.so.meta_lic",
},
},
{
@@ -350,6 +448,9 @@
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/reciprocal/RECIPROCAL_LICENSE",
"testdata/restricted/RESTRICTED_LICENSE",
+ "testdata/restricted/bin/bin1.meta_lic",
+ "testdata/restricted/lib/liba.so.meta_lic",
+ "testdata/restricted/lib/libc.a.meta_lic",
},
},
{
@@ -360,7 +461,10 @@
target{"lib/libd.so", "External"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/restricted/lib/libd.so.meta_lic",
+ },
},
{
condition: "proprietary",
@@ -382,6 +486,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/bin/bin2.meta_lic",
+ "testdata/proprietary/highest.apex.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
+ "testdata/proprietary/lib/libd.so.meta_lic",
"testdata/restricted/RESTRICTED_LICENSE",
},
},
@@ -405,6 +516,13 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/bin/bin2.meta_lic",
+ "testdata/proprietary/container.zip.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
+ "testdata/proprietary/lib/libd.so.meta_lic",
"testdata/restricted/RESTRICTED_LICENSE",
},
},
@@ -421,6 +539,10 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/application.meta_lic",
+ "testdata/proprietary/bin/bin3.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libb.so.meta_lic",
},
},
{
@@ -437,6 +559,9 @@
expectedDeps: []string{
"testdata/firstparty/FIRST_PARTY_LICENSE",
"testdata/proprietary/PROPRIETARY_LICENSE",
+ "testdata/proprietary/bin/bin1.meta_lic",
+ "testdata/proprietary/lib/liba.so.meta_lic",
+ "testdata/proprietary/lib/libc.a.meta_lic",
},
},
{
@@ -447,7 +572,10 @@
target{"lib/libd.so", "External"},
notice{},
},
- expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+ expectedDeps: []string{
+ "testdata/notice/NOTICE_LICENSE",
+ "testdata/proprietary/lib/libd.so.meta_lic",
+ },
},
}
for _, tt := range tests {
diff --git a/tools/compliance/condition.go b/tools/compliance/condition.go
index cfe6f82..3145249 100644
--- a/tools/compliance/condition.go
+++ b/tools/compliance/condition.go
@@ -23,7 +23,7 @@
type LicenseCondition uint16
// LicenseConditionMask is a bitmask for the recognized license conditions.
-const LicenseConditionMask = LicenseCondition(0x3ff)
+const LicenseConditionMask = LicenseCondition(0x1ff)
const (
// UnencumberedCondition identifies public domain or public domain-
@@ -41,21 +41,18 @@
// RestrictedCondition identifies a license with requirement to share
// all source code linked to the module's source.
RestrictedCondition = LicenseCondition(0x0010)
- // RestrictedClasspathExceptionCondition identifies RestrictedCondition
- // waived for dynamic linking from independent modules.
- RestrictedClasspathExceptionCondition = LicenseCondition(0x0020)
// WeaklyRestrictedCondition identifies a RestrictedCondition waived
// for dynamic linking.
- WeaklyRestrictedCondition = LicenseCondition(0x0040)
+ WeaklyRestrictedCondition = LicenseCondition(0x0020)
// ProprietaryCondition identifies a license with source privacy
// requirements.
- ProprietaryCondition = LicenseCondition(0x0080)
+ ProprietaryCondition = LicenseCondition(0x0040)
// ByExceptionOnly identifies a license where policy requires product
// counsel review prior to use.
- ByExceptionOnlyCondition = LicenseCondition(0x0100)
+ ByExceptionOnlyCondition = LicenseCondition(0x0080)
// NotAllowedCondition identifies a license with onerous conditions
// where policy prohibits use.
- NotAllowedCondition = LicenseCondition(0x0200)
+ NotAllowedCondition = LicenseCondition(0x0100)
)
var (
@@ -66,7 +63,6 @@
"notice": NoticeCondition,
"reciprocal": ReciprocalCondition,
"restricted": RestrictedCondition,
- "restricted_with_classpath_exception": RestrictedClasspathExceptionCondition,
"restricted_allows_dynamic_linking": WeaklyRestrictedCondition,
"proprietary": ProprietaryCondition,
"by_exception_only": ByExceptionOnlyCondition,
@@ -87,8 +83,6 @@
return "reciprocal"
case RestrictedCondition:
return "restricted"
- case RestrictedClasspathExceptionCondition:
- return "restricted_with_classpath_exception"
case WeaklyRestrictedCondition:
return "restricted_allows_dynamic_linking"
case ProprietaryCondition:
@@ -98,5 +92,5 @@
case NotAllowedCondition:
return "not_allowed"
}
- panic(fmt.Errorf("unrecognized license condition: %04x", lc))
+ panic(fmt.Errorf("unrecognized license condition: %#v", lc))
}
diff --git a/tools/compliance/condition_test.go b/tools/compliance/condition_test.go
index 778ce4a..16ec72c 100644
--- a/tools/compliance/condition_test.go
+++ b/tools/compliance/condition_test.go
@@ -21,22 +21,22 @@
func TestConditionSetHas(t *testing.T) {
impliesShare := ImpliesShared
- t.Logf("testing with imliesShare=%04x", impliesShare)
+ t.Logf("testing with imliesShare=%#v", impliesShare)
if impliesShare.HasAny(NoticeCondition) {
- t.Errorf("impliesShare.HasAny(\"notice\"=%04x) got true, want false", NoticeCondition)
+ t.Errorf("impliesShare.HasAny(\"notice\"=%#v) got true, want false", NoticeCondition)
}
if !impliesShare.HasAny(RestrictedCondition) {
- t.Errorf("impliesShare.HasAny(\"restricted\"=%04x) got false, want true", RestrictedCondition)
+ t.Errorf("impliesShare.HasAny(\"restricted\"=%#v) got false, want true", RestrictedCondition)
}
if !impliesShare.HasAny(ReciprocalCondition) {
- t.Errorf("impliesShare.HasAny(\"reciprocal\"=%04x) got false, want true", ReciprocalCondition)
+ t.Errorf("impliesShare.HasAny(\"reciprocal\"=%#v) got false, want true", ReciprocalCondition)
}
if impliesShare.HasAny(LicenseCondition(0x0000)) {
- t.Errorf("impliesShare.HasAny(nil=%04x) got true, want false", LicenseCondition(0x0000))
+ t.Errorf("impliesShare.HasAny(nil=%#v) got true, want false", LicenseCondition(0x0000))
}
}
@@ -44,7 +44,7 @@
for expected, condition := range RecognizedConditionNames {
actual := condition.Name()
if expected != actual {
- t.Errorf("unexpected name for condition %04x: got %s, want %s", condition, actual, expected)
+ t.Errorf("unexpected name for condition %#v: got %s, want %s", condition, actual, expected)
}
}
}
@@ -62,6 +62,6 @@
t.Errorf("invalid condition unexpected name: got %s, wanted panic", name)
}()
if !panicked {
- t.Errorf("no expected panic for %04x.Name(): got no panic, wanted panic", lc)
+ t.Errorf("no expected panic for %#v.Name(): got no panic, wanted panic", lc)
}
}
diff --git a/tools/compliance/conditionset_test.go b/tools/compliance/conditionset_test.go
index c91912f..020cc0c 100644
--- a/tools/compliance/conditionset_test.go
+++ b/tools/compliance/conditionset_test.go
@@ -96,14 +96,13 @@
{
name: "everything",
conditions: []string{"unencumbered", "permissive", "notice", "reciprocal", "restricted", "proprietary"},
- plus: &[]string{"restricted_with_classpath_exception", "restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
+ plus: &[]string{"restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
matchingAny: map[string][]string{
"unencumbered": []string{"unencumbered"},
"permissive": []string{"permissive"},
"notice": []string{"notice"},
"reciprocal": []string{"reciprocal"},
"restricted": []string{"restricted"},
- "restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
"restricted_allows_dynamic_linking": []string{"restricted_allows_dynamic_linking"},
"proprietary": []string{"proprietary"},
"by_exception_only": []string{"by_exception_only"},
@@ -116,7 +115,6 @@
"notice",
"reciprocal",
"restricted",
- "restricted_with_classpath_exception",
"restricted_allows_dynamic_linking",
"proprietary",
"by_exception_only",
@@ -131,7 +129,6 @@
"notice",
"reciprocal",
"restricted",
- "restricted_with_classpath_exception",
"restricted_allows_dynamic_linking",
"proprietary",
"by_exception_only",
@@ -151,7 +148,6 @@
"notice",
"reciprocal",
"restricted",
- "restricted_with_classpath_exception",
"restricted_allows_dynamic_linking",
"proprietary",
"by_exception_only",
@@ -168,7 +164,6 @@
"notice": []string{"notice"},
"reciprocal": []string{"reciprocal"},
"restricted": []string{"restricted"},
- "restricted_with_classpath_exception": []string{},
"restricted_allows_dynamic_linking": []string{"restricted_allows_dynamic_linking"},
"proprietary": []string{"proprietary"},
"by_exception_only": []string{"by_exception_only"},
@@ -195,7 +190,6 @@
"notice",
"reciprocal",
"restricted",
- "restricted_with_classpath_exception",
"restricted_allows_dynamic_linking",
"proprietary",
"by_exception_only",
@@ -208,7 +202,6 @@
"notice": []string{"notice"},
"reciprocal": []string{"reciprocal"},
"restricted": []string{"restricted"},
- "restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
"restricted_allows_dynamic_linking": []string{},
"proprietary": []string{"proprietary"},
"by_exception_only": []string{"by_exception_only"},
@@ -221,7 +214,6 @@
"notice",
"reciprocal",
"restricted",
- "restricted_with_classpath_exception",
"proprietary",
"by_exception_only",
"not_allowed",
@@ -235,7 +227,6 @@
"notice",
"reciprocal",
"restricted",
- "restricted_with_classpath_exception",
"restricted_allows_dynamic_linking",
"proprietary",
"by_exception_only",
@@ -247,7 +238,6 @@
"notice",
"reciprocal",
"restricted",
- "restricted_with_classpath_exception",
"restricted_allows_dynamic_linking",
"proprietary",
"by_exception_only",
@@ -259,7 +249,6 @@
"notice": []string{},
"reciprocal": []string{},
"restricted": []string{},
- "restricted_with_classpath_exception": []string{},
"restricted_allows_dynamic_linking": []string{},
"proprietary": []string{},
"by_exception_only": []string{},
@@ -270,21 +259,20 @@
},
{
name: "restrictedplus",
- conditions: []string{"restricted", "restricted_with_classpath_exception", "restricted_allows_dynamic_linking"},
+ conditions: []string{"restricted", "restricted_allows_dynamic_linking"},
plus: &[]string{"permissive", "notice", "restricted", "proprietary"},
matchingAny: map[string][]string{
"unencumbered": []string{},
"permissive": []string{"permissive"},
"notice": []string{"notice"},
"restricted": []string{"restricted"},
- "restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
"restricted_allows_dynamic_linking": []string{"restricted_allows_dynamic_linking"},
"proprietary": []string{"proprietary"},
"restricted|proprietary": []string{"restricted", "proprietary"},
"by_exception_only": []string{},
"proprietary|by_exception_only": []string{"proprietary"},
},
- expected: []string{"permissive", "notice", "restricted", "restricted_with_classpath_exception", "restricted_allows_dynamic_linking", "proprietary"},
+ expected: []string{"permissive", "notice", "restricted", "restricted_allows_dynamic_linking", "proprietary"},
},
}
for _, tt := range tests {
@@ -342,11 +330,11 @@
actual := cs.MatchingAny(toConditions(strings.Split(data, "|"))...)
actualNames := actual.Names()
- t.Logf("MatchingAny(%s): actual set %04x %s", data, actual, actual.String())
- t.Logf("MatchingAny(%s): expected set %04x %s", data, expected, expected.String())
+ t.Logf("MatchingAny(%s): actual set %#v %s", data, actual, actual.String())
+ t.Logf("MatchingAny(%s): expected set %#v %s", data, expected, expected.String())
if actual != expected {
- t.Errorf("MatchingAny(%s): got %04x, want %04x", data, actual, expected)
+ t.Errorf("MatchingAny(%s): got %#v, want %#v", data, actual, expected)
continue
}
if len(actualNames) != len(expectedNames) {
@@ -382,11 +370,11 @@
actual := cs.MatchingAnySet(NewLicenseConditionSet(toConditions(strings.Split(data, "|"))...))
actualNames := actual.Names()
- t.Logf("MatchingAnySet(%s): actual set %04x %s", data, actual, actual.String())
- t.Logf("MatchingAnySet(%s): expected set %04x %s", data, expected, expected.String())
+ t.Logf("MatchingAnySet(%s): actual set %#v %s", data, actual, actual.String())
+ t.Logf("MatchingAnySet(%s): expected set %#v %s", data, expected, expected.String())
if actual != expected {
- t.Errorf("MatchingAnySet(%s): got %04x, want %04x", data, actual, expected)
+ t.Errorf("MatchingAnySet(%s): got %#v, want %#v", data, actual, expected)
continue
}
if len(actualNames) != len(expectedNames) {
@@ -426,11 +414,11 @@
actualNames := actual.Names()
- t.Logf("actual license condition set: %04x %s", actual, actual.String())
- t.Logf("expected license condition set: %04x %s", expected, expected.String())
+ t.Logf("actual license condition set: %#v %s", actual, actual.String())
+ t.Logf("expected license condition set: %#v %s", expected, expected.String())
if actual != expected {
- t.Errorf("checkExpected: got %04x, want %04x", actual, expected)
+ t.Errorf("checkExpected: got %#v, want %#v", actual, expected)
return false
}
@@ -487,7 +475,7 @@
notExpected := (AllLicenseConditions &^ expected)
notExpectedList := notExpected.AsList()
- t.Logf("not expected license condition set: %04x %s", notExpected, notExpected.String())
+ t.Logf("not expected license condition set: %#v %s", notExpected, notExpected.String())
if len(tt.expected) == 0 {
if actual.HasAny(append(expectedConditions, notExpectedList...)...) {
@@ -526,11 +514,11 @@
actualNames := actual.Names()
- t.Logf("actual license condition set: %04x %s", actual, actual.String())
- t.Logf("expected license condition set: %04x %s", expected, expected.String())
+ t.Logf("actual license condition set: %#v %s", actual, actual.String())
+ t.Logf("expected license condition set: %#v %s", expected, expected.String())
if actual != expected {
- t.Errorf("checkExpectedSet: got %04x, want %04x", actual, expected)
+ t.Errorf("checkExpectedSet: got %#v, want %#v", actual, expected)
return false
}
@@ -581,7 +569,7 @@
}
notExpected := (AllLicenseConditions &^ expected)
- t.Logf("not expected license condition set: %04x %s", notExpected, notExpected.String())
+ t.Logf("not expected license condition set: %#v %s", notExpected, notExpected.String())
if len(tt.expected) == 0 {
if actual.MatchesAnySet(expected, notExpected) {
@@ -606,10 +594,10 @@
t.Errorf("actual.Difference({expected}).IsEmpty(): want true, got false")
}
if expected != actual.Intersection(expected) {
- t.Errorf("expected == actual.Intersection({expected}): want true, got false (%04x != %04x)", expected, actual.Intersection(expected))
+ t.Errorf("expected == actual.Intersection({expected}): want true, got false (%#v != %#v)", expected, actual.Intersection(expected))
}
if actual != actual.Intersection(expected) {
- t.Errorf("actual == actual.Intersection({expected}): want true, got false (%04x != %04x)", actual, actual.Intersection(expected))
+ t.Errorf("actual == actual.Intersection({expected}): want true, got false (%#v != %#v)", actual, actual.Intersection(expected))
}
return true
}
diff --git a/tools/compliance/doc.go b/tools/compliance/doc.go
index a47c1cf..5ced9ee 100644
--- a/tools/compliance/doc.go
+++ b/tools/compliance/doc.go
@@ -11,6 +11,10 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+
+// Much of this content also appears in README.md.
+// When changing this file, consider whether the change also applies to README.md.
+
/*
Package compliance provides an approved means for reading, consuming, and
@@ -31,6 +35,13 @@
artifacts in a release or distribution. While conceptually immutable, parts of
the graph may be loaded or evaluated lazily.
+Conceptually, the graph itself will always be a directed acyclic graph. One
+representation is a set of directed edges. Another is a set of nodes with
+directed edges to their dependencies.
+
+The edges have annotations, which can distinguish between build tools, runtime
+dependencies, and dependencies like 'contains' that make a derivative work.
+
LicenseCondition
----------------
@@ -51,17 +62,13 @@
`ActsOn` is the target to share, give notice for, hide etc.
-`Resolves` is the license condition that the action resolves.
+`Resolves` is the set of condition types that the action resolves.
-Remember: Each license condition pairs a condition name with an originating
-target so each resolution in a ResolutionSet has two targets it applies to and
-one target from which it originates, all of which may be the same target.
-
-For most condition types, `ActsOn` and `Resolves.Origin` will be the same
-target. For example, a notice condition policy means attribution or notice must
-be given for the target where the condition originates. Likewise, a proprietary
-condition policy means the privacy of the target where the condition originates
-must be respected. i.e. The thing acted on is the origin.
+For most condition types, `ActsOn` will be the target where the condition
+originated. For example, a notice condition policy means attribution or notice
+must be given for the target where the condition originates. Likewise, a
+proprietary condition policy means the privacy of the target where the
+condition originates must be respected, i.e. the thing acted on is the origin.
Restricted conditions are different. The infectious nature of restricted often
means sharing code that is not the target where the restricted condition
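
To make the annotated-DAG description above concrete, here is a hedged sketch of that shape: nodes with directed edges to their dependencies, each edge carrying annotations. The types are illustrative stand-ins, not the package's actual `TargetNode`/`TargetEdge`:

```go
package main

import "fmt"

type node struct {
	name  string
	edges []edge
}

// edge points from a target to one dependency and records how the
// dependency is used (e.g. "static", "dynamic", "toolchain").
type edge struct {
	dep         *node
	annotations map[string]bool
}

func main() {
	lib := &node{name: "liblog.meta_lic"}
	bin := &node{name: "bin.meta_lic", edges: []edge{
		{dep: lib, annotations: map[string]bool{"dynamic": true}},
	}}
	for _, e := range bin.edges {
		fmt.Printf("%s -> %s (dynamic=%v)\n", bin.name, e.dep.name, e.annotations["dynamic"])
	}
}
```
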
diff --git a/tools/compliance/go.mod b/tools/compliance/go.mod
index 61e2158..088915a 100644
--- a/tools/compliance/go.mod
+++ b/tools/compliance/go.mod
@@ -4,9 +4,17 @@
replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
-require android/soong v0.0.0
+require (
+ android/soong v0.0.0
+ github.com/google/blueprint v0.0.0
+)
-replace android/soong v0.0.0 => ../../../soong
+require golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
+
+replace android/soong v0.0.0 => ../../../soong
+
+replace github.com/google/blueprint => ../../../blueprint
+
// Indirect deps from golang-protobuf
exclude github.com/golang/protobuf v1.5.0
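
For context on the `replace` lines being added: a go.mod `replace` points a module path at a local checkout instead of a network fetch, which is how these tools build against sibling directories in the source tree. A minimal example with illustrative paths:

```
module example.com/compliance-tool

go 1.18

require github.com/google/blueprint v0.0.0

// Resolve blueprint from the local tree rather than a module proxy.
replace github.com/google/blueprint => ../../../blueprint
```
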
diff --git a/tools/compliance/go.sum b/tools/compliance/go.sum
new file mode 100644
index 0000000..cbe76d9
--- /dev/null
+++ b/tools/compliance/go.sum
@@ -0,0 +1,2 @@
+golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
+golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
diff --git a/tools/compliance/graph.go b/tools/compliance/graph.go
index e73ab46..80a2f47 100644
--- a/tools/compliance/graph.go
+++ b/tools/compliance/graph.go
@@ -58,13 +58,11 @@
/// (guarded by mu)
targets map[string]*TargetNode
- // wgBU becomes non-nil when the bottom-up resolve begins and reaches 0
- // (i.e. Wait() proceeds) when the bottom-up resolve completes. (guarded by mu)
- wgBU *sync.WaitGroup
+ // onceBottomUp ensures the bottom-up resolve walk happens only once.
+ onceBottomUp sync.Once
- // wgTD becomes non-nil when the top-down resolve begins and reaches 0 (i.e. Wait()
- // proceeds) when the top-down resolve completes. (guarded by mu)
- wgTD *sync.WaitGroup
+ // onceTopDown ensures the top-down resolve walk happens only once.
+ onceTopDown sync.Once
// shippedNodes caches the results of a full walk of nodes identifying targets
// distributed either directly or as derivative works. (creation guarded by mu)
@@ -139,6 +137,24 @@
return e.annotations
}
+// IsRuntimeDependency returns true for edges representing shared libraries
+// linked dynamically at runtime.
+func (e *TargetEdge) IsRuntimeDependency() bool {
+ return edgeIsDynamicLink(e)
+}
+
+// IsDerivation returns true for edges where the target is a derivative
+// work of the dependency.
+func (e *TargetEdge) IsDerivation() bool {
+ return edgeIsDerivation(e)
+}
+
+// IsBuildTool returns true for edges where the target is built
+// by the dependency.
+func (e *TargetEdge) IsBuildTool() bool {
+ return !edgeIsDerivation(e) && !edgeIsDynamicLink(e)
+}
+
// String returns a human-readable string representation of the edge.
func (e *TargetEdge) String() string {
return fmt.Sprintf("%s -[%s]> %s", e.target.name, strings.Join(e.annotations.AsList(), ", "), e.dependency.name)
@@ -188,6 +204,11 @@
return s.edge.dependency
}
+// Edge describes the target edge.
+func (s TargetEdgePathSegment) Edge() *TargetEdge {
+ return s.edge
+}
+
// Annotations describes the type of edge by the set of annotations attached to
// it.
//
@@ -300,21 +321,9 @@
return tn.proto.GetPackageName()
}
-// ModuleTypes returns the list of module types implementing the target.
-// (unordered)
-//
-// In an ideal world, only 1 module type would implement each target, but the
-// interactions between Soong and Make for host versus product and for a
-// variety of architectures sometimes causes multiple module types per target
-// (often a regular build target and a prebuilt.)
-func (tn *TargetNode) ModuleTypes() []string {
- return append([]string{}, tn.proto.ModuleTypes...)
-}
-
-// ModuleClasses returns the list of module classes implementing the target.
-// (unordered)
-func (tn *TargetNode) ModuleClasses() []string {
- return append([]string{}, tn.proto.ModuleClasses...)
+// ModuleName returns the module name of the target.
+func (tn *TargetNode) ModuleName() string {
+ return tn.proto.GetModuleName()
}
// Projects returns the projects defining the target node. (unordered)
@@ -326,14 +335,6 @@
return append([]string{}, tn.proto.Projects...)
}
-// LicenseKinds returns the list of license kind names for the module or
-// target. (unordered)
-//
-// e.g. SPDX-license-identifier-MIT or legacy_proprietary
-func (tn *TargetNode) LicenseKinds() []string {
- return append([]string{}, tn.proto.LicenseKinds...)
-}
-
// LicenseConditions returns a copy of the set of license conditions
// originating at the target. The values that appear and how each is resolved
// is a matter of policy. (unordered)
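
The three new predicates split every edge into exactly one of runtime dependency, build tool, or derivation. A self-contained sketch of that split, assuming (as the policy code suggests) that a `dynamic` annotation marks runtime linking and `toolchain` marks build tools:

```go
package main

import "fmt"

// classify mirrors the predicate logic: dynamic edges are runtime
// dependencies, non-dynamic toolchain edges are build tools, and
// everything else makes a derivative work.
func classify(dynamic, toolchain bool) string {
	switch {
	case dynamic:
		return "runtime dependency" // IsRuntimeDependency
	case toolchain:
		return "build tool" // IsBuildTool
	default:
		return "derivation" // IsDerivation
	}
}

func main() {
	fmt.Println(classify(false, false)) // derivation (e.g. static linking)
	fmt.Println(classify(true, false))  // runtime dependency
	fmt.Println(classify(false, true))  // build tool
}
```
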
diff --git a/tools/compliance/noticeindex.go b/tools/compliance/noticeindex.go
index f082383..dbfede6 100644
--- a/tools/compliance/noticeindex.go
+++ b/tools/compliance/noticeindex.go
@@ -15,7 +15,6 @@
package compliance
import (
- "bufio"
"crypto/md5"
"fmt"
"io"
@@ -25,16 +24,11 @@
"regexp"
"sort"
"strings"
-)
-const (
- noProjectName = "\u2205"
+ "android/soong/tools/compliance/projectmetadata"
)
var (
- nameRegexp = regexp.MustCompile(`^\s*name\s*:\s*"(.*)"\s*$`)
- descRegexp = regexp.MustCompile(`^\s*description\s*:\s*"(.*)"\s*$`)
- versionRegexp = regexp.MustCompile(`^\s*version\s*:\s*"(.*)"\s*$`)
licensesPathRegexp = regexp.MustCompile(`licen[cs]es?/`)
)
@@ -43,6 +37,8 @@
type NoticeIndex struct {
// lg identifies the license graph to which the index applies.
lg *LicenseGraph
+ // pmix indexes project metadata
+ pmix *projectmetadata.Index
// rs identifies the set of resolutions upon which the index is based.
rs ResolutionSet
// shipped identifies the set of target nodes shipped directly or as derivative works.
@@ -75,6 +71,7 @@
}
ni := &NoticeIndex{
lg: lg,
+ pmix: projectmetadata.NewIndex(rootFS),
rs: rs,
shipped: ShippedNodes(lg),
rootFS: rootFS,
@@ -110,9 +107,12 @@
return hashes, nil
}
- link := func(tn *TargetNode, hashes map[hash]struct{}, installPaths []string) {
+ link := func(tn *TargetNode, hashes map[hash]struct{}, installPaths []string) error {
for h := range hashes {
- libName := ni.getLibName(tn, h)
+ libName, err := ni.getLibName(tn, h)
+ if err != nil {
+ return err
+ }
if _, ok := ni.libHash[libName]; !ok {
ni.libHash[libName] = make(map[hash]struct{})
}
@@ -145,6 +145,11 @@
}
}
}
+ return nil
+ }
+
+ cacheMetadata := func(tn *TargetNode) {
+ ni.pmix.MetadataForProjects(tn.Projects()...)
}
// returns error from walk below.
@@ -157,13 +162,17 @@
if !ni.shipped.Contains(tn) {
return false
}
+ go cacheMetadata(tn)
installPaths := getInstallPaths(tn, path)
var hashes map[hash]struct{}
hashes, err = index(tn)
if err != nil {
return false
}
- link(tn, hashes, installPaths)
+ err = link(tn, hashes, installPaths)
+ if err != nil {
+ return false
+ }
if tn.IsContainer() {
return true
}
@@ -173,7 +182,10 @@
if err != nil {
return false
}
- link(r.actsOn, hashes, installPaths)
+ err = link(r.actsOn, hashes, installPaths)
+ if err != nil {
+ return false
+ }
}
return false
})
@@ -214,12 +226,18 @@
close(c)
}()
return c
+
}
-// InputNoticeFiles returns the list of files that were hashed during IndexLicenseTexts.
-func (ni *NoticeIndex) InputNoticeFiles() []string {
- files := append([]string(nil), ni.files...)
- sort.Strings(files)
+// InputFiles returns the complete list of files read during indexing.
+func (ni *NoticeIndex) InputFiles() []string {
+ projectMeta := ni.pmix.AllMetadataFiles()
+ files := make([]string, 0, len(ni.files) + len(ni.lg.targets) + len(projectMeta))
+ files = append(files, ni.files...)
+ for f := range ni.lg.targets {
+ files = append(files, f)
+ }
+ files = append(files, projectMeta...)
return files
}
@@ -308,15 +326,18 @@
}
// getLibName returns the name of the library associated with `noticeFor`.
-func (ni *NoticeIndex) getLibName(noticeFor *TargetNode, h hash) string {
+func (ni *NoticeIndex) getLibName(noticeFor *TargetNode, h hash) (string, error) {
for _, text := range noticeFor.LicenseTexts() {
if !strings.Contains(text, ":") {
if ni.hash[text].key != h.key {
continue
}
- ln := ni.checkMetadataForLicenseText(noticeFor, text)
+ ln, err := ni.checkMetadataForLicenseText(noticeFor, text)
+ if err != nil {
+ return "", err
+ }
if len(ln) > 0 {
- return ln
+ return ln, nil
}
continue
}
@@ -331,17 +352,20 @@
if err != nil {
continue
}
- return ln
+ return ln, nil
}
// use name from METADATA if available
- ln := ni.checkMetadata(noticeFor)
+ ln, err := ni.checkMetadata(noticeFor)
+ if err != nil {
+ return "", err
+ }
if len(ln) > 0 {
- return ln
+ return ln, nil
}
// use package_name: from license{} module if available
pn := noticeFor.PackageName()
if len(pn) > 0 {
- return pn
+ return pn, nil
}
for _, p := range noticeFor.Projects() {
if strings.HasPrefix(p, "prebuilts/") {
@@ -360,18 +384,17 @@
continue
}
}
- for r, prefix := range SafePrebuiltPrefixes {
- match := r.FindString(licenseText)
+ for _, safePrebuiltPrefix := range safePrebuiltPrefixes {
+ match := safePrebuiltPrefix.re.FindString(licenseText)
if len(match) == 0 {
continue
}
- strip := SafePathPrefixes[prefix]
- if strip {
+ if safePrebuiltPrefix.strip {
// strip entire prefix
match = licenseText[len(match):]
} else {
// strip from prebuilts/ until safe prefix
- match = licenseText[len(match)-len(prefix):]
+ match = licenseText[len(match)-len(safePrebuiltPrefix.prefix):]
}
// remove LICENSE or NOTICE or other filename
li := strings.LastIndex(match, "/")
@@ -386,17 +409,17 @@
match = match[:li]
}
}
- return match
+ return match, nil
}
break
}
}
- for prefix, strip := range SafePathPrefixes {
- if strings.HasPrefix(p, prefix) {
- if strip {
- return p[len(prefix):]
+ for _, safePathPrefix := range safePathPrefixes {
+ if strings.HasPrefix(p, safePathPrefix.prefix) {
+ if safePathPrefix.strip {
+ return p[len(safePathPrefix.prefix):], nil
} else {
- return p
+ return p, nil
}
}
}
@@ -411,35 +434,26 @@
if fi > 0 {
n = n[:fi]
}
- return n
+ return n, nil
}
// checkMetadata tries to look up a library name from a METADATA file associated with `noticeFor`.
-func (ni *NoticeIndex) checkMetadata(noticeFor *TargetNode) string {
- for _, p := range noticeFor.Projects() {
- if name, ok := ni.projectName[p]; ok {
- if name == noProjectName {
- continue
- }
- return name
- }
- name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
- if err != nil {
- ni.projectName[p] = noProjectName
- continue
- }
- if len(name) == 0 {
- ni.projectName[p] = noProjectName
- continue
- }
- ni.projectName[p] = name
- return name
+func (ni *NoticeIndex) checkMetadata(noticeFor *TargetNode) (string, error) {
+ pms, err := ni.pmix.MetadataForProjects(noticeFor.Projects()...)
+ if err != nil {
+ return "", err
}
- return ""
+ for _, pm := range pms {
+ name := pm.VersionedName()
+ if name != "" {
+ return name, nil
+ }
+ }
+ return "", nil
}
// checkMetadataForLicenseText tries to look up a library name from the METADATA of the project containing `licenseText`.
-func (ni *NoticeIndex) checkMetadataForLicenseText(noticeFor *TargetNode, licenseText string) string {
+func (ni *NoticeIndex) checkMetadataForLicenseText(noticeFor *TargetNode, licenseText string) (string, error) {
p := ""
for _, proj := range noticeFor.Projects() {
if strings.HasPrefix(licenseText, proj) {
@@ -457,79 +471,17 @@
p = filepath.Dir(p)
continue
}
- return ""
+ return "", nil
}
}
- if name, ok := ni.projectName[p]; ok {
- if name == noProjectName {
- return ""
- }
- return name
- }
- name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
- if err == nil && len(name) > 0 {
- ni.projectName[p] = name
- return name
- }
- ni.projectName[p] = noProjectName
- return ""
-}
-
-// checkMetadataFile tries to look up a library name from a METADATA file at `path`.
-func (ni *NoticeIndex) checkMetadataFile(path string) (string, error) {
- f, err := ni.rootFS.Open(path)
+ pms, err := ni.pmix.MetadataForProjects(p)
if err != nil {
return "", err
}
- name := ""
- description := ""
- version := ""
- s := bufio.NewScanner(f)
- for s.Scan() {
- line := s.Text()
- m := nameRegexp.FindStringSubmatch(line)
- if m != nil {
- if 1 < len(m) && m[1] != "" {
- name = m[1]
- }
- if version != "" {
- break
- }
- continue
- }
- m = versionRegexp.FindStringSubmatch(line)
- if m != nil {
- if 1 < len(m) && m[1] != "" {
- version = m[1]
- }
- if name != "" {
- break
- }
- continue
- }
- m = descRegexp.FindStringSubmatch(line)
- if m != nil {
- if 1 < len(m) && m[1] != "" {
- description = m[1]
- }
- }
+ if pms == nil {
+ return "", nil
}
- _ = s.Err()
- _ = f.Close()
- if name != "" {
- if version != "" {
- if version[0] == 'v' || version[0] == 'V' {
- return name + "_" + version, nil
- } else {
- return name + "_v_" + version, nil
- }
- }
- return name, nil
- }
- if description != "" {
- return description, nil
- }
- return "", nil
+ return pms[0].VersionedName(), nil
}
// addText reads and indexes the content of a license text file.
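
The refactor above replaces the hand-rolled METADATA scanning (and its `projectName` cache) with `projectmetadata.Index`, which parses each project's metadata once and serves later lookups from cache. A hedged sketch of that memoizing shape; the real index reads METADATA files from `rootFS`, so the parse below is a stand-in:

```go
package main

import (
	"fmt"
	"sync"
)

type index struct {
	mu    sync.Mutex
	cache map[string]string
}

func (ix *index) metadataForProject(proj string) string {
	ix.mu.Lock()
	defer ix.mu.Unlock()
	if name, ok := ix.cache[proj]; ok {
		return name // parsed earlier; no second read
	}
	name := "name-from-" + proj // stand-in for parsing proj/METADATA
	ix.cache[proj] = name
	return name
}

func main() {
	ix := &index{cache: make(map[string]string)}
	fmt.Println(ix.metadataForProject("external/zlib"))
	fmt.Println(ix.metadataForProject("external/zlib")) // cache hit
}
```
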
diff --git a/tools/compliance/policy_policy.go b/tools/compliance/policy_policy.go
index 60bdf48..23e25c6 100644
--- a/tools/compliance/policy_policy.go
+++ b/tools/compliance/policy_policy.go
@@ -29,30 +29,31 @@
"toolchain": "toolchain",
}
- // SafePathPrefixes maps the path prefixes presumed not to contain any
+ // safePathPrefixes maps the path prefixes presumed not to contain any
// proprietary or confidential pathnames to whether to strip the prefix
// from the path when used as the library name for notices.
- SafePathPrefixes = map[string]bool{
- "external/": true,
- "art/": false,
- "build/": false,
- "cts/": false,
- "dalvik/": false,
- "developers/": false,
- "development/": false,
- "frameworks/": false,
- "packages/": true,
- "prebuilts/": false,
- "sdk/": false,
- "system/": false,
- "test/": false,
- "toolchain/": false,
- "tools/": false,
+ safePathPrefixes = []safePathPrefixesType{
+ {"external/", true},
+ {"art/", false},
+ {"build/", false},
+ {"cts/", false},
+ {"dalvik/", false},
+ {"developers/", false},
+ {"development/", false},
+ {"frameworks/", false},
+ {"packages/", true},
+ {"prebuilts/module_sdk/", true},
+ {"prebuilts/", false},
+ {"sdk/", false},
+ {"system/", false},
+ {"test/", false},
+ {"toolchain/", false},
+ {"tools/", false},
}
- // SafePrebuiltPrefixes maps the regular expression to match a prebuilt
+ // safePrebuiltPrefixes maps the regular expression to match a prebuilt
// containing the path of a safe prefix to the safe prefix.
- SafePrebuiltPrefixes = make(map[*regexp.Regexp]string)
+ safePrebuiltPrefixes []safePrebuiltPrefixesType
// ImpliesUnencumbered lists the condition names representing an author attempt to disclaim copyright.
ImpliesUnencumbered = LicenseConditionSet(UnencumberedCondition)
@@ -62,14 +63,13 @@
// ImpliesNotice lists the condition names implying a notice or attribution policy.
ImpliesNotice = LicenseConditionSet(UnencumberedCondition | PermissiveCondition | NoticeCondition | ReciprocalCondition |
- RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition |
- ProprietaryCondition | ByExceptionOnlyCondition)
+ RestrictedCondition | WeaklyRestrictedCondition | ProprietaryCondition | ByExceptionOnlyCondition)
// ImpliesReciprocal lists the condition names implying a local source-sharing policy.
ImpliesReciprocal = LicenseConditionSet(ReciprocalCondition)
// Restricted lists the condition names implying an infectious source-sharing policy.
- ImpliesRestricted = LicenseConditionSet(RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition)
+ ImpliesRestricted = LicenseConditionSet(RestrictedCondition | WeaklyRestrictedCondition)
// ImpliesProprietary lists the condition names implying a confidentiality policy.
ImpliesProprietary = LicenseConditionSet(ProprietaryCondition)
@@ -81,9 +81,19 @@
ImpliesPrivate = LicenseConditionSet(ProprietaryCondition)
// ImpliesShared lists the condition names implying a source-code sharing policy.
- ImpliesShared = LicenseConditionSet(ReciprocalCondition | RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition)
+ ImpliesShared = LicenseConditionSet(ReciprocalCondition | RestrictedCondition | WeaklyRestrictedCondition)
)
+type safePathPrefixesType struct {
+ prefix string
+ strip bool
+}
+
+type safePrebuiltPrefixesType struct {
+ safePathPrefixesType
+ re *regexp.Regexp
+}
+
var (
anyLgpl = regexp.MustCompile(`^SPDX-license-identifier-LGPL.*`)
versionedGpl = regexp.MustCompile(`^SPDX-license-identifier-GPL-\p{N}.*`)
@@ -92,12 +102,13 @@
)
func init() {
- for prefix := range SafePathPrefixes {
- if prefix == "prebuilts/" {
+ for _, safePathPrefix := range safePathPrefixes {
+ if strings.HasPrefix(safePathPrefix.prefix, "prebuilts/") {
continue
}
- r := regexp.MustCompile("^prebuilts/[^ ]*/" + prefix)
- SafePrebuiltPrefixes[r] = prefix
+ r := regexp.MustCompile("^prebuilts/(?:runtime/mainline/)?" + safePathPrefix.prefix)
+ safePrebuiltPrefixes = append(safePrebuiltPrefixes,
+ safePrebuiltPrefixesType{safePathPrefix, r})
}
}
@@ -106,36 +117,6 @@
func LicenseConditionSetFromNames(tn *TargetNode, names ...string) LicenseConditionSet {
cs := NewLicenseConditionSet()
for _, name := range names {
- if name == "restricted" {
- if 0 == len(tn.LicenseKinds()) {
- cs = cs.Plus(RestrictedCondition)
- continue
- }
- hasLgpl := false
- hasClasspath := false
- hasGeneric := false
- for _, kind := range tn.LicenseKinds() {
- if strings.HasSuffix(kind, "-with-classpath-exception") {
- cs = cs.Plus(RestrictedClasspathExceptionCondition)
- hasClasspath = true
- } else if anyLgpl.MatchString(kind) {
- cs = cs.Plus(WeaklyRestrictedCondition)
- hasLgpl = true
- } else if versionedGpl.MatchString(kind) {
- cs = cs.Plus(RestrictedCondition)
- } else if genericGpl.MatchString(kind) {
- hasGeneric = true
- } else if kind == "legacy_restricted" || ccBySa.MatchString(kind) {
- cs = cs.Plus(RestrictedCondition)
- } else {
- cs = cs.Plus(RestrictedCondition)
- }
- }
- if hasGeneric && !hasLgpl && !hasClasspath {
- cs = cs.Plus(RestrictedCondition)
- }
- continue
- }
if lc, ok := RecognizedConditionNames[name]; ok {
cs |= LicenseConditionSet(lc)
}
@@ -202,9 +183,6 @@
}
result |= depConditions & LicenseConditionSet(RestrictedCondition)
- if 0 != (depConditions&LicenseConditionSet(RestrictedClasspathExceptionCondition)) && !edgeNodesAreIndependentModules(e) {
- result |= LicenseConditionSet(RestrictedClasspathExceptionCondition)
- }
return result
}
@@ -241,9 +219,6 @@
return result
}
result = result.Minus(WeaklyRestrictedCondition)
- if edgeNodesAreIndependentModules(e) {
- result = result.Minus(RestrictedClasspathExceptionCondition)
- }
return result
}
@@ -261,10 +236,7 @@
return NewLicenseConditionSet()
}
- result &= LicenseConditionSet(RestrictedCondition | RestrictedClasspathExceptionCondition)
- if 0 != (result&LicenseConditionSet(RestrictedClasspathExceptionCondition)) && edgeNodesAreIndependentModules(e) {
- result &= LicenseConditionSet(RestrictedCondition)
- }
+ result &= LicenseConditionSet(RestrictedCondition)
return result
}
@@ -281,9 +253,3 @@
isToolchain := e.annotations.HasAnnotation("toolchain")
return !isDynamic && !isToolchain
}
-
-// edgeNodesAreIndependentModules returns true for edges where the target and
-// dependency are independent modules.
-func edgeNodesAreIndependentModules(e *TargetEdge) bool {
- return e.target.PackageName() != e.dependency.PackageName()
-}
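
The change from a map to an ordered slice for `safePathPrefixes` matters because prefix matching is first-match-wins: with a map the iteration order is random, so `prebuilts/` could shadow the more specific `prebuilts/module_sdk/`. A small sketch of the deterministic lookup, with illustrative entries:

```go
package main

import (
	"fmt"
	"strings"
)

var prefixes = []struct {
	prefix string
	strip  bool
}{
	{"prebuilts/module_sdk/", true}, // more specific entry must come first
	{"prebuilts/", false},
}

func libName(p string) string {
	for _, sp := range prefixes {
		if strings.HasPrefix(p, sp.prefix) {
			if sp.strip {
				return p[len(sp.prefix):]
			}
			return p
		}
	}
	return p
}

func main() {
	fmt.Println(libName("prebuilts/module_sdk/art/current")) // art/current
	fmt.Println(libName("prebuilts/tools/common"))           // prebuilts/tools/common
}
```
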
diff --git a/tools/compliance/policy_policy_test.go b/tools/compliance/policy_policy_test.go
index 27ce16c..6188eb2 100644
--- a/tools/compliance/policy_policy_test.go
+++ b/tools/compliance/policy_policy_test.go
@@ -20,6 +20,8 @@
"sort"
"strings"
"testing"
+
+ "android/soong/tools/compliance/testfs"
)
func TestPolicy_edgeConditions(t *testing.T) {
@@ -85,19 +87,13 @@
{
name: "independentmodulestatic",
edge: annotated{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
- expectedDepActions: []string{
- "apacheBin.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
- "gplWithClasspathException.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
- },
+ expectedDepActions: []string{},
expectedTargetConditions: []string{},
},
{
name: "dependentmodule",
edge: annotated{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
- expectedDepActions: []string{
- "dependentModule.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
- "gplWithClasspathException.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
- },
+ expectedDepActions: []string{},
expectedTargetConditions: []string{},
},
@@ -166,13 +162,13 @@
name: "independentmodulereversestatic",
edge: annotated{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
expectedDepActions: []string{},
- expectedTargetConditions: []string{"gplWithClasspathException.meta_lic:restricted_with_classpath_exception"},
+ expectedTargetConditions: []string{},
},
{
name: "dependentmodulereverse",
edge: annotated{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
expectedDepActions: []string{},
- expectedTargetConditions: []string{"gplWithClasspathException.meta_lic:restricted_with_classpath_exception"},
+ expectedTargetConditions: []string{},
},
{
name: "ponr",
@@ -216,7 +212,7 @@
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- fs := make(testFS)
+ fs := make(testfs.TestFS)
stderr := &bytes.Buffer{}
target := meta[tt.edge.target] + fmt.Sprintf("deps: {\n file: \"%s\"\n", tt.edge.dep)
for _, ann := range tt.edge.annotations {
@@ -257,9 +253,9 @@
otherCs := otn.LicenseConditions()
depConditions |= otherCs
}
- t.Logf("calculate target actions for edge=%s, dep conditions=%04x, treatAsAggregate=%v", edge.String(), depConditions, tt.treatAsAggregate)
+ t.Logf("calculate target actions for edge=%s, dep conditions=%#v %s, treatAsAggregate=%v", edge.String(), depConditions, depConditions, tt.treatAsAggregate)
csActual := depConditionsPropagatingToTarget(lg, edge, depConditions, tt.treatAsAggregate)
- t.Logf("calculated target conditions as %04x{%s}", csActual, strings.Join(csActual.Names(), ", "))
+ t.Logf("calculated target conditions as %#v %s", csActual, csActual)
csExpected := NewLicenseConditionSet()
for _, triple := range tt.expectedDepActions {
fields := strings.Split(triple, ":")
@@ -269,9 +265,9 @@
}
csExpected |= expectedConditions
}
- t.Logf("expected target conditions as %04x{%s}", csExpected, strings.Join(csExpected.Names(), ", "))
+ t.Logf("expected target conditions as %#v %s", csExpected, csExpected)
if csActual != csExpected {
- t.Errorf("unexpected license conditions: got %04x, want %04x", csActual, csExpected)
+ t.Errorf("unexpected license conditions: got %#v, want %#v", csActual, csExpected)
}
})
}
diff --git a/tools/compliance/policy_resolve.go b/tools/compliance/policy_resolve.go
index d357aec..fc8ed4c 100644
--- a/tools/compliance/policy_resolve.go
+++ b/tools/compliance/policy_resolve.go
@@ -49,89 +49,71 @@
func TraceBottomUpConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
// short-cut if already walked and cached
- lg.mu.Lock()
- wg := lg.wgBU
+ lg.onceBottomUp.Do(func() {
+ // amap identifies targets previously walked. (guarded by mu)
+ amap := make(map[*TargetNode]struct{})
- if wg != nil {
- lg.mu.Unlock()
- wg.Wait()
- return
- }
- wg = &sync.WaitGroup{}
- wg.Add(1)
- lg.wgBU = wg
- lg.mu.Unlock()
+ // mu guards concurrent access to amap
+ var mu sync.Mutex
- // amap identifes targets previously walked. (guarded by mu)
- amap := make(map[*TargetNode]struct{})
+ var walk func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet
- // cmap identifies targets previously walked as pure aggregates. i.e. as containers
- // (guarded by mu)
- cmap := make(map[*TargetNode]struct{})
- var mu sync.Mutex
+ walk = func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet {
+ priorWalkResults := func() (LicenseConditionSet, bool) {
+ mu.Lock()
+ defer mu.Unlock()
- var walk func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet
+ if _, alreadyWalked := amap[target]; alreadyWalked {
+ if treatAsAggregate {
+ return target.resolution, true
+ }
+ if !target.pure {
+ return target.resolution, true
+ }
+ // previously walked in a pure aggregate context,
+ // needs to walk again in non-aggregate context
+ } else {
+ target.resolution |= conditionsFn(target)
+ amap[target] = struct{}{}
+ }
+ target.pure = treatAsAggregate
+ return target.resolution, false
+ }
+ cs, alreadyWalked := priorWalkResults()
+ if alreadyWalked {
+ return cs
+ }
- walk = func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet {
- priorWalkResults := func() (LicenseConditionSet, bool) {
+ c := make(chan LicenseConditionSet, len(target.edges))
+ // add all the conditions from all the dependencies
+ for _, edge := range target.edges {
+ go func(edge *TargetEdge) {
+ // walk dependency to get its conditions
+ cs := walk(edge.dependency, treatAsAggregate && edge.dependency.IsContainer())
+
+ // turn those into the conditions that apply to the target
+ cs = depConditionsPropagatingToTarget(lg, edge, cs, treatAsAggregate)
+
+ c <- cs
+ }(edge)
+ }
+ for i := 0; i < len(target.edges); i++ {
+ cs |= <-c
+ }
mu.Lock()
- defer mu.Unlock()
+ target.resolution |= cs
+ mu.Unlock()
- if _, alreadyWalked := amap[target]; alreadyWalked {
- if treatAsAggregate {
- return target.resolution, true
- }
- if _, asAggregate := cmap[target]; !asAggregate {
- return target.resolution, true
- }
- // previously walked in a pure aggregate context,
- // needs to walk again in non-aggregate context
- delete(cmap, target)
- } else {
- target.resolution |= conditionsFn(target)
- amap[target] = struct{}{}
- }
- if treatAsAggregate {
- cmap[target] = struct{}{}
- }
- return target.resolution, false
- }
- cs, alreadyWalked := priorWalkResults()
- if alreadyWalked {
+ // return conditions up the tree
return cs
}
- c := make(chan LicenseConditionSet, len(target.edges))
- // add all the conditions from all the dependencies
- for _, edge := range target.edges {
- go func(edge *TargetEdge) {
- // walk dependency to get its conditions
- cs := walk(edge.dependency, treatAsAggregate && edge.dependency.IsContainer())
-
- // turn those into the conditions that apply to the target
- cs = depConditionsPropagatingToTarget(lg, edge, cs, treatAsAggregate)
-
- c <- cs
- }(edge)
+ // walk each of the roots
+ for _, rname := range lg.rootFiles {
+ rnode := lg.targets[rname]
+ _ = walk(rnode, rnode.IsContainer())
}
- for i := 0; i < len(target.edges); i++ {
- cs |= <-c
- }
- mu.Lock()
- target.resolution |= cs
- mu.Unlock()
-
- // return conditions up the tree
- return cs
- }
-
- // walk each of the roots
- for _, rname := range lg.rootFiles {
- rnode := lg.targets[rname]
- _ = walk(rnode, rnode.IsContainer())
- }
-
- wg.Done()
+ })
}
// ResolveTopDownCondtions performs a top-down walk of the LicenseGraph
@@ -150,85 +132,76 @@
func TraceTopDownConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
// short-cut if already walked and cached
- lg.mu.Lock()
- wg := lg.wgTD
+ lg.onceTopDown.Do(func() {
+ wg := &sync.WaitGroup{}
+ wg.Add(1)
- if wg != nil {
- lg.mu.Unlock()
- wg.Wait()
- return
- }
- wg = &sync.WaitGroup{}
- wg.Add(1)
- lg.wgTD = wg
- lg.mu.Unlock()
+ // start with the conditions propagated up the graph
+ TraceBottomUpConditions(lg, conditionsFn)
- // start with the conditions propagated up the graph
- TraceBottomUpConditions(lg, conditionsFn)
+ // amap contains the set of targets already walked. (guarded by mu)
+ amap := make(map[*TargetNode]struct{})
- // amap contains the set of targets already walked. (guarded by mu)
- amap := make(map[*TargetNode]struct{})
+ // mu guards concurrent access to amap
+ var mu sync.Mutex
- // cmap contains the set of targets walked as pure aggregates. i.e. containers
- // (guarded by mu)
- cmap := make(map[*TargetNode]struct{})
+ var walk func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool)
- // mu guards concurrent access to cmap
- var mu sync.Mutex
-
- var walk func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool)
-
- walk = func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool) {
- defer wg.Done()
- mu.Lock()
- fnode.resolution |= conditionsFn(fnode)
- fnode.resolution |= cs
- amap[fnode] = struct{}{}
- if treatAsAggregate {
- cmap[fnode] = struct{}{}
- }
- cs = fnode.resolution
- mu.Unlock()
- // for each dependency
- for _, edge := range fnode.edges {
- func(edge *TargetEdge) {
- // dcs holds the dpendency conditions inherited from the target
- dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate, conditionsFn)
- dnode := edge.dependency
+ walk = func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool) {
+ defer wg.Done()
+ continueWalk := func() bool {
mu.Lock()
defer mu.Unlock()
- depcs := dnode.resolution
- _, alreadyWalked := amap[dnode]
- if !dcs.IsEmpty() && alreadyWalked {
- if dcs.Difference(depcs).IsEmpty() {
+
+ depcs := fnode.resolution
+ _, alreadyWalked := amap[fnode]
+ if alreadyWalked {
+ if cs.IsEmpty() {
+ return false
+ }
+ if cs.Difference(depcs).IsEmpty() {
// no new conditions
// pure aggregates never need walking a 2nd time with same conditions
if treatAsAggregate {
- return
+ return false
}
// non-aggregates don't need walking as non-aggregate a 2nd time
- if _, asAggregate := cmap[dnode]; !asAggregate {
- return
+ if !fnode.pure {
+ return false
}
// previously walked as pure aggregate; need to re-walk as non-aggregate
- delete(cmap, dnode)
}
}
+ fnode.resolution |= conditionsFn(fnode)
+ fnode.resolution |= cs
+ fnode.pure = treatAsAggregate
+ amap[fnode] = struct{}{}
+ cs = fnode.resolution
+ return true
+ }()
+ if !continueWalk {
+ return
+ }
+ // for each dependency
+ for _, edge := range fnode.edges {
+ // dcs holds the dependency conditions inherited from the target
+ dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate, conditionsFn)
+ dnode := edge.dependency
// add the conditions to the dependency
wg.Add(1)
go walk(dnode, dcs, treatAsAggregate && dnode.IsContainer())
- }(edge)
+ }
}
- }
- // walk each of the roots
- for _, rname := range lg.rootFiles {
- rnode := lg.targets[rname]
- wg.Add(1)
- // add the conditions to the root and its transitive closure
- go walk(rnode, NewLicenseConditionSet(), rnode.IsContainer())
- }
- wg.Done()
- wg.Wait()
+ // walk each of the roots
+ for _, rname := range lg.rootFiles {
+ rnode := lg.targets[rname]
+ wg.Add(1)
+ // add the conditions to the root and its transitive closure
+ go walk(rnode, NewLicenseConditionSet(), rnode.IsContainer())
+ }
+ wg.Done()
+ wg.Wait()
+ })
}
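
The pattern being removed here, a nil-checked `*sync.WaitGroup` under a mutex used as a "run once, later callers wait" latch, is exactly what `sync.Once` provides with less state to get wrong: `Do` runs the function once and blocks concurrent callers until it completes. A minimal sketch:

```go
package main

import (
	"fmt"
	"sync"
)

type graph struct {
	once   sync.Once
	result int
}

// resolve performs its walk exactly once; every caller, concurrent or
// later, observes the completed result.
func (g *graph) resolve() int {
	g.once.Do(func() {
		g.result = 42 // stand-in for the expensive graph walk
	})
	return g.result
}

func main() {
	g := &graph{}
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(g.resolve()) // always 42; the walk ran once
		}()
	}
	wg.Wait()
}
```
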
diff --git a/tools/compliance/policy_resolve_test.go b/tools/compliance/policy_resolve_test.go
index f98e4cc..d6731fe 100644
--- a/tools/compliance/policy_resolve_test.go
+++ b/tools/compliance/policy_resolve_test.go
@@ -289,8 +289,8 @@
{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
},
expectedActions: []tcond{
- {"apacheBin.meta_lic", "notice|restricted_with_classpath_exception"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+ {"apacheBin.meta_lic", "notice"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -300,8 +300,8 @@
{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
},
expectedActions: []tcond{
- {"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+ {"dependentModule.meta_lic", "notice"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -312,7 +312,7 @@
},
expectedActions: []tcond{
{"apacheBin.meta_lic", "notice"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -322,8 +322,8 @@
{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
},
expectedActions: []tcond{
- {"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+ {"dependentModule.meta_lic", "notice"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
},
},
}
@@ -593,9 +593,9 @@
{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
},
expectedActions: []tcond{
- {"apacheBin.meta_lic", "notice|restricted_with_classpath_exception"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
- {"mitLib.meta_lic", "notice|restricted_with_classpath_exception"},
+ {"apacheBin.meta_lic", "notice"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
+ {"mitLib.meta_lic", "notice"},
},
},
{
@@ -606,9 +606,9 @@
{"dependentModule.meta_lic", "mitLib.meta_lic", []string{"static"}},
},
expectedActions: []tcond{
- {"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
- {"mitLib.meta_lic", "notice|restricted_with_classpath_exception"},
+ {"dependentModule.meta_lic", "notice"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
+ {"mitLib.meta_lic", "notice"},
},
},
{
@@ -620,7 +620,7 @@
},
expectedActions: []tcond{
{"apacheBin.meta_lic", "notice"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
{"mitLib.meta_lic", "notice"},
},
},
@@ -632,9 +632,9 @@
{"dependentModule.meta_lic", "mitLib.meta_lic", []string{"static"}},
},
expectedActions: []tcond{
- {"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
- {"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
- {"mitLib.meta_lic", "notice|restricted_with_classpath_exception"},
+ {"dependentModule.meta_lic", "notice"},
+ {"gplWithClasspathException.meta_lic", "permissive"},
+ {"mitLib.meta_lic", "notice"},
},
},
}
diff --git a/tools/compliance/policy_resolvenotices_test.go b/tools/compliance/policy_resolvenotices_test.go
index cd9dd71..92b0ce3 100644
--- a/tools/compliance/policy_resolvenotices_test.go
+++ b/tools/compliance/policy_resolvenotices_test.go
@@ -217,10 +217,10 @@
},
expectedResolutions: []res{
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
- {"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+ {"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -245,7 +245,7 @@
expectedResolutions: []res{
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+ {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -258,17 +258,17 @@
},
expectedResolutions: []res{
{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
- {"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheContainer.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
- {"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheContainer.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+ {"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
{"apacheContainer.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"apacheContainer.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheContainer.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
- {"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+ {"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -280,11 +280,11 @@
},
expectedResolutions: []res{
{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
- {"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -309,7 +309,7 @@
expectedResolutions: []res{
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -336,7 +336,7 @@
{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -363,7 +363,7 @@
{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+ {"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -375,10 +375,8 @@
},
expectedResolutions: []res{
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"apacheBin.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
},
},
{
@@ -390,10 +388,8 @@
},
expectedResolutions: []res{
{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"dependentModule.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
},
},
{
@@ -418,7 +414,7 @@
expectedResolutions: []res{
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -430,9 +426,7 @@
},
expectedResolutions: []res{
{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"dependentModule.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
},
},
{
@@ -444,11 +438,8 @@
},
expectedResolutions: []res{
{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
- {"dependentModule.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
}
diff --git a/tools/compliance/policy_resolveshare_test.go b/tools/compliance/policy_resolveshare_test.go
index c451b86..cf88058 100644
--- a/tools/compliance/policy_resolveshare_test.go
+++ b/tools/compliance/policy_resolveshare_test.go
@@ -40,9 +40,7 @@
edges: []annotated{
{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "independentmodulestaticrestricted",
@@ -50,10 +48,7 @@
edges: []annotated{
{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
},
- expectedResolutions: []res{
- {"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "dependentmodulerestricted",
@@ -61,9 +56,7 @@
edges: []annotated{
{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "dependentmodulerestrictedshipclasspath",
@@ -71,11 +64,7 @@
edges: []annotated{
{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "lgplonfprestricted",
@@ -84,8 +73,8 @@
{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
},
expectedResolutions: []res{
- {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
- {"lgplBin.meta_lic", "apacheLib.meta_lic", "lgplBin.meta_lic", "restricted"},
+ {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
+ {"lgplBin.meta_lic", "apacheLib.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -95,7 +84,7 @@
{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
},
expectedResolutions: []res{
- {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+ {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -105,7 +94,7 @@
{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
},
expectedResolutions: []res{
- {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+ {"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
},
},
{
@@ -185,9 +174,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "independentmodulereversestaticrestricted",
@@ -195,10 +182,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "dependentmodulereverserestricted",
@@ -206,9 +190,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "dependentmodulereverserestrictedshipdependent",
@@ -216,11 +198,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "ponrrestricted",
diff --git a/tools/compliance/policy_shareprivacyconflicts.go b/tools/compliance/policy_shareprivacyconflicts.go
index 279e179..947bb96 100644
--- a/tools/compliance/policy_shareprivacyconflicts.go
+++ b/tools/compliance/policy_shareprivacyconflicts.go
@@ -49,7 +49,11 @@
// size is the size of the result
size := 0
- for _, cs := range combined {
+ for actsOn, cs := range combined {
+ if actsOn.pure && !actsOn.LicenseConditions().MatchesAnySet(ImpliesShared) {
+ // no need to share code to build "a distribution medium"
+ continue
+ }
size += cs.Intersection(ImpliesShared).Len() * cs.Intersection(ImpliesPrivate).Len()
}
if size == 0 {
@@ -57,6 +61,9 @@
}
result := make([]SourceSharePrivacyConflict, 0, size)
for actsOn, cs := range combined {
+ if actsOn.pure { // no need to share code for "a distribution medium"
+ continue
+ }
pconditions := cs.Intersection(ImpliesPrivate).AsList()
ssconditions := cs.Intersection(ImpliesShared).AsList()
diff --git a/tools/compliance/policy_walk.go b/tools/compliance/policy_walk.go
index f4d7bba..beb6d53 100644
--- a/tools/compliance/policy_walk.go
+++ b/tools/compliance/policy_walk.go
@@ -45,7 +45,7 @@
}
// VisitNode is called for each root and for each walked dependency node by
-// WalkTopDown. When VisitNode returns true, WalkTopDown will proceed to walk
+// WalkTopDown and WalkTopDownBreadthFirst. When VisitNode returns true, WalkTopDown will proceed to walk
// down the dependencies of the node
type VisitNode func(lg *LicenseGraph, target *TargetNode, path TargetEdgePath) bool
@@ -79,6 +79,54 @@
}
}
+// WalkTopDownBreadthFirst performs a breadth-first top-down walk of `lg`, calling `visit` and descending
+// into dependencies when `visit` returns true.
+func WalkTopDownBreadthFirst(ctx EdgeContextProvider, lg *LicenseGraph, visit VisitNode) {
+ path := NewTargetEdgePath(32)
+
+ var walk func(fnode *TargetNode)
+ walk = func(fnode *TargetNode) {
+ edgesToWalk := make(TargetEdgeList, 0, len(fnode.edges))
+ for _, edge := range fnode.edges {
+ var edgeContext interface{}
+ if ctx == nil {
+ edgeContext = nil
+ } else {
+ edgeContext = ctx.Context(lg, *path, edge)
+ }
+ path.Push(edge, edgeContext)
+ if visit(lg, edge.dependency, *path) {
+ edgesToWalk = append(edgesToWalk, edge)
+ }
+ path.Pop()
+ }
+
+ for _, edge := range edgesToWalk {
+ var edgeContext interface{}
+ if ctx == nil {
+ edgeContext = nil
+ } else {
+ edgeContext = ctx.Context(lg, *path, edge)
+ }
+ path.Push(edge, edgeContext)
+ walk(edge.dependency)
+ path.Pop()
+ }
+ }
+
+ path.Clear()
+ rootsToWalk := make([]*TargetNode, 0, len(lg.rootFiles))
+ for _, r := range lg.rootFiles {
+ if visit(lg, lg.targets[r], *path) {
+ rootsToWalk = append(rootsToWalk, lg.targets[r])
+ }
+ }
+
+ for _, rnode := range rootsToWalk {
+ walk(rnode)
+ }
+}
+
// resolutionKey identifies results from walking a specific target for a
// specific set of conditions.
type resolutionKey struct {
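The walker above visits a node once per incoming path and descends only into edges whose `visit` call returned true, which lets callers prune. A minimal usage sketch (hypothetical caller code, given a `LicenseGraph` in `lg`) that visits each target exactly once by pruning repeats:

```go
// Collect each target name once, in breadth-first order. Returning false
// for an already-seen node stops the walk from descending into it again.
seen := make(map[string]struct{})
var names []string
WalkTopDownBreadthFirst(nil, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
	if _, ok := seen[tn.Name()]; ok {
		return false // reached again via another path; prune
	}
	seen[tn.Name()] = struct{}{}
	names = append(names, tn.Name())
	return true
})
```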
diff --git a/tools/compliance/policy_walk_test.go b/tools/compliance/policy_walk_test.go
index 92867f9..0bc37f8 100644
--- a/tools/compliance/policy_walk_test.go
+++ b/tools/compliance/policy_walk_test.go
@@ -16,9 +16,22 @@
import (
"bytes"
+ "fmt"
+ "os"
+ "strings"
"testing"
)
+func TestMain(m *testing.M) {
+ // Change into the cmd directory before running the tests
+ // so they can find the testdata directory.
+ if err := os.Chdir("cmd"); err != nil {
+ fmt.Printf("failed to change to testdata directory: %s\n", err)
+ os.Exit(1)
+ }
+ os.Exit(m.Run())
+}
+
func TestWalkResolutionsForCondition(t *testing.T) {
tests := []struct {
name string
@@ -104,8 +117,7 @@
},
expectedResolutions: []res{
{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -115,10 +127,7 @@
edges: []annotated{
{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
},
- expectedResolutions: []res{
- {"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "dependentmodulenotice",
@@ -129,7 +138,6 @@
},
expectedResolutions: []res{
{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
},
},
{
@@ -139,9 +147,7 @@
edges: []annotated{
{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "lgplonfpnotice",
@@ -347,7 +353,7 @@
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
},
expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -357,9 +363,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "independentmodulereverserestrictedshipped",
@@ -368,9 +372,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "independentmodulereversestaticnotice",
@@ -380,9 +382,8 @@
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
},
expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
},
},
{
@@ -392,10 +393,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "dependentmodulereversenotice",
@@ -405,7 +403,7 @@
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -415,9 +413,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "dependentmodulereverserestrictedshipped",
@@ -426,11 +422,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
- expectedResolutions: []res{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedResolutions: []res{},
},
{
name: "ponrnotice",
@@ -716,8 +708,7 @@
},
expectedActions: []act{
{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -727,10 +718,7 @@
edges: []annotated{
{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
},
- expectedActions: []act{
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedActions: []act{},
},
{
name: "dependentmodulenotice",
@@ -741,7 +729,6 @@
},
expectedActions: []act{
{"dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
- {"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
},
},
{
@@ -751,9 +738,7 @@
edges: []annotated{
{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
},
- expectedActions: []act{
- {"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedActions: []act{},
},
{
name: "lgplonfpnotice",
@@ -956,7 +941,7 @@
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
},
expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -966,9 +951,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
},
- expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedActions: []act{},
},
{
name: "independentmodulereverserestrictedshipped",
@@ -977,9 +960,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
},
- expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedActions: []act{},
},
{
name: "independentmodulereversestaticnotice",
@@ -989,9 +970,8 @@
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
},
expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
},
},
{
@@ -1001,10 +981,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
},
- expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedActions: []act{},
},
{
name: "dependentmodulereversenotice",
@@ -1014,7 +991,7 @@
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+ {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
},
},
{
@@ -1024,9 +1001,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
- expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedActions: []act{},
},
{
name: "dependentmodulereverserestrictedshipped",
@@ -1035,10 +1010,7 @@
edges: []annotated{
{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
},
- expectedActions: []act{
- {"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- {"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
- },
+ expectedActions: []act{},
},
{
name: "ponrnotice",
@@ -1238,3 +1210,417 @@
})
}
}
+
+func TestWalkTopDownBreadthFirst(t *testing.T) {
+ tests := []struct {
+ name string
+ roots []string
+ edges []annotated
+ expectedResult []string
+ }{
+ {
+ name: "bin/bin1",
+ roots: []string{"bin/bin1.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin2",
+ roots: []string{"bin/bin2.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin3",
+ roots: []string{"bin/bin3.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin3.meta_lic",
+ },
+ },
+ {
+ name: "lib/liba.so",
+ roots: []string{"lib/liba.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/liba.so.meta_lic",
+ },
+ },
+ {
+ name: "lib/libb.so",
+ roots: []string{"lib/libb.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/libb.so.meta_lic",
+ },
+ },
+ {
+ name: "lib/libc.so",
+ roots: []string{"lib/libc.a.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/libc.a.meta_lic",
+ },
+ },
+ {
+ name: "lib/libd.so",
+ roots: []string{"lib/libd.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "highest.apex",
+ roots: []string{"highest.apex.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/highest.apex.meta_lic",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "container.zip",
+ roots: []string{"container.zip.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/container.zip.meta_lic",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "application",
+ roots: []string{"application.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/application.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin1&lib/liba",
+ roots: []string{"bin/bin1.meta_lic","lib/liba.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin2&lib/libd",
+ roots: []string{"bin/bin2.meta_lic", "lib/libd.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "application&bin/bin3",
+ roots: []string{"application.meta_lic", "bin/bin3.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/application.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ },
+ },
+ {
+ name: "highest.apex&container.zip",
+ roots: []string{"highest.apex.meta_lic", "container.zip.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/highest.apex.meta_lic",
+ "testdata/notice/container.zip.meta_lic",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ stderr := &bytes.Buffer{}
+ actualOut := &bytes.Buffer{}
+
+ rootFiles := make([]string, 0, len(tt.roots))
+ for _, r := range tt.roots {
+ rootFiles = append(rootFiles, "testdata/notice/"+r)
+ }
+
+ lg, err := ReadLicenseGraph(GetFS(""), stderr, rootFiles)
+
+ if err != nil {
+ t.Errorf("unexpected test data error: got %s, want no error", err)
+ return
+ }
+
+ expectedRst := tt.expectedResult
+
+ WalkTopDownBreadthFirst(nil, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
+ fmt.Fprintln(actualOut, tn.Name())
+ return true
+ })
+
+ actualRst := strings.Split(actualOut.String(), "\n")
+
+ if len(actualRst) > 0 {
+ actualRst = actualRst[:len(actualRst)-1]
+ }
+
+ t.Logf("actual nodes visited: %s", actualOut.String())
+ t.Logf("expected nodes visited: %s", strings.Join(expectedRst, "\n"))
+
+ if len(actualRst) != len(expectedRst) {
+ t.Errorf("WalkTopDownBreadthFirst: number of visited nodes is different: got %d, want %d", len(actualRst), len(expectedRst))
+ }
+
+ for i := 0; i < len(actualRst) && i < len(expectedRst); i++ {
+ if actualRst[i] != expectedRst[i] {
+ t.Errorf("WalkTopDownBreadthFirst: lines differ at index %d: got %q, want %q", i, actualRst[i], expectedRst[i])
+ break
+ }
+ }
+
+ if len(actualRst) < len(expectedRst) {
+ t.Errorf("WalkTopDownBreadthFirst: extra lines at %d: got %q, want nothing", len(actualRst), expectedRst[len(actualRst)])
+ }
+
+ if len(expectedRst) < len(actualRst) {
+ t.Errorf("WalkTopDownBreadthFirst: missing lines at %d: got nothing, want %q", len(expectedRst), actualRst[len(expectedRst)])
+ }
+ })
+ }
+}
+
+func TestWalkTopDownBreadthFirstWithoutDuplicates(t *testing.T) {
+ tests := []struct {
+ name string
+ roots []string
+ edges []annotated
+ expectedResult []string
+ }{
+ {
+ name: "bin/bin1",
+ roots: []string{"bin/bin1.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin2",
+ roots: []string{"bin/bin2.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin3",
+ roots: []string{"bin/bin3.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin3.meta_lic",
+ },
+ },
+ {
+ name: "lib/liba.so",
+ roots: []string{"lib/liba.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/liba.so.meta_lic",
+ },
+ },
+ {
+ name: "lib/libb.so",
+ roots: []string{"lib/libb.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/libb.so.meta_lic",
+ },
+ },
+ {
+ name: "lib/libc.so",
+ roots: []string{"lib/libc.a.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/libc.a.meta_lic",
+ },
+ },
+ {
+ name: "lib/libd.so",
+ roots: []string{"lib/libd.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "highest.apex",
+ roots: []string{"highest.apex.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/highest.apex.meta_lic",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "container.zip",
+ roots: []string{"container.zip.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/container.zip.meta_lic",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ {
+ name: "application",
+ roots: []string{"application.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/application.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin1&lib/liba",
+ roots: []string{"bin/bin1.meta_lic", "lib/liba.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ },
+ },
+ {
+ name: "bin/bin2&lib/libd",
+ roots: []string{"bin/bin2.meta_lic", "lib/libd.so.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ },
+ },
+ {
+ name: "application&bin/bin3",
+ roots: []string{"application.meta_lic", "bin/bin3.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/application.meta_lic",
+ "testdata/notice/bin/bin3.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ },
+ },
+ {
+ name: "highest.apex&container.zip",
+ roots: []string{"highest.apex.meta_lic", "container.zip.meta_lic"},
+ expectedResult: []string{
+ "testdata/notice/highest.apex.meta_lic",
+ "testdata/notice/container.zip.meta_lic",
+ "testdata/notice/bin/bin1.meta_lic",
+ "testdata/notice/bin/bin2.meta_lic",
+ "testdata/notice/lib/liba.so.meta_lic",
+ "testdata/notice/lib/libb.so.meta_lic",
+ "testdata/notice/lib/libc.a.meta_lic",
+ "testdata/notice/lib/libd.so.meta_lic",
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ stderr := &bytes.Buffer{}
+ actualOut := &bytes.Buffer{}
+
+ rootFiles := make([]string, 0, len(tt.roots))
+ for _, r := range tt.roots {
+ rootFiles = append(rootFiles, "testdata/notice/"+r)
+ }
+
+ lg, err := ReadLicenseGraph(GetFS(""), stderr, rootFiles)
+
+ if err != nil {
+ t.Errorf("unexpected test data error: got %s, want no error", err)
+ return
+ }
+
+ expectedRst := tt.expectedResult
+
+ // Keep track of visited nodes;
+ // only add to actualOut if a node has not been visited yet.
+ visitedNodes := make(map[string]struct{})
+ WalkTopDownBreadthFirst(nil, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
+ if _, alreadyVisited := visitedNodes[tn.Name()]; alreadyVisited {
+ return false
+ }
+ fmt.Fprintln(actualOut, tn.Name())
+ visitedNodes[tn.Name()] = struct{}{}
+ return true
+ })
+
+ actualRst := strings.Split(actualOut.String(), "\n")
+
+ if len(actualRst) > 0 {
+ actualRst = actualRst[:len(actualRst)-1]
+ }
+
+ t.Logf("actual nodes visited: %s", actualOut.String())
+ t.Logf("expected nodes visited: %s", strings.Join(expectedRst, "\n"))
+
+ if len(actualRst) != len(expectedRst) {
+ t.Errorf("WalkTopDownBreadthFirst: number of visited nodes is different: got %d, want %d", len(actualRst), len(expectedRst))
+ }
+
+ for i := 0; i < len(actualRst) && i < len(expectedRst); i++ {
+ if actualRst[i] != expectedRst[i] {
+ t.Errorf("WalkTopDownBreadthFirst: lines differ at index %d: got %q, want %q", i, actualRst[i], expectedRst[i])
+ break
+ }
+ }
+
+ if len(actualRst) < len(expectedRst) {
+ t.Errorf("WalkTopDownBreadthFirst: extra lines at %d: got %q, want nothing", len(actualRst), expectedRst[len(actualRst)])
+ }
+
+ if len(expectedRst) < len(actualRst) {
+ t.Errorf("WalkTopDownBreadthFirst: missing lines at %d: got nothing, want %q", len(expectedRst), actualRst[len(expectedRst)])
+ }
+ })
+ }
+}
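Both tests compare line-by-line after splitting the captured output on newlines; because every `Fprintln` appends a trailing `"\n"`, the split always ends with one empty string, which the `actualRst[:len(actualRst)-1]` truncation drops. The same behavior in isolation:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	out := "a\nb\n" // what two Fprintln calls produce
	parts := strings.Split(out, "\n")
	fmt.Printf("%q\n", parts) // ["a" "b" ""] — trailing empty element
	parts = parts[:len(parts)-1]
	fmt.Printf("%q\n", parts) // ["a" "b"]
}
```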
diff --git a/tools/compliance/projectmetadata/Android.bp b/tools/compliance/projectmetadata/Android.bp
new file mode 100644
index 0000000..dccff76
--- /dev/null
+++ b/tools/compliance/projectmetadata/Android.bp
@@ -0,0 +1,34 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+bootstrap_go_package {
+ name: "projectmetadata-module",
+ srcs: [
+ "projectmetadata.go",
+ ],
+ deps: [
+ "compliance-test-fs-module",
+ "golang-protobuf-proto",
+ "golang-protobuf-encoding-prototext",
+ "project_metadata_proto",
+ ],
+ testSrcs: [
+ "projectmetadata_test.go",
+ ],
+ pkgPath: "android/soong/tools/compliance/projectmetadata",
+}
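The `projectmetadata.go` implementation that follows bounds concurrent METADATA reads with a buffered channel used as a counting semaphore: the channel is seeded with `ConcurrentReaders` tokens, and each reader takes one before opening a file and returns it when done. A self-contained sketch of that pattern (the worker count and body are illustrative):

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	const readers = 5
	task := make(chan bool, readers)
	for i := 0; i < readers; i++ {
		task <- true // seed the pool with tokens
	}

	var wg sync.WaitGroup
	for i := 0; i < 20; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			<-task                          // acquire a token
			defer func() { task <- true }() // release it
			fmt.Println("reading", i)       // at most `readers` goroutines run here at once
		}(i)
	}
	wg.Wait()
}
```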
diff --git a/tools/compliance/projectmetadata/projectmetadata.go b/tools/compliance/projectmetadata/projectmetadata.go
new file mode 100644
index 0000000..b137a12
--- /dev/null
+++ b/tools/compliance/projectmetadata/projectmetadata.go
@@ -0,0 +1,292 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package projectmetadata
+
+import (
+ "fmt"
+ "io"
+ "io/fs"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "android/soong/compliance/project_metadata_proto"
+
+ "google.golang.org/protobuf/encoding/prototext"
+)
+
+var (
+ // ConcurrentReaders is the size of the task pool for limiting resource usage e.g. open files.
+ ConcurrentReaders = 5
+)
+
+// ProjectMetadata contains the METADATA for a git project.
+type ProjectMetadata struct {
+ proto project_metadata_proto.Metadata
+
+ // project is the path to the directory containing the METADATA file.
+ project string
+}
+
+// ProjectUrlMap maps url type name to url value
+type ProjectUrlMap map[string]string
+
+// DownloadUrl returns the address of a download location
+func (m ProjectUrlMap) DownloadUrl() string {
+ for _, urlType := range []string{"GIT", "SVN", "HG", "DARCS"} {
+ if url, ok := m[urlType]; ok {
+ return url
+ }
+ }
+ return ""
+}
+
+// String returns a string representation of the metadata for error messages.
+func (pm *ProjectMetadata) String() string {
+ return fmt.Sprintf("project: %q\n%s", pm.project, pm.proto.String())
+}
+
+// Project returns the path to the directory containing the METADATA file
+func (pm *ProjectMetadata) Project() string {
+ return pm.project
+}
+
+// ProjectName returns the name of the project.
+func (pm *ProjectMetadata) Name() string {
+ return pm.proto.GetName()
+}
+
+// ProjectVersion returns the version of the project if available.
+func (pm *ProjectMetadata) Version() string {
+ tp := pm.proto.GetThirdParty()
+ if tp != nil {
+ version := tp.GetVersion()
+ return version
+ }
+ return ""
+}
+
+// VersionedName returns the name of the project including the version if any.
+func (pm *ProjectMetadata) VersionedName() string {
+ name := pm.proto.GetName()
+ if name != "" {
+ tp := pm.proto.GetThirdParty()
+ if tp != nil {
+ version := tp.GetVersion()
+ if version != "" {
+ if version[0] == 'v' || version[0] == 'V' {
+ return name + "_" + version
+ } else {
+ return name + "_v_" + version
+ }
+ }
+ }
+ return name
+ }
+ return pm.proto.GetDescription()
+}
+
+// UrlsByTypeName returns a map of URLs by Type Name
+func (pm *ProjectMetadata) UrlsByTypeName() ProjectUrlMap {
+ tp := pm.proto.GetThirdParty()
+ if tp == nil {
+ return nil
+ }
+ if len(tp.Url) == 0 {
+ return nil
+ }
+ urls := make(ProjectUrlMap)
+
+ for _, url := range tp.Url {
+ uri := url.GetValue()
+ if uri == "" {
+ continue
+ }
+ urls[project_metadata_proto.URL_Type_name[int32(url.GetType())]] = uri
+ }
+ return urls
+}
+
+// projectIndex describes a project to be read; after its `wait()` returns, it will contain
+// either a `ProjectMetadata` in `pm` (which can be nil even without an error) or a non-nil `err`.
+type projectIndex struct {
+ project string
+ path string
+ pm *ProjectMetadata
+ err error
+ done chan struct{}
+}
+
+// finish marks the task to read the `projectIndex` completed.
+func (pi *projectIndex) finish() {
+ close(pi.done)
+}
+
+// wait suspends execution until the `projectIndex` task completes.
+func (pi *projectIndex) wait() {
+ <-pi.done
+}
+
+// Index reads and caches ProjectMetadata (thread safe)
+type Index struct {
+ // projecs maps project name to a wait group if read has already started, and
+ // to a `ProjectMetadata` or to an `error` after the read completes.
+ projects sync.Map
+
+ // task provides a fixed-size task pool to limit concurrent open files etc.
+ task chan bool
+
+ // rootFS locates the root of the file system from which to read the files.
+ rootFS fs.FS
+}
+
+// NewIndex constructs a project metadata `Index` for the given file system.
+func NewIndex(rootFS fs.FS) *Index {
+ ix := &Index{task: make(chan bool, ConcurrentReaders), rootFS: rootFS}
+ for i := 0; i < ConcurrentReaders; i++ {
+ ix.task <- true
+ }
+ return ix
+}
+
+// MetadataForProjects returns 0..n ProjectMetadata for n `projects`, or an error.
+// Each project that has a METADATA.android or a METADATA file in the root of the project will have
+// a corresponding ProjectMetadata in the result. Projects with neither file get skipped. A nil
+// result with no error indicates none of the given `projects` has a METADATA file.
+// (thread safe -- can be called concurrently from multiple goroutines)
+func (ix *Index) MetadataForProjects(projects ...string) ([]*ProjectMetadata, error) {
+ if ConcurrentReaders < 1 {
+ return nil, fmt.Errorf("need at least one task in project metadata pool")
+ }
+ if len(projects) == 0 {
+ return nil, nil
+ }
+ // Identify the projects that have never been read
+ projectsToRead := make([]*projectIndex, 0, len(projects))
+ projectIndexes := make([]*projectIndex, 0, len(projects))
+ for _, p := range projects {
+ pi, loaded := ix.projects.LoadOrStore(p, &projectIndex{project: p, done: make(chan struct{})})
+ if !loaded {
+ projectsToRead = append(projectsToRead, pi.(*projectIndex))
+ }
+ projectIndexes = append(projectIndexes, pi.(*projectIndex))
+ }
+ // findMeta locates and reads the appropriate METADATA file, if any.
+ findMeta := func(pi *projectIndex) {
+ <-ix.task
+ defer func() {
+ ix.task <- true
+ pi.finish()
+ }()
+
+ // Support METADATA.android for projects that already have a different sort of METADATA file.
+ path := filepath.Join(pi.project, "METADATA.android")
+ fi, err := fs.Stat(ix.rootFS, path)
+ if err == nil {
+ if fi.Mode().IsRegular() {
+ ix.readMetadataFile(pi, path)
+ return
+ }
+ }
+ // No METADATA.android; try the METADATA file.
+ path = filepath.Join(pi.project, "METADATA")
+ fi, err = fs.Stat(ix.rootFS, path)
+ if err == nil {
+ if fi.Mode().IsRegular() {
+ ix.readMetadataFile(pi, path)
+ return
+ }
+ }
+ // no METADATA file exists -- leave nil and finish
+ }
+ // Read the METADATA files in the background; projects without one are left with a nil pm.
+ for _, p := range projectsToRead {
+ go findMeta(p)
+ }
+ // Wait until all of the projects have been read.
+ var msg strings.Builder
+ result := make([]*ProjectMetadata, 0, len(projects))
+ for _, pi := range projectIndexes {
+ pi.wait()
+ // Combine any errors into a single error.
+ if pi.err != nil {
+ fmt.Fprintf(&msg, " %v\n", pi.err)
+ } else if pi.pm != nil {
+ result = append(result, pi.pm)
+ }
+ }
+ if msg.Len() > 0 {
+ return nil, fmt.Errorf("error reading project(s):\n%s", msg.String())
+ }
+ if len(result) == 0 {
+ return nil, nil
+ }
+ return result, nil
+}
+
+// AllMetadataFiles returns the sorted list of all METADATA files read thus far.
+func (ix *Index) AllMetadataFiles() []string {
+ var files []string
+ ix.projects.Range(func(key, value any) bool {
+ pi := value.(*projectIndex)
+ if pi.path != "" {
+ files = append(files, pi.path)
+ }
+ return true
+ })
+ return files
+}
+
+// readMetadataFile tries to read and parse a METADATA file at `path` for `project`.
+func (ix *Index) readMetadataFile(pi *projectIndex, path string) {
+ f, err := ix.rootFS.Open(path)
+ if err != nil {
+ pi.err = fmt.Errorf("error opening project %q metadata %q: %w", pi.project, path, err)
+ return
+ }
+ defer f.Close() // close even on early error returns
+
+ // read the file
+ data, err := io.ReadAll(f)
+ if err != nil {
+ pi.err = fmt.Errorf("error reading project %q metadata %q: %w", pi.project, path, err)
+ return
+ }
+
+ uo := prototext.UnmarshalOptions{DiscardUnknown: true}
+ pm := &ProjectMetadata{project: pi.project}
+ err = uo.Unmarshal(data, &pm.proto)
+ if err != nil {
+ pi.err = fmt.Errorf(`error in project %q METADATA %q: %v
+
+METADATA and METADATA.android files must parse as text protobufs
+defined by
+ build/soong/compliance/project_metadata_proto/project_metadata.proto
+
+* unknown fields don't matter
+* check invalid ENUM names
+* check quoting
+* check unescaped nested quotes
+* check the comment marker for protobuf is '#' not '//'
+
+if importing a library that uses a different sort of METADATA file, add
+a METADATA.android file beside it to parse instead
+`, pi.project, path, err)
+ return
+ }
+
+ pi.path = path
+ pi.pm = pm
+}
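A hypothetical consumer of the new package (the root directory and project paths below are illustrative only, not part of this change):

```go
package main

import (
	"fmt"
	"os"

	"android/soong/tools/compliance/projectmetadata"
)

func main() {
	ix := projectmetadata.NewIndex(os.DirFS("."))
	// Example project directories; any without a METADATA file are skipped.
	pms, err := ix.MetadataForProjects("external/foo", "external/bar")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for _, pm := range pms {
		fmt.Println(pm.VersionedName(), pm.UrlsByTypeName().DownloadUrl())
	}
}
```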
diff --git a/tools/compliance/projectmetadata/projectmetadata_test.go b/tools/compliance/projectmetadata/projectmetadata_test.go
new file mode 100644
index 0000000..0af0cd7
--- /dev/null
+++ b/tools/compliance/projectmetadata/projectmetadata_test.go
@@ -0,0 +1,722 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package projectmetadata
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "android/soong/compliance/project_metadata_proto"
+ "android/soong/tools/compliance/testfs"
+)
+
+const (
+ // EMPTY represents a METADATA file with no recognized fields
+ EMPTY = ``
+
+ // INVALID_NAME represents a METADATA file with the wrong type of name
+ INVALID_NAME = `name: a library\n`
+
+ // INVALID_DESCRIPTION represents a METADATA file with the wrong type of description
+ INVALID_DESCRIPTION = `description: unquoted text\n`
+
+ // INVALID_VERSION represents a METADATA file with the wrong type of version
+ INVALID_VERSION = `third_party { version: 1 }`
+
+ // MY_LIB_1_0 represents a METADATA file for version 1.0 of mylib
+ MY_LIB_1_0 = `name: "mylib" description: "my library" third_party { version: "1.0" }`
+
+ // NO_NAME_0_1 represents a METADATA file with a description but no name
+ NO_NAME_0_1 = `description: "my library" third_party { version: "0.1" }`
+
+ // URL values per type
+ GIT_URL = "http://example.github.com/my_lib"
+ SVN_URL = "http://example.svn.com/my_lib"
+ HG_URL = "http://example.hg.com/my_lib"
+ DARCS_URL = "http://example.darcs.com/my_lib"
+ PIPER_URL = "http://google3/third_party/my/package"
+ HOMEPAGE_URL = "http://example.com/homepage"
+ OTHER_URL = "http://google.com/"
+ ARCHIVE_URL = "http://ftp.example.com/"
+ LOCAL_SOURCE_URL = "https://android.googlesource.com/platform/external/apache-http/"
+)
+
+// libWithUrl returns a METADATA file body with url entries of the given types
+func libWithUrl(urlTypes ...string) string {
+ var sb strings.Builder
+
+ fmt.Fprintln(&sb, `name: "mylib" description: "my library"
+ third_party {
+ version: "1.0"`)
+
+ for _, urltype := range urlTypes {
+ var urlValue string
+ switch urltype {
+ case "GIT":
+ urlValue = GIT_URL
+ case "SVN":
+ urlValue = SVN_URL
+ case "HG":
+ urlValue = HG_URL
+ case "DARCS":
+ urlValue = DARCS_URL
+ case "PIPER":
+ urlValue = PIPER_URL
+ case "HOMEPAGE":
+ urlValue = HOMEPAGE_URL
+ case "OTHER":
+ urlValue = OTHER_URL
+ case "ARCHIVE":
+ urlValue = ARCHIVE_URL
+ case "LOCAL_SOURCE":
+ urlValue = LOCAL_SOURCE_URL
+ default:
+ panic(fmt.Errorf("unknown url type: %q. Please update libWithUrl() in build/make/tools/compliance/projectmetadata/projectmetadata_test.go", urltype))
+ }
+ fmt.Fprintf(&sb, " url { type: %s value: %q }\n", urltype, urlValue)
+ }
+ fmt.Fprintln(&sb, `}`)
+
+ return sb.String()
+}
+
+func TestVerifyAllUrlTypes(t *testing.T) {
+ t.Run("verifyAllUrlTypes", func(t *testing.T) {
+ types := make([]string, 0, len(project_metadata_proto.URL_Type_value))
+ for t := range project_metadata_proto.URL_Type_value {
+ types = append(types, t)
+ }
+ libWithUrl(types...)
+ })
+}
+
+func TestUnknownPanics(t *testing.T) {
+ t.Run("Unknown panics", func(t *testing.T) {
+ defer func() {
+ if r := recover(); r == nil {
+ t.Errorf("unexpected success: got no error, want panic")
+ }
+ }()
+ libWithUrl("SOME WILD VALUE THAT DOES NOT EXIST")
+ })
+}
+
+func TestReadMetadataForProjects(t *testing.T) {
+ tests := []struct {
+ name string
+ fs *testfs.TestFS
+ projects []string
+ expectedError string
+ expected []pmeta
+ }{
+ {
+ name: "trivial",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte("name: \"Android\"\n"),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "Android",
+ name: "Android",
+ version: "",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "versioned",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(MY_LIB_1_0),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_homepage",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HOMEPAGE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_git",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("GIT")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_svn",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("SVN")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: SVN_URL,
+ }},
+ },
+ {
+ name: "lib_with_hg",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HG")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: HG_URL,
+ }},
+ },
+ {
+ name: "lib_with_darcs",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: DARCS_URL,
+ }},
+ },
+ {
+ name: "lib_with_piper",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("PIPER")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_other",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("OTHER")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_local_source",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("LOCAL_SOURCE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_archive",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("ARCHIVE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_all_downloads",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "HG", "SVN", "GIT")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_downloads_in_different_order",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "GIT", "SVN", "HG")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_but_git",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "HG", "SVN")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: SVN_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_but_git_and_svn",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "HG")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: HG_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_nondownloads_and_git",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HOMEPAGE", "LOCAL_SOURCE", "PIPER", "ARCHIVE", "GIT")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_nondownloads",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HOMEPAGE", "LOCAL_SOURCE", "PIPER", "ARCHIVE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_all_nondownloads",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl()),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "versioneddesc",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(NO_NAME_0_1),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "unterminated",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte("name: \"Android\n"),
+ },
+ projects: []string{"/a"},
+ expectedError: `invalid character '\n' in string`,
+ },
+ {
+ name: "abc",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(EMPTY),
+ "/b/METADATA": []byte(MY_LIB_1_0),
+ "/c/METADATA": []byte(NO_NAME_0_1),
+ },
+ projects: []string{"/a", "/b", "/c"},
+ expected: []pmeta{
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
+ },
+ },
+ {
+ name: "ab",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(EMPTY),
+ "/b/METADATA": []byte(MY_LIB_1_0),
+ },
+ projects: []string{"/a", "/b", "/c"},
+ expected: []pmeta{
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
+ },
+ },
+ {
+ name: "ac",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(EMPTY),
+ "/c/METADATA": []byte(NO_NAME_0_1),
+ },
+ projects: []string{"/a", "/b", "/c"},
+ expected: []pmeta{
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
+ },
+ },
+ {
+ name: "bc",
+ fs: &testfs.TestFS{
+ "/b/METADATA": []byte(MY_LIB_1_0),
+ "/c/METADATA": []byte(NO_NAME_0_1),
+ },
+ projects: []string{"/a", "/b", "/c"},
+ expected: []pmeta{
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
+ },
+ },
+ {
+ name: "wrongnametype",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(INVALID_NAME),
+ },
+ projects: []string{"/a"},
+ expectedError: `invalid value for string type`,
+ },
+ {
+ name: "wrongdescriptiontype",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(INVALID_DESCRIPTION),
+ },
+ projects: []string{"/a"},
+ expectedError: `invalid value for string type`,
+ },
+ {
+ name: "wrongversiontype",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(INVALID_VERSION),
+ },
+ projects: []string{"/a"},
+ expectedError: `invalid value for string type`,
+ },
+ {
+ name: "wrongtype",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(INVALID_NAME + INVALID_DESCRIPTION + INVALID_VERSION),
+ },
+ projects: []string{"/a"},
+ expectedError: `invalid value for string type`,
+ },
+ {
+ name: "empty",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(EMPTY),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "emptyother",
+ fs: &testfs.TestFS{
+ "/a/METADATA.bp": []byte(EMPTY),
+ },
+ projects: []string{"/a"},
+ },
+ {
+ name: "emptyfs",
+ fs: &testfs.TestFS{},
+ projects: []string{"/a"},
+ },
+ {
+ name: "override",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(INVALID_NAME + INVALID_DESCRIPTION + INVALID_VERSION),
+ "/a/METADATA.android": []byte(MY_LIB_1_0),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "enchilada",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(INVALID_NAME + INVALID_DESCRIPTION + INVALID_VERSION),
+ "/a/METADATA.android": []byte(EMPTY),
+ "/b/METADATA": []byte(MY_LIB_1_0),
+ "/c/METADATA": []byte(NO_NAME_0_1),
+ },
+ projects: []string{"/a", "/b", "/c"},
+ expected: []pmeta{
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ ix := NewIndex(tt.fs)
+ pms, err := ix.MetadataForProjects(tt.projects...)
+ if err != nil {
+ if len(tt.expectedError) == 0 {
+ t.Errorf("unexpected error: got %s, want no error", err)
+ } else if !strings.Contains(err.Error(), tt.expectedError) {
+ t.Errorf("unexpected error: got %s, want %q", err, tt.expectedError)
+ }
+ return
+ }
+ t.Logf("actual %d project metadata", len(pms))
+ for _, pm := range pms {
+ t.Logf(" %v", pm.String())
+ }
+ t.Logf("expected %d project metadata", len(tt.expected))
+ for _, pm := range tt.expected {
+ t.Logf(" %s", pm.String())
+ }
+ if len(tt.expectedError) > 0 {
+ t.Errorf("unexpected success: got no error, want %q err", tt.expectedError)
+ return
+ }
+ if len(pms) != len(tt.expected) {
+ t.Errorf("missing project metadata: got %d project metadata, want %d", len(pms), len(tt.expected))
+ }
+ for i := 0; i < len(pms) && i < len(tt.expected); i++ {
+ if msg := tt.expected[i].difference(pms[i]); msg != "" {
+ t.Errorf("unexpected metadata starting at index %d: %s", i, msg)
+ return
+ }
+ }
+ if len(pms) < len(tt.expected) {
+ t.Errorf("missing metadata starting at index %d: got nothing, want %s", len(pms), tt.expected[len(pms)].String())
+ }
+ if len(tt.expected) < len(pms) {
+ t.Errorf("unexpected metadata starting at index %d: got %s, want nothing", len(tt.expected), pms[len(tt.expected)].String())
+ }
+ })
+ }
+}
+
+type pmeta struct {
+ project string
+ versionedName string
+ name string
+ version string
+ downloadUrl string
+}
+
+func (pm pmeta) String() string {
+ return fmt.Sprintf("project: %q versionedName: %q name: %q version: %q downloadUrl: %q\n", pm.project, pm.versionedName, pm.name, pm.version, pm.downloadUrl)
+}
+
+func (pm pmeta) equals(other *ProjectMetadata) bool {
+ if pm.project != other.project {
+ return false
+ }
+ if pm.versionedName != other.VersionedName() {
+ return false
+ }
+ if pm.name != other.Name() {
+ return false
+ }
+ if pm.version != other.Version() {
+ return false
+ }
+ if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+ return false
+ }
+ return true
+}
+
+func (pm pmeta) difference(other *ProjectMetadata) string {
+ if pm.equals(other) {
+ return ""
+ }
+ var sb strings.Builder
+ fmt.Fprintf(&sb, "got")
+ if pm.project != other.project {
+ fmt.Fprintf(&sb, " project: %q", other.project)
+ }
+ if pm.versionedName != other.VersionedName() {
+ fmt.Fprintf(&sb, " versionedName: %q", other.VersionedName())
+ }
+ if pm.name != other.Name() {
+ fmt.Fprintf(&sb, " name: %q", other.Name())
+ }
+ if pm.version != other.Version() {
+ fmt.Fprintf(&sb, " version: %q", other.Version())
+ }
+ if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+ fmt.Fprintf(&sb, " downloadUrl: %q", other.UrlsByTypeName().DownloadUrl())
+ }
+ fmt.Fprintf(&sb, ", want")
+ if pm.project != other.project {
+ fmt.Fprintf(&sb, " project: %q", pm.project)
+ }
+ if pm.versionedName != other.VersionedName() {
+ fmt.Fprintf(&sb, " versionedName: %q", pm.versionedName)
+ }
+ if pm.name != other.Name() {
+ fmt.Fprintf(&sb, " name: %q", pm.name)
+ }
+ if pm.version != other.Version() {
+ fmt.Fprintf(&sb, " version: %q", pm.version)
+ }
+ if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+ fmt.Fprintf(&sb, " downloadUrl: %q", pm.downloadUrl)
+ }
+ return sb.String()
+}
diff --git a/tools/compliance/readgraph.go b/tools/compliance/readgraph.go
index 7516440..bf364e6 100644
--- a/tools/compliance/readgraph.go
+++ b/tools/compliance/readgraph.go
@@ -34,10 +34,17 @@
type globalFS struct{}
+var _ fs.FS = globalFS{}
+var _ fs.StatFS = globalFS{}
+
func (s globalFS) Open(name string) (fs.File, error) {
return os.Open(name)
}
+func (s globalFS) Stat(name string) (fs.FileInfo, error) {
+ return os.Stat(name)
+}
+
var FS globalFS
// GetFS returns a filesystem for accessing files under the OUT_DIR environment variable.
@@ -198,6 +205,9 @@
// resolution identifies the set of conditions resolved by acting on the target node.
resolution LicenseConditionSet
+
+ // pure indicates whether to treat the node as a pure aggregate (no internal linkage)
+ pure bool
}
// addDependencies converts the proto AnnotatedDependencies into `edges`
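The two `var _` declarations added here are the usual Go compile-time assertion that a type implements an interface: they cost nothing at runtime but fail the build if a required method disappears. The pattern in isolation, with a made-up type:

```go
package main

import "io/fs"

type memFS struct{}

func (memFS) Open(name string) (fs.File, error)     { return nil, fs.ErrNotExist }
func (memFS) Stat(name string) (fs.FileInfo, error) { return nil, fs.ErrNotExist }

// The build breaks here if memFS ever stops satisfying either interface.
var _ fs.FS = memFS{}
var _ fs.StatFS = memFS{}

func main() {}
```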
diff --git a/tools/compliance/readgraph_test.go b/tools/compliance/readgraph_test.go
index bcf9f39..a2fb04d 100644
--- a/tools/compliance/readgraph_test.go
+++ b/tools/compliance/readgraph_test.go
@@ -19,12 +19,14 @@
"sort"
"strings"
"testing"
+
+ "android/soong/tools/compliance/testfs"
)
func TestReadLicenseGraph(t *testing.T) {
tests := []struct {
name string
- fs *testFS
+ fs *testfs.TestFS
roots []string
expectedError string
expectedEdges []edge
@@ -32,7 +34,7 @@
}{
{
name: "trivial",
- fs: &testFS{
+ fs: &testfs.TestFS{
"app.meta_lic": []byte("package_name: \"Android\"\n"),
},
roots: []string{"app.meta_lic"},
@@ -41,7 +43,7 @@
},
{
name: "unterminated",
- fs: &testFS{
+ fs: &testfs.TestFS{
"app.meta_lic": []byte("package_name: \"Android\n"),
},
roots: []string{"app.meta_lic"},
@@ -49,7 +51,7 @@
},
{
name: "danglingref",
- fs: &testFS{
+ fs: &testfs.TestFS{
"app.meta_lic": []byte(AOSP + "deps: {\n file: \"lib.meta_lic\"\n}\n"),
},
roots: []string{"app.meta_lic"},
@@ -57,7 +59,7 @@
},
{
name: "singleedge",
- fs: &testFS{
+ fs: &testfs.TestFS{
"app.meta_lic": []byte(AOSP + "deps: {\n file: \"lib.meta_lic\"\n}\n"),
"lib.meta_lic": []byte(AOSP),
},
@@ -67,7 +69,7 @@
},
{
name: "fullgraph",
- fs: &testFS{
+ fs: &testfs.TestFS{
"apex.meta_lic": []byte(AOSP + "deps: {\n file: \"app.meta_lic\"\n}\ndeps: {\n file: \"bin.meta_lic\"\n}\n"),
"app.meta_lic": []byte(AOSP),
"bin.meta_lic": []byte(AOSP + "deps: {\n file: \"lib.meta_lic\"\n}\n"),
diff --git a/tools/compliance/resolutionset.go b/tools/compliance/resolutionset.go
index 7c8f333..1be4a34 100644
--- a/tools/compliance/resolutionset.go
+++ b/tools/compliance/resolutionset.go
@@ -72,6 +72,16 @@
return isPresent
}
+// IsPureAggregate returns true if `target`, which must be in
+// `AttachesTo()`, resolves to a pure aggregate in the resolution.
+func (rs ResolutionSet) IsPureAggregate(target *TargetNode) bool {
+ _, isPresent := rs[target]
+ if !isPresent {
+ panic(fmt.Errorf("ResolutionSet.IsPureAggregate(%s): not attached to %s", target.Name(), target.Name()))
+ }
+ return target.pure
+}
+
// Resolutions returns the list of resolutions that `attachedTo`
// target must resolve. Returns empty list if no conditions apply.
func (rs ResolutionSet) Resolutions(attachesTo *TargetNode) ResolutionList {
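Because `IsPureAggregate` panics for a target that is not attached, callers would normally iterate only over attached targets. A sketch, assuming the set's attached targets are available via `AttachesTo()` as elsewhere in this package:

```go
for _, target := range rs.AttachesTo() {
	if rs.IsPureAggregate(target) {
		// Pure aggregates act only as "a distribution medium".
		fmt.Printf("%s is a pure aggregate\n", target.Name())
	}
}
```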
diff --git a/tools/compliance/test_util.go b/tools/compliance/test_util.go
index 26d7461..db711a7 100644
--- a/tools/compliance/test_util.go
+++ b/tools/compliance/test_util.go
@@ -17,10 +17,11 @@
import (
"fmt"
"io"
- "io/fs"
"sort"
"strings"
"testing"
+
+ "android/soong/tools/compliance/testfs"
)
const (
@@ -42,7 +43,7 @@
Classpath = `` +
`package_name: "Free Software"
license_kinds: "SPDX-license-identifier-GPL-2.0-with-classpath-exception"
-license_conditions: "restricted"
+license_conditions: "permissive"
`
// DependentModule starts a test metadata file for a module in the same package as `Classpath`.
@@ -56,7 +57,7 @@
LGPL = `` +
`package_name: "Free Library"
license_kinds: "SPDX-license-identifier-LGPL-2.0"
-license_conditions: "restricted"
+license_conditions: "restricted_allows_dynamic_linking"
`
// MPL starts a test metadata file for a module with MPL 2.0 reciprocal licensing.
@@ -145,51 +146,6 @@
return cs
}
-// testFS implements a test file system (fs.FS) simulated by a map from filename to []byte content.
-type testFS map[string][]byte
-
-// Open implements fs.FS.Open() to open a file based on the filename.
-func (fs *testFS) Open(name string) (fs.File, error) {
- if _, ok := (*fs)[name]; !ok {
- return nil, fmt.Errorf("unknown file %q", name)
- }
- return &testFile{fs, name, 0}, nil
-}
-
-// testFile implements a test file (fs.File) based on testFS above.
-type testFile struct {
- fs *testFS
- name string
- posn int
-}
-
-// Stat not implemented to obviate implementing fs.FileInfo.
-func (f *testFile) Stat() (fs.FileInfo, error) {
- return nil, fmt.Errorf("unimplemented")
-}
-
-// Read copies bytes from the testFS map.
-func (f *testFile) Read(b []byte) (int, error) {
- if f.posn < 0 {
- return 0, fmt.Errorf("file not open: %q", f.name)
- }
- if f.posn >= len((*f.fs)[f.name]) {
- return 0, io.EOF
- }
- n := copy(b, (*f.fs)[f.name][f.posn:])
- f.posn += n
- return n, nil
-}
-
-// Close marks the testFile as no longer in use.
-func (f *testFile) Close() error {
- if f.posn < 0 {
- return fmt.Errorf("file already closed: %q", f.name)
- }
- f.posn = -1
- return nil
-}
-
// edge describes test data edges to define test graphs.
type edge struct {
target, dep string
@@ -268,7 +224,7 @@
deps[edge.dep] = []annotated{}
}
}
- fs := make(testFS)
+ fs := make(testfs.TestFS)
for file, edges := range deps {
body := meta[file]
for _, edge := range edges {
@@ -521,7 +477,7 @@
expectedConditions := expectedRl[i].Resolves()
actualConditions := actualRl[i].Resolves()
if expectedConditions != actualConditions {
- t.Errorf("unexpected conditions apply to %q acting on %q: got %04x with names %s, want %04x with names %s",
+ t.Errorf("unexpected conditions apply to %q acting on %q: got %#v with names %s, want %#v with names %s",
target.name, expectedRl[i].actsOn.name,
actualConditions, actualConditions.Names(),
expectedConditions, expectedConditions.Names())
@@ -586,7 +542,7 @@
expectedConditions := expectedRl[i].Resolves()
actualConditions := actualRl[i].Resolves()
if expectedConditions != (expectedConditions & actualConditions) {
- t.Errorf("expected conditions missing from %q acting on %q: got %04x with names %s, want %04x with names %s",
+ t.Errorf("expected conditions missing from %q acting on %q: got %#v with names %s, want %#v with names %s",
target.name, expectedRl[i].actsOn.name,
actualConditions, actualConditions.Names(),
expectedConditions, expectedConditions.Names())
diff --git a/tools/compliance/testfs/Android.bp b/tools/compliance/testfs/Android.bp
new file mode 100644
index 0000000..6baaf18
--- /dev/null
+++ b/tools/compliance/testfs/Android.bp
@@ -0,0 +1,25 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+bootstrap_go_package {
+ name: "compliance-test-fs-module",
+ srcs: [
+ "testfs.go",
+ ],
+ pkgPath: "android/soong/tools/compliance/testfs",
+}
diff --git a/tools/compliance/testfs/testfs.go b/tools/compliance/testfs/testfs.go
new file mode 100644
index 0000000..2c75c5b
--- /dev/null
+++ b/tools/compliance/testfs/testfs.go
@@ -0,0 +1,129 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package testfs
+
+import (
+ "fmt"
+ "io"
+ "io/fs"
+ "strings"
+ "time"
+)
+
+// TestFS implements a test file system (fs.FS) simulated by a map from filename to []byte content.
+type TestFS map[string][]byte
+
+var _ fs.FS = (*TestFS)(nil)
+var _ fs.StatFS = (*TestFS)(nil)
+
+// Open implements fs.FS.Open() to open a file based on the filename.
+func (tfs *TestFS) Open(name string) (fs.File, error) {
+ if _, ok := (*tfs)[name]; !ok {
+ return nil, fmt.Errorf("unknown file %q", name)
+ }
+ return &TestFile{tfs, name, 0}, nil
+}
+
+// Stat implements fs.StatFS.Stat() to examine a file based on the filename.
+func (tfs *TestFS) Stat(name string) (fs.FileInfo, error) {
+ if content, ok := (*tfs)[name]; ok {
+ return &TestFileInfo{name, len(content), 0666}, nil
+ }
+ dirname := name
+ if !strings.HasSuffix(dirname, "/") {
+ dirname = dirname + "/"
+ }
+ for name := range (*tfs) {
+ if strings.HasPrefix(name, dirname) {
+ return &TestFileInfo{name, 8, fs.ModeDir | fs.ModePerm}, nil
+ }
+ }
+ return nil, fmt.Errorf("file not found: %q", name)
+}
+
+// TestFileInfo implements a file info (fs.FileInfo) based on TestFS above.
+type TestFileInfo struct {
+ name string
+ size int
+ mode fs.FileMode
+}
+
+var _ fs.FileInfo = (*TestFileInfo)(nil)
+
+// Name returns the name of the file
+func (fi *TestFileInfo) Name() string {
+ return fi.name
+}
+
+// Size returns the size of the file in bytes.
+func (fi *TestFileInfo) Size() int64 {
+ return int64(fi.size)
+}
+
+// Mode returns the fs.FileMode bits.
+func (fi *TestFileInfo) Mode() fs.FileMode {
+ return fi.mode
+}
+
+// ModTime fakes a modification time.
+func (fi *TestFileInfo) ModTime() time.Time {
+ return time.UnixMicro(0xb0bb)
+}
+
+// IsDir is a synonym for Mode().IsDir()
+func (fi *TestFileInfo) IsDir() bool {
+ return fi.mode.IsDir()
+}
+
+// Sys is unused and returns nil.
+func (fi *TestFileInfo) Sys() any {
+ return nil
+}
+
+// TestFile implements a test file (fs.File) based on TestFS above.
+type TestFile struct {
+ fs *TestFS
+ name string
+ posn int
+}
+
+var _ fs.File = (*TestFile)(nil)
+
+// Stat implements fs.File.Stat() by delegating to the file system's Stat() for the file's name.
+func (f *TestFile) Stat() (fs.FileInfo, error) {
+ return f.fs.Stat(f.name)
+}
+
+// Read copies bytes from the TestFS map.
+func (f *TestFile) Read(b []byte) (int, error) {
+ if f.posn < 0 {
+ return 0, fmt.Errorf("file not open: %q", f.name)
+ }
+ if f.posn >= len((*f.fs)[f.name]) {
+ return 0, io.EOF
+ }
+ n := copy(b, (*f.fs)[f.name][f.posn:])
+ f.posn += n
+ return n, nil
+}
+
+// Close marks the TestFile as no longer in use.
+func (f *TestFile) Close() error {
+ if f.posn < 0 {
+ return fmt.Errorf("file already closed: %q", f.name)
+ }
+ f.posn = -1
+ return nil
+}
diff --git a/tools/event_log_tags.bzl b/tools/event_log_tags.bzl
deleted file mode 100644
index 35305ae..0000000
--- a/tools/event_log_tags.bzl
+++ /dev/null
@@ -1,35 +0,0 @@
-"""Event log tags generation rule"""
-
-load("@bazel_skylib//lib:paths.bzl", "paths")
-
-def _event_log_tags_impl(ctx):
- out_files = []
- for logtag_file in ctx.files.srcs:
- out_filename = paths.replace_extension(logtag_file.basename, ".java")
- out_file = ctx.actions.declare_file(out_filename)
- out_files.append(out_file)
- ctx.actions.run(
- inputs = [logtag_file],
- outputs = [out_file],
- arguments = [
- "-o",
- out_file.path,
- logtag_file.path,
- ],
- progress_message = "Generating Java logtag file from %s" % logtag_file.short_path,
- executable = ctx.executable._logtag_to_java_tool,
- )
- return [DefaultInfo(files = depset(out_files))]
-
-event_log_tags = rule(
- implementation = _event_log_tags_impl,
- attrs = {
- "srcs": attr.label_list(allow_files = [".logtags"], mandatory = True),
- "_logtag_to_java_tool": attr.label(
- executable = True,
- cfg = "exec",
- allow_files = True,
- default = Label("//build/make/tools:java-event-log-tags"),
- ),
- },
-)
diff --git a/tools/fileslist_util.py b/tools/fileslist_util.py
index ff40d51..a1b1197 100755
--- a/tools/fileslist_util.py
+++ b/tools/fileslist_util.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2016 The Android Open Source Project
#
@@ -15,7 +15,9 @@
# limitations under the License.
#
-import getopt, json, sys
+import argparse
+import json
+import sys
def PrintFileNames(path):
with open(path) as jf:
@@ -27,42 +29,25 @@
with open(path) as jf:
data = json.load(jf)
for line in data:
- print "{0:12d} {1}".format(line["Size"], line["Name"])
+ print(f"{line['Size']:12d} {line['Name']}")
-def PrintUsage(name):
- print("""
-Usage: %s -[nc] json_files_list
- -n produces list of files only
- -c produces classic installed-files.txt
-""" % (name))
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-n", action="store_true",
+ help="produces list of files only")
+ parser.add_argument("-c", action="store_true",
+ help="produces classic installed-files.txt")
+ parser.add_argument("json_files_list")
+ args = parser.parse_args()
-def main(argv):
- try:
- opts, args = getopt.getopt(argv[1:], "nc", "")
- except getopt.GetoptError, err:
- print(err)
- PrintUsage(argv[0])
- sys.exit(2)
-
- if len(opts) == 0:
- print("No conversion option specified")
- PrintUsage(argv[0])
- sys.exit(2)
-
- if len(args) == 0:
- print("No input file specified")
- PrintUsage(argv[0])
- sys.exit(2)
-
- for o, a in opts:
- if o == ("-n"):
- PrintFileNames(args[0])
- sys.exit()
- elif o == ("-c"):
- PrintCanonicalList(args[0])
- sys.exit()
- else:
- assert False, "Unsupported option"
+ if args.n and args.c:
+ sys.exit("Cannot specify both -n and -c")
+ elif args.n:
+ PrintFileNames(args.json_files_list)
+ elif args.c:
+ PrintCanonicalList(args.json_files_list)
+ else:
+ sys.exit("No conversion option specified")
if __name__ == '__main__':
- main(sys.argv)
+ main()
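Editorial note on the `fileslist_util.py` rewrite above: the new `main()` rejects `-n -c` by hand. As a side note (not part of this change), `argparse` can enforce the same exclusivity declaratively with `add_mutually_exclusive_group`; a minimal sketch:

```python
import argparse

parser = argparse.ArgumentParser()
# required=True also subsumes the "No conversion option specified" check.
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-n", action="store_true",
                   help="produces list of files only")
group.add_argument("-c", action="store_true",
                   help="produces classic installed-files.txt")
parser.add_argument("json_files_list")

args = parser.parse_args(["-c", "installed-files.json"])
print(args.n, args.c, args.json_files_list)  # False True installed-files.json
```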
diff --git a/tools/findleaves.py b/tools/findleaves.py
index 97302e9..86f3f3a 100755
--- a/tools/findleaves.py
+++ b/tools/findleaves.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2009 The Android Open Source Project
#
@@ -121,7 +121,7 @@
results = list(set(perform_find(mindepth, prune, dirlist, filenames)))
results.sort()
for r in results:
- print r
+ print(r)
if __name__ == "__main__":
main(sys.argv)
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 8891a0a..55fdca4 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -40,14 +40,28 @@
cflags: ["-Werror"],
}
+python_binary_host {
+ name: "fs_config_generator",
+ srcs: ["fs_config_generator.py"],
+}
+
+python_test_host {
+ name: "test_fs_config_generator",
+ main: "test_fs_config_generator.py",
+ srcs: [
+ "test_fs_config_generator.py",
+ "fs_config_generator.py",
+ ],
+}
+
target_fs_config_gen_filegroup {
name: "target_fs_config_gen",
}
genrule {
name: "oemaids_header_gen",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) oemaid --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) oemaid --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -67,8 +81,8 @@
// TARGET_FS_CONFIG_GEN files.
genrule {
name: "passwd_gen_system",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) passwd --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) passwd --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -84,8 +98,8 @@
genrule {
name: "passwd_gen_vendor",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) passwd --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) passwd --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -102,8 +116,8 @@
genrule {
name: "passwd_gen_odm",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) passwd --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) passwd --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -120,8 +134,8 @@
genrule {
name: "passwd_gen_product",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) passwd --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) passwd --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -138,8 +152,8 @@
genrule {
name: "passwd_gen_system_ext",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) passwd --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) passwd --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -159,8 +173,8 @@
// TARGET_FS_CONFIG_GEN files.
genrule {
name: "group_gen_system",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) group --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) group --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -176,8 +190,8 @@
genrule {
name: "group_gen_vendor",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) group --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) group --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -194,8 +208,8 @@
genrule {
name: "group_gen_odm",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) group --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) group --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -212,8 +226,8 @@
genrule {
name: "group_gen_product",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) group --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) group --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
@@ -230,8 +244,8 @@
genrule {
name: "group_gen_system_ext",
- tool_files: ["fs_config_generator.py"],
- cmd: "$(location fs_config_generator.py) group --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+ tools: ["fs_config_generator"],
+ cmd: "$(location fs_config_generator) group --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
srcs: [
":target_fs_config_gen",
":android_filesystem_config_header",
diff --git a/tools/fs_config/README.md b/tools/fs_config/README.md
index bad5e10..62d6d1e 100644
--- a/tools/fs_config/README.md
+++ b/tools/fs_config/README.md
@@ -69,13 +69,13 @@
From within the `fs_config` directory, unit tests can be executed like so:
- $ python -m unittest test_fs_config_generator.Tests
- .............
+ $ python test_fs_config_generator.py
+ ................
----------------------------------------------------------------------
- Ran 13 tests in 0.004s
-
+ Ran 16 tests in 0.004s
OK
+
One could also use nose if they would like:
$ nose2
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index 098fde6..44480b8 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
"""Generates config files for Android file system properties.
This script is used for generating configuration files for configuring
@@ -11,7 +11,7 @@
"""
import argparse
-import ConfigParser
+import configparser
import ctypes
import re
import sys
@@ -179,6 +179,10 @@
and self.normalized_value == other.normalized_value \
and self.login_shell == other.login_shell
+ def __repr__(self):
+ return "AID { identifier = %s, value = %s, normalized_value = %s, login_shell = %s }" % (
+ self.identifier, self.value, self.normalized_value, self.login_shell)
+
@staticmethod
def is_friendly(name):
"""Determines if an AID is a freindly name or C define.
@@ -312,7 +316,7 @@
]
_AID_DEFINE = re.compile(r'\s*#define\s+%s.*' % AID.PREFIX)
_RESERVED_RANGE = re.compile(
- r'#define AID_(.+)_RESERVED_\d*_*(START|END)\s+(\d+)')
+ r'#define AID_(.+)_RESERVED_(?:(\d+)_)?(START|END)\s+(\d+)')
# AID lines cannot end with _START or _END, i.e. AID_FOO is OK
# but AID_FOO_START is skipped. Note that AID_FOOSTART is NOT skipped.
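To illustrate the widened `_RESERVED_RANGE` pattern above: the new optional `(?:(\d+)_)?` group captures a numbered range, so a header may declare several reserved ranges per partition, while unnamed ranges yield `None` for that group (filed under `"unnamed"` by the parser). A quick sketch with hypothetical `#define` lines:

```python
import re

_RESERVED_RANGE = re.compile(
    r'#define AID_(.+)_RESERVED_(?:(\d+)_)?(START|END)\s+(\d+)')

# Unnamed range: the optional group is None.
m = _RESERVED_RANGE.match('#define AID_OEM_RESERVED_START 2900')
print(m.groups())  # ('OEM', None, 'START', '2900')

# Numbered range: the optional group carries the range name.
m = _RESERVED_RANGE.match('#define AID_OEM_RESERVED_2_END 2999')
print(m.groups())  # ('OEM', '2', 'END', '2999')
```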
@@ -345,6 +349,7 @@
aid_file (file): The open AID header file to parse.
"""
+ ranges_by_name = {}
for lineno, line in enumerate(aid_file):
def error_message(msg):
@@ -355,20 +360,24 @@
range_match = self._RESERVED_RANGE.match(line)
if range_match:
- partition = range_match.group(1).lower()
- value = int(range_match.group(3), 0)
+ partition, name, start, value = range_match.groups()
+ partition = partition.lower()
+ if name is None:
+ name = "unnamed"
+ start = start == "START"
+ value = int(value, 0)
if partition == 'oem':
partition = 'vendor'
- if partition in self._ranges:
- if isinstance(self._ranges[partition][-1], int):
- self._ranges[partition][-1] = (
- self._ranges[partition][-1], value)
- else:
- self._ranges[partition].append(value)
- else:
- self._ranges[partition] = [value]
+ if partition not in ranges_by_name:
+ ranges_by_name[partition] = {}
+ if name not in ranges_by_name[partition]:
+ ranges_by_name[partition][name] = [None, None]
+ if ranges_by_name[partition][name][0 if start else 1] is not None:
+ sys.exit(error_message("{} of range {} of partition {} was already defined".format(
+ "Start" if start else "End", name, partition)))
+ ranges_by_name[partition][name][0 if start else 1] = value
if AIDHeaderParser._AID_DEFINE.match(line):
chunks = line.split()
@@ -390,6 +399,21 @@
error_message('{} for "{}"'.format(
exception, identifier)))
+ for partition in ranges_by_name:
+ for name in ranges_by_name[partition]:
+ start = ranges_by_name[partition][name][0]
+ end = ranges_by_name[partition][name][1]
+ if start is None:
+ sys.exit("Range '%s' for partition '%s' had undefined start" % (name, partition))
+ if end is None:
+ sys.exit("Range '%s' for partition '%s' had undefined end" % (name, partition))
+ if start > end:
+ sys.exit("Range '%s' for partition '%s' had start after end. Start: %d, end: %d" % (name, partition, start, end))
+
+ if partition not in self._ranges:
+ self._ranges[partition] = []
+ self._ranges[partition].append((start, end))
+
def _handle_aid(self, identifier, value):
"""Handle an AID C #define.
@@ -439,7 +463,7 @@
# No core AIDs should be within any oem range.
for aid in self._aid_value_to_name:
for ranges in self._ranges.values():
- if Utils.in_any_range(aid, ranges):
+ if Utils.in_any_range(int(aid, 0), ranges):
name = self._aid_value_to_name[aid]
raise ValueError(
'AID "%s" value: %u within reserved OEM Range: "%s"' %
@@ -545,7 +569,7 @@
# override previous
# sections.
- config = ConfigParser.ConfigParser()
+ config = configparser.ConfigParser()
config.read(file_name)
for section in config.sections():
@@ -589,7 +613,7 @@
ranges = None
- partitions = self._ranges.keys()
+ partitions = list(self._ranges.keys())
partitions.sort(key=len, reverse=True)
for partition in partitions:
if aid.friendly.startswith(partition):
@@ -1049,7 +1073,7 @@
user_binary = bytearray(ctypes.c_uint16(int(user, 0)))
group_binary = bytearray(ctypes.c_uint16(int(group, 0)))
caps_binary = bytearray(ctypes.c_uint64(caps_value))
- path_binary = ctypes.create_string_buffer(path,
+ path_binary = ctypes.create_string_buffer(path.encode(),
path_length_aligned_64).raw
out_file.write(length_binary)
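The `path.encode()` change above reflects a Python 3 requirement: `ctypes.create_string_buffer` accepts only `bytes` when given an initializer together with a size. A small sketch with a hypothetical path:

```python
import ctypes

path = "/system/bin/sh"  # hypothetical entry
# Python 3: the initializer must be bytes, hence .encode(); the buffer is
# null-padded out to the requested (aligned) length.
path_binary = ctypes.create_string_buffer(path.encode(), 32).raw
print(len(path_binary), path_binary[:15])  # 32 b'/system/bin/sh\x00'
```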
@@ -1145,21 +1169,21 @@
hdr = AIDHeaderParser(args['hdrfile'])
max_name_length = max(len(aid.friendly) + 1 for aid in hdr.aids)
- print AIDArrayGen._GENERATED
- print
- print AIDArrayGen._INCLUDE
- print
- print AIDArrayGen._STRUCT_FS_CONFIG % max_name_length
- print
- print AIDArrayGen._OPEN_ID_ARRAY
+ print(AIDArrayGen._GENERATED)
+ print()
+ print(AIDArrayGen._INCLUDE)
+ print()
+ print(AIDArrayGen._STRUCT_FS_CONFIG % max_name_length)
+ print()
+ print(AIDArrayGen._OPEN_ID_ARRAY)
for aid in hdr.aids:
- print AIDArrayGen._ID_ENTRY % (aid.friendly, aid.identifier)
+ print(AIDArrayGen._ID_ENTRY % (aid.friendly, aid.identifier))
- print AIDArrayGen._CLOSE_FILE_STRUCT
- print
- print AIDArrayGen._COUNT
- print
+ print(AIDArrayGen._CLOSE_FILE_STRUCT)
+ print()
+ print(AIDArrayGen._COUNT)
+ print()
@generator('oemaid')
@@ -1201,15 +1225,15 @@
parser = FSConfigFileParser(args['fsconfig'], hdr_parser.ranges)
- print OEMAidGen._GENERATED
+ print(OEMAidGen._GENERATED)
- print OEMAidGen._FILE_IFNDEF_DEFINE
+ print(OEMAidGen._FILE_IFNDEF_DEFINE)
for aid in parser.aids:
self._print_aid(aid)
- print
+ print()
- print OEMAidGen._FILE_ENDIF
+ print(OEMAidGen._FILE_ENDIF)
def _print_aid(self, aid):
"""Prints a valid #define AID identifier to stdout.
@@ -1221,10 +1245,10 @@
# print the source file location of the AID
found_file = aid.found
if found_file != self._old_file:
- print OEMAidGen._FILE_COMMENT % found_file
+ print(OEMAidGen._FILE_COMMENT % found_file)
self._old_file = found_file
- print OEMAidGen._GENERIC_DEFINE % (aid.identifier, aid.value)
+ print(OEMAidGen._GENERIC_DEFINE % (aid.identifier, aid.value))
@generator('passwd')
@@ -1268,7 +1292,7 @@
return
aids_by_partition = {}
- partitions = hdr_parser.ranges.keys()
+ partitions = list(hdr_parser.ranges.keys())
partitions.sort(key=len, reverse=True)
for aid in aids:
@@ -1307,7 +1331,7 @@
except ValueError as exception:
sys.exit(exception)
- print "%s::%s:%s::/:%s" % (logon, uid, uid, aid.login_shell)
+ print("%s::%s:%s::/:%s" % (logon, uid, uid, aid.login_shell))
@generator('group')
@@ -1332,7 +1356,7 @@
except ValueError as exception:
sys.exit(exception)
- print "%s::%s:" % (logon, uid)
+ print("%s::%s:" % (logon, uid))
@generator('print')
@@ -1355,7 +1379,7 @@
aids.sort(key=lambda item: int(item.normalized_value))
for aid in aids:
- print '%s %s' % (aid.identifier, aid.normalized_value)
+ print('%s %s' % (aid.identifier, aid.normalized_value))
def main():
@@ -1369,7 +1393,7 @@
gens = generator.get()
# for each gen, instantiate and add them as an option
- for name, gen in gens.iteritems():
+ for name, gen in gens.items():
generator_option_parser = subparser.add_parser(name, help=gen.__doc__)
generator_option_parser.set_defaults(which=name)
diff --git a/tools/fs_config/test_fs_config_generator.py b/tools/fs_config/test_fs_config_generator.py
index b7f173e..cbf46a1 100755
--- a/tools/fs_config/test_fs_config_generator.py
+++ b/tools/fs_config/test_fs_config_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
"""Unit test suite for the fs_config_genertor.py tool."""
import tempfile
@@ -64,7 +64,7 @@
def test_aid_header_parser_good(self):
"""Test AID Header Parser good input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO 1000
@@ -78,11 +78,11 @@
temp_file.flush()
parser = AIDHeaderParser(temp_file.name)
- oem_ranges = parser.oem_ranges
+ ranges = parser.ranges
aids = parser.aids
- self.assertTrue((2900, 2999) in oem_ranges)
- self.assertFalse((5000, 6000) in oem_ranges)
+ self.assertTrue((2900, 2999) in ranges["vendor"])
+ self.assertFalse((5000, 6000) in ranges["vendor"])
for aid in aids:
self.assertTrue(aid.normalized_value in ['1000', '1001'])
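The recurring `mode='w'` additions throughout this file address a Python 3 behavior: `NamedTemporaryFile` defaults to binary mode (`w+b`), so writing a `str` raises `TypeError`. A minimal sketch:

```python
import tempfile

with tempfile.NamedTemporaryFile() as f:
    try:
        f.write("#define AID_FOO 1000\n")  # str into a binary-mode file
    except TypeError as e:
        print(e)  # a bytes-like object is required, not 'str'

with tempfile.NamedTemporaryFile(mode='w') as f:
    f.write("#define AID_FOO 1000\n")  # text mode accepts str
    f.flush()
```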
@@ -91,7 +91,7 @@
def test_aid_header_parser_good_unordered(self):
"""Test AID Header Parser good unordered input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO 1000
@@ -105,11 +105,11 @@
temp_file.flush()
parser = AIDHeaderParser(temp_file.name)
- oem_ranges = parser.oem_ranges
+ ranges = parser.ranges
aids = parser.aids
- self.assertTrue((2900, 2999) in oem_ranges)
- self.assertFalse((5000, 6000) in oem_ranges)
+ self.assertTrue((2900, 2999) in ranges["vendor"])
+ self.assertFalse((5000, 6000) in ranges["vendor"])
for aid in aids:
self.assertTrue(aid.normalized_value in ['1000', '1001'])
@@ -118,7 +118,7 @@
def test_aid_header_parser_bad_aid(self):
"""Test AID Header Parser bad aid input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO "bad"
@@ -131,7 +131,7 @@
def test_aid_header_parser_bad_oem_range(self):
"""Test AID Header Parser bad oem range input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_START 2900
@@ -145,7 +145,7 @@
def test_aid_header_parser_bad_oem_range_no_end(self):
"""Test AID Header Parser bad oem range (no end) input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_START 2900
@@ -158,7 +158,7 @@
def test_aid_header_parser_bad_oem_range_no_start(self):
"""Test AID Header Parser bad oem range (no start) input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_END 2900
@@ -168,10 +168,26 @@
with self.assertRaises(SystemExit):
AIDHeaderParser(temp_file.name)
+ def test_aid_header_parser_bad_oem_range_duplicated(self):
+ """Test AID Header Parser bad oem range (no start) input file"""
+
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
+ temp_file.write(
+ textwrap.dedent("""
+ #define AID_OEM_RESERVED_START 2000
+ #define AID_OEM_RESERVED_END 2900
+ #define AID_OEM_RESERVED_START 3000
+ #define AID_OEM_RESERVED_END 3900
+ """))
+ temp_file.flush()
+
+ with self.assertRaises(SystemExit):
+ AIDHeaderParser(temp_file.name)
+
def test_aid_header_parser_bad_oem_range_mismatch_start_end(self):
"""Test AID Header Parser bad oem range mismatched input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_START 2900
@@ -185,7 +201,7 @@
def test_aid_header_parser_bad_duplicate_ranges(self):
"""Test AID Header Parser exits cleanly on duplicate AIDs"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO 100
@@ -206,7 +222,7 @@
- https://android-review.googlesource.com/#/c/313169
"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_APP 10000 /* TODO: switch users over to AID_APP_START */
@@ -241,7 +257,7 @@
def test_fs_config_file_parser_good(self):
"""Test FSConfig Parser good input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
[/system/bin/file]
@@ -262,7 +278,7 @@
"""))
temp_file.flush()
- parser = FSConfigFileParser([temp_file.name], [(5000, 5999)])
+ parser = FSConfigFileParser([temp_file.name], {"oem1": [(5000, 5999)]})
files = parser.files
dirs = parser.dirs
aids = parser.aids
@@ -284,12 +300,12 @@
FSConfig('0777', 'AID_FOO', 'AID_SYSTEM', '0',
'/vendor/path/dir/', temp_file.name))
- self.assertEqual(aid, AID('AID_OEM1', '0x1389', temp_file.name, '/vendor/bin/sh'))
+ self.assertEqual(aid, AID('AID_OEM1', '0x1389', temp_file.name, '/bin/sh'))
def test_fs_config_file_parser_bad(self):
"""Test FSConfig Parser bad input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
[/system/bin/file]
@@ -298,12 +314,12 @@
temp_file.flush()
with self.assertRaises(SystemExit):
- FSConfigFileParser([temp_file.name], [(5000, 5999)])
+ FSConfigFileParser([temp_file.name], {})
def test_fs_config_file_parser_bad_aid_range(self):
"""Test FSConfig Parser bad aid range value input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
[AID_OEM1]
@@ -312,4 +328,7 @@
temp_file.flush()
with self.assertRaises(SystemExit):
- FSConfigFileParser([temp_file.name], [(5000, 5999)])
+ FSConfigFileParser([temp_file.name], {"oem1": [(5000, 5999)]})
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tools/java-event-log-tags.py b/tools/java-event-log-tags.py
index 4bd6d2b..bbd65fa 100755
--- a/tools/java-event-log-tags.py
+++ b/tools/java-event-log-tags.py
@@ -100,7 +100,8 @@
" * Source file: %s\n"
" */\n\n" % (fn,))
-buffer.write("package %s;\n\n" % (tagfile.options["java_package"][0],))
+# .rstrip(";") to avoid an empty top-level statement errorprone error
+buffer.write("package %s;\n\n" % (tagfile.options["java_package"][0].rstrip(";"),))
basename, _ = os.path.splitext(os.path.basename(fn))
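To see why the `.rstrip(";")` guard above helps, consider a hypothetical `java_package` option value carrying a stray trailing semicolon; without the strip, the emitted line would end in `;;`, and errorprone flags the second `;` as an empty top-level statement:

```python
java_package = "com.example.logtags;"  # hypothetical malformed option value
print("package %s;" % java_package)              # package com.example.logtags;;
print("package %s;" % java_package.rstrip(";"))  # package com.example.logtags;
```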
diff --git a/tools/java-layers.py b/tools/java-layers.py
deleted file mode 100755
index b3aec2b..0000000
--- a/tools/java-layers.py
+++ /dev/null
@@ -1,257 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import re
-import sys
-
-def fail_with_usage():
- sys.stderr.write("usage: java-layers.py DEPENDENCY_FILE SOURCE_DIRECTORIES...\n")
- sys.stderr.write("\n")
- sys.stderr.write("Enforces layering between java packages. Scans\n")
- sys.stderr.write("DIRECTORY and prints errors when the packages violate\n")
- sys.stderr.write("the rules defined in the DEPENDENCY_FILE.\n")
- sys.stderr.write("\n")
- sys.stderr.write("Prints a warning when an unknown package is encountered\n")
- sys.stderr.write("on the assumption that it should fit somewhere into the\n")
- sys.stderr.write("layering.\n")
- sys.stderr.write("\n")
- sys.stderr.write("DEPENDENCY_FILE format\n")
- sys.stderr.write(" - # starts comment\n")
- sys.stderr.write(" - Lines consisting of two java package names: The\n")
- sys.stderr.write(" first package listed must not contain any references\n")
- sys.stderr.write(" to any classes present in the second package, or any\n")
- sys.stderr.write(" of its dependencies.\n")
- sys.stderr.write(" - Lines consisting of one java package name: The\n")
- sys.stderr.write(" packge is assumed to be a high level package and\n")
- sys.stderr.write(" nothing may depend on it.\n")
- sys.stderr.write(" - Lines consisting of a dash (+) followed by one java\n")
- sys.stderr.write(" package name: The package is considered a low level\n")
- sys.stderr.write(" package and may not import any of the other packages\n")
- sys.stderr.write(" listed in the dependency file.\n")
- sys.stderr.write(" - Lines consisting of a plus (-) followed by one java\n")
- sys.stderr.write(" package name: The package is considered \'legacy\'\n")
- sys.stderr.write(" and excluded from errors.\n")
- sys.stderr.write("\n")
- sys.exit(1)
-
-class Dependency:
- def __init__(self, filename, lineno, lower, top, lowlevel, legacy):
- self.filename = filename
- self.lineno = lineno
- self.lower = lower
- self.top = top
- self.lowlevel = lowlevel
- self.legacy = legacy
- self.uppers = []
- self.transitive = set()
-
- def matches(self, imp):
- for d in self.transitive:
- if imp.startswith(d):
- return True
- return False
-
-class Dependencies:
- def __init__(self, deps):
- def recurse(obj, dep, visited):
- global err
- if dep in visited:
- sys.stderr.write("%s:%d: Circular dependency found:\n"
- % (dep.filename, dep.lineno))
- for v in visited:
- sys.stderr.write("%s:%d: Dependency: %s\n"
- % (v.filename, v.lineno, v.lower))
- err = True
- return
- visited.append(dep)
- for upper in dep.uppers:
- obj.transitive.add(upper)
- if upper in deps:
- recurse(obj, deps[upper], visited)
- self.deps = deps
- self.parts = [(dep.lower.split('.'),dep) for dep in deps.itervalues()]
- # transitive closure of dependencies
- for dep in deps.itervalues():
- recurse(dep, dep, [])
- # disallow everything from the low level components
- for dep in deps.itervalues():
- if dep.lowlevel:
- for d in deps.itervalues():
- if dep != d and not d.legacy:
- dep.transitive.add(d.lower)
- # disallow the 'top' components everywhere but in their own package
- for dep in deps.itervalues():
- if dep.top and not dep.legacy:
- for d in deps.itervalues():
- if dep != d and not d.legacy:
- d.transitive.add(dep.lower)
- for dep in deps.itervalues():
- dep.transitive = set([x+"." for x in dep.transitive])
- if False:
- for dep in deps.itervalues():
- print "-->", dep.lower, "-->", dep.transitive
-
- # Lookup the dep object for the given package. If pkg is a subpackage
- # of one with a rule, that one will be returned. If no matches are found,
- # None is returned.
- def lookup(self, pkg):
- # Returns the number of parts that match
- def compare_parts(parts, pkg):
- if len(parts) > len(pkg):
- return 0
- n = 0
- for i in range(0, len(parts)):
- if parts[i] != pkg[i]:
- return 0
- n = n + 1
- return n
- pkg = pkg.split(".")
- matched = 0
- result = None
- for (parts,dep) in self.parts:
- x = compare_parts(parts, pkg)
- if x > matched:
- matched = x
- result = dep
- return result
-
-def parse_dependency_file(filename):
- global err
- f = file(filename)
- lines = f.readlines()
- f.close()
- def lineno(s, i):
- i[0] = i[0] + 1
- return (i[0],s)
- n = [0]
- lines = [lineno(x,n) for x in lines]
- lines = [(n,s.split("#")[0].strip()) for (n,s) in lines]
- lines = [(n,s) for (n,s) in lines if len(s) > 0]
- lines = [(n,s.split()) for (n,s) in lines]
- deps = {}
- for n,words in lines:
- if len(words) == 1:
- lower = words[0]
- top = True
- legacy = False
- lowlevel = False
- if lower[0] == '+':
- lower = lower[1:]
- top = False
- lowlevel = True
- elif lower[0] == '-':
- lower = lower[1:]
- legacy = True
- if lower in deps:
- sys.stderr.write(("%s:%d: Package '%s' already defined on"
- + " line %d.\n") % (filename, n, lower, deps[lower].lineno))
- err = True
- else:
- deps[lower] = Dependency(filename, n, lower, top, lowlevel, legacy)
- elif len(words) == 2:
- lower = words[0]
- upper = words[1]
- if lower in deps:
- dep = deps[lower]
- if dep.top:
- sys.stderr.write(("%s:%d: Can't add dependency to top level package "
- + "'%s'\n") % (filename, n, lower))
- err = True
- else:
- dep = Dependency(filename, n, lower, False, False, False)
- deps[lower] = dep
- dep.uppers.append(upper)
- else:
- sys.stderr.write("%s:%d: Too many words on line starting at \'%s\'\n" % (
- filename, n, words[2]))
- err = True
- return Dependencies(deps)
-
-def find_java_files(srcs):
- result = []
- for d in srcs:
- if d[0] == '@':
- f = file(d[1:])
- result.extend([fn for fn in [s.strip() for s in f.readlines()]
- if len(fn) != 0])
- f.close()
- else:
- for root, dirs, files in os.walk(d):
- result.extend([os.sep.join((root,f)) for f in files
- if f.lower().endswith(".java")])
- return result
-
-COMMENTS = re.compile("//.*?\n|/\*.*?\*/", re.S)
-PACKAGE = re.compile("package\s+(.*)")
-IMPORT = re.compile("import\s+(.*)")
-
-def examine_java_file(deps, filename):
- global err
- # Yes, this is a crappy java parser. Write a better one if you want to.
- f = file(filename)
- text = f.read()
- f.close()
- text = COMMENTS.sub("", text)
- index = text.find("{")
- if index < 0:
- sys.stderr.write(("%s: Error: Unable to parse java. Can't find class "
- + "declaration.\n") % filename)
- err = True
- return
- text = text[0:index]
- statements = [s.strip() for s in text.split(";")]
- # First comes the package declaration. Then iterate while we see import
- # statements. Anything else is either bad syntax that we don't care about
- # because the compiler will fail, or the beginning of the class declaration.
- m = PACKAGE.match(statements[0])
- if not m:
- sys.stderr.write(("%s: Error: Unable to parse java. Missing package "
- + "statement.\n") % filename)
- err = True
- return
- pkg = m.group(1)
- imports = []
- for statement in statements[1:]:
- m = IMPORT.match(statement)
- if not m:
- break
- imports.append(m.group(1))
- # Do the checking
- if False:
- print filename
- print "'%s' --> %s" % (pkg, imports)
- dep = deps.lookup(pkg)
- if not dep:
- sys.stderr.write(("%s: Error: Package does not appear in dependency file: "
- + "%s\n") % (filename, pkg))
- err = True
- return
- for imp in imports:
- if dep.matches(imp):
- sys.stderr.write("%s: Illegal import in package '%s' of '%s'\n"
- % (filename, pkg, imp))
- err = True
-
-err = False
-
-def main(argv):
- if len(argv) < 3:
- fail_with_usage()
- deps = parse_dependency_file(argv[1])
-
- if err:
- sys.exit(1)
-
- java = find_java_files(argv[2:])
- for filename in java:
- examine_java_file(deps, filename)
-
- if err:
- sys.stderr.write("%s: Using this file as dependency file.\n" % argv[1])
- sys.exit(1)
-
- sys.exit(0)
-
-if __name__ == "__main__":
- main(sys.argv)
-
diff --git a/tools/normalize_path.py b/tools/normalize_path.py
index 6c4d548..363df1f 100755
--- a/tools/normalize_path.py
+++ b/tools/normalize_path.py
@@ -22,8 +22,8 @@
if len(sys.argv) > 1:
for p in sys.argv[1:]:
- print os.path.normpath(p)
+ print(os.path.normpath(p))
sys.exit(0)
for line in sys.stdin:
- print os.path.normpath(line.strip())
+ print(os.path.normpath(line.strip()))
diff --git a/tools/parsedeps.py b/tools/parsedeps.py
deleted file mode 100755
index 32d8ad7..0000000
--- a/tools/parsedeps.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python
-# vim: ts=2 sw=2
-
-import optparse
-import re
-import sys
-
-
-class Dependency:
- def __init__(self, tgt):
- self.tgt = tgt
- self.pos = ""
- self.prereqs = set()
- self.visit = 0
-
- def add(self, prereq):
- self.prereqs.add(prereq)
-
-
-class Dependencies:
- def __init__(self):
- self.lines = {}
- self.__visit = 0
- self.count = 0
-
- def add(self, tgt, prereq):
- t = self.lines.get(tgt)
- if not t:
- t = Dependency(tgt)
- self.lines[tgt] = t
- p = self.lines.get(prereq)
- if not p:
- p = Dependency(prereq)
- self.lines[prereq] = p
- t.add(p)
- self.count = self.count + 1
-
- def setPos(self, tgt, pos):
- t = self.lines.get(tgt)
- if not t:
- t = Dependency(tgt)
- self.lines[tgt] = t
- t.pos = pos
-
- def get(self, tgt):
- if self.lines.has_key(tgt):
- return self.lines[tgt]
- else:
- return None
-
- def __iter__(self):
- return self.lines.iteritems()
-
- def trace(self, tgt, prereq):
- self.__visit = self.__visit + 1
- d = self.lines.get(tgt)
- if not d:
- return
- return self.__trace(d, prereq)
-
- def __trace(self, d, prereq):
- if d.visit == self.__visit:
- return d.trace
- if d.tgt == prereq:
- return [ [ d ], ]
- d.visit = self.__visit
- result = []
- for pre in d.prereqs:
- recursed = self.__trace(pre, prereq)
- for r in recursed:
- result.append([ d ] + r)
- d.trace = result
- return result
-
-def help():
- print "Commands:"
- print " dep TARGET Print the prerequisites for TARGET"
- print " trace TARGET PREREQ Print the paths from TARGET to PREREQ"
-
-
-def main(argv):
- opts = optparse.OptionParser()
- opts.add_option("-i", "--interactive", action="store_true", dest="interactive",
- help="Interactive mode")
- (options, args) = opts.parse_args()
-
- deps = Dependencies()
-
- filename = args[0]
- print "Reading %s" % filename
-
- if True:
- f = open(filename)
- for line in f:
- line = line.strip()
- if len(line) > 0:
- if line[0] == '#':
- pos,tgt = line.rsplit(":", 1)
- pos = pos[1:].strip()
- tgt = tgt.strip()
- deps.setPos(tgt, pos)
- else:
- (tgt,prereq) = line.split(':', 1)
- tgt = tgt.strip()
- prereq = prereq.strip()
- deps.add(tgt, prereq)
- f.close()
-
- print "Read %d dependencies. %d targets." % (deps.count, len(deps.lines))
- while True:
- line = raw_input("target> ")
- if not line.strip():
- continue
- split = line.split()
- cmd = split[0]
- if len(split) == 2 and cmd == "dep":
- tgt = split[1]
- d = deps.get(tgt)
- if d:
- for prereq in d.prereqs:
- print prereq.tgt
- elif len(split) == 3 and cmd == "trace":
- tgt = split[1]
- prereq = split[2]
- if False:
- print "from %s to %s" % (tgt, prereq)
- trace = deps.trace(tgt, prereq)
- if trace:
- width = 0
- for g in trace:
- for t in g:
- if len(t.tgt) > width:
- width = len(t.tgt)
- for g in trace:
- for t in g:
- if t.pos:
- print t.tgt, " " * (width-len(t.tgt)), " #", t.pos
- else:
- print t.tgt
- print
- else:
- help()
-
-if __name__ == "__main__":
- try:
- main(sys.argv)
- except KeyboardInterrupt:
- print
- except EOFError:
- print
-
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index d8e34b7..29fc771 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -37,6 +37,7 @@
"releasetools_build_image",
"releasetools_build_super_image",
"releasetools_common",
+ "libavbtool",
],
required: [
"care_map_generator",
@@ -62,7 +63,7 @@
"mkuserimg_mke2fs",
"simg2img",
"tune2fs",
- "mkf2fsuserimg.sh",
+ "mkf2fsuserimg",
"fsck.f2fs",
],
}
@@ -94,10 +95,13 @@
"check_target_files_vintf.py",
],
libs: [
+ "apex_manifest",
"releasetools_common",
],
required: [
"checkvintf",
+ "deapexer",
+ "dump_apex_info",
],
}
@@ -150,7 +154,6 @@
"edify_generator.py",
"non_ab_ota.py",
"ota_from_target_files.py",
- "ota_utils.py",
"target_files_diff.py",
],
libs: [
@@ -160,6 +163,7 @@
"releasetools_verity_utils",
"apex_manifest",
"care_map_proto_py",
+ "ota_utils_lib",
],
required: [
"brillo_update_payload",
@@ -324,6 +328,33 @@
],
}
+python_library_host {
+ name: "ota_utils_lib",
+ srcs: [
+ "ota_utils.py",
+ "payload_signer.py",
+ ],
+}
+
+python_binary_host {
+ name: "merge_ota",
+ version: {
+ py3: {
+ embedded_launcher: true,
+ },
+ },
+ srcs: [
+ "merge_ota.py",
+ ],
+ libs: [
+ "ota_metadata_proto",
+ "update_payload",
+ "care_map_proto_py",
+ "releasetools_common",
+ "ota_utils_lib",
+ ],
+}
+
python_binary_host {
name: "build_image",
defaults: [
@@ -519,23 +550,6 @@
}
python_binary_host {
- name: "fsverity_manifest_generator",
- defaults: ["releasetools_binary_defaults"],
- srcs: [
- "fsverity_manifest_generator.py",
- ],
- libs: [
- "fsverity_digests_proto_python",
- "releasetools_common",
- ],
- required: [
- "aapt2",
- "apksigner",
- "fsverity",
- ],
-}
-
-python_binary_host {
name: "fsverity_metadata_generator",
defaults: ["releasetools_binary_defaults"],
srcs: [
@@ -561,6 +575,7 @@
"sign_apex.py",
"sign_target_files_apks.py",
"validate_target_files.py",
+ "merge_ota.py",
":releasetools_merge_sources",
":releasetools_merge_tests",
@@ -577,6 +592,7 @@
"releasetools_img_from_target_files",
"releasetools_ota_from_target_files",
"releasetools_verity_utils",
+ "update_payload",
],
data: [
"testdata/**/*",
diff --git a/tools/releasetools/add_img_to_target_files b/tools/releasetools/add_img_to_target_files
deleted file mode 120000
index 04323bd..0000000
--- a/tools/releasetools/add_img_to_target_files
+++ /dev/null
@@ -1 +0,0 @@
-add_img_to_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 09f69d0..d308a55 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -46,6 +46,7 @@
from __future__ import print_function
+import avbtool
import datetime
import logging
import os
@@ -62,9 +63,11 @@
import common
import verity_utils
import ota_metadata_pb2
+import rangelib
+import sparse_img
from apex_utils import GetApexInfoFromTargetFiles
-from common import AddCareMapForAbOta, ZipDelete
+from common import ZipDelete, PARTITIONS_WITH_CARE_MAP, ExternalError, RunAndCheckOutput, IsSparseImage, MakeTempFile, ZipWrite
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -76,8 +79,6 @@
OPTIONS.add_missing = False
OPTIONS.rebuild_recovery = False
OPTIONS.replace_updated_files_list = []
-OPTIONS.replace_verity_public_key = False
-OPTIONS.replace_verity_private_key = False
OPTIONS.is_signing = False
# Use a fixed timestamp (01/01/2009 00:00:00 UTC) for files when packaging
@@ -87,6 +88,159 @@
datetime.datetime.utcfromtimestamp(0)).total_seconds())
+def ParseAvbFooter(img_path) -> avbtool.AvbFooter:
+ with open(img_path, 'rb') as fp:
+ fp.seek(-avbtool.AvbFooter.SIZE, os.SEEK_END)
+ data = fp.read(avbtool.AvbFooter.SIZE)
+ return avbtool.AvbFooter(data)
+
+
+def GetCareMap(which, imgname):
+ """Returns the care_map string for the given partition.
+
+ Args:
+ which: The partition name, must be listed in PARTITIONS_WITH_CARE_MAP.
+ imgname: The filename of the image.
+
+ Returns:
+ A list [which, care_map_ranges], where care_map_ranges is the raw string
+ of the care_map RangeSet; or None if the AVB footer cannot be parsed.
+ """
+ assert which in PARTITIONS_WITH_CARE_MAP
+
+ is_sparse_img = IsSparseImage(imgname)
+ unsparsed_image_size = os.path.getsize(imgname)
+
+ # A verified image contains the original image + hash tree data + FEC data
+ # + AVB footer, all concatenated together. The care map specifies the range
+ # of blocks that update_verifier should read on top of the dm-verity device
+ # to verify the correctness of OTA updates. When reading off the dm-verity
+ # device, the hash tree and FEC parts of the image aren't available, so the
+ # care map should only contain the original image blocks.
+ try:
+ avbfooter = None
+ if is_sparse_img:
+ with tempfile.NamedTemporaryFile() as tmpfile:
+ img = sparse_img.SparseImage(imgname)
+ unsparsed_image_size = img.total_blocks * img.blocksize
+ for data in img.ReadBlocks(img.total_blocks - 1, 1):
+ tmpfile.write(data)
+ tmpfile.flush()
+ avbfooter = ParseAvbFooter(tmpfile.name)
+ else:
+ avbfooter = ParseAvbFooter(imgname)
+ except LookupError as e:
+ logger.warning(
+ "Failed to parse avbfooter for partition %s image %s, %s", which, imgname, e)
+ return None
+
+ image_size = avbfooter.original_image_size
+ assert image_size < unsparsed_image_size, f"AVB footer's original image size {image_size} is larger than or equal to the image size on disk {unsparsed_image_size}; this can't happen because a verified image = original image + hash tree data + FEC data + AVB footer."
+ assert image_size > 0
+
+ image_blocks = int(image_size) // 4096 - 1
+ # It's OK for image_blocks to be 0, because care map ranges are inclusive.
+ # So 0-0 means "just block 0", which is valid.
+ assert image_blocks >= 0, "blocks for {} must be non-negative, image size: {}".format(
+ which, image_size)
+
+ # For sparse images, we will only check the blocks that are listed in the care
+ # map, i.e. the ones with meaningful data.
+ if is_sparse_img:
+ simg = sparse_img.SparseImage(imgname)
+ care_map_ranges = simg.care_map.intersect(
+ rangelib.RangeSet("0-{}".format(image_blocks)))
+
+ # Otherwise for non-sparse images, we read all the blocks in the filesystem
+ # image.
+ else:
+ care_map_ranges = rangelib.RangeSet("0-{}".format(image_blocks))
+
+ return [which, care_map_ranges.to_string_raw()]
+
+
+def AddCareMapForAbOta(output_file, ab_partitions, image_paths):
+ """Generates and adds care_map.pb for a/b partition that has care_map.
+
+ Args:
+ output_file: The output zip file (needs to be already open),
+ or file path to write care_map.pb.
+ ab_partitions: The list of A/B partitions.
+ image_paths: A map from the partition name to the image path.
+ """
+ if not output_file:
+ raise ExternalError('Expected output_file for AddCareMapForAbOta')
+
+ care_map_list = []
+ for partition in ab_partitions:
+ partition = partition.strip()
+ if partition not in PARTITIONS_WITH_CARE_MAP:
+ continue
+
+ verity_block_device = "{}_verity_block_device".format(partition)
+ avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
+ if (verity_block_device in OPTIONS.info_dict or
+ OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
+ if partition not in image_paths:
+ logger.warning('Potential partition with care_map missing from images: %s',
+ partition)
+ continue
+ image_path = image_paths[partition]
+ if not os.path.exists(image_path):
+ raise ExternalError('Expected image at path {}'.format(image_path))
+
+ care_map = GetCareMap(partition, image_path)
+ if not care_map:
+ continue
+ care_map_list += care_map
+
+ # adds fingerprint field to the care_map
+ # TODO(xunchang) revisit the fingerprint calculation for care_map.
+ partition_props = OPTIONS.info_dict.get(partition + ".build.prop")
+ prop_name_list = ["ro.{}.build.fingerprint".format(partition),
+ "ro.{}.build.thumbprint".format(partition)]
+
+ present_props = [x for x in prop_name_list if
+ partition_props and partition_props.GetProp(x)]
+ if not present_props:
+ logger.warning(
+ "fingerprint is not present for partition %s", partition)
+ property_id, fingerprint = "unknown", "unknown"
+ else:
+ property_id = present_props[0]
+ fingerprint = partition_props.GetProp(property_id)
+ care_map_list += [property_id, fingerprint]
+
+ if not care_map_list:
+ return
+
+ # Converts the list into proto buf message by calling care_map_generator; and
+ # writes the result to a temp file.
+ temp_care_map_text = MakeTempFile(prefix="caremap_text-",
+ suffix=".txt")
+ with open(temp_care_map_text, 'w') as text_file:
+ text_file.write('\n'.join(care_map_list))
+
+ temp_care_map = MakeTempFile(prefix="caremap-", suffix=".pb")
+ care_map_gen_cmd = ["care_map_generator", temp_care_map_text, temp_care_map]
+ RunAndCheckOutput(care_map_gen_cmd)
+
+ if not isinstance(output_file, zipfile.ZipFile):
+ shutil.copy(temp_care_map, output_file)
+ return
+ # output_file is a zip file
+ care_map_path = "META/care_map.pb"
+ if care_map_path in output_file.namelist():
+ # Copy the temp file into the OPTIONS.input_tmp dir and update the
+ # replace_updated_files_list used by add_img_to_target_files
+ if not OPTIONS.replace_updated_files_list:
+ OPTIONS.replace_updated_files_list = []
+ shutil.copy(temp_care_map, os.path.join(OPTIONS.input_tmp, care_map_path))
+ OPTIONS.replace_updated_files_list.append(care_map_path)
+ else:
+ ZipWrite(output_file, temp_care_map, arcname=care_map_path)
+
+
class OutputFile(object):
"""A helper class to write a generated file to the given dir or zip.
@@ -279,6 +433,7 @@
block_list=block_list)
return img.name
+
def AddSystemDlkm(output_zip):
"""Turn the contents of SystemDlkm into an system_dlkm image and store it in output_zip."""
@@ -457,8 +612,7 @@
# Set the '_image_size' for given image size.
is_verity_partition = "verity_block_device" in image_props
- verity_supported = (image_props.get("verity") == "true" or
- image_props.get("avb_enable") == "true")
+ verity_supported = (image_props.get("avb_enable") == "true")
is_avb_enable = image_props.get("avb_hashtree_enable") == "true"
if verity_supported and (is_verity_partition or is_avb_enable):
image_size = image_props.get("image_size")
@@ -557,7 +711,7 @@
cmd = [bpttool, "make_table", "--output_json", bpt.name,
"--output_gpt", img.name]
input_files_str = OPTIONS.info_dict["board_bpt_input_files"]
- input_files = input_files_str.split(" ")
+ input_files = input_files_str.split()
for i in input_files:
cmd.extend(["--input", i])
disk_size = OPTIONS.info_dict.get("board_bpt_disk_size")
@@ -688,14 +842,13 @@
SYSTEM/ after rebuilding recovery.
"""
common.ZipDelete(zip_filename, files_list)
- output_zip = zipfile.ZipFile(zip_filename, "a",
+ with zipfile.ZipFile(zip_filename, "a",
compression=zipfile.ZIP_DEFLATED,
- allowZip64=True)
- for item in files_list:
- file_path = os.path.join(OPTIONS.input_tmp, item)
- assert os.path.exists(file_path)
- common.ZipWrite(output_zip, file_path, arcname=item)
- common.ZipClose(output_zip)
+ allowZip64=True) as output_zip:
+ for item in files_list:
+ file_path = os.path.join(OPTIONS.input_tmp, item)
+ assert os.path.exists(file_path)
+ common.ZipWrite(output_zip, file_path, arcname=item)
def HasPartition(partition_name):
@@ -783,7 +936,8 @@
has_boot = OPTIONS.info_dict.get("no_boot") != "true"
has_init_boot = OPTIONS.info_dict.get("init_boot") == "true"
has_vendor_boot = OPTIONS.info_dict.get("vendor_boot") == "true"
- has_vendor_kernel_boot = OPTIONS.info_dict.get("vendor_kernel_boot") == "true"
+ has_vendor_kernel_boot = OPTIONS.info_dict.get(
+ "vendor_kernel_boot") == "true"
# {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm, system_dlkm, system, system_other}.img
# can be built from source, or dropped into target_files.zip as a prebuilt blob.
@@ -876,7 +1030,7 @@
"VENDOR_KERNEL_BOOT")
if vendor_kernel_boot_image:
partitions['vendor_kernel_boot'] = os.path.join(OPTIONS.input_tmp, "IMAGES",
- "vendor_kernel_boot.img")
+ "vendor_kernel_boot.img")
if not os.path.exists(partitions['vendor_kernel_boot']):
vendor_kernel_boot_image.WriteToDir(OPTIONS.input_tmp)
if output_zip:
@@ -1021,7 +1175,7 @@
AddVbmetaDigest(output_zip)
if output_zip:
- common.ZipClose(output_zip)
+ output_zip.close()
if OPTIONS.replace_updated_files_list:
ReplaceUpdatedFiles(output_zip.filename,
OPTIONS.replace_updated_files_list)
@@ -1054,7 +1208,8 @@
ZipDelete(zipfile_path, [entry.filename for entry in entries_to_store])
with zipfile.ZipFile(zipfile_path, "a", allowZip64=True) as zfp:
for entry in entries_to_store:
- zfp.write(os.path.join(tmpdir, entry.filename), entry.filename, compress_type=zipfile.ZIP_STORED)
+ zfp.write(os.path.join(tmpdir, entry.filename),
+ entry.filename, compress_type=zipfile.ZIP_STORED)
def main(argv):
@@ -1064,9 +1219,11 @@
elif o in ("-r", "--rebuild_recovery",):
OPTIONS.rebuild_recovery = True
elif o == "--replace_verity_private_key":
- OPTIONS.replace_verity_private_key = (True, a)
+ raise ValueError("--replace_verity_private_key is no longer supported,"
+ " please switch to AVB")
elif o == "--replace_verity_public_key":
- OPTIONS.replace_verity_public_key = (True, a)
+ raise ValueError("--replace_verity_public_key is no longer supported,"
+ " please switch to AVB")
elif o == "--is_signing":
OPTIONS.is_signing = True
else:
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 941edc6..194ff58 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -63,10 +63,14 @@
self.codename_to_api_level_map = codename_to_api_level_map
self.debugfs_path = os.path.join(
OPTIONS.search_path, "bin", "debugfs_static")
+ self.fsckerofs_path = os.path.join(
+ OPTIONS.search_path, "bin", "fsck.erofs")
+ self.blkid_path = os.path.join(
+ OPTIONS.search_path, "bin", "blkid")
self.avbtool = avbtool if avbtool else "avbtool"
self.sign_tool = sign_tool
- def ProcessApexFile(self, apk_keys, payload_key, signing_args=None):
+ def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False):
"""Scans and signs the payload files and repack the apex
Args:
@@ -80,13 +84,17 @@
"Couldn't find location of debugfs_static: " +
"Path {} does not exist. ".format(self.debugfs_path) +
"Make sure bin/debugfs_static can be found in -p <path>")
- list_cmd = ['deapexer', '--debugfs_path',
- self.debugfs_path, 'list', self.apex_path]
+ list_cmd = ['deapexer', '--debugfs_path', self.debugfs_path,
+ 'list', self.apex_path]
entries_names = common.RunAndCheckOutput(list_cmd).split()
apk_entries = [name for name in entries_names if name.endswith('.apk')]
+ sepolicy_entries = []
+ if is_sepolicy:
+ sepolicy_entries = [name for name in entries_names if
+ name.startswith('./etc/SEPolicy') and name.endswith('.zip')]
# No need to sign and repack, return the original apex path.
- if not apk_entries and self.sign_tool is None:
+ if not apk_entries and not sepolicy_entries and self.sign_tool is None:
logger.info('No apk file to sign in %s', self.apex_path)
return self.apex_path
@@ -102,29 +110,41 @@
' %s', entry)
payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
- apk_entries, apk_keys, payload_key, signing_args)
+ apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args)
if not has_signed_content:
- logger.info('No contents has been signed in %s', self.apex_path)
+ logger.info('No contents have been signed in %s', self.apex_path)
return self.apex_path
return self.RepackApexPayload(payload_dir, payload_key, signing_args)
- def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key, signing_args):
+ def ExtractApexPayloadAndSignContents(self, apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args):
"""Extracts the payload image and signs the containing apk files."""
if not os.path.exists(self.debugfs_path):
raise ApexSigningError(
"Couldn't find location of debugfs_static: " +
"Path {} does not exist. ".format(self.debugfs_path) +
"Make sure bin/debugfs_static can be found in -p <path>")
+ if not os.path.exists(self.fsckerofs_path):
+ raise ApexSigningError(
+ "Couldn't find location of fsck.erofs: " +
+ "Path {} does not exist. ".format(self.fsckerofs_path) +
+ "Make sure bin/fsck.erofs can be found in -p <path>")
+ if not os.path.exists(self.blkid_path):
+ raise ApexSigningError(
+ "Couldn't find location of blkid: " +
+ "Path {} does not exist. ".format(self.blkid_path) +
+ "Make sure bin/blkid can be found in -p <path>")
payload_dir = common.MakeTempDir()
- extract_cmd = ['deapexer', '--debugfs_path',
- self.debugfs_path, 'extract', self.apex_path, payload_dir]
+ extract_cmd = ['deapexer', '--debugfs_path', self.debugfs_path,
+ '--fsckerofs_path', self.fsckerofs_path,
+ '--blkid_path', self.blkid_path, 'extract',
+ self.apex_path, payload_dir]
common.RunAndCheckOutput(extract_cmd)
+ assert os.path.exists(self.apex_path)
has_signed_content = False
for entry in apk_entries:
apk_path = os.path.join(payload_dir, entry)
- assert os.path.exists(self.apex_path)
key_name = apk_keys.get(os.path.basename(entry))
if key_name in common.SPECIAL_CERT_STRINGS:
@@ -141,6 +161,37 @@
codename_to_api_level_map=self.codename_to_api_level_map)
has_signed_content = True
+ for entry in sepolicy_entries:
+ sepolicy_path = os.path.join(payload_dir, entry)
+
+      if 'etc' not in entry:
+ logger.warning('Sepolicy path does not contain the intended directory name etc:'
+ ' %s', entry)
+
+ key_name = apk_keys.get(os.path.basename(entry))
+ if key_name is None:
+ logger.warning('Failed to find signing keys for {} in'
+ ' apex {}, payload key will be used instead.'
+ ' Use "-e <name>=" to specify a key'
+ .format(entry, self.apex_path))
+ key_name = payload_key
+
+ if key_name in common.SPECIAL_CERT_STRINGS:
+ logger.info('Not signing: %s due to special cert string', sepolicy_path)
+ continue
+
+ if OPTIONS.sign_sepolicy_path is not None:
+        sig_path = sepolicy_path + '.sig'
+        fsv_sig_path = sepolicy_path + '.fsv_sig'
+ old_sig = common.MakeTempFile()
+ old_fsv_sig = common.MakeTempFile()
+ os.rename(sig_path, old_sig)
+ os.rename(fsv_sig_path, old_fsv_sig)
+
+ logger.info('Signing sepolicy file %s in apex %s', sepolicy_path, self.apex_path)
+ if common.SignSePolicy(sepolicy_path, key_name, self.key_passwords.get(key_name)):
+ has_signed_content = True
+
if self.sign_tool:
logger.info('Signing payload contents in apex %s with %s', self.apex_path, self.sign_tool)
# Pass avbtool to the custom signing tool
@@ -324,7 +375,8 @@
def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
container_pw, apk_keys, codename_to_api_level_map,
- no_hashtree, signing_args=None, sign_tool=None):
+ no_hashtree, signing_args=None, sign_tool=None,
+ is_sepolicy=False):
"""Signs the current uncompressed APEX with the given payload/container keys.
Args:
@@ -337,6 +389,7 @@
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
sign_tool: A tool to sign the contents of the APEX.
+ is_sepolicy: Indicates if the apex is a sepolicy.apex
Returns:
The path to the signed APEX file.
@@ -346,7 +399,8 @@
apk_signer = ApexApkSigner(apex_file, container_pw,
codename_to_api_level_map,
avbtool, sign_tool)
- apex_file = apk_signer.ProcessApexFile(apk_keys, payload_key, signing_args)
+ apex_file = apk_signer.ProcessApexFile(
+ apk_keys, payload_key, signing_args, is_sepolicy)
# 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
# payload_key.
@@ -377,7 +431,7 @@
apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
- common.ZipClose(apex_zip)
+ apex_zip.close()
# 3. Sign the APEX container with container_key.
signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
@@ -400,7 +454,8 @@
def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
container_pw, apk_keys, codename_to_api_level_map,
- no_hashtree, signing_args=None, sign_tool=None):
+ no_hashtree, signing_args=None, sign_tool=None,
+ is_sepolicy=False):
"""Signs the current compressed APEX with the given payload/container keys.
Args:
@@ -412,6 +467,7 @@
codename_to_api_level_map: A dict that maps from codename to API level.
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
+ is_sepolicy: Indicates if the apex is a sepolicy.apex
Returns:
The path to the signed APEX file.
@@ -438,7 +494,8 @@
codename_to_api_level_map,
no_hashtree,
signing_args,
- sign_tool)
+ sign_tool,
+ is_sepolicy)
# 3. Compress signed original apex.
compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
@@ -465,8 +522,8 @@
def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
- apk_keys, codename_to_api_level_map,
- no_hashtree, signing_args=None, sign_tool=None):
+ apk_keys, codename_to_api_level_map, no_hashtree,
+ signing_args=None, sign_tool=None, is_sepolicy=False):
"""Signs the current APEX with the given payload/container keys.
Args:
@@ -478,6 +535,7 @@
codename_to_api_level_map: A dict that maps from codename to API level.
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
+ is_sepolicy: Indicates if the apex is a sepolicy.apex
Returns:
The path to the signed APEX file.
@@ -498,24 +556,26 @@
apex_file,
payload_key=payload_key,
container_key=container_key,
- container_pw=None,
+ container_pw=container_pw,
codename_to_api_level_map=codename_to_api_level_map,
no_hashtree=no_hashtree,
apk_keys=apk_keys,
signing_args=signing_args,
- sign_tool=sign_tool)
+ sign_tool=sign_tool,
+ is_sepolicy=is_sepolicy)
elif apex_type == 'COMPRESSED':
return SignCompressedApex(
avbtool,
apex_file,
payload_key=payload_key,
container_key=container_key,
- container_pw=None,
+ container_pw=container_pw,
codename_to_api_level_map=codename_to_api_level_map,
no_hashtree=no_hashtree,
apk_keys=apk_keys,
signing_args=signing_args,
- sign_tool=sign_tool)
+ sign_tool=sign_tool,
+ is_sepolicy=is_sepolicy)
else:
# TODO(b/172912232): support signing compressed apex
raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
@@ -559,11 +619,13 @@
debugfs_path = "debugfs"
if OPTIONS.search_path:
debugfs_path = os.path.join(OPTIONS.search_path, "bin", "debugfs_static")
+
deapexer = 'deapexer'
if OPTIONS.search_path:
deapexer_path = os.path.join(OPTIONS.search_path, "bin", "deapexer")
if os.path.isfile(deapexer_path):
deapexer = deapexer_path
+
for apex_filename in os.listdir(target_dir):
apex_filepath = os.path.join(target_dir, apex_filename)
if not os.path.isfile(apex_filepath) or \
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index d33c2f7..8087fcd 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -537,14 +537,6 @@
self.touched_src_sha1 = self.src.RangeSha1(self.touched_src_ranges)
- if self.tgt.hashtree_info:
- out.append("compute_hash_tree {} {} {} {} {}\n".format(
- self.tgt.hashtree_info.hashtree_range.to_string_raw(),
- self.tgt.hashtree_info.filesystem_range.to_string_raw(),
- self.tgt.hashtree_info.hash_algorithm,
- self.tgt.hashtree_info.salt,
- self.tgt.hashtree_info.root_hash))
-
# Zero out extended blocks as a workaround for bug 20881595.
if self.tgt.extended:
assert (WriteSplitTransfers(out, "zero", self.tgt.extended) ==
@@ -830,12 +822,6 @@
assert touched[i] == 0
touched[i] = 1
- if self.tgt.hashtree_info:
- for s, e in self.tgt.hashtree_info.hashtree_range:
- for i in range(s, e):
- assert touched[i] == 0
- touched[i] = 1
-
# Check that we've written every target block.
for s, e in self.tgt.care_map:
for i in range(s, e):
@@ -1185,7 +1171,7 @@
try:
# Compresses with the default level
compress_obj = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
- compressed_data = (compress_obj.compress("".join(tgt_data))
+ compressed_data = (compress_obj.compress(b"".join(tgt_data))
+ compress_obj.flush())
compressed_size = len(compressed_data)
except zlib.error as e:
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 9567fdc..252b1d5 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -328,7 +328,7 @@
compressor = prop_dict["erofs_default_compressor"]
if "erofs_compressor" in prop_dict:
compressor = prop_dict["erofs_compressor"]
- if compressor:
+ if compressor and compressor != "none":
build_command.extend(["-z", compressor])
compress_hints = None
@@ -365,7 +365,7 @@
run_fsck = RunErofsFsck
elif fs_type.startswith("squash"):
- build_command = ["mksquashfsimage.sh"]
+ build_command = ["mksquashfsimage"]
build_command.extend([in_dir, out_file])
if "squashfs_sparse_flag" in prop_dict and not disable_sparse:
build_command.extend([prop_dict["squashfs_sparse_flag"]])
@@ -387,7 +387,7 @@
if prop_dict.get("squashfs_disable_4k_align") == "true":
build_command.extend(["-a"])
elif fs_type.startswith("f2fs"):
- build_command = ["mkf2fsuserimg.sh"]
+ build_command = ["mkf2fsuserimg"]
build_command.extend([out_file, prop_dict["image_size"]])
if "f2fs_sparse_flag" in prop_dict and not disable_sparse:
build_command.extend([prop_dict["f2fs_sparse_flag"]])
@@ -410,7 +410,7 @@
build_command.append("--casefold")
if (needs_compress or prop_dict.get("f2fs_compress") == "true"):
build_command.append("--compression")
- if (prop_dict.get("mount_point") != "data"):
+ if "ro_mount_point" in prop_dict:
build_command.append("--readonly")
if (prop_dict.get("f2fs_compress") == "true"):
build_command.append("--sldc")
@@ -671,11 +671,6 @@
"f2fs_sparse_flag",
"skip_fsck",
"ext_mkuserimg",
- "verity",
- "verity_key",
- "verity_signer_cmd",
- "verity_fec",
- "verity_disable",
"avb_enable",
"avb_avbtool",
"use_dynamic_partition_size",
@@ -762,6 +757,8 @@
if not copy_prop(prop, "extfs_rsv_pct"):
d["extfs_rsv_pct"] = "0"
+ d["ro_mount_point"] = "1"
+
# Copy partition-specific properties.
d["mount_point"] = mount_point
if mount_point == "system":
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index b395c19..97957be 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -142,7 +142,7 @@
"""Verifies the payload and metadata signatures in an A/B OTA payload."""
package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
if 'payload.bin' not in package_zip.namelist():
- common.ZipClose(package_zip)
+ package_zip.close()
return
print('Verifying A/B OTA payload signatures...')
@@ -160,7 +160,7 @@
'--in_file=' + payload_file,
'--public_key=' + pubkey]
common.RunAndCheckOutput(cmd)
- common.ZipClose(package_zip)
+ package_zip.close()
# Verified successfully upon reaching here.
print('\nPayload signatures VERIFIED\n\n')
diff --git a/tools/releasetools/check_target_files_signatures b/tools/releasetools/check_target_files_signatures
deleted file mode 120000
index 9f62aa3..0000000
--- a/tools/releasetools/check_target_files_signatures
+++ /dev/null
@@ -1 +0,0 @@
-check_target_files_signatures.py
\ No newline at end of file
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 4a2a905..b32b85c 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -22,13 +22,16 @@
target_files can be a ZIP file or an extracted target files directory.
"""
+import json
import logging
+import os
+import shutil
import subprocess
import sys
-import os
import zipfile
import common
+from apex_manifest import ParseApexManifest
logger = logging.getLogger(__name__)
@@ -123,7 +126,12 @@
logger.warning('PRODUCT_ENFORCE_VINTF_MANIFEST is not set, skipping checks')
return True
+
dirmap = GetDirmap(input_tmp)
+
+ apex_root, apex_info_file = PrepareApexDirectory(input_tmp)
+ dirmap['/apex'] = apex_root
+
args_for_skus = GetArgsForSkus(info_dict)
shipping_api_level_args = GetArgsForShippingApiLevel(info_dict)
kernel_args = GetArgsForKernel(input_tmp)
@@ -132,6 +140,8 @@
'checkvintf',
'--check-compat',
]
+ common_command += ['--apex-info-file', apex_info_file]
+
for device_path, real_path in sorted(dirmap.items()):
common_command += ['--dirmap', '{}:{}'.format(device_path, real_path)]
common_command += kernel_args
@@ -142,9 +152,10 @@
command = common_command + sku_args
proc = common.Run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
+ last_out_line = out.split()[-1] if out != "" else out
if proc.returncode == 0:
logger.info("Command `%s` returns 'compatible'", ' '.join(command))
- elif out.strip() == "INCOMPATIBLE":
+ elif last_out_line.strip() == "INCOMPATIBLE":
logger.info("Command `%s` returns 'incompatible'", ' '.join(command))
success = False
else:
@@ -185,6 +196,112 @@
paths = sum((PathToPatterns(path) for path in paths if path), [])
return paths
+def GetVintfApexUnzipPatterns():
+ """ Build unzip pattern for APEXes. """
+ patterns = []
+ for target_files_rel_paths in DIR_SEARCH_PATHS.values():
+ for target_files_rel_path in target_files_rel_paths:
+      patterns.append(os.path.join(target_files_rel_path, "apex/*"))
+
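+  # For illustration: assuming a DIR_SEARCH_PATHS value of ('SYSTEM',), this
+  # yields the unzip pattern 'SYSTEM/apex/*'.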
+ return patterns
+
+def PrepareApexDirectory(inp):
+ """ Prepare the APEX data.
+
+  Apex binaries do not support dirmaps; in order to use these binaries, we
+  need to move the APEXes from the extracted target files archive to the
+  expected device locations.
+
+ The APEXes will also be extracted under the APEX/ directory
+ matching what would be on the target.
+
+ Create the following structure under the input inp directory:
+ APEX/apex # Extracted APEXes
+ APEX/system/apex/ # System APEXes
+ APEX/vendor/apex/ # Vendor APEXes
+ ...
+
+ Args:
+ inp: path to the directory that contains the extracted target files archive.
+
+ Returns:
+    A tuple of (extracted APEX directory, path to the generated
+    apex-info-list.xml file).
+ """
+
+ deapexer = 'deapexer'
+ debugfs_path = 'debugfs'
+ blkid_path = 'blkid'
+ fsckerofs_path = 'fsck.erofs'
+ if OPTIONS.search_path:
+ debugfs_path = os.path.join(OPTIONS.search_path, 'bin', 'debugfs_static')
+ deapexer_path = os.path.join(OPTIONS.search_path, 'bin', 'deapexer')
+ blkid_path = os.path.join(OPTIONS.search_path, 'bin', 'blkid')
+ fsckerofs_path = os.path.join(OPTIONS.search_path, 'bin', 'fsck.erofs')
+ if os.path.isfile(deapexer_path):
+ deapexer = deapexer_path
+
+ def ExtractApexes(path, outp):
+ # Extract all APEXes found in input path.
+    logger.info('Extracting APEXes in %s', path)
+ for f in os.listdir(path):
+ logger.info(' adding APEX %s', os.path.basename(f))
+ apex = os.path.join(path, f)
+ if os.path.isdir(apex) and os.path.isfile(os.path.join(apex, 'apex_manifest.pb')):
+ info = ParseApexManifest(os.path.join(apex, 'apex_manifest.pb'))
+ # Flattened APEXes may have symlinks for libs (linked to /system/lib)
+ # We need to blindly copy them all.
+ shutil.copytree(apex, os.path.join(outp, info.name), symlinks=True)
+ elif os.path.isfile(apex) and apex.endswith(('.apex', '.capex')):
+ cmd = [deapexer,
+ '--debugfs_path', debugfs_path,
+ 'info',
+ apex]
+ info = json.loads(common.RunAndCheckOutput(cmd))
+
+ cmd = [deapexer,
+ '--debugfs_path', debugfs_path,
+ '--fsckerofs_path', fsckerofs_path,
+ '--blkid_path', blkid_path,
+ 'extract',
+ apex,
+ os.path.join(outp, info['name'])]
+ common.RunAndCheckOutput(cmd)
+ else:
+        logger.info('  .. skipping %s (is it an APEX?)', apex)
+
+ root_dir_name = 'APEX'
+ root_dir = os.path.join(inp, root_dir_name)
+ extracted_root = os.path.join(root_dir, 'apex')
+ apex_info_file = os.path.join(extracted_root, 'apex-info-list.xml')
+
+ # Always create APEX directory for dirmap
+ os.makedirs(extracted_root)
+
+ create_info_file = False
+
+ # Loop through search path looking for and processing apex/ directories.
+ for device_path, target_files_rel_paths in DIR_SEARCH_PATHS.items():
+ for target_files_rel_path in target_files_rel_paths:
+      inp_partition = os.path.join(inp, target_files_rel_path, "apex")
+ if os.path.exists(inp_partition):
+        apex_dir = root_dir + os.path.join(device_path, "apex")
+ os.makedirs(root_dir + device_path)
+ shutil.copytree(inp_partition, apex_dir, symlinks=True)
+ ExtractApexes(apex_dir, extracted_root)
+ create_info_file = True
+
+ if create_info_file:
+    # Create apex-info-list.xml
+ dump_cmd = ['dump_apex_info',
+ '--root_dir', root_dir,
+ '--out_file', apex_info_file]
+ common.RunAndCheckOutput(dump_cmd)
+ if not os.path.exists(apex_info_file):
+      raise RuntimeError('Failed to create apex info file %s' % apex_info_file)
+ logger.info('Created %s', apex_info_file)
+
+ return extracted_root, apex_info_file
def CheckVintfFromTargetFiles(inp, info_dict=None):
"""
@@ -198,7 +315,7 @@
True if VINTF check is skipped or compatible, False if incompatible. Raise
a RuntimeError if any error occurs.
"""
- input_tmp = common.UnzipTemp(inp, GetVintfFileList() + UNZIP_PATTERN)
+ input_tmp = common.UnzipTemp(inp, GetVintfFileList() + GetVintfApexUnzipPatterns() + UNZIP_PATTERN)
return CheckVintfFromExtractedTargetFiles(input_tmp, info_dict)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index caa4641..2f05d44 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -20,6 +20,7 @@
import datetime
import errno
import fnmatch
import getopt
import getpass
import gzip
@@ -72,18 +73,16 @@
if "ANDROID_HOST_OUT" in os.environ:
self.search_path = os.environ["ANDROID_HOST_OUT"]
self.signapk_shared_library_path = "lib64" # Relative to search_path
+ self.sign_sepolicy_path = None
self.extra_signapk_args = []
+ self.extra_sign_sepolicy_args = []
self.aapt2_path = "aapt2"
self.java_path = "java" # Use the one on the path by default.
- self.java_args = ["-Xmx2048m"] # The default JVM args.
+ self.java_args = ["-Xmx4096m"] # The default JVM args.
self.android_jar_path = None
self.public_key_suffix = ".x509.pem"
self.private_key_suffix = ".pk8"
# use otatools built boot_signer by default
- self.boot_signer_path = "boot_signer"
- self.boot_signer_args = []
- self.verity_signer_path = None
- self.verity_signer_args = []
self.verbose = False
self.tempfiles = []
self.device_specific = None
@@ -97,6 +96,7 @@
self.stash_threshold = 0.8
self.logfile = None
self.host_tools = {}
+ self.sepolicy_name = 'sepolicy.apex'
OPTIONS = Options()
@@ -700,7 +700,13 @@
"""Reads the contents of fn from input zipfile or directory."""
if isinstance(input_file, zipfile.ZipFile):
return input_file.read(fn).decode()
+ elif zipfile.is_zipfile(input_file):
+ with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+ return zfp.read(fn).decode()
else:
+ if not os.path.isdir(input_file):
+ raise ValueError(
+ "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
path = os.path.join(input_file, *fn.split("/"))
try:
with open(path) as f:
@@ -717,7 +723,16 @@
with open(tmp_file, 'wb') as f:
f.write(input_file.read(fn))
return tmp_file
+ elif zipfile.is_zipfile(input_file):
+ with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+ tmp_file = MakeTempFile(os.path.basename(fn))
+ with open(tmp_file, "wb") as fp:
+ fp.write(zfp.read(fn))
+ return tmp_file
else:
+ if not os.path.isdir(input_file):
+ raise ValueError(
+ "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
file = os.path.join(input_file, *fn.split("/"))
if not os.path.exists(file):
raise KeyError(fn)
@@ -858,6 +873,10 @@
d["avb_{}_salt".format(partition)] = sha256(
fingerprint.encode()).hexdigest()
+ # Set up the salt for partitions without build.prop
+ if build_info.fingerprint:
+ d["avb_salt"] = sha256(build_info.fingerprint.encode()).hexdigest()
+
# Set the vbmeta digest if exists
try:
d["vbmeta_digest"] = read_helper("META/vbmeta_digest.txt").rstrip()
@@ -1052,6 +1071,13 @@
return {key: val for key, val in d.items()
if key in self.props_allow_override}
+ def __getstate__(self):
+ state = self.__dict__.copy()
+    # ZipFile objects can't be pickled; keep only the backing filename.
+ if "input_file" in state and isinstance(state["input_file"], zipfile.ZipFile):
+ state["input_file"] = state["input_file"].filename
+ return state
+
def GetProp(self, prop):
return self.build_props.get(prop)
@@ -1186,13 +1212,13 @@
"""
def uniq_concat(a, b):
- combined = set(a.split(" "))
- combined.update(set(b.split(" ")))
+ combined = set(a.split())
+ combined.update(set(b.split()))
combined = [item.strip() for item in combined if item.strip()]
return " ".join(sorted(combined))
if (framework_dict.get("use_dynamic_partitions") !=
- "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
+ "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
raise ValueError("Both dictionaries must have use_dynamic_partitions=true")
merged_dict = {"use_dynamic_partitions": "true"}
@@ -1208,7 +1234,7 @@
# Super block devices are defined by the vendor dict.
if "super_block_devices" in vendor_dict:
merged_dict["super_block_devices"] = vendor_dict["super_block_devices"]
- for block_device in merged_dict["super_block_devices"].split(" "):
+ for block_device in merged_dict["super_block_devices"].split():
key = "super_%s_device_size" % block_device
if key not in vendor_dict:
raise ValueError("Vendor dict does not contain required key %s." % key)
@@ -1217,7 +1243,7 @@
# Partition groups and group sizes are defined by the vendor dict because
# these values may vary for each board that uses a shared system image.
merged_dict["super_partition_groups"] = vendor_dict["super_partition_groups"]
- for partition_group in merged_dict["super_partition_groups"].split(" "):
+ for partition_group in merged_dict["super_partition_groups"].split():
# Set the partition group's size using the value from the vendor dict.
key = "super_%s_group_size" % partition_group
if key not in vendor_dict:
@@ -1679,23 +1705,8 @@
with open(img.name, 'ab') as f:
f.write(boot_signature_bytes)
- if (info_dict.get("boot_signer") == "true" and
- info_dict.get("verity_key")):
- # Hard-code the path as "/boot" for two-step special recovery image (which
- # will be loaded into /boot during the two-step OTA).
- if two_step_image:
- path = "/boot"
- else:
- path = "/" + partition_name
- cmd = [OPTIONS.boot_signer_path]
- cmd.extend(OPTIONS.boot_signer_args)
- cmd.extend([path, img.name,
- info_dict["verity_key"] + ".pk8",
- info_dict["verity_key"] + ".x509.pem", img.name])
- RunAndCheckOutput(cmd)
-
# Sign the image if vboot is non-empty.
- elif info_dict.get("vboot"):
+ if info_dict.get("vboot"):
path = "/" + partition_name
img_keyblock = tempfile.NamedTemporaryFile()
# We have switched from the prebuilt futility binary to using the tool
@@ -1874,7 +1885,8 @@
fn = os.path.join(sourcedir, "dtb")
if os.access(fn, os.F_OK):
- has_vendor_kernel_boot = (info_dict.get("vendor_kernel_boot", "").lower() == "true")
+ has_vendor_kernel_boot = (info_dict.get(
+ "vendor_kernel_boot", "").lower() == "true")
# Pack dtb into vendor_kernel_boot if building vendor_kernel_boot.
# Otherwise pack dtb into vendor_boot.
@@ -1985,7 +1997,7 @@
def GetVendorKernelBootImage(name, prebuilt_name, unpack_dir, tree_subdir,
- info_dict=None):
+ info_dict=None):
"""Return a File object with the desired vendor kernel boot image.
Look for it under 'unpack_dir'/IMAGES, otherwise construct it from
@@ -2070,7 +2082,6 @@
def GetUserImage(which, tmpdir, input_zip,
info_dict=None,
allow_shared_blocks=None,
- hashtree_info_generator=None,
reset_file_map=False):
"""Returns an Image object suitable for passing to BlockImageDiff.
@@ -2087,8 +2098,6 @@
info_dict: The dict to be looked up for relevant info.
allow_shared_blocks: If image is sparse, whether having shared blocks is
allowed. If none, it is looked up from info_dict.
- hashtree_info_generator: If present and image is sparse, generates the
- hashtree_info for this sparse image.
reset_file_map: If true and image is sparse, reset file map before returning
the image.
Returns:
@@ -2110,15 +2119,14 @@
allow_shared_blocks = info_dict.get("ext4_share_dup_blocks") == "true"
if is_sparse:
- img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
- hashtree_info_generator)
+ img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks)
if reset_file_map:
img.ResetFileMap()
return img
- return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
+ return GetNonSparseImage(which, tmpdir)
-def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None):
+def GetNonSparseImage(which, tmpdir):
"""Returns a Image object suitable for passing to BlockImageDiff.
This function loads the specified non-sparse image from the given path.
@@ -2136,11 +2144,10 @@
# ota_from_target_files.py (since LMP).
assert os.path.exists(path) and os.path.exists(mappath)
- return images.FileImage(path, hashtree_info_generator=hashtree_info_generator)
+ return images.FileImage(path)
-def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
- hashtree_info_generator=None):
+def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
"""Returns a SparseImage object suitable for passing to BlockImageDiff.
This function loads the specified sparse image from the given path, and
@@ -2153,8 +2160,6 @@
tmpdir: The directory that contains the prebuilt image and block map file.
input_zip: The target-files ZIP archive.
allow_shared_blocks: Whether having shared blocks is allowed.
- hashtree_info_generator: If present, generates the hashtree_info for this
- sparse image.
Returns:
A SparseImage object, with file_map info loaded.
"""
@@ -2171,8 +2176,7 @@
clobbered_blocks = "0"
image = sparse_img.SparseImage(
- path, mappath, clobbered_blocks, allow_shared_blocks=allow_shared_blocks,
- hashtree_info_generator=hashtree_info_generator)
+ path, mappath, clobbered_blocks, allow_shared_blocks=allow_shared_blocks)
# block.map may contain less blocks, because mke2fs may skip allocating blocks
# if they contain all zeros. We can't reconstruct such a file from its block
@@ -2376,8 +2380,40 @@
stdoutdata, _ = proc.communicate(password)
if proc.returncode != 0:
raise ExternalError(
- "Failed to run signapk.jar: return code {}:\n{}".format(
+ "Failed to run {}: return code {}:\n{}".format(cmd,
+ proc.returncode, stdoutdata))
+
+
+def SignSePolicy(sepolicy, key, password):
+ """Sign the sepolicy zip, producing an fsverity .fsv_sig and
+ an RSA .sig signature files.
+ """
+
+ if OPTIONS.sign_sepolicy_path is None:
+ logger.info("No sign_sepolicy_path specified, %s was not signed", sepolicy)
+ return False
+
+ java_library_path = os.path.join(
+ OPTIONS.search_path, OPTIONS.signapk_shared_library_path)
+
+ cmd = ([OPTIONS.java_path] + OPTIONS.java_args +
+ ["-Djava.library.path=" + java_library_path,
+ "-jar", os.path.join(OPTIONS.search_path, OPTIONS.sign_sepolicy_path)] +
+ OPTIONS.extra_sign_sepolicy_args)
+
+ cmd.extend([key + OPTIONS.public_key_suffix,
+ key + OPTIONS.private_key_suffix,
+ sepolicy, os.path.dirname(sepolicy)])
+
+ proc = Run(cmd, stdin=subprocess.PIPE)
+ if password is not None:
+ password += "\n"
+ stdoutdata, _ = proc.communicate(password)
+ if proc.returncode != 0:
+ raise ExternalError(
+ "Failed to run sign sepolicy: return code {}:\n{}".format(
proc.returncode, stdoutdata))
+ return True
def CheckSize(data, target, info_dict):
@@ -2555,7 +2591,8 @@
opts, args = getopt.getopt(
argv, "hvp:s:x:" + extra_opts,
["help", "verbose", "path=", "signapk_path=",
- "signapk_shared_library_path=", "extra_signapk_args=", "aapt2_path=",
+ "signapk_shared_library_path=", "extra_signapk_args=",
+ "sign_sepolicy_path=", "extra_sign_sepolicy_args=", "aapt2_path=",
"java_path=", "java_args=", "android_jar_path=", "public_key_suffix=",
"private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
"verity_signer_path=", "verity_signer_args=", "device_specific=",
@@ -2579,6 +2616,10 @@
OPTIONS.signapk_shared_library_path = a
elif o in ("--extra_signapk_args",):
OPTIONS.extra_signapk_args = shlex.split(a)
+ elif o in ("--sign_sepolicy_path",):
+ OPTIONS.sign_sepolicy_path = a
+ elif o in ("--extra_sign_sepolicy_args",):
+ OPTIONS.extra_sign_sepolicy_args = shlex.split(a)
elif o in ("--aapt2_path",):
OPTIONS.aapt2_path = a
elif o in ("--java_path",):
@@ -2592,13 +2633,17 @@
elif o in ("--private_key_suffix",):
OPTIONS.private_key_suffix = a
elif o in ("--boot_signer_path",):
- OPTIONS.boot_signer_path = a
+ raise ValueError(
+ "--boot_signer_path is no longer supported, please switch to AVB")
elif o in ("--boot_signer_args",):
- OPTIONS.boot_signer_args = shlex.split(a)
+ raise ValueError(
+ "--boot_signer_args is no longer supported, please switch to AVB")
elif o in ("--verity_signer_path",):
- OPTIONS.verity_signer_path = a
+ raise ValueError(
+ "--verity_signer_path is no longer supported, please switch to AVB")
elif o in ("--verity_signer_args",):
- OPTIONS.verity_signer_args = shlex.split(a)
+ raise ValueError(
+ "--verity_signer_args is no longer supported, please switch to AVB")
elif o in ("-s", "--device_specific"):
OPTIONS.device_specific = a
elif o in ("-x", "--extra"):
@@ -2752,18 +2797,6 @@
def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
compress_type=None):
- # http://b/18015246
- # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
- # for files larger than 2GiB. We can work around this by adjusting their
- # limit. Note that `zipfile.writestr()` will not work for strings larger than
- # 2GiB. The Python interpreter sometimes rejects strings that large (though
- # it isn't clear to me exactly what circumstances cause this).
- # `zipfile.write()` must be used directly to work around this.
- #
- # This mess can be avoided if we port to python3.
- saved_zip64_limit = zipfile.ZIP64_LIMIT
- zipfile.ZIP64_LIMIT = (1 << 32) - 1
-
if compress_type is None:
compress_type = zip_file.compression
if arcname is None:
@@ -2789,14 +2822,13 @@
finally:
os.chmod(filename, saved_stat.st_mode)
os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
- zipfile.ZIP64_LIMIT = saved_zip64_limit
def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
compress_type=None):
"""Wrap zipfile.writestr() function to work around the zip64 limit.
- Even with the ZIP64_LIMIT workaround, it won't allow writing a string
+ Python's zip implementation won't allow writing a string
longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
when calling crc32(bytes).
@@ -2805,9 +2837,6 @@
when we know the string won't be too long.
"""
- saved_zip64_limit = zipfile.ZIP64_LIMIT
- zipfile.ZIP64_LIMIT = (1 << 32) - 1
-
if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
zinfo.compress_type = zip_file.compression
@@ -2840,41 +2869,37 @@
zinfo.date_time = (2009, 1, 1, 0, 0, 0)
zip_file.writestr(zinfo, data)
- zipfile.ZIP64_LIMIT = saved_zip64_limit
-def ZipDelete(zip_filename, entries):
+def ZipDelete(zip_filename, entries, force=False):
"""Deletes entries from a ZIP file.
- Since deleting entries from a ZIP file is not supported, it shells out to
- 'zip -d'.
-
Args:
zip_filename: The name of the ZIP file.
entries: The name of the entry, or the list of names to be deleted.
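+    force: If True, don't raise when none of the entries are present.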
-
- Raises:
- AssertionError: In case of non-zero return from 'zip'.
"""
if isinstance(entries, str):
entries = [entries]
# If list is empty, nothing to do
if not entries:
return
- cmd = ["zip", "-d", zip_filename] + entries
- RunAndCheckOutput(cmd)
+ with zipfile.ZipFile(zip_filename, 'r') as zin:
+ if not force and len(set(zin.namelist()).intersection(entries)) == 0:
+ raise ExternalError(
+ "Failed to delete zip entries, name not matched: %s" % entries)
-def ZipClose(zip_file):
- # http://b/18015246
- # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
- # central directory.
- saved_zip64_limit = zipfile.ZIP64_LIMIT
- zipfile.ZIP64_LIMIT = (1 << 32) - 1
+ fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(zip_filename))
+ os.close(fd)
- zip_file.close()
+ with zipfile.ZipFile(new_zipfile, 'w') as zout:
+ for item in zin.infolist():
+ if item.filename in entries:
+ continue
+ buffer = zin.read(item.filename)
+ zout.writestr(item, buffer)
- zipfile.ZIP64_LIMIT = saved_zip64_limit
+ os.replace(new_zipfile, zip_filename)
class DeviceSpecificParams(object):
@@ -3420,7 +3445,8 @@
"ext4": "EMMC",
"emmc": "EMMC",
"f2fs": "EMMC",
- "squashfs": "EMMC"
+ "squashfs": "EMMC",
+ "erofs": "EMMC"
}
@@ -3955,133 +3981,9 @@
return None
-def GetCareMap(which, imgname):
- """Returns the care_map string for the given partition.
-
- Args:
- which: The partition name, must be listed in PARTITIONS_WITH_CARE_MAP.
- imgname: The filename of the image.
-
- Returns:
- (which, care_map_ranges): care_map_ranges is the raw string of the care_map
- RangeSet; or None.
- """
- assert which in PARTITIONS_WITH_CARE_MAP
-
- # which + "_image_size" contains the size that the actual filesystem image
- # resides in, which is all that needs to be verified. The additional blocks in
- # the image file contain verity metadata, by reading which would trigger
- # invalid reads.
- image_size = OPTIONS.info_dict.get(which + "_image_size")
- if not image_size:
- return None
-
- disable_sparse = OPTIONS.info_dict.get(which + "_disable_sparse")
-
- image_blocks = int(image_size) // 4096 - 1
- # It's OK for image_blocks to be 0, because care map ranges are inclusive.
- # So 0-0 means "just block 0", which is valid.
- assert image_blocks >= 0, "blocks for {} must be non-negative, image size: {}".format(
- which, image_size)
-
- # For sparse images, we will only check the blocks that are listed in the care
- # map, i.e. the ones with meaningful data.
- if "extfs_sparse_flag" in OPTIONS.info_dict and not disable_sparse:
- simg = sparse_img.SparseImage(imgname)
- care_map_ranges = simg.care_map.intersect(
- rangelib.RangeSet("0-{}".format(image_blocks)))
-
- # Otherwise for non-sparse images, we read all the blocks in the filesystem
- # image.
- else:
- care_map_ranges = rangelib.RangeSet("0-{}".format(image_blocks))
-
- return [which, care_map_ranges.to_string_raw()]
-
-
-def AddCareMapForAbOta(output_file, ab_partitions, image_paths):
- """Generates and adds care_map.pb for a/b partition that has care_map.
-
- Args:
- output_file: The output zip file (needs to be already open),
- or file path to write care_map.pb.
- ab_partitions: The list of A/B partitions.
- image_paths: A map from the partition name to the image path.
- """
- if not output_file:
- raise ExternalError('Expected output_file for AddCareMapForAbOta')
-
- care_map_list = []
- for partition in ab_partitions:
- partition = partition.strip()
- if partition not in PARTITIONS_WITH_CARE_MAP:
- continue
-
- verity_block_device = "{}_verity_block_device".format(partition)
- avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
- if (verity_block_device in OPTIONS.info_dict or
- OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
- if partition not in image_paths:
- logger.warning('Potential partition with care_map missing from images: %s',
- partition)
- continue
- image_path = image_paths[partition]
- if not os.path.exists(image_path):
- raise ExternalError('Expected image at path {}'.format(image_path))
-
- care_map = GetCareMap(partition, image_path)
- if not care_map:
- continue
- care_map_list += care_map
-
- # adds fingerprint field to the care_map
- # TODO(xunchang) revisit the fingerprint calculation for care_map.
- partition_props = OPTIONS.info_dict.get(partition + ".build.prop")
- prop_name_list = ["ro.{}.build.fingerprint".format(partition),
- "ro.{}.build.thumbprint".format(partition)]
-
- present_props = [x for x in prop_name_list if
- partition_props and partition_props.GetProp(x)]
- if not present_props:
- logger.warning(
- "fingerprint is not present for partition %s", partition)
- property_id, fingerprint = "unknown", "unknown"
- else:
- property_id = present_props[0]
- fingerprint = partition_props.GetProp(property_id)
- care_map_list += [property_id, fingerprint]
-
- if not care_map_list:
- return
-
- # Converts the list into proto buf message by calling care_map_generator; and
- # writes the result to a temp file.
- temp_care_map_text = MakeTempFile(prefix="caremap_text-",
- suffix=".txt")
- with open(temp_care_map_text, 'w') as text_file:
- text_file.write('\n'.join(care_map_list))
-
- temp_care_map = MakeTempFile(prefix="caremap-", suffix=".pb")
- care_map_gen_cmd = ["care_map_generator", temp_care_map_text, temp_care_map]
- RunAndCheckOutput(care_map_gen_cmd)
-
- if not isinstance(output_file, zipfile.ZipFile):
- shutil.copy(temp_care_map, output_file)
- return
- # output_file is a zip file
- care_map_path = "META/care_map.pb"
- if care_map_path in output_file.namelist():
- # Copy the temp file into the OPTIONS.input_tmp dir and update the
- # replace_updated_files_list used by add_img_to_target_files
- if not OPTIONS.replace_updated_files_list:
- OPTIONS.replace_updated_files_list = []
- shutil.copy(temp_care_map, os.path.join(OPTIONS.input_tmp, care_map_path))
- OPTIONS.replace_updated_files_list.append(care_map_path)
- else:
- ZipWrite(output_file, temp_care_map, arcname=care_map_path)
-
-
def IsSparseImage(filepath):
+ if not os.path.exists(filepath):
+ return False
with open(filepath, 'rb') as fp:
# Magic for android sparse image format
# https://source.android.com/devices/bootloader/images
diff --git a/tools/releasetools/fsverity_manifest_generator.py b/tools/releasetools/fsverity_manifest_generator.py
deleted file mode 100644
index b8184bc..0000000
--- a/tools/releasetools/fsverity_manifest_generator.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-`fsverity_manifest_generator` generates build manifest APK file containing
-digests of target files. The APK file is signed so the manifest inside the APK
-can be trusted.
-"""
-
-import argparse
-import common
-import os
-import subprocess
-import sys
-from fsverity_digests_pb2 import FSVerityDigests
-
-HASH_ALGORITHM = 'sha256'
-
-def _digest(fsverity_path, input_file):
- cmd = [fsverity_path, 'digest', input_file]
- cmd.extend(['--compact'])
- cmd.extend(['--hash-alg', HASH_ALGORITHM])
- out = subprocess.check_output(cmd, universal_newlines=True).strip()
- return bytes(bytearray.fromhex(out))
-
-if __name__ == '__main__':
- p = argparse.ArgumentParser()
- p.add_argument(
- '--output',
- help='Path to the output manifest APK',
- required=True)
- p.add_argument(
- '--fsverity-path',
- help='path to the fsverity program',
- required=True)
- p.add_argument(
- '--aapt2-path',
- help='path to the aapt2 program',
- required=True)
- p.add_argument(
- '--min-sdk-version',
- help='minimum supported sdk version of the generated manifest apk',
- required=True)
- p.add_argument(
- '--version-code',
- help='version code for the generated manifest apk',
- required=True)
- p.add_argument(
- '--version-name',
- help='version name for the generated manifest apk',
- required=True)
- p.add_argument(
- '--framework-res',
- help='path to framework-res.apk',
- required=True)
- p.add_argument(
- '--apksigner-path',
- help='path to the apksigner program',
- required=True)
- p.add_argument(
- '--apk-key-path',
- help='path to the apk key',
- required=True)
- p.add_argument(
- '--apk-manifest-path',
- help='path to AndroidManifest.xml',
- required=True)
- p.add_argument(
- '--base-dir',
- help='directory to use as a relative root for the inputs',
- required=True)
- p.add_argument(
- 'inputs',
- nargs='+',
- help='input file for the build manifest')
- args = p.parse_args(sys.argv[1:])
-
- digests = FSVerityDigests()
- for f in sorted(args.inputs):
- # f is a full path for now; make it relative so it starts with {mount_point}/
- digest = digests.digests[os.path.relpath(f, args.base_dir)]
- digest.digest = _digest(args.fsverity_path, f)
- digest.hash_alg = HASH_ALGORITHM
-
- temp_dir = common.MakeTempDir()
-
- os.mkdir(os.path.join(temp_dir, "assets"))
- metadata_path = os.path.join(temp_dir, "assets", "build_manifest.pb")
- with open(metadata_path, "wb") as f:
- f.write(digests.SerializeToString())
-
- common.RunAndCheckOutput([args.aapt2_path, "link",
- "-A", os.path.join(temp_dir, "assets"),
- "-o", args.output,
- "--min-sdk-version", args.min_sdk_version,
- "--version-code", args.version_code,
- "--version-name", args.version_name,
- "-I", args.framework_res,
- "--manifest", args.apk_manifest_path])
- common.RunAndCheckOutput([args.apksigner_path, "sign", "--in", args.output,
- "--cert", args.apk_key_path + ".x509.pem",
- "--key", args.apk_key_path + ".pk8"])
diff --git a/tools/releasetools/images.py b/tools/releasetools/images.py
index a24148a..d06b979 100644
--- a/tools/releasetools/images.py
+++ b/tools/releasetools/images.py
@@ -149,7 +149,7 @@
class FileImage(Image):
"""An image wrapped around a raw image file."""
- def __init__(self, path, hashtree_info_generator=None):
+ def __init__(self, path):
self.path = path
self.blocksize = 4096
self._file_size = os.path.getsize(self.path)
@@ -166,10 +166,6 @@
self.generator_lock = threading.Lock()
- self.hashtree_info = None
- if hashtree_info_generator:
- self.hashtree_info = hashtree_info_generator.Generate(self)
-
zero_blocks = []
nonzero_blocks = []
reference = '\0' * self.blocksize
@@ -190,8 +186,6 @@
self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
if nonzero_blocks:
self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
- if self.hashtree_info:
- self.file_map["__HASHTREE"] = self.hashtree_info.hashtree_range
def __del__(self):
self._file.close()
diff --git a/tools/releasetools/img_from_target_files b/tools/releasetools/img_from_target_files
deleted file mode 120000
index afaf24b..0000000
--- a/tools/releasetools/img_from_target_files
+++ /dev/null
@@ -1 +0,0 @@
-img_from_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 76da89c..f8bdd81 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -173,7 +173,7 @@
logger.info('Writing super.img to archive...')
with zipfile.ZipFile(
output_file, 'a', compression=zipfile.ZIP_DEFLATED,
- allowZip64=not OPTIONS.sparse_userimages) as output_zip:
+ allowZip64=True) as output_zip:
common.ZipWrite(output_zip, super_file, 'super.img')
diff --git a/tools/releasetools/merge/merge_dexopt.py b/tools/releasetools/merge/merge_dexopt.py
index 7bf9bd4..16182b5 100644
--- a/tools/releasetools/merge/merge_dexopt.py
+++ b/tools/releasetools/merge/merge_dexopt.py
@@ -164,6 +164,10 @@
'deapexer',
'--debugfs_path',
'debugfs_static',
+ '--blkid_path',
+ 'blkid',
+ '--fsckerofs_path',
+ 'fsck.erofs',
'extract',
apex,
apex_extract_dir,
diff --git a/tools/releasetools/merge/merge_meta.py b/tools/releasetools/merge/merge_meta.py
index 580b3ce..3288ef7 100644
--- a/tools/releasetools/merge/merge_meta.py
+++ b/tools/releasetools/merge/merge_meta.py
@@ -52,6 +52,49 @@
MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
+def ParseUpdateEngineConfig(path: str):
+ """Parse the update_engine config stored in file `path`
+ Args
+ path: Path to update_engine_config.txt file in target_files
+
+ Returns
+ A tuple of (major, minor) version number . E.g. (2, 8)
+ """
+ with open(path, "r") as fp:
+ # update_engine_config.txt is only supposed to contain two lines,
+ # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more than
+ # sufficient. If the length is more than that, something is wrong.
+ data = fp.read(1024)
+ major = re.search(r"PAYLOAD_MAJOR_VERSION=(\d+)", data)
+ if not major:
+ raise ValueError(
+ f"{path} is an invalid update_engine config, missing PAYLOAD_MAJOR_VERSION {data}")
+ minor = re.search(r"PAYLOAD_MINOR_VERSION=(\d+)", data)
+ if not minor:
+ raise ValueError(
+ f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
+ return (int(major.group(1)), int(minor.group(1)))
+
+
+def MergeUpdateEngineConfig(input_metadir1, input_metadir2, merged_meta_dir):
+ UPDATE_ENGINE_CONFIG_NAME = "update_engine_config.txt"
+ config1_path = os.path.join(
+ input_metadir1, UPDATE_ENGINE_CONFIG_NAME)
+ config2_path = os.path.join(
+ input_metadir2, UPDATE_ENGINE_CONFIG_NAME)
+ config1 = ParseUpdateEngineConfig(config1_path)
+ config2 = ParseUpdateEngineConfig(config2_path)
+ # Copy older config to merged target files for maximum compatibility
+ # update_engine in system partition is from system side, but
+ # update_engine_sideload in recovery is from vendor side.
+ if config1 < config2:
+ shutil.copy(config1_path, os.path.join(
+ merged_meta_dir, UPDATE_ENGINE_CONFIG_NAME))
+ else:
+ shutil.copy(config2_path, os.path.join(
+ merged_meta_dir, UPDATE_ENGINE_CONFIG_NAME))
+
+
def MergeMetaFiles(temp_dir, merged_dir):
"""Merges various files in META/*."""
@@ -102,6 +145,11 @@
merged_meta_dir=merged_meta_dir,
file_name=file_name)
+ MergeUpdateEngineConfig(
+ framework_meta_dir,
+ vendor_meta_dir, merged_meta_dir,
+ )
+
# Write the now-finalized OPTIONS.merged_misc_info.
merge_utils.WriteSortedData(
data=OPTIONS.merged_misc_info,
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index e253b02..e056195 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -178,7 +178,6 @@
item_set.update([
'META/liblz4.so',
'META/postinstall_config.txt',
- 'META/update_engine_config.txt',
'META/zucchini_config.txt',
])
else: # vendor
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
index eceb734..1ae1f54 100644
--- a/tools/releasetools/merge/test_merge_utils.py
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -142,7 +142,6 @@
'META/liblz4.so',
'META/postinstall_config.txt',
'META/product_filesystem_config.txt',
- 'META/update_engine_config.txt',
'META/zucchini_config.txt',
'PRODUCT/*',
'SYSTEM/*',
diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py
new file mode 100644
index 0000000..7d3d3a3
--- /dev/null
+++ b/tools/releasetools/merge_ota.py
@@ -0,0 +1,262 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import struct
+import sys
+import update_payload
+import tempfile
+import zipfile
+import os
+import care_map_pb2
+
+import common
+from typing import BinaryIO, List
+from update_metadata_pb2 import DeltaArchiveManifest, DynamicPartitionMetadata, DynamicPartitionGroup
+from ota_metadata_pb2 import OtaMetadata
+from update_payload import Payload
+
+from payload_signer import PayloadSigner
+from ota_utils import PayloadGenerator, METADATA_PROTO_NAME, FinalizeMetadata
+
+logger = logging.getLogger(__name__)
+
+CARE_MAP_ENTRY = "care_map.pb"
+
+
+def WriteDataBlob(payload: Payload, outfp: BinaryIO, read_size=1024*64):
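+  # Streams the payload's data blob to outfp in read_size-byte (64 KiB)
+  # chunks, so the whole blob never has to fit in memory.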
+ for i in range(0, payload.total_data_length, read_size):
+ blob = payload.ReadDataBlob(
+ i, min(i+read_size, payload.total_data_length)-i)
+ outfp.write(blob)
+
+
+def ConcatBlobs(payloads: List[Payload], outfp: BinaryIO):
+ for payload in payloads:
+ WriteDataBlob(payload, outfp)
+
+
+def TotalDataLength(partitions):
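+  # The blob length is the end offset of the last operation that carries
+  # data; e.g. a final op with data_offset=4096 and data_length=1024 gives
+  # 5120.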
+ for partition in reversed(partitions):
+ for op in reversed(partition.operations):
+ if op.data_length > 0:
+ return op.data_offset + op.data_length
+ return 0
+
+
+def ExtendPartitionUpdates(partitions, new_partitions):
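+  # Appends new_partitions and rebases their data offsets past the existing
+  # blob, so the merged manifest still indexes correctly into the blob that
+  # ConcatBlobs() produces; e.g. with an existing blob of length N, an
+  # appended op at data_offset=0 becomes data_offset=N.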
+ prefix_blob_length = TotalDataLength(partitions)
+ partitions.extend(new_partitions)
+ for part in partitions[-len(new_partitions):]:
+ for op in part.operations:
+ if op.HasField("data_length") and op.data_length != 0:
+ op.data_offset += prefix_blob_length
+
+
+class DuplicatePartitionError(ValueError):
+ pass
+
+
+def MergeDynamicPartitionGroups(groups: List[DynamicPartitionGroup], new_groups: List[DynamicPartitionGroup]):
+ new_groups = {new_group.name: new_group for new_group in new_groups}
+ for group in groups:
+ if group.name not in new_groups:
+ continue
+ new_group = new_groups[group.name]
+ common_partitions = set(group.partition_names).intersection(
+ set(new_group.partition_names))
+ if len(common_partitions) != 0:
+ raise DuplicatePartitionError(
+ f"Old group and new group should not have any intersections, {group.partition_names}, {new_group.partition_names}, common partitions: {common_partitions}")
+ group.partition_names.extend(new_group.partition_names)
+ group.size = max(new_group.size, group.size)
+ del new_groups[group.name]
+ for new_group in new_groups.values():
+ groups.append(new_group)
+
+
+def MergeDynamicPartitionMetadata(metadata: DynamicPartitionMetadata, new_metadata: DynamicPartitionMetadata):
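+  # Capabilities are AND-combined (snapshot/VABC must be supported by every
+  # input), compression params must match exactly, and the highest
+  # cow_version wins.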
+ MergeDynamicPartitionGroups(metadata.groups, new_metadata.groups)
+ metadata.snapshot_enabled &= new_metadata.snapshot_enabled
+ metadata.vabc_enabled &= new_metadata.vabc_enabled
+ assert metadata.vabc_compression_param == new_metadata.vabc_compression_param, f"{metadata.vabc_compression_param} vs. {new_metadata.vabc_compression_param}"
+ metadata.cow_version = max(metadata.cow_version, new_metadata.cow_version)
+
+
+def MergeManifests(payloads: List[Payload]) -> DeltaArchiveManifest:
+ if len(payloads) == 0:
+ return None
+ if len(payloads) == 1:
+ return payloads[0].manifest
+
+ output_manifest = DeltaArchiveManifest()
+ output_manifest.block_size = payloads[0].manifest.block_size
+ output_manifest.partial_update = True
+ output_manifest.dynamic_partition_metadata.snapshot_enabled = payloads[
+ 0].manifest.dynamic_partition_metadata.snapshot_enabled
+ output_manifest.dynamic_partition_metadata.vabc_enabled = payloads[
+ 0].manifest.dynamic_partition_metadata.vabc_enabled
+ output_manifest.dynamic_partition_metadata.vabc_compression_param = payloads[
+ 0].manifest.dynamic_partition_metadata.vabc_compression_param
+ apex_info = {}
+ for payload in payloads:
+ manifest = payload.manifest
+ assert manifest.block_size == output_manifest.block_size
+ output_manifest.minor_version = max(
+ output_manifest.minor_version, manifest.minor_version)
+ output_manifest.max_timestamp = max(
+ output_manifest.max_timestamp, manifest.max_timestamp)
+ for apex in manifest.apex_info:
+ apex_info[apex.package_name] = apex
+ ExtendPartitionUpdates(output_manifest.partitions, manifest.partitions)
+ try:
+ MergeDynamicPartitionMetadata(
+ output_manifest.dynamic_partition_metadata, manifest.dynamic_partition_metadata)
+ except DuplicatePartitionError:
+ logger.error(
+ "OTA %s has duplicate partition with some of the previous OTAs", payload.name)
+ raise
+
+ for apex_name in sorted(apex_info.keys()):
+    output_manifest.apex_info.append(apex_info[apex_name])
+
+ return output_manifest
+
+
+def MergePayloads(payloads: List[Payload]):
+ with tempfile.NamedTemporaryFile(prefix="payload_blob") as tmpfile:
+ ConcatBlobs(payloads, tmpfile)
+
+
+def MergeCareMap(paths: List[str]):
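+  # Concatenates the care_map.pb partition entries from every input package;
+  # returns empty bytes when no package carries a care map.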
+ care_map = care_map_pb2.CareMap()
+ for path in paths:
+ with zipfile.ZipFile(path, "r", allowZip64=True) as zfp:
+ if CARE_MAP_ENTRY in zfp.namelist():
+ care_map_bytes = zfp.read(CARE_MAP_ENTRY)
+ partial_care_map = care_map_pb2.CareMap()
+ partial_care_map.ParseFromString(care_map_bytes)
+ care_map.partitions.extend(partial_care_map.partitions)
+ if len(care_map.partitions) == 0:
+ return b""
+ return care_map.SerializeToString()
+
+
+def WriteHeaderAndManifest(manifest: DeltaArchiveManifest, fp: BinaryIO):
+ __MAGIC = b"CrAU"
+ __MAJOR_VERSION = 2
+ manifest_bytes = manifest.SerializeToString()
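+  # ">4sQQL" lays out the big-endian payload header: magic "CrAU" (4s),
+  # major version (Q), manifest size (Q), and a zero metadata-signature
+  # size (L); the serialized manifest follows immediately.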
+ fp.write(struct.pack(f">4sQQL", __MAGIC,
+ __MAJOR_VERSION, len(manifest_bytes), 0))
+ fp.write(manifest_bytes)
+
+
+def AddOtaMetadata(input_ota, metadata_ota, output_ota, package_key, pw):
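+  # Reuses the ota_metadata proto from metadata_ota, then lets
+  # FinalizeMetadata write and sign the merged package with package_key.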
+ with zipfile.ZipFile(metadata_ota, 'r') as zfp:
+ metadata = OtaMetadata()
+ metadata.ParseFromString(zfp.read(METADATA_PROTO_NAME))
+ FinalizeMetadata(metadata, input_ota, output_ota,
+ package_key=package_key, pw=pw)
+ return output_ota
+
+
+def CheckOutput(output_ota):
+ payload = update_payload.Payload(output_ota)
+ payload.CheckOpDataHash()
+
+
+def CheckDuplicatePartitions(payloads: List[Payload]):
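+  # A merged partial OTA may update each dynamic partition at most once, so
+  # fail fast if two input OTAs touch the same partition.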
+ partition_to_ota = {}
+ for payload in payloads:
+ for group in payload.manifest.dynamic_partition_metadata.groups:
+ for part in group.partition_names:
+ if part in partition_to_ota:
+ raise DuplicatePartitionError(
+ f"OTA {partition_to_ota[part].name} and {payload.name} have duplicating partition {part}")
+ partition_to_ota[part] = payload
+
+def main(argv):
+ parser = argparse.ArgumentParser(description='Merge multiple partial OTAs')
+ parser.add_argument('packages', type=str, nargs='+',
+ help='Paths to OTA packages to merge')
+ parser.add_argument('--package_key', type=str,
+ help='Paths to private key for signing payload')
+ parser.add_argument('--search_path', type=str,
+ help='Search path for framework/signapk.jar')
+ parser.add_argument('--output', type=str,
+ help='Paths to output merged ota', required=True)
+ parser.add_argument('--metadata_ota', type=str,
+                      help='Output zip will use build metadata from this OTA package; if unspecified, the last OTA package in the merge list is used')
+ parser.add_argument('--private_key_suffix', type=str,
+ help='Suffix to be appended to package_key path', default=".pk8")
+ parser.add_argument('-v', action="store_true", help="Enable verbose logging", dest="verbose")
+ args = parser.parse_args(argv[1:])
+ file_paths = args.packages
+
+ common.OPTIONS.verbose = args.verbose
+ if args.verbose:
+ logger.setLevel(logging.INFO)
+
+ logger.info(args)
+ if args.search_path:
+ common.OPTIONS.search_path = args.search_path
+
+ metadata_ota = args.packages[-1]
+ if args.metadata_ota is not None:
+ metadata_ota = args.metadata_ota
+ assert os.path.exists(metadata_ota)
+
+ payloads = [Payload(path) for path in file_paths]
+
+ CheckDuplicatePartitions(payloads)
+
+ merged_manifest = MergeManifests(payloads)
+
+ with tempfile.NamedTemporaryFile() as unsigned_payload:
+ WriteHeaderAndManifest(merged_manifest, unsigned_payload)
+ ConcatBlobs(payloads, unsigned_payload)
+ unsigned_payload.flush()
+
+ generator = PayloadGenerator()
+ generator.payload_file = unsigned_payload.name
+ logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
+
+ if args.package_key:
+ logger.info("Signing payload...")
+ signer = PayloadSigner(args.package_key, args.private_key_suffix)
+ generator.payload_file = unsigned_payload.name
+ generator.Sign(signer)
+
+ logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
+
+ logger.info("Writing to %s", args.output)
+ key_passwords = common.GetKeyPasswords([args.package_key])
+ with tempfile.NamedTemporaryFile(prefix="signed_ota", suffix=".zip") as signed_ota:
+ with zipfile.ZipFile(signed_ota, "w") as zfp:
+ generator.WriteToZip(zfp)
+ care_map_bytes = MergeCareMap(args.packages)
+ if care_map_bytes:
+ zfp.writestr(CARE_MAP_ENTRY, care_map_bytes)
+ AddOtaMetadata(signed_ota.name, metadata_ota,
+ args.output, args.package_key, key_passwords[args.package_key])
+ return 0
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ sys.exit(main(sys.argv))
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 9732cda..ac85aa4 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -40,12 +40,9 @@
info_dict=source_info,
allow_shared_blocks=allow_shared_blocks)
- hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator(
- name, 4096, target_info)
partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip,
info_dict=target_info,
- allow_shared_blocks=allow_shared_blocks,
- hashtree_info_generator=hashtree_info_generator)
+ allow_shared_blocks=allow_shared_blocks)
# Check the first block of the source system partition for remount R/W only
# if the filesystem is ext4.
@@ -280,7 +277,7 @@
# We haven't written the metadata entry, which will be done in
# FinalizeMetadata.
- common.ZipClose(output_zip)
+ output_zip.close()
needed_property_files = (
NonAbOtaPropertyFiles(),
@@ -534,7 +531,7 @@
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
- common.ZipClose(output_zip)
+ output_zip.close()
# Sign the generated zip package unless no_signing is specified.
needed_property_files = (
diff --git a/tools/releasetools/ota_from_target_files b/tools/releasetools/ota_from_target_files
deleted file mode 120000
index 6755a90..0000000
--- a/tools/releasetools/ota_from_target_files
+++ /dev/null
@@ -1 +0,0 @@
-ota_from_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 5384699..60e95ad 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -244,6 +244,9 @@
--vabc_compression_param
Compression algorithm to be used for VABC. Available options: gz, brotli, none
+
+ --security_patch_level
+ Override the security patch level in target files
"""
from __future__ import print_function
@@ -255,7 +258,6 @@
import re
import shlex
import shutil
-import struct
import subprocess
import sys
import zipfile
@@ -264,11 +266,12 @@
import common
import ota_utils
from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
- PropertyFiles, SECURITY_PATCH_LEVEL_PROP_NAME, GetZipEntryOffset)
+ PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME)
from common import IsSparseImage
import target_files_diff
from check_target_files_vintf import CheckVintfIfTrebleEnabled
from non_ab_ota import GenerateNonAbOtaPackage
+from payload_signer import PayloadSigner
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -316,6 +319,7 @@
OPTIONS.enable_zucchini = True
OPTIONS.enable_lz4diff = False
OPTIONS.vabc_compression_param = None
+OPTIONS.security_patch_level = None
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -335,207 +339,6 @@
'vendor', 'vendor_boot']
-class PayloadSigner(object):
- """A class that wraps the payload signing works.
-
- When generating a Payload, hashes of the payload and metadata files will be
- signed with the device key, either by calling an external payload signer or
- by calling openssl with the package key. This class provides a unified
- interface, so that callers can just call PayloadSigner.Sign().
-
- If an external payload signer has been specified (OPTIONS.payload_signer), it
- calls the signer with the provided args (OPTIONS.payload_signer_args). Note
- that the signing key should be provided as part of the payload_signer_args.
- Otherwise without an external signer, it uses the package key
- (OPTIONS.package_key) and calls openssl for the signing works.
- """
-
- def __init__(self):
- if OPTIONS.payload_signer is None:
- # Prepare the payload signing key.
- private_key = OPTIONS.package_key + OPTIONS.private_key_suffix
- pw = OPTIONS.key_passwords[OPTIONS.package_key]
-
- cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
- cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
- signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
- cmd.extend(["-out", signing_key])
- common.RunAndCheckOutput(cmd, verbose=False)
-
- self.signer = "openssl"
- self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
- "-pkeyopt", "digest:sha256"]
- self.maximum_signature_size = self._GetMaximumSignatureSizeInBytes(
- signing_key)
- else:
- self.signer = OPTIONS.payload_signer
- self.signer_args = OPTIONS.payload_signer_args
- if OPTIONS.payload_signer_maximum_signature_size:
- self.maximum_signature_size = int(
- OPTIONS.payload_signer_maximum_signature_size)
- else:
- # The legacy config uses RSA2048 keys.
- logger.warning("The maximum signature size for payload signer is not"
- " set, default to 256 bytes.")
- self.maximum_signature_size = 256
-
- @staticmethod
- def _GetMaximumSignatureSizeInBytes(signing_key):
- out_signature_size_file = common.MakeTempFile("signature_size")
- cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
- out_signature_size_file), "--private_key={}".format(signing_key)]
- common.RunAndCheckOutput(cmd)
- with open(out_signature_size_file) as f:
- signature_size = f.read().rstrip()
- logger.info("%s outputs the maximum signature size: %s", cmd[0],
- signature_size)
- return int(signature_size)
-
- def Sign(self, in_file):
- """Signs the given input file. Returns the output filename."""
- out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
- cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
- common.RunAndCheckOutput(cmd)
- return out_file
-
-
-class Payload(object):
- """Manages the creation and the signing of an A/B OTA Payload."""
-
- PAYLOAD_BIN = 'payload.bin'
- PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
- SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
- SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
-
- def __init__(self, secondary=False):
- """Initializes a Payload instance.
-
- Args:
- secondary: Whether it's generating a secondary payload (default: False).
- """
- self.payload_file = None
- self.payload_properties = None
- self.secondary = secondary
-
- def _Run(self, cmd): # pylint: disable=no-self-use
- # Don't pipe (buffer) the output if verbose is set. Let
- # brillo_update_payload write to stdout/stderr directly, so its progress can
- # be monitored.
- if OPTIONS.verbose:
- common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
- else:
- common.RunAndCheckOutput(cmd)
-
- def Generate(self, target_file, source_file=None, additional_args=None):
- """Generates a payload from the given target-files zip(s).
-
- Args:
- target_file: The filename of the target build target-files zip.
- source_file: The filename of the source build target-files zip; or None if
- generating a full OTA.
- additional_args: A list of additional args that should be passed to
- brillo_update_payload script; or None.
- """
- if additional_args is None:
- additional_args = []
-
- payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
- cmd = ["brillo_update_payload", "generate",
- "--payload", payload_file,
- "--target_image", target_file]
- if source_file is not None:
- cmd.extend(["--source_image", source_file])
- if OPTIONS.disable_fec_computation:
- cmd.extend(["--disable_fec_computation", "true"])
- if OPTIONS.disable_verity_computation:
- cmd.extend(["--disable_verity_computation", "true"])
- cmd.extend(additional_args)
- self._Run(cmd)
-
- self.payload_file = payload_file
- self.payload_properties = None
-
- def Sign(self, payload_signer):
- """Generates and signs the hashes of the payload and metadata.
-
- Args:
- payload_signer: A PayloadSigner() instance that serves the signing work.
-
- Raises:
- AssertionError: On any failure when calling brillo_update_payload script.
- """
- assert isinstance(payload_signer, PayloadSigner)
-
- # 1. Generate hashes of the payload and metadata files.
- payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
- metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
- cmd = ["brillo_update_payload", "hash",
- "--unsigned_payload", self.payload_file,
- "--signature_size", str(payload_signer.maximum_signature_size),
- "--metadata_hash_file", metadata_sig_file,
- "--payload_hash_file", payload_sig_file]
- self._Run(cmd)
-
- # 2. Sign the hashes.
- signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
- signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
-
- # 3. Insert the signatures back into the payload file.
- signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
- suffix=".bin")
- cmd = ["brillo_update_payload", "sign",
- "--unsigned_payload", self.payload_file,
- "--payload", signed_payload_file,
- "--signature_size", str(payload_signer.maximum_signature_size),
- "--metadata_signature_file", signed_metadata_sig_file,
- "--payload_signature_file", signed_payload_sig_file]
- self._Run(cmd)
-
- # 4. Dump the signed payload properties.
- properties_file = common.MakeTempFile(prefix="payload-properties-",
- suffix=".txt")
- cmd = ["brillo_update_payload", "properties",
- "--payload", signed_payload_file,
- "--properties_file", properties_file]
- self._Run(cmd)
-
- if self.secondary:
- with open(properties_file, "a") as f:
- f.write("SWITCH_SLOT_ON_REBOOT=0\n")
-
- if OPTIONS.wipe_user_data:
- with open(properties_file, "a") as f:
- f.write("POWERWASH=1\n")
-
- self.payload_file = signed_payload_file
- self.payload_properties = properties_file
-
- def WriteToZip(self, output_zip):
- """Writes the payload to the given zip.
-
- Args:
- output_zip: The output ZipFile instance.
- """
- assert self.payload_file is not None
- assert self.payload_properties is not None
-
- if self.secondary:
- payload_arcname = Payload.SECONDARY_PAYLOAD_BIN
- payload_properties_arcname = Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT
- else:
- payload_arcname = Payload.PAYLOAD_BIN
- payload_properties_arcname = Payload.PAYLOAD_PROPERTIES_TXT
-
- # Add the signed payload file and properties into the zip. In order to
- # support streaming, we pack them as ZIP_STORED. So these entries can be
- # read directly with the offset and length pairs.
- common.ZipWrite(output_zip, self.payload_file, arcname=payload_arcname,
- compress_type=zipfile.ZIP_STORED)
- common.ZipWrite(output_zip, self.payload_properties,
- arcname=payload_properties_arcname,
- compress_type=zipfile.ZIP_STORED)
-
-
def _LoadOemDicts(oem_source):
"""Returns the list of loaded OEM properties dict."""
if not oem_source:
@@ -547,113 +350,6 @@
return oem_dicts
-class StreamingPropertyFiles(PropertyFiles):
- """A subclass for computing the property-files for streaming A/B OTAs."""
-
- def __init__(self):
- super(StreamingPropertyFiles, self).__init__()
- self.name = 'ota-streaming-property-files'
- self.required = (
- # payload.bin and payload_properties.txt must exist.
- 'payload.bin',
- 'payload_properties.txt',
- )
- self.optional = (
- # apex_info.pb isn't directly used in the update flow
- 'apex_info.pb',
- # care_map is available only if dm-verity is enabled.
- 'care_map.pb',
- 'care_map.txt',
- # compatibility.zip is available only if target supports Treble.
- 'compatibility.zip',
- )
-
-
-class AbOtaPropertyFiles(StreamingPropertyFiles):
- """The property-files for A/B OTA that includes payload_metadata.bin info.
-
- Since P, we expose one more token (aka property-file), in addition to the ones
- for streaming A/B OTA, for a virtual entry of 'payload_metadata.bin'.
- 'payload_metadata.bin' is the header part of a payload ('payload.bin'), which
- doesn't exist as a separate ZIP entry, but can be used to verify if the
- payload can be applied on the given device.
-
- For backward compatibility, we keep both of the 'ota-streaming-property-files'
- and the newly added 'ota-property-files' in P. The new token will only be
- available in 'ota-property-files'.
- """
-
- def __init__(self):
- super(AbOtaPropertyFiles, self).__init__()
- self.name = 'ota-property-files'
-
- def _GetPrecomputed(self, input_zip):
- offset, size = self._GetPayloadMetadataOffsetAndSize(input_zip)
- return ['payload_metadata.bin:{}:{}'.format(offset, size)]
-
- @staticmethod
- def _GetPayloadMetadataOffsetAndSize(input_zip):
- """Computes the offset and size of the payload metadata for a given package.
-
- (From system/update_engine/update_metadata.proto)
- A delta update file contains all the deltas needed to update a system from
- one specific version to another specific version. The update format is
- represented by this struct pseudocode:
-
- struct delta_update_file {
- char magic[4] = "CrAU";
- uint64 file_format_version;
- uint64 manifest_size; // Size of protobuf DeltaArchiveManifest
-
- // Only present if format_version > 1:
- uint32 metadata_signature_size;
-
- // The Bzip2 compressed DeltaArchiveManifest
- char manifest[metadata_signature_size];
-
- // The signature of the metadata (from the beginning of the payload up to
- // this location, not including the signature itself). This is a
- // serialized Signatures message.
- char medatada_signature_message[metadata_signature_size];
-
- // Data blobs for files, no specific format. The specific offset
- // and length of each data blob is recorded in the DeltaArchiveManifest.
- struct {
- char data[];
- } blobs[];
-
- // These two are not signed:
- uint64 payload_signatures_message_size;
- char payload_signatures_message[];
- };
-
- 'payload-metadata.bin' contains all the bytes from the beginning of the
- payload, till the end of 'medatada_signature_message'.
- """
- payload_info = input_zip.getinfo('payload.bin')
- (payload_offset, payload_size) = GetZipEntryOffset(input_zip, payload_info)
-
- # Read the underlying raw zipfile at specified offset
- payload_fp = input_zip.fp
- payload_fp.seek(payload_offset)
- header_bin = payload_fp.read(24)
-
- # network byte order (big-endian)
- header = struct.unpack("!IQQL", header_bin)
-
- # 'CrAU'
- magic = header[0]
- assert magic == 0x43724155, "Invalid magic: {:x}, computed offset {}" \
- .format(magic, payload_offset)
-
- manifest_size = header[2]
- metadata_signature_size = header[3]
- metadata_total = 24 + manifest_size + metadata_signature_size
- assert metadata_total < payload_size
-
- return (payload_offset, metadata_total)
-
-
def ModifyVABCCompressionParam(content, algo):
""" Update update VABC Compression Param in dynamic_partitions_info.txt
Args:
@@ -791,7 +487,7 @@
else:
common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
- common.ZipClose(target_zip)
+ target_zip.close()
return target_file
@@ -928,7 +624,7 @@
# TODO(xunchang) handle META/postinstall_config.txt'
- common.ZipClose(partial_target_zip)
+ partial_target_zip.close()
return partial_target_file
@@ -1013,7 +709,7 @@
# Write new ab_partitions.txt file
common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
- common.ZipClose(target_zip)
+ target_zip.close()
return target_file
@@ -1073,7 +769,7 @@
for part in pre_partition_state:
if part.partition_name in partition_timestamps:
partition_timestamps[part.partition_name] = \
- max(part.version, partition_timestamps[part.partition_name])
+ max(part.version, partition_timestamps[part.partition_name])
return [
"--partition_timestamps",
",".join([key + ":" + val for (key, val)
@@ -1201,7 +897,7 @@
# Metadata to comply with Android OTA package format.
metadata = GetPackageMetadata(target_info, source_info)
# Generate payload.
- payload = Payload()
+ payload = PayloadGenerator(OPTIONS.include_secondary, OPTIONS.wipe_user_data)
partition_timestamps_flags = []
# Enforce a max timestamp this payload can be applied on top of.
@@ -1221,6 +917,13 @@
"Builds doesn't support zucchini, or source/target don't have compatible zucchini versions. Disabling zucchini.")
OPTIONS.enable_zucchini = False
+ security_patch_level = target_info.GetBuildProp(
+ "ro.build.version.security_patch")
+ if OPTIONS.security_patch_level is not None:
+ security_patch_level = OPTIONS.security_patch_level
+
+ additional_args += ["--security_patch_level", security_patch_level]
+
additional_args += ["--enable_zucchini",
str(OPTIONS.enable_zucchini).lower()]
@@ -1266,7 +969,10 @@
)
# Sign the payload.
- payload_signer = PayloadSigner()
+ pw = OPTIONS.key_passwords[OPTIONS.package_key]
+ payload_signer = PayloadSigner(
+ OPTIONS.package_key, OPTIONS.private_key_suffix,
+ pw, OPTIONS.payload_signer)
payload.Sign(payload_signer)
# Write the payload into output zip.
@@ -1279,7 +985,7 @@
# building an incremental OTA. See the comments for "--include_secondary".
secondary_target_file = GetTargetFilesZipForSecondaryImages(
target_file, OPTIONS.skip_postinstall)
- secondary_payload = Payload(secondary=True)
+ secondary_payload = PayloadGenerator(secondary=True)
secondary_payload.Generate(secondary_target_file,
additional_args=["--max_timestamp",
max_timestamp])
@@ -1289,8 +995,7 @@
# If dm-verity is supported for the device, copy contents of care_map
# into A/B OTA package.
target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
- if (target_info.get("verity") == "true" or
- target_info.get("avb_enable") == "true"):
+ if target_info.get("avb_enable") == "true":
care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
"META/" + x in target_zip.namelist()]
@@ -1312,21 +1017,14 @@
common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
compress_type=zipfile.ZIP_STORED)
- common.ZipClose(target_zip)
+ target_zip.close()
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
- common.ZipClose(output_zip)
+ output_zip.close()
- # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
- # all the info of the latter. However, system updaters and OTA servers need to
- # take time to switch to the new flag. We keep both of the flags for
- # P-timeframe, and will remove StreamingPropertyFiles in later release.
- needed_property_files = (
- AbOtaPropertyFiles(),
- StreamingPropertyFiles(),
- )
- FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+ FinalizeMetadata(metadata, staging_file, output_file,
+ package_key=OPTIONS.package_key)
def main(argv):
@@ -1438,6 +1136,8 @@
OPTIONS.enable_lz4diff = a.lower() != "false"
elif o == "--vabc_compression_param":
OPTIONS.vabc_compression_param = a.lower()
+ elif o == "--security_patch_level":
+ OPTIONS.security_patch_level = a
else:
return False
return True
@@ -1488,6 +1188,7 @@
"enable_zucchini=",
"enable_lz4diff=",
"vabc_compression_param=",
+ "security_patch_level=",
], extra_option_handler=option_handler)
if len(args) != 2:
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
index 2552464..012d9ab 100644
--- a/tools/releasetools/ota_metadata_pb2.py
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -19,8 +19,8 @@
name='ota_metadata.proto',
package='build.tools.releasetools',
syntax='proto3',
- serialized_options=_b('H\003'),
- serialized_pb=_b('\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\xf8\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x15\n\rspl_downgrade\x18\t \x01(\x08\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3')
+ serialized_options=_b('\n\013android.otaB\022OtaPackageMetadataH\003'),
+ serialized_pb=_b('\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"{\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\x12\x16\n\x0esource_version\x18\x05 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\xf8\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x15\n\rspl_downgrade\x18\t \x01(\x08\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42#\n\x0b\x61ndroid.otaB\x12OtaPackageMetadataH\x03\x62\x06proto3')
)
@@ -50,8 +50,8 @@
],
containing_type=None,
serialized_options=None,
- serialized_start=972,
- serialized_end=1024,
+ serialized_start=996,
+ serialized_end=1048,
)
_sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
@@ -216,6 +216,13 @@
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='source_version', full_name='build.tools.releasetools.ApexInfo.source_version', index=4,
+ number=5, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -229,7 +236,7 @@
oneofs=[
],
serialized_start=347,
- serialized_end=446,
+ serialized_end=470,
)
@@ -259,8 +266,8 @@
extension_ranges=[],
oneofs=[
],
- serialized_start=448,
- serialized_end=517,
+ serialized_start=472,
+ serialized_end=541,
)
@@ -297,8 +304,8 @@
extension_ranges=[],
oneofs=[
],
- serialized_start=918,
- serialized_end=970,
+ serialized_start=942,
+ serialized_end=994,
)
_OTAMETADATA = _descriptor.Descriptor(
@@ -384,8 +391,8 @@
extension_ranges=[],
oneofs=[
],
- serialized_start=520,
- serialized_end=1024,
+ serialized_start=544,
+ serialized_end=1048,
)
_DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index ef1dca2..e36a2be 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -16,14 +16,18 @@
import itertools
import logging
import os
+import shutil
import struct
import zipfile
import ota_metadata_pb2
-from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
+import common
+from common import (ZipDelete, OPTIONS, MakeTempFile,
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
GetRamdiskFormat)
+from payload_signer import PayloadSigner
+
logger = logging.getLogger(__name__)
@@ -44,7 +48,7 @@
SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
-def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
+def FinalizeMetadata(metadata, input_file, output_file, needed_property_files=None, package_key=None, pw=None):
"""Finalizes the metadata and signs an A/B OTA package.
In order to stream an A/B OTA package, we need 'ota-streaming-property-files'
@@ -62,32 +66,42 @@
input_file: The input ZIP filename that doesn't contain the package METADATA
entry yet.
output_file: The final output ZIP filename.
- needed_property_files: The list of PropertyFiles' to be generated.
+ needed_property_files: The list of PropertyFiles to be generated. Defaults to [AbOtaPropertyFiles(), StreamingPropertyFiles()].
+ package_key: The key used to sign this OTA package.
+ pw: Password for the package_key.
"""
+ no_signing = package_key is None
+
+ if needed_property_files is None:
+ # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
+ # all the info of the latter. However, system updaters and OTA servers need to
+ # take time to switch to the new flag. We keep both of the flags for
+ # P-timeframe, and will remove StreamingPropertyFiles in later release.
+ needed_property_files = (
+ AbOtaPropertyFiles(),
+ StreamingPropertyFiles(),
+ )
def ComputeAllPropertyFiles(input_file, needed_property_files):
# Write the current metadata entry with placeholders.
- with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
for property_files in needed_property_files:
metadata.property_files[property_files.name] = property_files.Compute(
input_zip)
- namelist = input_zip.namelist()
- if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
- ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
- output_zip = zipfile.ZipFile(input_file, 'a', allowZip64=True)
- WriteMetadata(metadata, output_zip)
- ZipClose(output_zip)
+ ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME], True)
+ with zipfile.ZipFile(input_file, 'a', allowZip64=True) as output_zip:
+ WriteMetadata(metadata, output_zip)
- if OPTIONS.no_signing:
+ if no_signing:
return input_file
prelim_signing = MakeTempFile(suffix='.zip')
- SignOutput(input_file, prelim_signing)
+ SignOutput(input_file, prelim_signing, package_key, pw)
return prelim_signing
def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
- with zipfile.ZipFile(prelim_signing, allowZip64=True) as prelim_signing_zip:
+ with zipfile.ZipFile(prelim_signing, 'r', allowZip64=True) as prelim_signing_zip:
for property_files in needed_property_files:
metadata.property_files[property_files.name] = property_files.Finalize(
prelim_signing_zip,
@@ -113,15 +127,14 @@
# Replace the METADATA entry.
ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
- output_zip = zipfile.ZipFile(prelim_signing, 'a', allowZip64=True)
- WriteMetadata(metadata, output_zip)
- ZipClose(output_zip)
+ with zipfile.ZipFile(prelim_signing, 'a', allowZip64=True) as output_zip:
+ WriteMetadata(metadata, output_zip)
# Re-sign the package after updating the metadata entry.
- if OPTIONS.no_signing:
- output_file = prelim_signing
+ if no_signing:
+ shutil.copy(prelim_signing, output_file)
else:
- SignOutput(prelim_signing, output_file)
+ SignOutput(prelim_signing, output_file, package_key, pw)
# Reopen the final signed zip to double check the streaming metadata.
with zipfile.ZipFile(output_file, allowZip64=True) as output_zip:
@@ -574,7 +587,7 @@
else:
tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
if METADATA_PROTO_NAME in zip_file.namelist():
- tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
+ tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
return ','.join(tokens)
@@ -596,10 +609,13 @@
return []
-def SignOutput(temp_zip_name, output_zip_name):
- pw = OPTIONS.key_passwords[OPTIONS.package_key]
+def SignOutput(temp_zip_name, output_zip_name, package_key=None, pw=None):
+ if package_key is None:
+ package_key = OPTIONS.package_key
+ if pw is None and OPTIONS.key_passwords:
+ pw = OPTIONS.key_passwords[package_key]
- SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
+ SignFile(temp_zip_name, output_zip_name, package_key, pw,
whole_file=True)
@@ -693,7 +709,7 @@
if entry in zfp.namelist():
return zfp.read(entry).decode()
else:
- entry_path = os.path.join(entry, path)
+ entry_path = os.path.join(path, entry)
if os.path.exists(entry_path):
with open(entry_path, "r") as fp:
return fp.read()
@@ -701,3 +717,247 @@
sourceEntry = ReadEntry(source_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
targetEntry = ReadEntry(target_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
return sourceEntry and targetEntry and sourceEntry == targetEntry
+
+
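+# A minimal sketch of calling the reworked FinalizeMetadata above: the
+# property-files list now defaults to AbOtaPropertyFiles() and
+# StreamingPropertyFiles() (both defined below), and package_key=None skips
+# signing. Zip names are hypothetical, and OPTIONS.key_passwords is assumed to
+# be populated (e.g. via common.GetKeyPasswords).
+def _example_finalize_metadata(metadata):
+  FinalizeMetadata(metadata, "staging.zip", "signed-ota.zip",
+                   package_key=OPTIONS.package_key,
+                   pw=OPTIONS.key_passwords.get(OPTIONS.package_key))
+
+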
+class PayloadGenerator(object):
+ """Manages the creation and the signing of an A/B OTA Payload."""
+
+ PAYLOAD_BIN = 'payload.bin'
+ PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
+ SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
+ SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
+
+ def __init__(self, secondary=False, wipe_user_data=False):
+ """Initializes a Payload instance.
+
+ Args:
+ secondary: Whether it's generating a secondary payload (default: False).
+ """
+ self.payload_file = None
+ self.payload_properties = None
+ self.secondary = secondary
+ self.wipe_user_data = wipe_user_data
+
+ def _Run(self, cmd): # pylint: disable=no-self-use
+ # Don't pipe (buffer) the output if verbose is set. Let
+ # brillo_update_payload write to stdout/stderr directly, so its progress can
+ # be monitored.
+ if OPTIONS.verbose:
+ common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
+ else:
+ common.RunAndCheckOutput(cmd)
+
+ def Generate(self, target_file, source_file=None, additional_args=None):
+ """Generates a payload from the given target-files zip(s).
+
+ Args:
+ target_file: The filename of the target build target-files zip.
+ source_file: The filename of the source build target-files zip; or None if
+ generating a full OTA.
+ additional_args: A list of additional args that should be passed to
+ brillo_update_payload script; or None.
+ """
+ if additional_args is None:
+ additional_args = []
+
+ payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
+ cmd = ["brillo_update_payload", "generate",
+ "--payload", payload_file,
+ "--target_image", target_file]
+ if source_file is not None:
+ cmd.extend(["--source_image", source_file])
+ if OPTIONS.disable_fec_computation:
+ cmd.extend(["--disable_fec_computation", "true"])
+ if OPTIONS.disable_verity_computation:
+ cmd.extend(["--disable_verity_computation", "true"])
+ cmd.extend(additional_args)
+ self._Run(cmd)
+
+ self.payload_file = payload_file
+ self.payload_properties = None
+
+ def Sign(self, payload_signer):
+ """Generates and signs the hashes of the payload and metadata.
+
+ Args:
+ payload_signer: A PayloadSigner() instance that performs the signing work.
+
+ Raises:
+ AssertionError: On any failure when calling brillo_update_payload script.
+ """
+ assert isinstance(payload_signer, PayloadSigner)
+
+ # 1. Generate hashes of the payload and metadata files.
+ payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+ metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+ cmd = ["brillo_update_payload", "hash",
+ "--unsigned_payload", self.payload_file,
+ "--signature_size", str(payload_signer.maximum_signature_size),
+ "--metadata_hash_file", metadata_sig_file,
+ "--payload_hash_file", payload_sig_file]
+ self._Run(cmd)
+
+ # 2. Sign the hashes.
+ signed_payload_sig_file = payload_signer.SignHashFile(payload_sig_file)
+ signed_metadata_sig_file = payload_signer.SignHashFile(metadata_sig_file)
+
+ # 3. Insert the signatures back into the payload file.
+ signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+ suffix=".bin")
+ cmd = ["brillo_update_payload", "sign",
+ "--unsigned_payload", self.payload_file,
+ "--payload", signed_payload_file,
+ "--signature_size", str(payload_signer.maximum_signature_size),
+ "--metadata_signature_file", signed_metadata_sig_file,
+ "--payload_signature_file", signed_payload_sig_file]
+ self._Run(cmd)
+
+ self.payload_file = signed_payload_file
+
+ def WriteToZip(self, output_zip):
+ """Writes the payload to the given zip.
+
+ Args:
+ output_zip: The output ZipFile instance.
+ """
+ assert self.payload_file is not None
+ # 4. Dump the signed payload properties.
+ properties_file = common.MakeTempFile(prefix="payload-properties-",
+ suffix=".txt")
+ cmd = ["brillo_update_payload", "properties",
+ "--payload", self.payload_file,
+ "--properties_file", properties_file]
+ self._Run(cmd)
+
+ if self.secondary:
+ with open(properties_file, "a") as f:
+ f.write("SWITCH_SLOT_ON_REBOOT=0\n")
+
+ if self.wipe_user_data:
+ with open(properties_file, "a") as f:
+ f.write("POWERWASH=1\n")
+
+ self.payload_properties = properties_file
+
+ if self.secondary:
+ payload_arcname = PayloadGenerator.SECONDARY_PAYLOAD_BIN
+ payload_properties_arcname = PayloadGenerator.SECONDARY_PAYLOAD_PROPERTIES_TXT
+ else:
+ payload_arcname = PayloadGenerator.PAYLOAD_BIN
+ payload_properties_arcname = PayloadGenerator.PAYLOAD_PROPERTIES_TXT
+
+ # Add the signed payload file and properties into the zip. In order to
+ # support streaming, we pack them as ZIP_STORED. So these entries can be
+ # read directly with the offset and length pairs.
+ common.ZipWrite(output_zip, self.payload_file, arcname=payload_arcname,
+ compress_type=zipfile.ZIP_STORED)
+ common.ZipWrite(output_zip, self.payload_properties,
+ arcname=payload_properties_arcname,
+ compress_type=zipfile.ZIP_STORED)
+
+
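+# A minimal usage sketch of the class above, assuming a target-files zip at the
+# hypothetical path "target-files.zip": generate the payload, sign it with a
+# payload_signer.PayloadSigner, then store the payload and its properties into
+# the output zip as ZIP_STORED entries.
+def _example_payload_generator(payload_signer, output_zip):
+  payload = PayloadGenerator(secondary=False, wipe_user_data=False)
+  payload.Generate("target-files.zip")
+  payload.Sign(payload_signer)
+  payload.WriteToZip(output_zip)
+
+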
+class StreamingPropertyFiles(PropertyFiles):
+ """A subclass for computing the property-files for streaming A/B OTAs."""
+
+ def __init__(self):
+ super(StreamingPropertyFiles, self).__init__()
+ self.name = 'ota-streaming-property-files'
+ self.required = (
+ # payload.bin and payload_properties.txt must exist.
+ 'payload.bin',
+ 'payload_properties.txt',
+ )
+ self.optional = (
+ # apex_info.pb isn't directly used in the update flow
+ 'apex_info.pb',
+ # care_map is available only if dm-verity is enabled.
+ 'care_map.pb',
+ 'care_map.txt',
+ # compatibility.zip is available only if target supports Treble.
+ 'compatibility.zip',
+ )
+
+
+class AbOtaPropertyFiles(StreamingPropertyFiles):
+ """The property-files for A/B OTA that includes payload_metadata.bin info.
+
+ Since P, we expose one more token (aka property-file), in addition to the ones
+ for streaming A/B OTA, for a virtual entry of 'payload_metadata.bin'.
+ 'payload_metadata.bin' is the header part of a payload ('payload.bin'), which
+ doesn't exist as a separate ZIP entry, but can be used to verify if the
+ payload can be applied on the given device.
+
+ For backward compatibility, we keep both of the 'ota-streaming-property-files'
+ and the newly added 'ota-property-files' in P. The new token will only be
+ available in 'ota-property-files'.
+ """
+
+ def __init__(self):
+ super(AbOtaPropertyFiles, self).__init__()
+ self.name = 'ota-property-files'
+
+ def _GetPrecomputed(self, input_zip):
+ offset, size = self._GetPayloadMetadataOffsetAndSize(input_zip)
+ return ['payload_metadata.bin:{}:{}'.format(offset, size)]
+
+ @staticmethod
+ def _GetPayloadMetadataOffsetAndSize(input_zip):
+ """Computes the offset and size of the payload metadata for a given package.
+
+ (From system/update_engine/update_metadata.proto)
+ A delta update file contains all the deltas needed to update a system from
+ one specific version to another specific version. The update format is
+ represented by this struct pseudocode:
+
+ struct delta_update_file {
+ char magic[4] = "CrAU";
+ uint64 file_format_version;
+ uint64 manifest_size; // Size of protobuf DeltaArchiveManifest
+
+ // Only present if format_version > 1:
+ uint32 metadata_signature_size;
+
+ // The Bzip2 compressed DeltaArchiveManifest
+ char manifest[manifest_size];
+
+ // The signature of the metadata (from the beginning of the payload up to
+ // this location, not including the signature itself). This is a
+ // serialized Signatures message.
+ char metadata_signature_message[metadata_signature_size];
+
+ // Data blobs for files, no specific format. The specific offset
+ // and length of each data blob is recorded in the DeltaArchiveManifest.
+ struct {
+ char data[];
+ } blobs[];
+
+ // These two are not signed:
+ uint64 payload_signatures_message_size;
+ char payload_signatures_message[];
+ };
+
+ 'payload-metadata.bin' contains all the bytes from the beginning of the
+ payload, up to the end of 'metadata_signature_message'.
+ """
+ payload_info = input_zip.getinfo('payload.bin')
+ (payload_offset, payload_size) = GetZipEntryOffset(input_zip, payload_info)
+
+ # Read the underlying raw zipfile at specified offset
+ payload_fp = input_zip.fp
+ payload_fp.seek(payload_offset)
+ header_bin = payload_fp.read(24)
+
+ # network byte order (big-endian)
+ header = struct.unpack("!IQQL", header_bin)
+
+ # 'CrAU'
+ magic = header[0]
+ assert magic == 0x43724155, "Invalid magic: {:x}, computed offset {}" \
+ .format(magic, payload_offset)
+
+ manifest_size = header[2]
+ metadata_signature_size = header[3]
+ metadata_total = 24 + manifest_size + metadata_signature_size
+ assert metadata_total <= payload_size
+
+ return (payload_offset, metadata_total)
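+
+
+# A minimal module-level sketch of the header parse above, applied to a
+# standalone payload file at a hypothetical path: "!IQQL" unpacks magic,
+# file_format_version, manifest_size and metadata_signature_size (24 bytes,
+# network byte order).
+def _example_read_payload_header(path="payload.bin"):
+  with open(path, "rb") as f:
+    magic, _, manifest_size, metadata_signature_size = struct.unpack(
+        "!IQQL", f.read(24))
+  assert magic == 0x43724155  # b'CrAU'
+  return 24 + manifest_size + metadata_signature_size  # metadata_total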
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
new file mode 100644
index 0000000..4f342ac
--- /dev/null
+++ b/tools/releasetools/payload_signer.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import common
+import logging
+from common import OPTIONS
+
+logger = logging.getLogger(__name__)
+
+
+class PayloadSigner(object):
+ """A class that wraps the payload signing works.
+
+ When generating a Payload, hashes of the payload and metadata files will be
+ signed with the device key, either by calling an external payload signer or
+ by calling openssl with the package key. This class provides a unified
+ interface, so that callers can just call PayloadSigner.Sign().
+
+ If an external payload signer has been specified (OPTIONS.payload_signer), it
+ calls the signer with the provided args (OPTIONS.payload_signer_args). Note
+ that the signing key should be provided as part of the payload_signer_args.
+ Otherwise without an external signer, it uses the package key
+ (OPTIONS.package_key) and calls openssl for the signing works.
+ """
+
+ def __init__(self, package_key=None, private_key_suffix=None, pw=None, payload_signer=None):
+ if package_key is None:
+ package_key = OPTIONS.package_key
+ if private_key_suffix is None:
+ private_key_suffix = OPTIONS.private_key_suffix
+
+ if payload_signer is None:
+ # Prepare the payload signing key.
+ private_key = package_key + private_key_suffix
+
+ cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
+ cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
+ signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
+ cmd.extend(["-out", signing_key])
+ common.RunAndCheckOutput(cmd, verbose=True)
+
+ self.signer = "openssl"
+ self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
+ "-pkeyopt", "digest:sha256"]
+ self.maximum_signature_size = self._GetMaximumSignatureSizeInBytes(
+ signing_key)
+ else:
+ self.signer = payload_signer
+ self.signer_args = OPTIONS.payload_signer_args
+ if OPTIONS.payload_signer_maximum_signature_size:
+ self.maximum_signature_size = int(
+ OPTIONS.payload_signer_maximum_signature_size)
+ else:
+ # The legacy config uses RSA2048 keys.
+ logger.warning("The maximum signature size for payload signer is not"
+ " set, default to 256 bytes.")
+ self.maximum_signature_size = 256
+
+ @staticmethod
+ def _GetMaximumSignatureSizeInBytes(signing_key):
+ out_signature_size_file = common.MakeTempFile("signature_size")
+ cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
+ out_signature_size_file), "--private_key={}".format(signing_key)]
+ common.RunAndCheckOutput(cmd, verbose=True)
+ with open(out_signature_size_file) as f:
+ signature_size = f.read().rstrip()
+ logger.info("%s outputs the maximum signature size: %s", cmd[0],
+ signature_size)
+ return int(signature_size)
+
+ @staticmethod
+ def _Run(cmd):
+ common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
+
+ def SignPayload(self, unsigned_payload):
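+    """Hashes the given unsigned payload, signs the hashes, and inserts the signatures; returns the signed payload filename."""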
+
+ # 1. Generate hashes of the payload and metadata files.
+ payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+ metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+ cmd = ["brillo_update_payload", "hash",
+ "--unsigned_payload", unsigned_payload,
+ "--signature_size", str(self.maximum_signature_size),
+ "--metadata_hash_file", metadata_sig_file,
+ "--payload_hash_file", payload_sig_file]
+ self._Run(cmd)
+
+ # 2. Sign the hashes.
+ signed_payload_sig_file = self.SignHashFile(payload_sig_file)
+ signed_metadata_sig_file = self.SignHashFile(metadata_sig_file)
+
+ # 3. Insert the signatures back into the payload file.
+ signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+ suffix=".bin")
+ cmd = ["brillo_update_payload", "sign",
+ "--unsigned_payload", unsigned_payload,
+ "--payload", signed_payload_file,
+ "--signature_size", str(self.maximum_signature_size),
+ "--metadata_signature_file", signed_metadata_sig_file,
+ "--payload_signature_file", signed_payload_sig_file]
+ self._Run(cmd)
+ return signed_payload_file
+
+ def SignHashFile(self, in_file):
+ """Signs the given input file. Returns the output filename."""
+ out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
+ cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
+ common.RunAndCheckOutput(cmd)
+ return out_file
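+
+
+# A minimal sketch of driving this class directly, assuming a PKCS#8 DER key at
+# the hypothetical path "testkey.pk8" with no password: the default path shells
+# out to openssl pkeyutl with a sha256 digest, as set up in __init__.
+def _example_sign_hash_file():
+  signer = PayloadSigner(package_key="testkey", private_key_suffix=".pk8",
+                         pw=None)
+  return signer.SignHashFile("payload_hash.bin")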
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index 6926467..d739982 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -42,20 +42,25 @@
--sign_tool <sign_tool>
Optional flag that specifies a custom signing tool for the contents of the apex.
+
+ --container_pw <name1=passwd,name2=passwd>
+ A mapping of container key names to passwords, as comma-separated name=password pairs
"""
import logging
import shutil
+import re
import sys
import apex_utils
import common
logger = logging.getLogger(__name__)
+OPTIONS = common.OPTIONS
def SignApexFile(avbtool, apex_file, payload_key, container_key, no_hashtree,
- apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None):
+ apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None, container_pw=None):
"""Signs the given apex file."""
with open(apex_file, 'rb') as input_fp:
apex_data = input_fp.read()
@@ -65,12 +70,13 @@
apex_data,
payload_key=payload_key,
container_key=container_key,
- container_pw=None,
+ container_pw=container_pw,
codename_to_api_level_map=codename_to_api_level_map,
no_hashtree=no_hashtree,
apk_keys=apk_keys,
signing_args=signing_args,
- sign_tool=sign_tool)
+ sign_tool=sign_tool,
+ is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name))
def main(argv):
@@ -106,6 +112,15 @@
options['extra_apks'].update({n: key})
elif o == '--sign_tool':
options['sign_tool'] = a
+ elif o == '--container_pw':
+ passwords = {}
+ pairs = a.split(",")
+ for pair in pairs:
+ if "=" not in pair:
+ continue
+ tokens = pair.split("=", maxsplit=1)
+ passwords[tokens[0].strip()] = tokens[1].strip()
+ options['container_pw'] = passwords
else:
return False
return True
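+
+# A minimal sketch of the --container_pw parsing above, with hypothetical key
+# names: comma-separated name=password pairs, each split once on the first "=".
+def _example_parse_container_pw():
+  passwords = {}
+  for pair in "com.android.foo=pw1,com.android.bar=pw2".split(","):
+    if "=" not in pair:
+      continue
+    name, pw = pair.split("=", maxsplit=1)
+    passwords[name.strip()] = pw.strip()
+  assert passwords == {"com.android.foo": "pw1", "com.android.bar": "pw2"}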
@@ -121,6 +136,7 @@
'payload_key=',
'extra_apks=',
'sign_tool=',
+ 'container_pw=',
],
extra_option_handler=option_handler)
@@ -141,7 +157,9 @@
signing_args=options.get('payload_extra_args'),
codename_to_api_level_map=options.get(
'codename_to_api_level_map', {}),
- sign_tool=options.get('sign_tool', None))
+ sign_tool=options.get('sign_tool', None),
+ container_pw=options.get('container_pw'),
+ )
shutil.copyfile(signed_apex, args[1])
logger.info("done.")
diff --git a/tools/releasetools/sign_target_files_apks b/tools/releasetools/sign_target_files_apks
deleted file mode 120000
index b5ec59a..0000000
--- a/tools/releasetools/sign_target_files_apks
+++ /dev/null
@@ -1 +0,0 @@
-sign_target_files_apks.py
\ No newline at end of file
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 27e9dfb..5f48dd0 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -27,7 +27,7 @@
apkcerts.txt file, or the container key for an APEX. Option may be
repeated to give multiple extra packages.
- --extra_apex_payload_key <name=key>
+ --extra_apex_payload_key <name,name,...=key>
Add a mapping for APEX package name to payload signing key, which will
override the default payload signing key in apexkeys.txt. Note that the
container key should be overridden via the `--extra_apks` flag above.
@@ -99,15 +99,15 @@
The second dir will be used for lookup if BOARD_USES_RECOVERY_AS_BOOT is
set to true.
- --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
- vbmeta_vendor}_algorithm <algorithm>
- --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
- vbmeta_vendor}_key <key>
+ --avb_{boot,init_boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+ vbmeta_system,vbmeta_vendor}_algorithm <algorithm>
+ --avb_{boot,init_boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+ vbmeta_system,vbmeta_vendor}_key <key>
Use the specified algorithm (e.g. SHA256_RSA4096) and the key to AVB-sign
the specified image. Otherwise it uses the existing values in info dict.
- --avb_{apex,boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
- vbmeta_vendor}_extra_args <args>
+ --avb_{apex,init_boot,boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+ vbmeta_system,vbmeta_vendor}_extra_args <args>
Specify any additional args that are needed to AVB-sign the image
(e.g. "--signing_helper /path/to/helper"). The args will be appended to
the existing ones in info dict.
@@ -141,6 +141,12 @@
Allow the existence of the file 'userdebug_plat_sepolicy.cil' under
(/system/system_ext|/system_ext)/etc/selinux.
If not set, error out when the file exists.
+
+ --override_apk_keys <path>
+ Replace all APK keys with this private key
+
+ --override_apex_keys <path>
+ Replace all APEX keys with this private key
"""
from __future__ import print_function
@@ -182,9 +188,6 @@
OPTIONS.key_map = {}
OPTIONS.rebuild_recovery = False
OPTIONS.replace_ota_keys = False
-OPTIONS.replace_verity_public_key = False
-OPTIONS.replace_verity_private_key = False
-OPTIONS.replace_verity_keyid = False
OPTIONS.remove_avb_public_keys = None
OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
OPTIONS.avb_keys = {}
@@ -197,6 +200,8 @@
OPTIONS.vendor_partitions = set()
OPTIONS.vendor_otatools = None
OPTIONS.allow_gsi_debug_sepolicy = False
+OPTIONS.override_apk_keys = None
+OPTIONS.override_apex_keys = None
AVB_FOOTER_ARGS_BY_PARTITION = {
@@ -245,6 +250,10 @@
def GetApkCerts(certmap):
+ if OPTIONS.override_apk_keys is not None:
+ for apk in certmap.keys():
+ certmap[apk] = OPTIONS.override_apk_keys
+
# apply the key remapping to the contents of the file
for apk, cert in certmap.items():
certmap[apk] = OPTIONS.key_map.get(cert, cert)
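+
+# A minimal sketch of the override above, with hypothetical APK names and key
+# paths: --override_apk_keys remaps every APK to a single key before the usual
+# key_map remapping runs.
+def _example_override_apk_keys():
+  certmap = {"App1.apk": "releasekey", "App2.apk": "platform"}
+  override_apk_keys = "vendor/example/keys/common"
+  for apk in certmap:
+    certmap[apk] = override_apk_keys
+  assert set(certmap.values()) == {override_apk_keys}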
@@ -275,6 +284,15 @@
Raises:
AssertionError: On invalid container / payload key overrides.
"""
+ if OPTIONS.override_apex_keys is not None:
+ for apex in keys_info.keys():
+ keys_info[apex] = (OPTIONS.override_apex_keys, keys_info[apex][1], keys_info[apex][2])
+
+ if OPTIONS.override_apk_keys is not None:
+ key = key_map.get(OPTIONS.override_apk_keys, OPTIONS.override_apk_keys)
+ for apex in keys_info.keys():
+ keys_info[apex] = (keys_info[apex][0], key, keys_info[apex][2])
+
# Apply all the --extra_apex_payload_key options to override the payload
# signing keys in the given keys_info.
for apex, key in OPTIONS.extra_apex_payload_keys.items():
@@ -642,11 +660,6 @@
elif filename == "META/misc_info.txt":
pass
- # Skip verity public key if we will replace it.
- elif (OPTIONS.replace_verity_public_key and
- filename in ("BOOT/RAMDISK/verity_key",
- "ROOT/verity_key")):
- pass
elif (OPTIONS.remove_avb_public_keys and
(filename.startswith("BOOT/RAMDISK/avb/") or
filename.startswith("BOOT/RAMDISK/first_stage_ramdisk/avb/"))):
@@ -660,10 +673,6 @@
# Copy it verbatim if we don't want to remove it.
common.ZipWriteStr(output_tf_zip, out_info, data)
- # Skip verity keyid (for system_root_image use) if we will replace it.
- elif OPTIONS.replace_verity_keyid and filename == "BOOT/cmdline":
- pass
-
# Skip the vbmeta digest as we will recalculate it.
elif filename == "META/vbmeta_digest.txt":
pass
@@ -745,27 +754,6 @@
if OPTIONS.replace_ota_keys:
ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
- # Replace the keyid string in misc_info dict.
- if OPTIONS.replace_verity_private_key:
- ReplaceVerityPrivateKey(misc_info, OPTIONS.replace_verity_private_key[1])
-
- if OPTIONS.replace_verity_public_key:
- # Replace the one in root dir in system.img.
- ReplaceVerityPublicKey(
- output_tf_zip, 'ROOT/verity_key', OPTIONS.replace_verity_public_key[1])
-
- if not system_root_image:
- # Additionally replace the copy in ramdisk if not using system-as-root.
- ReplaceVerityPublicKey(
- output_tf_zip,
- 'BOOT/RAMDISK/verity_key',
- OPTIONS.replace_verity_public_key[1])
-
- # Replace the keyid string in BOOT/cmdline.
- if OPTIONS.replace_verity_keyid:
- ReplaceVerityKeyId(input_tf_zip, output_tf_zip,
- OPTIONS.replace_verity_keyid[1])
-
# Replace the AVB signing keys, if any.
ReplaceAvbSigningKeys(misc_info)
@@ -881,7 +869,7 @@
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
elif key == "ro.build.description":
- pieces = value.split(" ")
+ pieces = value.split()
assert pieces[-1].endswith("-keys")
pieces[-1] = EditTags(pieces[-1])
value = " ".join(pieces)
@@ -913,7 +901,7 @@
certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
for k in keys:
common.ZipWrite(certs_zip, k)
- common.ZipClose(certs_zip)
+ certs_zip.close()
common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
@@ -982,64 +970,6 @@
WriteOtacerts(output_tf_zip, info.filename, mapped_keys + extra_keys)
-def ReplaceVerityPublicKey(output_zip, filename, key_path):
- """Replaces the verity public key at the given path in the given zip.
-
- Args:
- output_zip: The output target_files zip.
- filename: The archive name in the output zip.
- key_path: The path to the public key.
- """
- print("Replacing verity public key with %s" % (key_path,))
- common.ZipWrite(output_zip, key_path, arcname=filename)
-
-
-def ReplaceVerityPrivateKey(misc_info, key_path):
- """Replaces the verity private key in misc_info dict.
-
- Args:
- misc_info: The info dict.
- key_path: The path to the private key in PKCS#8 format.
- """
- print("Replacing verity private key with %s" % (key_path,))
- misc_info["verity_key"] = key_path
-
-
-def ReplaceVerityKeyId(input_zip, output_zip, key_path):
- """Replaces the veritykeyid parameter in BOOT/cmdline.
-
- Args:
- input_zip: The input target_files zip, which should be already open.
- output_zip: The output target_files zip, which should be already open and
- writable.
- key_path: The path to the PEM encoded X.509 certificate.
- """
- in_cmdline = input_zip.read("BOOT/cmdline").decode()
- # Copy in_cmdline to output_zip if veritykeyid is not present.
- if "veritykeyid" not in in_cmdline:
- common.ZipWriteStr(output_zip, "BOOT/cmdline", in_cmdline)
- return
-
- out_buffer = []
- for param in in_cmdline.split():
- if "veritykeyid" not in param:
- out_buffer.append(param)
- continue
-
- # Extract keyid using openssl command.
- p = common.Run(["openssl", "x509", "-in", key_path, "-text"],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- keyid, stderr = p.communicate()
- assert p.returncode == 0, "Failed to dump certificate: {}".format(stderr)
- keyid = re.search(
- r'keyid:([0-9a-fA-F:]*)', keyid).group(1).replace(':', '').lower()
- print("Replacing verity keyid with {}".format(keyid))
- out_buffer.append("veritykeyid=id:%s" % (keyid,))
-
- out_cmdline = ' '.join(out_buffer).strip() + '\n'
- common.ZipWriteStr(output_zip, "BOOT/cmdline", out_cmdline)
-
-
def ReplaceMiscInfoTxt(input_zip, output_zip, misc_info):
"""Replaces META/misc_info.txt.
@@ -1098,7 +1028,7 @@
tokens = []
changed = False
- for token in args.split(' '):
+ for token in args.split():
fingerprint_key = 'com.android.build.{}.fingerprint'.format(partition)
if not token.startswith(fingerprint_key):
tokens.append(token)
@@ -1304,6 +1234,7 @@
vendor_misc_info["avb_building_vbmeta_image"] = "false" # skip building vbmeta
vendor_misc_info["use_dynamic_partitions"] = "false" # super_empty
vendor_misc_info["build_super_partition"] = "false" # super split
+ vendor_misc_info["avb_vbmeta_system"] = "" # skip building vbmeta_system
with open(vendor_misc_info_path, "w") as output:
for key in sorted(vendor_misc_info):
output.write("{}={}\n".format(key, vendor_misc_info[key]))
@@ -1380,8 +1311,9 @@
for n in names:
OPTIONS.extra_apks[n] = key
elif o == "--extra_apex_payload_key":
- apex_name, key = a.split("=")
- OPTIONS.extra_apex_payload_keys[apex_name] = key
+ apex_names, key = a.split("=")
+ for name in apex_names.split(","):
+ OPTIONS.extra_apex_payload_keys[name] = key
elif o == "--skip_apks_with_path_prefix":
# Check the prefix, which must be in all upper case.
prefix = a.split('/')[0]
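+
+# A minimal sketch of the extended "<name,name,...=key>" form above, with
+# hypothetical APEX names: one payload key fans out to several packages.
+def _example_extra_apex_payload_key():
+  keys = {}
+  apex_names, key = "com.android.foo,com.android.bar=payload.pem".split("=")
+  for name in apex_names.split(","):
+    keys[name] = key
+  assert keys == {"com.android.foo": "payload.pem",
+                  "com.android.bar": "payload.pem"}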
@@ -1403,11 +1335,14 @@
new.append(i[0] + i[1:].strip())
OPTIONS.tag_changes = tuple(new)
elif o == "--replace_verity_public_key":
- OPTIONS.replace_verity_public_key = (True, a)
+ raise ValueError("--replace_verity_public_key is no longer supported,"
+ " please switch to AVB")
elif o == "--replace_verity_private_key":
- OPTIONS.replace_verity_private_key = (True, a)
+ raise ValueError("--replace_verity_private_key is no longer supported,"
+ " please switch to AVB")
elif o == "--replace_verity_keyid":
- OPTIONS.replace_verity_keyid = (True, a)
+ raise ValueError("--replace_verity_keyid is no longer supported, please"
+ " switch to AVB")
elif o == "--remove_avb_public_keys":
OPTIONS.remove_avb_public_keys = a.split(",")
elif o == "--avb_vbmeta_key":
@@ -1428,6 +1363,18 @@
OPTIONS.avb_algorithms['dtbo'] = a
elif o == "--avb_dtbo_extra_args":
OPTIONS.avb_extra_args['dtbo'] = a
+ elif o == "--avb_init_boot_key":
+ OPTIONS.avb_keys['init_boot'] = a
+ elif o == "--avb_init_boot_algorithm":
+ OPTIONS.avb_algorithms['init_boot'] = a
+ elif o == "--avb_init_boot_extra_args":
+ OPTIONS.avb_extra_args['init_boot'] = a
+ elif o == "--avb_recovery_key":
+ OPTIONS.avb_keys['recovery'] = a
+ elif o == "--avb_recovery_algorithm":
+ OPTIONS.avb_algorithms['recovery'] = a
+ elif o == "--avb_recovery_extra_args":
+ OPTIONS.avb_extra_args['recovery'] = a
elif o == "--avb_system_key":
OPTIONS.avb_keys['system'] = a
elif o == "--avb_system_algorithm":
@@ -1484,6 +1431,10 @@
OPTIONS.vendor_partitions = set(a.split(","))
elif o == "--allow_gsi_debug_sepolicy":
OPTIONS.allow_gsi_debug_sepolicy = True
+ elif o == "--override_apk_keys":
+ OPTIONS.override_apk_keys = a
+ elif o == "--override_apex_keys":
+ OPTIONS.override_apex_keys = a
else:
return False
return True
@@ -1513,6 +1464,12 @@
"avb_dtbo_algorithm=",
"avb_dtbo_key=",
"avb_dtbo_extra_args=",
+ "avb_init_boot_algorithm=",
+ "avb_init_boot_key=",
+ "avb_init_boot_extra_args=",
+ "avb_recovery_algorithm=",
+ "avb_recovery_key=",
+ "avb_recovery_extra_args=",
"avb_system_algorithm=",
"avb_system_key=",
"avb_system_extra_args=",
@@ -1537,6 +1494,8 @@
"vendor_partitions=",
"vendor_otatools=",
"allow_gsi_debug_sepolicy",
+ "override_apk_keys=",
+ "override_apex_keys=",
],
extra_option_handler=option_handler)
@@ -1579,8 +1538,8 @@
platform_api_level, codename_to_api_level_map,
compressed_extension)
- common.ZipClose(input_zip)
- common.ZipClose(output_zip)
+ input_zip.close()
+ output_zip.close()
if OPTIONS.vendor_partitions and OPTIONS.vendor_otatools:
BuildVendorPartitions(args[1])
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 524c0f2..a2f7e9e 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -41,8 +41,7 @@
"""
def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None,
- mode="rb", build_map=True, allow_shared_blocks=False,
- hashtree_info_generator=None):
+ mode="rb", build_map=True, allow_shared_blocks=False):
self.simg_f = f = open(simg_fn, mode)
header_bin = f.read(28)
@@ -74,8 +73,6 @@
blk_sz, total_chunks)
if not build_map:
- assert not hashtree_info_generator, \
- "Cannot generate the hashtree info without building the offset map."
return
pos = 0 # in blocks
@@ -83,7 +80,7 @@
self.offset_map = offset_map = []
self.clobbered_blocks = rangelib.RangeSet(data=clobbered_blocks)
- for i in range(total_chunks):
+ for _ in range(total_chunks):
header_bin = f.read(12)
header = struct.unpack("<2H2I", header_bin)
chunk_type = header[0]
@@ -114,16 +111,6 @@
if data_sz != 0:
raise ValueError("Don't care chunk input size is non-zero (%u)" %
(data_sz))
- # Fills the don't care data ranges with zeros.
- # TODO(xunchang) pass the care_map to hashtree info generator.
- if hashtree_info_generator:
- fill_data = '\x00' * 4
- # In order to compute verity hashtree on device, we need to write
- # zeros explicitly to the don't care ranges. Because these ranges may
- # contain non-zero data from the previous build.
- care_data.append(pos)
- care_data.append(pos + chunk_sz)
- offset_map.append((pos, chunk_sz, None, fill_data))
pos += chunk_sz
@@ -150,10 +137,6 @@
extended = extended.intersect(all_blocks).subtract(self.care_map)
self.extended = extended
- self.hashtree_info = None
- if hashtree_info_generator:
- self.hashtree_info = hashtree_info_generator.Generate(self)
-
if file_map_fn:
self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks,
allow_shared_blocks)
@@ -183,6 +166,11 @@
def ReadRangeSet(self, ranges):
return [d for d in self._GetRangeData(ranges)]
+ def ReadBlocks(self, start=0, num_blocks=None):
+ if num_blocks is None:
+ num_blocks = self.total_blocks
+ return self._GetRangeData([(start, start + num_blocks)])
+
def TotalSha1(self, include_clobbered_blocks=False):
"""Return the SHA-1 hash of all data in the 'care' regions.
@@ -286,8 +274,6 @@
remaining = remaining.subtract(ranges)
remaining = remaining.subtract(clobbered_blocks)
- if self.hashtree_info:
- remaining = remaining.subtract(self.hashtree_info.hashtree_range)
# For all the remaining blocks in the care_map (ie, those that
# aren't part of the data for any file nor part of the clobbered_blocks),
@@ -350,8 +336,6 @@
out["__NONZERO-%d" % i] = rangelib.RangeSet(data=blocks)
if clobbered_blocks:
out["__COPY"] = clobbered_blocks
- if self.hashtree_info:
- out["__HASHTREE"] = self.hashtree_info.hashtree_range
def ResetFileMap(self):
"""Throw away the file map and treat the entire image as
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index a5850d3..7b5476d 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -16,15 +16,16 @@
import os
import os.path
+import tempfile
import zipfile
import common
import test_utils
from add_img_to_target_files import (
AddPackRadioImages,
+ AddCareMapForAbOta, GetCareMap,
CheckAbOtaImages)
from rangelib import RangeSet
-from common import AddCareMapForAbOta, GetCareMap
OPTIONS = common.OPTIONS
@@ -124,9 +125,6 @@
def _test_AddCareMapForAbOta():
"""Helper function to set up the test for test_AddCareMapForAbOta()."""
OPTIONS.info_dict = {
- 'extfs_sparse_flag' : '-s',
- 'system_image_size' : 65536,
- 'vendor_image_size' : 40960,
'system_verity_block_device': '/dev/block/system',
'vendor_verity_block_device': '/dev/block/vendor',
'system.build.prop': common.PartitionBuildProps.FromDictionary(
@@ -149,13 +147,13 @@
system_image = test_utils.construct_sparse_image([
(0xCAC1, 6),
(0xCAC3, 4),
- (0xCAC1, 8)])
+ (0xCAC1, 6)], "system")
vendor_image = test_utils.construct_sparse_image([
- (0xCAC2, 12)])
+ (0xCAC2, 10)], "vendor")
image_paths = {
- 'system' : system_image,
- 'vendor' : vendor_image,
+ 'system': system_image,
+ 'vendor': vendor_image,
}
return image_paths
@@ -210,9 +208,6 @@
"""Tests the case for device using AVB."""
image_paths = self._test_AddCareMapForAbOta()
OPTIONS.info_dict = {
- 'extfs_sparse_flag': '-s',
- 'system_image_size': 65536,
- 'vendor_image_size': 40960,
'avb_system_hashtree_enable': 'true',
'avb_vendor_hashtree_enable': 'true',
'system.build.prop': common.PartitionBuildProps.FromDictionary(
@@ -244,9 +239,6 @@
"""Tests the case for partitions without fingerprint."""
image_paths = self._test_AddCareMapForAbOta()
OPTIONS.info_dict = {
- 'extfs_sparse_flag' : '-s',
- 'system_image_size' : 65536,
- 'vendor_image_size' : 40960,
'system_verity_block_device': '/dev/block/system',
'vendor_verity_block_device': '/dev/block/vendor',
}
@@ -255,8 +247,9 @@
AddCareMapForAbOta(care_map_file, ['system', 'vendor'], image_paths)
expected = ['system', RangeSet("0-5 10-15").to_string_raw(), "unknown",
- "unknown", 'vendor', RangeSet("0-9").to_string_raw(), "unknown",
- "unknown"]
+ "unknown", 'vendor', RangeSet(
+ "0-9").to_string_raw(), "unknown",
+ "unknown"]
self._verifyCareMap(expected, care_map_file)
@@ -265,9 +258,6 @@
"""Tests the case for partitions with thumbprint."""
image_paths = self._test_AddCareMapForAbOta()
OPTIONS.info_dict = {
- 'extfs_sparse_flag': '-s',
- 'system_image_size': 65536,
- 'vendor_image_size': 40960,
'system_verity_block_device': '/dev/block/system',
'vendor_verity_block_device': '/dev/block/vendor',
'system.build.prop': common.PartitionBuildProps.FromDictionary(
@@ -297,9 +287,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_AddCareMapForAbOta_skipPartition(self):
image_paths = self._test_AddCareMapForAbOta()
-
- # Remove vendor_image_size to invalidate the care_map for vendor.img.
- del OPTIONS.info_dict['vendor_image_size']
+ test_utils.erase_avb_footer(image_paths["vendor"])
care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
AddCareMapForAbOta(care_map_file, ['system', 'vendor'], image_paths)
@@ -313,10 +301,8 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_AddCareMapForAbOta_skipAllPartitions(self):
image_paths = self._test_AddCareMapForAbOta()
-
- # Remove the image_size properties for all the partitions.
- del OPTIONS.info_dict['system_image_size']
- del OPTIONS.info_dict['vendor_image_size']
+ test_utils.erase_avb_footer(image_paths["system"])
+ test_utils.erase_avb_footer(image_paths["vendor"])
care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
AddCareMapForAbOta(care_map_file, ['system', 'vendor'], image_paths)
@@ -395,35 +381,18 @@
sparse_image = test_utils.construct_sparse_image([
(0xCAC1, 6),
(0xCAC3, 4),
- (0xCAC1, 6)])
- OPTIONS.info_dict = {
- 'extfs_sparse_flag' : '-s',
- 'system_image_size' : 53248,
- }
+ (0xCAC1, 6)], "system")
name, care_map = GetCareMap('system', sparse_image)
self.assertEqual('system', name)
- self.assertEqual(RangeSet("0-5 10-12").to_string_raw(), care_map)
+ self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), care_map)
def test_GetCareMap_invalidPartition(self):
self.assertRaises(AssertionError, GetCareMap, 'oem', None)
- def test_GetCareMap_invalidAdjustedPartitionSize(self):
- sparse_image = test_utils.construct_sparse_image([
- (0xCAC1, 6),
- (0xCAC3, 4),
- (0xCAC1, 6)])
- OPTIONS.info_dict = {
- 'extfs_sparse_flag' : '-s',
- 'system_image_size' : -45056,
- }
- self.assertRaises(AssertionError, GetCareMap, 'system', sparse_image)
-
def test_GetCareMap_nonSparseImage(self):
- OPTIONS.info_dict = {
- 'system_image_size' : 53248,
- }
- # 'foo' is the image filename, which is expected to be not used by
- # GetCareMap().
- name, care_map = GetCareMap('system', 'foo')
- self.assertEqual('system', name)
- self.assertEqual(RangeSet("0-12").to_string_raw(), care_map)
+ with tempfile.NamedTemporaryFile() as tmpfile:
+ tmpfile.truncate(4096 * 13)
+ test_utils.append_avb_footer(tmpfile.name, "system")
+ name, care_map = GetCareMap('system', tmpfile.name)
+ self.assertEqual('system', name)
+ self.assertEqual(RangeSet("0-12").to_string_raw(), care_map)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index f973263..8c9655ad0 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -222,17 +222,17 @@
info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
build_info = common.BuildInfo(info_dict)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/build-id/'
- 'version-incremental:build-type/build-tags', build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/build-id/'
+ 'version-incremental:build-type/build-tags', build_info.fingerprint)
build_props = info_dict['build.prop'].build_props
del build_props['ro.build.id']
build_props['ro.build.legacy.id'] = 'legacy-build-id'
build_info = common.BuildInfo(info_dict, use_legacy_id=True)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/'
- 'legacy-build-id/version-incremental:build-type/build-tags',
- build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/'
+ 'legacy-build-id/version-incremental:build-type/build-tags',
+ build_info.fingerprint)
self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
False)
@@ -241,9 +241,9 @@
info_dict['vbmeta_digest'] = 'abcde12345'
build_info = common.BuildInfo(info_dict, use_legacy_id=False)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/'
- 'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
- build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/'
+ 'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+ build_info.fingerprint)
def test___getitem__(self):
target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +376,7 @@
info_dict['build.prop'].build_props[
'ro.product.property_source_order'] = 'bad-source'
with self.assertRaisesRegexp(common.ExternalError,
- 'Invalid ro.product.property_source_order'):
+ 'Invalid ro.product.property_source_order'):
info = common.BuildInfo(info_dict, None)
info.GetBuildProp('ro.product.device')
@@ -459,7 +459,7 @@
time.sleep(5) # Make sure the atime/mtime will change measurably.
common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
- common.ZipClose(zip_file)
+ zip_file.close()
self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
test_file_name, expected_stat, expected_mode,
@@ -494,7 +494,7 @@
expected_mode = extra_args.get("perms", zinfo_perms)
common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
- common.ZipClose(zip_file)
+ zip_file.close()
self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
expected_mode=expected_mode,
@@ -536,7 +536,7 @@
common.ZipWrite(zip_file, test_file_name, **extra_args)
common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
- common.ZipClose(zip_file)
+ zip_file.close()
# Verify the contents written by ZipWrite().
self._verify(zip_file, zip_file_name, arcname_large,
@@ -551,12 +551,6 @@
os.remove(zip_file_name)
os.remove(test_file_name)
- def _test_reset_ZIP64_LIMIT(self, func, *args):
- default_limit = (1 << 31) - 1
- self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
- func(*args)
- self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
-
def test_ZipWrite(self):
file_contents = os.urandom(1024)
self._test_ZipWrite(file_contents)
@@ -581,7 +575,7 @@
})
def test_ZipWrite_resets_ZIP64_LIMIT(self):
- self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
+ self._test_ZipWrite("")
def test_ZipWriteStr(self):
random_string = os.urandom(1024)
@@ -632,9 +626,9 @@
})
def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
- self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
+ self._test_ZipWriteStr('foo', b'')
zinfo = zipfile.ZipInfo(filename="foo")
- self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')
+ self._test_ZipWriteStr(zinfo, b'')
def test_bug21309935(self):
zip_file = tempfile.NamedTemporaryFile(delete=False)
@@ -656,7 +650,7 @@
zinfo = zipfile.ZipInfo(filename="qux")
zinfo.external_attr = 0o700 << 16
common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
- common.ZipClose(zip_file)
+ zip_file.close()
self._verify(zip_file, zip_file_name, "foo",
sha1(random_string).hexdigest(),
@@ -683,7 +677,7 @@
common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
- common.ZipClose(output_zip)
+ output_zip.close()
zip_file.close()
try:
@@ -731,8 +725,8 @@
common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
- common.ZipClose(output_zip)
- common.ZipClose(output_zip)
+ output_zip.close()
+ output_zip.close()
return zip_file
@test_utils.SkipIfExternalToolsUnavailable()
@@ -819,9 +813,9 @@
)
APKCERTS_CERTMAP1 = {
- 'RecoveryLocalizer.apk' : 'certs/devkey',
- 'Settings.apk' : 'build/make/target/product/security/platform',
- 'TV.apk' : 'PRESIGNED',
+ 'RecoveryLocalizer.apk': 'certs/devkey',
+ 'Settings.apk': 'build/make/target/product/security/platform',
+ 'TV.apk': 'PRESIGNED',
}
APKCERTS_TXT2 = (
@@ -836,10 +830,10 @@
)
APKCERTS_CERTMAP2 = {
- 'Compressed1.apk' : 'certs/compressed1',
- 'Compressed2a.apk' : 'certs/compressed2',
- 'Compressed2b.apk' : 'certs/compressed2',
- 'Compressed3.apk' : 'certs/compressed3',
+ 'Compressed1.apk': 'certs/compressed1',
+ 'Compressed2a.apk': 'certs/compressed2',
+ 'Compressed2b.apk': 'certs/compressed2',
+ 'Compressed3.apk': 'certs/compressed3',
}
APKCERTS_TXT3 = (
@@ -848,7 +842,7 @@
)
APKCERTS_CERTMAP3 = {
- 'Compressed4.apk' : 'certs/compressed4',
+ 'Compressed4.apk': 'certs/compressed4',
}
# Test parsing with no optional fields, both optional fields, and only the
@@ -865,9 +859,9 @@
)
APKCERTS_CERTMAP4 = {
- 'RecoveryLocalizer.apk' : 'certs/devkey',
- 'Settings.apk' : 'build/make/target/product/security/platform',
- 'TV.apk' : 'PRESIGNED',
+ 'RecoveryLocalizer.apk': 'certs/devkey',
+ 'Settings.apk': 'build/make/target/product/security/platform',
+ 'TV.apk': 'PRESIGNED',
}
def setUp(self):
@@ -971,7 +965,7 @@
extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
with open(extracted_from_privkey, 'rb') as privkey_fp, \
- open(extracted_from_pubkey, 'rb') as pubkey_fp:
+ open(extracted_from_pubkey, 'rb') as pubkey_fp:
self.assertEqual(privkey_fp.read(), pubkey_fp.read())
def test_ParseCertificate(self):
@@ -1235,7 +1229,8 @@
self.assertEqual(
'1-5 9-10',
sparse_image.file_map['//system/file1'].extra['text_str'])
- self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
+ self.assertTrue(
+ sparse_image.file_map['//system/file2'].extra['incomplete'])
self.assertTrue(
sparse_image.file_map['/system/app/file3'].extra['incomplete'])
@@ -1343,7 +1338,7 @@
'recovery_api_version': 3,
'fstab_version': 2,
'system_root_image': 'true',
- 'no_recovery' : 'true',
+ 'no_recovery': 'true',
'recovery_as_boot': 'true',
}
@@ -1664,6 +1659,7 @@
self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
test_file.name, 'generic_kernel')
+
class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
@@ -1673,7 +1669,7 @@
def setUp(self):
self._tempdir = common.MakeTempDir()
# Create a fake dict that contains the fstab info for boot&recovery.
- self._info = {"fstab" : {}}
+ self._info = {"fstab": {}}
fake_fstab = [
"/dev/soc.0/by-name/boot /boot emmc defaults defaults",
"/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2020,11 +2016,11 @@
input_zip, 'odm', placeholder_values)
self.assertEqual({
- 'ro.odm.build.date.utc': '1578430045',
- 'ro.odm.build.fingerprint':
- 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
- 'ro.product.odm.device': 'coral',
- 'ro.product.odm.name': 'product1',
+ 'ro.odm.build.date.utc': '1578430045',
+ 'ro.odm.build.fingerprint':
+ 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+ 'ro.product.odm.device': 'coral',
+ 'ro.product.odm.name': 'product1',
}, partition_props.build_props)
with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2186,3 +2182,29 @@
}
self.assertRaises(ValueError, common.PartitionBuildProps.FromInputFile,
input_zip, 'odm', placeholder_values)
+
+ def test_partitionBuildProps_fromInputFile_deepcopy(self):
+ build_prop = [
+ 'ro.odm.build.date.utc=1578430045',
+ 'ro.odm.build.fingerprint='
+ 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+ 'ro.product.odm.device=coral',
+ ]
+ input_file = self._BuildZipFile({
+ 'ODM/etc/build.prop': '\n'.join(build_prop),
+ })
+
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
+ placeholder_values = {
+ 'ro.boot.product.device_name': ['std', 'pro']
+ }
+ partition_props = common.PartitionBuildProps.FromInputFile(
+ input_zip, 'odm', placeholder_values)
+
+ copied_props = copy.deepcopy(partition_props)
+ self.assertEqual({
+ 'ro.odm.build.date.utc': '1578430045',
+ 'ro.odm.build.fingerprint':
+ 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+ 'ro.product.odm.device': 'coral',
+ }, copied_props.build_props)
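
The `common.ZipClose()` → `close()` swaps throughout this file track the removal of the ZIP64_LIMIT juggling in common.py: `ZipWrite`/`ZipWriteStr` no longer patch `zipfile.ZIP64_LIMIT` (hence the deleted `_test_reset_ZIP64_LIMIT` helper), so archives can be closed directly. A minimal before/after sketch with illustrative names:

```python
import zipfile

zip_file = zipfile.ZipFile("out.zip", "w", allowZip64=True)
zip_file.writestr("entry.txt", "hello")
zip_file.close()  # was: common.ZipClose(zip_file)
```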
diff --git a/tools/releasetools/test_merge_ota.py b/tools/releasetools/test_merge_ota.py
new file mode 100644
index 0000000..4fa7c02
--- /dev/null
+++ b/tools/releasetools/test_merge_ota.py
@@ -0,0 +1,86 @@
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import tempfile
+import test_utils
+import merge_ota
+import update_payload
+from update_metadata_pb2 import DynamicPartitionGroup
+from update_metadata_pb2 import DynamicPartitionMetadata
+from test_utils import SkipIfExternalToolsUnavailable, ReleaseToolsTestCase
+
+
+class MergeOtaTest(ReleaseToolsTestCase):
+ def setUp(self) -> None:
+ self.testdata_dir = test_utils.get_testdata_dir()
+ return super().setUp()
+
+ @SkipIfExternalToolsUnavailable()
+ def test_MergeThreeOtas(self):
+ ota1 = os.path.join(self.testdata_dir, "tuna_vbmeta.zip")
+ ota2 = os.path.join(self.testdata_dir, "tuna_vbmeta_system.zip")
+ ota3 = os.path.join(self.testdata_dir, "tuna_vbmeta_vendor.zip")
+ payloads = [update_payload.Payload(ota) for ota in [ota1, ota2, ota3]]
+ with tempfile.NamedTemporaryFile() as output_file:
+ merge_ota.main(["merge_ota", "-v", ota1, ota2, ota3,
+ "--output", output_file.name])
+ payload = update_payload.Payload(output_file.name)
+ partition_names = [
+ part.partition_name for part in payload.manifest.partitions]
+ self.assertEqual(partition_names, [
+ "vbmeta", "vbmeta_system", "vbmeta_vendor"])
+ payload.CheckDataHash()
+ for i in range(3):
+ self.assertEqual(payload.manifest.partitions[i].old_partition_info,
+ payloads[i].manifest.partitions[0].old_partition_info)
+ self.assertEqual(payload.manifest.partitions[i].new_partition_info,
+ payloads[i].manifest.partitions[0].new_partition_info)
+
+ def test_MergeDAPSnapshotDisabled(self):
+ dap1 = DynamicPartitionMetadata()
+ dap2 = DynamicPartitionMetadata()
+ merged_dap = DynamicPartitionMetadata()
+ dap1.snapshot_enabled = True
+ dap2.snapshot_enabled = False
+ merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap1)
+ merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap2)
+ self.assertFalse(merged_dap.snapshot_enabled)
+
+ def test_MergeDAPSnapshotEnabled(self):
+ dap1 = DynamicPartitionMetadata()
+ dap2 = DynamicPartitionMetadata()
+ merged_dap = DynamicPartitionMetadata()
+ merged_dap.snapshot_enabled = True
+ dap1.snapshot_enabled = True
+ dap2.snapshot_enabled = True
+ merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap1)
+ merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap2)
+ self.assertTrue(merged_dap.snapshot_enabled)
+
+ def test_MergeDAPGroups(self):
+ dap1 = DynamicPartitionMetadata()
+ dap1.groups.append(DynamicPartitionGroup(
+ name="abc", partition_names=["a", "b", "c"]))
+ dap2 = DynamicPartitionMetadata()
+ dap2.groups.append(DynamicPartitionGroup(
+ name="abc", partition_names=["d", "e", "f"]))
+ merged_dap = DynamicPartitionMetadata()
+ merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap1)
+ merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap2)
+ self.assertEqual(len(merged_dap.groups), 1)
+ self.assertEqual(merged_dap.groups[0].name, "abc")
+ self.assertEqual(merged_dap.groups[0].partition_names, [
+ "a", "b", "c", "d", "e", "f"])
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 11cfee1..ad0f7a8 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -17,6 +17,7 @@
import copy
import os
import os.path
+import tempfile
import zipfile
import common
@@ -24,17 +25,18 @@
import test_utils
from ota_utils import (
BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
- ConstructOtaApexInfo, FinalizeMetadata, GetPackageMetadata, PropertyFiles)
+ ConstructOtaApexInfo, FinalizeMetadata, GetPackageMetadata, PropertyFiles,
+ AbOtaPropertyFiles, PayloadGenerator, StreamingPropertyFiles)
from ota_from_target_files import (
- _LoadOemDicts, AbOtaPropertyFiles,
+ _LoadOemDicts,
GetTargetFilesZipForCustomImagesUpdates,
GetTargetFilesZipForPartialUpdates,
GetTargetFilesZipForSecondaryImages,
GetTargetFilesZipWithoutPostinstallConfig,
- Payload, PayloadSigner, POSTINSTALL_CONFIG,
- StreamingPropertyFiles, AB_PARTITIONS)
+ POSTINSTALL_CONFIG, AB_PARTITIONS)
from apex_utils import GetApexInfoFromTargetFiles
from test_utils import PropertyFilesTestCase
+from common import OPTIONS
+from payload_signer import PayloadSigner
def construct_target_files(secondary=False, compressedApex=False):
@@ -973,7 +975,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetPayloadMetadataOffsetAndSize(self):
target_file = construct_target_files()
- payload = Payload()
+ payload = PayloadGenerator()
payload.Generate(target_file)
payload_signer = PayloadSigner()
@@ -1028,7 +1030,7 @@
0, proc.returncode,
'Failed to run brillo_update_payload:\n{}'.format(stdoutdata))
- signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
+ signed_metadata_sig_file = payload_signer.SignHashFile(metadata_sig_file)
# Finally we can compare the two signatures.
with open(signed_metadata_sig_file, 'rb') as verify_fp:
@@ -1038,7 +1040,7 @@
def construct_zip_package_withValidPayload(with_metadata=False):
# Cannot use construct_zip_package() since we need a "valid" payload.bin.
target_file = construct_target_files()
- payload = Payload()
+ payload = PayloadGenerator()
payload.Generate(target_file)
payload_signer = PayloadSigner()
@@ -1142,10 +1144,10 @@
self.assertEqual('openssl', payload_signer.signer)
def test_init_withExternalSigner(self):
- common.OPTIONS.payload_signer = 'abc'
common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
common.OPTIONS.payload_signer_maximum_signature_size = '512'
- payload_signer = PayloadSigner()
+ payload_signer = PayloadSigner(
+ OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer='abc')
self.assertEqual('abc', payload_signer.signer)
self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
self.assertEqual(512, payload_signer.maximum_signature_size)
@@ -1168,35 +1170,36 @@
def test_Sign(self):
payload_signer = PayloadSigner()
input_file = os.path.join(self.testdata_dir, self.SIGFILE)
- signed_file = payload_signer.Sign(input_file)
+ signed_file = payload_signer.SignHashFile(input_file)
verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
self._assertFilesEqual(verify_file, signed_file)
def test_Sign_withExternalSigner_openssl(self):
"""Uses openssl as the external payload signer."""
- common.OPTIONS.payload_signer = 'openssl'
common.OPTIONS.payload_signer_args = [
'pkeyutl', '-sign', '-keyform', 'DER', '-inkey',
os.path.join(self.testdata_dir, 'testkey.pk8'),
'-pkeyopt', 'digest:sha256']
- payload_signer = PayloadSigner()
+ payload_signer = PayloadSigner(
+ OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer="openssl")
input_file = os.path.join(self.testdata_dir, self.SIGFILE)
- signed_file = payload_signer.Sign(input_file)
+ signed_file = payload_signer.SignHashFile(input_file)
verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
self._assertFilesEqual(verify_file, signed_file)
def test_Sign_withExternalSigner_script(self):
"""Uses testdata/payload_signer.sh as the external payload signer."""
- common.OPTIONS.payload_signer = os.path.join(
+ external_signer = os.path.join(
self.testdata_dir, 'payload_signer.sh')
- os.chmod(common.OPTIONS.payload_signer, 0o700)
+ os.chmod(external_signer, 0o700)
common.OPTIONS.payload_signer_args = [
os.path.join(self.testdata_dir, 'testkey.pk8')]
- payload_signer = PayloadSigner()
+ payload_signer = PayloadSigner(
+ OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer=external_signer)
input_file = os.path.join(self.testdata_dir, self.SIGFILE)
- signed_file = payload_signer.Sign(input_file)
+ signed_file = payload_signer.SignHashFile(input_file)
verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
self._assertFilesEqual(verify_file, signed_file)
@@ -1219,7 +1222,7 @@
@staticmethod
def _create_payload_full(secondary=False):
target_file = construct_target_files(secondary)
- payload = Payload(secondary)
+ payload = PayloadGenerator(secondary, OPTIONS.wipe_user_data)
payload.Generate(target_file)
return payload
@@ -1227,7 +1230,7 @@
def _create_payload_incremental():
target_file = construct_target_files()
source_file = construct_target_files()
- payload = Payload()
+ payload = PayloadGenerator()
payload.Generate(target_file, source_file)
return payload
@@ -1245,7 +1248,7 @@
def test_Generate_additionalArgs(self):
target_file = construct_target_files()
source_file = construct_target_files()
- payload = Payload()
+ payload = PayloadGenerator()
# This should work the same as calling payload.Generate(target_file,
# source_file).
payload.Generate(
@@ -1256,7 +1259,7 @@
def test_Generate_invalidInput(self):
target_file = construct_target_files()
common.ZipDelete(target_file, 'IMAGES/vendor.img')
- payload = Payload()
+ payload = PayloadGenerator()
self.assertRaises(common.ExternalError, payload.Generate, target_file)
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1292,6 +1295,9 @@
common.OPTIONS.wipe_user_data = True
payload = self._create_payload_full()
payload.Sign(PayloadSigner())
+ with tempfile.NamedTemporaryFile() as fp:
+ with zipfile.ZipFile(fp, "w") as zfp:
+ payload.WriteToZip(zfp)
with open(payload.payload_properties) as properties_fp:
self.assertIn("POWERWASH=1", properties_fp.read())
@@ -1300,6 +1306,9 @@
def test_Sign_secondary(self):
payload = self._create_payload_full(secondary=True)
payload.Sign(PayloadSigner())
+ with tempfile.NamedTemporaryFile() as fp:
+ with zipfile.ZipFile(fp, "w") as zfp:
+ payload.WriteToZip(zfp)
with open(payload.payload_properties) as properties_fp:
self.assertIn("SWITCH_SLOT_ON_REBOOT=0", properties_fp.read())
@@ -1324,33 +1333,17 @@
with zipfile.ZipFile(output_file) as verify_zip:
# First make sure we have the essential entries.
namelist = verify_zip.namelist()
- self.assertIn(Payload.PAYLOAD_BIN, namelist)
- self.assertIn(Payload.PAYLOAD_PROPERTIES_TXT, namelist)
+ self.assertIn(PayloadGenerator.PAYLOAD_BIN, namelist)
+ self.assertIn(PayloadGenerator.PAYLOAD_PROPERTIES_TXT, namelist)
# Then assert these entries are stored.
for entry_info in verify_zip.infolist():
- if entry_info.filename not in (Payload.PAYLOAD_BIN,
- Payload.PAYLOAD_PROPERTIES_TXT):
+ if entry_info.filename not in (PayloadGenerator.PAYLOAD_BIN,
+ PayloadGenerator.PAYLOAD_PROPERTIES_TXT):
continue
self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
@test_utils.SkipIfExternalToolsUnavailable()
- def test_WriteToZip_unsignedPayload(self):
- """Unsigned payloads should not be allowed to be written to zip."""
- payload = self._create_payload_full()
-
- output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
- self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
-
- # Also test with incremental payload.
- payload = self._create_payload_incremental()
-
- output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
- self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
-
- @test_utils.SkipIfExternalToolsUnavailable()
def test_WriteToZip_secondary(self):
payload = self._create_payload_full(secondary=True)
payload.Sign(PayloadSigner())
@@ -1362,14 +1355,14 @@
with zipfile.ZipFile(output_file) as verify_zip:
# First make sure we have the essential entries.
namelist = verify_zip.namelist()
- self.assertIn(Payload.SECONDARY_PAYLOAD_BIN, namelist)
- self.assertIn(Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT, namelist)
+ self.assertIn(PayloadGenerator.SECONDARY_PAYLOAD_BIN, namelist)
+ self.assertIn(PayloadGenerator.SECONDARY_PAYLOAD_PROPERTIES_TXT, namelist)
# Then assert these entries are stored.
for entry_info in verify_zip.infolist():
if entry_info.filename not in (
- Payload.SECONDARY_PAYLOAD_BIN,
- Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
+ PayloadGenerator.SECONDARY_PAYLOAD_BIN,
+ PayloadGenerator.SECONDARY_PAYLOAD_PROPERTIES_TXT):
continue
self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
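
The renames in this file track a refactor: `Payload` became `PayloadGenerator` (now exported from ota_utils alongside the property-files classes), and `PayloadSigner` moved to payload_signer.py, taking its keys explicitly instead of reading `OPTIONS.payload_signer`, with hash-file signing renamed to `SignHashFile()`. A hedged sketch of the new call shape; file paths are illustrative:

```python
from common import OPTIONS
from ota_utils import PayloadGenerator
from payload_signer import PayloadSigner

payload = PayloadGenerator()               # was: Payload()
payload.Generate("target_files.zip")       # illustrative target-files zip

signer = PayloadSigner(OPTIONS.package_key, OPTIONS.private_key_suffix,
                       payload_signer="openssl")
signed = signer.SignHashFile("metadata.sig")  # was: signer.Sign(...)
```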
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index 8470f20..7723de7 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -59,6 +59,21 @@
self.assertTrue(os.path.exists(signed_test_apex))
@test_utils.SkipIfExternalToolsUnavailable()
+ def test_SignSepolicyApex(self):
+ test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
+ payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+ container_key = os.path.join(self.testdata_dir, 'testkey')
+ apk_keys = {'SEPolicy-33.zip': os.path.join(self.testdata_dir, 'testkey')}
+ # Note: apk_keys is built here but not passed to SignApexFile below.
+ signed_test_apex = sign_apex.SignApexFile(
+ 'avbtool',
+ test_apex,
+ payload_key,
+ container_key,
+ False,
+ None)
+ self.assertTrue(os.path.exists(signed_test_apex))
+
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_SignCompressedApexFile(self):
apex = os.path.join(test_utils.get_current_dir(), 'com.android.apex.compressed.v1.capex')
payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 0f13add..0cd7dac 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -23,8 +23,8 @@
import test_utils
from sign_target_files_apks import (
CheckApkAndApexKeysAvailable, EditTags, GetApkFileInfo, ReadApexKeysInfo,
- ReplaceCerts, ReplaceGkiSigningKey, ReplaceVerityKeyId, RewriteAvbProps,
- RewriteProps, WriteOtacerts)
+ ReplaceCerts, ReplaceGkiSigningKey, RewriteAvbProps, RewriteProps,
+ WriteOtacerts)
class SignTargetFilesApksTest(test_utils.ReleaseToolsTestCase):
@@ -154,64 +154,6 @@
'\n'.join([prop[1] for prop in props]) + '\n',
RewriteProps('\n'.join([prop[0] for prop in props])))
- def test_ReplaceVerityKeyId(self):
- BOOT_CMDLINE1 = (
- "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
- "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
- "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
- "buildvariant=userdebug "
- "veritykeyid=id:7e4333f9bba00adfe0ede979e28ed1920492b40f\n")
-
- BOOT_CMDLINE2 = (
- "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
- "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
- "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
- "buildvariant=userdebug "
- "veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
-
- input_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
- input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
-
- # Test with the first certificate.
- cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
-
- output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
- zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
- ReplaceVerityKeyId(input_zip, output_zip, cert_file)
-
- with zipfile.ZipFile(output_file) as output_zip:
- self.assertEqual(BOOT_CMDLINE1, output_zip.read('BOOT/cmdline').decode())
-
- # Test with the second certificate.
- cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
-
- with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
- zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
- ReplaceVerityKeyId(input_zip, output_zip, cert_file)
-
- with zipfile.ZipFile(output_file) as output_zip:
- self.assertEqual(BOOT_CMDLINE2, output_zip.read('BOOT/cmdline').decode())
-
- def test_ReplaceVerityKeyId_no_veritykeyid(self):
- BOOT_CMDLINE = (
- "console=ttyHSL0,115200,n8 androidboot.hardware=bullhead boot_cpus=0-5 "
- "lpm_levels.sleep_disabled=1 msm_poweroff.download_mode=0 "
- "loop.max_part=7\n")
-
- input_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
- input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
-
- output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
- zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
- ReplaceVerityKeyId(input_zip, output_zip, None)
-
- with zipfile.ZipFile(output_file) as output_zip:
- self.assertEqual(BOOT_CMDLINE, output_zip.read('BOOT/cmdline').decode())
-
def test_ReplaceCerts(self):
cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
with open(cert1_path) as cert1_fp:
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index e30d2b9..5bbcf7f 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -19,6 +19,7 @@
Utils for running unittests.
"""
+import avbtool
import logging
import os
import os.path
@@ -57,12 +58,14 @@
current_dir = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_dir, 'testdata')
+
def get_current_dir():
"""Returns the current dir, relative to the script dir."""
# The script dir is the one we want, which could be different from pwd.
current_dir = os.path.dirname(os.path.realpath(__file__))
return current_dir
+
def get_search_path():
"""Returns the search path that has 'framework/signapk.jar' under."""
@@ -83,14 +86,33 @@
# In relative to 'build/make/tools/releasetools' in the Android source.
['..'] * 4 + ['out', 'host', 'linux-x86'],
# Or running the script unpacked from otatools.zip.
- ['..']):
+ ['..']):
full_path = os.path.realpath(os.path.join(current_dir, *path))
if signapk_exists(full_path):
return full_path
return None
-def construct_sparse_image(chunks):
+def append_avb_footer(file_path: str, partition_name: str = ""):
+ avb = avbtool.AvbTool()
+ try:
+ args = ["avbtool", "add_hashtree_footer", "--image", file_path,
+ "--partition_name", partition_name, "--do_not_generate_fec"]
+ avb.run(args)
+ except SystemExit as e:
+ raise ValueError(f"Failed to append hashtree footer {args}") from e
+
+
+def erase_avb_footer(file_path: str):
+ avb = avbtool.AvbTool()
+ try:
+ args = ["avbtool", "erase_footer", "--image", file_path]
+ avb.run(args)
+ except SystemExit as e:
+ raise ValueError(f"Failed to erase AVB footer {args}") from e
+
+
+def construct_sparse_image(chunks, partition_name: str = ""):
"""Returns a sparse image file constructed from the given chunks.
From system/core/libsparse/sparse_format.h.
@@ -151,6 +173,7 @@
if data_size != 0:
fp.write(os.urandom(data_size))
+ append_avb_footer(sparse_image, partition_name)
return sparse_image
@@ -201,6 +224,7 @@
def tearDown(self):
common.Cleanup()
+
class PropertyFilesTestCase(ReleaseToolsTestCase):
@staticmethod
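
Usage sketch for the helpers added above: `construct_sparse_image()` now appends a hashtree footer itself (hence its new `partition_name` parameter), and `erase_avb_footer()` strips the footer again to simulate a partition without AVB metadata. Assumes the `avbtool` module is importable, as in test_utils.py:

```python
import test_utils

# One 6-block RAW chunk (0xCAC1) for partition "system"; the helper now
# runs `avbtool add_hashtree_footer` on the result before returning it.
image = test_utils.construct_sparse_image([(0xCAC1, 6)], "system")

# Strip the footer again, e.g. to exercise the skip-partition paths in
# the care-map tests.
test_utils.erase_avb_footer(image)
```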
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index e2a022a..4a0ff09 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -27,249 +27,11 @@
from test_utils import (
get_testdata_dir, ReleaseToolsTestCase, SkipIfExternalToolsUnavailable)
from verity_utils import (
- CalculateVbmetaDigest, CreateHashtreeInfoGenerator,
- CreateVerityImageBuilder, HashtreeInfo,
- VerifiedBootVersion1HashtreeInfoGenerator)
+ CalculateVbmetaDigest, CreateVerityImageBuilder)
BLOCK_SIZE = common.BLOCK_SIZE
-class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase):
-
- def setUp(self):
- self.testdata_dir = get_testdata_dir()
-
- self.partition_size = 1024 * 1024
- self.prop_dict = {
- 'verity': 'true',
- 'verity_fec': 'true',
- 'system_verity_block_device': '/dev/block/system',
- 'system_size': self.partition_size
- }
-
- self.hash_algorithm = "sha256"
- self.fixed_salt = (
- "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7")
- self.expected_root_hash = (
- "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d")
-
- def _CreateSimg(self, raw_data): # pylint: disable=no-self-use
- output_file = common.MakeTempFile()
- raw_image = common.MakeTempFile()
- with open(raw_image, 'wb') as f:
- f.write(raw_data)
-
- cmd = ["img2simg", raw_image, output_file, '4096']
- common.RunAndCheckOutput(cmd)
- return output_file
-
- def _GenerateImage(self):
- partition_size = 1024 * 1024
- prop_dict = {
- 'partition_size': str(partition_size),
- 'verity': 'true',
- 'verity_block_device': '/dev/block/system',
- 'verity_key': os.path.join(self.testdata_dir, 'testkey'),
- 'verity_fec': 'true',
- 'verity_signer_cmd': 'verity_signer',
- }
- verity_image_builder = CreateVerityImageBuilder(prop_dict)
- self.assertIsNotNone(verity_image_builder)
- adjusted_size = verity_image_builder.CalculateMaxImageSize()
-
- raw_image = bytearray(adjusted_size)
- for i in range(adjusted_size):
- raw_image[i] = ord('0') + i % 10
-
- output_file = self._CreateSimg(raw_image)
-
- # Append the verity metadata.
- verity_image_builder.Build(output_file)
-
- return output_file
-
- @SkipIfExternalToolsUnavailable()
- def test_CreateHashtreeInfoGenerator(self):
- image_file = sparse_img.SparseImage(self._GenerateImage())
-
- generator = CreateHashtreeInfoGenerator(
- 'system', image_file, self.prop_dict)
- self.assertEqual(
- VerifiedBootVersion1HashtreeInfoGenerator, type(generator))
- self.assertEqual(self.partition_size, generator.partition_size)
- self.assertTrue(generator.fec_supported)
-
- @SkipIfExternalToolsUnavailable()
- def test_DecomposeSparseImage(self):
- image_file = sparse_img.SparseImage(self._GenerateImage())
-
- generator = VerifiedBootVersion1HashtreeInfoGenerator(
- self.partition_size, 4096, True)
- generator.DecomposeSparseImage(image_file)
- self.assertEqual(991232, generator.filesystem_size)
- self.assertEqual(12288, generator.hashtree_size)
- self.assertEqual(32768, generator.metadata_size)
-
- @SkipIfExternalToolsUnavailable()
- def test_ParseHashtreeMetadata(self):
- image_file = sparse_img.SparseImage(self._GenerateImage())
- generator = VerifiedBootVersion1HashtreeInfoGenerator(
- self.partition_size, 4096, True)
- generator.DecomposeSparseImage(image_file)
-
- # pylint: disable=protected-access
- generator._ParseHashtreeMetadata()
-
- self.assertEqual(
- self.hash_algorithm, generator.hashtree_info.hash_algorithm)
- self.assertEqual(self.fixed_salt, generator.hashtree_info.salt)
- self.assertEqual(self.expected_root_hash, generator.hashtree_info.root_hash)
-
- @SkipIfExternalToolsUnavailable()
- def test_ValidateHashtree_smoke(self):
- generator = VerifiedBootVersion1HashtreeInfoGenerator(
- self.partition_size, 4096, True)
- generator.image = sparse_img.SparseImage(self._GenerateImage())
-
- generator.hashtree_info = info = HashtreeInfo()
- info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
- info.hashtree_range = RangeSet(
- data=[991232 // 4096, (991232 + 12288) // 4096])
- info.hash_algorithm = self.hash_algorithm
- info.salt = self.fixed_salt
- info.root_hash = self.expected_root_hash
-
- self.assertTrue(generator.ValidateHashtree())
-
- @SkipIfExternalToolsUnavailable()
- def test_ValidateHashtree_failure(self):
- generator = VerifiedBootVersion1HashtreeInfoGenerator(
- self.partition_size, 4096, True)
- generator.image = sparse_img.SparseImage(self._GenerateImage())
-
- generator.hashtree_info = info = HashtreeInfo()
- info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
- info.hashtree_range = RangeSet(
- data=[991232 // 4096, (991232 + 12288) // 4096])
- info.hash_algorithm = self.hash_algorithm
- info.salt = self.fixed_salt
- info.root_hash = "a" + self.expected_root_hash[1:]
-
- self.assertFalse(generator.ValidateHashtree())
-
- @SkipIfExternalToolsUnavailable()
- def test_Generate(self):
- image_file = sparse_img.SparseImage(self._GenerateImage())
- generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
- info = generator.Generate(image_file)
-
- self.assertEqual(RangeSet(data=[0, 991232 // 4096]), info.filesystem_range)
- self.assertEqual(RangeSet(data=[991232 // 4096, (991232 + 12288) // 4096]),
- info.hashtree_range)
- self.assertEqual(self.hash_algorithm, info.hash_algorithm)
- self.assertEqual(self.fixed_salt, info.salt)
- self.assertEqual(self.expected_root_hash, info.root_hash)
-
-
-class VerifiedBootVersion1VerityImageBuilderTest(ReleaseToolsTestCase):
-
- DEFAULT_PARTITION_SIZE = 4096 * 1024
- DEFAULT_PROP_DICT = {
- 'partition_size': str(DEFAULT_PARTITION_SIZE),
- 'verity': 'true',
- 'verity_block_device': '/dev/block/system',
- 'verity_key': os.path.join(get_testdata_dir(), 'testkey'),
- 'verity_fec': 'true',
- 'verity_signer_cmd': 'verity_signer',
- }
-
- def test_init(self):
- prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
- verity_image_builder = CreateVerityImageBuilder(prop_dict)
- self.assertIsNotNone(verity_image_builder)
- self.assertEqual(1, verity_image_builder.version)
-
- def test_init_MissingProps(self):
- prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
- del prop_dict['verity']
- self.assertIsNone(CreateVerityImageBuilder(prop_dict))
-
- prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
- del prop_dict['verity_block_device']
- self.assertIsNone(CreateVerityImageBuilder(prop_dict))
-
- @SkipIfExternalToolsUnavailable()
- def test_CalculateMaxImageSize(self):
- verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
- size = verity_image_builder.CalculateMaxImageSize()
- self.assertLess(size, self.DEFAULT_PARTITION_SIZE)
-
- # Same result by explicitly passing the partition size.
- self.assertEqual(
- verity_image_builder.CalculateMaxImageSize(),
- verity_image_builder.CalculateMaxImageSize(
- self.DEFAULT_PARTITION_SIZE))
-
- @staticmethod
- def _BuildAndVerify(prop, verify_key):
- verity_image_builder = CreateVerityImageBuilder(prop)
- image_size = verity_image_builder.CalculateMaxImageSize()
-
- # Build the sparse image with verity metadata.
- input_dir = common.MakeTempDir()
- image = common.MakeTempFile(suffix='.img')
- cmd = ['mkuserimg_mke2fs', input_dir, image, 'ext4', '/system',
- str(image_size), '-j', '0', '-s']
- common.RunAndCheckOutput(cmd)
- verity_image_builder.Build(image)
-
- # Verify the verity metadata.
- cmd = ['verity_verifier', image, '-mincrypt', verify_key]
- common.RunAndCheckOutput(cmd)
-
- @SkipIfExternalToolsUnavailable()
- def test_Build(self):
- self._BuildAndVerify(
- self.DEFAULT_PROP_DICT,
- os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
-
- @SkipIfExternalToolsUnavailable()
- def test_Build_ValidationCheck(self):
- # A validity check for the test itself: the image shouldn't be verifiable
- # with wrong key.
- self.assertRaises(
- common.ExternalError,
- self._BuildAndVerify,
- self.DEFAULT_PROP_DICT,
- os.path.join(get_testdata_dir(), 'verity_mincrypt'))
-
- @SkipIfExternalToolsUnavailable()
- def test_Build_FecDisabled(self):
- prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
- del prop_dict['verity_fec']
- self._BuildAndVerify(
- prop_dict,
- os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
-
- @SkipIfExternalToolsUnavailable()
- def test_Build_SquashFs(self):
- verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
- verity_image_builder.CalculateMaxImageSize()
-
- # Build the sparse image with verity metadata.
- input_dir = common.MakeTempDir()
- image = common.MakeTempFile(suffix='.img')
- cmd = ['mksquashfsimage.sh', input_dir, image, '-s']
- common.RunAndCheckOutput(cmd)
- verity_image_builder.PadSparseImage(image)
- verity_image_builder.Build(image)
-
- # Verify the verity metadata.
- cmd = ["verity_verifier", image, '-mincrypt',
- os.path.join(get_testdata_dir(), 'testkey_mincrypt')]
- common.RunAndCheckOutput(cmd)
-
-
class VerifiedBootVersion2VerityImageBuilderTest(ReleaseToolsTestCase):
DEFAULT_PROP_DICT = {
diff --git a/tools/releasetools/testdata/sepolicy.apex b/tools/releasetools/testdata/sepolicy.apex
new file mode 100644
index 0000000..2c646cd
--- /dev/null
+++ b/tools/releasetools/testdata/sepolicy.apex
Binary files differ
diff --git a/tools/releasetools/testdata/tuna_vbmeta.zip b/tools/releasetools/testdata/tuna_vbmeta.zip
new file mode 100644
index 0000000..64e7bb3
--- /dev/null
+++ b/tools/releasetools/testdata/tuna_vbmeta.zip
Binary files differ
diff --git a/tools/releasetools/testdata/tuna_vbmeta_system.zip b/tools/releasetools/testdata/tuna_vbmeta_system.zip
new file mode 100644
index 0000000..3d76ef0
--- /dev/null
+++ b/tools/releasetools/testdata/tuna_vbmeta_system.zip
Binary files differ
diff --git a/tools/releasetools/testdata/tuna_vbmeta_vendor.zip b/tools/releasetools/testdata/tuna_vbmeta_vendor.zip
new file mode 100644
index 0000000..6994c59
--- /dev/null
+++ b/tools/releasetools/testdata/tuna_vbmeta_vendor.zip
Binary files differ
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index d55ad88..755241d 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -49,107 +49,6 @@
Exception.__init__(self, message)
-def GetVerityFECSize(image_size):
- cmd = ["fec", "-s", str(image_size)]
- output = common.RunAndCheckOutput(cmd, verbose=False)
- return int(output)
-
-
-def GetVerityTreeSize(image_size):
- cmd = ["build_verity_tree", "-s", str(image_size)]
- output = common.RunAndCheckOutput(cmd, verbose=False)
- return int(output)
-
-
-def GetVerityMetadataSize(image_size):
- cmd = ["build_verity_metadata", "size", str(image_size)]
- output = common.RunAndCheckOutput(cmd, verbose=False)
- return int(output)
-
-
-def GetVeritySize(image_size, fec_supported):
- verity_tree_size = GetVerityTreeSize(image_size)
- verity_metadata_size = GetVerityMetadataSize(image_size)
- verity_size = verity_tree_size + verity_metadata_size
- if fec_supported:
- fec_size = GetVerityFECSize(image_size + verity_size)
- return verity_size + fec_size
- return verity_size
-
-
-def GetSimgSize(image_file):
- simg = sparse_img.SparseImage(image_file, build_map=False)
- return simg.blocksize * simg.total_blocks
-
-
-def ZeroPadSimg(image_file, pad_size):
- blocks = pad_size // BLOCK_SIZE
- logger.info("Padding %d blocks (%d bytes)", blocks, pad_size)
- simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
- simg.AppendFillChunk(0, blocks)
-
-
-def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
- padding_size):
- cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
- verity_path, verity_fec_path]
- common.RunAndCheckOutput(cmd)
-
-
-def BuildVerityTree(sparse_image_path, verity_image_path):
- cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
- verity_image_path]
- output = common.RunAndCheckOutput(cmd)
- root, salt = output.split()
- return root, salt
-
-
-def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
- block_device, signer_path, key, signer_args,
- verity_disable):
- cmd = ["build_verity_metadata", "build", str(image_size),
- verity_metadata_path, root_hash, salt, block_device, signer_path, key]
- if signer_args:
- cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
- if verity_disable:
- cmd.append("--verity_disable")
- common.RunAndCheckOutput(cmd)
-
-
-def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
- """Appends the unsparse image to the given sparse image.
-
- Args:
- sparse_image_path: the path to the (sparse) image
- unsparse_image_path: the path to the (unsparse) image
-
- Raises:
- BuildVerityImageError: On error.
- """
- cmd = ["append2simg", sparse_image_path, unsparse_image_path]
- try:
- common.RunAndCheckOutput(cmd)
- except:
- logger.exception(error_message)
- raise BuildVerityImageError(error_message)
-
-
-def Append(target, file_to_append, error_message):
- """Appends file_to_append to target.
-
- Raises:
- BuildVerityImageError: On error.
- """
- try:
- with open(target, 'ab') as out_file, \
- open(file_to_append, 'rb') as input_file:
- for line in input_file:
- out_file.write(line)
- except IOError:
- logger.exception(error_message)
- raise BuildVerityImageError(error_message)
-
-
def CreateVerityImageBuilder(prop_dict):
"""Returns a verity image builder based on the given build properties.
@@ -166,23 +65,6 @@
if partition_size:
partition_size = int(partition_size)
- # Verified Boot 1.0
- verity_supported = prop_dict.get("verity") == "true"
- is_verity_partition = "verity_block_device" in prop_dict
- if verity_supported and is_verity_partition:
- if OPTIONS.verity_signer_path is not None:
- signer_path = OPTIONS.verity_signer_path
- else:
- signer_path = prop_dict["verity_signer_cmd"]
- return Version1VerityImageBuilder(
- partition_size,
- prop_dict["verity_block_device"],
- prop_dict.get("verity_fec") == "true",
- signer_path,
- prop_dict["verity_key"] + ".pk8",
- OPTIONS.verity_signer_args,
- "verity_disable" in prop_dict)
-
# Verified Boot 2.0
if (prop_dict.get("avb_hash_enable") == "true" or
prop_dict.get("avb_hashtree_enable") == "true"):
@@ -245,125 +127,6 @@
raise NotImplementedError
-class Version1VerityImageBuilder(VerityImageBuilder):
- """A VerityImageBuilder for Verified Boot 1.0."""
-
- def __init__(self, partition_size, block_dev, fec_supported, signer_path,
- signer_key, signer_args, verity_disable):
- self.version = 1
- self.partition_size = partition_size
- self.block_device = block_dev
- self.fec_supported = fec_supported
- self.signer_path = signer_path
- self.signer_key = signer_key
- self.signer_args = signer_args
- self.verity_disable = verity_disable
- self.image_size = None
- self.verity_size = None
-
- def CalculateDynamicPartitionSize(self, image_size):
- # This needs to be implemented. Note that returning the given image size as
- # the partition size doesn't make sense, as it will fail later.
- raise NotImplementedError
-
- def CalculateMaxImageSize(self, partition_size=None):
- """Calculates the max image size by accounting for the verity metadata.
-
- Args:
- partition_size: The partition size, which defaults to self.partition_size
- if unspecified.
-
- Returns:
- The size of the image adjusted for verity metadata.
- """
- if partition_size is None:
- partition_size = self.partition_size
- assert partition_size > 0, \
- "Invalid partition size: {}".format(partition_size)
-
- hi = partition_size
- if hi % BLOCK_SIZE != 0:
- hi = (hi // BLOCK_SIZE) * BLOCK_SIZE
-
- # verity tree and fec sizes depend on the partition size, which
- # means this estimate is always going to be unnecessarily small
- verity_size = GetVeritySize(hi, self.fec_supported)
- lo = partition_size - verity_size
- result = lo
-
- # do a binary search for the optimal size
- while lo < hi:
- i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
- v = GetVeritySize(i, self.fec_supported)
- if i + v <= partition_size:
- if result < i:
- result = i
- verity_size = v
- lo = i + BLOCK_SIZE
- else:
- hi = i
-
- self.image_size = result
- self.verity_size = verity_size
-
- logger.info(
- "Calculated image size for verity: partition_size %d, image_size %d, "
- "verity_size %d", partition_size, result, verity_size)
- return result
-
- def Build(self, out_file):
- """Creates an image that is verifiable using dm-verity.
-
- Args:
- out_file: the output image.
-
- Returns:
- AssertionError: On invalid partition sizes.
- BuildVerityImageError: On other errors.
- """
- image_size = int(self.image_size)
- tempdir_name = common.MakeTempDir(suffix="_verity_images")
-
- # Get partial image paths.
- verity_image_path = os.path.join(tempdir_name, "verity.img")
- verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
-
- # Build the verity tree and get the root hash and salt.
- root_hash, salt = BuildVerityTree(out_file, verity_image_path)
-
- # Build the metadata blocks.
- BuildVerityMetadata(
- image_size, verity_metadata_path, root_hash, salt, self.block_device,
- self.signer_path, self.signer_key, self.signer_args,
- self.verity_disable)
-
- padding_size = self.partition_size - self.image_size - self.verity_size
- assert padding_size >= 0
-
- # Build the full verified image.
- Append(
- verity_image_path, verity_metadata_path,
- "Failed to append verity metadata")
-
- if self.fec_supported:
- # Build FEC for the entire partition, including metadata.
- verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")
- BuildVerityFEC(
- out_file, verity_image_path, verity_fec_path, padding_size)
- Append(verity_image_path, verity_fec_path, "Failed to append FEC")
-
- Append2Simg(
- out_file, verity_image_path, "Failed to append verity data")
-
- def PadSparseImage(self, out_file):
- sparse_image_size = GetSimgSize(out_file)
- if sparse_image_size > self.image_size:
- raise BuildVerityImageError(
- "Error: image size of {} is larger than partition size of "
- "{}".format(sparse_image_size, self.image_size))
- ZeroPadSimg(out_file, self.image_size - sparse_image_size)
-
-
class VerifiedBootVersion2VerityImageBuilder(VerityImageBuilder):
"""A VerityImageBuilder for Verified Boot 2.0."""
@@ -519,199 +282,6 @@
raise BuildVerityImageError("Failed to add AVB footer: {}".format(output))
-class HashtreeInfoGenerationError(Exception):
- """An Exception raised during hashtree info generation."""
-
- def __init__(self, message):
- Exception.__init__(self, message)
-
-
-class HashtreeInfo(object):
- def __init__(self):
- self.hashtree_range = None
- self.filesystem_range = None
- self.hash_algorithm = None
- self.salt = None
- self.root_hash = None
-
-
-def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
- generator = None
- if (info_dict.get("verity") == "true" and
- info_dict.get("{}_verity_block_device".format(partition_name))):
- partition_size = info_dict["{}_size".format(partition_name)]
- fec_supported = info_dict.get("verity_fec") == "true"
- generator = VerifiedBootVersion1HashtreeInfoGenerator(
- partition_size, block_size, fec_supported)
-
- return generator
-
-
-class HashtreeInfoGenerator(object):
- def Generate(self, image):
- raise NotImplementedError
-
- def DecomposeSparseImage(self, image):
- raise NotImplementedError
-
- def ValidateHashtree(self):
- raise NotImplementedError
-
-
-class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
- """A class that parses the metadata of hashtree for a given partition."""
-
- def __init__(self, partition_size, block_size, fec_supported):
- """Initialize VerityTreeInfo with the sparse image and input property.
-
- Arguments:
- partition_size: The whole size in bytes of a partition, including the
- filesystem size, padding size, and verity size.
- block_size: Expected size in bytes of each block for the sparse image.
- fec_supported: True if the verity section contains fec data.
- """
-
- self.block_size = block_size
- self.partition_size = partition_size
- self.fec_supported = fec_supported
-
- self.image = None
- self.filesystem_size = None
- self.hashtree_size = None
- self.metadata_size = None
-
- prop_dict = {
- 'partition_size': str(partition_size),
- 'verity': 'true',
- 'verity_fec': 'true' if fec_supported else None,
- # 'verity_block_device' needs to be present to indicate a verity-enabled
- # partition.
- 'verity_block_device': '',
- # We don't need the following properties that are needed for signing the
- # verity metadata.
- 'verity_key': '',
- 'verity_signer_cmd': None,
- }
- self.verity_image_builder = CreateVerityImageBuilder(prop_dict)
-
- self.hashtree_info = HashtreeInfo()
-
- def DecomposeSparseImage(self, image):
- """Calculate the verity size based on the size of the input image.
-
- Since we already know the structure of a verity enabled image to be:
- [filesystem, verity_hashtree, verity_metadata, fec_data]. We can then
- calculate the size and offset of each section.
- """
-
- self.image = image
- assert self.block_size == image.blocksize
- assert self.partition_size == image.total_blocks * self.block_size, \
- "partition size {} doesn't match with the calculated image size." \
- " total_blocks: {}".format(self.partition_size, image.total_blocks)
-
- adjusted_size = self.verity_image_builder.CalculateMaxImageSize()
- assert adjusted_size % self.block_size == 0
-
- verity_tree_size = GetVerityTreeSize(adjusted_size)
- assert verity_tree_size % self.block_size == 0
-
- metadata_size = GetVerityMetadataSize(adjusted_size)
- assert metadata_size % self.block_size == 0
-
- self.filesystem_size = adjusted_size
- self.hashtree_size = verity_tree_size
- self.metadata_size = metadata_size
-
- self.hashtree_info.filesystem_range = RangeSet(
- data=[0, adjusted_size // self.block_size])
- self.hashtree_info.hashtree_range = RangeSet(
- data=[adjusted_size // self.block_size,
- (adjusted_size + verity_tree_size) // self.block_size])
-
- def _ParseHashtreeMetadata(self):
- """Parses the hash_algorithm, root_hash, salt from the metadata block."""
-
- metadata_start = self.filesystem_size + self.hashtree_size
- metadata_range = RangeSet(
- data=[metadata_start // self.block_size,
- (metadata_start + self.metadata_size) // self.block_size])
- meta_data = b''.join(self.image.ReadRangeSet(metadata_range))
-
- # More info about the metadata structure available in:
- # system/extras/verity/build_verity_metadata.py
- META_HEADER_SIZE = 268
- header_bin = meta_data[0:META_HEADER_SIZE]
- header = struct.unpack("II256sI", header_bin)
-
- # header: magic_number, version, signature, table_len
- assert header[0] == 0xb001b001, header[0]
- table_len = header[3]
- verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
- table_entries = verity_table.rstrip().split()
-
- # Expected verity table format: "1 block_device block_device block_size
- # block_size data_blocks data_blocks hash_algorithm root_hash salt"
- assert len(table_entries) == 10, "Unexpected verity table size {}".format(
- len(table_entries))
- assert (int(table_entries[3]) == self.block_size and
- int(table_entries[4]) == self.block_size)
- assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
- int(table_entries[6]) * self.block_size == self.filesystem_size)
-
- self.hashtree_info.hash_algorithm = table_entries[7].decode()
- self.hashtree_info.root_hash = table_entries[8].decode()
- self.hashtree_info.salt = table_entries[9].decode()
-
- def ValidateHashtree(self):
- """Checks that we can reconstruct the verity hash tree."""
-
- # Writes the filesystem section to a temp file; and calls the executable
- # build_verity_tree to construct the hash tree.
- adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
- with open(adjusted_partition, "wb") as fd:
- self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)
-
- generated_verity_tree = common.MakeTempFile(prefix="verity")
- root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)
-
- # The salt should be always identical, as we use fixed value.
- assert salt == self.hashtree_info.salt, \
- "Calculated salt {} doesn't match the one in metadata {}".format(
- salt, self.hashtree_info.salt)
-
- if root_hash != self.hashtree_info.root_hash:
- logger.warning(
- "Calculated root hash %s doesn't match the one in metadata %s",
- root_hash, self.hashtree_info.root_hash)
- return False
-
- # Reads the generated hash tree and checks if it has the exact same bytes
- # as the one in the sparse image.
- with open(generated_verity_tree, 'rb') as fd:
- return fd.read() == b''.join(self.image.ReadRangeSet(
- self.hashtree_info.hashtree_range))
-
- def Generate(self, image):
- """Parses and validates the hashtree info in a sparse image.
-
- Returns:
- hashtree_info: The information needed to reconstruct the hashtree.
-
- Raises:
- HashtreeInfoGenerationError: If we fail to generate the exact bytes of
- the hashtree.
- """
-
- self.DecomposeSparseImage(image)
- self._ParseHashtreeMetadata()
-
- if not self.ValidateHashtree():
- raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")
-
- return self.hashtree_info
-
-
def CreateCustomImageBuilder(info_dict, partition_name, partition_size,
key_path, algorithm, signing_args):
builder = None
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index 36a220c..25c53d3 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -1097,6 +1097,7 @@
boolean signUsingApkSignatureSchemeV2 = true;
boolean signUsingApkSignatureSchemeV4 = false;
SigningCertificateLineage certLineage = null;
+ Integer rotationMinSdkVersion = null;
int argstart = 0;
while (argstart < args.length && args[argstart].startsWith("-")) {
@@ -1157,6 +1158,15 @@
"Error reading lineage file: " + e.getMessage());
}
++argstart;
+ } else if ("--rotation-min-sdk-version".equals(args[argstart])) {
+ String rotationMinSdkVersionString = args[++argstart];
+ try {
+ rotationMinSdkVersion = Integer.parseInt(rotationMinSdkVersionString);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException(
+ "--rotation-min-sdk-version must be a decimal number: " + rotationMinSdkVersionString);
+ }
+ ++argstart;
} else {
usage();
}
@@ -1248,15 +1258,22 @@
}
}
- try (ApkSignerEngine apkSigner =
- new DefaultApkSignerEngine.Builder(
- createSignerConfigs(privateKey, publicKey), minSdkVersion)
- .setV1SigningEnabled(true)
- .setV2SigningEnabled(signUsingApkSignatureSchemeV2)
- .setOtherSignersSignaturesPreserved(false)
- .setCreatedBy("1.0 (Android SignApk)")
- .setSigningCertificateLineage(certLineage)
- .build()) {
+ DefaultApkSignerEngine.Builder builder = new DefaultApkSignerEngine.Builder(
+ createSignerConfigs(privateKey, publicKey), minSdkVersion)
+ .setV1SigningEnabled(true)
+ .setV2SigningEnabled(signUsingApkSignatureSchemeV2)
+ .setOtherSignersSignaturesPreserved(false)
+ .setCreatedBy("1.0 (Android SignApk)");
+
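+            // The lineage and rotation settings are optional; apply them only when
+            // the corresponding flags were passed.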
+ if (certLineage != null) {
+ builder = builder.setSigningCertificateLineage(certLineage);
+ }
+
+ if (rotationMinSdkVersion != null) {
+ builder = builder.setMinSdkVersionForRotation(rotationMinSdkVersion);
+ }
+
+ try (ApkSignerEngine apkSigner = builder.build()) {
// We don't preserve the input APK's APK Signing Block (which contains v2
// signatures)
apkSigner.inputApkSigningBlock(null);
diff --git a/tools/soong_to_convert.py b/tools/soong_to_convert.py
index 949131b..649829f 100755
--- a/tools/soong_to_convert.py
+++ b/tools/soong_to_convert.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2016 The Android Open Source Project
#
@@ -50,9 +50,6 @@
Not all problems can be discovered, but this is a starting point.
"""
-
-from __future__ import print_function
-
import csv
import sys
@@ -113,7 +110,7 @@
def main(filename):
"""Read the CSV file, print the results"""
- with open(filename, 'rb') as csvfile:
+ with open(filename, 'r') as csvfile:
results = process(csv.reader(csvfile))
native_results = filter(results, "native")
diff --git a/tools/stub_diff_analyzer.py b/tools/stub_diff_analyzer.py
new file mode 100644
index 0000000..e49d092
--- /dev/null
+++ b/tools/stub_diff_analyzer.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from sys import exit
+from typing import List
+from glob import glob
+from pathlib import Path
+from collections import defaultdict
+from difflib import Differ
+from re import split
+from tqdm import tqdm
+import argparse
+
+
+DIFFER_CODE_LEN = 2
+
+class DifferCodes:
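+    """Two-character prefix codes emitted by difflib.Differ for each diff line."""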
+ COMMON = ' '
+ UNIQUE_FIRST = '- '
+ UNIQUE_SECOND = '+ '
+ DIFF_IDENT = '? '
+
+class FilesDiffAnalyzer:
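+    """Compares the file sets and file contents of two directories."""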
+ def __init__(self, args) -> None:
+ self.out_dir = args.out_dir
+ self.show_diff = args.show_diff
+ self.skip_words = args.skip_words
+ self.first_dir = args.first_dir
+ self.second_dir = args.second_dir
+ self.include_common = args.include_common
+
+ self.first_dir_files = self.get_files(self.first_dir)
+ self.second_dir_files = self.get_files(self.second_dir)
+ self.common_file_map = defaultdict(set)
+
+ self.map_common_files(self.first_dir_files, self.first_dir)
+ self.map_common_files(self.second_dir_files, self.second_dir)
+
+ def get_files(self, dir: str) -> List[str]:
+ """Get all files directory in the input directory including the files in the subdirectories
+
+ Recursively finds all files in the input directory.
+ Returns a list of file directory strings, which do not include directories but only files.
+ List is sorted in alphabetical order of the file directories.
+
+ Args:
+ dir: Directory to get the files. String.
+
+ Returns:
+ A list of file directory strings within the input directory.
+ Sorted in Alphabetical order.
+
+ Raises:
+ FileNotFoundError: An error occurred accessing the non-existing directory
+ """
+
+ if not dir_exists(dir):
+ raise FileNotFoundError("Directory does not exist")
+
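+        # Normalize the input into a recursive glob pattern, e.g. "a/b" -> "a/b/**".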
+        if dir[-2:] != "**":
+            if dir[-1] != "/":
+ dir += "/"
+ dir += "**"
+
+ return [file for file in sorted(glob(dir, recursive=True)) if Path(file).is_file()]
+
+ def map_common_files(self, files: List[str], dir: str) -> None:
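+        # Key each file by its path relative to dir so the same file can be
+        # matched across both trees.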
+ for file in files:
+ file_name = file.split(dir, 1)[-1]
+ self.common_file_map[file_name].add(dir)
+ return
+
+ def compare_file_contents(self, first_file: str, second_file: str) -> List[str]:
+ """Compare the contents of the files and return different lines
+
+ Given two file directory strings, compare the contents of the two files
+ and return the list of file contents string prepended with unique identifier codes.
+ The identifier codes include:
+ - ' '(two empty space characters): Line common to two files
+ - '- '(minus followed by a space) : Line unique to first file
+ - '+ '(plus followed by a space) : Line unique to second file
+
+ Args:
+ first_file: First file directory string to compare the content
+ second_file: Second file directory string to compare the content
+
+ Returns:
+ A list of the file content strings. For example:
+
+ [
+ " Foo",
+ "- Bar",
+ "+ Baz"
+ ]
+ """
+
+ d = Differ()
+ first_file_contents = sort_methods(get_file_contents(first_file))
+ second_file_contents = sort_methods(get_file_contents(second_file))
+ diff = list(d.compare(first_file_contents, second_file_contents))
+ ret = [f"diff {first_file} {second_file}"]
+
+ idx = 0
+ while idx < len(diff):
+ line = diff[idx]
+ line_code = line[:DIFFER_CODE_LEN]
+
+ match line_code:
+ case DifferCodes.COMMON:
+ if self.include_common:
+ ret.append(line)
+
+ case DifferCodes.UNIQUE_FIRST:
+                    # A '- ' line may be followed by a matching '+ ' line (optionally
+                    # separated by a '? ' marker); pair the two and compare them.
+ if (idx < len(diff) - 1 and
+ (next_line_code := diff[idx + 1][:DIFFER_CODE_LEN])
+ not in (DifferCodes.UNIQUE_FIRST, DifferCodes.COMMON)):
+ delta = 1 if next_line_code == DifferCodes.UNIQUE_SECOND else 2
+ line_to_compare = diff[idx + delta]
+ if self.lines_differ(line, line_to_compare):
+ ret.extend([line, line_to_compare])
+ else:
+ if self.include_common:
+ ret.append(DifferCodes.COMMON +
+ line[DIFFER_CODE_LEN:])
+ idx += delta
+ else:
+ ret.append(line)
+
+ case DifferCodes.UNIQUE_SECOND:
+ ret.append(line)
+
+ case DifferCodes.DIFF_IDENT:
+ pass
+ idx += 1
+ return ret
+
+ def lines_differ(self, line1: str, line2: str) -> bool:
+ """Check if the input lines are different or not
+
+ Compare the two lines word by word and check if the two lines are different or not.
+ If the different words in the comparing lines are included in skip_words,
+ the lines are not considered different.
+
+ Args:
+ line1: first line to compare
+ line2: second line to compare
+
+ Returns:
+            Boolean value indicating whether the two lines differ
+
+ """
+        # Split on '.' or any whitespace character.
+        def split_words(line: str) -> List[str]:
+            return split(r'\s|\.', line[DIFFER_CODE_LEN:])
+
+ line1_words, line2_words = split_words(line1), split_words(line2)
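+        # A differing word count can never be reconciled by skip words.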
+ if len(line1_words) != len(line2_words):
+ return True
+
+ for word1, word2 in zip(line1_words, line2_words):
+ if word1 != word2:
+                # Don't require exact equality with a skip word; a differing
+                # word that contains a skip word as a substring still matches.
+ if all(sw not in word1 and sw not in word2 for sw in self.skip_words):
+ return True
+
+ return False
+
+ def analyze(self) -> None:
+ """Analyze file contents in both directories and write to output or console.
+ """
+ for file in tqdm(sorted(self.common_file_map.keys())):
+ val = self.common_file_map[file]
+
+            lines = list()
+            # When the file exists in both directories, diff the two copies.
+            if val == set([self.first_dir, self.second_dir]):
+ lines = self.compare_file_contents(
+ self.first_dir + file, self.second_dir + file)
+ else:
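+                # The file exists in only one of the two directories.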
+ existing_dir, not_existing_dir = (
+ (self.first_dir, self.second_dir) if self.first_dir in val
+ else (self.second_dir, self.first_dir))
+
+ lines = [f"{not_existing_dir}{file} does not exist."]
+
+ if self.show_diff:
+ lines.append(f"Content of {existing_dir}{file}: \n")
+ lines.extend(get_file_contents(existing_dir + file))
+
+ self.write(lines)
+
+ def write(self, lines: List[str]) -> None:
+ if self.out_dir == "":
+ pprint(lines)
+ else:
+ write_lines(self.out_dir, lines)
+
+###
+# Helper functions
+###
+
+def sort_methods(lines: List[str]) -> List[str]:
+ """Sort class methods in the file contents by alphabetical order
+
+ Given lines of Java file contents, return lines with class methods sorted in alphabetical order.
+    Empty lines and whitespace-only lines are omitted.
+ For example:
+ l = [
+ "package android.test;",
+ "",
+ "public static final int ORANGE = 1;",
+ "",
+ "public class TestClass {",
+ "public TestClass() { throw new RuntimeException("Stub!"); }",
+ "public void foo() { throw new RuntimeException("Stub!"); }",
+ "public void bar() { throw new RuntimeException("Stub!"); }",
+ "}"
+ ]
+ sort_methods(l) returns
+ [
+ "package android.test;",
+ "public static final int ORANGE = 1;",
+ "public class TestClass {",
+ "public TestClass() { throw new RuntimeException("Stub!"); }",
+ "public void bar() { throw new RuntimeException("Stub!"); }",
+ "public void foo() { throw new RuntimeException("Stub!"); }",
+ "}"
+ ]
+
+ Args:
+        lines: List of strings containing the Java file contents.
+
+ Returns:
+        A list of strings with the class methods sorted.
+
+ """
+ def is_not_blank(l: str) -> bool:
+ return bool(l) and not l.isspace()
+
+ ret = list()
+
+ in_class = False
+ buffer = list()
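+    # buffer holds the method lines of the class currently being scanned;
+    # they are flushed in sorted order once the closing brace is reached.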
+ for line in lines:
+ if not in_class:
+ if "class" in line:
+ in_class = True
+ ret.append(line)
+ else:
+ # Adding static variables, package info, etc.
+ # Skipping empty or space lines.
+ if is_not_blank(line):
+ ret.append(line)
+ else:
+ # End of class
+ if line and line[0] == "}":
+ in_class = False
+ ret.extend(sorted(buffer))
+ buffer = list()
+ ret.append(line)
+ else:
+ if is_not_blank(line):
+ buffer.append(line)
+
+ return ret
+
+def get_file_contents(file_path: str) -> List[str]:
+    with open(file_path) as f:
+        return [line.rstrip('\n') for line in f]
+
+def pprint(l: List[str]) -> None:
+ for line in l:
+ print(line)
+
+def write_lines(out_dir: str, lines: List[str]) -> None:
+ with open(out_dir, "a") as f:
+ f.writelines(line + '\n' for line in lines)
+ f.write("\n")
+
+def dir_exists(dir: str) -> bool:
+ return Path(dir).exists()
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('first_dir', action='store', type=str,
+ help="first path to compare file directory and contents")
+ parser.add_argument('second_dir', action='store', type=str,
+ help="second path to compare file directory and contents")
+ parser.add_argument('--out', dest='out_dir',
+ action='store', default="", type=str,
+ help="optional directory to write log. If not set, will print to console")
+ parser.add_argument('--show-diff-file', dest='show_diff',
+ action=argparse.BooleanOptionalAction,
+ help="optional flag. If passed, will print out the content of the file unique to each directories")
+ parser.add_argument('--include-common', dest='include_common',
+ action=argparse.BooleanOptionalAction,
+ help="optional flag. If passed, will print out the contents common to both files as well,\
+ instead of printing only diff lines.")
+ parser.add_argument('--skip-words', nargs='+',
+ dest='skip_words', default=[], help="optional words to skip in comparison")
+
+ args = parser.parse_args()
+
+ if not args.first_dir or not args.second_dir:
+ parser.print_usage()
+ exit(0)
+
+ analyzer = FilesDiffAnalyzer(args)
+ analyzer.analyze()
diff --git a/tools/whichgit b/tools/whichgit
new file mode 100755
index 0000000..b0bf2e4
--- /dev/null
+++ b/tools/whichgit
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+
+import argparse
+import os
+import subprocess
+import sys
+
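+# Returns the value of a single build system variable via soong_ui's dumpvar mode.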
+def get_build_var(var):
+ return subprocess.run(["build/soong/soong_ui.bash","--dumpvar-mode", var],
+ check=True, capture_output=True, text=True).stdout.strip()
+
+
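+# Asks ninja for the transitive input files of the given modules; generated
+# files under out/ are filtered out.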
+def get_sources(modules):
+ result = subprocess.run(["./prebuilts/build-tools/linux-x86/bin/ninja", "-f",
+ "out/combined-" + os.environ["TARGET_PRODUCT"] + ".ninja",
+ "-t", "inputs", "-d", ] + modules,
+ stderr=subprocess.STDOUT, stdout=subprocess.PIPE, check=False, text=True)
+ if result.returncode != 0:
+ sys.stderr.write(result.stdout)
+ sys.exit(1)
+ return set([f for f in result.stdout.split("\n") if not f.startswith("out/")])
+
+
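+# Runs an analysis-only "nothing" build so the combined ninja file for the
+# current product is up to date.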
+def m_nothing():
+ result = subprocess.run(["build/soong/soong_ui.bash", "--build-mode", "--all-modules",
+ "--dir=" + os.getcwd(), "nothing"],
+ check=False, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, text=True)
+ if result.returncode != 0:
+ sys.stderr.write(result.stdout)
+ sys.exit(1)
+
+
+def get_git_dirs():
+ text = subprocess.run(["repo","list"], check=True, capture_output=True, text=True).stdout
+ return [line.split(" : ")[0] + "/" for line in text.split("\n")]
+
+
+def get_referenced_projects(git_dirs, files):
+ # files must be sorted
+ referenced_dirs = set()
+ prev_dir = None
+ for f in files:
+    # Optimization: files is sorted, so consecutive files usually live in the
+    # same project; rechecking the previous match first is a ~5x speedup for
+    # large sets of files.
+ if prev_dir:
+ if f.startswith(prev_dir):
+        referenced_dirs.add(prev_dir)
+ continue
+ for d in git_dirs:
+ if f.startswith(d):
+ referenced_dirs.add(d)
+ prev_dir = d
+ break
+ return [d[0:-1] for d in referenced_dirs]
+
+
+def main(argv):
+ # Argument parsing
+ ap = argparse.ArgumentParser(description="List the required git projects for the given modules")
+ ap.add_argument("--products", nargs="*",
+ help="The TARGET_PRODUCT to check. If not provided just uses whatever has"
+ + " already been built")
+ ap.add_argument("--variants", nargs="*",
+ help="The TARGET_BUILD_VARIANTS to check. If not provided just uses whatever has"
+ + " already been built, or eng if --products is supplied")
+ ap.add_argument("--modules", nargs="*",
+ help="The build modules to check, or droid it not supplied")
+ ap.add_argument("--why", nargs="*",
+ help="Also print the input files used in these projects, or \"*\" for all")
+ args = ap.parse_args(argv[1:])
+
+ modules = args.modules if args.modules else ["droid"]
+
+ # Get the list of sources for all of the requested build combos
+ if not args.products and not args.variants:
+ sources = get_sources(modules)
+ else:
+ if not args.products:
+ sys.stderr.write("Error: --products must be supplied if --variants is supplied")
+ sys.exit(1)
+ sources = set()
+ build_num = 1
+ for product in args.products:
+ os.environ["TARGET_PRODUCT"] = product
+ variants = args.variants if args.variants else ["user", "userdebug", "eng"]
+ for variant in variants:
+ sys.stderr.write(f"Analyzing build {build_num} of {len(args.products)*len(variants)}\r")
+ os.environ["TARGET_BUILD_VARIANT"] = variant
+ m_nothing()
+ sources.update(get_sources(modules))
+ build_num += 1
+ sys.stderr.write("\n\n")
+
+ sources = sorted(sources)
+
+  # Print the list of git directories that have one or more of the sources in them
+ for project in sorted(get_referenced_projects(get_git_dirs(), sources)):
+ print(project)
+ if args.why:
+ if "*" in args.why or project in args.why:
+ prefix = project + "/"
+ for f in sources:
+ if f.startswith(prefix):
+ print(" " + f)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
+
+
+# vim: set ts=2 sw=2 sts=2 expandtab nocindent tw=100:
diff --git a/tools/zipalign/Android.bp b/tools/zipalign/Android.bp
index 8cab04c..0e1d58e 100644
--- a/tools/zipalign/Android.bp
+++ b/tools/zipalign/Android.bp
@@ -70,6 +70,7 @@
"libgmock",
],
data: [
+ "tests/data/archiveWithOneDirectoryEntry.zip",
"tests/data/diffOrders.zip",
"tests/data/holes.zip",
"tests/data/unaligned.zip",
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index 08f67ff..23840e3 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -22,6 +22,19 @@
namespace android {
+// An entry is considered a directory if its uncompressed size is zero
+// and its name ends with a '/' or '\' character.
+static bool isDirectory(ZipEntry* entry) {
+ if (entry->getUncompressedLen() != 0) {
+ return false;
+ }
+
+    const char* name = entry->getFileName();
+    size_t nameLength = strlen(name);
+    if (nameLength == 0) {
+        return false;
+    }
+    char lastChar = name[nameLength - 1];
+    return lastChar == '/' || lastChar == '\\';
+}
+
static int getAlignment(bool pageAlignSharedLibs, int defaultAlignment,
ZipEntry* pEntry) {
@@ -59,7 +72,7 @@
return 1;
}
- if (pEntry->isCompressed()) {
+ if (pEntry->isCompressed() || isDirectory(pEntry)) {
/* copy the entry without padding */
//printf("--- %s: orig at %ld len=%ld (compressed)\n",
// pEntry->getFileName(), (long) pEntry->getFileOffset(),
@@ -160,7 +173,13 @@
printf("%8jd %s (OK - compressed)\n",
(intmax_t) pEntry->getFileOffset(), pEntry->getFileName());
}
- } else {
+    } else if (isDirectory(pEntry)) {
+ // Directory entries do not need to be aligned.
+ if (verbose)
+ printf("%8jd %s (OK - directory)\n",
+ (intmax_t) pEntry->getFileOffset(), pEntry->getFileName());
+ continue;
+ } else {
off_t offset = pEntry->getFileOffset();
const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry);
if ((offset % alignTo) != 0) {
diff --git a/tools/zipalign/tests/data/archiveWithOneDirectoryEntry.zip b/tools/zipalign/tests/data/archiveWithOneDirectoryEntry.zip
new file mode 100644
index 0000000..00be0ce
--- /dev/null
+++ b/tools/zipalign/tests/data/archiveWithOneDirectoryEntry.zip
Binary files differ
diff --git a/tools/zipalign/tests/src/align_test.cpp b/tools/zipalign/tests/src/align_test.cpp
index ff45187..a8433fa 100644
--- a/tools/zipalign/tests/src/align_test.cpp
+++ b/tools/zipalign/tests/src/align_test.cpp
@@ -12,6 +12,28 @@
using namespace android;
using namespace base;
+// This loads both files entirely into memory, so be careful!
+static bool sameContent(const std::string& path1, const std::string& path2) {
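+    // Returns true only when both files are readable and byte-for-byte identical.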
+ std::string f1;
+ if (!ReadFileToString(path1, &f1)) {
+ printf("Unable to read '%s' content: %m\n", path1.c_str());
+ return false;
+ }
+
+ std::string f2;
+ if (!ReadFileToString(path2, &f2)) {
+ printf("Unable to read '%s' content %m\n", path1.c_str());
+ return false;
+ }
+
+ if (f1.size() != f2.size()) {
+ printf("File '%s' and '%s' are not the same\n", path1.c_str(), path2.c_str());
+ return false;
+ }
+
+ return f1.compare(f2) == 0;
+}
+
static std::string GetTestPath(const std::string& filename) {
static std::string test_data_dir = android::base::GetExecutableDirectory() + "/tests/data/";
return test_data_dir + filename;
@@ -87,3 +109,21 @@
int verified = verify(dst.c_str(), 4, false, true);
ASSERT_EQ(0, verified);
}
+
+TEST(Align, DirectoryEntryDoNotRequireAlignment) {
+ const std::string src = GetTestPath("archiveWithOneDirectoryEntry.zip");
+ int verified = verify(src.c_str(), 4, false, true);
+ ASSERT_EQ(0, verified);
+}
+
+TEST(Align, DirectoryEntry) {
+ const std::string src = GetTestPath("archiveWithOneDirectoryEntry.zip");
+ const std::string dst = GetTempPath("archiveWithOneDirectoryEntry_out.zip");
+
+ int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+ ASSERT_EQ(0, processed);
+    ASSERT_TRUE(sameContent(src, dst));
+
+ int verified = verify(dst.c_str(), 4, false, true);
+ ASSERT_EQ(0, verified);
+}