Merge "Add support for only starting 64 bit zygote."
diff --git a/.gitignore b/.gitignore
index f1f4a52..54c90ed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+*.iml
*.pyc
*.swp
blueprint/
diff --git a/Changes.md b/Changes.md
index cabbed6..a03a48c 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,62 @@
# Build System Changes for Android.mk Writers
+## Gensrcs starts disallowing depfile property
+
+To migrate all gensrcs to Bazel, we are restricting the use of the `depfile`
+property, because Bazel requires the dependencies to be specified directly.
+
+To fix existing uses, remove `depfile` and specify all the dependencies
+directly in .bp files. For example:
+
+```
+gensrcs {
+ name: "framework-cppstream-protos",
+ tools: [
+ "aprotoc",
+ "protoc-gen-cppstream",
+ ],
+ cmd: "mkdir -p $(genDir)/$(in) " +
+ "&& $(location aprotoc) " +
+ " --plugin=$(location protoc-gen-cppstream) " +
+ " -I . " +
+ " $(in) ",
+ srcs: [
+ "bar.proto",
+ ],
+ output_extension: "srcjar",
+}
+```
+where `bar.proto` imports `external.proto`, this would become
+
+```
+gensrcs {
+ name: "framework-cppstream-protos",
+ tools: [
+ "aprotoc",
+        "protoc-gen-cppstream",
+ ],
+ tool_files: [
+ "external.proto",
+ ],
+ cmd: "mkdir -p $(genDir)/$(in) " +
+ "&& $(location aprotoc) " +
+ " --plugin=$(location protoc-gen-cppstream) " +
+ " $(in) ",
+ srcs: [
+ "bar.proto",
+ ],
+ output_extension: "srcjar",
+}
+```
+as in https://android-review.googlesource.com/c/platform/frameworks/base/+/2125692/.
+
+`BUILD_BROKEN_DEPFILE` can be used to allowlist usage of `depfile` in `gensrcs`.
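+
+For example, a device can set this in its `BoardConfig.mk`:
+
+```
+# Keep allowing `depfile` in gensrcs for this board while migrating.
+BUILD_BROKEN_DEPFILE := true
+```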
+
+If `depfile` is needed for generating a javastream proto, a `java_library` with
+`proto.type` set to `stream` is the alternative solution. See
+https://android-review.googlesource.com/c/platform/packages/modules/Permission/+/2118004/
+for an example.
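+
+A minimal sketch of that alternative (the module name below is hypothetical;
+see the CL above for a real conversion):
+
+```
+java_library {
+    // Hypothetical module name.
+    name: "framework-javastream-protos",
+    srcs: [
+        "bar.proto",
+    ],
+    proto: {
+        type: "stream",
+    },
+}
+```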
+
## Genrule starts disallowing directory inputs
To better specify the inputs to the build, we are restricting use of directories
diff --git a/OWNERS b/OWNERS
index 4cac0f5..6e7c0ea 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1 +1,3 @@
include platform/build/soong:/OWNERS
+
+per-file finalize_branch_for_release.sh = smoreland@google.com
diff --git a/core/Makefile b/core/Makefile
index 4081f4a..6a9ef49 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -94,6 +94,8 @@
$(pcf_ignored_file):
echo "$(PRIVATE_IGNORED)" | tr " " "\n" >$@
+$(call declare-0p-target,$(pcf_ignored_file))
+
$(call dist-for-goals,droidcore-unbundled,$(pcf_ignored_file):logs/$(notdir $(pcf_ignored_file)))
pcf_ignored_file :=
@@ -168,6 +170,8 @@
echo "HTML_OUTPUT=$(ndk_doxygen_out)" \
) | doxygen -
+$(call declare-1p-target,$(ndk_doxygen_out)/index.html,)
+
# Note: Not a part of the docs target because we don't have doxygen available.
# You can run this target locally if you have doxygen installed.
ndk-docs: $(ndk_doxygen_out)/index.html
@@ -223,6 +227,8 @@
echo "$$x"generic >> $@.tmp; done
$(hide) mv $@.tmp $@
+$(call declare-0p-target,$(INSTALLED_SDK_BUILD_PROP_TARGET))
+
# -----------------------------------------------------------------
# declare recovery ramdisk files
ifeq ($(BUILDING_RECOVERY_IMAGE),true)
@@ -598,6 +604,8 @@
# In case value of PACKAGES is empty.
$(hide) touch $@
+$(call declare-0p-target,$(APKCERTS_FILE))
+
.PHONY: apkcerts-list
apkcerts-list: $(APKCERTS_FILE)
@@ -614,6 +622,7 @@
@rm -f $@
@$(foreach s,$(STATS.MODULE_TYPE),echo "modules_type_make,$(s),$(words $(STATS.MODULE_TYPE.$(s)))" >>$@;)
@$(foreach s,$(STATS.SOONG_MODULE_TYPE),echo "modules_type_soong,$(s),$(STATS.SOONG_MODULE_TYPE.$(s))" >>$@;)
+$(call declare-1p-target,$(BUILD_SYSTEM_STATS),build)
$(call dist-for-goals,droidcore-unbundled,$(BUILD_SYSTEM_STATS))
# -----------------------------------------------------------------
@@ -634,11 +643,14 @@
@rm -f $@
@$(foreach s,$(SOONG_CONV),echo "$(s),$(SOONG_CONV.$(s).TYPE),$(sort $(SOONG_CONV.$(s).PROBLEMS)),$(sort $(filter-out $(SOONG_ALREADY_CONV),$(SOONG_CONV.$(s).DEPS))),$(sort $(SOONG_CONV.$(s).MAKEFILES)),$(sort $(SOONG_CONV.$(s).INSTALLED))" >>$@;)
+$(call declare-1p-target,$(SOONG_CONV_DATA),build)
+
SOONG_TO_CONVERT_SCRIPT := build/make/tools/soong_to_convert.py
SOONG_TO_CONVERT := $(PRODUCT_OUT)/soong_to_convert.txt
$(SOONG_TO_CONVERT): $(SOONG_CONV_DATA) $(SOONG_TO_CONVERT_SCRIPT)
@rm -f $@
$(hide) $(SOONG_TO_CONVERT_SCRIPT) $< >$@
+$(call declare-1p-target,$(SOONG_TO_CONVERT),build)
$(call dist-for-goals,droidcore-unbundled,$(SOONG_TO_CONVERT))
$(PRODUCT_OUT)/product_packages.txt:
@@ -660,6 +672,7 @@
--out-dir="$(OUT_DIR)" \
--mode=html \
> $@
+$(call declare-1p-target,$(MK2BP_REMAINING_HTML),build)
$(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_HTML))
MK2BP_REMAINING_CSV := $(PRODUCT_OUT)/mk2bp_remaining.csv
@@ -671,6 +684,7 @@
--out-dir="$(OUT_DIR)" \
--mode=csv \
> $@
+$(call declare-1p-target,$(MK2BP_REMAINING_CSV))
$(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_CSV))
# -----------------------------------------------------------------
@@ -680,8 +694,10 @@
@rm -f $@
echo "# Modules using -Wno-error" >> $@
for m in $(sort $(SOONG_MODULES_USING_WNO_ERROR) $(MODULES_USING_WNO_ERROR)); do echo $$m >> $@; done
- echo "# Modules added default -Wall" >> $@
- for m in $(sort $(SOONG_MODULES_ADDED_WALL) $(MODULES_ADDED_WALL)); do echo $$m >> $@; done
+ echo "# Modules that allow warnings" >> $@
+ for m in $(sort $(SOONG_MODULES_WARNINGS_ALLOWED) $(MODULES_WARNINGS_ALLOWED)); do echo $$m >> $@; done
+
+$(call declare-0p-target,$(WALL_WERROR))
$(call dist-for-goals,droidcore-unbundled,$(WALL_WERROR))
@@ -689,6 +705,8 @@
# C/C++ flag information for modules
$(call dist-for-goals,droidcore-unbundled,$(SOONG_MODULES_CFLAG_ARTIFACTS))
+$(foreach a,$(SOONG_MODULES_CFLAG_ARTIFACTS),$(call declare-0p-target,$(call word-colon,1,$(a))))
+
# -----------------------------------------------------------------
# Modules missing profile files
PGO_PROFILE_MISSING := $(PRODUCT_OUT)/pgo_profile_file_missing.txt
@@ -697,12 +715,15 @@
echo "# Modules missing PGO profile files" >> $@
for m in $(SOONG_MODULES_MISSING_PGO_PROFILE_FILE); do echo $$m >> $@; done
+$(call declare-0p-target,$(PGO_PROFILE_MISSING))
+
$(call dist-for-goals,droidcore,$(PGO_PROFILE_MISSING))
CERTIFICATE_VIOLATION_MODULES_FILENAME := $(PRODUCT_OUT)/certificate_violation_modules.txt
$(CERTIFICATE_VIOLATION_MODULES_FILENAME):
rm -f $@
$(foreach m,$(sort $(CERTIFICATE_VIOLATION_MODULES)), echo $(m) >> $@;)
+$(call declare-0p-target,$(CERTIFICATE_VIOLATION_MODULES_FILENAME))
$(call dist-for-goals,droidcore,$(CERTIFICATE_VIOLATION_MODULES_FILENAME))
# -----------------------------------------------------------------
@@ -739,6 +760,8 @@
$(hide) mkdir -p $(dir $@)
$(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES)
+$(call declare-0p-target,$(all_event_log_tags_file))
+
# Include tags from all packages included in this product, plus all
# tags that are part of the system (ie, not in a vendor/ or device/
# directory).
@@ -817,10 +840,7 @@
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(call declare-0p-target,$(INSTALLED_FILES_FILE_ROOT))
-
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
-endif
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_ROOT))
#------------------------------------------------------------------
# dtb
@@ -851,10 +871,8 @@
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_RAMDISK)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_RAMDISK)))
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE_RAMDISK))
-endif
BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
ifeq ($(BOARD_RAMDISK_USE_LZ4),true)
@@ -949,13 +967,12 @@
INTERNAL_BOOTIMAGE_ARGS := \
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
-INTERNAL_INIT_BOOT_IMAGE_ARGS :=
-
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
- INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
- else
- INTERNAL_INIT_BOOT_IMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
+# TODO(b/229701033): clean up BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK.
+ifneq ($(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK),true)
+ ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+ ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
+ INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
+ endif
endif
endif
@@ -1201,6 +1218,8 @@
INSTALLED_INIT_BOOT_IMAGE_TARGET := $(PRODUCT_OUT)/init_boot.img
$(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_RAMDISK_TARGET)
+INTERNAL_INIT_BOOT_IMAGE_ARGS := --ramdisk $(INSTALLED_RAMDISK_TARGET)
+
ifdef BOARD_KERNEL_PAGESIZE
INTERNAL_INIT_BOOT_IMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
endif
@@ -1309,6 +1328,7 @@
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_RAMDISK)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_RAMDISK)))
ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
ifneq ($(BUILDING_VENDOR_KERNEL_BOOT_IMAGE),true)
@@ -1429,6 +1449,9 @@
$(FILESLIST) $(TARGET_VENDOR_KERNEL_RAMDISK_OUT) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_KERNEL_RAMDISK))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_KERNEL_RAMDISK))
+
INTERNAL_VENDOR_KERNEL_BOOTIMAGE_ARGS := --vendor_ramdisk $(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET)
INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_kernel_boot.img
$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET)
@@ -1458,6 +1481,14 @@
$(MKBOOTIMG) $(INTERNAL_VENDOR_KERNEL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_boot $@
$(call assert-max-image-size,$@,$(BOARD_VENDOR_KERNEL_BOOTIMAGE_PARTITION_SIZE))
endif
+$(call declare-1p-container,$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),)
+ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
+$(call declare-container-license-deps,$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),\
+ $(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET) $(INSTALLED_DTBIMAGE_TARGET),\
+ $(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET):)
+else
+$(call declare-container-license-deps,$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),$(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET),$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET):)
+endif
endif # BUILDING_VENDOR_KERNEL_BOOT_IMAGE
# -----------------------------------------------------------------
@@ -1484,8 +1515,9 @@
define xml-notice-rule
$(1): PRIVATE_PRODUCT := $(2)
$(1): PRIVATE_MESSAGE := $(3)
+$(1): PRIVATE_DEPS := $(call corresponding-license-metadata,$(4))
$(1): $(call corresponding-license-metadata,$(4)) $(XMLNOTICE) $(BUILD_SYSTEM)/Makefile
- OUT_DIR=$(OUT_DIR) $(XMLNOTICE) -o $$@ -product=$$(PRIVATE_PRODUCT) -title=$$(PRIVATE_MESSAGE) $(foreach prefix, $(5), -strip_prefix=$(prefix)) $(call corresponding-license-metadata,$(4))
+ OUT_DIR=$(OUT_DIR) $(XMLNOTICE) -o $$@ -product=$$(PRIVATE_PRODUCT) -title=$$(PRIVATE_MESSAGE) $(foreach prefix, $(5), -strip_prefix=$(prefix)) $$(PRIVATE_DEPS)
notice_files: $(1)
endef
@@ -1530,7 +1562,6 @@
# TODO These intermediate NOTICE.txt/NOTICE.html files should go into
# TARGET_OUT_NOTICE_FILES now that the notice files are gathered from
# the src subdirectory.
-target_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE.txt
kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
@@ -1672,19 +1703,6 @@
# Targets for user images
# #################################################################
-INTERNAL_USERIMAGES_EXT_VARIANT :=
-ifeq ($(TARGET_USERIMAGES_USE_EXT2),true)
-INTERNAL_USERIMAGES_EXT_VARIANT := ext2
-else
-ifeq ($(TARGET_USERIMAGES_USE_EXT3),true)
-INTERNAL_USERIMAGES_EXT_VARIANT := ext3
-else
-ifeq ($(TARGET_USERIMAGES_USE_EXT4),true)
-INTERNAL_USERIMAGES_EXT_VARIANT := ext4
-endif
-endif
-endif
-
# These options tell the recovery updater/installer how to mount the partitions writable.
# <fstype>=<fstype_opts>[|<fstype_opts>]...
# fstype_opts := <opt>[,<opt>]...
@@ -1692,19 +1710,6 @@
# The following worked on Nexus devices with Kernel 3.1, 3.4, 3.10
DEFAULT_TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS := ext4=max_batch_time=0,commit=1,data=ordered,barrier=1,errors=panic,nodelalloc
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
- INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
-endif
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED))
- INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG := -s
-endif
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
- INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
-endif
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED))
- INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG := -S
-endif
-
INTERNAL_USERIMAGES_DEPS := \
$(BUILD_IMAGE) \
$(MKE2FS_CONF) \
@@ -1727,8 +1732,12 @@
$(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
,erofs),)
INTERNAL_USERIMAGES_DEPS += $(MKEROFS)
+ifeq ($(BOARD_EROFS_USE_LEGACY_COMPRESSION),true)
+BOARD_EROFS_COMPRESSOR ?= "lz4"
+else
BOARD_EROFS_COMPRESSOR ?= "lz4hc,9"
endif
+endif
ifneq ($(filter \
$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) \
@@ -1787,6 +1796,7 @@
define add-common-ro-flags-to-image-props
$(eval _var := $(call to-upper,$(1)))
$(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR),$(hide) echo "$(1)_erofs_compressor=$(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR)" >> $(2))
+$(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS),$(hide) echo "$(1)_erofs_compress_hints=$(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE),$(hide) echo "$(1)_erofs_pcluster_size=$(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT),$(hide) echo "$(1)_extfs_inode_count=$(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT),$(hide) echo "$(1)_extfs_rsv_pct=$(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT)" >> $(2))
@@ -1866,14 +1876,19 @@
)
$(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(1)
-$(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG),$(hide) echo "erofs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG),$(hide) echo "f2fs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG)" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_USE_EXT2)),$(hide) echo "fs_type=ext2" >> $(1),
+ $(if $(filter true,$(TARGET_USERIMAGES_USE_EXT3)),$(hide) echo "fs_type=ext3" >> $(1),
+ $(if $(filter true,$(TARGET_USERIMAGES_USE_EXT4)),$(hide) echo "fs_type=ext4" >> $(1))))
+
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)),,$(hide) echo "extfs_sparse_flag=-s" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED)),,$(hide) echo "erofs_sparse_flag=-s" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED)),,$(hide) echo "squashfs_sparse_flag=-s" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)),,$(hide) echo "f2fs_sparse_flag=-S" >> $(1))
$(if $(BOARD_EROFS_COMPRESSOR),$(hide) echo "erofs_default_compressor=$(BOARD_EROFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_EROFS_COMPRESS_HINTS),$(hide) echo "erofs_default_compress_hints=$(BOARD_EROFS_COMPRESS_HINTS)" >> $(1))
$(if $(BOARD_EROFS_PCLUSTER_SIZE),$(hide) echo "erofs_pcluster_size=$(BOARD_EROFS_PCLUSTER_SIZE)" >> $(1))
$(if $(BOARD_EROFS_SHARE_DUP_BLOCKS),$(hide) echo "erofs_share_dup_blocks=$(BOARD_EROFS_SHARE_DUP_BLOCKS)" >> $(1))
+$(if $(BOARD_EROFS_USE_LEGACY_COMPRESSION),$(hide) echo "erofs_use_legacy_compression=$(BOARD_EROFS_USE_LEGACY_COMPRESSION)" >> $(1))
$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
$(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
$(if $(BOARD_FLASH_ERASE_BLOCK_SIZE), $(hide) echo "flash_erase_block_size=$(BOARD_FLASH_ERASE_BLOCK_SIZE)" >> $(1))
@@ -1966,6 +1981,8 @@
$(hide) echo "recovery_as_boot=true" >> $(1))
$(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
$(hide) echo "system_root_image=true" >> $(1))
+$(if $(filter true,$(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK)),\
+ $(hide) echo "gki_boot_image_without_ramdisk=true" >> $(1))
$(hide) echo "root_dir=$(TARGET_ROOT_OUT)" >> $(1)
$(if $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)),\
$(hide) echo "use_dynamic_partition_size=true" >> $(1))
@@ -2031,8 +2048,6 @@
INSTALLED_FILES_FILE_RECOVERY := $(PRODUCT_OUT)/installed-files-recovery.txt
INSTALLED_FILES_JSON_RECOVERY := $(INSTALLED_FILES_FILE_RECOVERY:.txt=.json)
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_RECOVERY)))
-
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
INSTALLED_BOOTIMAGE_TARGET := $(BUILT_BOOTIMAGE_TARGET)
endif
@@ -2051,6 +2066,9 @@
$(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_RECOVERY)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_RECOVERY)))
+
recovery_sepolicy := \
$(TARGET_RECOVERY_ROOT_OUT)/sepolicy \
$(TARGET_RECOVERY_ROOT_OUT)/plat_file_contexts \
@@ -2306,6 +2324,11 @@
$(hide) cat $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET) >> $@
$(call append-recovery-ui-properties,$(PRIVATE_RECOVERY_UI_PROPERTIES),$@)
+$(call declare-1p-target,$(INSTALLED_RECOVERY_BUILD_PROP_TARGET),build)
+$(call declare-license-deps,$(INSTALLED_RECOVERY_BUILD_PROP_TARGET),\
+ $(INSTALLED_BUILD_PROP_TARGET) $(INSTALLED_VENDOR_BUILD_PROP_TARGET) $(INSTALLED_ODM_BUILD_PROP_TARGET) \
+ $(INSTALLED_PRODUCT_BUILD_PROP_TARGET) $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET))
+
# Only install boot/etc/build.prop to recovery image on recovery_as_boot.
# On device with dedicated recovery partition, the file should come from the boot
# ramdisk.
@@ -2313,6 +2336,9 @@
INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RECOVERY_ROOT_OUT)/$(RAMDISK_BUILD_PROP_REL_PATH)
$(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET): $(INSTALLED_RAMDISK_BUILD_PROP_TARGET)
$(copy-file-to-target)
+
+$(call declare-1p-target,$(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET),build)
+$(call declare-license-deps,$(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET),$(INSTALLED_RAMDISK_BUILD_PROP_TARGET))
endif
INTERNAL_RECOVERYIMAGE_ARGS := --ramdisk $(recovery_ramdisk)
@@ -2533,6 +2559,7 @@
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_DEBUG_RAMDISK)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_DEBUG_RAMDISK)))
ifdef BUILDING_DEBUG_BOOT_IMAGE
@@ -2666,7 +2693,8 @@
$(FILESLIST) $(INTERNAL_DEBUG_VENDOR_RAMDISK_SRC_DIRS) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK))
INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot-debug)/vendor_ramdisk-debug.cpio$(RAMDISK_EXT)
@@ -2949,20 +2977,29 @@
$(FSVERITY_APK_OUT): PRIVATE_FRAMEWORK_RES := $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
$(FSVERITY_APK_OUT): PRIVATE_KEY := $(FSVERITY_APK_KEY_PATH)
$(FSVERITY_APK_OUT): PRIVATE_INPUTS := $(fsverity-metadata-targets)
+$(FSVERITY_APK_OUT): PRIVATE_ASSETS := $(call intermediates-dir-for,ETC,build_manifest)/assets
$(FSVERITY_APK_OUT): $(HOST_OUT_EXECUTABLES)/fsverity_manifest_generator \
$(HOST_OUT_EXECUTABLES)/fsverity $(HOST_OUT_EXECUTABLES)/aapt2 \
$(HOST_OUT_EXECUTABLES)/apksigner $(FSVERITY_APK_MANIFEST_PATH) \
$(FSVERITY_APK_KEY_PATH).x509.pem $(FSVERITY_APK_KEY_PATH).pk8 \
$(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk \
$(fsverity-metadata-targets)
- $< --fsverity-path $(PRIVATE_FSVERITY) --aapt2-path $(PRIVATE_AAPT2) \
+ rm -rf $(PRIVATE_ASSETS)
+ mkdir -p $(PRIVATE_ASSETS)
+ $< --fsverity-path $(PRIVATE_FSVERITY) \
+ --base-dir $(PRODUCT_OUT) \
+ --output $(PRIVATE_ASSETS)/build_manifest.pb \
+ $(PRIVATE_INPUTS)
+ $(PRIVATE_AAPT2) link -o $@ \
+ -A $(PRIVATE_ASSETS) \
+ -I $(PRIVATE_FRAMEWORK_RES) \
--min-sdk-version $(PRIVATE_MIN_SDK_VERSION) \
--version-code $(PRIVATE_VERSION_CODE) \
--version-name $(PRIVATE_VERSION_NAME) \
- --apksigner-path $(PRIVATE_APKSIGNER) --apk-key-path $(PRIVATE_KEY) \
- --apk-manifest-path $(PRIVATE_MANIFEST) --framework-res $(PRIVATE_FRAMEWORK_RES) \
- --output $@ \
- --base-dir $(PRODUCT_OUT) $(PRIVATE_INPUTS)
+ --manifest $(PRIVATE_MANIFEST)
+ $(PRIVATE_APKSIGNER) sign --in $@ \
+ --cert $(PRIVATE_KEY).x509.pem \
+ --key $(PRIVATE_KEY).pk8
ALL_DEFAULT_INSTALLED_MODULES += $(FSVERITY_APK_OUT)
@@ -3045,14 +3082,11 @@
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON)))
.PHONY: installed-file-list
installed-file-list: $(INSTALLED_FILES_FILE)
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE))
-endif
-
systemimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,systemimage)
BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
@@ -3321,6 +3355,7 @@
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEMOTHER)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_SYSTEMOTHER)))
# Determines partition size for system_other.img.
ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
@@ -3412,7 +3447,8 @@
$(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR))
vendorimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,vendor)
@@ -3440,7 +3476,7 @@
VENDOR_NOTICE_DEPS += $(INSTALLED_VENDORIMAGE_TARGET)
-$(call declare-1p-container,$(INSTALLED_VENDORIMAGE_TARGET),vendor)
+$(call declare-container-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),legacy_proprietary,proprietary,,"Vendor Image",vendor)
$(call declare-container-license-deps,$(INSTALLED_VENDORIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES) $(RECOVERY_FROM_BOOT_PATH),$(PRODUCT_OUT)/:/)
.PHONY: vendorimage-nodeps vnod
@@ -3452,6 +3488,10 @@
else ifdef BOARD_PREBUILT_VENDORIMAGE
INSTALLED_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
$(eval $(call copy-one-file,$(BOARD_PREBUILT_VENDORIMAGE),$(INSTALLED_VENDORIMAGE_TARGET)))
+$(if $(strip $(ALL_TARGETS.$(INSTALLED_VENDORIMAGE_TARGET).META_LIC)),,\
+ $(if $(strip $(ALL_TARGETS.$(BOARD_PREBUILT_VENDORIMAGE).META_LIC)),\
+ $(call declare-copy-target-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_PREBUILT_VENDORIMAGE)),\
+ $(call declare-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),legacy_proprietary,proprietary,,"Vendor Image",vendor)))
endif
# -----------------------------------------------------------------
@@ -3472,7 +3512,8 @@
$(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_PRODUCT)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_PRODUCT))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_PRODUCT))
productimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,product)
@@ -3531,7 +3572,8 @@
$(FILESLIST) $(TARGET_OUT_SYSTEM_EXT) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_EXT)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_EXT))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_SYSTEM_EXT))
system_extimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,system_ext)
@@ -3610,7 +3652,8 @@
$(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_ODM))
odmimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,odm)
@@ -3670,7 +3713,8 @@
$(FILESLIST) $(TARGET_OUT_VENDOR_DLKM) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DLKM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DLKM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_DLKM))
vendor_dlkmimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,vendor_dlkm)
@@ -3730,7 +3774,8 @@
$(FILESLIST) $(TARGET_OUT_ODM_DLKM) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM_DLKM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM_DLKM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_ODM_DLKM))
odm_dlkmimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,odm_dlkm)
@@ -3793,7 +3838,8 @@
$(FILESLIST) $(TARGET_OUT_SYSTEM_DLKM) > $(@:.txt=.json)
$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_DLKM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_DLKM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_SYSTEM_DLKM))
system_dlkmimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,system_dlkm)
@@ -4539,6 +4585,7 @@
check_vintf_all_deps += $(check_vintf_system_log)
$(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
+$(call declare-0p-target,$(check_vintf_system_log))
check_vintf_system_log :=
vintffm_log := $(intermediates)/vintffm.log
@@ -4547,6 +4594,8 @@
@( $< --check --dirmap /system:$(TARGET_OUT) \
$(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
+$(call declare-0p-target,$(vintffm_log))
+
endif # check_vintf_system_deps
check_vintf_system_deps :=
@@ -4568,6 +4617,7 @@
( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) \
--property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
> $@ 2>&1 ) || ( cat $@ && exit 1 ); )
+$(call declare-0p-target,$(check_vintf_vendor_log))
check_vintf_vendor_log :=
endif # check_vintf_vendor_deps
check_vintf_vendor_deps :=
@@ -4589,6 +4639,9 @@
$(BUILT_KERNEL_VERSION_FILE):
echo $(BOARD_KERNEL_VERSION) > $@
+$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-0p-target,$(BUILT_KERNEL_VERSION_FILE))
+
my_board_extracted_kernel := true
endif # BOARD_KERNEL_VERSION
endif # BOARD_KERNEL_CONFIG_FILE
@@ -4612,6 +4665,8 @@
--output-configs $@ \
--output-release $(BUILT_KERNEL_VERSION_FILE)
+$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+
my_board_extracted_kernel := true
endif # INSTALLED_KERNEL_TARGET
endif # my_board_extracted_kernel
@@ -4631,6 +4686,8 @@
--output-configs $@ \
--output-release $(BUILT_KERNEL_VERSION_FILE)
+$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+
my_board_extracted_kernel := true
endif # INSTALLED_BOOTIMAGE_TARGET
endif # my_board_extracted_kernel
@@ -4720,6 +4777,8 @@
--property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
>> $@ 2>&1 ) || (cat $@ && exit 1); ))
+$(call declare-0p-target,$(check_vintf_compatible_log))
+
check_vintf_compatible_log :=
check_vintf_compatible_args :=
check_vintf_compatible_deps :=
@@ -4783,6 +4842,8 @@
$(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/misc_info.txt, \
$@)
+$(call declare-0p-target,$(check_all_partition_sizes_log))
+
.PHONY: check-all-partition-sizes
check-all-partition-sizes: $(check_all_partition_sizes_log)
@@ -4992,6 +5053,9 @@
cp $(SOONG_ZIP) $(ZIP2ZIP) $(MERGE_ZIPS) $(PRIVATE_ZIP_ROOT)/bin/
$(SOONG_ZIP) -o $@ -C $(PRIVATE_ZIP_ROOT) -D $(PRIVATE_ZIP_ROOT)
+$(call declare-1p-container,$(BUILT_OTATOOLS_PACKAGE),build)
+$(call declare-container-license-deps,$(INTERNAL_OTATOOLS_PACKAGE_FILES) $(INTERNAL_OTATOOLS_RELEASETOOLS),$(BUILT_OTATOOLS_PACKAGE):)
+
.PHONY: otatools-package
otatools-package: $(BUILT_OTATOOLS_PACKAGE)
@@ -5158,7 +5222,7 @@
endif # BOARD_AVB_VBMETA_SYSTEM
ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
$(hide) echo "avb_vbmeta_vendor=$(BOARD_AVB_VBMETA_VENDOR)" >> $@
- $(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS)" >> $@
+ $(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS)" >> $@
$(hide) echo "avb_vbmeta_vendor_key_path=$(BOARD_AVB_VBMETA_VENDOR_KEY_PATH)" >> $@
$(hide) echo "avb_vbmeta_vendor_algorithm=$(BOARD_AVB_VBMETA_VENDOR_ALGORITHM)" >> $@
$(hide) echo "avb_vbmeta_vendor_rollback_index_location=$(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $@
@@ -5285,10 +5349,12 @@
tool_extension := $(wildcard $(tool_extensions)/releasetools.py)
$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
-updaer_dep :=
+updater_dep :=
ifeq ($(AB_OTA_UPDATER),true)
updater_dep += system/update_engine/update_engine.conf
+$(call declare-1p-target,system/update_engine/update_engine.conf,system/update_engine)
updater_dep += external/zucchini/version_info.h
+$(call declare-license-metadata,external/zucchini/version_info.h,legacy_notice,notice,external/zucchini/LICENSE,external/zucchini)
updater_dep += $(HOST_OUT_SHARED_LIBRARIES)/liblz4.so
endif
@@ -5883,6 +5949,8 @@
$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
$(hide) cp $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) $(zip_root)/PREBUILT_IMAGES/
+ $(hide) mkdir -p $(zip_root)/PVMFW
+ $(hide) cp $(PREBUILT_PVMFWIMAGE_TARGET) $(zip_root)/PVMFW/
endif
ifdef BOARD_PREBUILT_BOOTLOADER
$(hide) mkdir -p $(zip_root)/IMAGES
@@ -5976,6 +6044,41 @@
.PHONY: target-files-package
target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
+$(call declare-1p-container,$(BUILT_TARGET_FILES_PACKAGE),)
+$(call declare-container-license-deps,$(BUILT_TARGET_FILES_PACKAGE), $(INSTALLED_RADIOIMAGE_TARGET) \
+ $(INSTALLED_RECOVERYIMAGE_TARGET) \
+ $(INSTALLED_CACHEIMAGE_TARGET) \
+ $(INSTALLED_DTBOIMAGE_TARGET) \
+ $(INSTALLED_PVMFWIMAGE_TARGET) \
+ $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) \
+ $(INSTALLED_CUSTOMIMAGES_TARGET) \
+ $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
+ $(INSTALLED_KERNEL_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_DTBIMAGE_TARGET) \
+ $(INSTALLED_2NDBOOTLOADER_TARGET) \
+ $(BOARD_PREBUILT_DTBOIMAGE) \
+ $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \
+ $(BOARD_RECOVERY_ACPIO) \
+ $(PRODUCT_SYSTEM_BASE_FS_PATH) \
+ $(PRODUCT_VENDOR_BASE_FS_PATH) \
+ $(PRODUCT_PRODUCT_BASE_FS_PATH) \
+ $(PRODUCT_SYSTEM_EXT_BASE_FS_PATH) \
+ $(PRODUCT_ODM_BASE_FS_PATH) \
+ $(PRODUCT_VENDOR_DLKM_BASE_FS_PATH) \
+ $(PRODUCT_ODM_DLKM_BASE_FS_PATH) \
+ $(PRODUCT_SYSTEM_DLKM_BASE_FS_PATH) \
+ $(LPMAKE) \
+ $(SELINUX_FC) \
+ $(INSTALLED_MISC_INFO_TARGET) \
+ $(APKCERTS_FILE) \
+ $(SOONG_APEX_KEYS_FILE) \
+ $(HOST_OUT_EXECUTABLES)/fs_config \
+ $(ADD_IMG_TO_TARGET_FILES) \
+ $(MAKE_RECOVERY_PATCH) \
+ $(BUILT_KERNEL_CONFIGS_FILE) \
+ $(BUILT_KERNEL_VERSION_FILE),$(BUILT_TARGET_FILES_PACKAGE):)
+
$(call dist-for-goals, target-files-package, $(BUILT_TARGET_FILES_PACKAGE))
# -----------------------------------------------------------------
@@ -6015,12 +6118,17 @@
INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
INTERNAL_OTA_METADATA := $(PRODUCT_OUT)/ota_metadata
+$(call declare-0p-target,$(INTERNAL_OTA_METADATA))
+
$(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
$(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
@echo "Package OTA: $@"
$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
+$(call declare-1p-container,$(INTERNAL_OTA_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_OTA_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES),$(PRODUCT_OUT)/:/)
+
.PHONY: otapackage
otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
@@ -6036,6 +6144,9 @@
@echo "Package OTA (retrofit dynamic partitions): $@"
$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --retrofit_dynamic_partitions)
+$(call declare-1p-container,$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES),$(PRODUCT_OUT)/:/)
+
.PHONY: otardppackage
otapackage otardppackage: $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET)
@@ -6051,6 +6162,10 @@
@echo "Package partial OTA: $@"
$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --partial "$(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST)")
+$(call declare-1p-container,$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES),$(PRODUCT_OUT)/:/)
+
.PHONY: partialotapackage
partialotapackage: $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET)
@@ -6090,6 +6205,7 @@
$(DEXPREOPT_TOOLS_ZIP): $(SOONG_ZIP)
$(hide) mkdir -p $(dir $@)
$(hide) $(SOONG_ZIP) -d -o $@ -j $(addprefix -f ,$(PRIVATE_DEXPREOPT_TOOLS_DEPS)) -f $$(realpath $(DEX2OAT))
+$(call declare-1p-target,$(DEXPREOPT_TOOLS_ZIP),)
endif # DEX2OAT is set
endif # BUILD_OS == linux
@@ -6122,6 +6238,8 @@
.PHONY: dexpreopt_config_zip
dexpreopt_config_zip: $(DEXPREOPT_CONFIG_ZIP)
+$(call declare-1p-target,$(DEXPREOPT_CONFIG_ZIP),)
+
# -----------------------------------------------------------------
# A zip of the symbols directory. Keep the full paths to make it
# more obvious where these files came from.
@@ -6158,6 +6276,11 @@
$(hide) $(SYMBOLS_MAP) -merge $(SYMBOLS_MAPPING) -ignore_missing_files @$(PRIVATE_LIST_FILE)
$(SYMBOLS_ZIP): .KATI_IMPLICIT_OUTPUTS := $(SYMBOLS_MAPPING)
+$(call declare-1p-container,$(SYMBOLS_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(call declare-container-license-deps,$(SYMBOLS_ZIP),$(INTERNAL_ALLIMAGES_FILES) $(updater_dep),$(PRODUCT_OUT)/:/)
+endif
+
# -----------------------------------------------------------------
# A zip of the coverage directory.
#
@@ -6178,7 +6301,9 @@
$(hide) $(SOONG_ZIP) -d -o $@ -C $(TARGET_OUT_COVERAGE) -l $(PRIVATE_LIST_FILE)
$(call declare-1p-container,$(COVERAGE_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
$(call declare-container-license-deps,$(COVERAGE_ZIP),$(INTERNAL_ALLIMAGE_FILES),$(PRODUCT_OUT)/:/)
+endif
SYSTEM_NOTICE_DEPS += $(COVERAGE_ZIP)
@@ -6280,6 +6405,11 @@
$(SYMBOLS_MAP) -merge $(PROGUARD_DICT_MAPPING) -strip_prefix $(PRIVATE_PACKAGING_DIR)/ -ignore_missing_files @$(PRIVATE_LIST_FILE)
$(PROGUARD_DICT_ZIP): .KATI_IMPLICIT_OUTPUTS := $(PROGUARD_DICT_MAPPING)
+$(call declare-1p-container,$(PROGUARD_DICT_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(call declare-container-license-deps,$(PROGUARD_DICT_ZIP),$(INTERNAL_ALLIMAGES_FILES) $(updater_dep),$(PRODUCT_OUT)/:/)
+endif
+
#------------------------------------------------------------------
# A zip of Proguard usage files.
#
@@ -6309,6 +6439,23 @@
find $(PRIVATE_PACKAGING_DIR) -name proguard_usage.zip > $(PRIVATE_LIST_FILE)
$(MERGE_ZIPS) $@ @$(PRIVATE_LIST_FILE)
+$(call declare-1p-container,$(PROGUARD_USAGE_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(call declare-container-license-deps,$(PROGUARD_USAGE_ZIP),$(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(INSTALLED_INIT_BOOT_IMAGE_TARGET) \
+ $(INSTALLED_USERDATAIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
+ $(INSTALLED_ODMIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
+ $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+ $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) \
+ $(updater_dep),$(PROGUARD_USAGE_ZIP):/)
+endif
+
ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
# Dump variables used by build_super_image.py (for building super.img and super_empty.img).
@@ -6454,6 +6601,9 @@
--additional IMAGES/VerifiedBootParams.textproto:VerifiedBootParams.textproto \
$(BUILT_TARGET_FILES_PACKAGE) $@
+$(call declare-1p-container,$(INTERNAL_UPDATE_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_UPDATE_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(IMG_FROM_TARGET_FILES),$(PRODUCT_OUT)/:/)
+
.PHONY: updatepackage
updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
$(call dist-for-goals,updatepackage,$(INTERNAL_UPDATE_PACKAGE_TARGET))
@@ -6625,8 +6775,6 @@
# if we don't have a real list, then use "everything"
ifeq ($(strip $(ATREE_FILES)),)
ATREE_FILES := \
- $(ALL_DEFAULT_INSTALLED_MODULES) \
- $(INSTALLED_RAMDISK_TARGET) \
$(ALL_DOCS) \
$(ALL_SDK_FILES)
endif
@@ -6653,21 +6801,9 @@
include $(BUILD_SYSTEM)/sdk_font.mk
deps := \
- $(target_notice_file_txt) \
$(OUT_DOCS)/offline-sdk-timestamp \
$(SDK_METADATA_FILES) \
- $(SYMBOLS_ZIP) \
- $(COVERAGE_ZIP) \
- $(APPCOMPAT_ZIP) \
- $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_QEMU_SYSTEMIMAGE) \
- $(INSTALLED_QEMU_RAMDISKIMAGE) \
- $(INSTALLED_QEMU_VENDORIMAGE) \
- $(QEMU_VERIFIED_BOOT_PARAMS) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_SDK_BUILD_PROP_TARGET) \
- $(INSTALLED_BUILD_PROP_TARGET) \
+ $(INSTALLED_SDK_BUILD_PROP_TARGET) \
$(ATREE_FILES) \
$(sdk_atree_files) \
$(HOST_OUT_EXECUTABLES)/atree \
@@ -6713,8 +6849,6 @@
-v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
-v "FONT_OUT=$(SDK_FONT_TEMP)" \
-o $(PRIVATE_DIR) && \
- cp -f $(target_notice_file_txt) \
- $(PRIVATE_DIR)/system-images/android-$(PLATFORM_VERSION)/$(TARGET_CPU_ABI)/NOTICE.txt && \
HOST_OUT_EXECUTABLES=$(HOST_OUT_EXECUTABLES) HOST_OS=$(HOST_OS) \
development/build/tools/sdk_clean.sh $(PRIVATE_DIR) && \
chmod -R ug+rwX $(PRIVATE_DIR) && \
diff --git a/core/OWNERS b/core/OWNERS
index 8794434..980186c 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,6 +1,6 @@
-per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,skvadrik@google.com
-per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,skvadrik@google.com
+per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
+per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
# For version updates
-per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
+per-file version_defaults.mk = aseaton@google.com,lubomir@google.com,pscovanner@google.com,bkhalife@google.com,jainne@google.com
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 254e09b..ff49262 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -87,13 +87,23 @@
endif
endif
+# TODO: Replace this hardcoded list of optional uses-libraries with build logic
+# that propagates optionality via the generated exported-sdk-libs files.
+# Hardcoding doesn't scale and enforces a single choice on each library, while in
+# reality this is a choice of the library's users (which may differ).
+my_optional_sdk_lib_names := \
+ android.test.base \
+ android.test.mock \
+ androidx.window.extensions \
+ androidx.window.sidecar
+
$(fixed_android_manifest): PRIVATE_MANIFEST_FIXER_FLAGS := $(my_manifest_fixer_flags)
# These libs are added as optional dependencies (<uses-library> with
# android:required set to false). This is because they haven't existed in pre-P
# devices, but classes in them were in bootclasspath jars, etc. So making them
# hard dependencies (android:required=true) would prevent apps from being
# installed to such legacy devices.
-$(fixed_android_manifest): PRIVATE_OPTIONAL_SDK_LIB_NAMES := android.test.base android.test.mock
+$(fixed_android_manifest): PRIVATE_OPTIONAL_SDK_LIB_NAMES := $(my_optional_sdk_lib_names)
$(fixed_android_manifest): $(MANIFEST_FIXER)
$(fixed_android_manifest): $(main_android_manifest)
echo $(PRIVATE_OPTIONAL_SDK_LIB_NAMES) | tr ' ' '\n' > $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
@@ -109,3 +119,5 @@
) \
$< $@
rm $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
+
+my_optional_sdk_lib_names :=
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 6c32da4..e229e6b 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -43,13 +43,13 @@
ifneq (,$(MODULE_BUILD_FROM_SOURCE))
# Keep an explicit setting.
-else ifeq (,$(filter sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
+else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
# Prebuilt module SDKs require prebuilt modules to work, and currently
# prebuilt modules are only provided for com.google.android.xxx. If we can't
# find one of them in PRODUCT_PACKAGES then assume com.android.xxx are in use,
# and disable prebuilt SDKs. In particular this applies to AOSP builds.
#
- # However, sdk/win_sdk/sdk_addon builds might not include com.google.android.xxx
+ # However, docs/sdk/win_sdk/sdk_addon builds might not include com.google.android.xxx
# packages, so for those we respect the default behavior.
MODULE_BUILD_FROM_SOURCE := true
else ifneq (,$(PRODUCT_MODULE_BUILD_FROM_SOURCE))
@@ -71,6 +71,17 @@
$(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
+# Ensure that mainline modules that have individually toggleable prebuilts
+# are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
+# default.
+INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
+ permission \
+ wifi \
+
+$(foreach m, $(INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES),\
+ $(if $(call soong_config_get,$(m)_module,source_build),,\
+ $(call soong_config_set,$(m)_module,source_build,$(MODULE_BUILD_FROM_SOURCE))))
+
# Apex build mode variables
ifdef APEX_BUILD_FOR_PRE_S_DEVICES
$(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static)
@@ -88,6 +99,7 @@
# TODO(b/203088572): Remove when Java optimizations enabled by default for
# SystemUI.
$(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
-# TODO(b/196084106): Remove when Java optimizations enabled by default for
-# system packages.
+# Enable by default unless explicitly set or overridden.
+# See frameworks/base/services/Android.bp for additional notes on side effects.
+SYSTEM_OPTIMIZE_JAVA ?= true
$(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
diff --git a/core/artifact_path_requirements.mk b/core/artifact_path_requirements.mk
index ceaefa2..566b9f7 100644
--- a/core/artifact_path_requirements.mk
+++ b/core/artifact_path_requirements.mk
@@ -22,6 +22,10 @@
$(TARGET_OUT_SYSTEM_OTHER)/%.art
endif
+ifneq (,$(filter-out true false relaxed strict,$(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS))$(filter-out 1 0,$(words $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS))))
+ $(error PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS must be one of [true, false, relaxed, strict], found: $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS))
+endif
+
all_offending_files :=
$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
$(eval requirements := $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENTS)) \
@@ -46,7 +50,7 @@
$(eval allowed_patterns := $(call resolve-product-relative-paths,$(allowed))) \
$(eval offending_files := $(filter-out $(allowed_patterns),$(files_in_requirement))) \
$(eval enforcement := $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS)) \
- $(if $(enforcement),\
+ $(if $(filter-out false,$(enforcement)),\
$(call maybe-print-list-and-error,$(offending_files),\
$(INTERNAL_PRODUCT) produces files inside $(makefile)s artifact path requirement. \
$(PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT)) \
diff --git a/core/base_rules.mk b/core/base_rules.mk
index e6b8f20..355a22e 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -20,7 +20,11 @@
# Users can define base-rules-hook in their buildspec.mk to perform
# arbitrary operations as each module is included.
ifdef base-rules-hook
-$(if $(base-rules-hook),)
+ ifndef _has_warned_about_base_rules_hook
+ $(warning base-rules-hook is deprecated, please remove usages of it and/or convert to Soong.)
+ _has_warned_about_base_rules_hook := true
+ endif
+ $(if $(base-rules-hook),)
endif
###########################################################
@@ -590,10 +594,18 @@
my_init_rc := $(foreach rc,$(LOCAL_INIT_RC_$(my_32_64_bit_suffix)) $(LOCAL_INIT_RC),$(LOCAL_PATH)/$(rc))
endif
ifneq ($(strip $(my_init_rc)),)
- # Make doesn't support recovery as an output partition, but some Soong modules installed in recovery
- # have init.rc files that need to be installed alongside them. Manually handle the case where the
- # output file is in the recovery partition.
- my_init_rc_path := $(if $(filter $(TARGET_RECOVERY_ROOT_OUT)/%,$(my_module_path)),$(TARGET_RECOVERY_ROOT_OUT)/system/etc,$(TARGET_OUT$(partition_tag)_ETC))
+ # Make doesn't support recovery or ramdisk as an output partition,
+ # but some Soong modules installed in recovery or ramdisk
+ # have init.rc files that need to be installed alongside them.
+ # Manually handle the case where the
+ # output file is in the recovery or ramdisk partition.
+ ifneq (,$(filter $(TARGET_RECOVERY_ROOT_OUT)/%,$(my_module_path)))
+ my_init_rc_path := $(TARGET_RECOVERY_ROOT_OUT)/system/etc
+ else ifneq (,$(filter $(TARGET_RAMDISK_OUT)/%,$(my_module_path)))
+ my_init_rc_path := $(TARGET_RAMDISK_OUT)/system/etc
+ else
+ my_init_rc_path := $(TARGET_OUT$(partition_tag)_ETC)
+ endif
my_init_rc_pairs := $(foreach rc,$(my_init_rc),$(rc):$(my_init_rc_path)/init/$(notdir $(rc)))
my_init_rc_installed := $(foreach rc,$(my_init_rc_pairs),$(call word-colon,2,$(rc)))
@@ -713,6 +725,7 @@
endif
ifdef LOCAL_MULTILIB
multi_arch := true
+# These conditionals allow this functionality to be mimicked in Soong
else ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
multi_arch := true
diff --git a/core/binary.mk b/core/binary.mk
index 665270e..3f32fa9 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1506,7 +1506,7 @@
ifeq (,$(strip $(call find_warning_allowed_projects,$(LOCAL_PATH))))
my_cflags := -Wall -Werror $(my_cflags)
else
- $(eval MODULES_ADDED_WALL := $(MODULES_ADDED_WALL) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
+    $(eval MODULES_WARNINGS_ALLOWED := $(MODULES_WARNINGS_ALLOWED) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
my_cflags := -Wall $(my_cflags)
endif
endif
diff --git a/core/board_config.mk b/core/board_config.mk
index dc50a68..d280349 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -174,6 +174,7 @@
_build_broken_var_list := \
+ BUILD_BROKEN_DEPFILE \
BUILD_BROKEN_DUP_RULES \
BUILD_BROKEN_DUP_SYSPROP \
BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES \
@@ -234,10 +235,7 @@
.KATI_READONLY := TARGET_DEVICE_DIR
endif
-# TODO(colefaust) change this if to RBC_PRODUCT_CONFIG when
-# the board configuration is known to work on everything
-# the product config works on.
-ifndef RBC_BOARD_CONFIG
+ifndef RBC_PRODUCT_CONFIG
include $(board_config_mk)
else
$(shell mkdir -p $(OUT_DIR)/rbc)
diff --git a/core/config.mk b/core/config.mk
index 205a2fb..c0dea95 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -155,8 +155,8 @@
$(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported)
$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead)
-$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead)
-$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead)
+$(KATI_obsolete_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead)
+$(KATI_obsolete_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead)
$(KATI_obsolete_var TARGET_NO_VENDOR_BOOT,Use PRODUCT_BUILD_VENDOR_BOOT_IMAGE instead)
$(KATI_obsolete_var PRODUCT_CHECK_ELF_FILES,Use BUILD_BROKEN_PREBUILT_ELF_FILES instead)
$(KATI_obsolete_var ALL_GENERATED_SOURCES,ALL_GENERATED_SOURCES is no longer used)
@@ -226,8 +226,6 @@
BUILD_FUZZ_TEST :=$= $(BUILD_SYSTEM)/fuzz_test.mk
BUILD_NOTICE_FILE :=$= $(BUILD_SYSTEM)/notice_files.mk
-BUILD_HOST_DALVIK_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_java_library.mk
-BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_static_java_library.mk
include $(BUILD_SYSTEM)/deprecation.mk
@@ -861,6 +859,7 @@
30.0 \
31.0 \
32.0 \
+ 33.0 \
.KATI_READONLY := \
PLATFORM_SEPOLICY_COMPAT_VERSIONS \
@@ -1233,10 +1232,27 @@
endif
.KATI_READONLY := GOMA_POOL RBE_POOL GOMA_OR_RBE_POOL
+JAVAC_NINJA_POOL :=
+R8_NINJA_POOL :=
+D8_NINJA_POOL :=
+
+ifneq ($(filter-out false,$(USE_RBE)),)
+ ifdef RBE_JAVAC
+ JAVAC_NINJA_POOL := $(RBE_POOL)
+ endif
+ ifdef RBE_R8
+ R8_NINJA_POOL := $(RBE_POOL)
+ endif
+ ifdef RBE_D8
+ D8_NINJA_POOL := $(RBE_POOL)
+ endif
+endif
+
+.KATI_READONLY := JAVAC_NINJA_POOL R8_NINJA_POOL D8_NINJA_POOL
+
# These goals don't need to collect and include Android.mks/CleanSpec.mks
# in the source tree.
-dont_bother_goals := out \
- product-graph dump-products
+dont_bother_goals := out product-graph
# Make ANDROID Soong config variables visible to Android.mk files, for
# consistency with those defined in BoardConfig.mk files.
diff --git a/core/definitions.mk b/core/definitions.mk
index 0d72473..a9d5733 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -41,6 +41,9 @@
ALL_NON_MODULES:=
NON_MODULES_WITHOUT_LICENSE_METADATA:=
+# List of copied targets that need license metadata copied.
+ALL_COPIED_TARGETS:=
+
# Full paths to targets that should be added to the "make droid"
# set of installed targets.
ALL_DEFAULT_INSTALLED_MODULES:=
@@ -570,16 +573,34 @@
$(call generated-sources-dir-for,META,lic,)
endef
+TARGETS_MISSING_LICENSE_METADATA:=
+
###########################################################
# License metadata targets corresponding to targets in $(1)
###########################################################
define corresponding-license-metadata
-$(strip $(foreach target, $(sort $(1)), \
+$(strip $(filter-out 0p,$(foreach target, $(sort $(1)), \
$(if $(strip $(ALL_MODULES.$(target).META_LIC)), \
$(ALL_MODULES.$(target).META_LIC), \
$(if $(strip $(ALL_TARGETS.$(target).META_LIC)), \
$(ALL_TARGETS.$(target).META_LIC), \
- $(call append-path,$(call license-metadata-dir),$(patsubst $(OUT_DIR)%,out%,$(target).meta_lic))))))
+ $(eval TARGETS_MISSING_LICENSE_METADATA += $(target)) \
+ ) \
+ ) \
+)))
+endef
+
+###########################################################
+## Record a target $(1) copied from another target(s) $(2) that will need
+## license metadata.
+###########################################################
+define declare-copy-target-license-metadata
+$(strip $(if $(filter $(OUT_DIR)%,$(2)),$(eval _dir:=$(call license-metadata-dir))\
+ $(eval _tgt:=$(strip $(1)))\
+ $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic)))\
+ $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
+ $(eval ALL_COPIED_TARGETS += $(_tgt)),\
+ $(eval ALL_TARGETS.$(1).META_LIC:=$(module_license_metadata))))
endef
###########################################################
@@ -661,13 +682,6 @@
$(strip $(eval _notices := $(sort $(ALL_NON_MODULES.$(_tgt).NOTICES))))
$(strip $(eval _path := $(sort $(ALL_NON_MODULES.$(_tgt).PATH))))
$(strip $(eval _install_map := $(ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS)))
-$(strip $(eval \
- $$(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
- $$(if $$(strip $$(ALL_TARGETS.$$(d).META_LIC)), \
- , \
- $$(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $$(d))) \
- )) \
-)
$(_meta): PRIVATE_KINDS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_KINDS))
$(_meta): PRIVATE_CONDITIONS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS))
@@ -705,6 +719,60 @@
endef
###########################################################
+## Record missing dependencies for non-module target $(1)
+###########################################################
+define record-missing-non-module-dependencies
+$(strip $(eval _tgt := $(strip $(1))))
+$(strip $(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
+ $(if $(strip $(ALL_TARGETS.$(d).META_LIC)), \
+ , \
+ $(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $(d))) \
+))
+endef
+
+###########################################################
+## License metadata build rule for copied target $(1)
+###########################################################
+define copied-target-license-metadata-rule
+$(if $(strip $(ALL_TARGETS.$(1).META_LIC)),,$(call _copied-target-license-metadata-rule,$(1)))
+endef
+
+define _copied-target-license-metadata-rule
+$(strip $(eval _dir := $(call license-metadata-dir)))
+$(strip $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(1).meta_lic))))
+$(strip $(eval ALL_TARGETS.$(1).META_LIC:=$(_meta)))
+$(strip $(eval _dep:=))
+$(strip $(foreach s,$(ALL_COPIED_TARGETS.$(1).SOURCES),\
+ $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
+ $(if $(filter 0p,$(_dmeta)),\
+ $(if $(filter-out 0p,$(_dep)),,$(eval ALL_TARGETS.$(1).META_LIC:=0p)),\
+ $(if $(_dep),\
+ $(if $(filter-out $(_dep),$(_dmeta)),$(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta))),
+ $(eval _dep:=$(_dmeta))))))
+$(strip $(if $(strip $(_dep)),,$(error cannot copy target from unknown module: $(1) from $(ALL_COPIED_TARGETS.$(1).SOURCES))))
+
+ifneq (0p,$(ALL_TARGETS.$(1).META_LIC))
+$(_meta): PRIVATE_DEST_TARGET := $(1)
+$(_meta): PRIVATE_SOURCE_TARGETS := $(ALL_COPIED_TARGETS.$(1).SOURCES)
+$(_meta): PRIVATE_SOURCE_METADATA := $(_dep)
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,PACKAGING,copynotice)/$(_meta)/arguments
+$(_meta) : $(_dep) $(COPY_LICENSE_METADATA)
+ rm -f $$@
+ mkdir -p $$(dir $$@)
+ mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+ $$(call dump-words-to-file,\
+ $$(addprefix -i ,$$(PRIVATE_DEST_TARGET))\
+ $$(addprefix -s ,$$(PRIVATE_SOURCE_TARGETS))\
+ $$(addprefix -d ,$$(PRIVATE_SOURCE_METADATA)),\
+ $$(PRIVATE_ARGUMENT_FILE))
+ OUT_DIR=$(OUT_DIR) $(COPY_LICENSE_METADATA) \
+ @$$(PRIVATE_ARGUMENT_FILE) \
+ -o $$@
+
+endif
+endef
+
+###########################################################
## Declare the license metadata for non-module target $(1).
##
## $(2) -- license kinds e.g. SPDX-license-identifier-Apache-2.0
@@ -717,6 +785,7 @@
$(strip \
$(eval _tgt := $(subst //,/,$(strip $(1)))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_KINDS := $(strip $(2))) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS := $(strip $(3))) \
$(eval ALL_NON_MODULES.$(_tgt).NOTICES := $(strip $(4))) \
@@ -757,6 +826,7 @@
$(strip \
$(eval _tgt := $(subst //,/,$(strip $(1)))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_KINDS := $(strip $(2))) \
$(eval ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS := $(strip $(3))) \
$(eval ALL_NON_MODULES.$(_tgt).NOTICES := $(strip $(4))) \
@@ -829,6 +899,7 @@
$(strip \
$(eval _tgt := $(strip $(1))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).DEPENDENCIES := $(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES) $(2))) \
)
endef
@@ -845,6 +916,7 @@
$(strip \
$(eval _tgt := $(strip $(1))) \
$(eval ALL_NON_MODULES += $(_tgt)) \
+ $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir)/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
$(eval ALL_NON_MODULES.$(_tgt).DEPENDENCIES := $(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES) $(2))) \
$(eval ALL_NON_MODULES.$(_tgt).IS_CONTAINER := true) \
$(eval ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS := $(strip $(ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS) $(3))) \
@@ -856,12 +928,14 @@
###########################################################
define report-missing-licenses-rule
.PHONY: reportmissinglicenses
-reportmissinglicenses: PRIVATE_NON_MODULES:=$(sort $(NON_MODULES_WITHOUT_LICENSE_METADATA))
-reportmissinglicenses: PRIVATE_COPIED_FILES:=$(sort $(filter $(NON_MODULES_WITHOUT_LICENSE_METADATA),$(foreach _pair,$(PRODUCT_COPY_FILES), $(PRODUCT_OUT)/$(call word-colon,2,$(_pair)))))
+reportmissinglicenses: PRIVATE_NON_MODULES:=$(sort $(NON_MODULES_WITHOUT_LICENSE_METADATA) $(TARGETS_MISSING_LICENSE_METADATA))
+reportmissinglicenses: PRIVATE_COPIED_FILES:=$(sort $(filter $(NON_MODULES_WITHOUT_LICENSE_METADATA) $(TARGETS_MISSING_LICENSE_METADATA),\
+ $(foreach _pair,$(PRODUCT_COPY_FILES), $(PRODUCT_OUT)/$(call word-colon,2,$(_pair)))))
reportmissinglicenses:
@echo Reporting $$(words $$(PRIVATE_NON_MODULES)) targets without license metadata
$$(foreach t,$$(PRIVATE_NON_MODULES),if ! [ -h $$(t) ]; then echo No license metadata for $$(t) >&2; fi;)
$$(foreach t,$$(PRIVATE_COPIED_FILES),if ! [ -h $$(t) ]; then echo No license metadata for copied file $$(t) >&2; fi;)
+ echo $$(words $$(PRIVATE_NON_MODULES)) targets missing license metadata >&2
endef
@@ -914,13 +988,9 @@
$(foreach t,$(sort $(ALL_0P_TARGETS)), \
$(eval ALL_TARGETS.$(t).META_LIC := 0p) \
) \
- $(foreach t,$(sort $(ALL_NON_MODULES)), \
- $(eval ALL_TARGETS.$(t).META_LIC := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(t).meta_lic))) \
- ) \
$(foreach t,$(sort $(ALL_NON_MODULES)),$(eval $(call non-module-license-metadata-rule,$(t)))) \
$(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m)))) \
- $(eval $(call report-missing-licenses-rule)) \
- $(eval $(call report-all-notice-library-names-rule)) \
+ $(foreach t,$(sort $(ALL_COPIED_TARGETS)),$(eval $(call copied-target-license-metadata-rule,$(t)))) \
$(eval $(call build-all-license-metadata-rule)))
endef
@@ -992,6 +1062,22 @@
)
endef
+# Uses LOCAL_MODULE_CLASS, LOCAL_MODULE, and LOCAL_IS_HOST_MODULE
+# to determine the intermediates directory.
+#
+# $(1): if non-empty, force the intermediates to be COMMON
+# $(2): if non-empty, force the intermediates to be for the 2nd arch
+# $(3): if non-empty, force the intermediates to be for the host cross os
+define local-meta-intermediates-dir
+$(strip \
+ $(if $(strip $(LOCAL_MODULE_CLASS)),, \
+ $(error $(LOCAL_PATH): LOCAL_MODULE_CLASS not defined before call to local-meta-intermediates-dir)) \
+ $(if $(strip $(LOCAL_MODULE)),, \
+ $(error $(LOCAL_PATH): LOCAL_MODULE not defined before call to local-meta-intermediates-dir)) \
+ $(call intermediates-dir-for,META$(LOCAL_MODULE_CLASS),$(LOCAL_MODULE),$(if $(strip $(LOCAL_IS_HOST_MODULE)),HOST),$(1),$(2),$(3)) \
+)
+endef
+
###########################################################
## The generated sources directory. Placing generated
## source files directly in the intermediates directory
@@ -2411,7 +2497,47 @@
@$(call emit-line,$(wordlist 38001,38500,$(1)),$(2))
@$(call emit-line,$(wordlist 38501,39000,$(1)),$(2))
@$(call emit-line,$(wordlist 39001,39500,$(1)),$(2))
- @$(if $(wordlist 39501,39502,$(1)),$(error Too many words ($(words $(1)))))
+ @$(call emit-line,$(wordlist 39501,40000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 40001,40500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 40501,41000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 41001,41500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 41501,42000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 42001,42500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 42501,43000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 43001,43500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 43501,44000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 44001,44500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 44501,45000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 45001,45500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 45501,46000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 46001,46500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 46501,47000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 47001,47500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 47501,48000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 48001,48500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 48501,49000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 49001,49500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 49501,50000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 50001,50500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 50501,51000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 51001,51500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 51501,52000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 52001,52500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 52501,53000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 53001,53500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 53501,54000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 54001,54500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 54501,55000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 55001,55500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 55501,56000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 56001,56500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 56501,57000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 57001,57500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 57501,58000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 58001,58500,$(1)),$(2))
+ @$(call emit-line,$(wordlist 58501,59000,$(1)),$(2))
+ @$(call emit-line,$(wordlist 59001,59500,$(1)),$(2))
+ @$(if $(wordlist 59501,59502,$(1)),$(error Too many words ($(words $(1)))))
endef
# Return jar arguments to compress files in a given directory
# $(1): directory
@@ -2609,7 +2735,7 @@
@mkdir -p $(dir $@)tmp
$(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
$(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
-$(hide) $(D8_WRAPPER) $(DX_COMMAND) $(DEX_FLAGS) \
+$(hide) $(D8_WRAPPER) $(D8_COMMAND) \
--output $(dir $@)tmp \
$(addprefix --lib ,$(PRIVATE_D8_LIBS)) \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
@@ -2898,7 +3024,7 @@
# $(2): destination file
define copy-init-script-file-checked
ifdef TARGET_BUILD_UNBUNDLED
-# TODO (b/185624993): Remove the chck on TARGET_BUILD_UNBUNDLED when host_init_verifier can run
+# TODO (b/185624993): Remove the check on TARGET_BUILD_UNBUNDLED when host_init_verifier can run
# without requiring the HIDL interface map.
$(2): $(1)
else ifneq ($(HOST_OS),darwin)
@@ -3213,7 +3339,7 @@
define transform-jar-to-dex-r8
@echo R8: $@
$(hide) rm -f $(PRIVATE_PROGUARD_DICTIONARY)
-$(hide) $(R8_WRAPPER) $(R8_COMPAT_PROGUARD) $(DEX_FLAGS) \
+$(hide) $(R8_WRAPPER) $(R8_COMMAND) \
-injars '$<' \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
--no-data-resources \
@@ -3360,8 +3486,6 @@
STATIC_TEST_LIBRARY \
HOST_STATIC_TEST_LIBRARY \
NOTICE_FILE \
- HOST_DALVIK_JAVA_LIBRARY \
- HOST_DALVIK_STATIC_JAVA_LIBRARY \
base_rules \
HEADER_LIBRARY \
HOST_TEST_CONFIG \
@@ -3404,11 +3528,11 @@
define create-suite-dependencies
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval $(if $(strip $(module_license_metadata)),\
- $$(foreach f,$$(my_compat_dist_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
+ $$(foreach f,$$(my_compat_dist_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
$$(eval my_test_data += $$(foreach f,$$(my_compat_dist_$(suite)), $$(call word-colon,2,$$(f)))) \
)) \
$(eval $(if $(strip $(module_license_metadata)),\
- $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
+ $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
$$(eval my_test_config += $$(foreach f,$$(my_compat_dist_config_$(suite)), $$(call word-colon,2,$$(f)))) \
)) \
$(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,\
diff --git a/core/deprecation.mk b/core/deprecation.mk
index 2b7a869..ed4215e 100644
--- a/core/deprecation.mk
+++ b/core/deprecation.mk
@@ -3,8 +3,6 @@
BUILD_EXECUTABLE \
BUILD_FUZZ_TEST \
BUILD_HEADER_LIBRARY \
- BUILD_HOST_DALVIK_JAVA_LIBRARY \
- BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY \
BUILD_HOST_JAVA_LIBRARY \
BUILD_HOST_PREBUILT \
BUILD_JAVA_LIBRARY \
@@ -39,6 +37,8 @@
OBSOLETE_BUILD_MODULE_TYPES :=$= \
BUILD_AUX_EXECUTABLE \
BUILD_AUX_STATIC_LIBRARY \
+ BUILD_HOST_DALVIK_JAVA_LIBRARY \
+ BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY \
BUILD_HOST_FUZZ_TEST \
BUILD_HOST_NATIVE_TEST \
BUILD_HOST_SHARED_TEST_LIBRARY \
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index d5293cf..c11b7f4 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -96,7 +96,6 @@
$(call add_json_list, DisablePreoptModules, $(DEXPREOPT_DISABLED_MODULES))
$(call add_json_bool, OnlyPreoptBootImageAndSystemServer, $(filter true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY)))
$(call add_json_bool, PreoptWithUpdatableBcp, $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)))
- $(call add_json_bool, UseArtImage, $(filter true,$(DEXPREOPT_USE_ART_IMAGE)))
$(call add_json_bool, DontUncompressPrivAppsDex, $(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS)))
$(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
$(call add_json_bool, HasSystemOther, $(BOARD_USES_SYSTEM_OTHER_ODEX))
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index ea50313..216168b 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -272,11 +272,13 @@
my_dexpreopt_images_deps :=
my_dexpreopt_image_locations_on_host :=
my_dexpreopt_image_locations_on_device :=
+# Infix can be 'boot' or 'art'. Soong creates a set of variables for Make, one
+# for each boot image (primary and the framework extension). The only reason why
+# the primary image is exposed to Make is testing (art gtests) and benchmarking
+# (art golem benchmarks). Install rules that use those variables are in
+# dex_preopt_libart.mk. Here for dexpreopt purposes the infix is always 'boot'.
my_dexpreopt_infix := boot
my_create_dexpreopt_config :=
-ifeq (true, $(DEXPREOPT_USE_ART_IMAGE))
- my_dexpreopt_infix := art
-endif
ifdef LOCAL_DEX_PREOPT
ifeq (,$(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
diff --git a/core/distdir.mk b/core/distdir.mk
index aad8ff3..bce8e7f 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -45,6 +45,140 @@
$(eval _all_dist_goal_output_pairs += $$(goal):$$(dst))))
endef
+.PHONY: shareprojects
+
+define __share-projects-rule
+$(1) : PRIVATE_TARGETS := $(2)
+$(1): $(2) $(COMPLIANCE_LISTSHARE)
+ $(hide) rm -f $$@
+ mkdir -p $$(dir $$@)
+ $$(if $$(strip $$(PRIVATE_TARGETS)),OUT_DIR=$(OUT_DIR) $(COMPLIANCE_LISTSHARE) -o $$@ $$(PRIVATE_TARGETS),touch $$@)
+endef
+
+# build list of projects to share in $(1) for meta_lic in $(2)
+#
+# $(1): the intermediate project sharing file
+# $(2): the license metadata to base the sharing on
+define _share-projects-rule
+$(eval $(call __share-projects-rule,$(1),$(2)))
+endef
+
+.PHONY: alllicensetexts
+
+define __license-texts-rule
+$(2) : PRIVATE_GOAL := $(1)
+$(2) : PRIVATE_TARGETS := $(3)
+$(2) : PRIVATE_ROOTS := $(4)
+$(2) : PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,licensetexts)/$(2)/arguments
+$(2): $(3) $(TEXTNOTICE)
+ $(hide) rm -f $$@
+ mkdir -p $$(dir $$@)
+ mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+ $$(if $$(strip $$(PRIVATE_TARGETS)),$$(call dump-words-to-file,\
+ -product="$$(PRIVATE_GOAL)" -title="$$(PRIVATE_GOAL)" \
+ $$(addprefix -strip_prefix ,$$(PRIVATE_ROOTS)) \
+ -strip_prefix=$(PRODUCT_OUT)/ -strip_prefix=$(HOST_OUT)/\
+ $$(PRIVATE_TARGETS),\
+ $$(PRIVATE_ARGUMENT_FILE)))
+ $$(if $$(strip $$(PRIVATE_TARGETS)),OUT_DIR=$(OUT_DIR) $(TEXTNOTICE) -o $$@ @$$(PRIVATE_ARGUMENT_FILE),touch $$@)
+endef
+
+# build list of projects to share in $(2) for meta_lic in $(3) for dist goals $(1)
+# Strip `out/dist/` used as proxy for 'DIST_DIR'
+#
+# $(1): the name of the dist goals
+# $(2): the intermediate project sharing file
+# $(3): the license metadata to base the sharing on
+define _license-texts-rule
+$(eval $(call __license-texts-rule,$(1),$(2),$(3),out/dist/))
+endef
+
+###########################################################
+## License metadata build rule for dist target $(1) with meta_lic $(2) copied from $(3)
+###########################################################
+define _dist-target-license-metadata-rule
+$(strip $(eval _meta :=$(2)))
+$(strip $(eval _dep:=))
+# 0p is the indicator for a non-copyrightable file where no party owns the copyright.
+# i.e. pure data with no copyrightable expression.
+# If all of the sources are 0p and only 0p, treat the copied file as 0p. Otherwise, all
+# of the sources must either be 0p or originate from a single metadata file to copy.
+$(strip $(foreach s,$(strip $(3)),\
+ $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
+ $(if $(strip $(_dmeta)),\
+ $(if $(filter-out 0p,$(_dep)),\
+ $(if $(filter-out $(_dep) 0p,$(_dmeta)),\
+ $(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta)),\
+ $(if $(filter 0p,$(_dep)),$(eval _dep:=$(_dmeta)))),\
+ $(eval _dep:=$(_dmeta))\
+ ),\
+ $(eval TARGETS_MISSING_LICENSE_METADATA += $(s) $(1)))))
+
+
+ifeq (0p,$(strip $(_dep)))
+# Not copyrightable. No emcumbrances, no license text, no license kind etc.
+$(_meta): PRIVATE_CONDITIONS := unencumbered
+$(_meta): PRIVATE_SOURCES := $(3)
+$(_meta): PRIVATE_INSTALLED := $(1)
+# use `$(1)` which is the unique and relatively short `out/dist/$(target)`
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,notice)/$(1)/arguments
+$(_meta): $(BUILD_LICENSE_METADATA)
+$(_meta) :
+ rm -f $$@
+ mkdir -p $$(dir $$@)
+ mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+ $$(call dump-words-to-file,\
+ $$(addprefix -c ,$$(PRIVATE_CONDITIONS))\
+ $$(addprefix -s ,$$(PRIVATE_SOURCES))\
+ $$(addprefix -t ,$$(PRIVATE_TARGETS))\
+ $$(addprefix -i ,$$(PRIVATE_INSTALLED)),\
+ $$(PRIVATE_ARGUMENT_FILE))
+ OUT_DIR=$(OUT_DIR) $(BUILD_LICENSE_METADATA) \
+ @$$(PRIVATE_ARGUMENT_FILE) \
+ -o $$@
+
+else ifneq (,$(strip $(_dep)))
+# Not a missing target, copy metadata and `is_container` etc. from license metadata file `$(_dep)`
+$(_meta): PRIVATE_DEST_TARGET := $(1)
+$(_meta): PRIVATE_SOURCE_TARGETS := $(3)
+$(_meta): PRIVATE_SOURCE_METADATA := $(_dep)
+# use `$(1)` which is the unique and relatively short `out/dist/$(target)`
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,copynotice)/$(1)/arguments
+$(_meta) : $(_dep) $(COPY_LICENSE_METADATA)
+ rm -f $$@
+ mkdir -p $$(dir $$@)
+ mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+ $$(call dump-words-to-file,\
+ $$(addprefix -i ,$$(PRIVATE_DEST_TARGET))\
+ $$(addprefix -s ,$$(PRIVATE_SOURCE_TARGETS))\
+ $$(addprefix -d ,$$(PRIVATE_SOURCE_METADATA)),\
+ $$(PRIVATE_ARGUMENT_FILE))
+ OUT_DIR=$(OUT_DIR) $(COPY_LICENSE_METADATA) \
+ @$$(PRIVATE_ARGUMENT_FILE) \
+ -o $$@
+
+endif
+endef
+
+# use `out/dist/` as a proxy for 'DIST_DIR'
+define _add_projects_to_share
+$(strip $(eval _mdir := $(call intermediates-dir-for,METAPACKAGING,meta)/out/dist)) \
+$(strip $(eval _idir := $(call intermediates-dir-for,METAPACKAGING,shareprojects))) \
+$(strip $(eval _tdir := $(call intermediates-dir-for,METAPACKAGING,licensetexts))) \
+$(strip $(eval _allt := $(sort $(foreach goal,$(_all_dist_goal_output_pairs),$(call word-colon,2,$(goal)))))) \
+$(foreach target,$(_allt), \
+ $(eval _goals := $(sort $(foreach dg,$(filter %:$(target),$(_all_dist_goal_output_pairs)),$(call word-colon,1,$(dg))))) \
+ $(eval _srcs := $(sort $(foreach sdp,$(filter %:$(target),$(_all_dist_src_dst_pairs)),$(call word-colon,1,$(sdp))))) \
+ $(eval $(call _dist-target-license-metadata-rule,out/dist/$(target),$(_mdir)/out/dist/$(target).meta_lic,$(_srcs))) \
+ $(eval _f := $(_idir)/$(target).shareprojects) \
+ $(eval _n := $(_tdir)/$(target).txt) \
+ $(eval $(call dist-for-goals,$(_goals),$(_f):shareprojects/$(target).shareprojects)) \
+ $(eval $(call dist-for-goals,$(_goals),$(_n):licensetexts/$(target).txt)) \
+ $(eval $(call _share-projects-rule,$(_f),$(foreach t, $(filter-out $(TARGETS_MISSING_LICENSE_METADATA),out/dist/$(target)),$(_mdir)/$(t).meta_lic))) \
+ $(eval $(call _license-texts-rule,$(_goals),$(_n),$(foreach t,$(filter-out $(TARGETS_MISSING_LICENSE_METADATA),out/dist/$(target)),$(_mdir)/$(t).meta_lic))) \
+)
+endef
+
#------------------------------------------------------------------
# To be used at the end of the build to collect all the uses of
# dist-for-goals, and write them into a file for the packaging step to use.
@@ -52,6 +186,15 @@
# $(1): The file to write
define dist-write-file
$(strip \
+ $(call _add_projects_to_share)\
+ $(if $(strip $(ANDROID_REQUIRE_LICENSE_METADATA)),\
+ $(foreach target,$(sort $(TARGETS_MISSING_LICENSE_METADATA)),$(warning target $(target) missing license metadata))\
+ $(if $(strip $(TARGETS_MISSING_LICENSE_METADATA)),\
+ $(if $(filter true error,$(ANDROID_REQUIRE_LICENSE_METADATA)),\
+ $(error $(words $(sort $(TARGETS_MISSING_LICENSE_METADATA))) targets need license metadata))))\
+ $(foreach t,$(sort $(ALL_NON_MODULES)),$(call record-missing-non-module-dependencies,$(t))) \
+ $(eval $(call report-missing-licenses-rule)) \
+ $(eval $(call report-all-notice-library-names-rule)) \
$(KATI_obsolete_var dist-for-goals,Cannot be used after dist-write-file) \
$(foreach goal,$(sort $(_all_dist_goals)), \
$(eval $$(goal): _dist_$$(goal))) \
diff --git a/core/dumpvar.mk b/core/dumpvar.mk
index 6b5c030..6f3d14f 100644
--- a/core/dumpvar.mk
+++ b/core/dumpvar.mk
@@ -35,3 +35,7 @@
printf "'\n";)
endif # CALLED_FROM_SETUP
+
+ifneq (,$(RBC_DUMP_CONFIG_FILE))
+$(call dump-variables-rbc,$(RBC_DUMP_CONFIG_FILE))
+endif
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 049a4d6..d116aaf 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -326,20 +326,24 @@
# raw ones.
define dump-variables-rbc
$(eval _dump_variables_rbc_excluded := \
+ BUILD_NUMBER \
+ DATE \
LOCAL_PATH \
+ MAKEFILE_LIST \
+ PRODUCTS \
+ PRODUCT_COPY_OUT_% \
+ RBC_PRODUCT_CONFIG \
+ RBC_BOARD_CONFIG \
+ SOONG_% \
TOPDIR \
TRACE_BEGIN_SOONG \
- BOARD_PLAT_PUBLIC_SEPOLICY_DIR \
- BOARD_PLAT_PRIVATE_SEPOLICY_DIR \
- USER \
- SOONG_% \
- PRODUCT_COPY_OUT_%)\
-$(file >$(OUT_DIR)/dump-variables-rbc-temp.txt,$(subst $(space),$(newline),$(filter-out $(_dump_variables_rbc_excluded),$(.VARIABLES))))
+ USER)
+$(file >$(OUT_DIR)/dump-variables-rbc-temp.txt,$(subst $(space),$(newline),$(sort $(filter-out $(_dump_variables_rbc_excluded),$(.VARIABLES)))))
$(file >$(1),\
$(foreach v, $(shell grep -he "^[A-Z][A-Z0-9_]*$$" $(OUT_DIR)/dump-variables-rbc-temp.txt),\
$(v) := $(strip $($(v)))$(newline))\
-$(foreach ns,$(SOONG_CONFIG_NAMESPACES),\
-$(foreach v,$(SOONG_CONFIG_$(ns)),\
+$(foreach ns,$(sort $(SOONG_CONFIG_NAMESPACES)),\
+$(foreach v,$(sort $(SOONG_CONFIG_$(ns))),\
$$(call soong_config_set,$(ns),$(v),$(SOONG_CONFIG_$(ns)_$(v)))$(newline))))
endef
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
deleted file mode 100644
index 5eeb8ac..0000000
--- a/core/host_dalvik_java_library.mk
+++ /dev/null
@@ -1,191 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-$(call record-module-type,HOST_DALVIK_JAVA_LIBRARY)
-
-#
-# Rules for building a host dalvik java library. These libraries
-# are meant to be used by a dalvik VM instance running on the host.
-# They will be compiled against libcore and not the host JRE.
-#
-
-ifeq ($(HOST_OS),linux)
-USE_CORE_LIB_BOOTCLASSPATH := true
-
-#######################################
-include $(BUILD_SYSTEM)/host_java_library_common.mk
-#######################################
-
-full_classes_turbine_jar := $(intermediates.COMMON)/classes-turbine.jar
-full_classes_header_jarjar := $(intermediates.COMMON)/classes-header-jarjar.jar
-full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
-full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
-full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
-full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
-full_classes_jar := $(intermediates.COMMON)/classes.jar
-built_dex := $(intermediates.COMMON)/classes.dex
-java_source_list_file := $(intermediates.COMMON)/java-source-list
-
-LOCAL_INTERMEDIATE_TARGETS += \
- $(full_classes_turbine_jar) \
- $(full_classes_compiled_jar) \
- $(full_classes_combined_jar) \
- $(full_classes_jarjar_jar) \
- $(full_classes_jar) \
- $(built_dex) \
- $(java_source_list_file)
-
-# See comment in java.mk
-ifndef LOCAL_CHECKED_MODULE
-ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
-else
-LOCAL_CHECKED_MODULE := $(built_dex)
-endif
-endif
-
-#######################################
-include $(BUILD_SYSTEM)/base_rules.mk
-#######################################
-java_sources := $(addprefix $(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES))) \
- $(filter %.java,$(LOCAL_GENERATED_SOURCES))
-all_java_sources := $(java_sources)
-
-include $(BUILD_SYSTEM)/java_common.mk
-
-include $(BUILD_SYSTEM)/sdk_check.mk
-
-$(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
-
-# List of dependencies for anything that needs all java sources in place
-java_sources_deps := \
- $(java_sources) \
- $(java_resource_sources) \
- $(LOCAL_SRCJARS) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES)
-
-$(java_source_list_file): $(java_sources_deps)
- $(write-java-source-list)
-
-# TODO(b/143658984): goma can't handle the --system argument to javac.
-#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_classes_compiled_jar): PRIVATE_SRCJAR_LIST_FILE := $(intermediates.COMMON)/srcjar-list
-$(full_classes_compiled_jar): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
-$(full_classes_compiled_jar): \
- $(java_source_list_file) \
- $(java_sources_deps) \
- $(full_java_header_libs) \
- $(full_java_bootclasspath_libs) \
- $(full_java_system_modules_deps) \
- $(annotation_processor_deps) \
- $(NORMALIZE_PATH) \
- $(JAR_ARGS) \
- $(ZIPSYNC) \
- $(SOONG_ZIP) \
- | $(SOONG_JAVAC_WRAPPER)
- $(transform-host-java-to-dalvik-package)
-
-ifneq ($(TURBINE_ENABLED),false)
-
-$(full_classes_turbine_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
-$(full_classes_turbine_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_classes_turbine_jar): \
- $(java_source_list_file) \
- $(java_sources_deps) \
- $(full_java_header_libs) \
- $(full_java_bootclasspath_libs) \
- $(NORMALIZE_PATH) \
- $(JAR_ARGS) \
- $(ZIPTIME) \
- | $(TURBINE) \
- $(MERGE_ZIPS)
- $(transform-java-to-header.jar)
-
-.KATI_RESTAT: $(full_classes_turbine_jar)
-
-# Run jarjar before generate classes-header.jar if necessary.
-ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
-$(full_classes_header_jarjar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_header_jarjar): $(full_classes_turbine_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
- $(call transform-jarjar)
-else
-full_classes_header_jarjar := $(full_classes_turbine_jar)
-endif
-
-$(eval $(call copy-one-file,$(full_classes_header_jarjar),$(full_classes_header_jar)))
-
-endif # TURBINE_ENABLED != false
-
-$(full_classes_combined_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
-$(full_classes_combined_jar): $(full_classes_compiled_jar) \
- $(jar_manifest_file) \
- $(full_static_java_libs) | $(MERGE_ZIPS)
- $(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
- $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
- $(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
- $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
- $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
-
-# Run jarjar if necessary, otherwise just copy the file.
-ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
-$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
- $(call transform-jarjar)
-else
-full_classes_jarjar_jar := $(full_classes_combined_jar)
-endif
-
-$(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
-
-ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-# No dex; all we want are the .class files with resources.
-$(LOCAL_BUILT_MODULE) : $(java_resource_sources)
-$(LOCAL_BUILT_MODULE) : $(full_classes_jar)
- @echo "host Static Jar: $(PRIVATE_MODULE) ($@)"
- $(copy-file-to-target)
-
-else # !LOCAL_IS_STATIC_JAVA_LIBRARY
-$(built_dex): PRIVATE_INTERMEDIATES_DIR := $(intermediates.COMMON)
-$(built_dex): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(built_dex): $(full_classes_jar) $(DX) $(ZIP2ZIP)
- $(transform-classes.jar-to-dex)
-
-$(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
-$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
-$(LOCAL_BUILT_MODULE): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
-$(LOCAL_BUILT_MODULE): $(built_dex) $(java_resource_sources)
- @echo "Host Jar: $(PRIVATE_MODULE) ($@)"
- rm -rf $@.parts
- mkdir -p $@.parts
- $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
- $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
- $(MERGE_ZIPS) -j $@ $@.parts/dex.zip $@.parts/res.zip
- rm -rf $@.parts
-
-endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call module-target-sdk-version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SDK_VERSION := $(call module-sdk-version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MIN_SDK_VERSION := $(call codename-or-sdk-to-sdk,$(call module-min-sdk-version))
-
-USE_CORE_LIB_BOOTCLASSPATH :=
-
-endif
diff --git a/core/host_dalvik_static_java_library.mk b/core/host_dalvik_static_java_library.mk
deleted file mode 100644
index 78faf73..0000000
--- a/core/host_dalvik_static_java_library.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-$(call record-module-type,HOST_DALVIK_STATIC_JAVA_LIBRARY)
-
-#
-# Rules for building a host dalvik static java library.
-# These libraries will be compiled against libcore and not the host
-# JRE.
-#
-LOCAL_UNINSTALLABLE_MODULE := true
-LOCAL_IS_STATIC_JAVA_LIBRARY := true
-
-include $(BUILD_SYSTEM)/host_dalvik_java_library.mk
-
-LOCAL_IS_STATIC_JAVA_LIBRARY :=
diff --git a/core/java.mk b/core/java.mk
index 123cbe8..01951c0 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -266,6 +266,7 @@
# TODO(b/143658984): goma can't handle the --system argument to javac.
#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(JAVAC_NINJA_POOL)
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
@@ -489,15 +490,17 @@
$(built_dex_intermediate): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
ifdef LOCAL_PROGUARD_ENABLED
+ $(built_dex_intermediate): .KATI_NINJA_POOL := $(R8_NINJA_POOL)
$(built_dex_intermediate): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
$(built_dex_intermediate): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
$(built_dex_intermediate): PRIVATE_PROGUARD_DICTIONARY := $(proguard_dictionary)
- $(built_dex_intermediate) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(legacy_proguard_lib_deps) $(R8_COMPAT_PROGUARD) $(LOCAL_PROGUARD_FLAGS_DEPS)
+ $(built_dex_intermediate) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(legacy_proguard_lib_deps) $(R8) $(LOCAL_PROGUARD_FLAGS_DEPS)
$(transform-jar-to-dex-r8)
else # !LOCAL_PROGUARD_ENABLED
+ $(built_dex_intermediate): .KATI_NINJA_POOL := $(D8_NINJA_POOL)
$(built_dex_intermediate): PRIVATE_D8_LIBS := $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
$(built_dex_intermediate): $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
- $(built_dex_intermediate): $(full_classes_pre_proguard_jar) $(DX) $(ZIP2ZIP)
+ $(built_dex_intermediate): $(full_classes_pre_proguard_jar) $(D8) $(ZIP2ZIP)
$(transform-classes.jar-to-dex)
endif
diff --git a/core/main.mk b/core/main.mk
index 7a12bf3..cdbc3ef 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -460,6 +460,9 @@
ADDITIONAL_SYSTEM_PROPERTIES += net.bt.name=Android
+# This property is set by flashing debug boot image, so default to false.
+ADDITIONAL_SYSTEM_PROPERTIES += ro.force.debuggable=0
+
# ------------------------------------------------------------
# Define a function that, given a list of module tags, returns
# non-empty if that module should be installed in /system.
@@ -931,11 +934,11 @@
$(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
$(foreach dep,$(my_deps),\
$(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\
- $(if $(filter $(suite),device-tests general-tests),\
+ $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests),\
$(eval my_testcases := $(HOST_OUT_TESTCASES)),\
$(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
$(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
- $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(eval ALL_TARGETS.$(target).META_LIC:=$(module_license_metadata)))\
+ $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(call declare-copy-target-license-metadata,$(target),$(f)))\
$(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
$$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
$(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
@@ -1235,18 +1238,14 @@
# See the select-bitness-of-required-modules definition.
# $(1): product makefile
-define _product-var
- $(call get-product-var,$(1),$(2))
-endef
-
define product-installed-files
$(eval _pif_modules := \
- $(call _product-var,$(1),PRODUCT_PACKAGES) \
- $(if $(filter eng,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_ENG)) \
- $(if $(filter debug,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_DEBUG)) \
- $(if $(filter tests,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
- $(if $(filter asan,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
- $(if $(filter java_coverage,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+ $(call get-product-var,$(1),PRODUCT_PACKAGES) \
+ $(if $(filter eng,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ENG)) \
+ $(if $(filter debug,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG)) \
+ $(if $(filter tests,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
+ $(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
+ $(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
$(call auto-included-modules) \
) \
$(eval ### Filter out the overridden packages and executables before doing expansion) \
@@ -1257,13 +1256,13 @@
$(call expand-required-modules,_pif_modules,$(_pif_modules),$(_pif_overrides)) \
$(filter-out $(HOST_OUT_ROOT)/%,$(call module-installed-files, $(_pif_modules))) \
$(call resolve-product-relative-paths,\
- $(foreach cf,$(call _product-var,$(1),PRODUCT_COPY_FILES),$(call word-colon,2,$(cf))))
+ $(foreach cf,$(call get-product-var,$(1),PRODUCT_COPY_FILES),$(call word-colon,2,$(cf))))
endef
# Similar to product-installed-files above, but handles PRODUCT_HOST_PACKAGES instead
# This does support the :32 / :64 syntax, but does not support module overrides.
define host-installed-files
- $(eval _hif_modules := $(call _product-var,$(1),PRODUCT_HOST_PACKAGES)) \
+ $(eval _hif_modules := $(call get-product-var,$(1),PRODUCT_HOST_PACKAGES)) \
$(eval ### Split host vs host cross modules) \
$(eval _hcif_modules := $(filter host_cross_%,$(_hif_modules))) \
$(eval _hif_modules := $(filter-out host_cross_%,$(_hif_modules))) \
@@ -1940,10 +1939,6 @@
sdk: $(ALL_SDK_TARGETS)
$(call dist-for-goals,sdk, \
$(ALL_SDK_TARGETS) \
- $(SYMBOLS_ZIP) \
- $(SYMBOLS_MAPPING) \
- $(COVERAGE_ZIP) \
- $(APPCOMPAT_ZIP) \
$(INSTALLED_BUILD_PROP_TARGET) \
)
endif
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 2157c9e..e436b2c 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -25,7 +25,6 @@
cts \
custom_images \
dicttool_aosp \
- dump-products \
eng \
oem_image \
online-system-api-sdk-docs \
diff --git a/core/node_fns.mk b/core/node_fns.mk
index 2243cd7..144eb8b 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -83,27 +83,17 @@
# If needle appears multiple times, only the first occurrance
# will survive.
#
-# How it works:
-#
-# - Stick everything in haystack into a single word,
-# with "|||" separating the words.
-# - Replace occurrances of "|||$(needle)|||" with "||| |||",
-# breaking haystack back into multiple words, with spaces
-# where needle appeared.
-# - Add needle between the first and second words of haystack.
-# - Replace "|||" with spaces, breaking haystack back into
-# individual words.
-#
define uniq-word
$(strip \
$(if $(filter-out 0 1,$(words $(filter $(2),$(1)))), \
- $(eval h := |||$(subst $(space),|||,$(strip $(1)))|||) \
- $(eval h := $(subst |||$(strip $(2))|||,|||$(space)|||,$(h))) \
- $(eval h := $(word 1,$(h)) $(2) $(wordlist 2,9999,$(h))) \
- $(subst |||,$(space),$(h)) \
- , \
- $(1) \
- ))
+ $(eval _uniq_word_seen :=) \
+ $(foreach w,$(1), \
+ $(if $(filter $(2),$(w)), \
+ $(if $(_uniq_word_seen),, \
+ $(w) \
+ $(eval _uniq_word_seen := true)), \
+ $(w))), \
+ $(1)))
endef
INHERIT_TAG := @inherit:
diff --git a/core/notice_files.mk b/core/notice_files.mk
index c05d4ea..cbfcaa4 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -125,9 +125,10 @@
module_license_metadata :=
ifdef my_register_name
- module_license_metadata := $(call local-intermediates-dir)/$(my_register_name).meta_lic
+ module_license_metadata := $(call local-meta-intermediates-dir)/$(my_register_name).meta_lic
- $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(my_test_data) $(my_test_config),\
+ $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(foreach bi,$(LOCAL_SOONG_BUILT_INSTALLED),$(call word-colon,1,$(bi))) \
+ $(my_test_data) $(my_test_config),\
$(eval ALL_TARGETS.$(target).META_LIC := $(module_license_metadata)))
ALL_MODULES.$(my_register_name).META_LIC := $(strip $(ALL_MODULES.$(my_register_name).META_LIC) $(module_license_metadata))
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 63e9040..4a44837 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -15,45 +15,27 @@
#
# the sort also acts as a strip to remove the single space entries that creep in because of the evals
-define gather-all-products
+define gather-all-makefiles-for-current-product
$(eval _all_products_visited := )\
-$(sort $(call all-products-inner, $(PRODUCTS)))
+$(sort $(call gather-all-makefiles-for-current-product-inner,$(INTERNAL_PRODUCT)))
endef
-define all-products-inner
+define gather-all-makefiles-for-current-product-inner
$(foreach p,$(1),\
$(if $(filter $(p),$(_all_products_visited)),, \
$(p) \
$(eval _all_products_visited += $(p)) \
- $(call all-products-inner, $(PRODUCTS.$(strip $(p)).INHERITS_FROM))
+ $(call gather-all-makefiles-for-current-product-inner, $(PRODUCTS.$(strip $(p)).INHERITS_FROM))
) \
)
endef
-this_makefile := build/make/core/product-graph.mk
-
-products_graph := $(OUT_DIR)/products.dot
-ifeq ($(strip $(ANDROID_PRODUCT_GRAPH)),)
-products_list := $(INTERNAL_PRODUCT)
-else
-ifeq ($(strip $(ANDROID_PRODUCT_GRAPH)),--all)
-products_list := --all
-else
-products_list := $(foreach prod,$(ANDROID_PRODUCT_GRAPH),$(call resolve-short-product-name,$(prod)))
-endif
-endif
-
-all_products := $(call gather-all-products)
-
-open_parethesis := (
-close_parenthesis := )
-
node_color_target := orange
node_color_common := beige
node_color_vendor := lavenderblush
node_color_default := white
define node-color
-$(if $(filter $(1),$(PRIVATE_PRODUCTS_FILTER)),\
+$(if $(filter $(1),$(PRIVATE_TOP_LEVEL_MAKEFILE)),\
$(node_color_target),\
$(if $(filter build/make/target/product/%,$(1)),\
$(node_color_common),\
@@ -62,30 +44,33 @@
)
endef
+open_parethesis := (
+close_parenthesis := )
+
# Emit properties of a product node to a file.
# $(1) the product
# $(2) the output file
define emit-product-node-props
$(hide) echo \"$(1)\" [ \
-label=\"$(dir $(1))\\n$(notdir $(1))\\n\\n$(subst $(close_parenthesis),,$(subst $(open_parethesis),,$(call get-product-var,$(1),PRODUCT_MODEL)))\\n$(call get-product-var,$(1),PRODUCT_DEVICE)\" \
+label=\"$(dir $(1))\\n$(notdir $(1))$(if $(filter $(1),$(PRIVATE_TOP_LEVEL_MAKEFILE)),$(subst $(open_parethesis),,$(subst $(close_parenthesis),,\\n\\n$(PRODUCT_MODEL)\\n$(PRODUCT_DEVICE))))\" \
style=\"filled\" fillcolor=\"$(strip $(call node-color,$(1)))\" \
colorscheme=\"svg\" fontcolor=\"darkblue\" \
] >> $(2)
endef
-$(products_graph): PRIVATE_PRODUCTS := $(all_products)
-$(products_graph): PRIVATE_PRODUCTS_FILTER := $(products_list)
+products_graph := $(OUT_DIR)/products.dot
-$(products_graph): $(this_makefile)
- @echo Product graph DOT: $@ for $(PRIVATE_PRODUCTS_FILTER)
- $(hide) echo 'digraph {' > $@.in
- $(hide) echo 'graph [ ratio=.5 ];' >> $@.in
- $(hide) $(foreach p,$(PRIVATE_PRODUCTS), \
- $(foreach d,$(PRODUCTS.$(strip $(p)).INHERITS_FROM), echo \"$(d)\" -\> \"$(p)\" >> $@.in;))
- $(foreach p,$(PRIVATE_PRODUCTS),$(call emit-product-node-props,$(p),$@.in))
- $(hide) echo '}' >> $@.in
- $(hide) build/make/tools/filter-product-graph.py $(PRIVATE_PRODUCTS_FILTER) < $@.in > $@
+$(products_graph): PRIVATE_ALL_MAKEFILES_FOR_THIS_PRODUCT := $(call gather-all-makefiles-for-current-product)
+$(products_graph): PRIVATE_TOP_LEVEL_MAKEFILE := $(INTERNAL_PRODUCT)
+$(products_graph):
+ @echo Product graph DOT: $@ for $(PRIVATE_TOP_LEVEL_MAKEFILE)
+ $(hide) echo 'digraph {' > $@
+ $(hide) echo 'graph [ ratio=.5 ];' >> $@
+ $(hide) $(foreach p,$(PRIVATE_ALL_MAKEFILES_FOR_THIS_PRODUCT), \
+ $(foreach d,$(PRODUCTS.$(strip $(p)).INHERITS_FROM), echo \"$(d)\" -\> \"$(p)\" >> $@;))
+ $(foreach p,$(PRIVATE_ALL_MAKEFILES_FOR_THIS_PRODUCT),$(call emit-product-node-props,$(p),$@))
+ $(hide) echo '}' >> $@
.PHONY: product-graph
product-graph: $(products_graph)
diff --git a/core/product.mk b/core/product.mk
index f316114..7351313 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -377,17 +377,6 @@
.KATI_READONLY := _product_single_value_vars _product_list_vars
_product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
-define dump-product
-$(warning ==== $(1) ====)\
-$(foreach v,$(_product_var_list),\
-$(warning PRODUCTS.$(1).$(v) := $(call get-product-var,$(1),$(v))))\
-$(warning --------)
-endef
-
-define dump-products
-$(foreach p,$(PRODUCTS),$(call dump-product,$(p)))
-endef
-
#
# Functions for including product makefiles
#
@@ -415,7 +404,7 @@
$(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
$(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
$(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
- $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
+ $(call dump-inherit,$(current_mk),$(1)) \
$(call dump-config-vals,$(current_mk),inherit))
endef
@@ -464,64 +453,18 @@
#
-# Does various consistency checks on all of the known products.
+# Does various consistency checks on the current product.
# Takes no parameters, so $(call ) is not necessary.
#
-define check-all-products
+define check-current-product
$(if ,, \
- $(eval _cap_names :=) \
- $(foreach p,$(PRODUCTS), \
- $(eval pn := $(strip $(PRODUCTS.$(p).PRODUCT_NAME))) \
- $(if $(pn),,$(error $(p): PRODUCT_NAME must be defined.)) \
- $(if $(filter $(pn),$(_cap_names)), \
- $(error $(p): PRODUCT_NAME must be unique; "$(pn)" already used by $(strip \
- $(foreach \
- pp,$(PRODUCTS),
- $(if $(filter $(pn),$(PRODUCTS.$(pp).PRODUCT_NAME)), \
- $(pp) \
- ))) \
- ) \
- ) \
- $(eval _cap_names += $(pn)) \
- $(if $(call is-c-identifier,$(pn)),, \
- $(error $(p): PRODUCT_NAME must be a valid C identifier, not "$(pn)") \
- ) \
- $(eval pb := $(strip $(PRODUCTS.$(p).PRODUCT_BRAND))) \
- $(if $(pb),,$(error $(p): PRODUCT_BRAND must be defined.)) \
- $(foreach cf,$(strip $(PRODUCTS.$(p).PRODUCT_COPY_FILES)), \
- $(if $(filter 2 3,$(words $(subst :,$(space),$(cf)))),, \
- $(error $(p): malformed COPY_FILE "$(cf)") \
- ) \
- ) \
- ) \
-)
-endef
-
-
-#
-# Returns the product makefile path for the product with the provided name
-#
-# $(1): short product name like "generic"
-#
-define _resolve-short-product-name
- $(eval pn := $(strip $(1)))
- $(eval p := \
- $(foreach p,$(PRODUCTS), \
- $(if $(filter $(pn),$(PRODUCTS.$(p).PRODUCT_NAME)), \
- $(p) \
- )) \
- )
- $(eval p := $(sort $(p)))
- $(if $(filter 1,$(words $(p))), \
- $(p), \
- $(if $(filter 0,$(words $(p))), \
- $(error No matches for product "$(pn)"), \
- $(error Product "$(pn)" ambiguous: matches $(p)) \
- ) \
- )
-endef
-define resolve-short-product-name
-$(strip $(call _resolve-short-product-name,$(1)))
+ $(if $(call is-c-identifier,$(PRODUCT_NAME)),, \
+ $(error $(INTERNAL_PRODUCT): PRODUCT_NAME must be a valid C identifier, not "$(pn)")) \
+ $(if $(PRODUCT_BRAND),, \
+ $(error $(INTERNAL_PRODUCT): PRODUCT_BRAND must be defined.)) \
+ $(foreach cf,$(strip $(PRODUCT_COPY_FILES)), \
+ $(if $(filter 2 3,$(words $(subst :,$(space),$(cf)))),, \
+ $(error $(p): malformed COPY_FILE "$(cf)"))))
endef
# BoardConfig variables that are also inherited in product mks. Should ideally
diff --git a/core/product_config.mk b/core/product_config.mk
index 939a022..540289a 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -208,38 +208,27 @@
)
# Dedup, extract product names, etc.
-product_paths :=$(sort $(product_paths))
-all_named_products := $(call _first,$(product_paths),:)
-all_product_makefiles := $(call _second,$(product_paths),:)
+product_paths := $(sort $(product_paths))
+all_named_products := $(sort $(call _first,$(product_paths),:))
current_product_makefile := $(call _second,$(filter $(TARGET_PRODUCT):%,$(product_paths)),:)
COMMON_LUNCH_CHOICES := $(sort $(common_lunch_choices))
-load_all_product_makefiles :=
-ifneq (,$(filter product-graph, $(MAKECMDGOALS)))
-ifeq ($(ANDROID_PRODUCT_GRAPH),--all)
-load_all_product_makefiles := true
-endif
-endif
-ifneq (,$(filter dump-products,$(MAKECMDGOALS)))
-ifeq ($(ANDROID_DUMP_PRODUCTS),all)
-load_all_product_makefiles := true
-endif
-endif
+# Check that there are no duplicate product names
+$(foreach p,$(all_named_products), \
+ $(if $(filter 1,$(words $(filter $(p):%,$(product_paths)))),, \
+ $(error Product name must be unique, "$(p)" used by $(call _second,$(filter $(p):%,$(product_paths)),:))))
ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
_product_config_saved_KATI_ALLOW_RULES := $(.KATI_ALLOW_RULES)
.KATI_ALLOW_RULES := $(ALLOW_RULES_IN_PRODUCT_CONFIG)
endif
-ifeq ($(load_all_product_makefiles),true)
-# Import all product makefiles.
-$(call import-products, $(all_product_makefiles))
-else
-# Import just the current product.
-$(if $(current_product_makefile),,$(error Can not locate config makefile for product "$(TARGET_PRODUCT)"))
+ifeq (,$(current_product_makefile))
+ $(error Can not locate config makefile for product "$(TARGET_PRODUCT)")
+endif
+
ifneq (,$(filter $(TARGET_PRODUCT),$(products_using_starlark_config)))
RBC_PRODUCT_CONFIG := true
- RBC_BOARD_CONFIG := true
endif
ifndef RBC_PRODUCT_CONFIG
@@ -258,48 +247,31 @@
endif
include $(OUT_DIR)/rbc/rbc_product_config_results.mk
endif
-endif # Import all or just the current product makefile
-
-# Quick check
-$(check-all-products)
# This step was already handled in the RBC product configuration.
-# Since the equivalent starlark code will not add the partial products to
-# the PRODUCTS variable, it's ok for them to be set before check-all-products
ifeq ($(RBC_PRODUCT_CONFIG)$(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
# Import all the products that have made artifact path requirements, so that we can verify
-# the artifacts they produce.
-# These are imported after check-all-products because some of them might not be real products.
+# the artifacts they produce. They might be intermediate makefiles instead of real products.
$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
$(if $(filter-out $(makefile),$(PRODUCTS)),$(eval $(call import-products,$(makefile))))\
)
endif
+INTERNAL_PRODUCT := $(current_product_makefile)
+# Strip and assign the PRODUCT_ variables.
+$(call strip-product-vars)
+
+# Quick check
+$(check-current-product)
+
ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
.KATI_ALLOW_RULES := $(_saved_KATI_ALLOW_RULES)
_product_config_saved_KATI_ALLOW_RULES :=
endif
-ifneq ($(filter dump-products, $(MAKECMDGOALS)),)
-$(dump-products)
-endif
-
-# Convert a short name like "sooner" into the path to the product
-# file defining that product.
-#
-INTERNAL_PRODUCT := $(call resolve-short-product-name, $(TARGET_PRODUCT))
-ifneq ($(current_product_makefile),$(INTERNAL_PRODUCT))
-$(error PRODUCT_NAME inconsistent in $(current_product_makefile) and $(INTERNAL_PRODUCT))
-endif
-
-
############################################################################
-# Strip and assign the PRODUCT_ variables.
-$(call strip-product-vars)
current_product_makefile :=
-all_product_makefiles :=
-all_product_configs :=
#############################################################################
# Quick check and assign default values
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 11064f3..7a5e501 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -104,6 +104,11 @@
seen = {item: 0 for item in value_list}
return sorted(seen.keys()) if _options.rearrange == "sort" else seen.keys()
+def __sort_pcm_names(pcm_names):
+ # We have to add an extension back onto the pcm names when sorting,
+ # or else the sort order could be wrong when one is a prefix of another.
+ return [x[:-3] for x in sorted([y + ".mk" for y in pcm_names], reverse=True)]
+
def _product_configuration(top_pcm_name, top_pcm, input_variables_init):
"""Creates configuration."""
@@ -120,25 +125,19 @@
globals, globals_base = _init_globals(input_variables_init)
- config_postfix = [] # Configs in postfix order
-
# Each PCM is represented by a quadruple of function, config, children names
# and readyness (that is, the configurations from inherited PCMs have been
# substituted).
configs = {top_pcm_name: (top_pcm, None, [], False)} # All known PCMs
- stash = [] # Configs to push once their descendants are done
-
- # Stack containing PCMs to be processed. An item in the stack
- # is a pair of PCMs name and its height in the product inheritance tree.
- pcm_stack = [(top_pcm_name, 0)]
- pcm_count = 0
+ # Stack containing PCMs to be processed
+ pcm_stack = [top_pcm_name]
# Run it until pcm_stack is exhausted, but no more than N times
for n in range(1000):
if not pcm_stack:
break
- (name, height) = pcm_stack.pop()
+ name = pcm_stack.pop()
pcm, cfg, c, _ = configs[name]
# cfg is set only after PCM has been called, leverage this
@@ -146,12 +145,9 @@
if cfg != None:
continue
- # Push ancestors until we reach this node's height
- config_postfix.extend([stash.pop() for i in range(len(stash) - height)])
-
# Run this one, obtaining its configuration and child PCMs.
if _options.trace_modules:
- print("#%d: %s" % (n, name))
+ rblf_log("%d: %s" % (n, name))
# Run PCM.
handle = __h_new()
@@ -162,43 +158,88 @@
globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_ALLOWED_LIST"] = handle.artifact_path_allowed_list
globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_REQUIREMENT_IS_RELAXED"] = "true" if handle.artifact_path_requirement_is_relaxed[0] else ""
globals.setdefault("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", [])
- globals["ARTIFACT_PATH_REQUIREMENT_PRODUCTS"] += [name+".mk"]
+ globals["ARTIFACT_PATH_REQUIREMENT_PRODUCTS"] = sorted(globals["ARTIFACT_PATH_REQUIREMENT_PRODUCTS"] + [name+".mk"])
+
+ if handle.product_enforce_packages_exist[0]:
+ globals["PRODUCTS."+name+".mk.PRODUCT_ENFORCE_PACKAGES_EXIST"] = "true"
+ globals["PRODUCTS."+name+".mk.PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST"] = handle.product_enforce_packages_exist_allow_list
# Now we know everything about this PCM, record it in 'configs'.
children = handle.inherited_modules
if _options.trace_modules:
- print("# ", " ".join(children.keys()))
+ rblf_log(" ", " ".join(children.keys()))
# Starlark dictionaries are guaranteed to iterate through in insertion order,
# so children.keys() will be ordered by the inherit() calls
configs[name] = (pcm, handle.cfg, children.keys(), False)
- pcm_count = pcm_count + 1
- if len(children) == 0:
- # Leaf PCM goes straight to the config_postfix
- config_postfix.append(name)
- continue
-
- # Stash this PCM, process children in the sorted order
- stash.append(name)
- for child_name in sorted(children, reverse = True):
+ for child_name in __sort_pcm_names(children.keys()):
if child_name not in configs:
configs[child_name] = (children[child_name], None, [], False)
- pcm_stack.append((child_name, len(stash)))
+ pcm_stack.append(child_name)
if pcm_stack:
fail("Inheritance processing took too many iterations")
- # Flush the stash
- config_postfix.extend([stash.pop() for i in range(len(stash))])
- if len(config_postfix) != pcm_count:
- fail("Ran %d modules but postfix tree has only %d entries" % (pcm_count, len(config_postfix)))
+ for pcm_name in globals.get("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", []):
+ for var, val in evaluate_finalized_product_variables(configs, pcm_name[:-3]).items():
+ globals["PRODUCTS."+pcm_name+"."+var] = val
- if _options.trace_modules:
- print("\n#---Postfix---")
- for x in config_postfix:
- print("# ", x)
+ # Copy product config variables from the cfg dictionary to the
+ # PRODUCTS.<top_level_makefile_name>.<var_name> global variables.
+ for var, val in evaluate_finalized_product_variables(configs, top_pcm_name, _options.trace_modules).items():
+ globals["PRODUCTS."+top_pcm_name+".mk."+var] = val
+
+ # Record inheritance hierarchy in PRODUCTS.<file>.INHERITS_FROM variables.
+ # This is required for m product-graph.
+ for config in configs:
+ if len(configs[config][2]) > 0:
+ globals["PRODUCTS."+config+".mk.INHERITS_FROM"] = sorted([x + ".mk" for x in configs[config][2]])
+ globals["PRODUCTS"] = __words(globals.get("PRODUCTS", [])) + [top_pcm_name + ".mk"]
+
+ return (globals, globals_base)
+
+def evaluate_finalized_product_variables(configs, top_level_pcm_name, trace=False):
+ configs_postfix = []
+ pcm_stack = [(top_level_pcm_name, True)]
+ for i in range(1000):
+ if not pcm_stack:
+ break
+
+ pcm_name, before = pcm_stack.pop()
+ if before:
+ pcm_stack.append((pcm_name, False))
+ for child in __sort_pcm_names(configs[pcm_name][2]):
+ pcm_stack.append((child, True))
+ else:
+ configs_postfix.append(pcm_name)
+ if pcm_stack:
+ fail("Inheritance processing took too many iterations")
+
+ # clone the configs, because in the process of evaluating the
+ # final cfg dictionary we will remove values from the intermediate
+ # cfg dictionaries. We need to be able to call evaluate_finalized_product_variables()
+ # multiple times, so we can't change the origional configs object.
+ cloned_configs = {}
+ for pcm_name in configs:
+ # skip unneeded pcms
+ if pcm_name not in configs_postfix:
+ continue
+ pcm, cfg, children_names, ready = configs[pcm_name]
+ cloned_cfg = {}
+ for var, val in cfg.items():
+ if type(val) == 'list':
+ cloned_cfg[var] = list(val)
+ else:
+ cloned_cfg[var] = val
+ cloned_configs[pcm_name] = (pcm, cloned_cfg, children_names, ready)
+ configs = cloned_configs
+
+ if trace:
+ rblf_log("\n---Postfix---")
+ for x in configs_postfix:
+ rblf_log(" ", x)
# Traverse the tree from the bottom, evaluating inherited values
- for pcm_name in config_postfix:
+ for pcm_name in configs_postfix:
pcm, cfg, children_names, ready = configs[pcm_name]
# Should run
@@ -217,25 +258,7 @@
_substitute_inherited(configs, pcm_name, cfg)
_percolate_inherited(configs, pcm_name, cfg, children_names)
configs[pcm_name] = pcm, cfg, children_names, True
-
- if (pcm_name + ".mk") in globals.get("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", []):
- for var, val in cfg.items():
- globals["PRODUCTS."+pcm_name+".mk."+var] = val
-
- # Copy product config variables from the cfg dictionary to the
- # PRODUCTS.<top_level_makefile_name>.<var_name> global variables.
- for var, val in configs[top_pcm_name][1].items():
- globals["PRODUCTS."+top_pcm_name+".mk."+var] = val
-
- # Record inheritance hierarchy in PRODUCTS.<file>.INHERITS_FROM variables.
- # This is required for m product-graph.
- for config in configs:
- if len(configs[config][2]) > 0:
- globals["PRODUCTS."+config+".mk.INHERITS_FROM"] = sorted([x + ".mk" for x in configs[config][2]])
- globals["PRODUCTS"] = __words(globals.get("PRODUCTS", [])) + [top_pcm_name + ".mk"]
-
- return (globals, globals_base)
-
+ return configs[top_level_pcm_name][1]
def _dictionary_difference(a, b):
result = {}
@@ -286,7 +309,7 @@
old_val = val
new_val = _value_expand(configs, attr, val)
if new_val != old_val:
- print("%s(i): %s=%s (was %s)" % (pcm_name, attr, new_val, old_val))
+ rblf_log("%s(i): %s=%s (was %s)" % (pcm_name, attr, new_val, old_val))
cfg[attr] = new_val
def _value_expand(configs, attr, values_list):
@@ -340,7 +363,7 @@
for attr in _options.trace_variables:
if attr in percolated_attrs:
- print("%s: %s^=%s" % (cfg_name, attr, cfg[attr]))
+ rblf_log("%s: %s^=%s" % (cfg_name, attr, cfg[attr]))
def __move_items(to_list, from_cfg, attr):
value = from_cfg.get(attr, [])
@@ -382,10 +405,26 @@
"""Gets to the value of the variable in the namespace."""
return g.get(_soong_config_namespaces_key, {}).get(nsname, {}).get(var, None)
-
-def _abspath(path):
+def _abspath(paths):
"""Provided for compatibility, to be removed later."""
- return path
+ cwd = rblf_shell('pwd')
+ results = []
+ for path in __words(paths):
+ if path[0] != "/":
+ path = cwd + "/" + path
+
+ resultparts = []
+ for part in path.split('/'):
+ if part == "." or part == "":
+ continue
+ elif part == "..":
+ if resultparts:
+ resultparts.pop()
+ else:
+ resultparts.append(part)
+ results.append("/" + "/".join(resultparts))
+
+ return " ".join(results)
def _addprefix(prefix, string_or_list):
@@ -434,6 +473,8 @@
artifact_path_requirements = list(),
artifact_path_allowed_list = list(),
artifact_path_requirement_is_relaxed = [False], # as a list so that we can reassign it
+ product_enforce_packages_exist = [False],
+ product_enforce_packages_exist_allow_list = [],
)
def __h_cfg(handle):
@@ -495,14 +536,17 @@
"""If from file exists, returns [from:to] pair."""
value = path_pair.split(":", 2)
+ if value[0].find('*') != -1:
+ fail("copy_if_exists: input file cannot contain *")
+
# Check that l[0] exists
- return [":".join(value)] if rblf_file_exists(value[0]) else []
+ return [":".join(value)] if rblf_wildcard(value[0]) else []
-def _enforce_product_packages_exist(pkg_string_or_list):
+def _enforce_product_packages_exist(handle, pkg_string_or_list=[]):
"""Makes including non-existent modules in PRODUCT_PACKAGES an error."""
-
- #TODO(asmundak)
- pass
+ handle.product_enforce_packages_exist[0] = True
+ handle.product_enforce_packages_exist_allow_list.clear()
+ handle.product_enforce_packages_exist_allow_list.extend(__words(pkg_string_or_list))
def _add_product_dex_preopt_module_config(handle, modules, config):
"""Equivalent to add-product-dex-preopt-module-config from build/make/core/product.mk."""
@@ -511,10 +555,6 @@
_setdefault(handle, "PRODUCT_DEX_PREOPT_MODULE_CONFIGS")
handle.cfg["PRODUCT_DEX_PREOPT_MODULE_CONFIGS"] += [m + "=" + config for m in modules]
-def _file_wildcard_exists(file_pattern):
- """Return True if there are files matching given bash pattern."""
- return len(rblf_wildcard(file_pattern)) > 0
-
def _find_and_copy(pattern, from_dir, to_dir):
"""Return a copy list for the files matching the pattern."""
return sorted([("%s/%s:%s/%s" % (from_dir, f, to_dir, f))
@@ -564,6 +604,27 @@
break
return res
+def _first_word(input):
+ """Equivalent to the GNU make function $(firstword)."""
+ input = __words(input)
+ if len(input) == 0:
+ return ""
+ return input[0]
+
+def _last_word(input):
+ """Equivalent to the GNU make function $(lastword)."""
+ input = __words(input)
+ l = len(input)
+ if l == 0:
+ return ""
+ return input[l-1]
+
+def _flatten_2d_list(list):
+ result = []
+ for x in list:
+ result += x
+ return result
+
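For reference, standalone Python renderings of these three helpers and the GNU make functions they mirror (hypothetical names, outside the rbc runtime):

```
def first_word(x):
    """$(firstword ...): first element of a string or list, or ""."""
    words = x.split() if isinstance(x, str) else list(x)
    return words[0] if words else ""

def last_word(x):
    """$(lastword ...): last element of a string or list, or ""."""
    words = x.split() if isinstance(x, str) else list(x)
    return words[-1] if words else ""

def flatten_2d_list(lst):
    """Concatenate one level of nesting: [[a], [b, c]] -> [a, b, c]."""
    return [item for row in lst for item in row]

assert first_word("a b c") == "a"
assert last_word(["a", "b", "c"]) == "c"
assert flatten_2d_list([["x"], ["y", "z"]]) == ["x", "y", "z"]
```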
def _dir(paths):
"""Equivalent to the GNU make function $(dir).
@@ -726,8 +787,11 @@
That is, removes the string's leading and trailing whitespace characters and
replaces any sequence of whitespace characters with a single space.
"""
- if type(s) != "string":
- return s
+ t = type(s)
+ if t == "list":
+ s = " ".join(s)
+ elif t != "string":
+ fail("Argument to mkstrip must be a string or list, got: "+t)
result = ""
was_space = False
for ch in s.strip().elems():
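The hunk cuts off before the loop that collapses whitespace runs; a rough, self-contained Python equivalent of the stricter `mkstrip` (assuming rbc's `.elems()` iterates characters the way Python string iteration does):

```
def mkstrip(s):
    """Strip s and collapse internal whitespace runs to single spaces."""
    if isinstance(s, list):
        s = " ".join(s)
    elif not isinstance(s, str):
        raise TypeError("Argument to mkstrip must be a string or list, got: "
                        + type(s).__name__)
    result = []
    was_space = False
    for ch in s.strip():
        is_space = ch.isspace()
        if not is_space:
            result.append(ch)
        elif not was_space:
            result.append(" ")      # only the first space of a run survives
        was_space = is_space
    return "".join(result)

assert mkstrip(["a", " b   c "]) == "a b c"
```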
@@ -815,12 +879,13 @@
dir = _dir,
enforce_product_packages_exist = _enforce_product_packages_exist,
expand_wildcard = _expand_wildcard,
- file_exists = rblf_file_exists,
- file_wildcard_exists = _file_wildcard_exists,
filter = _filter,
filter_out = _filter_out,
find_and_copy = _find_and_copy,
findstring = _findstring,
+ first_word = _first_word,
+ last_word = _last_word,
+ flatten_2d_list = _flatten_2d_list,
inherit = _inherit,
indirect = _indirect,
mk2rbc_error = _mk2rbc_error,
diff --git a/core/proguard.flags b/core/proguard.flags
index 185275e..aee5271 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -15,6 +15,12 @@
@**.VisibleForTesting *;
}
+# Keep rule for members that are needed solely to keep alive downstream weak
+# references, and could otherwise be removed after tree shaking optimizations.
+-keepclassmembers,allowaccessmodification,allowobfuscation,allowshrinking class * {
+ @com.android.internal.annotations.KeepForWeakReference <fields>;
+}
+
# Understand the common @Keep annotation from various Android packages:
# * android.support.annotation
# * androidx.annotation
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 30c2341..54cbdcc 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -74,5 +74,6 @@
-dontnote
# The lite proto runtime uses reflection to access fields based on the names in
-# the schema, keep all the fields.
--keepclassmembers class * extends com.google.protobuf.MessageLite { <fields>; }
+# the schema, keep all the fields. Wildcard is used to apply the rule to classes
+# that have been renamed with jarjar.
+-keepclassmembers class * extends **.protobuf.MessageLite { <fields>; }
diff --git a/core/rbe.mk b/core/rbe.mk
index 370d4bd..90328d3 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -87,11 +87,11 @@
endif
ifdef RBE_R8
- R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+ R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
endif
ifdef RBE_D8
- D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+ D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
endif
rbe_dir :=
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index 7a177ff..05b4b6b 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -50,6 +50,28 @@
# to avoid checkbuilds making an extra copy of every module.
LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
+my_check_same_vndk_variants :=
+same_vndk_variants_stamp :=
+ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
+ ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
+ ifneq ($(CLANG_COVERAGE),true)
+ # Do not compare VNDK variant for special cases e.g. coverage builds.
+ ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
+ my_check_same_vndk_variants := true
+ same_vndk_variants_stamp := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/same_vndk_variants.timestamp
+ endif
+ endif
+ endif
+endif
+
+ifeq ($(my_check_same_vndk_variants),true)
+ # Add the timestamp to the CHECKED list so that `checkbuild` can run it.
+ # Note that adding the timestamp to LOCAL_BUILT_MODULE isn't enough, because `checkbuild`
+ # doesn't check LOCAL_BUILT_MODULE for soong-built modules. LOCAL_BUILT_MODULE is skipped
+ # when the vendor variant isn't used at all, which may break downstream trees.
+ LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp)
+endif
+
#######################################
include $(BUILD_SYSTEM)/base_rules.mk
#######################################
@@ -125,21 +147,7 @@
endif
endif
-my_check_same_vndk_variants :=
-ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
- ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
- ifneq ($(CLANG_COVERAGE),true)
- # Do not compare VNDK variant for special cases e.g. coverage builds.
- ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
- my_check_same_vndk_variants := true
- endif
- endif
- endif
-endif
-
ifeq ($(my_check_same_vndk_variants),true)
- same_vndk_variants_stamp := $(intermediates)/same_vndk_variants.timestamp
-
my_core_register_name := $(subst .vendor,,$(subst .product,,$(my_register_name)))
my_core_variant_files := $(call module-target-built-files,$(my_core_register_name))
my_core_shared_lib := $(sort $(filter %.so,$(my_core_variant_files)))
@@ -260,6 +268,9 @@
installed_static_library_notice_file_targets += \
$(foreach lib,$(LOCAL_RLIB_LIBRARIES), \
NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-RLIB_LIBRARIES-$(lib))
+installed_static_library_notice_file_targets += \
+ $(foreach lib,$(LOCAL_PROC_MACRO_LIBRARIES), \
+ NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-PROC_MACRO_LIBRARIES-$(lib))
$(notice_target): | $(installed_static_library_notice_file_targets)
$(LOCAL_INSTALLED_MODULE): | $(notice_target)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 32675f2..feffcc7 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -33,6 +33,8 @@
$(call add_json_str, Platform_security_patch, $(PLATFORM_SECURITY_PATCH))
$(call add_json_str, Platform_preview_sdk_version, $(PLATFORM_PREVIEW_SDK_VERSION))
$(call add_json_str, Platform_base_os, $(PLATFORM_BASE_OS))
+$(call add_json_str, Platform_version_last_stable, $(PLATFORM_VERSION_LAST_STABLE))
+$(call add_json_str, Platform_version_known_codenames, $(PLATFORM_VERSION_KNOWN_CODENAMES))
$(call add_json_str, Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
@@ -169,6 +171,8 @@
$(call add_json_list, RecoverySnapshotDirsExcluded, $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
$(call add_json_bool, HostFakeSnapshotEnabled, $(HOST_FAKE_SNAPSHOT_ENABLE))
+$(call add_json_bool, MultitreeUpdateMeta, $(filter true,$(TARGET_MULTITREE_UPDATE_META)))
+
$(call add_json_bool, Treble_linker_namespaces, $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
$(call add_json_bool, Enforce_vintf_manifest, $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
@@ -204,9 +208,8 @@
$(call add_json_list, BoardVendorDlkmSepolicyDirs, $(BOARD_VENDOR_DLKM_SEPOLICY_DIRS))
$(call add_json_list, BoardOdmDlkmSepolicyDirs, $(BOARD_ODM_DLKM_SEPOLICY_DIRS))
$(call add_json_list, BoardSystemDlkmSepolicyDirs, $(BOARD_SYSTEM_DLKM_SEPOLICY_DIRS))
-# TODO: BOARD_PLAT_* dirs only kept for compatibility reasons. Will be a hard error on API level 31
-$(call add_json_list, SystemExtPublicSepolicyDirs, $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS) $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
-$(call add_json_list, SystemExtPrivateSepolicyDirs, $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS) $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
+$(call add_json_list, SystemExtPublicSepolicyDirs, $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS))
+$(call add_json_list, SystemExtPrivateSepolicyDirs, $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS))
$(call add_json_list, BoardSepolicyM4Defs, $(BOARD_SEPOLICY_M4DEFS))
$(call add_json_str, BoardSepolicyVers, $(BOARD_SEPOLICY_VERS))
$(call add_json_str, SystemExtSepolicyPrebuiltApiDir, $(BOARD_SYSTEM_EXT_PREBUILT_DIR))
@@ -271,6 +274,7 @@
$(call add_json_str, ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
+$(call add_json_bool, BuildBrokenDepfile, $(filter true,$(BUILD_BROKEN_DEPFILE)))
$(call add_json_bool, BuildBrokenEnforceSyspropOwner, $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
$(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
$(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
diff --git a/core/soong_droiddoc_prebuilt.mk b/core/soong_droiddoc_prebuilt.mk
index 4dc5d08..ba597c5 100644
--- a/core/soong_droiddoc_prebuilt.mk
+++ b/core/soong_droiddoc_prebuilt.mk
@@ -6,6 +6,7 @@
ifdef LOCAL_DROIDDOC_STUBS_SRCJAR
$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_SRCJAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar))
+$(eval ALL_TARGETS.$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar
.PHONY: $(LOCAL_MODULE)
@@ -14,6 +15,7 @@
ifdef LOCAL_DROIDDOC_DOC_ZIP
$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+$(eval ALL_TARGETS.$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
$(call dist-for-goals,docs,$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip)
.PHONY: $(LOCAL_MODULE) $(LOCAL_MODULE)-docs.zip
@@ -23,12 +25,15 @@
ifdef LOCAL_DROIDDOC_ANNOTATIONS_ZIP
$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_ANNOTATIONS_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_annotations.zip))
+$(eval ALL_TARGETS.$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_annotations.zip.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
endif
ifdef LOCAL_DROIDDOC_API_VERSIONS_XML
$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_API_VERSIONS_XML),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_generated-api-versions.xml))
+$(eval ALL_TARGETS.$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_generated-api-versions.xml.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
endif
ifdef LOCAL_DROIDDOC_METADATA_ZIP
$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_METADATA_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)-metadata.zip))
+$(eval ALL_TARGETS.$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)-metadata.zip.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
endif
diff --git a/core/tasks/README.dex_preopt_check.md b/core/tasks/README.dex_preopt_check.md
new file mode 100644
index 0000000..b0baa9e
--- /dev/null
+++ b/core/tasks/README.dex_preopt_check.md
@@ -0,0 +1,43 @@
+# `dex_preopt_check`
+
+`dex_preopt_check` is a build-time check to make sure that all system server
+jars are dexpreopted. When the check fails, you will see the following error
+message:
+
+```
+FAILED:
+build/make/core/tasks/dex_preopt_check.mk:13: warning: Missing compilation artifacts. Dexpreopting is not working for some system server jars
+Offending entries:
+```
+
+Possible causes are:
+
+1. There is an APEX/SDK mismatch. (E.g., the APEX is built from source while
+ the SDK is built from prebuilt.)
+
+1. The `systemserverclasspath_fragment` is not added as
+ `systemserverclasspath_fragments` of the corresponding `apex` module, or not
+ added as `exported_systemserverclasspath_fragments` of the corresponding
+ `prebuilt_apex`/`apex_set` module when building from prebuilt.
+
+1. The expected version of the system server java library is not preferred.
+ (E.g., the `java_import` module has `prefer: false` when building from
+ prebuilt.)
+
+1. Dexpreopting is disabled for the system server java library. This can be due
+ to various reasons including but not limited to:
+
+ - The java library has `dex_preopt: { enabled: false }` in the Android.bp
+ file.
+
+ - The java library is listed in `DEXPREOPT_DISABLED_MODULES` in a Makefile.
+
+ - The java library is missing `installable: true` in the Android.bp
+ file when building from source.
+
+ - Sanitizer is enabled.
+
+1. `PRODUCT_SYSTEM_SERVER_JARS`, `PRODUCT_APEX_SYSTEM_SERVER_JARS`,
+ `PRODUCT_STANDALONE_SYSTEM_SERVER_JARS`, or
+ `PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS` has an extra entry that is not
+ needed by the product.
diff --git a/core/tasks/dex_preopt_check.mk b/core/tasks/dex_preopt_check.mk
index bfa1ec5..5fd60c8 100644
--- a/core/tasks/dex_preopt_check.mk
+++ b/core/tasks/dex_preopt_check.mk
@@ -12,7 +12,8 @@
ifneq (,$(filter services,$(PRODUCT_PACKAGES)))
$(call maybe-print-list-and-error,\
$(filter-out $(ALL_DEFAULT_INSTALLED_MODULES),$(DEXPREOPT_SYSTEMSERVER_ARTIFACTS)),\
- Missing compilation artifacts. Dexpreopting is not working for some system server jars \
+ Missing compilation artifacts. Dexpreopting is not working for some system server jars. See \
+ https://cs.android.com/android/platform/superproject/+/master:build/make/core/tasks/README.dex_preopt_check.md \
)
endif
endif
diff --git a/core/tasks/find-shareduid-violation.mk b/core/tasks/find-shareduid-violation.mk
index d6885eb..b5feef1 100644
--- a/core/tasks/find-shareduid-violation.mk
+++ b/core/tasks/find-shareduid-violation.mk
@@ -35,4 +35,5 @@
--copy_out_system_ext $(TARGET_COPY_OUT_SYSTEM_EXT) \
> $@
+$(call declare-0p-target,$(shareduid_violation_modules_filename))
$(call dist-for-goals,droidcore,$(shareduid_violation_modules_filename))
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index 0daf446..5252394 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -40,6 +40,18 @@
# Create an artifact to include all shared library files in general-tests.
general_tests_host_shared_libs_zip := $(PRODUCT_OUT)/general-tests_host-shared-libs.zip
+# Copy kernel test modules to testcases directories
+include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
+kernel_test_copy_pairs := \
+ $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_host_out))
+copy_kernel_tests := $(call copy-many-files,$(kernel_test_copy_pairs))
+
+# PHONY target to be used to build and test `vts_kernel_tests` without building full vts
+.PHONY: vts_kernel_tests
+vts_kernel_tests: $(copy_kernel_tests)
+
+$(general_tests_zip) : $(copy_kernel_tests)
+$(general_tests_zip) : PRIVATE_KERNEL_TEST_HOST_OUT := $(kernel_test_host_out)
$(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip)
$(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_host_shared_libs_zip)
$(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
@@ -52,6 +64,7 @@
rm -f $@ $(PRIVATE_general_tests_list_zip)
mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools
echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
+ find $(PRIVATE_KERNEL_TEST_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 8097535..4ef6eb8 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -28,6 +28,7 @@
'"runtime_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES)),"$(w)", )], ' \
'"data_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_DATA_BINS)),"$(w)", )], ' \
'"supported_variants": [$(foreach w,$(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS)),"$(w)", )], ' \
+ '"host_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET)),"$(w)", )], ' \
'},\n' \
) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
$(hide) echo '}' >> $@
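The new field surfaces each module's `HOST_REQUIRED_FROM_TARGET` entries in module-info.json. A representative (hypothetical) entry after this change, written as a Python dict:

```
# Hypothetical shape of one module-info.json entry; module and
# dependency names are invented for illustration.
entry = {
    "runtime_dependencies": ["libbase"],
    "data_dependencies": [],
    "supported_variants": ["DEVICE"],
    "host_dependencies": ["adb"],   # new: from HOST_REQUIRED_FROM_TARGET
}
```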
diff --git a/core/tasks/multitree.mk b/core/tasks/multitree.mk
new file mode 100644
index 0000000..225477e
--- /dev/null
+++ b/core/tasks/multitree.mk
@@ -0,0 +1,16 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.PHONY: update-meta
+update-meta: $(SOONG_MULTITREE_METADATA)
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index add580d..4b8bd16 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -132,6 +132,9 @@
$(call declare-0p-target,$(test_suite_notice_html))
$(call declare-0p-target,$(test_suite_notice_txt))
+$(call declare-1p-copy-files,$(test_suite_dynamic_config),)
+$(call declare-1p-copy-files,$(test_suite_prebuilt_tools),)
+
# Reset all input variables
test_suite_name :=
test_suite_tradefed :=
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 20a1694..c41aec5 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -21,11 +21,13 @@
LOCAL_MODULE := $(my_package_name)
LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_LICENSE_PACKAGE_NAME := Android
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
LOCAL_MODULE_CLASS := PACKAGING
LOCAL_MODULE_STEM := $(my_package_name).zip
LOCAL_UNINSTALLABLE_MODULE := true
include $(BUILD_SYSTEM)/base_rules.mk
-my_staging_dir := $(intermediates)
+my_staging_dir := $(intermediates)/staging
my_package_zip := $(LOCAL_BUILT_MODULE)
my_built_modules := $(foreach p,$(my_copy_pairs),$(call word-colon,1,$(p)))
@@ -92,17 +94,18 @@
endif
$(my_package_zip): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
+$(my_package_zip): PRIVATE_STAGING_DIR := $(my_staging_dir)
$(my_package_zip): PRIVATE_PICKUP_FILES := $(my_pickup_files)
$(my_package_zip) : $(my_built_modules)
@echo "Package $@"
- @rm -rf $(dir $@) && mkdir -p $(dir $@)
+ @rm -rf $(PRIVATE_STAGING_DIR) && mkdir -p $(PRIVATE_STAGING_DIR)
$(foreach p, $(PRIVATE_COPY_PAIRS),\
$(eval pair := $(subst :,$(space),$(p)))\
mkdir -p $(dir $(word 2,$(pair))) && \
cp -Rf $(word 1,$(pair)) $(word 2,$(pair)) && ) true
$(hide) $(foreach f, $(PRIVATE_PICKUP_FILES),\
- cp -RfL $(f) $(dir $@) && ) true
- $(hide) cd $(dir $@) && zip -rqX $(notdir $@) *
+ cp -RfL $(f) $(PRIVATE_STAGING_DIR) && ) true
+ $(hide) cd $(PRIVATE_STAGING_DIR) && zip -rqX ../$(notdir $@) *
my_makefile :=
my_staging_dir :=
diff --git a/core/tasks/tools/vts-kernel-tests.mk b/core/tasks/tools/vts-kernel-tests.mk
new file mode 100644
index 0000000..5fbb589
--- /dev/null
+++ b/core/tasks/tools/vts-kernel-tests.mk
@@ -0,0 +1,26 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+-include external/linux-kselftest/android/kselftest_test_list.mk
+-include external/ltp/android/ltp_package_list.mk
+
+include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
+
+# Copy kernel test modules to testcases directories
+kernel_test_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_tests
+kernel_test_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_tests
+kernel_test_modules := \
+ $(kselftest_modules) \
+ ltp \
+ $(ltp_packages)
\ No newline at end of file
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
index f1159b3..06161f0 100644
--- a/core/tasks/tools/vts_package_utils.mk
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -29,6 +29,6 @@
$(eval my_copy_dest := $(patsubst data/%,DATA/%,\
$(patsubst system/%,DATA/%,\
$(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
- $(eval ALL_TARGETS.$(2)/$(my_copy_dest).META_LIC := $(if $(strip $(ALL_MODULES.$(m).META_LIC)),$(ALL_MODULES.$(m).META_LIC),$(ALL_MODULES.$(m).DELAYED_META_LIC)))\
+ $(call declare-copy-target-license-metadata,$(2)/$(my_copy_dest),$(bui))\
$(bui):$(2)/$(my_copy_dest))))
endef
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index befde87..5e1b5d5 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -12,35 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
--include external/linux-kselftest/android/kselftest_test_list.mk
--include external/ltp/android/ltp_package_list.mk
-
-include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
-
test_suite_name := vts
test_suite_tradefed := vts-tradefed
test_suite_readme := test/vts/tools/vts-core-tradefed/README
-# Copy kernel test modules to testcases directories
-kernel_test_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_tests
-kernel_test_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_tests
-kernel_test_modules := \
- $(kselftest_modules) \
- ltp \
- $(ltp_packages)
+include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
kernel_test_copy_pairs := \
- $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_vts_out)) \
- $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_host_out))
+ $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_vts_out))
copy_kernel_tests := $(call copy-many-files,$(kernel_test_copy_pairs))
test_suite_extra_deps := $(copy_kernel_tests)
-# PHONY target to be used to build and test `vts_kernel_tests` without building full vts
-.PHONY: vts_kernel_tests
-vts_kernel_tests: $(copy_kernel_tests)
-
include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
.PHONY: vts
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index c8b381d..ce25ee2 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,10 +40,10 @@
include $(INTERNAL_BUILD_ID_MAKEFILE)
endif
-DEFAULT_PLATFORM_VERSION := TP1A
+DEFAULT_PLATFORM_VERSION := UP1A
.KATI_READONLY := DEFAULT_PLATFORM_VERSION
MIN_PLATFORM_VERSION := TP1A
-MAX_PLATFORM_VERSION := TP1A
+MAX_PLATFORM_VERSION := UP1A
# The last stable version name of the platform that was released. During
# development, this stays at that previous version, while the codename indicates
@@ -54,6 +54,7 @@
# These are the current development codenames, if the build is not a final
# release build. If this is a final release build, it is simply "REL".
PLATFORM_VERSION_CODENAME.TP1A := Tiramisu
+PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
# This is the user-visible version. In a final release build it should
# be empty to use PLATFORM_VERSION as the user-visible version. For
@@ -85,8 +86,13 @@
PLATFORM_BASE_SDK_EXTENSION_VERSION := 1
.KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
-# This is are all known codenames starting from Q.
-PLATFORM_VERSION_KNOWN_CODENAMES := Q R S Sv2 Tiramisu
+# These are all known codenames.
+PLATFORM_VERSION_KNOWN_CODENAMES := \
+Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
+Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
+JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
+Q R S Sv2 Tiramisu UpsideDownCake
+
# Convert from space separated list to comma separated
PLATFORM_VERSION_KNOWN_CODENAMES := \
$(call normalize-comma-list,$(PLATFORM_VERSION_KNOWN_CODENAMES))
@@ -98,7 +104,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2022-04-05
+ PLATFORM_SECURITY_PATCH := 2022-07-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
diff --git a/core/version_util.mk b/core/version_util.mk
index 3a0d4b5..cbfef96 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -56,36 +56,34 @@
# unreleased API level targetable by this branch, not just those that are valid
# lunch targets for this branch.
+PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
ifndef PLATFORM_VERSION_CODENAME
- PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
- ifndef PLATFORM_VERSION_CODENAME
- # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
- PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
- endif
-
- # This is all of the *active* development codenames.
- # This confusing name is needed because
- # all_codenames has been baked into build.prop for ages.
- #
- # Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
- # list of additional codenames after PLATFORM_VERSION_CODENAME.
- PLATFORM_VERSION_ALL_CODENAMES :=
-
- # Build a list of all active code names. Avoid duplicates, and stop when we
- # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
- # that is not included in our build).
- _versions_in_target := \
- $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
- $(foreach version,$(_versions_in_target),\
- $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
- $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
- $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
-
- # And convert from space separated to comma separated.
- PLATFORM_VERSION_ALL_CODENAMES := \
- $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
-
+ # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
+ PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
endif
+
+# This is all of the *active* development codenames.
+# This confusing name is needed because
+# all_codenames has been baked into build.prop for ages.
+#
+# Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
+# list of additional codenames after PLATFORM_VERSION_CODENAME.
+PLATFORM_VERSION_ALL_CODENAMES :=
+
+# Build a list of all active code names. Avoid duplicates, and stop when we
+# reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
+# that is not included in our build).
+_versions_in_target := \
+ $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+$(foreach version,$(_versions_in_target),\
+ $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+ $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
+ $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
+
+# And convert from space separated to comma separated.
+PLATFORM_VERSION_ALL_CODENAMES := \
+ $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+
.KATI_READONLY := \
PLATFORM_VERSION_CODENAME \
PLATFORM_VERSION_ALL_CODENAMES
diff --git a/envsetup.sh b/envsetup.sh
index e7b8538..8856212 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -395,7 +395,7 @@
fi
local completion_files=(
- system/core/adb/adb.bash
+ packages/modules/adb/adb.bash
system/core/fastboot/fastboot.bash
tools/asuite/asuite.sh
)
@@ -403,8 +403,12 @@
# e.g.
# ENVSETUP_NO_COMPLETION=adb # -> disable adb completion
# ENVSETUP_NO_COMPLETION=adb:bit # -> disable adb and bit completion
+ local T=$(gettop)
for f in ${completion_files[*]}; do
- if [ -f "$f" ] && should_add_completion "$f"; then
+ f="$T/$f"
+ if [ ! -f "$f" ]; then
+ echo "Warning: completion file $f not found"
+ elif should_add_completion "$f"; then
. $f
fi
done
@@ -454,7 +458,7 @@
if $(echo "$1" | grep -q '^-') ; then
# Calls starting with a -- argument are passed directly and the function
# returns with the lunch.py exit code.
- build/make/orchestrator/core/lunch.py "$@"
+ build/build/make/orchestrator/core/lunch.py "$@"
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -465,7 +469,7 @@
fi
else
# All other calls go through the --lunch variant of lunch.py
- results=($(build/make/orchestrator/core/lunch.py --lunch "$@"))
+ results=($(build/build/make/orchestrator/core/lunch.py --lunch "$@"))
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -813,7 +817,9 @@
local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|arm64|x86_64)$' | xargs)"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
- local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|arm64|x86_64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+ local keys="$(echo $* | xargs -n 1 echo | \grep -E '^(devkeys)$' | xargs)"
+ local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|arm64|x86_64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi|devkeys)$' | xargs)"
+
if [ "$showHelp" != "" ]; then
$(gettop)/build/make/tapasHelp.sh
@@ -832,6 +838,10 @@
echo "tapas: Error: Multiple densities supplied: $density"
return
fi
+ if [ $(echo $keys | wc -w) -gt 1 ]; then
+ echo "tapas: Error: Multiple keys supplied: $keys"
+ return
+ fi
local product=aosp_arm
case $arch in
@@ -839,6 +849,10 @@
arm64) product=aosp_arm64;;
x86_64) product=aosp_x86_64;;
esac
+ if [ -n "$keys" ]; then
+ product=${product/aosp_/aosp_${keys}_}
+ fi;
+
if [ -z "$variant" ]; then
variant=eng
fi
@@ -876,7 +890,7 @@
fi
if [ -z "$product" ]; then
- product=arm
+ product=arm64
elif [ $(echo $product | wc -w) -gt 1 ]; then
echo "banchan: Error: Multiple build archs or products supplied: $products"
return
@@ -942,6 +956,34 @@
fi
}
+# TODO: Merge into gettop as part of launching multitree
+function multitree_gettop
+{
+ local TOPFILE=build/build/make/core/envsetup.mk
+ if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
+ # The following circumlocution ensures we remove symlinks from TOP.
+ (cd "$TOP"; PWD= /bin/pwd)
+ else
+ if [ -f $TOPFILE ] ; then
+ # The following circumlocution (repeated below as well) ensures
+ # that we record the true directory name and not one that is
+ # faked up with symlink names.
+ PWD= /bin/pwd
+ else
+ local HERE=$PWD
+ local T=
+ while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
+ \cd ..
+ T=`PWD= /bin/pwd -P`
+ done
+ \cd "$HERE"
+ if [ -f "$T/$TOPFILE" ]; then
+ echo "$T"
+ fi
+ fi
+ fi
+}
+
function croot()
{
local T=$(gettop)
@@ -1065,7 +1107,7 @@
return;
fi;
echo "Setting core limit for $PID to infinite...";
- adb shell /system/bin/ulimit -p $PID -c unlimited
+ adb shell /system/bin/ulimit -P $PID -c unlimited
}
# core - send SIGV and pull the core for process
@@ -1824,6 +1866,21 @@
_wrap_build $(get_make_command "$@") "$@"
}
+function _multitree_lunch_error()
+{
+ >&2 echo "Couldn't locate the top of the tree. Please run \'source build/envsetup.sh\' and multitree_lunch from the root of your workspace."
+}
+
+function multitree_build()
+{
+ if T="$(multitree_gettop)"; then
+ "$T/build/build/orchestrator/core/orchestrator.py" "$@"
+ else
+ _multitree_lunch_error
+ return 1
+ fi
+}
+
function provision()
{
if [ ! "$ANDROID_PRODUCT_OUT" ]; then
diff --git a/finalize_branch_for_release.sh b/finalize_branch_for_release.sh
new file mode 100755
index 0000000..12b096f
--- /dev/null
+++ b/finalize_branch_for_release.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main() {
+ local top="$(dirname "$0")"/../..
+
+ # default target to modify tree and build SDK
+ local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+ # This script is WIP and only finalizes part of the Android branch for release.
+ # The full process can be found at (INTERNAL) go/android-sdk-finalization.
+
+ # VNDK snapshot (TODO)
+ # SDK snapshots (TODO)
+ # Update references in the codebase to new API version (TODO)
+ # ...
+
+ AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api
+
+ # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
+ $m check-vndk-list || \
+ { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
+
+ # for now, we simulate the release state for AIDL, but in the future, we would want
+ # to actually turn the branch into the REL state and test with that
+ AIDL_FROZEN_REL=true $m nothing # test build
+
+ # Build SDK (TODO)
+ # lunch sdk...
+ # m ...
+}
+
+finalize_main
diff --git a/orchestrator/README b/orchestrator/README
new file mode 100644
index 0000000..9a1e302
--- /dev/null
+++ b/orchestrator/README
@@ -0,0 +1,8 @@
+DEMO
+
+from the root of the workspace
+
+multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
+
+rm -rf out && multitree_build && echo "==== Files ====" && find out -type f
+
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
new file mode 100644
index 0000000..d7abef7
--- /dev/null
+++ b/orchestrator/core/api_assembly.py
@@ -0,0 +1,156 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import json
+import os
+import sys
+
+import api_assembly_cc
+import ninja_tools
+
+
+ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
+
+def assemble_apis(context, inner_trees):
+ # Find all of the contributions from the inner tree
+ contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
+
+ # Load and validate the contribution files
+ # TODO: Check timestamps and skip unnecessary work
+ contributions = []
+ for tree_key, filenames in contribution_files_dict.items():
+ for filename in filenames:
+ json_data = load_contribution_file(context, filename)
+ if not json_data:
+ continue
+ # TODO: Validate the configs, especially that the domains match what we asked for
+ # from the lunch config.
+ contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
+
+ # Group contributions by language and API surface
+ stub_libraries = collate_contributions(contributions)
+
+ # Initialize the ninja file writer
+ with open(context.out.api_ninja_file(), "w") as ninja_file:
+ ninja = ninja_tools.Ninja(context, ninja_file)
+
+ # Initialize the build file writer
+ build_file = BuildFile() # TODO: parameters?
+
+ # Iterate through all of the stub libraries and generate rules to assemble them
+ # and Android.bp/BUILD files to make those available to inner trees.
+ # TODO: Parallelize? Skip unnecessary work?
+ for stub_library in stub_libraries:
+ STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
+
+ # TODO: Handle host_executables separately or as a StubLibrary language?
+
+ # Finish writing the ninja file
+ ninja.write()
+
+
+def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
+ "Scan an inner_tree's out dir for the api contribution files."
+ directory = inner_tree.out.api_contributions_dir()
+ result = []
+ with os.scandir(directory) as it:
+ for dirent in it:
+ if not dirent.is_file():
+ continue  # a non-file entry must not stop the scan
+ if dirent.name.endswith(".json"):
+ result.append(os.path.join(directory, dirent.name))
+ return result
+
+
+def load_contribution_file(context, filename):
+ "Load and return the API contribution at filename. On error report error and return None."
+ with open(filename) as f:
+ try:
+ return json.load(f)
+ except json.decoder.JSONDecodeError as ex:
+ # TODO: Error reporting
+ context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
+ raise ex
+
+
+class StubLibraryContribution(object):
+ def __init__(self, inner_tree, api_domain, library_contribution):
+ self.inner_tree = inner_tree
+ self.api_domain = api_domain
+ self.library_contribution = library_contribution
+
+
+class StubLibrary(object):
+ def __init__(self, language, api_surface, api_surface_version, name):
+ self.language = language
+ self.api_surface = api_surface
+ self.api_surface_version = api_surface_version
+ self.name = name
+ self.contributions = []
+
+ def add_contribution(self, contrib):
+ self.contributions.append(contrib)
+
+
+def collate_contributions(contributions):
+ """Take the list of parsed API contribution files, and group targets by API Surface, version,
+ language and library name, and return a StubLibrary object for each of those.
+ """
+ grouped = {}
+ for contribution in contributions:
+ for language in STUB_LANGUAGE_HANDLERS.keys():
+ for library in contribution.json_data.get(language, []):
+ key = (language, contribution.json_data["name"],
+ contribution.json_data["version"], library["name"])
+ stub_library = grouped.get(key)
+ if not stub_library:
+ stub_library = StubLibrary(language, contribution.json_data["name"],
+ contribution.json_data["version"], library["name"])
+ grouped[key] = stub_library
+ stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
+ contribution.json_data["api_domain"], library))
+ return list(grouped.values())
+
+
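To make the grouping key concrete, here is a hypothetical contribution file and the bucket its single library lands in (all names invented for illustration):

```
# Hypothetical contribution JSON, as returned by load_contribution_file().
json_data = {
    "name": "publicapi",        # API surface
    "version": 34,              # API surface version
    "api_domain": "system",
    "cc_libraries": [
        {"name": "libfoo", "headers": [], "api": "libfoo.map.txt"},
    ],
}
# collate_contributions() files that library under the key
# (language, api_surface, api_surface_version, library_name):
key = ("cc_libraries", "publicapi", 34, "libfoo")
```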
+def assemble_java_api_library(context, ninja, build_file, stub_library):
+ print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name))
+ for contrib in stub_library.contributions:
+ print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
+ # TODO: Implement me
+
+
+def assemble_resource_api_library(context, ninja, build_file, stub_library):
+ print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name))
+ for contrib in stub_library.contributions:
+ print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
+ # TODO: Implement me
+
+
+STUB_LANGUAGE_HANDLERS = {
+ "cc_libraries": api_assembly_cc.assemble_cc_api_library,
+ "java_libraries": assemble_java_api_library,
+ "resource_libraries": assemble_resource_api_library,
+}
+
+
+class BuildFile(object):
+ "Abstract generator for Android.bp files and BUILD files."
+ pass
+
+
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
new file mode 100644
index 0000000..ca9b2a4
--- /dev/null
+++ b/orchestrator/core/api_assembly_cc.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+def assemble_cc_api_library(context, ninja, build_file, stub_library):
+ staging_dir = context.out.api_library_dir(stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name)
+ work_dir = context.out.api_library_work_dir(stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name)
+
+ # Generate rules to copy headers
+ includes = []
+ include_dir = os.path.join(staging_dir, "include")
+ for contrib in stub_library.contributions:
+ for headers in contrib.library_contribution["headers"]:
+ root = headers["root"]
+ for file in headers["files"]:
+ # TODO: Deal with collisions of the same name from multiple contributions
+ include = os.path.join(include_dir, file)
+ ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
+ includes.append(include)
+
+ # Generate rule to run ndkstubgen
+
+
+ # Generate rule to compile stubs to library
+
+ # Generate phony rule to build the library
+ # TODO: This name probably conflicts with something
+ ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
+ stub_library.name)), includes)
+
+ # Generate build files
+
diff --git a/orchestrator/core/api_domain.py b/orchestrator/core/api_domain.py
new file mode 100644
index 0000000..bb7306c
--- /dev/null
+++ b/orchestrator/core/api_domain.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class ApiDomain(object):
+ def __init__(self, name, tree, product):
+ # Product will be null for modules
+ self.name = name
+ self.tree = tree
+ self.product = product
+
+ def __str__(self):
+ return "ApiDomain(name=\"%s\" tree.root=\"%s\" product=%s)" % (
+ self.name, self.tree.root,
+ "None" if self.product is None else "\"%s\"" % self.product)
+
diff --git a/orchestrator/core/api_export.py b/orchestrator/core/api_export.py
new file mode 100644
index 0000000..2f26b02
--- /dev/null
+++ b/orchestrator/core/api_export.py
@@ -0,0 +1,20 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def export_apis_from_tree(tree_key, inner_tree, cookie):
+ inner_tree.invoke(["export_api_contributions"])
+
+
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
new file mode 100644
index 0000000..03fe890
--- /dev/null
+++ b/orchestrator/core/final_packaging.py
@@ -0,0 +1,117 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import sys
+
+import ninja_tools
+import ninja_syntax # Has to be after ninja_tools because of the path hack
+
+def final_packaging(context, inner_trees):
+ """Pull together all of the previously defined rules into the final build stems."""
+
+ with open(context.out.outer_ninja_file(), "w") as ninja_file:
+ ninja = ninja_tools.Ninja(context, ninja_file)
+
+ # Add the api surfaces file
+ ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
+
+ # For each inner tree
+ for tree in inner_trees.keys():
+ # TODO: Verify that inner_tree.ninja was generated
+
+ # Read and verify file
+ build_targets = read_build_targets_json(context, tree)
+ if not build_targets:
+ continue
+
+ # Generate the ninja and build files for this inner tree
+ generate_cross_domain_build_rules(context, ninja, tree, build_targets)
+
+ # Finish writing the ninja file
+ ninja.write()
+
+
+def read_build_targets_json(context, tree):
+ """Read and validate the build_targets.json file for the given tree."""
+ try:
+ f = open(tree.out.build_targets_file())
+ except FileNotFoundError:
+ # It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
+ return None
+
+ data = None
+ with f:
+ try:
+ data = json.load(f)
+ except json.decoder.JSONDecodeError as ex:
+ sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
+ # TODO: Error reporting
+ raise ex
+
+ # TODO: Better error handling
+ # TODO: Validate json schema
+ return data
+
+
+def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
+ "Generate the ninja and build files for the inner tree."
+ # Include the inner tree's inner_tree.ninja
+ ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
+
+ # Generate module rules and files
+ for module in build_targets.get("modules", []):
+ generate_shared_module(context, ninja, tree, module)
+
+ # Generate staging rules
+ staging_dir = context.out.staging_dir()
+ for staged in build_targets.get("staging", []):
+ # TODO: Enforce that dest isn't in disallowed subdir of out or absolute
+ dest = staged["dest"]
+ dest = os.path.join(staging_dir, dest)
+ if "src" in staged and "obj" in staged:
+ context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
+ ) # TODO: Filename and line if possible
+ if "src" in staged:
+ ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
+ elif "obj" in staged:
+ ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
+ ninja.add_global_phony("staging", [dest])
+
+ # Generate dist rules
+ dist_dir = context.out.dist_dir()
+ for disted in build_targets.get("dist", []):
+ # TODO: Enforce that dest isn't absolute
+ dest = disted["dest"]
+ dest = os.path.join(dist_dir, dest)
+ ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
+ ninja.add_global_phony("dist", [dest])
+
+
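A hypothetical build_targets.json (shown as a Python dict) that would exercise the module, staging, and dist loops above:

```
# All names are invented for illustration.
build_targets = {
    "modules": [
        {"name": "com.android.foo", "type": "apex", "file": "out/foo.apex"},
    ],
    "staging": [
        {"dest": "system/bin/tool", "obj": "host/tool"},   # from the tree's out dir
        {"dest": "system/etc/cfg", "src": "configs/cfg"},  # from the tree's source
    ],
    "dist": [
        {"dest": "foo.apex", "src": "out/foo.apex"},
    ],
}
```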
+def generate_shared_module(context, ninja, tree, module):
+ """Generate ninja rules for the given build_targets.json defined module."""
+ module_name = module["name"]
+ module_type = module["type"]
+ share_dir = context.out.module_share_dir(module_type, module_name)
+ src_file = os.path.join(tree.root, module["file"])
+
+ if module_type == "apex":
+ ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
+ # TODO: Generate build file
+
+ else:
+ # TODO: Better error handling
+ raise Exception("Invalid module type: %s" % module)
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
new file mode 100644
index 0000000..d348ee7
--- /dev/null
+++ b/orchestrator/core/inner_tree.py
@@ -0,0 +1,193 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+import textwrap
+
+class InnerTreeKey(object):
+ """Trees are identified uniquely by their root and the TARGET_PRODUCT they will use to build.
+ If a single tree uses two different products, then we won't make assumptions about
+ them sharing _anything_.
+ TODO: This is true for soong. It's more likely that bazel could do analysis for two
+ products at the same time in a single tree, so there's an optimization there to do
+ eventually."""
+ def __init__(self, root, product):
+ self.root = root
+ self.product = product
+
+ def __str__(self):
+ return "TreeKey(root=%s product=%s)" % (enquote(self.root), enquote(self.product))
+
+ def __hash__(self):
+ return hash((self.root, self.product))
+
+ def _cmp(self, other):
+ if self.root < other.root:
+ return -1
+ if self.root > other.root:
+ return 1
+ if self.product == other.product:
+ return 0
+ if self.product is None:
+ return -1
+ if other.product is None:
+ return 1
+ if self.product < other.product:
+ return -1
+ return 1
+
+ def __eq__(self, other):
+ return self._cmp(other) == 0
+
+ def __ne__(self, other):
+ return self._cmp(other) != 0
+
+ def __lt__(self, other):
+ return self._cmp(other) < 0
+
+ def __le__(self, other):
+ return self._cmp(other) <= 0
+
+ def __gt__(self, other):
+ return self._cmp(other) > 0
+
+ def __ge__(self, other):
+ return self._cmp(other) >= 0
+
+
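The six rich-comparison methods above all funnel through `_cmp`. The same (root, product) ordering, with a `None` product sorting before any string, could be expressed more compactly with `functools.total_ordering`; an alternative sketch, not what this patch uses:

```
import functools

@functools.total_ordering
class TreeKeySketch:
    """Sketch only: same ordering as InnerTreeKey._cmp."""
    def __init__(self, root, product):
        self.root = root
        self.product = product

    def _key(self):
        # None sorts before any product string, matching _cmp above.
        has_product = self.product is not None
        return (self.root, has_product, self.product if has_product else "")

    def __eq__(self, other):
        return self._key() == other._key()

    def __lt__(self, other):
        return self._key() < other._key()

assert TreeKeySketch("/t", None) < TreeKeySketch("/t", "aosp_arm64")
```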
+class InnerTree(object):
+ def __init__(self, context, root, product):
+ """Initialize with the inner tree root (relative to the workspace root)"""
+ self.root = root
+ self.product = product
+ self.domains = {}
+ # TODO: Base directory on OUT_DIR
+ out_root = context.out.inner_tree_dir(root)
+ if product:
+ out_root += "_" + product
+ else:
+ out_root += "_unbundled"
+ self.out = OutDirLayout(out_root)
+
+ def __str__(self):
+ return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
+ enquote(self.product),
+ " ".join([enquote(d) for d in sorted(self.domains.keys())]))
+
+ def invoke(self, args):
+ """Call the inner tree command for this inner tree. Exits on failure."""
+ # TODO: Build time tracing
+
+ # Validate that there is a .inner_build command to run at the root of the tree
+ # so we can print a good error message
+ inner_build_tool = os.path.join(self.root, ".inner_build")
+ if not os.access(inner_build_tool, os.X_OK):
+ sys.stderr.write(("Unable to execute %s. Is there an inner tree or lunch combo"
+ + " misconfiguration?\n") % inner_build_tool)
+ sys.exit(1)
+
+ # TODO: This is where we should set up the shared trees
+
+ # Build the command
+ cmd = [inner_build_tool, "--out_dir", self.out.root()]
+ for domain_name in sorted(self.domains.keys()):
+ cmd.append("--api_domain")
+ cmd.append(domain_name)
+ cmd += args
+
+ # Run the command
+ process = subprocess.run(cmd, shell=False)
+
+ # TODO: Probably want better handling of inner tree failures
+ if process.returncode:
+ sys.stderr.write("Build error in inner tree: %s\nstopping multitree build.\n"
+ % self.root)
+ sys.exit(1)
+
+
+class InnerTrees(object):
+ def __init__(self, trees, domains):
+ self.trees = trees
+ self.domains = domains
+
+ def __str__(self):
+ "Return a debugging dump of this object"
+ return textwrap.dedent("""\
+ InnerTrees {
+ trees: [
+ %(trees)s
+ ]
+ domains: [
+ %(domains)s
+ ]
+ }""" % {
+ "trees": "\n ".join(sorted([str(t) for t in self.trees.values()])),
+ "domains": "\n ".join(sorted([str(d) for d in self.domains.values()])),
+ })
+
+
+ def for_each_tree(self, func, cookie=None):
+ """Call func for each of the inner trees once for each product that will be built in it.
+
+ The calls will be in a stable order.
+
+ Return a map of the InnerTreeKey to any results returned from func().
+ """
+ result = {}
+ for key in sorted(self.trees.keys()):
+ result[key] = func(key, self.trees[key], cookie)
+ return result
+
+
+ def get(self, tree_key):
+ """Get an inner tree for tree_key"""
+ return self.trees.get(tree_key)
+
+ def keys(self):
+ "Get the keys for the inner trees in name order."
+ return [self.trees[k] for k in sorted(self.trees.keys())]
+
+
+class OutDirLayout(object):
+ """Encapsulates the logic about the layout of the inner tree out directories.
+ See also context.OutDir for outer tree out dir contents."""
+
+ def __init__(self, root):
+ "Initialize with the root of the OUT_DIR for the inner tree."
+ self._root = root
+
+ def root(self):
+ return self._root
+
+ def tree_info_file(self):
+ return os.path.join(self._root, "tree_info.json")
+
+ def api_contributions_dir(self):
+ return os.path.join(self._root, "api_contributions")
+
+ def build_targets_file(self):
+ return os.path.join(self._root, "build_targets.json")
+
+ def main_ninja_file(self):
+ return os.path.join(self._root, "inner_tree.ninja")
+
+
+def enquote(s):
+ return "None" if s is None else "\"%s\"" % s
+
+
diff --git a/orchestrator/core/interrogate.py b/orchestrator/core/interrogate.py
new file mode 100644
index 0000000..9fe769e
--- /dev/null
+++ b/orchestrator/core/interrogate.py
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+
+def interrogate_tree(tree_key, inner_tree, cookie):
+ inner_tree.invoke(["describe"])
+
+ info_json_filename = inner_tree.out.tree_info_file()
+
+ # TODO: Error handling
+ with open(info_json_filename) as f:
+ info_json = json.load(f)
+
+ # TODO: Check orchestrator protocol
+
diff --git a/orchestrator/core/lunch.py b/orchestrator/core/lunch.py
index 35dac73..70a2d1d 100755
--- a/orchestrator/core/lunch.py
+++ b/orchestrator/core/lunch.py
@@ -24,8 +24,10 @@
EXIT_STATUS_ERROR = 1
EXIT_STATUS_NEED_HELP = 2
-def FindDirs(path, name, ttl=6):
- """Search at most ttl directories deep inside path for a directory called name."""
+
+def find_dirs(path, name, ttl=6):
+ """Search at most ttl directories deep inside path for a directory called name
+ and yield directories that match."""
# The dance with subdirs is so that we recurse in sorted order.
subdirs = []
with os.scandir(path) as it:
@@ -40,10 +42,10 @@
# Consume filesystem errors, e.g. too many links, permission etc.
pass
for subdir in subdirs:
- yield from FindDirs(os.path.join(path, subdir), name, ttl-1)
+ yield from find_dirs(os.path.join(path, subdir), name, ttl-1)
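`find_dirs` is a generator and bounds its recursion with `ttl`, so deep trees can't make the scan unbounded. Typical use, with a hypothetical workspace path:

```
# Yield every "multitree_combos" directory within 6 levels, in sorted
# traversal order (hypothetical path, for illustration only).
for combo_dir in find_dirs("/src/workspace/vendor", "multitree_combos"):
    print(combo_dir)
```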
-def WalkPaths(path, matcher, ttl=10):
+def walk_paths(path, matcher, ttl=10):
"""Do a traversal of all files under path yielding each file that matches
matcher."""
# First look for files, then recurse into directories as needed.
@@ -62,22 +64,28 @@
# Consume filesystem errors, e.g. too many links, permission etc.
pass
for subdir in sorted(subdirs):
- yield from WalkPaths(os.path.join(path, subdir), matcher, ttl-1)
+ yield from walk_paths(os.path.join(path, subdir), matcher, ttl-1)
-def FindFile(path, filename):
+def find_file(path, filename):
"""Return a file called filename inside path, no more than ttl levels deep.
Directories are searched alphabetically.
"""
- for f in WalkPaths(path, lambda x: x == filename):
+ for f in walk_paths(path, lambda x: x == filename):
return f
+# TODO: When orchestrator is in its own git project remove the "build" and "make" here
+class LunchContext(object):
+ """Mockable container for lunch"""
+ def __init__(self, workspace_root, orchestrator_path_prefix_components=["build", "build", "make"]):
+ self.workspace_root = workspace_root
+ self.orchestrator_path_prefix_components = orchestrator_path_prefix_components
-def FindConfigDirs(workspace_root):
+def find_config_dirs(context):
"""Find the configuration files in the well known locations inside workspace_root
- <workspace_root>/build/orchestrator/multitree_combos
+        <workspace_root>/<orchestrator_path_prefix_components>/orchestrator/multitree_combos
(AOSP devices, such as cuttlefish)
<workspace_root>/vendor/**/multitree_combos
@@ -89,29 +97,29 @@
Directories are returned specifically in this order, so that aosp can't be
overridden, but vendor overrides device.
"""
+ # TODO: This is not looking in inner trees correctly.
- # TODO: When orchestrator is in its own git project remove the "make/" here
- yield os.path.join(workspace_root, "build/make/orchestrator/multitree_combos")
+ yield os.path.join(context.workspace_root, *context.orchestrator_path_prefix_components, "orchestrator/multitree_combos")
dirs = ["vendor", "device"]
for d in dirs:
- yield from FindDirs(os.path.join(workspace_root, d), "multitree_combos")
+ yield from find_dirs(os.path.join(context.workspace_root, d), "multitree_combos")
-def FindNamedConfig(workspace_root, shortname):
- """Find the config with the given shortname inside workspace_root.
+def find_named_config(context, shortname):
+ """Find the config with the given shortname inside context.workspace_root.
- Config directories are searched in the order described in FindConfigDirs,
+ Config directories are searched in the order described in find_config_dirs,
and inside those directories, alphabetically."""
filename = shortname + ".mcombo"
- for config_dir in FindConfigDirs(workspace_root):
- found = FindFile(config_dir, filename)
+ for config_dir in find_config_dirs(context):
+ found = find_file(config_dir, filename)
if found:
return found
return None
-def ParseProductVariant(s):
+def parse_product_variant(s):
"""Split a PRODUCT-VARIANT name, or return None if it doesn't match that pattern."""
split = s.split("-")
if len(split) != 2:
@@ -119,15 +127,15 @@
return split
-def ChooseConfigFromArgs(workspace_root, args):
+def choose_config_from_args(context, args):
"""Return the config file we should use for the given argument,
or null if there's no file that matches that."""
if len(args) == 1:
# Prefer PRODUCT-VARIANT syntax so if there happens to be a matching
# file we don't match that.
- pv = ParseProductVariant(args[0])
+ pv = parse_product_variant(args[0])
if pv:
- config = FindNamedConfig(workspace_root, pv[0])
+ config = find_named_config(context, pv[0])
if config:
return (config, pv[1])
return None, None
@@ -139,10 +147,12 @@
class ConfigException(Exception):
+ ERROR_IDENTIFY = "identify"
ERROR_PARSE = "parse"
ERROR_CYCLE = "cycle"
+ ERROR_VALIDATE = "validate"
- def __init__(self, kind, message, locations, line=0):
+ def __init__(self, kind, message, locations=[], line=0):
"""Error thrown when loading and parsing configurations.
Args:
@@ -169,13 +179,13 @@
self.line = line
-def LoadConfig(filename):
+def load_config(filename):
"""Load a config, including processing the inherits fields.
Raises:
ConfigException on errors
"""
- def LoadAndMerge(fn, visited):
+ def load_and_merge(fn, visited):
with open(fn) as f:
try:
contents = json.load(f)
@@ -191,34 +201,74 @@
if parent in visited:
raise ConfigException(ConfigException.ERROR_CYCLE, "Cycle detected in inherits",
visited)
- DeepMerge(inherited_data, LoadAndMerge(parent, [parent,] + visited))
+ deep_merge(inherited_data, load_and_merge(parent, [parent,] + visited))
# Then merge inherited_data into contents, but what's already there will win.
- DeepMerge(contents, inherited_data)
+ deep_merge(contents, inherited_data)
contents.pop("inherits", None)
return contents
- return LoadAndMerge(filename, [filename,])
+ return load_and_merge(filename, [filename,])
-def DeepMerge(merged, addition):
+def deep_merge(merged, addition):
"""Merge all fields of addition into merged. Pre-existing fields win."""
for k, v in addition.items():
if k in merged:
if isinstance(v, dict) and isinstance(merged[k], dict):
- DeepMerge(merged[k], v)
+ deep_merge(merged[k], v)
else:
merged[k] = v
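+
+# A minimal sketch of deep_merge behavior: pre-existing fields win, and
+# nested dicts merge recursively.
+def _deep_merge_example():
+    merged = {"a": 1, "nested": {"x": 1}}
+    deep_merge(merged, {"a": 2, "b": 3, "nested": {"y": 4}})
+    assert merged == {"a": 1, "b": 3, "nested": {"x": 1, "y": 4}}
+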
-def Lunch(args):
+def make_config_header(config_file, config, variant):
+ def make_table(rows):
+ maxcols = max([len(row) for row in rows])
+ widths = [0] * maxcols
+ for row in rows:
+ for i in range(len(row)):
+ widths[i] = max(widths[i], len(row[i]))
+ text = []
+ for row in rows:
+ rowtext = []
+ for i in range(len(row)):
+ cell = row[i]
+ rowtext.append(str(cell))
+ rowtext.append(" " * (widths[i] - len(cell)))
+ rowtext.append(" ")
+ text.append("".join(rowtext))
+ return "\n".join(text)
+
+ trees = [("Component", "Path", "Product"),
+ ("---------", "----", "-------")]
+ def add_config_tuple(trees, entry, name):
+ if entry:
+ trees.append((name, entry.get("tree"), entry.get("product", "")))
+ add_config_tuple(trees, config.get("system"), "system")
+ add_config_tuple(trees, config.get("vendor"), "vendor")
+ for k, v in config.get("modules", {}).items():
+ add_config_tuple(trees, v, k)
+
+ return """========================================
+TARGET_BUILD_COMBO=%(TARGET_BUILD_COMBO)s
+TARGET_BUILD_VARIANT=%(TARGET_BUILD_VARIANT)s
+
+%(trees)s
+========================================\n""" % {
+ "TARGET_BUILD_COMBO": config_file,
+ "TARGET_BUILD_VARIANT": variant,
+ "trees": make_table(trees),
+ }
+
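+# A usage sketch for make_config_header; the combo file path, trees, and
+# products below are illustrative values only.
+def _config_header_example():
+    config = {
+        "system": {"tree": "aosp", "product": "aosp_cf_arm64_phone"},
+        "vendor": {"tree": "vendor_tree", "product": "cf_arm64_phone"},
+    }
+    return make_config_header("b.mcombo", config, "eng")
+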
+
+def do_lunch(args):
"""Handle the lunch command."""
- # Check that we're at the top of a multitree workspace
- # TODO: Choose the right sentinel file
- if not os.path.exists("build/make/orchestrator"):
+ # Check that we're at the top of a multitree workspace by seeing if this script exists.
+ if not os.path.exists("build/build/make/orchestrator/core/lunch.py"):
sys.stderr.write("ERROR: lunch.py must be run from the root of a multi-tree workspace\n")
return EXIT_STATUS_ERROR
# Choose the config file
- config_file, variant = ChooseConfigFromArgs(".", args)
+    lunch_context = LunchContext(".")
+    config_file, variant = choose_config_from_args(lunch_context, args)
if config_file == None:
sys.stderr.write("Can't find lunch combo file for: %s\n" % " ".join(args))
@@ -229,7 +279,7 @@
# Parse the config file
try:
- config = LoadConfig(config_file)
+ config = load_config(config_file)
except ConfigException as ex:
sys.stderr.write(str(ex))
return EXIT_STATUS_ERROR
@@ -244,47 +294,82 @@
sys.stdout.write("%s\n" % config_file)
sys.stdout.write("%s\n" % variant)
+ # Write confirmation message to stderr
+ sys.stderr.write(make_config_header(config_file, config, variant))
+
return EXIT_STATUS_OK
-def FindAllComboFiles(workspace_root):
+def find_all_combo_files(context):
"""Find all .mcombo files in the prescribed locations in the tree."""
- for dir in FindConfigDirs(workspace_root):
- for file in WalkPaths(dir, lambda x: x.endswith(".mcombo")):
+ for dir in find_config_dirs(context):
+ for file in walk_paths(dir, lambda x: x.endswith(".mcombo")):
yield file
-def IsFileLunchable(config_file):
+def is_file_lunchable(config_file):
"""Parse config_file, flatten the inheritance, and return whether it can be
used as a lunch target."""
try:
- config = LoadConfig(config_file)
+ config = load_config(config_file)
except ConfigException as ex:
sys.stderr.write("%s" % ex)
return False
return config.get("lunchable", False)
-def FindAllLunchable(workspace_root):
- """Find all mcombo files in the tree (rooted at workspace_root) that when
+def find_all_lunchable(context):
+ """Find all mcombo files in the tree (rooted at context.workspace_root) that when
parsed (and inheritance is flattened) have lunchable: true."""
- for f in [x for x in FindAllComboFiles(workspace_root) if IsFileLunchable(x)]:
+ for f in [x for x in find_all_combo_files(context) if is_file_lunchable(x)]:
yield f
-def List():
+def load_current_config():
+ """Load, validate and return the config as specified in TARGET_BUILD_COMBO. Throws
+ ConfigException if there is a problem."""
+
+ # Identify the config file
+ config_file = os.environ.get("TARGET_BUILD_COMBO")
+ if not config_file:
+ raise ConfigException(ConfigException.ERROR_IDENTIFY,
+ "TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.")
+
+ # Parse the config file
+ config = load_config(config_file)
+
+ # Validate the config file
+ if not config.get("lunchable", False):
+ raise ConfigException(ConfigException.ERROR_VALIDATE,
+ "Lunch config file (or inherited files) does not have the 'lunchable'"
+ + " flag set, which means it is probably not a complete lunch spec.",
+ [config_file,])
+
+ # TODO: Validate that:
+ # - there are no modules called system or vendor
+ # - everything has all the required files
+
+ variant = os.environ.get("TARGET_BUILD_VARIANT")
+ if not variant:
+ variant = "eng" # TODO: Is this the right default?
+ # Validate variant is user, userdebug or eng
+
+ return config_file, config, variant
+
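+# A caller-side sketch: orchestrator.py wraps load_current_config() in the
+# same try/except and reports the ConfigException text on stderr.
+def _load_current_config_example():
+    try:
+        return load_current_config()
+    except ConfigException as ex:
+        sys.stderr.write("%s\n" % ex)
+        return None
+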
+def do_list():
"""Handle the --list command."""
- for f in sorted(FindAllLunchable(".")):
+ lunch_context = LunchContext(".")
+ for f in sorted(find_all_lunchable(lunch_context)):
print(f)
-def Print(args):
+def do_print(args):
"""Handle the --print command."""
# Parse args
if len(args) == 0:
config_file = os.environ.get("TARGET_BUILD_COMBO")
if not config_file:
- sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.\n")
+ sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch before building.\n")
return EXIT_STATUS_NEED_HELP
elif len(args) == 1:
config_file = args[0]
@@ -293,7 +378,7 @@
# Parse the config file
try:
- config = LoadConfig(config_file)
+ config = load_config(config_file)
except ConfigException as ex:
sys.stderr.write(str(ex))
return EXIT_STATUS_ERROR
@@ -309,15 +394,15 @@
return EXIT_STATUS_NEED_HELP
if len(argv) == 2 and argv[1] == "--list":
- List()
+ do_list()
return EXIT_STATUS_OK
if len(argv) == 2 and argv[1] == "--print":
- return Print(argv[2:])
+ return do_print(argv[2:])
return EXIT_STATUS_OK
- if (len(argv) == 2 or len(argv) == 3) and argv[1] == "--lunch":
- return Lunch(argv[2:])
+ if (len(argv) == 3 or len(argv) == 4) and argv[1] == "--lunch":
+ return do_lunch(argv[2:])
sys.stderr.write("Unknown lunch command: %s\n" % " ".join(argv[1:]))
return EXIT_STATUS_NEED_HELP
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
new file mode 100644
index 0000000..ab81d66
--- /dev/null
+++ b/orchestrator/core/ninja_runner.py
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import sys
+
+def run_ninja(context, targets):
+    """Run ninja."""
+
+ # Construct the command
+ cmd = [
+ context.tools.ninja(),
+ "-f",
+ context.out.outer_ninja_file(),
+ ] + targets
+
+ # Run the command
+ process = subprocess.run(cmd, shell=False)
+
+ # TODO: Probably want better handling of inner tree failures
+ if process.returncode:
+        sys.stderr.write("Build error in outer tree.\nStopping multitree build.\n")
+ sys.exit(1)
+
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
new file mode 100644
index 0000000..16101ea
--- /dev/null
+++ b/orchestrator/core/ninja_tools.py
@@ -0,0 +1,59 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+# Workaround for python include path
+_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
+if _ninja_dir not in sys.path:
+ sys.path.append(_ninja_dir)
+import ninja_writer
+from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
+
+
+class Ninja(ninja_writer.Writer):
+ """Some higher level constructs on top of raw ninja writing.
+ TODO: Not sure where these should be."""
+ def __init__(self, context, file):
+ super(Ninja, self).__init__(file)
+ self._context = context
+ self._did_copy_file = False
+ self._phonies = {}
+
+ def add_copy_file(self, copy_to, copy_from):
+ if not self._did_copy_file:
+ self._did_copy_file = True
+ rule = Rule("copy_file")
+ rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
+ + " -f ${in} ${out}")
+ self.add_rule(rule)
+ build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
+ implicits=[self._context.tools.acp()])
+ build_action.add_variable("out_dir", os.path.dirname(copy_to))
+ self.add_build_action(build_action)
+
+ def add_global_phony(self, name, deps):
+ """Add a phony target where there are multiple places that will want to add to
+ the same phony. If you can, to save memory, use add_phony instead of this function."""
+ if type(deps) not in (list, tuple):
+ raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
+ self._phonies.setdefault(name, []).extend(deps)
+
+ def write(self):
+ for phony, deps in self._phonies.items():
+ self.add_phony(phony, deps)
+ super(Ninja, self).write()
+
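+# A usage sketch, assuming a utils.Context whose out dir already exists; the
+# staging paths are illustrative only.
+def _ninja_example(context):
+    with open(context.out.outer_ninja_file(), "w") as f:
+        ninja = Ninja(context, f)
+        ninja.add_copy_file("out/staging/foo", "inner/out/foo")
+        ninja.add_global_phony("staging", ["out/staging/foo"])
+        ninja.write()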
+
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
new file mode 100755
index 0000000..508f73a
--- /dev/null
+++ b/orchestrator/core/orchestrator.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+sys.dont_write_bytecode = True
+import api_assembly
+import api_domain
+import api_export
+import final_packaging
+import inner_tree
+import tree_analysis
+import interrogate
+import lunch
+import ninja_runner
+import utils
+
+EXIT_STATUS_OK = 0
+EXIT_STATUS_ERROR = 1
+
+API_DOMAIN_SYSTEM = "system"
+API_DOMAIN_VENDOR = "vendor"
+API_DOMAIN_MODULE = "module"
+
+def process_config(context, lunch_config):
+    """Returns an InnerTrees object based on the configuration requested in the lunch config."""
+ def add(domain_name, tree_root, product):
+ tree_key = inner_tree.InnerTreeKey(tree_root, product)
+ if tree_key in trees:
+ tree = trees[tree_key]
+ else:
+ tree = inner_tree.InnerTree(context, tree_root, product)
+ trees[tree_key] = tree
+ domain = api_domain.ApiDomain(domain_name, tree, product)
+ domains[domain_name] = domain
+ tree.domains[domain_name] = domain
+
+ trees = {}
+ domains = {}
+
+ system_entry = lunch_config.get("system")
+ if system_entry:
+ add(API_DOMAIN_SYSTEM, system_entry["tree"], system_entry["product"])
+
+ vendor_entry = lunch_config.get("vendor")
+ if vendor_entry:
+ add(API_DOMAIN_VENDOR, vendor_entry["tree"], vendor_entry["product"])
+
+    for module_name, module_entry in lunch_config.get("modules", {}).items():
+ add(module_name, module_entry["tree"], None)
+
+ return inner_tree.InnerTrees(trees, domains)
+
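+# An illustrative lunch config shape consumed by process_config; the field
+# names follow lunch.py, while the tree and product values are examples only.
+_EXAMPLE_LUNCH_CONFIG = {
+    "system": {"tree": "aosp", "product": "aosp_cf_arm64_phone"},
+    "vendor": {"tree": "vendor_tree", "product": "cf_arm64_phone"},
+    "modules": {"com.android.bionic": {"tree": "module_tree"}},
+}
+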
+
+def build():
+ # Choose the out directory, set up error handling, etc.
+ context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
+
+ # Read the lunch config file
+ try:
+ config_file, config, variant = lunch.load_current_config()
+ except lunch.ConfigException as ex:
+ sys.stderr.write("%s\n" % ex)
+ return EXIT_STATUS_ERROR
+ sys.stdout.write(lunch.make_config_header(config_file, config, variant))
+
+ # Construct the trees and domains dicts
+ inner_trees = process_config(context, config)
+
+ # 1. Interrogate the trees
+ inner_trees.for_each_tree(interrogate.interrogate_tree)
+ # TODO: Detect bazel-only mode
+
+ # 2a. API Export
+ inner_trees.for_each_tree(api_export.export_apis_from_tree)
+
+ # 2b. API Surface Assembly
+ api_assembly.assemble_apis(context, inner_trees)
+
+ # 3a. Inner tree analysis
+ tree_analysis.analyze_trees(context, inner_trees)
+
+ # 3b. Final Packaging Rules
+ final_packaging.final_packaging(context, inner_trees)
+
+ # 4. Build Execution
+ # TODO: Decide what we want the UX for selecting targets to be across
+ # branches... since there are very likely to be conflicting soong short
+ # names.
+ print("Running ninja...")
+ targets = ["staging", "system"]
+ ninja_runner.run_ninja(context, targets)
+
+ # Success!
+ return EXIT_STATUS_OK
+
+def main(argv):
+ return build()
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
+
+
+# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/test_lunch.py b/orchestrator/core/test_lunch.py
index 3c39493..5b890fb 100755
--- a/orchestrator/core/test_lunch.py
+++ b/orchestrator/core/test_lunch.py
@@ -20,76 +20,81 @@
sys.dont_write_bytecode = True
import lunch
+# Create a test LunchContext object
+# Test workspace is in test/configs
+# Orchestrator prefix inside it is build/make
+test_lunch_context = lunch.LunchContext("test/configs", ["build", "make"])
+
class TestStringMethods(unittest.TestCase):
def test_find_dirs(self):
- self.assertEqual([x for x in lunch.FindDirs("test/configs", "multitree_combos")], [
+ self.assertEqual([x for x in lunch.find_dirs("test/configs", "multitree_combos")], [
"test/configs/build/make/orchestrator/multitree_combos",
"test/configs/device/aa/bb/multitree_combos",
"test/configs/vendor/aa/bb/multitree_combos"])
def test_find_file(self):
# Finds the one in device first because this is searching from the root,
- # not using FindNamedConfig.
- self.assertEqual(lunch.FindFile("test/configs", "v.mcombo"),
+ # not using find_named_config.
+ self.assertEqual(lunch.find_file("test/configs", "v.mcombo"),
"test/configs/device/aa/bb/multitree_combos/v.mcombo")
def test_find_config_dirs(self):
- self.assertEqual([x for x in lunch.FindConfigDirs("test/configs")], [
+ self.assertEqual([x for x in lunch.find_config_dirs(test_lunch_context)], [
"test/configs/build/make/orchestrator/multitree_combos",
"test/configs/vendor/aa/bb/multitree_combos",
"test/configs/device/aa/bb/multitree_combos"])
def test_find_named_config(self):
# Inside build/orchestrator, overriding device and vendor
- self.assertEqual(lunch.FindNamedConfig("test/configs", "b"),
+ self.assertEqual(lunch.find_named_config(test_lunch_context, "b"),
"test/configs/build/make/orchestrator/multitree_combos/b.mcombo")
# Nested dir inside a combo dir
- self.assertEqual(lunch.FindNamedConfig("test/configs", "nested"),
+ self.assertEqual(lunch.find_named_config(test_lunch_context, "nested"),
"test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo")
# Inside vendor, overriding device
- self.assertEqual(lunch.FindNamedConfig("test/configs", "v"),
+ self.assertEqual(lunch.find_named_config(test_lunch_context, "v"),
"test/configs/vendor/aa/bb/multitree_combos/v.mcombo")
# Inside device
- self.assertEqual(lunch.FindNamedConfig("test/configs", "d"),
+ self.assertEqual(lunch.find_named_config(test_lunch_context, "d"),
"test/configs/device/aa/bb/multitree_combos/d.mcombo")
# Make sure we don't look too deep (for performance)
- self.assertIsNone(lunch.FindNamedConfig("test/configs", "too_deep"))
+ self.assertIsNone(lunch.find_named_config(test_lunch_context, "too_deep"))
def test_choose_config_file(self):
# Empty string argument
- self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", [""]),
+ self.assertEqual(lunch.choose_config_from_args(test_lunch_context, [""]),
(None, None))
# A PRODUCT-VARIANT name
- self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["v-eng"]),
+ self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["v-eng"]),
("test/configs/vendor/aa/bb/multitree_combos/v.mcombo", "eng"))
# A PRODUCT-VARIANT name that conflicts with a file
- self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["b-eng"]),
+ self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["b-eng"]),
("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
# A PRODUCT-VARIANT that doesn't exist
- self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["z-user"]),
+ self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["z-user"]),
(None, None))
# An explicit file
- self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+ self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
["test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"]),
("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
# An explicit file that doesn't exist
- self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+ self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
["test/configs/doesnt_exist.mcombo", "eng"]),
(None, None))
# An explicit file without a variant should fail
- self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+ self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"]),
("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", None))
@@ -97,12 +102,12 @@
def test_config_cycles(self):
# Test that we catch cycles
with self.assertRaises(lunch.ConfigException) as context:
- lunch.LoadConfig("test/configs/parsing/cycles/1.mcombo")
+ lunch.load_config("test/configs/parsing/cycles/1.mcombo")
self.assertEqual(context.exception.kind, lunch.ConfigException.ERROR_CYCLE)
def test_config_merge(self):
# Test the merge logic
- self.assertEqual(lunch.LoadConfig("test/configs/parsing/merge/1.mcombo"), {
+ self.assertEqual(lunch.load_config("test/configs/parsing/merge/1.mcombo"), {
"in_1": "1",
"in_1_2": "1",
"merged": {"merged_1": "1",
@@ -119,7 +124,7 @@
})
def test_list(self):
- self.assertEqual(sorted(lunch.FindAllLunchable("test/configs")),
+ self.assertEqual(sorted(lunch.find_all_lunchable(test_lunch_context)),
["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"])
if __name__ == "__main__":
diff --git a/orchestrator/core/tree_analysis.py b/orchestrator/core/tree_analysis.py
new file mode 100644
index 0000000..052cad6
--- /dev/null
+++ b/orchestrator/core/tree_analysis.py
@@ -0,0 +1,24 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def analyze_trees(context, inner_trees):
+ inner_trees.for_each_tree(run_analysis)
+
+def run_analysis(tree_key, inner_tree, cookie):
+ inner_tree.invoke(["analyze"])
+
+
+
+
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
new file mode 100644
index 0000000..41310e0
--- /dev/null
+++ b/orchestrator/core/utils.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import platform
+
+class Context(object):
+ """Mockable container for global state."""
+ def __init__(self, out_root, errors):
+ self.out = OutDir(out_root)
+ self.errors = errors
+ self.tools = HostTools()
+
+class TestContext(Context):
+ "Context for testing. The real Context is manually constructed in orchestrator.py."
+
+ def __init__(self, test_work_dir, test_name):
+        super(TestContext, self).__init__(os.path.join(test_work_dir, test_name),
+ Errors(None))
+
+
+class OutDir(object):
+ """Encapsulates the logic about the out directory at the outer-tree level.
+ See also inner_tree.OutDirLayout for inner tree out dir contents."""
+
+ def __init__(self, root):
+ "Initialize with the root of the OUT_DIR for the outer tree."
+ self._out_root = root
+ self._intermediates = "intermediates"
+
+ def root(self):
+ return self._out_root
+
+ def inner_tree_dir(self, tree_root):
+ """Root directory for inner tree inside the out dir."""
+ return os.path.join(self._out_root, "trees", tree_root)
+
+ def api_ninja_file(self):
+ """The ninja file that assembles API surfaces."""
+ return os.path.join(self._out_root, "api_surfaces.ninja")
+
+ def api_library_dir(self, surface, version, library):
+ """Directory for all the contents of a library inside an API surface, including
+ the build files. Any intermediates should go in api_library_work_dir."""
+ return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
+
+ def api_library_work_dir(self, surface, version, library):
+ """Intermediates / scratch directory for library inside an API surface."""
+ return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
+ str(version), library)
+
+ def outer_ninja_file(self):
+ return os.path.join(self._out_root, "multitree.ninja")
+
+ def module_share_dir(self, module_type, module_name):
+ return os.path.join(self._out_root, "shared", module_type, module_name)
+
+ def staging_dir(self):
+ return os.path.join(self._out_root, "staging")
+
+ def dist_dir(self):
+ "The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
+ return os.path.join(self._out_root, "dist")
+
+class Errors(object):
+ """Class for reporting and tracking errors."""
+ def __init__(self, stream):
+ """Initialize Error reporter with a file-like object."""
+ self._stream = stream
+ self._all = []
+
+ def error(self, message, file=None, line=None, col=None):
+ """Record the error message."""
+ s = ""
+ if file:
+ s += str(file)
+ s += ":"
+ if line:
+ s += str(line)
+ s += ":"
+ if col:
+ s += str(col)
+ s += ":"
+ if s:
+ s += " "
+ s += str(message)
+ if s[-1] != "\n":
+ s += "\n"
+ self._all.append(s)
+ if self._stream:
+ self._stream.write(s)
+
+ def had_error(self):
+ """Return if there were any errors reported."""
+ return len(self._all)
+
+ def get_errors(self):
+ """Get all errors that were reported."""
+ return self._all
+
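+# A usage sketch for Errors; passing None as the stream collects errors
+# without echoing them. The message and path are illustrative.
+def _errors_example():
+    errors = Errors(None)
+    errors.error("missing tree_info.json", file="out/trees/aosp/tree_info.json")
+    return errors.had_error()
+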
+
+class HostTools(object):
+ def __init__(self):
+ if platform.system() == "Linux":
+ self._arch = "linux-x86"
+ else:
+ raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
+
+ # Some of these are called a lot, so pre-compute the strings to save memory
+ self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
+ self._acp = os.path.join(self._prebuilts, "acp")
+ self._ninja = os.path.join(self._prebuilts, "ninja")
+
+ def acp(self):
+ return self._acp
+
+ def ninja(self):
+ return self._ninja
+
+
+def choose_out_dir():
+ """Get the root of the out dir, either from the environment or by picking
+ a default."""
+ result = os.environ.get("OUT_DIR")
+ if result:
+ return result
+ else:
+ return "out"
diff --git a/orchestrator/demo/buffet_helper.py b/orchestrator/demo/buffet_helper.py
new file mode 100644
index 0000000..fa29aeb
--- /dev/null
+++ b/orchestrator/demo/buffet_helper.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+import os
+import sys
+import yaml
+
+from hierarchy import parse_hierarchy
+
+
+def main():
+ if len(sys.argv) != 2:
+ print('usage: %s target' % sys.argv[0])
+ exit(1)
+
+ args = sys.argv[1].split('-')
+ if len(args) != 2:
+ print('target format: {target}-{variant}')
+ exit(1)
+
+ target, variant = args
+
+ if variant not in ['eng', 'user', 'userdebug']:
+ print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
+ variant)
+ exit(1)
+
+ build_top = os.getenv('BUFFET_BUILD_TOP')
+ if not build_top:
+    print('BUFFET_BUILD_TOP is not set; did you correctly run envsetup.sh?')
+ exit(1)
+
+ hierarchy_map = parse_hierarchy(build_top)
+
+ if target not in hierarchy_map:
+ raise RuntimeError(
+ "unknown target '%s': couldn't find the target. Supported targets are: %s"
+ % (target, list(hierarchy_map.keys())))
+
+ hierarchy = [target]
+ while hierarchy_map[hierarchy[-1]]:
+ hierarchy.append(hierarchy_map[hierarchy[-1]])
+
+ print('Target hierarchy for %s: %s' % (target, hierarchy))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/orchestrator/demo/build_helper.py b/orchestrator/demo/build_helper.py
new file mode 100644
index 0000000..c481f80
--- /dev/null
+++ b/orchestrator/demo/build_helper.py
@@ -0,0 +1,367 @@
+#!/usr/bin/env python3
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import copy
+import hierarchy
+import json
+import logging
+import filecmp
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import collect_metadata
+import utils
+
+BUILD_CMD_TO_ALL = (
+ 'clean',
+ 'installclean',
+ 'update-meta',
+)
+BUILD_ALL_EXEMPTION = (
+ 'art',
+)
+
+def get_supported_product(ctx, supported_products):
+ hierarchy_map = hierarchy.parse_hierarchy(ctx.build_top())
+ target = ctx.target_product()
+
+ while target not in supported_products:
+ if target not in hierarchy_map:
+ return None
+ target = hierarchy_map[target]
+ return target
+
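+# The walk-up idiom above in isolation, with an illustrative hierarchy map:
+# climb from a leaf target until a supported product is found.
+def _walk_up_example():
+  hierarchy_map = {'oriole': 'aosp_oriole', 'aosp_oriole': 'armv8',
+                   'armv8': 'aosp_arm64', 'aosp_arm64': None}
+  supported = {'aosp_arm64'}
+  target = 'oriole'
+  while target not in supported:
+    if target not in hierarchy_map:
+      return None
+    target = hierarchy_map[target]
+  return target  # 'aosp_arm64'
+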
+
+def parse_goals(ctx, metadata, goals):
+ """Parse goals and returns a map from each component to goals.
+
+ e.g.
+
+ "m main art timezone:foo timezone:bar" will return the following dict: {
+ "main": {"all"},
+ "art": {"all"},
+ "timezone": {"foo", "bar"},
+ }
+ """
+ # for now, goal should look like:
+ # {component} or {component}:{subgoal}
+
+ ret = collections.defaultdict(set)
+
+ for goal in goals:
+ # check if the command is for all components
+ if goal in BUILD_CMD_TO_ALL:
+ ret['all'].add(goal)
+ continue
+
+ # should be {component} or {component}:{subgoal}
+ try:
+ component, subgoal = goal.split(':') if ':' in goal else (goal, 'all')
+ except ValueError:
+ raise RuntimeError(
+ 'unknown goal: %s: should be {component} or {component}:{subgoal}' %
+ goal)
+ if component not in metadata:
+ raise RuntimeError('unknown goal: %s: component %s not found' %
+ (goal, component))
+ if not get_supported_product(ctx, metadata[component]['lunch_targets']):
+ raise RuntimeError("can't find matching target. Supported targets are: " +
+ str(metadata[component]['lunch_targets']))
+
+ ret[component].add(subgoal)
+
+ return ret
+
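+# The goal-splitting idiom used above, in isolation:
+# 'timezone:foo' -> ('timezone', 'foo'); 'art' -> ('art', 'all').
+def _split_goal_example(goal):
+  component, subgoal = goal.split(':') if ':' in goal else (goal, 'all')
+  return component, subgoal
+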
+
+def find_cycle(metadata):
+ """ Finds a cyclic dependency among components.
+
+ This is for debugging.
+ """
+ visited = set()
+ parent_node = dict()
+ in_stack = set()
+
+ # Returns a cycle if one is found
+ def dfs(node):
+ # visit_order[visit_time[node] - 1] == node
+ nonlocal visited, parent_node, in_stack
+
+ visited.add(node)
+ in_stack.add(node)
+ if 'deps' not in metadata[node]:
+ in_stack.remove(node)
+ return None
+ for next in metadata[node]['deps']:
+ # We found a cycle (next ~ node) if next is still in the stack
+ if next in in_stack:
+ cycle = [node]
+ while cycle[-1] != next:
+ cycle.append(parent_node[cycle[-1]])
+ return cycle
+
+ # Else, continue searching
+ if next in visited:
+ continue
+
+ parent_node[next] = node
+ result = dfs(next)
+ if result:
+ return result
+
+ in_stack.remove(node)
+ return None
+
+ for component in metadata:
+ if component in visited:
+ continue
+
+ result = dfs(component)
+ if result:
+ return result
+
+ return None
+
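+# A minimal sketch of find_cycle on a two-node cycle; the returned list walks
+# parent links back to the repeated node, e.g. ['B', 'A'] here.
+def _find_cycle_example():
+  metadata = {'A': {'deps': ['B']}, 'B': {'deps': ['A']}}
+  return find_cycle(metadata)
+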
+
+def topological_sort_components(metadata):
+ """ Performs topological sort on components.
+
+ If A depends on B, B appears first.
+ """
+ # If A depends on B, we want B to appear before A. But the graph in metadata
+ # is represented as A -> B (B in metadata[A]['deps']). So we sort in the
+ # reverse order, and then reverse the result again to get the desired order.
+ indegree = collections.defaultdict(int)
+ for component in metadata:
+ if 'deps' not in metadata[component]:
+ continue
+ for dep in metadata[component]['deps']:
+ indegree[dep] += 1
+
+ component_queue = collections.deque()
+ for component in metadata:
+ if indegree[component] == 0:
+ component_queue.append(component)
+
+ result = []
+ while component_queue:
+ component = component_queue.popleft()
+ result.append(component)
+ if 'deps' not in metadata[component]:
+ continue
+ for dep in metadata[component]['deps']:
+ indegree[dep] -= 1
+ if indegree[dep] == 0:
+ component_queue.append(dep)
+
+ # If topological sort fails, there must be a cycle.
+ if len(result) != len(metadata):
+ cycle = find_cycle(metadata)
+ raise RuntimeError('circular dependency found among metadata: %s' % cycle)
+
+ return result[::-1]
+
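+# A worked example: A depends on B and B depends on C, so the sort yields
+# ['C', 'B', 'A'] and dependencies build first.
+def _topological_sort_example():
+  metadata = {'A': {'deps': ['B']}, 'B': {'deps': ['C']}, 'C': {}}
+  assert topological_sort_components(metadata) == ['C', 'B', 'A']
+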
+
+def add_dependency_goals(ctx, metadata, component, goals):
+ """ Adds goals that given component depends on."""
+ # For now, let's just add "all"
+ # TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
+ if 'deps' not in metadata[component]:
+ return
+
+ for dep in metadata[component]['deps']:
+ goals[dep].add('all')
+
+
+def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
+  """ Analyzes the dependency graph among components, adds build commands for
+  dependencies, and then sorts the goals.
+
+ Returns a list of tuples: (component_name, set of subgoals).
+ Builds should be run in the list's order.
+ """
+ # TODO(inseob@): after topological sort, some components may be built in
+ # parallel.
+
+ topological_order = topological_sort_components(metadata)
+ combined_goals = copy.deepcopy(parsed_goals)
+
+ # Add build rules for each component's dependencies
+ # We do this in reverse order, so it can be transitive.
+ # e.g. if A depends on B and B depends on C, and we build A,
+ # C should also be built, in addition to B.
+ for component in topological_order[::-1]:
+ if component in combined_goals:
+ add_dependency_goals(ctx, metadata, component, combined_goals)
+
+ ret = []
+ for component in ['all'] + topological_order:
+ if component in combined_goals:
+ ret.append((component, combined_goals[component]))
+
+ return ret
+
+
+def run_build(ctx, metadata, component, subgoals):
+ build_cmd = metadata[component]['build_cmd']
+ out_dir = metadata[component]['out_dir']
+ default_goals = ''
+ if 'default_goals' in metadata[component]:
+ default_goals = metadata[component]['default_goals']
+
+ if 'all' in subgoals:
+ goal = default_goals
+ else:
+ goal = ' '.join(subgoals)
+
+ build_vars = ''
+ if 'update-meta' in subgoals:
+ build_vars = 'TARGET_MULTITREE_UPDATE_META=true'
+ # TODO(inseob@): shell escape
+ cmd = [
+ '/bin/bash', '-c',
+ 'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
+ (get_supported_product(ctx, metadata[component]['lunch_targets']),
+ ctx.target_build_variant(), build_vars, build_cmd, goal)
+ ]
+ logging.debug('cwd: ' + metadata[component]['path'])
+ logging.debug('running build: ' + str(cmd))
+
+ subprocess.run(cmd, cwd=metadata[component]['path'], check=True)
+
+
+def run_build_all(ctx, metadata, subgoals):
+ for component in metadata:
+ if component in BUILD_ALL_EXEMPTION:
+ continue
+ run_build(ctx, metadata, component, subgoals)
+
+
+def find_components(metadata, predicate):
+ for component in metadata:
+ if predicate(component):
+ yield component
+
+
+def import_filegroups(metadata, component, exporting_component, target_file_pairs):
+ imported_filegroup_dir = os.path.join(metadata[component]['path'], 'imported', exporting_component)
+
+ bp_content = ''
+ for name, outpaths in target_file_pairs:
+ bp_content += ('filegroup {{\n'
+ ' name: "{fname}",\n'
+ ' srcs: [\n'.format(fname=name))
+ for outpath in outpaths:
+ bp_content += ' "{outfile}",\n'.format(outfile=os.path.basename(outpath))
+ bp_content += (' ],\n'
+ '}\n')
+
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
+ fout.write(bp_content)
+ for _, outpaths in target_file_pairs:
+ for outpath in outpaths:
+ os.symlink(os.path.join(metadata[exporting_component]['path'], outpath),
+ os.path.join(tmp_dir, os.path.basename(outpath)))
+ cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
+ if os.path.exists(imported_filegroup_dir) and len(
+ cmp_result.left_only) + len(cmp_result.right_only) + len(
+ cmp_result.diff_files) == 0:
+ # Files are identical, it doesn't need to be written
+ logging.info(
+          'imported files exist and the contents are identical: {} -> {}'
+ .format(component, exporting_component))
+ continue
+ logging.info('creating symlinks for imported files: {} -> {}'.format(
+ component, exporting_component))
+ os.makedirs(imported_filegroup_dir, exist_ok=True)
+ shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
+ shutil.move(tmp_dir, imported_filegroup_dir)
+
+
+def prepare_build(metadata, component):
+ imported_dir = os.path.join(metadata[component]['path'], 'imported')
+ if utils.META_DEPS not in metadata[component]:
+ if os.path.exists(imported_dir):
+ logging.debug('remove {}'.format(imported_dir))
+ shutil.rmtree(imported_dir)
+ return
+
+ imported_components = set()
+ for exp_comp in metadata[component][utils.META_DEPS]:
+ if utils.META_FILEGROUP in metadata[component][utils.META_DEPS][exp_comp]:
+ filegroups = metadata[component][utils.META_DEPS][exp_comp][utils.META_FILEGROUP]
+ target_file_pairs = []
+ for name in filegroups:
+ target_file_pairs.append((name, filegroups[name]))
+ import_filegroups(metadata, component, exp_comp, target_file_pairs)
+ imported_components.add(exp_comp)
+
+ # Remove directories that are not generated this time.
+ if os.path.exists(imported_dir):
+ if len(imported_components) == 0:
+ shutil.rmtree(imported_dir)
+ else:
+ for remove_target in set(os.listdir(imported_dir)) - imported_components:
+ logging.info('remove unnecessary imported dir: {}'.format(remove_target))
+ shutil.rmtree(os.path.join(imported_dir, remove_target))
+
+
+def main():
+ utils.set_logging_config(logging.DEBUG)
+ ctx = utils.get_build_context()
+
+ logging.info('collecting metadata')
+
+ goals = sys.argv[1:]
+ if not goals:
+ logging.debug('empty goals. defaults to main')
+ goals = ['main']
+
+ logging.debug('goals: ' + str(goals))
+
+ # Force update the metadata for the 'update-meta' build
+ metadata_collector = collect_metadata.MetadataCollector(
+ ctx.components_top(), ctx.out_dir(),
+ collect_metadata.COMPONENT_METADATA_DIR,
+ collect_metadata.COMPONENT_METADATA_FILE,
+ force_update='update-meta' in goals)
+ metadata_collector.collect()
+
+ metadata = metadata_collector.get_metadata()
+ logging.debug('metadata: ' + str(metadata))
+
+ parsed_goals = parse_goals(ctx, metadata, goals)
+ logging.debug('parsed goals: ' + str(parsed_goals))
+
+ sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
+ logging.debug('sorted goals with deps: ' + str(sorted_goals))
+
+ for component, subgoals in sorted_goals:
+ if component == 'all':
+ run_build_all(ctx, metadata, subgoals)
+ continue
+ prepare_build(metadata, component)
+ run_build(ctx, metadata, component, subgoals)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/orchestrator/demo/collect_metadata.py b/orchestrator/demo/collect_metadata.py
new file mode 100755
index 0000000..148167d
--- /dev/null
+++ b/orchestrator/demo/collect_metadata.py
@@ -0,0 +1,428 @@
+#!/usr/bin/env python3
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import copy
+import json
+import logging
+import os
+import sys
+import yaml
+from collections import defaultdict
+from typing import (
+ List,
+ Set,
+)
+
+import utils
+
+# SKIP_COMPONENT_SEARCH = (
+# 'tools',
+# )
+COMPONENT_METADATA_DIR = '.repo'
+COMPONENT_METADATA_FILE = 'treeinfo.yaml'
+GENERATED_METADATA_FILE = 'metadata.json'
+COMBINED_METADATA_FILENAME = 'multitree_meta.json'
+
+
+class Dep(object):
+ def __init__(self, name, component, deps_type):
+ self.name = name
+ self.component = component
+ self.type = deps_type
+ self.out_paths = list()
+
+
+class ExportedDep(Dep):
+ def __init__(self, name, component, deps_type):
+ super().__init__(name, component, deps_type)
+
+ def setOutputPaths(self, output_paths: list):
+ self.out_paths = output_paths
+
+
+class ImportedDep(Dep):
+ required_type_map = {
+ # import type: (required type, get imported module list)
+ utils.META_FILEGROUP: (utils.META_MODULES, True),
+ }
+
+ def __init__(self, name, component, deps_type, import_map):
+ super().__init__(name, component, deps_type)
+ self.exported_deps: Set[ExportedDep] = set()
+ self.imported_modules: List[str] = list()
+ self.required_type = deps_type
+ get_imported_module = False
+ if deps_type in ImportedDep.required_type_map:
+ self.required_type, get_imported_module = ImportedDep.required_type_map[deps_type]
+ if get_imported_module:
+ self.imported_modules = import_map[name]
+ else:
+ self.imported_modules.append(name)
+
+ def verify_and_add(self, exported: ExportedDep):
+ if self.required_type != exported.type:
+ raise RuntimeError(
+          'The {comp} component imports {module} as {imp_type} but it is exported as {exp_type}.'
+ .format(comp=self.component, module=exported.name, imp_type=self.required_type, exp_type=exported.type))
+ self.exported_deps.add(exported)
+ self.out_paths.extend(exported.out_paths)
+ # Remove duplicates. We may not use set() which is not JSON serializable
+ self.out_paths = list(dict.fromkeys(self.out_paths))
+
+
+class MetadataCollector(object):
+ """Visit all component directories and collect the metadata from them.
+
+Example of metadata:
+==========
+build_cmd: m # build command for this component. 'm' if omitted
+out_dir: out # out dir of this component. 'out' if omitted
+exports:
+ libraries:
+ - name: libopenjdkjvm
+ - name: libopenjdkjvmd
+ build_cmd: mma # build command for libopenjdkjvmd if specified
+ out_dir: out/soong # out dir for libopenjdkjvmd if specified
+ - name: libctstiagent
+ APIs:
+ - api1
+ - api2
+imports:
+ libraries:
+ - lib1
+ - lib2
+ APIs:
+ - import_api1
+ - import_api2
+lunch_targets:
+ - arm64
+ - x86_64
+"""
+
+ def __init__(self, component_top, out_dir, meta_dir, meta_file, force_update=False):
+ if not os.path.exists(out_dir):
+ os.makedirs(out_dir)
+
+ self.__component_top = component_top
+ self.__out_dir = out_dir
+ self.__metadata_path = os.path.join(meta_dir, meta_file)
+ self.__combined_metadata_path = os.path.join(self.__out_dir,
+ COMBINED_METADATA_FILENAME)
+ self.__force_update = force_update
+
+ self.__metadata = dict()
+ self.__map_exports = dict()
+ self.__component_set = set()
+
+ def collect(self):
+ """ Read precomputed combined metadata from the json file.
+
+ If any components have updated their metadata, update the metadata
+ information and the json file.
+ """
+ timestamp = self.__restore_metadata()
+ if timestamp and os.path.getmtime(__file__) > timestamp:
+ logging.info('Update the metadata as the orchestrator has been changed')
+ self.__force_update = True
+ self.__collect_from_components(timestamp)
+
+ def get_metadata(self):
+ """ Returns collected metadata from all components"""
+ if not self.__metadata:
+ logging.warning('Metadata is empty')
+ return copy.deepcopy(self.__metadata)
+
+ def __collect_from_components(self, timestamp):
+ """ Read metadata from all components
+
+ If any components have newer metadata files or are removed, update the
+ combined metadata.
+ """
+ metadata_updated = False
+ for component in os.listdir(self.__component_top):
+ # if component in SKIP_COMPONENT_SEARCH:
+ # continue
+ if self.__read_component_metadata(timestamp, component):
+ metadata_updated = True
+ if self.__read_generated_metadata(timestamp, component):
+ metadata_updated = True
+
+ deleted_components = set()
+ for meta in self.__metadata:
+ if meta not in self.__component_set:
+ logging.info('Component {} is removed'.format(meta))
+ deleted_components.add(meta)
+ metadata_updated = True
+ for meta in deleted_components:
+ del self.__metadata[meta]
+
+ if metadata_updated:
+ self.__update_dependencies()
+ self.__store_metadata()
+ logging.info('Metadata updated')
+
+ def __read_component_metadata(self, timestamp, component):
+ """ Search for the metadata file from a component.
+
+ If the metadata is modified, read the file and update the metadata.
+ """
+ component_path = os.path.join(self.__component_top, component)
+ metadata_file = os.path.join(component_path, self.__metadata_path)
+ logging.info(
+ 'Reading a metadata file from {} component ...'.format(component))
+ if not os.path.isfile(metadata_file):
+ logging.warning('Metadata file {} not found!'.format(metadata_file))
+ return False
+
+ self.__component_set.add(component)
+ if not self.__force_update and timestamp and timestamp > os.path.getmtime(metadata_file):
+ logging.info('... yaml not changed. Skip')
+ return False
+
+ with open(metadata_file) as f:
+ meta = yaml.load(f, Loader=yaml.SafeLoader)
+
+ meta['path'] = component_path
+ if utils.META_BUILDCMD not in meta:
+ meta[utils.META_BUILDCMD] = utils.DEFAULT_BUILDCMD
+ if utils.META_OUTDIR not in meta:
+ meta[utils.META_OUTDIR] = utils.DEFAULT_OUTDIR
+
+ if utils.META_IMPORTS not in meta:
+ meta[utils.META_IMPORTS] = defaultdict(dict)
+ if utils.META_EXPORTS not in meta:
+ meta[utils.META_EXPORTS] = defaultdict(dict)
+
+ self.__metadata[component] = meta
+ return True
+
+ def __read_generated_metadata(self, timestamp, component):
+    """ Read the metadata generated by the 'update-meta' build command of the soong build system.
+
+    Soong generates metadata describing the imported/exported modules and files.
+    The build orchestrator reads this generated metadata to collect the dependency information.
+
+ Generated metadata has the following format:
+ {
+ "Imported": {
+ "FileGroups": {
+ "<name_of_filegroup>": [
+ "<exported_module_name>",
+ ...
+ ],
+ ...
+ }
+ }
+ "Exported": {
+ "<exported_module_name>": [
+ "<output_file_path>",
+ ...
+ ],
+ ...
+ }
+ }
+ """
+ if component not in self.__component_set:
+ # skip reading generated metadata if the component metadata file was missing
+ return False
+ component_out = os.path.join(self.__component_top, component, self.__metadata[component][utils.META_OUTDIR])
+ generated_metadata_file = os.path.join(component_out, 'soong', 'multitree', GENERATED_METADATA_FILE)
+ if not os.path.isfile(generated_metadata_file):
+      logging.info('... Soong did not generate the metadata file. Skip')
+ return False
+ if not self.__force_update and timestamp and timestamp > os.path.getmtime(generated_metadata_file):
+ logging.info('... Soong generated metadata not changed. Skip')
+ return False
+
+ with open(generated_metadata_file, 'r') as gen_meta_json:
+ try:
+ gen_metadata = json.load(gen_meta_json)
+ except json.decoder.JSONDecodeError:
+ logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
+ generated_metadata_file))
+ return False
+
+ if utils.SOONG_IMPORTED in gen_metadata:
+ imported = gen_metadata[utils.SOONG_IMPORTED]
+ if utils.SOONG_IMPORTED_FILEGROUPS in imported:
+ self.__metadata[component][utils.META_IMPORTS][utils.META_FILEGROUP] = imported[utils.SOONG_IMPORTED_FILEGROUPS]
+ if utils.SOONG_EXPORTED in gen_metadata:
+ self.__metadata[component][utils.META_EXPORTS][utils.META_MODULES] = gen_metadata[utils.SOONG_EXPORTED]
+
+ return True
+
+ def __update_export_map(self):
+    """ Read metadata of all components and update the export map.
+
+    'libraries' and 'APIs' are special exported types that are provided manually
+    in the .yaml metadata files. These need to be replaced with the implementations
+    in the soong-generated metadata.
+    The export type 'modules' is generated by the soong build system from modules
+    with the 'export: true' property. This export type includes a dictionary with
+    module names as keys and their output files as values. These output files will
+    be used as prebuilt sources when generating the imported modules.
+ """
+ self.__map_exports = dict()
+ for comp in self.__metadata:
+ if utils.META_EXPORTS not in self.__metadata[comp]:
+ continue
+ exports = self.__metadata[comp][utils.META_EXPORTS]
+
+ for export_type in exports:
+ for module in exports[export_type]:
+ if export_type == utils.META_LIBS:
+ name = module[utils.META_LIB_NAME]
+ else:
+ name = module
+
+ if name in self.__map_exports:
+ raise RuntimeError(
+ 'Exported libs conflict!!!: "{name}" in the {comp} component is already exported by the {prev} component.'
+ .format(name=name, comp=comp, prev=self.__map_exports[name][utils.EXP_COMPONENT]))
+ exported_deps = ExportedDep(name, comp, export_type)
+ if export_type == utils.META_MODULES:
+ exported_deps.setOutputPaths(exports[export_type][module])
+ self.__map_exports[name] = exported_deps
+
+ def __verify_and_add_dependencies(self, component):
+ """ Search all imported items from the export_map.
+
+    If any imported item is not provided by another component, report an error.
+    Otherwise, record the component dependency and copy the exported information
+    into the import map.
+ """
+ def verify_and_add_dependencies(imported_dep: ImportedDep):
+ for module in imported_dep.imported_modules:
+ if module not in self.__map_exports:
+ raise RuntimeError(
+            'Imported item not found!!!: Imported module "{module}" in the {comp} component is not exported by any other component.'
+ .format(module=module, comp=imported_dep.component))
+ imported_dep.verify_and_add(self.__map_exports[module])
+
+ deps = self.__metadata[component][utils.META_DEPS]
+ exp_comp = self.__map_exports[module].component
+ if exp_comp not in deps:
+ deps[exp_comp] = defaultdict(defaultdict)
+ deps[exp_comp][imported_dep.type][imported_dep.name] = imported_dep.out_paths
+
+ self.__metadata[component][utils.META_DEPS] = defaultdict()
+ imports = self.__metadata[component][utils.META_IMPORTS]
+ for import_type in imports:
+ for module in imports[import_type]:
+ verify_and_add_dependencies(ImportedDep(module, component, import_type, imports[import_type]))
+
+ def __check_imports(self):
+ """ Search the export map to find the component to import libraries or APIs.
+
+ Update the 'deps' field that includes the dependent components.
+ """
+ for component in self.__metadata:
+ self.__verify_and_add_dependencies(component)
+ if utils.META_DEPS in self.__metadata[component]:
+ logging.debug('{comp} depends on {list} components'.format(
+ comp=component, list=self.__metadata[component][utils.META_DEPS]))
+
+ def __update_dependencies(self):
+ """ Generate a dependency graph for the components
+
+ Update __map_exports and the dependency graph with the maps.
+ """
+ self.__update_export_map()
+ self.__check_imports()
+
+ def __store_metadata(self):
+ """ Store the __metadata dictionary as json format"""
+ with open(self.__combined_metadata_path, 'w') as json_file:
+ json.dump(self.__metadata, json_file, indent=2)
+
+ def __restore_metadata(self):
+    """ Read the stored json file and return the timestamp of the
+    combined metadata file.
+    """
+ if not os.path.exists(self.__combined_metadata_path):
+ return None
+
+ with open(self.__combined_metadata_path, 'r') as json_file:
+ try:
+ self.__metadata = json.load(json_file)
+ except json.decoder.JSONDecodeError:
+ logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
+ self.__combined_metadata_path))
+ return None
+
+ logging.info('Metadata restored from {}'.format(
+ self.__combined_metadata_path))
+ self.__update_export_map()
+ return os.path.getmtime(self.__combined_metadata_path)
+
+
+def get_args():
+
+ def check_dir(path):
+ if os.path.exists(path) and os.path.isdir(path):
+ return os.path.normpath(path)
+ else:
+ raise argparse.ArgumentTypeError('\"{}\" is not a directory'.format(path))
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--component-top',
+ help='Scan all components under this directory.',
+ default=os.path.join(os.path.dirname(__file__), '../../../components'),
+ type=check_dir)
+ parser.add_argument(
+ '--meta-file',
+ help='Name of the metadata file.',
+ default=COMPONENT_METADATA_FILE,
+ type=str)
+ parser.add_argument(
+ '--meta-dir',
+ help='Each component has the metadata in this directory.',
+ default=COMPONENT_METADATA_DIR,
+ type=str)
+ parser.add_argument(
+ '--out-dir',
+ help='Out dir for the outer tree. The orchestrator stores the collected metadata in this directory.',
+ default=os.path.join(os.path.dirname(__file__), '../../../out'),
+ type=os.path.normpath)
+ parser.add_argument(
+ '--force',
+ '-f',
+ action='store_true',
+ help='Force to collect metadata',
+ )
+ parser.add_argument(
+ '--verbose',
+ '-v',
+ help='Increase output verbosity, e.g. "-v", "-vv".',
+ action='count',
+ default=0)
+ return parser.parse_args()
+
+
+def main():
+ args = get_args()
+ utils.set_logging_config(args.verbose)
+
+ metadata_collector = MetadataCollector(args.component_top, args.out_dir,
+ args.meta_dir, args.meta_file, args.force)
+ metadata_collector.collect()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/orchestrator/demo/envsetup.sh b/orchestrator/demo/envsetup.sh
new file mode 100644
index 0000000..902a37c
--- /dev/null
+++ b/orchestrator/demo/envsetup.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+function buffet()
+{
+ local product variant selection
+ if [[ $# -ne 1 ]]; then
+ echo "usage: buffet [target]" >&2
+ return 1
+ fi
+
+ selection=$1
+ product=${selection%%-*} # Trim everything after first dash
+ variant=${selection#*-} # Trim everything up to first dash
+
+ if [ -z "$product" ]
+ then
+ echo
+ echo "Invalid lunch combo: $selection"
+ return 1
+ fi
+
+ if [ -z "$variant" ]
+ then
+ if [[ "$product" =~ .*_(eng|user|userdebug) ]]
+ then
+ echo "Did you mean -${product/*_/}? (dash instead of underscore)"
+ fi
+ return 1
+ fi
+
+ BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py $1 || return 1
+
+ export BUFFET_BUILD_TOP=$(pwd)
+ export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
+ export BUFFET_TARGET_PRODUCT=$product
+ export BUFFET_TARGET_BUILD_VARIANT=$variant
+ export BUFFET_TARGET_BUILD_TYPE=release
+}
+
+function m()
+{
+ if [ -z "$BUFFET_BUILD_TOP" ]
+ then
+ echo "Run \"buffet [target]\" first"
+ return 1
+ fi
+ python3 $BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py "$@"
+}
diff --git a/orchestrator/demo/hierarchy.py b/orchestrator/demo/hierarchy.py
new file mode 100644
index 0000000..ae1825c
--- /dev/null
+++ b/orchestrator/demo/hierarchy.py
@@ -0,0 +1,79 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import yaml
+
+
+def parse_hierarchy(build_top):
+ """Parse build hierarchy file from given build top directory, and returns a dict from child targets to parent targets.
+
+ Example of hierarchy file:
+ ==========
+ aosp_arm64:
+ - armv8
+ - aosp_cf_arm64_phone
+
+ armv8:
+ - aosp_oriole
+ - aosp_sunfish
+
+ aosp_oriole:
+ - oriole
+
+ aosp_sunfish:
+ - sunfish
+
+ oriole:
+ # leaf
+
+ sunfish:
+ # leaf
+ ==========
+
+ If we parse this yaml, we get a dict looking like:
+
+ {
+ "sunfish": "aosp_sunfish",
+ "oriole": "aosp_oriole",
+ "aosp_oriole": "armv8",
+ "aosp_sunfish": "armv8",
+ "armv8": "aosp_arm64",
+ "aosp_cf_arm64_phone": "aosp_arm64",
+ "aosp_arm64": None, # no parent
+ }
+ """
+ metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
+ if not os.path.isfile(metadata_path):
+ raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)
+
+ with open(metadata_path, 'r') as f:
+ hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)
+
+ hierarchy_map = dict()
+
+ for parent_target, child_targets in hierarchy_yaml.items():
+ if not child_targets:
+ # leaf
+ continue
+ for child_target in child_targets:
+ hierarchy_map[child_target] = parent_target
+
+ for parent_target in hierarchy_yaml:
+ # targets with no parent
+ if parent_target not in hierarchy_map:
+ hierarchy_map[parent_target] = None
+
+ return hierarchy_map
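+
+# Illustrative sketch (not part of this change): with the child-to-parent map
+# above, walking a target's ancestry is a simple loop.
+#
+#   def ancestors(hierarchy_map, target):
+#       chain = []
+#       parent = hierarchy_map.get(target)
+#       while parent is not None:
+#           chain.append(parent)
+#           parent = hierarchy_map.get(parent)
+#       return chain
+#
+#   # Using the docstring example: ancestors(m, "oriole")
+#   # returns ["aosp_oriole", "armv8", "aosp_arm64"].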
diff --git a/orchestrator/demo/hierarchy.yaml b/orchestrator/demo/hierarchy.yaml
new file mode 100644
index 0000000..cc6de4d
--- /dev/null
+++ b/orchestrator/demo/hierarchy.yaml
@@ -0,0 +1,37 @@
+# hierarchy of targets
+
+aosp_arm64:
+- armv8
+- aosp_cf_arm64_phone
+
+armv8:
+- mainline_modules_arm64
+
+mainline_modules_arm64:
+- aosp_oriole
+- aosp_sunfish
+- aosp_raven
+
+aosp_oriole:
+- oriole
+
+aosp_sunfish:
+- sunfish
+
+aosp_raven:
+- raven
+
+oriole:
+# leaf
+
+sunfish:
+# leaf
+
+raven:
+# leaf
+
+aosp_cf_arm64_phone:
+- cf_arm64_phone
+
+cf_arm64_phone:
+# leaf
diff --git a/orchestrator/demo/utils.py b/orchestrator/demo/utils.py
new file mode 100644
index 0000000..5dbbe4a
--- /dev/null
+++ b/orchestrator/demo/utils.py
@@ -0,0 +1,89 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+import os
+
+# default build configuration for each component
+DEFAULT_BUILDCMD = 'm'
+DEFAULT_OUTDIR = 'out'
+
+# yaml fields
+META_BUILDCMD = 'build_cmd'
+META_OUTDIR = 'out_dir'
+META_EXPORTS = 'exports'
+META_IMPORTS = 'imports'
+META_TARGETS = 'lunch_targets'
+META_DEPS = 'deps'
+# fields under 'exports' and 'imports'
+META_LIBS = 'libraries'
+META_APIS = 'APIs'
+META_FILEGROUP = 'filegroup'
+META_MODULES = 'modules'
+# fields under 'libraries'
+META_LIB_NAME = 'name'
+
+# fields for generated metadata file
+SOONG_IMPORTED = 'Imported'
+SOONG_IMPORTED_FILEGROUPS = 'FileGroups'
+SOONG_EXPORTED = 'Exported'
+
+# export map items
+EXP_COMPONENT = 'component'
+EXP_TYPE = 'type'
+EXP_OUTPATHS = 'outpaths'
+
+class BuildContext:
+
+ def __init__(self):
+ self._build_top = os.getenv('BUFFET_BUILD_TOP')
+ self._components_top = os.getenv('BUFFET_COMPONENTS_TOP')
+ self._target_product = os.getenv('BUFFET_TARGET_PRODUCT')
+ self._target_build_variant = os.getenv('BUFFET_TARGET_BUILD_VARIANT')
+ self._target_build_type = os.getenv('BUFFET_TARGET_BUILD_TYPE')
+ self._out_dir = os.path.join(self._build_top, 'out')
+
+ if not self._build_top:
+ raise RuntimeError("Can't find root. Did you run buffet?")
+
+ def build_top(self):
+ return self._build_top
+
+ def components_top(self):
+ return self._components_top
+
+ def target_product(self):
+ return self._target_product
+
+ def target_build_variant(self):
+ return self._target_build_variant
+
+ def target_build_type(self):
+ return self._target_build_type
+
+ def out_dir(self):
+ return self._out_dir
+
+
+def get_build_context():
+ return BuildContext()
+
+
+def set_logging_config(verbose_level):
+ verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
+ verbosity = min(verbose_level, 2)
+ logging.basicConfig(
+ format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
+ level=verbose_map[verbosity])
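+
+# Example (illustrative): verbosity counts map to log levels, capped at DEBUG.
+#   set_logging_config(0)  # WARNING (default)
+#   set_logging_config(1)  # INFO  (-v)
+#   set_logging_config(2)  # DEBUG (-vv or more)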
diff --git a/orchestrator/inner_build/common.py b/orchestrator/inner_build/common.py
new file mode 100644
index 0000000..382844b
--- /dev/null
+++ b/orchestrator/inner_build/common.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+
+def _parse_arguments(argv):
+    """Return an argparse options object."""
+    argv = argv[1:]
+ # Top-level parser
+ parser = argparse.ArgumentParser(prog=".inner_build")
+
+ parser.add_argument("--out_dir", action="store", required=True,
+ help="root of the output directory for this inner tree's API contributions")
+
+ parser.add_argument("--api_domain", action="append", required=True,
+ help="which API domains are to be built in this inner tree")
+
+ subparsers = parser.add_subparsers(required=True, dest="command",
+ help="subcommands")
+
+ # inner_build describe command
+ describe_parser = subparsers.add_parser("describe",
+ help="describe the capabilities of this inner tree's build system")
+
+    # inner_build export_api_contributions command
+    export_parser = subparsers.add_parser("export_api_contributions",
+            help="export the API contributions of this inner tree")
+
+    # inner_build analyze command
+    analyze_parser = subparsers.add_parser("analyze",
+            help="main build analysis for this inner tree")
+
+ # Parse the arguments
+ return parser.parse_args(argv)
+
+
+class Commands(object):
+ def Run(self, argv):
+ """Parse the command arguments and call the corresponding subcommand method on
+ this object.
+
+        Raises AttributeError if the method for the command wasn't found.
+ """
+ args = _parse_arguments(argv)
+ return getattr(self, args.command)(args)
+
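+# Illustrative sketch (not part of this change): an inner build participates by
+# subclassing Commands with one method per subcommand; Run() dispatches to the
+# method whose name matches args.command.
+#
+#   class MyInnerBuild(Commands):
+#       def describe(self, args): ...
+#       def export_api_contributions(self, args): ...
+#       def analyze(self, args): ...
+#
+#   sys.exit(MyInnerBuild().Run(sys.argv))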
diff --git a/orchestrator/inner_build/inner_build_demo.py b/orchestrator/inner_build/inner_build_demo.py
new file mode 100755
index 0000000..264739b
--- /dev/null
+++ b/orchestrator/inner_build/inner_build_demo.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import textwrap
+
+sys.dont_write_bytecode = True
+import common
+
+def mkdirs(path):
+ try:
+ os.makedirs(path)
+ except FileExistsError:
+ pass
+
+
+class InnerBuildSoong(common.Commands):
+ def describe(self, args):
+ mkdirs(args.out_dir)
+
+ with open(os.path.join(args.out_dir, "tree_info.json"), "w") as f:
+ f.write(textwrap.dedent("""\
+ {
+ "requires_ninja": true,
+ "orchestrator_protocol_version": 1
+ }"""))
+
+ def export_api_contributions(self, args):
+ contributions_dir = os.path.join(args.out_dir, "api_contributions")
+ mkdirs(contributions_dir)
+
+ if "system" in args.api_domain:
+ with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
+ # 'name: android' is android.jar
+ f.write(textwrap.dedent("""\
+ {
+ "name": "api_a",
+ "version": 1,
+ "api_domain": "system",
+ "cc_libraries": [
+ {
+ "name": "libhello1",
+ "headers": [
+ {
+ "root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+ "files": [
+ "hello1.h"
+ ]
+ }
+ ],
+ "api": [
+ "build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
+ ]
+ }
+ ]
+ }"""))
+
+ def analyze(self, args):
+ if "system" in args.api_domain:
+ # Nothing to export in this demo
+ # Write a fake inner_tree.ninja; what the inner tree would have generated
+ with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
+                # TODO: Note that this uses paths relative to the workspace, not the
+                # inner tree, for demo purposes until we get the ninja chdir change in.
+ f.write(textwrap.dedent("""\
+ rule compile_c
+ command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
+ rule link_so
+ command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
+ build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
+ out_dir = %(OUT_DIR)s/libhello1
+ cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
+ build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
+ out_dir = %(OUT_DIR)s/libhello1
+ build system: phony %(OUT_DIR)s/libhello1/libhello1.so
+ """ % { "OUT_DIR": args.out_dir }))
+ with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
+ f.write(textwrap.dedent("""\
+ {
+ "staging": [
+ {
+ "dest": "staging/system/lib/libhello1.so",
+ "obj": "libhello1/libhello1.so"
+ }
+ ]
+ }""" % { "OUT_DIR": args.out_dir }))
+
+def main(argv):
+ return InnerBuildSoong().Run(argv)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
+
+
+# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/inner_build/inner_build_soong.py b/orchestrator/inner_build/inner_build_soong.py
new file mode 100755
index 0000000..a653dcc
--- /dev/null
+++ b/orchestrator/inner_build/inner_build_soong.py
@@ -0,0 +1,37 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+sys.dont_write_bytecode = True
+import common
+
+class InnerBuildSoong(common.Commands):
+ def describe(self, args):
+ pass
+
+
+ def export_api_contributions(self, args):
+ pass
+
+
+def main(argv):
+ return InnerBuildSoong().Run(argv)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
diff --git a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo b/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
new file mode 100644
index 0000000..0790226
--- /dev/null
+++ b/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
@@ -0,0 +1,16 @@
+{
+ "lunchable": true,
+ "system": {
+ "tree": "master",
+ "product": "aosp_cf_arm64_phone"
+ },
+ "vendor": {
+ "tree": "master",
+ "product": "aosp_cf_arm64_phone"
+ },
+ "modules": {
+ "com.android.bionic": {
+ "tree": "sc-mainline-prod"
+ }
+ }
+}
diff --git a/orchestrator/ninja/ninja_syntax.py b/orchestrator/ninja/ninja_syntax.py
new file mode 100644
index 0000000..df97b68
--- /dev/null
+++ b/orchestrator/ninja/ninja_syntax.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC, abstractmethod
+
+from collections.abc import Iterator
+from typing import List, Optional
+
+TAB = " "
+
+class Node(ABC):
+    '''An abstract class that can be serialized to a ninja file.
+    All other ninja-serializable classes inherit from this class.'''
+
+ @abstractmethod
+ def stream(self) -> Iterator[str]:
+ pass
+
+class Variable(Node):
+ '''A ninja variable that can be reused across build actions
+ https://ninja-build.org/manual.html#_variables'''
+
+ def __init__(self, name:str, value:str, indent=0):
+ self.name = name
+ self.value = value
+ self.indent = indent
+
+ def stream(self) -> Iterator[str]:
+ indent = TAB * self.indent
+ yield f"{indent}{self.name} = {self.value}"
+
+class RuleException(Exception):
+ pass
+
+# Ninja rules recognize a limited set of variables
+# https://ninja-build.org/manual.html#ref_rule
+# Keep this list sorted
+RULE_VARIABLES = ["command",
+ "depfile",
+ "deps",
+ "description",
+ "dyndep",
+ "generator",
+ "msvc_deps_prefix",
+ "restat",
+ "rspfile",
+ "rspfile_content"]
+
+class Rule(Node):
+ '''A shorthand for a command line that can be reused
+ https://ninja-build.org/manual.html#_rules'''
+
+ def __init__(self, name:str):
+ self.name = name
+ self.variables = []
+
+ def add_variable(self, name: str, value: str):
+ if name not in RULE_VARIABLES:
+ raise RuleException(f"{name} is not a recognized variable in a ninja rule")
+
+ self.variables.append(Variable(name=name, value=value, indent=1))
+
+ def stream(self) -> Iterator[str]:
+ self._validate_rule()
+
+ yield f"rule {self.name}"
+ # Yield rule variables sorted by `name`
+ for var in sorted(self.variables, key=lambda x: x.name):
+ # variables yield a single item, next() is sufficient
+ yield next(var.stream())
+
+ def _validate_rule(self):
+ # command is a required variable in a ninja rule
+ self._assert_variable_is_not_empty(variable_name="command")
+
+ def _assert_variable_is_not_empty(self, variable_name: str):
+ if not any(var.name == variable_name for var in self.variables):
+ raise RuleException(f"{variable_name} is required in a ninja rule")
+
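+# Example (illustrative) of the text Rule.stream() yields for a complete rule:
+#
+#   rule cc
+#     command = gcc $cflags -c $in -o $out
+#
+# Rule variables are emitted one per line, sorted by name and indented by TAB.
+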
+class BuildActionException(Exception):
+ pass
+
+class BuildAction(Node):
+ '''Describes the dependency edge between inputs and output
+ https://ninja-build.org/manual.html#_build_statements'''
+
+    def __init__(self, output: str, rule: str, inputs: Optional[List[str]] = None, implicits: Optional[List[str]] = None, order_only: Optional[List[str]] = None):
+ self.output = output
+ self.rule = rule
+ self.inputs = self._as_list(inputs)
+ self.implicits = self._as_list(implicits)
+ self.order_only = self._as_list(order_only)
+ self.variables = []
+
+ def add_variable(self, name: str, value: str):
+ '''Variables limited to the scope of this build action'''
+ self.variables.append(Variable(name=name, value=value, indent=1))
+
+ def stream(self) -> Iterator[str]:
+ self._validate()
+
+ build_statement = f"build {self.output}: {self.rule}"
+ if len(self.inputs) > 0:
+ build_statement += " "
+ build_statement += " ".join(self.inputs)
+ if len(self.implicits) > 0:
+ build_statement += " | "
+ build_statement += " ".join(self.implicits)
+ if len(self.order_only) > 0:
+ build_statement += " || "
+ build_statement += " ".join(self.order_only)
+ yield build_statement
+ # Yield variables sorted by `name`
+ for var in sorted(self.variables, key=lambda x: x.name):
+ # variables yield a single item, next() is sufficient
+ yield next(var.stream())
+
+ def _validate(self):
+ if not self.output:
+ raise BuildActionException("Output is required in a ninja build statement")
+ if not self.rule:
+ raise BuildActionException("Rule is required in a ninja build statement")
+
+ def _as_list(self, list_like):
+ if list_like is None:
+ return []
+ if isinstance(list_like, list):
+ return list_like
+ return [list_like]
+
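+# Example (illustrative) of a serialized build statement using every edge type:
+#
+#   build out.o: cc in.c | gen.h || tools_dir
+#
+# Explicit inputs follow the rule name, "|" introduces implicit dependencies,
+# and "||" introduces order-only dependencies, per the ninja manual.
+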
+class Pool(Node):
+ '''https://ninja-build.org/manual.html#ref_pool'''
+
+ def __init__(self, name: str, depth: int):
+ self.name = name
+ self.depth = Variable(name="depth", value=depth, indent=1)
+
+ def stream(self) -> Iterator[str]:
+ yield f"pool {self.name}"
+ yield next(self.depth.stream())
+
+class Subninja(Node):
+
+ def __init__(self, subninja: str, chDir: str):
+ self.subninja = subninja
+ self.chDir = chDir
+
+ # TODO(spandandas): Update the syntax when aosp/2064612 lands
+ def stream(self) -> Iterator[str]:
+ yield f"subninja {self.subninja}"
+
+class Line(Node):
+ '''Generic class that can be used for comments/newlines/default_target etc'''
+
+ def __init__(self, value:str):
+ self.value = value
+
+ def stream(self) -> Iterator[str]:
+ yield self.value
diff --git a/orchestrator/ninja/ninja_writer.py b/orchestrator/ninja/ninja_writer.py
new file mode 100644
index 0000000..9e80b4b
--- /dev/null
+++ b/orchestrator/ninja/ninja_writer.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
+
+# TODO: Format the output according to a configurable width variable
+# This will ensure that the generated content fits on a screen and does not
+# require horizontal scrolling
+class Writer:
+
+ def __init__(self, file):
+ self.file = file
+        self.nodes = []  # list of Node objects
+
+ def add_variable(self, variable: Variable):
+ self.nodes.append(variable)
+
+ def add_rule(self, rule: Rule):
+ self.nodes.append(rule)
+
+ def add_build_action(self, build_action: BuildAction):
+ self.nodes.append(build_action)
+
+ def add_pool(self, pool: Pool):
+ self.nodes.append(pool)
+
+ def add_comment(self, comment: str):
+ self.nodes.append(Line(value=f"# {comment}"))
+
+ def add_default(self, default: str):
+ self.nodes.append(Line(value=f"default {default}"))
+
+ def add_newline(self):
+ self.nodes.append(Line(value=""))
+
+ def add_subninja(self, subninja: Subninja):
+ self.nodes.append(subninja)
+
+ def add_phony(self, name, deps):
+ build_action = BuildAction(name, "phony", inputs=deps)
+ self.add_build_action(build_action)
+
+ def write(self):
+ for node in self.nodes:
+ for line in node.stream():
+ print(line, file=self.file)
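+
+# Minimal usage sketch (illustrative; mirrors the unit tests that follow):
+#
+#   with open("build.ninja", "w") as f:
+#       writer = Writer(f)
+#       cc = Rule(name="cc")
+#       cc.add_variable(name="command", value="gcc $cflags -c $in -o $out")
+#       writer.add_rule(cc)
+#       writer.add_build_action(BuildAction(output="foo.o", rule="cc", inputs=["foo.c"]))
+#       writer.write()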
diff --git a/orchestrator/ninja/test_ninja_syntax.py b/orchestrator/ninja/test_ninja_syntax.py
new file mode 100644
index 0000000..d922fd2
--- /dev/null
+++ b/orchestrator/ninja/test_ninja_syntax.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from ninja_syntax import Variable, Rule, RuleException, BuildAction, BuildActionException, Pool
+
+class TestVariable(unittest.TestCase):
+
+ def test_assignment(self):
+ variable = Variable(name="key", value="value")
+ self.assertEqual("key = value", next(variable.stream()))
+ variable = Variable(name="key", value="value with spaces")
+ self.assertEqual("key = value with spaces", next(variable.stream()))
+ variable = Variable(name="key", value="$some_other_variable")
+ self.assertEqual("key = $some_other_variable", next(variable.stream()))
+
+ def test_indentation(self):
+ variable = Variable(name="key", value="value", indent=0)
+ self.assertEqual("key = value", next(variable.stream()))
+ variable = Variable(name="key", value="value", indent=1)
+ self.assertEqual(" key = value", next(variable.stream()))
+
+class TestRule(unittest.TestCase):
+
+ def test_rulename_comes_first(self):
+ rule = Rule(name="myrule")
+ rule.add_variable("command", "/bin/bash echo")
+ self.assertEqual("rule myrule", next(rule.stream()))
+
+ def test_command_is_a_required_variable(self):
+ rule = Rule(name="myrule")
+ with self.assertRaises(RuleException):
+ next(rule.stream())
+
+ def test_bad_rule_variable(self):
+ rule = Rule(name="myrule")
+ with self.assertRaises(RuleException):
+ rule.add_variable(name="unrecognize_rule_variable", value="value")
+
+ def test_rule_variables_are_indented(self):
+ rule = Rule(name="myrule")
+ rule.add_variable("command", "/bin/bash echo")
+ stream = rule.stream()
+ self.assertEqual("rule myrule", next(stream)) # top-level rule should not be indented
+ self.assertEqual(" command = /bin/bash echo", next(stream))
+
+ def test_rule_variables_are_sorted(self):
+ rule = Rule(name="myrule")
+ rule.add_variable("description", "Adding description before command")
+ rule.add_variable("command", "/bin/bash echo")
+ stream = rule.stream()
+ self.assertEqual("rule myrule", next(stream)) # rule always comes first
+ self.assertEqual(" command = /bin/bash echo", next(stream))
+ self.assertEqual(" description = Adding description before command", next(stream))
+
+class TestBuildAction(unittest.TestCase):
+
+ def test_no_inputs(self):
+ build = BuildAction(output="out", rule="phony")
+ stream = build.stream()
+ self.assertEqual("build out: phony", next(stream))
+ # Empty output
+ build = BuildAction(output="", rule="phony")
+ with self.assertRaises(BuildActionException):
+ next(build.stream())
+ # Empty rule
+ build = BuildAction(output="out", rule="")
+ with self.assertRaises(BuildActionException):
+ next(build.stream())
+
+ def test_inputs(self):
+ build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
+ self.assertEqual("build out: cat input1 input2", next(build.stream()))
+ build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"], implicits=["implicits1", "implicits2"], order_only=["order_only1", "order_only2"])
+ self.assertEqual("build out: cat input1 input2 | implicits1 implicits2 || order_only1 order_only2", next(build.stream()))
+
+ def test_variables(self):
+ build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
+ build.add_variable(name="myvar", value="myval")
+ stream = build.stream()
+ next(stream)
+ self.assertEqual(" myvar = myval", next(stream))
+
+class TestPool(unittest.TestCase):
+
+ def test_pool(self):
+ pool = Pool(name="mypool", depth=10)
+ stream = pool.stream()
+ self.assertEqual("pool mypool", next(stream))
+ self.assertEqual(" depth = 10", next(stream))
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/orchestrator/ninja/test_ninja_writer.py b/orchestrator/ninja/test_ninja_writer.py
new file mode 100644
index 0000000..703dd4d
--- /dev/null
+++ b/orchestrator/ninja/test_ninja_writer.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from io import StringIO
+
+from ninja_writer import Writer
+from ninja_syntax import Variable, Rule, BuildAction
+
+class TestWriter(unittest.TestCase):
+
+ def test_simple_writer(self):
+ with StringIO() as f:
+ writer = Writer(f)
+ writer.add_variable(Variable(name="cflags", value="-Wall"))
+ writer.add_newline()
+ cc = Rule(name="cc")
+ cc.add_variable(name="command", value="gcc $cflags -c $in -o $out")
+ writer.add_rule(cc)
+ writer.add_newline()
+ build_action = BuildAction(output="foo.o", rule="cc", inputs=["foo.c"])
+ writer.add_build_action(build_action)
+ writer.write()
+ self.assertEqual('''cflags = -Wall
+
+rule cc
+ command = gcc $cflags -c $in -o $out
+
+build foo.o: cc foo.c
+''', f.getvalue())
+
+ def test_comment(self):
+ with StringIO() as f:
+ writer = Writer(f)
+ writer.add_comment("This is a comment in a ninja file")
+ writer.write()
+ self.assertEqual("# This is a comment in a ninja file\n", f.getvalue())
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/orchestrator/test_workspace/combo.mcombo b/orchestrator/test_workspace/combo.mcombo
new file mode 100644
index 0000000..8200dc0
--- /dev/null
+++ b/orchestrator/test_workspace/combo.mcombo
@@ -0,0 +1,17 @@
+{
+ "lunchable": true,
+ "system": {
+ "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+ "product": "test_product1"
+ },
+ "vendor": {
+ "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+ "product": "test_product2"
+ },
+ "modules": {
+ "module_1": {
+ "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
+ }
+ }
+}
+
diff --git a/orchestrator/test_workspace/inner_tree_1/.inner_build b/orchestrator/test_workspace/inner_tree_1/.inner_build
new file mode 120000
index 0000000..d8f235f
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/.inner_build
@@ -0,0 +1 @@
+../../inner_build/inner_build_demo.py
\ No newline at end of file
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
new file mode 100644
index 0000000..1415082
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
@@ -0,0 +1,8 @@
+#include <stdio.h>
+
+#include "hello1.h"
+
+void hello1(void) {
+ printf("hello1");
+}
+
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
new file mode 100644
index 0000000..0309c1c
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
@@ -0,0 +1,4 @@
+#pragma once
+
+extern "C" void hello1(void);
+
diff --git a/rbesetup.sh b/rbesetup.sh
index 3b0e7cf..8386628 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -33,20 +33,15 @@
# This function prefixes the given command with appropriate variables needed
# for the build to be executed with RBE.
function use_rbe() {
- local RBE_LOG_DIR="/tmp"
local RBE_BINARIES_DIR="prebuilts/remoteexecution-client/latest"
local DOCKER_IMAGE="gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62"
# Do not set an invocation-ID and let reproxy auto-generate one.
USE_RBE="true" \
- FLAG_server_address="unix:///tmp/reproxy_$RANDOM.sock" \
FLAG_exec_root="$(gettop)" \
FLAG_platform="container-image=docker://${DOCKER_IMAGE}" \
RBE_use_application_default_credentials="true" \
- RBE_log_dir="${RBE_LOG_DIR}" \
RBE_reproxy_wait_seconds="20" \
- RBE_output_dir="${RBE_LOG_DIR}" \
- RBE_log_path="text://${RBE_LOG_DIR}/reproxy_log.txt" \
RBE_CXX_EXEC_STRATEGY="remote_local_fallback" \
RBE_cpp_dependency_scanner_plugin="${RBE_BINARIES_DIR}/dependency_scanner_go_plugin.so" \
RBE_DIR=${RBE_BINARIES_DIR} \
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 0f46130..7cb5f2c 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
cd ../..
TOP="${PWD}"
-message='usage: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user] [devkeys]
tapas selects individual apps to be built by the Android build system. Unlike
"lunch", "tapas" does not request the building of images for a device.
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 45cd515..53714a8 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -38,6 +38,12 @@
# updating the last seen rollback index in the tamper-evident storage.
BOARD_AVB_ROLLBACK_INDEX := 0
+# The chained vbmeta settings for boot images.
+BOARD_AVB_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+BOARD_AVB_BOOT_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_BOOT_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+BOARD_AVB_BOOT_ROLLBACK_INDEX_LOCATION := 2
+
# Enable AVB chained partition for system.
# https://android.googlesource.com/platform/external/avb/+/master/README.md
BOARD_AVB_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
diff --git a/target/board/linux_bionic/BoardConfig.mk b/target/board/linux_bionic/BoardConfig.mk
new file mode 100644
index 0000000..7938bdb
--- /dev/null
+++ b/target/board/linux_bionic/BoardConfig.mk
@@ -0,0 +1,24 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This "device" is only intended to be used for host Bionic build targets, so
+# (device) target architectures are irrelevant. However, the build system isn't
+# prepared to handle no target architectures at all, so pick something
+# arbitrarily.
+TARGET_ARCH_SUITE := ndk
+
+HOST_CROSS_OS := linux_bionic
+HOST_CROSS_ARCH := x86_64
+HOST_CROSS_2ND_ARCH :=
diff --git a/target/board/linux_bionic/README.md b/target/board/linux_bionic/README.md
new file mode 100644
index 0000000..8db77f2
--- /dev/null
+++ b/target/board/linux_bionic/README.md
@@ -0,0 +1,6 @@
+This "device" is suitable for Soong-only builds to create Bionic binaries for
+Linux hosts:
+
+```
+build/soong/soong_ui.bash --make-mode --soong-only TARGET_PRODUCT=linux_bionic ...
+```
diff --git a/target/board/mainline_sdk/BoardConfig.mk b/target/board/mainline_sdk/BoardConfig.mk
index 84f8b2d..f5c2dc6 100644
--- a/target/board/mainline_sdk/BoardConfig.mk
+++ b/target/board/mainline_sdk/BoardConfig.mk
@@ -18,3 +18,6 @@
HOST_CROSS_OS := linux_bionic
HOST_CROSS_ARCH := x86_64
HOST_CROSS_2ND_ARCH :=
+
+# Required flag for non-64-bit devices since Android P.
+TARGET_USES_64_BIT_BINDER := true
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index ee702e5..67b0b17 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -74,6 +74,7 @@
endif
PRODUCT_MAKEFILES += \
+ $(LOCAL_DIR)/linux_bionic.mk \
$(LOCAL_DIR)/mainline_sdk.mk \
$(LOCAL_DIR)/module_arm.mk \
$(LOCAL_DIR)/module_arm64.mk \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 05ddfe5..90a2577 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -24,7 +24,7 @@
android.hidl.manager-V1.0-java \
android.hidl.memory@1.0-impl \
android.hidl.memory@1.0-impl.vendor \
- android.system.suspend@1.0-service \
+ android.system.suspend-service \
android.test.base \
android.test.mock \
android.test.runner \
@@ -271,7 +271,6 @@
traced \
traced_probes \
tune2fs \
- tzdatacheck \
uiautomator \
uinput \
uncrypt \
@@ -341,7 +340,6 @@
sqlite3 \
tinyplay \
tune2fs \
- tzdatacheck \
unwind_info \
unwind_reg_info \
unwind_symbols \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 851a2cb..5695803 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -76,7 +76,11 @@
com.android.media:service-media-s \
com.android.permission:service-permission \
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+# Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
+# art project.
+ifneq (,$(wildcard art))
+ PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+endif
# List of jars on the platform that system_server loads dynamically using separate classloaders.
# Keep the list sorted by library name.
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index fb0370e..c7dcd60 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -22,10 +22,7 @@
# Ramdisk
PRODUCT_PACKAGES += \
init_first_stage \
- e2fsck.ramdisk \
- fsck.f2fs.ramdisk \
- tune2fs.ramdisk \
- snapuserd.ramdisk \
+ snapuserd_ramdisk \
# Debug ramdisk
PRODUCT_PACKAGES += \
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index f9c1f3d..f33e626 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -29,14 +29,13 @@
VNDK-SP: android.hardware.graphics.mapper@3.0.so
VNDK-SP: android.hardware.graphics.mapper@4.0.so
VNDK-SP: android.hardware.renderscript@1.0.so
-VNDK-SP: android.hidl.safe_union@1.0.so
VNDK-SP: android.hidl.memory.token@1.0.so
VNDK-SP: android.hidl.memory@1.0-impl.so
VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
VNDK-SP: libRSCpuRef.so
VNDK-SP: libRSDriver.so
VNDK-SP: libRS_internal.so
-VNDK-SP: libbacktrace.so
VNDK-SP: libbase.so
VNDK-SP: libbcinfo.so
VNDK-SP: libblas.so
@@ -56,59 +55,25 @@
VNDK-SP: libutils.so
VNDK-SP: libutilscallstack.so
VNDK-SP: libz.so
-VNDK-core: android.hardware.audio.common-V1-ndk.so
VNDK-core: android.hardware.audio.common@2.0.so
-VNDK-core: android.hardware.authsecret-V1-ndk.so
-VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk.so
-VNDK-core: android.hardware.bluetooth.audio-V1-ndk.so
VNDK-core: android.hardware.configstore-utils.so
VNDK-core: android.hardware.configstore@1.0.so
VNDK-core: android.hardware.configstore@1.1.so
VNDK-core: android.hardware.confirmationui-support-lib.so
-VNDK-core: android.hardware.dumpstate-V1-ndk.so
-VNDK-core: android.hardware.gnss-V1-ndk.so
VNDK-core: android.hardware.graphics.allocator@2.0.so
VNDK-core: android.hardware.graphics.allocator@3.0.so
VNDK-core: android.hardware.graphics.allocator@4.0.so
VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
-VNDK-core: android.hardware.health-V1-ndk.so
-VNDK-core: android.hardware.health.storage-V1-ndk.so
-VNDK-core: android.hardware.identity-V3-ndk.so
-VNDK-core: android.hardware.keymaster-V3-ndk.so
-VNDK-core: android.hardware.light-V1-ndk.so
VNDK-core: android.hardware.media.bufferpool@2.0.so
VNDK-core: android.hardware.media.omx@1.0.so
VNDK-core: android.hardware.media@1.0.so
VNDK-core: android.hardware.memtrack-V1-ndk.so
VNDK-core: android.hardware.memtrack@1.0.so
-VNDK-core: android.hardware.nfc-V1-ndk.so
-VNDK-core: android.hardware.oemlock-V1-ndk.so
-VNDK-core: android.hardware.power-V2-ndk.so
-VNDK-core: android.hardware.power.stats-V1-ndk.so
-VNDK-core: android.hardware.radio-V1-ndk.so
-VNDK-core: android.hardware.radio.config-V1-ndk.so
-VNDK-core: android.hardware.radio.data-V1-ndk.so
-VNDK-core: android.hardware.radio.messaging-V1-ndk.so
-VNDK-core: android.hardware.radio.modem-V1-ndk.so
-VNDK-core: android.hardware.radio.network-V1-ndk.so
-VNDK-core: android.hardware.radio.sim-V1-ndk.so
-VNDK-core: android.hardware.radio.voice-V1-ndk.so
-VNDK-core: android.hardware.rebootescrow-V1-ndk.so
-VNDK-core: android.hardware.security.dice-V1-ndk.so
-VNDK-core: android.hardware.security.keymint-V1-ndk.so
-VNDK-core: android.hardware.security.secureclock-V1-ndk.so
-VNDK-core: android.hardware.security.sharedsecret-V1-ndk.so
VNDK-core: android.hardware.soundtrigger@2.0-core.so
VNDK-core: android.hardware.soundtrigger@2.0.so
-VNDK-core: android.hardware.vibrator-V2-ndk.so
-VNDK-core: android.hardware.weaver-V1-ndk.so
-VNDK-core: android.hardware.wifi.hostapd-V1-ndk.so
-VNDK-core: android.hardware.wifi.supplicant-V1-ndk.so
VNDK-core: android.hidl.token@1.0-utils.so
VNDK-core: android.hidl.token@1.0.so
-VNDK-core: android.media.audio.common.types-V1-ndk.so
-VNDK-core: android.system.keystore2-V1-ndk.so
VNDK-core: android.system.suspend-V1-ndk.so
VNDK-core: android.system.suspend@1.0.so
VNDK-core: libaudioroute.so
@@ -167,7 +132,6 @@
VNDK-core: libxml2.so
VNDK-core: libyuv.so
VNDK-core: libziparchive.so
-VNDK-private: libbacktrace.so
VNDK-private: libblas.so
VNDK-private: libcompiler_rt.so
VNDK-private: libft2.so
@@ -199,7 +163,6 @@
VNDK-product: android.hidl.token@1.0.so
VNDK-product: android.system.suspend@1.0.so
VNDK-product: libaudioutils.so
-VNDK-product: libbacktrace.so
VNDK-product: libbase.so
VNDK-product: libc++.so
VNDK-product: libcamera_metadata.so
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
index 2199c57..8755ae6 100644
--- a/target/product/handheld_product.mk
+++ b/target/product/handheld_product.mk
@@ -30,7 +30,6 @@
Gallery2 \
LatinIME \
Music \
- OneTimeInitializer \
preinstalled-packages-platform-handheld-product.xml \
QuickSearchBox \
SettingsIntelligence \
diff --git a/target/product/linux_bionic.mk b/target/product/linux_bionic.mk
new file mode 100644
index 0000000..da6b890
--- /dev/null
+++ b/target/product/linux_bionic.mk
@@ -0,0 +1,18 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_NAME := linux_bionic
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := linux_bionic
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index b6560fc..1ebd4ab 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -157,3 +157,11 @@
dalvik.vm.madvise.vdexfile.size=104857600 \
dalvik.vm.madvise.odexfile.size=104857600 \
dalvik.vm.madvise.artfile.size=4294967295
+
+# Properties for the Unspecialized App Process Pool
+PRODUCT_SYSTEM_PROPERTIES += \
+ dalvik.vm.usap_pool_enabled?=false \
+ dalvik.vm.usap_refill_threshold?=1 \
+ dalvik.vm.usap_pool_size_max?=3 \
+ dalvik.vm.usap_pool_size_min?=1 \
+ dalvik.vm.usap_pool_refill_delay_ms?=3000
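+
+# Note: the "?=" form sets an overridable default; each property above is only
+# assigned if the device configuration has not already set it.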
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index 96d8cc9..fa7e1ad 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -14,8 +14,11 @@
# limitations under the License.
#
-# Don't modify this file - It's just an alias!
+# This is a simple product that configures the minimum amount needed to build
+# the SDK (without the emulator).
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_armv7.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
PRODUCT_NAME := sdk
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_x86
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 18e08e4..716c8e0 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -38,15 +38,3 @@
PRODUCT_PACKAGES += \
snapuserd \
-# For dedicated recovery partitions, we need to include snapuserd
-# For GKI devices, BOARD_USES_RECOVERY_AS_BOOT is empty, but
-# so is BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT.
-ifdef BUILDING_RECOVERY_IMAGE
-ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-PRODUCT_PACKAGES += \
- snapuserd.recovery
-endif
-endif
-endif
-
diff --git a/tests/artifact_path_requirements/inherit1.rbc b/tests/artifact_path_requirements/inherit1.rbc
new file mode 100644
index 0000000..dcef1bf
--- /dev/null
+++ b/tests/artifact_path_requirements/inherit1.rbc
@@ -0,0 +1,21 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit3.rbc", _inherit3_init = "init")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+
+ rblf.inherit(handle, "test/inherit3", _inherit3_init)
diff --git a/tests/artifact_path_requirements/inherit2.rbc b/tests/artifact_path_requirements/inherit2.rbc
new file mode 100644
index 0000000..597b4e9
--- /dev/null
+++ b/tests/artifact_path_requirements/inherit2.rbc
@@ -0,0 +1,22 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit4.rbc", _inherit4_init = "init")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+
+ rblf.inherit(handle, "test/inherit4", _inherit4_init)
+ rblf.require_artifacts_in_path(handle, "vendor/", "")
diff --git a/tests/artifact_path_requirements/inherit3.rbc b/tests/artifact_path_requirements/inherit3.rbc
new file mode 100644
index 0000000..597b4e9
--- /dev/null
+++ b/tests/artifact_path_requirements/inherit3.rbc
@@ -0,0 +1,22 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit4.rbc", _inherit4_init = "init")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+
+ rblf.inherit(handle, "test/inherit4", _inherit4_init)
+ rblf.require_artifacts_in_path(handle, "vendor/", "")
diff --git a/tests/artifact_path_requirements/inherit4.rbc b/tests/artifact_path_requirements/inherit4.rbc
new file mode 100644
index 0000000..52028fe
--- /dev/null
+++ b/tests/artifact_path_requirements/inherit4.rbc
@@ -0,0 +1,21 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+
+ rblf.setdefault(handle, "PRODUCT_COPY_FILES")
+ cfg["PRODUCT_COPY_FILES"] += ["foo/bar/baz.txt:vendor/etc/baz.txt"]
diff --git a/tests/artifact_path_requirements/product.rbc b/tests/artifact_path_requirements/product.rbc
new file mode 100644
index 0000000..7d1f169
--- /dev/null
+++ b/tests/artifact_path_requirements/product.rbc
@@ -0,0 +1,24 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit1.rbc", _inherit1_init = "init")
+load(":inherit2.rbc", _inherit2_init = "init")
+load(":inherit3.rbc", _inherit3_init = "init")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+ rblf.inherit(handle, "test/inherit1", _inherit1_init)
+ rblf.inherit(handle, "test/inherit2", _inherit2_init)
+ rblf.inherit(handle, "test/inherit3", _inherit3_init)
diff --git a/tests/artifact_path_requirements/test.rbc b/tests/artifact_path_requirements/test.rbc
new file mode 100644
index 0000000..0a344d1
--- /dev/null
+++ b/tests/artifact_path_requirements/test.rbc
@@ -0,0 +1,27 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load("//build/make/tests/input_variables.rbc", input_variables_init = "init")
+load(":product.rbc", "init")
+
+def assert_eq(expected, actual):
+ if expected != actual:
+ fail("Expected '%s', got '%s'" % (expected, actual))
+
+def test():
+ (globals, globals_base) = rblf.product_configuration("test/product", init, input_variables_init)
+ assert_eq(["foo/bar/baz.txt:vendor/etc/baz.txt"], globals["PRODUCTS.test/product.mk.PRODUCT_COPY_FILES"])
+ assert_eq(["foo/bar/baz.txt:vendor/etc/baz.txt"], globals["PRODUCTS.test/inherit2.mk.PRODUCT_COPY_FILES"])
+ assert_eq(["foo/bar/baz.txt:vendor/etc/baz.txt"], globals["PRODUCTS.test/inherit3.mk.PRODUCT_COPY_FILES"])
diff --git a/tests/prefixed_sort_order/base-secondary.rbc b/tests/prefixed_sort_order/base-secondary.rbc
new file mode 100644
index 0000000..5446e8f
--- /dev/null
+++ b/tests/prefixed_sort_order/base-secondary.rbc
@@ -0,0 +1,21 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+
+ g.setdefault("MY_VAR", [])
+ g["MY_VAR"] += ["foo"]
diff --git a/tests/prefixed_sort_order/base.rbc b/tests/prefixed_sort_order/base.rbc
new file mode 100644
index 0000000..05b0d5d
--- /dev/null
+++ b/tests/prefixed_sort_order/base.rbc
@@ -0,0 +1,21 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+
+ g.setdefault("MY_VAR", [])
+ g["MY_VAR"] += ["bar"]
diff --git a/tests/prefixed_sort_order/product.rbc b/tests/prefixed_sort_order/product.rbc
new file mode 100644
index 0000000..619b2c0
--- /dev/null
+++ b/tests/prefixed_sort_order/product.rbc
@@ -0,0 +1,29 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":base.rbc", _base_init = "init")
+load(":base-secondary.rbc", _base_secondary_init = "init")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+
+ # It's important that base-secondary uses a dash, an underscore won't expose the sort order issue:
+ # >>> sorted(["base", "base-secondary"])
+ # ['base', 'base-secondary']
+ # >>> sorted(["base.mk", "base-secondary.mk"])
+ # ['base-secondary.mk', 'base.mk']
+
+ rblf.inherit(handle, "base", _base_init)
+ rblf.inherit(handle, "base-secondary", _base_secondary_init)
diff --git a/tests/prefixed_sort_order/test.rbc b/tests/prefixed_sort_order/test.rbc
new file mode 100644
index 0000000..e59a509
--- /dev/null
+++ b/tests/prefixed_sort_order/test.rbc
@@ -0,0 +1,26 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load("//build/make/tests/input_variables.rbc", input_variables_init = "init")
+load(":product.rbc", "init")
+
+
+def assert_eq(expected, actual):
+ if expected != actual:
+ fail("Expected '%s', got '%s'" % (expected, actual))
+
+def test():
+ (globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+ assert_eq(["foo", "bar"], globals["MY_VAR"])
diff --git a/tests/run.rbc b/tests/run.rbc
index 58cc4d6..c6dfeba 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -26,18 +26,24 @@
load(":board.rbc", board_init = "init")
load(":board_input_vars.rbc", board_input_vars_init = "init")
load("//build/make/tests/single_value_inheritance:test.rbc", test_single_value_inheritance = "test")
+load("//build/make/tests/artifact_path_requirements:test.rbc", test_artifact_path_requirements = "test")
+load("//build/make/tests/prefixed_sort_order:test.rbc", test_prefixed_sort_order = "test")
def assert_eq(expected, actual):
if expected != actual:
fail("Expected '%s', got '%s'" % (expected, actual))
+def assert_dict_subset(expected, actual):
+ for key, val in expected.items():
+ assert_eq(val, actual[key])
+
# Unit tests for non-trivial runtime functions
assert_eq(["a", "b", "c"], rblf.mksort("b a c c"))
assert_eq(["a", "b", "c"], rblf.mksort(["b", "a", "c", "c"]))
assert_eq("", rblf.mkstrip(" \n \t "))
assert_eq("a b c", rblf.mkstrip(" a b \n c \t"))
-assert_eq(1, rblf.mkstrip(1))
+assert_eq("1", rblf.mkstrip("1 "))
assert_eq("b1 b2", rblf.mksubst("a", "b", "a1 a2"))
assert_eq(["b1", "x2"], rblf.mksubst("a", "b", ["a1", "x2"]))
@@ -67,6 +73,27 @@
assert_eq("", rblf.notdir("/"))
assert_eq("", rblf.notdir(""))
+cwd = rblf_shell('pwd')
+assert_eq(cwd+"/foo/bar", rblf.abspath("foo/bar"))
+assert_eq(cwd+"/bar", rblf.abspath("foo/.././bar"))
+assert_eq(cwd+"/bar", rblf.abspath("foo/..////bar//"))
+assert_eq("/foo/baz", rblf.abspath("/foo/bar/../baz"))
+assert_eq(cwd+"/foo/bar "+cwd+"/foo/baz", rblf.abspath("foo/bar foo/baz"))
+assert_eq("/baz", rblf.abspath("/../../../../../../../../../../../../../../../../baz"))
+
+assert_eq("foo", rblf.first_word("foo bar"))
+assert_eq("foo", rblf.first_word(["foo", "bar"]))
+assert_eq("", rblf.first_word(""))
+assert_eq("", rblf.first_word([]))
+assert_eq("bar", rblf.last_word("foo bar"))
+assert_eq("bar", rblf.last_word(["foo", "bar"]))
+assert_eq("", rblf.last_word(""))
+assert_eq("", rblf.last_word([]))
+
+assert_eq(["foo", "bar"], rblf.flatten_2d_list([["foo", "bar"]]))
+assert_eq(["foo", "bar"], rblf.flatten_2d_list([["foo"], ["bar"]]))
+assert_eq([], rblf.flatten_2d_list([]))
+
assert_eq(
["build/make/tests/board.rbc", "build/make/tests/board_input_vars.rbc"],
rblf.expand_wildcard("build/make/tests/board*.rbc")
@@ -80,31 +107,28 @@
rblf.expand_wildcard("build/make/tests/run.rbc build/make/tests/nonexistent.rbc")
)
-(globals, config, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
-assert_eq(
- {
- "PRODUCT_COPY_FILES": [
- "part_from:part_to",
- "device_from:device_to",
- "device/google/redfin/audio/audio_platform_info_noextcodec_snd.xml:||VENDOR-PATH-PH||/etc/audio/audio_platform_info_noextcodec_snd.xml",
- "xyz:/etc/xyz",
- "x.xml:/etc/x.xml",
- "y.xml:/etc/y.xml",
- "from/sub/x:to/x",
- "from/sub/y:to/y",
- ],
- "PRODUCT_HOST_PACKAGES": ["host"],
- "PRODUCT_PACKAGES": [
- "dev",
- "inc",
- "dev_after",
- "board1_in",
- "board1_is",
- ],
- "PRODUCT_PRODUCT_PROPERTIES": ["part_properties"]
- },
- { k:v for k, v in sorted(config.items()) }
-)
+(globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+assert_dict_subset({
+ "PRODUCTS.test/device.mk.PRODUCT_COPY_FILES": [
+ "part_from:part_to",
+ "device_from:device_to",
+ "device/google/redfin/audio/audio_platform_info_noextcodec_snd.xml:||VENDOR-PATH-PH||/etc/audio/audio_platform_info_noextcodec_snd.xml",
+ "xyz:/etc/xyz",
+ "x.xml:/etc/x.xml",
+ "y.xml:/etc/y.xml",
+ "from/sub/x:to/x",
+ "from/sub/y:to/y",
+ ],
+ "PRODUCTS.test/device.mk.PRODUCT_HOST_PACKAGES": ["host"],
+ "PRODUCTS.test/device.mk.PRODUCT_PACKAGES": [
+ "dev",
+ "inc",
+ "dev_after",
+ "board1_in",
+ "board1_is",
+ ],
+ "PRODUCTS.test/device.mk.PRODUCT_PRODUCT_PROPERTIES": ["part_properties"]
+}, globals)
ns = globals["$SOONG_CONFIG_NAMESPACES"]
assert_eq(
@@ -134,8 +158,10 @@
{ k:v for k,v in sorted(goals.items()) }
)
-(board_globals, board_config, board_globals_base) = rblf.board_configuration(board_init, board_input_vars_init)
+(board_globals, board_globals_base) = rblf.board_configuration(board_init, board_input_vars_init)
assert_eq({"A_LIST_VARIABLE": ["foo", "bar"]}, board_globals)
assert_eq({"A_LIST_VARIABLE": ["foo"]}, board_globals_base)
test_single_value_inheritance()
+test_artifact_path_requirements()
+test_prefixed_sort_order()
diff --git a/tests/single_value_inheritance/test.rbc b/tests/single_value_inheritance/test.rbc
index dcde7e0..e4f44f4 100644
--- a/tests/single_value_inheritance/test.rbc
+++ b/tests/single_value_inheritance/test.rbc
@@ -22,7 +22,7 @@
fail("Expected '%s', got '%s'" % (expected, actual))
def test():
- (globals, config, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
- assert_eq("tablet", config["PRODUCT_CHARACTERISTICS"])
- assert_eq("vendor/myvendor/certs/devkeys/devkey", config["PRODUCT_DEFAULT_DEV_CERTIFICATE"])
- assert_eq(["foo", "bar"], config["PRODUCT_PACKAGES"])
+ (globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+ assert_eq("tablet", globals["PRODUCTS.test/device.mk.PRODUCT_CHARACTERISTICS"])
+ assert_eq("vendor/myvendor/certs/devkeys/devkey", globals["PRODUCTS.test/device.mk.PRODUCT_DEFAULT_DEV_CERTIFICATE"])
+ assert_eq(["foo", "bar"], globals["PRODUCTS.test/device.mk.PRODUCT_PACKAGES"])
diff --git a/tools/Android.bp b/tools/Android.bp
index 6601c60..bd326f1 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -49,3 +49,8 @@
out: ["kernel_release.txt"],
cmd: "$(location) --tools lz4:$(location lz4) --input $(in) --output-release > $(out)"
}
+
+cc_binary_host {
+ name: "build-runfiles",
+ srcs: ["build-runfiles.cc"],
+}
diff --git a/tools/build-runfiles.cc b/tools/build-runfiles.cc
new file mode 100644
index 0000000..b6197f0
--- /dev/null
+++ b/tools/build-runfiles.cc
@@ -0,0 +1,426 @@
+// Copyright 2014 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// This program creates a "runfiles tree" from a "runfiles manifest".
+//
+// The command line arguments are an input manifest INPUT and an output
+// directory RUNFILES. First, the files in the RUNFILES directory are scanned
+// and any extraneous ones are removed. Second, any missing files are created.
+// Finally, a copy of the input manifest is written to RUNFILES/MANIFEST.
+//
+// The input manifest consists of lines, each containing a relative path within
+// the runfiles, a space, and an optional absolute path. If this second path
+// is present, a symlink is created pointing to it; otherwise an empty file is
+// created.
+//
+// Given the line
+// <workspace root>/output/path /real/path
+// we will create directories
+// RUNFILES/<workspace root>
+// RUNFILES/<workspace root>/output
+// a symlink
+// RUNFILES/<workspace root>/output/path -> /real/path
+// and the output manifest will contain a line
+// <workspace root>/output/path /real/path
+//
+// If --use_metadata is supplied, every other line is treated as opaque
+// metadata, and is ignored here.
+//
+// All output paths must be relative and generally (but not always) begin with
+// <workspace root>. No output path may be equal to another. No output path may
+// be a path prefix of another.
+
+#define _FILE_OFFSET_BITS 64
+
+#include <dirent.h>
+#include <err.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <map>
+#include <string>
+
+// program_invocation_short_name is not portable.
+static const char *argv0;
+
+const char *input_filename;
+const char *output_base_dir;
+
+enum FileType {
+ FILE_TYPE_REGULAR,
+ FILE_TYPE_DIRECTORY,
+ FILE_TYPE_SYMLINK
+};
+
+struct FileInfo {
+ FileType type;
+ std::string symlink_target;
+
+ bool operator==(const FileInfo &other) const {
+ return type == other.type && symlink_target == other.symlink_target;
+ }
+
+ bool operator!=(const FileInfo &other) const {
+ return !(*this == other);
+ }
+};
+
+typedef std::map<std::string, FileInfo> FileInfoMap;
+
+class RunfilesCreator {
+ public:
+ explicit RunfilesCreator(const std::string &output_base)
+ : output_base_(output_base),
+ output_filename_("MANIFEST"),
+ temp_filename_(output_filename_ + ".tmp") {
+ SetupOutputBase();
+ if (chdir(output_base_.c_str()) != 0) {
+ err(2, "chdir '%s'", output_base_.c_str());
+ }
+ }
+
+ void ReadManifest(const std::string &manifest_file, bool allow_relative,
+ bool use_metadata) {
+ FILE *outfile = fopen(temp_filename_.c_str(), "w");
+ if (!outfile) {
+ err(2, "opening '%s/%s' for writing", output_base_.c_str(),
+ temp_filename_.c_str());
+ }
+ FILE *infile = fopen(manifest_file.c_str(), "r");
+ if (!infile) {
+ err(2, "opening '%s' for reading", manifest_file.c_str());
+ }
+
+ // read input manifest
+ int lineno = 0;
+ char buf[3 * PATH_MAX];
+ while (fgets(buf, sizeof buf, infile)) {
+ // copy line to output manifest
+ if (fputs(buf, outfile) == EOF) {
+ err(2, "writing to '%s/%s'", output_base_.c_str(),
+ temp_filename_.c_str());
+ }
+
+ // parse line
+ ++lineno;
+ // Skip metadata lines. They are used solely for
+ // dependency checking.
+ if (use_metadata && lineno % 2 == 0) continue;
+
+ char *tok = strtok(buf, " \n");
+ if (tok == nullptr) {
+ continue;
+ } else if (*tok == '/') {
+ errx(2, "%s:%d: paths must not be absolute", input_filename, lineno);
+ }
+ std::string link(tok);
+
+ const char *target = strtok(nullptr, " \n");
+ if (target == nullptr) {
+ target = "";
+ } else if (strtok(nullptr, " \n") != nullptr) {
+ errx(2, "%s:%d: link or target filename contains space", input_filename, lineno);
+ } else if (!allow_relative && target[0] != '/') {
+ errx(2, "%s:%d: expected absolute path", input_filename, lineno);
+ }
+
+ FileInfo *info = &manifest_[link];
+ if (target[0] == '\0') {
+ // No target means an empty file.
+ info->type = FILE_TYPE_REGULAR;
+ } else {
+ info->type = FILE_TYPE_SYMLINK;
+ info->symlink_target = target;
+ }
+
+ FileInfo parent_info;
+ parent_info.type = FILE_TYPE_DIRECTORY;
+
+ while (true) {
+ int k = link.rfind('/');
+ if (k < 0) break;
+ link.erase(k, std::string::npos);
+ if (!manifest_.insert(std::make_pair(link, parent_info)).second) break;
+ }
+ }
+ if (fclose(outfile) != 0) {
+ err(2, "writing to '%s/%s'", output_base_.c_str(),
+ temp_filename_.c_str());
+ }
+ fclose(infile);
+
+ // Don't delete the temp manifest file.
+ manifest_[temp_filename_].type = FILE_TYPE_REGULAR;
+ }
+
+ void CreateRunfiles() {
+ if (unlink(output_filename_.c_str()) != 0 && errno != ENOENT) {
+ err(2, "removing previous file at '%s/%s'", output_base_.c_str(),
+ output_filename_.c_str());
+ }
+
+ ScanTreeAndPrune(".");
+ CreateFiles();
+
+ // rename output file into place
+ if (rename(temp_filename_.c_str(), output_filename_.c_str()) != 0) {
+ err(2, "renaming '%s/%s' to '%s/%s'",
+ output_base_.c_str(), temp_filename_.c_str(),
+ output_base_.c_str(), output_filename_.c_str());
+ }
+ }
+
+ private:
+ void SetupOutputBase() {
+ struct stat st;
+ if (stat(output_base_.c_str(), &st) != 0) {
+ // Technically, this will cause problems if the user's umask contains
+ // 0200, but we don't care. Anyone who does that deserves what's coming.
+ if (mkdir(output_base_.c_str(), 0777) != 0) {
+ err(2, "creating directory '%s'", output_base_.c_str());
+ }
+ } else {
+ EnsureDirReadAndWritePerms(output_base_);
+ }
+ }
+
+ void ScanTreeAndPrune(const std::string &path) {
+ // A note on non-empty files:
+ // We don't distinguish between empty and non-empty files. That is, if
+ // there's a file that has contents, we don't truncate it here, even though
+ // the manifest only supports the creation of empty files. Given that
+ // .runfiles are *supposed* to be immutable, this shouldn't be a problem.
+ EnsureDirReadAndWritePerms(path);
+
+ struct dirent *entry;
+ DIR *dh = opendir(path.c_str());
+ if (!dh) {
+ err(2, "opendir '%s'", path.c_str());
+ }
+
+ errno = 0;
+ const std::string prefix = (path == "." ? "" : path + "/");
+ while ((entry = readdir(dh)) != nullptr) {
+ if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) continue;
+
+ std::string entry_path = prefix + entry->d_name;
+ FileInfo actual_info;
+ actual_info.type = DentryToFileType(entry_path, entry);
+
+ if (actual_info.type == FILE_TYPE_SYMLINK) {
+ ReadLinkOrDie(entry_path, &actual_info.symlink_target);
+ }
+
+ FileInfoMap::iterator expected_it = manifest_.find(entry_path);
+ if (expected_it == manifest_.end() ||
+ expected_it->second != actual_info) {
+ DelTree(entry_path, actual_info.type);
+ } else {
+ manifest_.erase(expected_it);
+ if (actual_info.type == FILE_TYPE_DIRECTORY) {
+ ScanTreeAndPrune(entry_path);
+ }
+ }
+
+ errno = 0;
+ }
+ if (errno != 0) {
+ err(2, "reading directory '%s'", path.c_str());
+ }
+ closedir(dh);
+ }
+
+ void CreateFiles() {
+ for (FileInfoMap::const_iterator it = manifest_.begin();
+ it != manifest_.end(); ++it) {
+ const std::string &path = it->first;
+ switch (it->second.type) {
+ case FILE_TYPE_DIRECTORY:
+ if (mkdir(path.c_str(), 0777) != 0) {
+ err(2, "mkdir '%s'", path.c_str());
+ }
+ break;
+ case FILE_TYPE_REGULAR:
+ {
+ int fd = open(path.c_str(), O_CREAT|O_EXCL|O_WRONLY, 0555);
+ if (fd < 0) {
+ err(2, "creating empty file '%s'", path.c_str());
+ }
+ close(fd);
+ }
+ break;
+ case FILE_TYPE_SYMLINK:
+ {
+ const std::string& target = it->second.symlink_target;
+ if (symlink(target.c_str(), path.c_str()) != 0) {
+ err(2, "symlinking '%s' -> '%s'", path.c_str(), target.c_str());
+ }
+ }
+ break;
+ }
+ }
+ }
+
+ FileType DentryToFileType(const std::string &path, struct dirent *ent) {
+#ifdef _DIRENT_HAVE_D_TYPE
+ if (ent->d_type != DT_UNKNOWN) {
+ if (ent->d_type == DT_DIR) {
+ return FILE_TYPE_DIRECTORY;
+ } else if (ent->d_type == DT_LNK) {
+ return FILE_TYPE_SYMLINK;
+ } else {
+ return FILE_TYPE_REGULAR;
+ }
+ } else // NOLINT (the brace is on the next line)
+#endif
+ {
+ struct stat st;
+ LStatOrDie(path, &st);
+ if (S_ISDIR(st.st_mode)) {
+ return FILE_TYPE_DIRECTORY;
+ } else if (S_ISLNK(st.st_mode)) {
+ return FILE_TYPE_SYMLINK;
+ } else {
+ return FILE_TYPE_REGULAR;
+ }
+ }
+ }
+
+ void LStatOrDie(const std::string &path, struct stat *st) {
+ if (lstat(path.c_str(), st) != 0) {
+ err(2, "lstating file '%s'", path.c_str());
+ }
+ }
+
+ void StatOrDie(const std::string &path, struct stat *st) {
+ if (stat(path.c_str(), st) != 0) {
+ err(2, "stating file '%s'", path.c_str());
+ }
+ }
+
+ void ReadLinkOrDie(const std::string &path, std::string *output) {
+ char readlink_buffer[PATH_MAX];
+ int sz = readlink(path.c_str(), readlink_buffer, sizeof(readlink_buffer));
+ if (sz < 0) {
+ err(2, "reading symlink '%s'", path.c_str());
+ }
+ // readlink does not null-terminate the returned string.
+ std::string(readlink_buffer, sz).swap(*output);
+ }
+
+ void EnsureDirReadAndWritePerms(const std::string &path) {
+ const int kMode = 0700;
+ struct stat st;
+ LStatOrDie(path, &st);
+ if ((st.st_mode & kMode) != kMode) {
+ int new_mode = st.st_mode | kMode;
+ if (chmod(path.c_str(), new_mode) != 0) {
+ err(2, "chmod '%s'", path.c_str());
+ }
+ }
+ }
+
+ bool DelTree(const std::string &path, FileType file_type) {
+ if (file_type != FILE_TYPE_DIRECTORY) {
+ if (unlink(path.c_str()) != 0) {
+ err(2, "unlinking '%s'", path.c_str());
+ return false;
+ }
+ return true;
+ }
+
+ EnsureDirReadAndWritePerms(path);
+
+ struct dirent *entry;
+ DIR *dh = opendir(path.c_str());
+ if (!dh) {
+ err(2, "opendir '%s'", path.c_str());
+ }
+ errno = 0;
+ while ((entry = readdir(dh)) != nullptr) {
+ if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) continue;
+ const std::string entry_path = path + '/' + entry->d_name;
+ FileType entry_file_type = DentryToFileType(entry_path, entry);
+ DelTree(entry_path, entry_file_type);
+ errno = 0;
+ }
+ if (errno != 0) {
+ err(2, "readdir '%s'", path.c_str());
+ }
+ closedir(dh);
+ if (rmdir(path.c_str()) != 0) {
+ err(2, "rmdir '%s'", path.c_str());
+ }
+ return true;
+ }
+
+ private:
+ std::string output_base_;
+ std::string output_filename_;
+ std::string temp_filename_;
+
+ FileInfoMap manifest_;
+};
+
+int main(int argc, char **argv) {
+ argv0 = argv[0];
+
+ argc--; argv++;
+ bool allow_relative = false;
+ bool use_metadata = false;
+
+ while (argc >= 1) {
+ if (strcmp(argv[0], "--allow_relative") == 0) {
+ allow_relative = true;
+ argc--; argv++;
+ } else if (strcmp(argv[0], "--use_metadata") == 0) {
+ use_metadata = true;
+ argc--; argv++;
+ } else {
+ break;
+ }
+ }
+
+ if (argc != 2) {
+ fprintf(stderr, "usage: %s "
+ "[--allow_relative] [--use_metadata] "
+ "INPUT RUNFILES\n",
+ argv0);
+ return 1;
+ }
+
+ input_filename = argv[0];
+ output_base_dir = argv[1];
+
+ std::string manifest_file = input_filename;
+ if (input_filename[0] != '/') {
+ char cwd_buf[PATH_MAX];
+ if (getcwd(cwd_buf, sizeof(cwd_buf)) == nullptr) {
+ err(2, "getcwd failed");
+ }
+ manifest_file = std::string(cwd_buf) + '/' + manifest_file;
+ }
+
+ RunfilesCreator runfiles_creator(output_base_dir);
+ runfiles_creator.ReadManifest(manifest_file, allow_relative, use_metadata);
+ runfiles_creator.CreateRunfiles();
+
+ return 0;
+}
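The manifest contract described in the header comment of build-runfiles.cc is compact enough to restate. Here is a hedged sketch of the same line format that ReadManifest parses, written in Go for illustration only (the tool itself is the C++ above); it assumes whitespace-separated fields and at most one target per link:

```
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		fields := strings.Fields(scanner.Text())
		switch len(fields) {
		case 0:
			continue // blank line: nothing to create
		case 1:
			// No target: build-runfiles creates an empty file at this path.
			fmt.Printf("empty file: %s\n", fields[0])
		case 2:
			// Target present: a symlink pointing at the (absolute) target.
			fmt.Printf("symlink: %s -> %s\n", fields[0], fields[1])
		default:
			// Mirrors the C++ error for names containing spaces.
			fmt.Fprintln(os.Stderr, "link or target filename contains space")
			os.Exit(2)
		}
	}
}
```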
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 045cb1d..0b80226 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -72,9 +72,9 @@
def _get_os_name():
"""Get the host OS name."""
- if sys.platform == 'linux2':
+ if sys.platform.startswith('linux'):
return 'linux'
- if sys.platform == 'darwin':
+ if sys.platform.startswith('darwin'):
return 'darwin'
raise ValueError(sys.platform + ' is not supported')
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index ec0f2f9..225f3a5 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -18,16 +18,22 @@
}
blueprint_go_binary {
- name: "checkshare",
+ name: "compliance_checkshare",
srcs: ["cmd/checkshare/checkshare.go"],
- deps: ["compliance-module"],
+ deps: [
+ "compliance-module",
+ "soong-response",
+ ],
testSrcs: ["cmd/checkshare/checkshare_test.go"],
}
blueprint_go_binary {
name: "compliancenotice_bom",
srcs: ["cmd/bom/bom.go"],
- deps: ["compliance-module"],
+ deps: [
+ "compliance-module",
+ "soong-response",
+ ],
testSrcs: ["cmd/bom/bom_test.go"],
}
@@ -42,23 +48,32 @@
}
blueprint_go_binary {
- name: "listshare",
+ name: "compliance_listshare",
srcs: ["cmd/listshare/listshare.go"],
- deps: ["compliance-module"],
+ deps: [
+ "compliance-module",
+ "soong-response",
+ ],
testSrcs: ["cmd/listshare/listshare_test.go"],
}
blueprint_go_binary {
- name: "dumpgraph",
+ name: "compliance_dumpgraph",
srcs: ["cmd/dumpgraph/dumpgraph.go"],
- deps: ["compliance-module"],
+ deps: [
+ "compliance-module",
+ "soong-response",
+ ],
testSrcs: ["cmd/dumpgraph/dumpgraph_test.go"],
}
blueprint_go_binary {
- name: "dumpresolutions",
+ name: "compliance_dumpresolutions",
srcs: ["cmd/dumpresolutions/dumpresolutions.go"],
- deps: ["compliance-module"],
+ deps: [
+ "compliance-module",
+ "soong-response",
+ ],
testSrcs: ["cmd/dumpresolutions/dumpresolutions_test.go"],
}
@@ -68,14 +83,18 @@
deps: [
"compliance-module",
"blueprint-deptools",
+ "soong-response",
],
testSrcs: ["cmd/htmlnotice/htmlnotice_test.go"],
}
blueprint_go_binary {
- name: "rtrace",
+ name: "compliance_rtrace",
srcs: ["cmd/rtrace/rtrace.go"],
- deps: ["compliance-module"],
+ deps: [
+ "compliance-module",
+ "soong-response",
+ ],
testSrcs: ["cmd/rtrace/rtrace_test.go"],
}
@@ -85,6 +104,7 @@
deps: [
"compliance-module",
"blueprint-deptools",
+ "soong-response",
],
testSrcs: ["cmd/textnotice/textnotice_test.go"],
}
@@ -95,6 +115,7 @@
deps: [
"compliance-module",
"blueprint-deptools",
+ "soong-response",
],
testSrcs: ["cmd/xmlnotice/xmlnotice_test.go"],
}
diff --git a/tools/compliance/cmd/bom/bom.go b/tools/compliance/cmd/bom/bom.go
index b613a1f..187f828 100644
--- a/tools/compliance/cmd/bom/bom.go
+++ b/tools/compliance/cmd/bom/bom.go
@@ -24,13 +24,11 @@
"path/filepath"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
)
var (
- outputFile = flag.String("o", "-", "Where to write the bill of materials. (default stdout)")
- stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
failNoLicenses = fmt.Errorf("No licenses found")
)
@@ -55,22 +53,10 @@
return installPath
}
-func init() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs a bill of materials. i.e. the list of installed paths.
-
-Options:
-`, filepath.Base(os.Args[0]))
- flag.PrintDefaults()
- }
-}
-
// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
var f multiString
- flag.Var(&f, name, usage)
+ flags.Var(&f, name, usage)
return &f
}
@@ -81,16 +67,52 @@
func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
func main() {
- flag.Parse()
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a bill of materials, i.e. the list of installed paths.
+
+Options:
+`, filepath.Base(os.Args[0]))
+ flags.PrintDefaults()
+ }
+
+ outputFile := flags.String("o", "-", "Where to write the bill of materials. (default stdout)")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, i.e. path to root (multiple allowed)")
+
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
if len(*outputFile) == 0 {
- flag.Usage()
+ flags.Usage()
fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
os.Exit(2)
} else {
@@ -118,10 +140,10 @@
ctx := &context{ofile, os.Stderr, compliance.FS, *stripPrefix}
- err := billOfMaterials(ctx, flag.Args()...)
+ err := billOfMaterials(ctx, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
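The @-file pre-pass added to bom.go (and repeated in each command below) expands response files before flag parsing, so callers can pass long lists of .meta_lic files without hitting OS argument-length limits. The same pattern as a standalone sketch; android/soong/response is the package imported above:

```
package main

import (
	"fmt"
	"os"
	"strings"

	"android/soong/response"
)

// expandArgs mirrors the pre-pass above: every "@file" argument is replaced
// by the arguments listed in that response file.
func expandArgs(args []string) []string {
	var expanded []string
	for _, arg := range args {
		if !strings.HasPrefix(arg, "@") {
			expanded = append(expanded, arg)
			continue
		}
		f, err := os.Open(strings.TrimPrefix(arg, "@"))
		if err != nil {
			fmt.Fprintln(os.Stderr, err.Error())
			os.Exit(1)
		}
		respArgs, err := response.ReadRspFile(f)
		f.Close()
		if err != nil {
			fmt.Fprintln(os.Stderr, err.Error())
			os.Exit(1)
		}
		expanded = append(expanded, respArgs...)
	}
	return expanded
}

func main() {
	fmt.Println(expandArgs(os.Args[1:]))
}
```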
diff --git a/tools/compliance/cmd/checkshare/checkshare.go b/tools/compliance/cmd/checkshare/checkshare.go
index 73bdcb5..f7b4cd2 100644
--- a/tools/compliance/cmd/checkshare/checkshare.go
+++ b/tools/compliance/cmd/checkshare/checkshare.go
@@ -15,6 +15,7 @@
package main
import (
+ "bytes"
"flag"
"fmt"
"io"
@@ -22,31 +23,12 @@
"os"
"path/filepath"
"sort"
+ "strings"
+ "android/soong/response"
"android/soong/tools/compliance"
)
-func init() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, `Usage: %s file.meta_lic {file.meta_lic...}
-
-Reports on stderr any targets where policy says that the source both
-must and must not be shared. The error report indicates the target, the
-license condition that has a source privacy policy, and the license
-condition that has a source sharing policy.
-
-Any given target may appear multiple times with different combinations
-of conflicting license conditions.
-
-If all the source code that policy says must be shared may be shared,
-outputs "PASS" to stdout and exits with status 0.
-
-If policy says any source must both be shared and not be shared,
-outputs "FAIL" to stdout and exits with status 1.
-`, filepath.Base(os.Args[0]))
- }
-}
-
var (
failConflicts = fmt.Errorf("conflicts")
failNoneRequested = fmt.Errorf("\nNo metadata files requested")
@@ -61,24 +43,105 @@
func (l byError) Less(i, j int) bool { return l[i].Error() < l[j].Error() }
func main() {
- flag.Parse()
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} file.meta_lic {file.meta_lic...}
+
+Reports on stderr any targets where policy says that the source both
+must and must not be shared. The error report indicates the target, the
+license condition that has a source privacy policy, and the license
+condition that has a source sharing policy.
+
+Any given target may appear multiple times with different combinations
+of conflicting license conditions.
+
+If all the source code that policy says must be shared may be shared,
+outputs "PASS" to stdout and exits with status 0.
+
+If policy says any source must both be shared and not be shared,
+outputs "FAIL" to stdout and exits with status 1.
+`, filepath.Base(os.Args[0]))
+ flags.PrintDefaults()
+ }
+
+ outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
- err := checkShare(os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+ if len(*outputFile) == 0 {
+ flags.Usage()
+ fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+ os.Exit(2)
+ } else {
+ dir, err := filepath.Abs(filepath.Dir(*outputFile))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ fi, err := os.Stat(dir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+ os.Exit(1)
+ }
+ if !fi.IsDir() {
+ fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+ os.Exit(1)
+ }
+ }
+
+ var ofile io.Writer
+ ofile = os.Stdout
+ var obuf *bytes.Buffer
+ if *outputFile != "-" {
+ obuf = &bytes.Buffer{}
+ ofile = obuf
+ }
+
+ err := checkShare(ofile, os.Stderr, compliance.FS, flags.Args()...)
if err != nil {
if err != failConflicts {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
}
os.Exit(1)
}
+ if *outputFile != "-" {
+ err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+ os.Exit(1)
+ }
+ }
os.Exit(0)
}
@@ -92,7 +155,7 @@
// Read the license graph from the license metadata files (*.meta_lic).
licenseGraph, err := compliance.ReadLicenseGraph(rootFS, stderr, files)
if err != nil {
- return fmt.Errorf("Unable to read license metadata file(s) %q: %w\n", files, err)
+ return fmt.Errorf("Unable to read license metadata file(s) %q from %q: %w\n", files, os.Getenv("PWD"), err)
}
if licenseGraph == nil {
return failNoLicenses
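checkshare gains the same -o handling as the notice tools: when the output names a file, everything is first written to an in-memory buffer and flushed with os.WriteFile only after the command succeeds, so a failed run leaves no partial output behind. A condensed sketch of that pattern, standard library only:

```
package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
)

// writeOutput streams to stdout when outputFile is "-"; otherwise it buffers
// in memory and writes the file only if run succeeded.
func writeOutput(outputFile string, run func(w io.Writer) error) error {
	if outputFile == "-" {
		return run(os.Stdout)
	}
	var buf bytes.Buffer
	if err := run(&buf); err != nil {
		return err
	}
	return os.WriteFile(outputFile, buf.Bytes(), 0666)
}

func main() {
	err := writeOutput("-", func(w io.Writer) error {
		_, err := fmt.Fprintln(w, "PASS")
		return err
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```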
diff --git a/tools/compliance/cmd/dumpgraph/dumpgraph.go b/tools/compliance/cmd/dumpgraph/dumpgraph.go
index 32a3fc4..5625779 100644
--- a/tools/compliance/cmd/dumpgraph/dumpgraph.go
+++ b/tools/compliance/cmd/dumpgraph/dumpgraph.go
@@ -15,6 +15,7 @@
package main
import (
+ "bytes"
"flag"
"fmt"
"io"
@@ -24,14 +25,11 @@
"sort"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
)
var (
- graphViz = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
- labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
- stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
failNoLicenses = fmt.Errorf("No licenses found")
)
@@ -55,8 +53,44 @@
return installPath
}
-func init() {
- flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+ var f multiString
+ flags.Var(&f, name, usage)
+ return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
Outputs space-separated Target Dependency Annotations tuples for each
@@ -70,42 +104,68 @@
Options:
`, filepath.Base(os.Args[0]))
- flag.PrintDefaults()
+ flags.PrintDefaults()
}
-}
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
- var f multiString
- flag.Var(&f, name, usage)
- return &f
-}
+ graphViz := flags.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
+ labelConditions := flags.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
+ outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, i.e. path to root (multiple allowed)")
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
- flag.Parse()
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
+ if len(*outputFile) == 0 {
+ flags.Usage()
+ fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+ os.Exit(2)
+ } else {
+ dir, err := filepath.Abs(filepath.Dir(*outputFile))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ fi, err := os.Stat(dir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+ os.Exit(1)
+ }
+ if !fi.IsDir() {
+ fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+ os.Exit(1)
+ }
+ }
+
+ var ofile io.Writer
+ ofile = os.Stdout
+ var obuf *bytes.Buffer
+ if *outputFile != "-" {
+ obuf = &bytes.Buffer{}
+ ofile = obuf
+ }
+
ctx := &context{*graphViz, *labelConditions, *stripPrefix}
- err := dumpGraph(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+ err := dumpGraph(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
}
+ if *outputFile != "-" {
+ err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+ os.Exit(1)
+ }
+ }
os.Exit(0)
}
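dumpgraph also moves the multiString helper ahead of main so the flags can be registered on the per-command FlagSet. Because multiString implements flag.Value, one option name may be repeated on the command line; a minimal standalone use:

```
package main

import (
	"flag"
	"fmt"
	"os"
	"strings"
)

// multiString collects every occurrence of a repeated flag, as the
// compliance commands do for -strip_prefix.
type multiString []string

func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }

func main() {
	flags := flag.NewFlagSet("flags", flag.ExitOnError)
	var prefixes multiString
	flags.Var(&prefixes, "strip_prefix", "Prefix to remove from paths (multiple allowed)")
	flags.Parse(os.Args[1:])
	// e.g. -strip_prefix out/ -strip_prefix vendor/ prints [out/ vendor/]
	fmt.Println(prefixes)
}
```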
diff --git a/tools/compliance/cmd/dumpresolutions/dumpresolutions.go b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
index d02c238..dc0cf88 100644
--- a/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
+++ b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
@@ -15,6 +15,7 @@
package main
import (
+ "bytes"
"flag"
"fmt"
"io"
@@ -24,15 +25,11 @@
"sort"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
)
var (
- conditions = newMultiString("c", "License condition to resolve. (may be given multiple times)")
- graphViz = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
- labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
- stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
failNoLicenses = fmt.Errorf("No licenses found")
)
@@ -57,8 +54,44 @@
return installPath
}
-func init() {
- flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+ var f multiString
+ flags.Var(&f, name, usage)
+ return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
Outputs a space-separated Target ActsOn Origin Condition tuple for each
@@ -75,32 +108,52 @@
Options:
`, filepath.Base(os.Args[0]))
- flag.PrintDefaults()
+ flags.PrintDefaults()
}
-}
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
- var f multiString
- flag.Var(&f, name, usage)
- return &f
-}
+ conditions := newMultiString(flags, "c", "License condition to resolve. (may be given multiple times)")
+ graphViz := flags.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
+ labelConditions := flags.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
+ outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, i.e. path to root (multiple allowed)")
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
- flag.Parse()
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
+ if len(*outputFile) == 0 {
+ flags.Usage()
+ fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+ os.Exit(2)
+ } else {
+ dir, err := filepath.Abs(filepath.Dir(*outputFile))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ fi, err := os.Stat(dir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+ os.Exit(1)
+ }
+ if !fi.IsDir() {
+ fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+ os.Exit(1)
+ }
+ }
+
+ var ofile io.Writer
+ ofile = os.Stdout
+ var obuf *bytes.Buffer
+ if *outputFile != "-" {
+ obuf = &bytes.Buffer{}
+ ofile = obuf
+ }
+
lcs := make([]compliance.LicenseCondition, 0, len(*conditions))
for _, name := range *conditions {
lcs = append(lcs, compliance.RecognizedConditionNames[name])
@@ -111,14 +164,21 @@
labelConditions: *labelConditions,
stripPrefix: *stripPrefix,
}
- _, err := dumpResolutions(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+ _, err := dumpResolutions(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
}
+ if *outputFile != "-" {
+ err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+ os.Exit(1)
+ }
+ }
os.Exit(0)
}
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice.go b/tools/compliance/cmd/htmlnotice/htmlnotice.go
index e98b272..1a49610 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice.go
@@ -26,19 +26,13 @@
"path/filepath"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
"github.com/google/blueprint/deptools"
)
var (
- outputFile = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
- depsFile = flag.String("d", "", "Where to write the deps file")
- includeTOC = flag.Bool("toc", true, "Whether to include a table of contents.")
- product = flag.String("product", "", "The name of the product for which the notice is generated.")
- stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
- title = flag.String("title", "", "The title of the notice file.")
-
failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
failNoLicenses = fmt.Errorf("No licenses found")
)
@@ -70,23 +64,10 @@
return installPath
}
-func init() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs an html NOTICE.html or gzipped NOTICE.html.gz file if the -o filename
-ends with ".gz".
-
-Options:
-`, filepath.Base(os.Args[0]))
- flag.PrintDefaults()
- }
-}
-
// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
var f multiString
- flag.Var(&f, name, usage)
+ flags.Var(&f, name, usage)
return &f
}
@@ -97,16 +78,57 @@
func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
func main() {
- flag.Parse()
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an HTML NOTICE.html file, or a gzipped NOTICE.html.gz file if the -o
+filename ends with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+ flags.PrintDefaults()
+ }
+
+ outputFile := flags.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+ depsFile := flags.String("d", "", "Where to write the deps file")
+ includeTOC := flags.Bool("toc", true, "Whether to include a table of contents.")
+ product := flags.String("product", "", "The name of the product for which the notice is generated.")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, i.e. path to root (multiple allowed)")
+ title := flags.String("title", "", "The title of the notice file.")
+
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
if len(*outputFile) == 0 {
- flag.Usage()
+ flags.Usage()
fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
os.Exit(2)
} else {
@@ -143,10 +165,10 @@
ctx := &context{ofile, os.Stderr, compliance.FS, *includeTOC, *product, *stripPrefix, *title, &deps}
- err := htmlNotice(ctx, flag.Args()...)
+ err := htmlNotice(ctx, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
diff --git a/tools/compliance/cmd/listshare/listshare.go b/tools/compliance/cmd/listshare/listshare.go
index 7f4038b..31bd1b2 100644
--- a/tools/compliance/cmd/listshare/listshare.go
+++ b/tools/compliance/cmd/listshare/listshare.go
@@ -15,6 +15,7 @@
package main
import (
+ "bytes"
"flag"
"fmt"
"io"
@@ -24,12 +25,41 @@
"sort"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
)
-func init() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, `Usage: %s file.meta_lic {file.meta_lic...}
+var (
+ failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+ failNoLicenses = fmt.Errorf("No licenses found")
+)
+
+func main() {
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} file.meta_lic {file.meta_lic...}
Outputs a csv file with 1 project per line in the first field followed
by target:condition pairs describing why the project must be shared.
@@ -39,30 +69,61 @@
restricted (e.g. GPL) or reciprocal (e.g. MPL).
`, filepath.Base(os.Args[0]))
}
-}
-var (
- failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
- failNoLicenses = fmt.Errorf("No licenses found")
-)
+ outputFile := flags.String("o", "-", "Where to write the list of projects to share. (default stdout)")
-func main() {
- flag.Parse()
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
- err := listShare(os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+ if len(*outputFile) == 0 {
+ flags.Usage()
+ fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+ os.Exit(2)
+ } else {
+ dir, err := filepath.Abs(filepath.Dir(*outputFile))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ fi, err := os.Stat(dir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+ os.Exit(1)
+ }
+ if !fi.IsDir() {
+ fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+ os.Exit(1)
+ }
+ }
+
+ var ofile io.Writer
+ ofile = os.Stdout
+ var obuf *bytes.Buffer
+ if *outputFile != "-" {
+ obuf = &bytes.Buffer{}
+ ofile = obuf
+ }
+
+ err := listShare(ofile, os.Stderr, compliance.FS, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
}
+ if *outputFile != "-" {
+ err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+ os.Exit(1)
+ }
+ }
os.Exit(0)
}
@@ -76,7 +137,7 @@
// Read the license graph from the license metadata files (*.meta_lic).
licenseGraph, err := compliance.ReadLicenseGraph(rootFS, stderr, files)
if err != nil {
- return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+ return fmt.Errorf("Unable to read license metadata file(s) %q from %q: %v\n", files, os.Getenv("PWD"), err)
}
if licenseGraph == nil {
return failNoLicenses
diff --git a/tools/compliance/cmd/rtrace/rtrace.go b/tools/compliance/cmd/rtrace/rtrace.go
index 91171c4..667cdce 100644
--- a/tools/compliance/cmd/rtrace/rtrace.go
+++ b/tools/compliance/cmd/rtrace/rtrace.go
@@ -15,6 +15,7 @@
package main
import (
+ "bytes"
"flag"
"fmt"
"io"
@@ -24,21 +25,19 @@
"sort"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
)
var (
- sources = newMultiString("rtrace", "Projects or metadata files to trace back from. (required; multiple allowed)")
- stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
failNoSources = fmt.Errorf("\nNo projects or metadata files to trace back from")
failNoLicenses = fmt.Errorf("No licenses found")
)
type context struct {
- sources []string
- stripPrefix []string
+ sources []string
+ stripPrefix []string
}
func (ctx context) strip(installPath string) string {
@@ -54,8 +53,44 @@
return installPath
}
-func init() {
- flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+ var f multiString
+ flags.Var(&f, name, usage)
+ return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
Outputs a space-separated Target ActsOn Origin Condition tuple for each
@@ -72,50 +107,75 @@
Options:
`, filepath.Base(os.Args[0]))
- flag.PrintDefaults()
+ flags.PrintDefaults()
}
-}
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
- var f multiString
- flag.Var(&f, name, usage)
- return &f
-}
+ outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+ sources := newMultiString(flags, "rtrace", "Projects or metadata files to trace back from. (required; multiple allowed)")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, i.e. path to root (multiple allowed)")
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
- flag.Parse()
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
if len(*sources) == 0 {
- flag.Usage()
+ flags.Usage()
fmt.Fprintf(os.Stderr, "\nMust specify at least 1 --rtrace source.\n")
os.Exit(2)
}
- ctx := &context{
- sources: *sources,
- stripPrefix: *stripPrefix,
+ if len(*outputFile) == 0 {
+ flags.Usage()
+ fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+ os.Exit(2)
+ } else {
+ dir, err := filepath.Abs(filepath.Dir(*outputFile))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+ os.Exit(1)
+ }
+ fi, err := os.Stat(dir)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+ os.Exit(1)
+ }
+ if !fi.IsDir() {
+ fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+ os.Exit(1)
+ }
}
- _, err := traceRestricted(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+
+ var ofile io.Writer
+ ofile = os.Stdout
+ var obuf *bytes.Buffer
+ if *outputFile != "-" {
+ obuf = &bytes.Buffer{}
+ ofile = obuf
+ }
+
+ ctx := &context{
+ sources: *sources,
+ stripPrefix: *stripPrefix,
+ }
+ _, err := traceRestricted(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
}
+ if *outputFile != "-" {
+ err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+ os.Exit(1)
+ }
+ }
os.Exit(0)
}
diff --git a/tools/compliance/cmd/shippedlibs/shippedlibs.go b/tools/compliance/cmd/shippedlibs/shippedlibs.go
index 9d25dd3..add6dd6 100644
--- a/tools/compliance/cmd/shippedlibs/shippedlibs.go
+++ b/tools/compliance/cmd/shippedlibs/shippedlibs.go
@@ -39,9 +39,6 @@
rootFS fs.FS
}
-func init() {
-}
-
func main() {
var expandedArgs []string
for _, arg := range os.Args[1:] {
diff --git a/tools/compliance/cmd/textnotice/textnotice.go b/tools/compliance/cmd/textnotice/textnotice.go
index cfa0859..9beaf58 100644
--- a/tools/compliance/cmd/textnotice/textnotice.go
+++ b/tools/compliance/cmd/textnotice/textnotice.go
@@ -25,18 +25,13 @@
"path/filepath"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
"github.com/google/blueprint/deptools"
)
var (
- outputFile = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
- depsFile = flag.String("d", "", "Where to write the deps file")
- product = flag.String("product", "", "The name of the product for which the notice is generated.")
- stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
- title = flag.String("title", "", "The title of the notice file.")
-
failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
failNoLicenses = fmt.Errorf("No licenses found")
)
@@ -67,22 +62,10 @@
return installPath
}
-func init() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs a text NOTICE file.
-
-Options:
-`, filepath.Base(os.Args[0]))
- flag.PrintDefaults()
- }
-}
-
// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
var f multiString
- flag.Var(&f, name, usage)
+ flags.Var(&f, name, usage)
return &f
}
@@ -93,16 +76,55 @@
func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
func main() {
- flag.Parse()
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a text NOTICE file.
+
+Options:
+`, filepath.Base(os.Args[0]))
+ flags.PrintDefaults()
+ }
+
+ outputFile := flags.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+ depsFile := flags.String("d", "", "Where to write the deps file")
+ product := flags.String("product", "", "The name of the product for which the notice is generated.")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, i.e. path to root (multiple allowed)")
+ title := flags.String("title", "", "The title of the notice file.")
+
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
if len(*outputFile) == 0 {
- flag.Usage()
+ flags.Usage()
fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
os.Exit(2)
} else {
@@ -139,10 +161,10 @@
ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, *title, &deps}
- err := textNotice(ctx, flag.Args()...)
+ err := textNotice(ctx, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice.go b/tools/compliance/cmd/xmlnotice/xmlnotice.go
index 84859d7..2097b7c 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice.go
@@ -26,18 +26,13 @@
"path/filepath"
"strings"
+ "android/soong/response"
"android/soong/tools/compliance"
"github.com/google/blueprint/deptools"
)
var (
- outputFile = flag.String("o", "-", "Where to write the NOTICE xml or xml.gz file. (default stdout)")
- depsFile = flag.String("d", "", "Where to write the deps file")
- product = flag.String("product", "", "The name of the product for which the notice is generated.")
- stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
- title = flag.String("title", "", "The title of the notice file.")
-
failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
failNoLicenses = fmt.Errorf("No licenses found")
)
@@ -68,23 +63,10 @@
return installPath
}
-func init() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs an xml NOTICE.xml or gzipped NOTICE.xml.gz file if the -o filename ends
-with ".gz".
-
-Options:
-`, filepath.Base(os.Args[0]))
- flag.PrintDefaults()
- }
-}
-
// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
var f multiString
- flag.Var(&f, name, usage)
+ flags.Var(&f, name, usage)
return &f
}
@@ -95,16 +77,56 @@
func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
func main() {
- flag.Parse()
+ var expandedArgs []string
+ for _, arg := range os.Args[1:] {
+ if strings.HasPrefix(arg, "@") {
+ f, err := os.Open(strings.TrimPrefix(arg, "@"))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+
+ respArgs, err := response.ReadRspFile(f)
+ f.Close()
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err.Error())
+ os.Exit(1)
+ }
+ expandedArgs = append(expandedArgs, respArgs...)
+ } else {
+ expandedArgs = append(expandedArgs, arg)
+ }
+ }
+
+ flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+ flags.Usage = func() {
+ fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an XML NOTICE.xml file, or a gzipped NOTICE.xml.gz file if the -o
+filename ends with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+ flags.PrintDefaults()
+ }
+
+ outputFile := flags.String("o", "-", "Where to write the NOTICE xml or xml.gz file. (default stdout)")
+ depsFile := flags.String("d", "", "Where to write the deps file")
+ product := flags.String("product", "", "The name of the product for which the notice is generated.")
+ stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, i.e. path to root (multiple allowed)")
+ title := flags.String("title", "", "The title of the notice file.")
+
+ flags.Parse(expandedArgs)
// Must specify at least one root target.
- if flag.NArg() == 0 {
- flag.Usage()
+ if flags.NArg() == 0 {
+ flags.Usage()
os.Exit(2)
}
if len(*outputFile) == 0 {
- flag.Usage()
+ flags.Usage()
fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
os.Exit(2)
} else {
@@ -141,10 +163,10 @@
ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, *title, &deps}
- err := xmlNotice(ctx, flag.Args()...)
+ err := xmlNotice(ctx, flags.Args()...)
if err != nil {
if err == failNoneRequested {
- flag.Usage()
+ flags.Usage()
}
fmt.Fprintf(os.Stderr, "%s\n", err.Error())
os.Exit(1)
diff --git a/tools/filter-product-graph.py b/tools/filter-product-graph.py
deleted file mode 100755
index b3a5b42..0000000
--- a/tools/filter-product-graph.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-# vim: ts=2 sw=2 nocindent
-
-import re
-import sys
-
-def choose_regex(regs, line):
- for func,reg in regs:
- m = reg.match(line)
- if m:
- return (func,m)
- return (None,None)
-
-def gather(included, deps):
- result = set()
- for inc in included:
- result.add(inc)
- for d in deps:
- if inc == d[1]:
- result.add(d[0])
- return result
-
-def main():
- deps = []
- infos = []
- def dependency(m):
- deps.append((m.group(1), m.group(2)))
- def info(m):
- infos.append((m.group(1), m.group(2)))
-
- REGS = [
- (dependency, re.compile(r'"(.*)"\s*->\s*"(.*)"')),
- (info, re.compile(r'"(.*)"(\s*\[.*\])')),
- ]
-
- lines = sys.stdin.readlines()
- lines = [line.strip() for line in lines]
-
- for line in lines:
- func,m = choose_regex(REGS, line)
- if func:
- func(m)
-
- # filter
- sys.stderr.write("argv: " + str(sys.argv) + "\n")
- if not (len(sys.argv) == 2 and sys.argv[1] == "--all"):
- targets = sys.argv[1:]
-
- included = set(targets)
- prevLen = -1
- while prevLen != len(included):
- prevLen = len(included)
- included = gather(included, deps)
-
- deps = [dep for dep in deps if dep[1] in included]
- infos = [info for info in infos if info[0] in included]
-
- print "digraph {"
- print "graph [ ratio=.5 ];"
- for dep in deps:
- print '"%s" -> "%s"' % dep
- for info in infos:
- print '"%s"%s' % info
- print "}"
-
-
-if __name__ == "__main__":
- main()
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
index c6e89f0..32afa45 100644
--- a/tools/rbcrun/host.go
+++ b/tools/rbcrun/host.go
@@ -20,6 +20,7 @@
"os"
"os/exec"
"path/filepath"
+ "sort"
"strings"
"go.starlark.net/starlark"
@@ -111,19 +112,6 @@
return e.globals, e.err
}
-// fileExists returns True if file with given name exists.
-func fileExists(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
- kwargs []starlark.Tuple) (starlark.Value, error) {
- var path string
- if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &path); err != nil {
- return starlark.None, err
- }
- if _, err := os.Stat(path); err != nil {
- return starlark.False, nil
- }
- return starlark.True, nil
-}
-
// wildcard(pattern, top=None) expands shell's glob pattern. If 'top' is present,
// the 'top/pattern' is globbed and then 'top/' prefix is removed.
func wildcard(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
@@ -150,6 +138,10 @@
files[i] = strings.TrimPrefix(files[i], prefix)
}
}
+ // Kati uses glob(3) with no flags, which means it's sorted
+ // because GLOB_NOSORT is not passed. Go's glob is not
+ // guaranteed to sort the results.
+ sort.Strings(files)
return makeStringList(files), nil
}
@@ -269,8 +261,6 @@
"struct": starlark.NewBuiltin("struct", starlarkstruct.Make),
"rblf_cli": structFromEnv(env),
"rblf_env": structFromEnv(os.Environ()),
- // To convert makefile's $(wildcard foo)
- "rblf_file_exists": starlark.NewBuiltin("rblf_file_exists", fileExists),
// To convert find-copy-subdir and product-copy-files-by pattern
"rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
// To convert makefile's $(shell cmd)
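The sort.Strings call added to wildcard() pins down an ordering difference: kati's glob(3) results come back sorted because GLOB_NOSORT is not passed, while Go's filepath.Glob does not document an order. A small illustration, standard library only:

```
package main

import (
	"fmt"
	"path/filepath"
	"sort"
)

func main() {
	// Sorting explicitly keeps rblf_wildcard deterministic regardless of
	// what order filepath.Glob happens to return.
	files, err := filepath.Glob("*.star")
	if err != nil {
		panic(err)
	}
	sort.Strings(files)
	fmt.Println(files)
}
```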
diff --git a/tools/rbcrun/testdata/file_ops.star b/tools/rbcrun/testdata/file_ops.star
index 50e39bf..2ee78fc 100644
--- a/tools/rbcrun/testdata/file_ops.star
+++ b/tools/rbcrun/testdata/file_ops.star
@@ -4,9 +4,6 @@
def test():
myname = "file_ops.star"
- assert.true(rblf_file_exists("."), "./ exists ")
- assert.true(rblf_file_exists(myname), "the file %s does exist" % myname)
- assert.true(not rblf_file_exists("no_such_file"), "the file no_such_file does not exist")
files = rblf_wildcard("*.star")
assert.true(myname in files, "expected %s in %s" % (myname, files))
files = rblf_wildcard("*.star", rblf_env.TEST_DATA_DIR)
diff --git a/tools/rbcrun/testdata/module1.star b/tools/rbcrun/testdata/module1.star
index 913fb7d..be04f75 100644
--- a/tools/rbcrun/testdata/module1.star
+++ b/tools/rbcrun/testdata/module1.star
@@ -2,6 +2,6 @@
load("assert.star", "assert")
# Make sure that builtins are defined for the loaded module, too
-assert.true(rblf_file_exists("module1.star"))
-assert.true(not rblf_file_exists("no_such file"))
+assert.true(rblf_wildcard("module1.star"))
+assert.true(not rblf_wildcard("no_such file"))
test = "module1"
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index d8e34b7..122202b 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -519,23 +519,6 @@
}
python_binary_host {
- name: "fsverity_manifest_generator",
- defaults: ["releasetools_binary_defaults"],
- srcs: [
- "fsverity_manifest_generator.py",
- ],
- libs: [
- "fsverity_digests_proto_python",
- "releasetools_common",
- ],
- required: [
- "aapt2",
- "apksigner",
- "fsverity",
- ],
-}
-
-python_binary_host {
name: "fsverity_metadata_generator",
defaults: ["releasetools_binary_defaults"],
srcs: [
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index e3db161..3e87c54 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -557,7 +557,7 @@
cmd = [bpttool, "make_table", "--output_json", bpt.name,
"--output_gpt", img.name]
input_files_str = OPTIONS.info_dict["board_bpt_input_files"]
- input_files = input_files_str.split(" ")
+ input_files = input_files_str.split()
for i in input_files:
cmd.extend(["--input", i])
disk_size = OPTIONS.info_dict.get("board_bpt_disk_size")
@@ -871,7 +871,7 @@
if has_vendor_kernel_boot:
banner("vendor_kernel_boot")
- vendor_kernel_boot_image = common.GetVendorBootImage(
+ vendor_kernel_boot_image = common.GetVendorKernelBootImage(
"IMAGES/vendor_kernel_boot.img", "vendor_kernel_boot.img", OPTIONS.input_tmp,
"VENDOR_KERNEL_BOOT")
if vendor_kernel_boot_image:
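The `split()` change is more than cosmetic: splitting on a literal space yields empty tokens when the property contains doubled or trailing spaces, and each empty token would have been passed to bpttool as an empty `--input` value. A quick illustration:

```python
>>> "a.bpt  b.bpt ".split(" ")
['a.bpt', '', 'b.bpt', '']
>>> "a.bpt  b.bpt ".split()   # splits on runs of whitespace
['a.bpt', 'b.bpt']
```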
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3f13a4a..6730a25 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -54,7 +54,7 @@
class ApexApkSigner(object):
"""Class to sign the apk files and other files in an apex payload image and repack the apex"""
- def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None, fsverity_tool=None):
+ def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None):
self.apex_path = apex_path
if not key_passwords:
self.key_passwords = dict()
@@ -65,9 +65,8 @@
OPTIONS.search_path, "bin", "debugfs_static")
self.avbtool = avbtool if avbtool else "avbtool"
self.sign_tool = sign_tool
- self.fsverity_tool = fsverity_tool if fsverity_tool else "fsverity"
- def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None):
+ def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False):
"""Scans and signs the payload files and repack the apex
Args:
@@ -92,7 +91,7 @@
# No need to sign and repack, return the original apex path.
if not apk_entries and not sepolicy_entries and self.sign_tool is None:
- logger.info('No payload (apk or zip) file to sign in %s', self.apex_path)
+ logger.info('No apk file to sign in %s', self.apex_path)
return self.apex_path
for entry in apk_entries:
@@ -106,16 +105,15 @@
logger.warning('Apk path does not contain the intended directory name:'
' %s', entry)
- payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(apk_entries,
- apk_keys, payload_key, sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args)
+ payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
+ apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args)
if not has_signed_content:
- logger.info('No contents has been signed in %s', self.apex_path)
+ logger.info('No contents have been signed in %s', self.apex_path)
return self.apex_path
return self.RepackApexPayload(payload_dir, payload_key, signing_args)
- def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key,
- sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args):
+ def ExtractApexPayloadAndSignContents(self, apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args):
"""Extracts the payload image and signs the containing apk files."""
if not os.path.exists(self.debugfs_path):
raise ApexSigningError(
@@ -126,11 +124,11 @@
extract_cmd = ['deapexer', '--debugfs_path',
self.debugfs_path, 'extract', self.apex_path, payload_dir]
common.RunAndCheckOutput(extract_cmd)
+ assert os.path.exists(self.apex_path)
has_signed_content = False
for entry in apk_entries:
apk_path = os.path.join(payload_dir, entry)
- assert os.path.exists(self.apex_path)
key_name = apk_keys.get(os.path.basename(entry))
if key_name in common.SPECIAL_CERT_STRINGS:
@@ -148,9 +146,35 @@
has_signed_content = True
for entry in sepolicy_entries:
- sepolicy_key = sepolicy_key if sepolicy_key else payload_key
- self.SignSePolicy(payload_dir, entry, sepolicy_key, sepolicy_cert)
- has_signed_content = True
+ sepolicy_path = os.path.join(payload_dir, entry)
+
+ if 'etc' not in entry:
+ logger.warning('Sepolicy path does not contain the intended directory name "etc":'
+ ' %s', entry)
+
+ key_name = apk_keys.get(os.path.basename(entry))
+ if key_name is None:
+ logger.warning('Failed to find signing keys for {} in'
+ ' apex {}, payload key will be used instead.'
+ ' Use "-e <name>=" to specify a key'
+ .format(entry, self.apex_path))
+ key_name = payload_key
+
+ if key_name in common.SPECIAL_CERT_STRINGS:
+ logger.info('Not signing: %s due to special cert string', sepolicy_path)
+ continue
+
+ if OPTIONS.sign_sepolicy_path is not None:
+ sig_path = os.path.join(payload_dir, sepolicy_path + '.sig')
+ fsv_sig_path = os.path.join(payload_dir, sepolicy_path + '.fsv_sig')
+ old_sig = common.MakeTempFile()
+ old_fsv_sig = common.MakeTempFile()
+ os.rename(sig_path, old_sig)
+ os.rename(fsv_sig_path, old_fsv_sig)
+
+ logger.info('Signing sepolicy file %s in apex %s', sepolicy_path, self.apex_path)
+ if common.SignSePolicy(sepolicy_path, key_name, self.key_passwords.get(key_name)):
+ has_signed_content = True
if self.sign_tool:
logger.info('Signing payload contents in apex %s with %s', self.apex_path, self.sign_tool)
@@ -165,36 +189,6 @@
return payload_dir, has_signed_content
- def SignSePolicy(self, payload_dir, sepolicy_zip, sepolicy_key, sepolicy_cert):
- sepolicy_sig = sepolicy_zip + '.sig'
- sepolicy_fsv_sig = sepolicy_zip + '.fsv_sig'
-
- policy_zip_path = os.path.join(payload_dir, sepolicy_zip)
- sig_out_path = os.path.join(payload_dir, sepolicy_sig)
- sig_old = sig_out_path + '.old'
- if os.path.exists(sig_out_path):
- os.rename(sig_out_path, sig_old)
- sign_cmd = ['openssl', 'dgst', '-sign', sepolicy_key, '-keyform', 'PEM', '-sha256',
- '-out', sig_out_path, '-binary', policy_zip_path]
- common.RunAndCheckOutput(sign_cmd)
- if os.path.exists(sig_old):
- os.remove(sig_old)
-
- if not sepolicy_cert:
- logger.info('No cert provided for SEPolicy, skipping fsverity sign')
- return
-
- fsv_sig_out_path = os.path.join(payload_dir, sepolicy_fsv_sig)
- fsv_sig_old = fsv_sig_out_path + '.old'
- if os.path.exists(fsv_sig_out_path):
- os.rename(fsv_sig_out_path, fsv_sig_old)
-
- fsverity_cmd = [self.fsverity_tool, 'sign', policy_zip_path, fsv_sig_out_path,
- '--key=' + sepolicy_key, '--cert=' + sepolicy_cert]
- common.RunAndCheckOutput(fsverity_cmd)
- if os.path.exists(fsv_sig_old):
- os.remove(fsv_sig_old)
-
def RepackApexPayload(self, payload_dir, payload_key, signing_args=None):
"""Rebuilds the apex file with the updated payload directory."""
apex_dir = common.MakeTempDir()
@@ -366,8 +360,7 @@
def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
container_pw, apk_keys, codename_to_api_level_map,
no_hashtree, signing_args=None, sign_tool=None,
- is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
- fsverity_tool=None):
+ is_sepolicy=False):
"""Signs the current uncompressed APEX with the given payload/container keys.
Args:
@@ -381,9 +374,6 @@
signing_args: Additional args to be passed to the payload signer.
sign_tool: A tool to sign the contents of the APEX.
is_sepolicy: Indicates if the apex is a sepolicy.apex
- sepolicy_key: Key to sign a sepolicy zip.
- sepolicy_cert: Cert to sign a sepolicy zip.
- fsverity_tool: fsverity path to sign sepolicy zip.
Returns:
The path to the signed APEX file.
@@ -392,9 +382,9 @@
# the apex file after signing.
apk_signer = ApexApkSigner(apex_file, container_pw,
codename_to_api_level_map,
- avbtool, sign_tool, fsverity_tool)
+ avbtool, sign_tool)
apex_file = apk_signer.ProcessApexFile(
- apk_keys, payload_key, signing_args, is_sepolicy, sepolicy_key, sepolicy_cert)
+ apk_keys, payload_key, signing_args, is_sepolicy)
# 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
# payload_key.
@@ -449,8 +439,7 @@
def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
container_pw, apk_keys, codename_to_api_level_map,
no_hashtree, signing_args=None, sign_tool=None,
- is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
- fsverity_tool=None):
+ is_sepolicy=False):
"""Signs the current compressed APEX with the given payload/container keys.
Args:
@@ -463,9 +452,6 @@
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
is_sepolicy: Indicates if the apex is a sepolicy.apex
- sepolicy_key: Key to sign a sepolicy zip.
- sepolicy_cert: Cert to sign a sepolicy zip.
- fsverity_tool: fsverity path to sign sepolicy zip.
Returns:
The path to the signed APEX file.
@@ -493,10 +479,7 @@
no_hashtree,
signing_args,
sign_tool,
- is_sepolicy,
- sepolicy_key,
- sepolicy_cert,
- fsverity_tool)
+ is_sepolicy)
# 3. Compress signed original apex.
compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
@@ -523,9 +506,8 @@
def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
- apk_keys, codename_to_api_level_map,
- no_hashtree, signing_args=None, sign_tool=None,
- is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+ apk_keys, codename_to_api_level_map, no_hashtree,
+ signing_args=None, sign_tool=None, is_sepolicy=False):
"""Signs the current APEX with the given payload/container keys.
Args:
@@ -537,9 +519,7 @@
codename_to_api_level_map: A dict that maps from codename to API level.
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
- sepolicy_key: Key to sign a sepolicy zip.
- sepolicy_cert: Cert to sign a sepolicy zip.
- fsverity_tool: fsverity path to sign sepolicy zip.
+ is_sepolicy: Indicates if the apex is a sepolicy.apex
Returns:
The path to the signed APEX file.
@@ -566,10 +546,7 @@
apk_keys=apk_keys,
signing_args=signing_args,
sign_tool=sign_tool,
- is_sepolicy=is_sepolicy,
- sepolicy_key=sepolicy_key,
- sepolicy_cert=sepolicy_cert,
- fsverity_tool=fsverity_tool)
+ is_sepolicy=is_sepolicy)
elif apex_type == 'COMPRESSED':
return SignCompressedApex(
avbtool,
@@ -582,10 +559,7 @@
apk_keys=apk_keys,
signing_args=signing_args,
sign_tool=sign_tool,
- is_sepolicy=is_sepolicy,
- sepolicy_key=sepolicy_key,
- sepolicy_cert=sepolicy_cert,
- fsverity_tool=fsverity_tool)
+ is_sepolicy=is_sepolicy)
else:
# TODO(b/172912232): support signing compressed apex
raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
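A condensed sketch of the new key-selection fallback for sepolicy entries (the helper name is hypothetical; the logic mirrors ExtractApexPayloadAndSignContents above):

```python
import os

SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")  # as defined in common.py

def select_sepolicy_key(entry, apk_keys, payload_key):
    """Prefer a per-entry key from apk_keys ("-e <name>=<key>");
    otherwise fall back to the payload key. Returns None when the
    entry carries a special cert string and must be skipped."""
    key_name = apk_keys.get(os.path.basename(entry))
    if key_name is None:
        # No explicit override; sign with the payload key instead.
        key_name = payload_key
    if key_name in SPECIAL_CERT_STRINGS:
        return None  # not signing due to special cert string
    return key_name
```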
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index e33b581..6d7895e 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -328,9 +328,17 @@
compressor = prop_dict["erofs_default_compressor"]
if "erofs_compressor" in prop_dict:
compressor = prop_dict["erofs_compressor"]
- if compressor:
+ if compressor and compressor != "none":
build_command.extend(["-z", compressor])
+ compress_hints = None
+ if "erofs_default_compress_hints" in prop_dict:
+ compress_hints = prop_dict["erofs_default_compress_hints"]
+ if "erofs_compress_hints" in prop_dict:
+ compress_hints = prop_dict["erofs_compress_hints"]
+ if compress_hints:
+ build_command.extend(["--compress-hints", compress_hints])
+
build_command.extend(["--mount-point", prop_dict["mount_point"]])
if target_out:
build_command.extend(["--product-out", target_out])
@@ -348,6 +356,8 @@
build_command.extend(["-C", prop_dict["erofs_pcluster_size"]])
if "erofs_share_dup_blocks" in prop_dict:
build_command.extend(["--chunksize", "4096"])
+ if "erofs_use_legacy_compression" in prop_dict:
+ build_command.extend(["-E", "legacy-compress"])
build_command.extend([out_file, in_dir])
if "erofs_sparse_flag" in prop_dict and not disable_sparse:
@@ -650,9 +660,11 @@
common_props = (
"extfs_sparse_flag",
"erofs_default_compressor",
+ "erofs_default_compress_hints",
"erofs_pcluster_size",
"erofs_share_dup_blocks",
"erofs_sparse_flag",
+ "erofs_use_legacy_compression",
"squashfs_sparse_flag",
"system_f2fs_compress",
"system_f2fs_sldc_flags",
@@ -698,10 +710,12 @@
(True, "avb_{}_hashtree_enable", "avb_hashtree_enable"),
(True, "avb_{}_key_path", "avb_key_path"),
(True, "avb_{}_salt", "avb_salt"),
+ (True, "erofs_use_legacy_compression", "erofs_use_legacy_compression"),
(True, "ext4_share_dup_blocks", "ext4_share_dup_blocks"),
(True, "{}_base_fs_file", "base_fs_file"),
(True, "{}_disable_sparse", "disable_sparse"),
(True, "{}_erofs_compressor", "erofs_compressor"),
+ (True, "{}_erofs_compress_hints", "erofs_compress_hints"),
(True, "{}_erofs_pcluster_size", "erofs_pcluster_size"),
(True, "{}_erofs_share_dup_blocks", "erofs_share_dup_blocks"),
(True, "{}_extfs_inode_count", "extfs_inode_count"),
@@ -810,16 +824,18 @@
def main(argv):
- if len(argv) != 4:
+ args = common.ParseOptions(argv, __doc__)
+
+ if len(args) != 4:
print(__doc__)
sys.exit(1)
common.InitLogging()
- in_dir = argv[0]
- glob_dict_file = argv[1]
- out_file = argv[2]
- target_out = argv[3]
+ in_dir = args[0]
+ glob_dict_file = args[1]
+ out_file = args[2]
+ target_out = args[3]
glob_dict = LoadGlobalDict(glob_dict_file)
if "mount_point" in glob_dict:
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index bd3af68..917e4dc 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -72,7 +72,9 @@
if "ANDROID_HOST_OUT" in os.environ:
self.search_path = os.environ["ANDROID_HOST_OUT"]
self.signapk_shared_library_path = "lib64" # Relative to search_path
+ self.sign_sepolicy_path = None
self.extra_signapk_args = []
+ self.extra_sign_sepolicy_args = []
self.aapt2_path = "aapt2"
self.java_path = "java" # Use the one on the path by default.
self.java_args = ["-Xmx2048m"] # The default JVM args.
@@ -114,7 +116,7 @@
# AVB_FOOTER_ARGS_BY_PARTITION in sign_target_files_apks need to be updated
# accordingly.
AVB_PARTITIONS = ('boot', 'init_boot', 'dtbo', 'odm', 'product', 'pvmfw', 'recovery',
- 'system', 'system_ext', 'vendor', 'vendor_boot',
+ 'system', 'system_ext', 'vendor', 'vendor_boot', 'vendor_kernel_boot',
'vendor_dlkm', 'odm_dlkm', 'system_dlkm')
# Chained VBMeta partitions.
@@ -455,6 +457,11 @@
return vabc_enabled
@property
+ def is_android_r(self):
+ system_prop = self.info_dict.get("system.build.prop")
+ return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
+
+ @property
def is_vabc_xor(self):
vendor_prop = self.info_dict.get("vendor.build.prop")
vabc_xor_enabled = vendor_prop and \
@@ -725,7 +732,7 @@
GZ = 2
-def _GetRamdiskFormat(info_dict):
+def GetRamdiskFormat(info_dict):
if info_dict.get('lz4_ramdisks') == 'true':
ramdisk_format = RamdiskFormat.LZ4
else:
@@ -834,7 +841,7 @@
# Load recovery fstab if applicable.
d["fstab"] = _FindAndLoadRecoveryFstab(d, input_file, read_helper)
- ramdisk_format = _GetRamdiskFormat(d)
+ ramdisk_format = GetRamdiskFormat(d)
# Tries to load the build props for all partitions with care_map, including
# system and vendor.
@@ -1182,16 +1189,20 @@
"""
def uniq_concat(a, b):
- combined = set(a.split(" "))
- combined.update(set(b.split(" ")))
+ combined = set(a.split())
+ combined.update(set(b.split()))
combined = [item.strip() for item in combined if item.strip()]
return " ".join(sorted(combined))
if (framework_dict.get("use_dynamic_partitions") !=
- "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
+ "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
raise ValueError("Both dictionaries must have use_dynamic_partitions=true")
merged_dict = {"use_dynamic_partitions": "true"}
+ # For key-value pairs that are identical in both dicts, copy them to the merged dict.
+ for key in vendor_dict.keys():
+ if key in framework_dict and framework_dict[key] == vendor_dict[key]:
+ merged_dict[key] = vendor_dict[key]
merged_dict["dynamic_partition_list"] = uniq_concat(
framework_dict.get("dynamic_partition_list", ""),
@@ -1200,7 +1211,7 @@
# Super block devices are defined by the vendor dict.
if "super_block_devices" in vendor_dict:
merged_dict["super_block_devices"] = vendor_dict["super_block_devices"]
- for block_device in merged_dict["super_block_devices"].split(" "):
+ for block_device in merged_dict["super_block_devices"].split():
key = "super_%s_device_size" % block_device
if key not in vendor_dict:
raise ValueError("Vendor dict does not contain required key %s." % key)
@@ -1209,7 +1220,7 @@
# Partition groups and group sizes are defined by the vendor dict because
# these values may vary for each board that uses a shared system image.
merged_dict["super_partition_groups"] = vendor_dict["super_partition_groups"]
- for partition_group in merged_dict["super_partition_groups"].split(" "):
+ for partition_group in merged_dict["super_partition_groups"].split():
# Set the partition group's size using the value from the vendor dict.
key = "super_%s_group_size" % partition_group
if key not in vendor_dict:
@@ -1575,7 +1586,7 @@
img = tempfile.NamedTemporaryFile()
if has_ramdisk:
- ramdisk_format = _GetRamdiskFormat(info_dict)
+ ramdisk_format = GetRamdiskFormat(info_dict)
ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file,
ramdisk_format=ramdisk_format)
@@ -1782,6 +1793,9 @@
if info_dict.get("recovery_as_boot") == "true":
return True # the recovery-as-boot boot.img has a RECOVERY ramdisk.
+ if info_dict.get("gki_boot_image_without_ramdisk") == "true":
+ return False # A GKI boot.img has no ramdisk since Android-13.
+
if info_dict.get("system_root_image") == "true":
# The ramdisk content is merged into the system.img, so there is NO
# ramdisk in the boot.img or boot-<kernel version>.img.
@@ -1838,7 +1852,7 @@
return None
-def _BuildVendorBootImage(sourcedir, info_dict=None):
+def _BuildVendorBootImage(sourcedir, partition_name, info_dict=None):
"""Build a vendor boot image from the specified sourcedir.
Take a ramdisk, dtb, and vendor_cmdline from the input (in 'sourcedir'), and
@@ -1853,7 +1867,7 @@
img = tempfile.NamedTemporaryFile()
- ramdisk_format = _GetRamdiskFormat(info_dict)
+ ramdisk_format = GetRamdiskFormat(info_dict)
ramdisk_img = _MakeRamdisk(sourcedir, ramdisk_format=ramdisk_format)
# use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
@@ -1863,8 +1877,13 @@
fn = os.path.join(sourcedir, "dtb")
if os.access(fn, os.F_OK):
- cmd.append("--dtb")
- cmd.append(fn)
+ has_vendor_kernel_boot = (info_dict.get("vendor_kernel_boot", "").lower() == "true")
+
+ # Pack dtb into vendor_kernel_boot if building vendor_kernel_boot.
+ # Otherwise pack dtb into vendor_boot.
+ if not has_vendor_kernel_boot or partition_name == "vendor_kernel_boot":
+ cmd.append("--dtb")
+ cmd.append(fn)
fn = os.path.join(sourcedir, "vendor_cmdline")
if os.access(fn, os.F_OK):
@@ -1924,11 +1943,11 @@
# AVB: if enabled, calculate and add hash.
if info_dict.get("avb_enable") == "true":
avbtool = info_dict["avb_avbtool"]
- part_size = info_dict["vendor_boot_size"]
+ part_size = info_dict[f'{partition_name}_size']
cmd = [avbtool, "add_hash_footer", "--image", img.name,
- "--partition_size", str(part_size), "--partition_name", "vendor_boot"]
- AppendAVBSigningArgs(cmd, "vendor_boot")
- args = info_dict.get("avb_vendor_boot_add_hash_footer_args")
+ "--partition_size", str(part_size), "--partition_name", partition_name]
+ AppendAVBSigningArgs(cmd, partition_name)
+ args = info_dict.get(f'avb_{partition_name}_add_hash_footer_args')
if args and args.strip():
cmd.extend(shlex.split(args))
RunAndCheckOutput(cmd)
@@ -1962,7 +1981,31 @@
info_dict = OPTIONS.info_dict
data = _BuildVendorBootImage(
- os.path.join(unpack_dir, tree_subdir), info_dict)
+ os.path.join(unpack_dir, tree_subdir), "vendor_boot", info_dict)
+ if data:
+ return File(name, data)
+ return None
+
+
+def GetVendorKernelBootImage(name, prebuilt_name, unpack_dir, tree_subdir,
+ info_dict=None):
+ """Return a File object with the desired vendor kernel boot image.
+
+ Look for it under 'unpack_dir'/IMAGES, otherwise construct it from
+ the source files in 'unpack_dir'/'tree_subdir'."""
+
+ prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
+ if os.path.exists(prebuilt_path):
+ logger.info("using prebuilt %s from IMAGES...", prebuilt_name)
+ return File.FromLocalFile(name, prebuilt_path)
+
+ logger.info("building image from target_files %s...", tree_subdir)
+
+ if info_dict is None:
+ info_dict = OPTIONS.info_dict
+
+ data = _BuildVendorBootImage(
+ os.path.join(unpack_dir, tree_subdir), "vendor_kernel_boot", info_dict)
if data:
return File(name, data)
return None
@@ -2339,6 +2382,35 @@
"Failed to run signapk.jar: return code {}:\n{}".format(
proc.returncode, stdoutdata))
+def SignSePolicy(sepolicy, key, password):
+ """Sign the sepolicy zip, producing an fsverity .fsv_sig and
+ an RSA .sig signature files.
+ """
+
+ if OPTIONS.sign_sepolicy_path is None:
+ return False
+
+ java_library_path = os.path.join(
+ OPTIONS.search_path, OPTIONS.signapk_shared_library_path)
+
+ cmd = ([OPTIONS.java_path] + OPTIONS.java_args +
+ ["-Djava.library.path=" + java_library_path,
+ "-jar", os.path.join(OPTIONS.search_path, OPTIONS.sign_sepolicy_path)] +
+ OPTIONS.extra_sign_sepolicy_args)
+
+ cmd.extend([key + OPTIONS.public_key_suffix,
+ key + OPTIONS.private_key_suffix,
+ sepolicy])
+
+ proc = Run(cmd, stdin=subprocess.PIPE)
+ if password is not None:
+ password += "\n"
+ stdoutdata, _ = proc.communicate(password)
+ if proc.returncode != 0:
+ raise ExternalError(
+ "Failed to run sign sepolicy: return code {}:\n{}".format(
+ proc.returncode, stdoutdata))
+ return True
def CheckSize(data, target, info_dict):
"""Checks the data string passed against the max size limit.
@@ -2515,7 +2587,8 @@
opts, args = getopt.getopt(
argv, "hvp:s:x:" + extra_opts,
["help", "verbose", "path=", "signapk_path=",
- "signapk_shared_library_path=", "extra_signapk_args=", "aapt2_path=",
+ "signapk_shared_library_path=", "extra_signapk_args=",
+ "sign_sepolicy_path=", "extra_sign_sepolicy_args=", "aapt2_path=",
"java_path=", "java_args=", "android_jar_path=", "public_key_suffix=",
"private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
"verity_signer_path=", "verity_signer_args=", "device_specific=",
@@ -2539,6 +2612,10 @@
OPTIONS.signapk_shared_library_path = a
elif o in ("--extra_signapk_args",):
OPTIONS.extra_signapk_args = shlex.split(a)
+ elif o in ("--sign_sepolicy_path",):
+ OPTIONS.sign_sepolicy_path = a
+ elif o in ("--extra_sign_sepolicy_args",):
+ OPTIONS.extra_sign_sepolicy_args = shlex.split(a)
elif o in ("--aapt2_path",):
OPTIONS.aapt2_path = a
elif o in ("--java_path",):
diff --git a/tools/releasetools/fsverity_manifest_generator.py b/tools/releasetools/fsverity_manifest_generator.py
deleted file mode 100644
index b8184bc..0000000
--- a/tools/releasetools/fsverity_manifest_generator.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-`fsverity_manifest_generator` generates build manifest APK file containing
-digests of target files. The APK file is signed so the manifest inside the APK
-can be trusted.
-"""
-
-import argparse
-import common
-import os
-import subprocess
-import sys
-from fsverity_digests_pb2 import FSVerityDigests
-
-HASH_ALGORITHM = 'sha256'
-
-def _digest(fsverity_path, input_file):
- cmd = [fsverity_path, 'digest', input_file]
- cmd.extend(['--compact'])
- cmd.extend(['--hash-alg', HASH_ALGORITHM])
- out = subprocess.check_output(cmd, universal_newlines=True).strip()
- return bytes(bytearray.fromhex(out))
-
-if __name__ == '__main__':
- p = argparse.ArgumentParser()
- p.add_argument(
- '--output',
- help='Path to the output manifest APK',
- required=True)
- p.add_argument(
- '--fsverity-path',
- help='path to the fsverity program',
- required=True)
- p.add_argument(
- '--aapt2-path',
- help='path to the aapt2 program',
- required=True)
- p.add_argument(
- '--min-sdk-version',
- help='minimum supported sdk version of the generated manifest apk',
- required=True)
- p.add_argument(
- '--version-code',
- help='version code for the generated manifest apk',
- required=True)
- p.add_argument(
- '--version-name',
- help='version name for the generated manifest apk',
- required=True)
- p.add_argument(
- '--framework-res',
- help='path to framework-res.apk',
- required=True)
- p.add_argument(
- '--apksigner-path',
- help='path to the apksigner program',
- required=True)
- p.add_argument(
- '--apk-key-path',
- help='path to the apk key',
- required=True)
- p.add_argument(
- '--apk-manifest-path',
- help='path to AndroidManifest.xml',
- required=True)
- p.add_argument(
- '--base-dir',
- help='directory to use as a relative root for the inputs',
- required=True)
- p.add_argument(
- 'inputs',
- nargs='+',
- help='input file for the build manifest')
- args = p.parse_args(sys.argv[1:])
-
- digests = FSVerityDigests()
- for f in sorted(args.inputs):
- # f is a full path for now; make it relative so it starts with {mount_point}/
- digest = digests.digests[os.path.relpath(f, args.base_dir)]
- digest.digest = _digest(args.fsverity_path, f)
- digest.hash_alg = HASH_ALGORITHM
-
- temp_dir = common.MakeTempDir()
-
- os.mkdir(os.path.join(temp_dir, "assets"))
- metadata_path = os.path.join(temp_dir, "assets", "build_manifest.pb")
- with open(metadata_path, "wb") as f:
- f.write(digests.SerializeToString())
-
- common.RunAndCheckOutput([args.aapt2_path, "link",
- "-A", os.path.join(temp_dir, "assets"),
- "-o", args.output,
- "--min-sdk-version", args.min_sdk_version,
- "--version-code", args.version_code,
- "--version-name", args.version_name,
- "-I", args.framework_res,
- "--manifest", args.apk_manifest_path])
- common.RunAndCheckOutput([args.apksigner_path, "sign", "--in", args.output,
- "--cert", args.apk_key_path + ".x509.pem",
- "--key", args.apk_key_path + ".pk8"])
diff --git a/tools/releasetools/merge/OWNERS b/tools/releasetools/merge/OWNERS
index 9012e3a..0eddee2 100644
--- a/tools/releasetools/merge/OWNERS
+++ b/tools/releasetools/merge/OWNERS
@@ -1,3 +1,4 @@
-danielnorman@google.com
+deyaoren@google.com
+haamed@google.com
jgalmes@google.com
rseymour@google.com
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index c06fd4c..c95cead 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -149,6 +149,13 @@
OPTIONS.vendor_dexpreopt_config = None
+def move_only_exists(source, destination):
+ """Judge whether the file exists and then move the file."""
+
+ if os.path.exists(source):
+ shutil.move(source, destination)
+
+
def create_merged_package(temp_dir):
"""Merges two target files packages into one target files structure.
@@ -286,9 +293,8 @@
shutil.move(
os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
os.path.join(target_files_dir, 'IMAGES', partition_img))
- shutil.move(
- os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
- os.path.join(target_files_dir, 'IMAGES', partition_map))
+ move_only_exists(os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
+ os.path.join(target_files_dir, 'IMAGES', partition_map))
def copy_recovery_file(filename):
for subdir in ('VENDOR', 'SYSTEM/vendor'):
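move_only_exists is needed because .map files are only produced for sparse images; a self-contained version for reference, with illustrative paths:

```python
import os
import shutil

def move_only_exists(source, destination):
    """Move source to destination, silently skipping a missing source."""
    if os.path.exists(source):
        shutil.move(source, destination)

# vendor.map may be absent when sparse images are disabled.
move_only_exists("vendor/IMAGES/vendor.map", "merged/IMAGES/vendor.map")
```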
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index f623ad2..e253b02 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -100,20 +100,16 @@
has_error = False
# Check that partitions only come from one input.
- for partition in _FRAMEWORK_PARTITIONS.union(_VENDOR_PARTITIONS):
- image_path = 'IMAGES/{}.img'.format(partition.lower().replace('/', ''))
- in_framework = (
- any(item.startswith(partition) for item in OPTIONS.framework_item_list)
- or image_path in OPTIONS.framework_item_list)
- in_vendor = (
- any(item.startswith(partition) for item in OPTIONS.vendor_item_list) or
- image_path in OPTIONS.vendor_item_list)
- if in_framework and in_vendor:
- logger.error(
- 'Cannot extract items from %s for both the framework and vendor'
- ' builds. Please ensure only one merge config item list'
- ' includes %s.', partition, partition)
- has_error = True
+ framework_partitions = ItemListToPartitionSet(OPTIONS.framework_item_list)
+ vendor_partitions = ItemListToPartitionSet(OPTIONS.vendor_item_list)
+ from_both = framework_partitions.intersection(vendor_partitions)
+ if from_both:
+ logger.error(
+ 'Cannot extract items from the same partition in both the '
+ 'framework and vendor builds. Please ensure only one merge config '
+ 'item list (or inferred list) includes each partition: %s' %
+ ','.join(from_both))
+ has_error = True
if any([
key in OPTIONS.framework_misc_info_keys
@@ -131,7 +127,8 @@
# system partition). The following regex matches this and extracts the
# partition name.
-_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')
+_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
+_IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')
def ItemListToPartitionSet(item_list):
@@ -154,62 +151,89 @@
partition_set = set()
for item in item_list:
- partition_match = _PARTITION_ITEM_PATTERN.search(item.strip())
- partition_tag = partition_match.group(
- 1).lower() if partition_match else None
-
- if partition_tag:
- partition_set.add(partition_tag)
+ for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN):
+ partition_match = pattern.search(item.strip())
+ if partition_match:
+ partition = partition_match.group(1).lower()
+ # These directories in target-files are not actual partitions.
+ if partition not in ('meta', 'images'):
+ partition_set.add(partition)
return partition_set
# Partitions that are grabbed from the framework partial build by default.
_FRAMEWORK_PARTITIONS = {
- 'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm'
-}
-# Partitions that are grabbed from the vendor partial build by default.
-_VENDOR_PARTITIONS = {
- 'vendor', 'odm', 'oem', 'boot', 'vendor_boot', 'recovery',
- 'prebuilt_images', 'radio', 'data', 'vendor_dlkm', 'odm_dlkm'
+ 'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm',
+ 'vbmeta_system'
}
def InferItemList(input_namelist, framework):
- item_list = []
+ item_set = set()
- # Some META items are grabbed from partial builds directly.
+ # Some META items are always grabbed from partial builds directly.
# Others are combined in merge_meta.py.
if framework:
- item_list.extend([
+ item_set.update([
'META/liblz4.so',
'META/postinstall_config.txt',
'META/update_engine_config.txt',
'META/zucchini_config.txt',
])
else: # vendor
- item_list.extend([
+ item_set.update([
'META/kernel_configs.txt',
'META/kernel_version.txt',
'META/otakeys.txt',
+ 'META/pack_radioimages.txt',
'META/releasetools.py',
- 'OTA/android-info.txt',
])
# Grab a set of items for the expected partitions in the partial build.
- for partition in (_FRAMEWORK_PARTITIONS if framework else _VENDOR_PARTITIONS):
- for namelist in input_namelist:
- if namelist.startswith('%s/' % partition.upper()):
- fs_config_prefix = '' if partition == 'system' else '%s_' % partition
- item_list.extend([
- '%s/*' % partition.upper(),
- 'IMAGES/%s.img' % partition,
- 'IMAGES/%s.map' % partition,
- 'META/%sfilesystem_config.txt' % fs_config_prefix,
- ])
- break
+ seen_partitions = []
+ for namelist in input_namelist:
+ if namelist.endswith('/'):
+ continue
- return sorted(item_list)
+ partition = namelist.split('/')[0].lower()
+
+ # META items are grabbed above, or merged later.
+ if partition == 'meta':
+ continue
+
+ if partition == 'images':
+ image_partition, extension = os.path.splitext(os.path.basename(namelist))
+ if image_partition == 'vbmeta':
+ # Always regenerate vbmeta.img since it depends on hash information
+ # from both builds.
+ continue
+ if extension in ('.img', '.map'):
+ # Include image files in IMAGES/* if the partition comes from
+ # the expected set.
+ if (framework and image_partition in _FRAMEWORK_PARTITIONS) or (
+ not framework and image_partition not in _FRAMEWORK_PARTITIONS):
+ item_set.add(namelist)
+ elif not framework:
+ # Include all miscellaneous non-image files in IMAGES/* from
+ # the vendor build.
+ item_set.add(namelist)
+ continue
+
+ # Skip already-visited partitions.
+ if partition in seen_partitions:
+ continue
+ seen_partitions.append(partition)
+
+ if (framework and partition in _FRAMEWORK_PARTITIONS) or (
+ not framework and partition not in _FRAMEWORK_PARTITIONS):
+ fs_config_prefix = '' if partition == 'system' else '%s_' % partition
+ item_set.update([
+ '%s/*' % partition.upper(),
+ 'META/%sfilesystem_config.txt' % fs_config_prefix,
+ ])
+
+ return sorted(item_set)
def InferFrameworkMiscInfoKeys(input_namelist):
@@ -223,8 +247,8 @@
]
for partition in _FRAMEWORK_PARTITIONS:
- for namelist in input_namelist:
- if namelist.startswith('%s/' % partition.upper()):
+ for partition_dir in ('%s/' % partition.upper(), 'SYSTEM/%s/' % partition):
+ if partition_dir in input_namelist:
fs_type_prefix = '' if partition == 'system' else '%s_' % partition
keys.extend([
'avb_%s_hashtree_enable' % partition,
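The two patterns in merge_utils.py can be exercised standalone; this sketch (helper name hypothetical) shows why IMAGES/ entries now map back to partition names while META/ and the IMAGES/ directory itself are excluded:

```python
import re

_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
_IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')

def item_to_partition(item):
    """Map a target-files item to a partition name, or None."""
    for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN):
        m = pattern.search(item.strip())
        if m:
            partition = m.group(1).lower()
            # META and IMAGES are directories, not partitions.
            if partition not in ('meta', 'images'):
                return partition
    return None

assert item_to_partition('SYSTEM/system_ext/*') == 'system'
assert item_to_partition('IMAGES/odm.img') == 'odm'
assert item_to_partition('META/apexkeys.txt') is None
```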
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
index 1949050..eceb734 100644
--- a/tools/releasetools/merge/test_merge_utils.py
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -108,20 +108,27 @@
def test_ItemListToPartitionSet(self):
item_list = [
+ 'IMAGES/system_ext.img',
'META/apexkeys.txt',
'META/apkcerts.txt',
'META/filesystem_config.txt',
'PRODUCT/*',
'SYSTEM/*',
- 'SYSTEM_EXT/*',
+ 'SYSTEM/system_ext/*',
]
partition_set = merge_utils.ItemListToPartitionSet(item_list)
self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
def test_InferItemList_Framework(self):
zip_namelist = [
+ 'IMAGES/product.img',
+ 'IMAGES/product.map',
+ 'IMAGES/system.img',
+ 'IMAGES/system.map',
'SYSTEM/my_system_file',
'PRODUCT/my_product_file',
+ # Device does not use a separate system_ext partition.
+ 'SYSTEM/system_ext/system_ext_file',
]
item_list = merge_utils.InferItemList(zip_namelist, framework=True)
@@ -147,37 +154,55 @@
zip_namelist = [
'VENDOR/my_vendor_file',
'ODM/my_odm_file',
+ 'IMAGES/odm.img',
+ 'IMAGES/odm.map',
+ 'IMAGES/vendor.img',
+ 'IMAGES/vendor.map',
+ 'IMAGES/my_custom_image.img',
+ 'IMAGES/my_custom_file.txt',
+ 'IMAGES/vbmeta.img',
+ 'CUSTOM_PARTITION/my_custom_file',
+ # Leftover framework pieces that shouldn't be grabbed.
+ 'IMAGES/system.img',
+ 'SYSTEM/system_file',
]
item_list = merge_utils.InferItemList(zip_namelist, framework=False)
expected_vendor_item_list = [
+ 'CUSTOM_PARTITION/*',
+ 'IMAGES/my_custom_file.txt',
+ 'IMAGES/my_custom_image.img',
'IMAGES/odm.img',
'IMAGES/odm.map',
'IMAGES/vendor.img',
'IMAGES/vendor.map',
+ 'META/custom_partition_filesystem_config.txt',
'META/kernel_configs.txt',
'META/kernel_version.txt',
'META/odm_filesystem_config.txt',
'META/otakeys.txt',
+ 'META/pack_radioimages.txt',
'META/releasetools.py',
'META/vendor_filesystem_config.txt',
'ODM/*',
- 'OTA/android-info.txt',
'VENDOR/*',
]
self.assertEqual(item_list, expected_vendor_item_list)
def test_InferFrameworkMiscInfoKeys(self):
zip_namelist = [
- 'SYSTEM/my_system_file',
- 'SYSTEM_EXT/my_system_ext_file',
+ 'PRODUCT/',
+ 'SYSTEM/',
+ 'SYSTEM/system_ext/',
]
keys = merge_utils.InferFrameworkMiscInfoKeys(zip_namelist)
expected_keys = [
'ab_update',
+ 'avb_product_add_hashtree_footer_args',
+ 'avb_product_hashtree_enable',
'avb_system_add_hashtree_footer_args',
'avb_system_ext_add_hashtree_footer_args',
'avb_system_ext_hashtree_enable',
@@ -186,10 +211,13 @@
'avb_vbmeta_system_algorithm',
'avb_vbmeta_system_key_path',
'avb_vbmeta_system_rollback_index_location',
+ 'building_product_image',
'building_system_ext_image',
'building_system_image',
'default_system_dev_certificate',
'fs_type',
+ 'product_disable_sparse',
+ 'product_fs_type',
'system_disable_sparse',
'system_ext_disable_sparse',
'system_ext_fs_type',
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 522d489..5384699 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -1068,10 +1068,11 @@
pre_partition_state, post_partition_state):
assert pre_partition_state is not None
partition_timestamps = {}
- for part in pre_partition_state:
- partition_timestamps[part.partition_name] = part.version
for part in post_partition_state:
- partition_timestamps[part.partition_name] = \
+ partition_timestamps[part.partition_name] = part.version
+ for part in pre_partition_state:
+ if part.partition_name in partition_timestamps:
+ partition_timestamps[part.partition_name] = \
max(part.version, partition_timestamps[part.partition_name])
return [
"--partition_timestamps",
@@ -1145,6 +1146,14 @@
logger.info("Either source or target does not support VABC, disabling.")
OPTIONS.disable_vabc = True
+ # Virtual AB Compression was introduced in Android S. It was later
+ # backported to Android R, but verity support was not, so if VABC is
+ # in use on an Android R source build, disable verity computation.
+ if not OPTIONS.disable_vabc and source_info.is_android_r:
+ OPTIONS.disable_verity_computation = True
+ OPTIONS.disable_fec_computation = True
+
else:
assert "ab_partitions" in OPTIONS.info_dict, \
"META/ab_partitions.txt is required for ab_update."
@@ -1208,6 +1217,8 @@
metadata.postcondition.partition_state)
if not ota_utils.IsZucchiniCompatible(source_file, target_file):
+ logger.warning(
+ "Builds doesn't support zucchini, or source/target don't have compatible zucchini versions. Disabling zucchini.")
OPTIONS.enable_zucchini = False
additional_args += ["--enable_zucchini",
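The reordering of the timestamp loops matters when a partition exists only in the source build; a small worked example with hypothetical partition states:

```python
from collections import namedtuple

Part = namedtuple('Part', 'partition_name version')

# 'odm' exists only in the source build, so it must not leak into
# --partition_timestamps for the target.
pre = [Part('system', '100'), Part('odm', '90')]
post = [Part('system', '120'), Part('product', '110')]

timestamps = {p.partition_name: p.version for p in post}
for p in pre:
    if p.partition_name in timestamps:
        timestamps[p.partition_name] = max(p.version,
                                           timestamps[p.partition_name])

assert timestamps == {'system': '120', 'product': '110'}
```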
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 5d403dc..ef1dca2 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -22,7 +22,8 @@
import ota_metadata_pb2
from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
- SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps)
+ SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
+ GetRamdiskFormat)
logger = logging.getLogger(__name__)
@@ -371,15 +372,18 @@
for partition in PARTITIONS_WITH_BUILD_PROP:
partition_prop_key = "{}.build.prop".format(partition)
input_file = info_dict[partition_prop_key].input_file
+ ramdisk = GetRamdiskFormat(info_dict)
if isinstance(input_file, zipfile.ZipFile):
with zipfile.ZipFile(input_file.filename, allowZip64=True) as input_zip:
info_dict[partition_prop_key] = \
PartitionBuildProps.FromInputFile(input_zip, partition,
- placeholder_values)
+ placeholder_values,
+ ramdisk)
else:
info_dict[partition_prop_key] = \
PartitionBuildProps.FromInputFile(input_file, partition,
- placeholder_values)
+ placeholder_values,
+ ramdisk)
info_dict["build.prop"] = info_dict["system.build.prop"]
build_info_set.add(BuildInfo(info_dict, default_build_info.oem_dicts))
@@ -693,6 +697,7 @@
if os.path.exists(entry_path):
with open(entry_path, "r") as fp:
return fp.read()
- else:
- return ""
- return ReadEntry(source_file, _ZUCCHINI_CONFIG_ENTRY_NAME) == ReadEntry(target_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+ return False
+ sourceEntry = ReadEntry(source_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+ targetEntry = ReadEntry(target_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+ return sourceEntry and targetEntry and sourceEntry == targetEntry
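The tightened IsZucchiniCompatible check no longer treats two missing zucchini_config entries as compatible; in sketch form:

```python
def is_zucchini_compatible(source_entry, target_entry):
    # Both builds must ship a config, and the configs must match; a
    # missing entry (falsy) on either side now disables zucchini.
    return bool(source_entry and target_entry
                and source_entry == target_entry)

assert not is_zucchini_compatible("", "")   # previously compared equal
assert not is_zucchini_compatible("v1", "")
assert is_zucchini_compatible("v1", "v1")
```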
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index a68f1ec..d3e242b 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -42,15 +42,6 @@
--sign_tool <sign_tool>
Optional flag that specifies a custom signing tool for the contents of the apex.
-
- --sepolicy_key <key>
- Optional flag that specifies the sepolicy signing key, defaults to payload_key.
-
- --sepolicy_cert <cert>
- Optional flag that specifies the sepolicy signing cert.
-
- --fsverity_tool <path>
- Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
"""
import logging
@@ -65,8 +56,7 @@
def SignApexFile(avbtool, apex_file, payload_key, container_key, no_hashtree,
- apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None,
- sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+ apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None):
"""Signs the given apex file."""
with open(apex_file, 'rb') as input_fp:
apex_data = input_fp.read()
@@ -82,10 +72,7 @@
apk_keys=apk_keys,
signing_args=signing_args,
sign_tool=sign_tool,
- is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name),
- sepolicy_key=sepolicy_key,
- sepolicy_cert=sepolicy_cert,
- fsverity_tool=fsverity_tool)
+ is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name))
def main(argv):
@@ -121,12 +108,6 @@
options['extra_apks'].update({n: key})
elif o == '--sign_tool':
options['sign_tool'] = a
- elif o == '--sepolicy_key':
- options['sepolicy_key'] = a
- elif o == '--sepolicy_cert':
- options['sepolicy_cert'] = a
- elif o == '--fsverity_tool':
- options['fsverity_tool'] = a
else:
return False
return True
@@ -142,9 +123,6 @@
'payload_key=',
'extra_apks=',
'sign_tool=',
- 'sepolicy_key=',
- 'sepolicy_cert=',
- 'fsverity_tool='
],
extra_option_handler=option_handler)
@@ -165,10 +143,7 @@
signing_args=options.get('payload_extra_args'),
codename_to_api_level_map=options.get(
'codename_to_api_level_map', {}),
- sign_tool=options.get('sign_tool', None),
- sepolicy_key=options.get('sepolicy_key', None),
- sepolicy_cert=options.get('sepolicy_cert', None),
- fsverity_tool=options.get('fsverity_tool', None))
+ sign_tool=options.get('sign_tool', None))
shutil.copyfile(signed_apex, args[1])
logger.info("done.")
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index c803340..40bd6a7 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -137,15 +137,6 @@
--android_jar_path <path>
Path to the android.jar to repack the apex file.
- --sepolicy_key <key>
- Optional flag that specifies the sepolicy signing key, defaults to payload_key for the sepolicy.apex.
-
- --sepolicy_cert <cert>
- Optional flag that specifies the sepolicy signing cert.
-
- --fsverity_tool <path>
- Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
-
--allow_gsi_debug_sepolicy
Allow the existence of the file 'userdebug_plat_sepolicy.cil' under
(/system/system_ext|/system_ext)/etc/selinux.
@@ -205,9 +196,6 @@
OPTIONS.android_jar_path = None
OPTIONS.vendor_partitions = set()
OPTIONS.vendor_otatools = None
-OPTIONS.sepolicy_key = None
-OPTIONS.sepolicy_cert = None
-OPTIONS.fsverity_tool = None
OPTIONS.allow_gsi_debug_sepolicy = False
@@ -226,6 +214,7 @@
'pvmfw': 'avb_pvmfw_add_hash_footer_args',
'vendor': 'avb_vendor_add_hashtree_footer_args',
'vendor_boot': 'avb_vendor_boot_add_hash_footer_args',
+ 'vendor_kernel_boot': 'avb_vendor_kernel_boot_add_hash_footer_args',
'vendor_dlkm': "avb_vendor_dlkm_add_hashtree_footer_args",
'vbmeta': 'avb_vbmeta_args',
'vbmeta_system': 'avb_vbmeta_system_args',
@@ -246,8 +235,6 @@
def IsApexFile(filename):
return filename.endswith(".apex") or filename.endswith(".capex")
-def IsSepolicyApex(filename):
- return filename.endswith(OPTIONS.sepolicy_name)
def GetApexFilename(filename):
name = os.path.basename(filename)
@@ -270,24 +257,6 @@
return certmap
-def GetSepolicyKeys(keys_info):
- """Gets SEPolicy signing keys applying overrides from command line options.
-
- Args:
- keys_info: A dict that maps from the SEPolicy APEX filename to a tuple of
- (sepolicy_key, sepolicy_cert, fsverity_tool).
-
- Returns:
- A dict that contains the updated APEX key mapping, which should be used for
- the current signing.
- """
- for name in keys_info:
- (sepolicy_key, sepolicy_cert, fsverity_tool) = keys_info[name]
- sepolicy_key = OPTIONS.sepolicy_key if OPTIONS.sepolicy_key else sepolicy_key
- sepolicy_cert = OPTIONS.sepolicy_cert if OPTIONS.sepolicy_cert else sepolicy_cert
- fsverity_tool = OPTIONS.fsverity_tool if OPTIONS.fsverity_tool else fsverity_tool
- keys_info[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
- return keys_info
def GetApexKeys(keys_info, key_map):
"""Gets APEX payload and container signing keys by applying the mapping rules.
@@ -550,7 +519,7 @@
def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
apk_keys, apex_keys, key_passwords,
platform_api_level, codename_to_api_level_map,
- compressed_extension, sepolicy_keys):
+ compressed_extension):
# maxsize measures the maximum filename length, including the ones to be
# skipped.
try:
@@ -618,17 +587,6 @@
print(" : %-*s payload (%s)" % (
maxsize, name, payload_key))
- sepolicy_key = None
- sepolicy_cert = None
- fsverity_tool = None
-
- if IsSepolicyApex(name):
- (sepolicy_key, sepolicy_cert, fsverity_tool) = sepolicy_keys[name]
- print(" : %-*s sepolicy key (%s)" % (
- maxsize, name, sepolicy_key))
- print(" : %-*s sepolicy cert (%s)" % (
- maxsize, name, sepolicy_cert))
-
signed_apex = apex_utils.SignApex(
misc_info['avb_avbtool'],
data,
@@ -639,11 +597,7 @@
codename_to_api_level_map,
no_hashtree=None, # Let apex_util determine if hash tree is needed
signing_args=OPTIONS.avb_extra_args.get('apex'),
- sign_tool=sign_tool,
- is_sepolicy=IsSepolicyApex(name),
- sepolicy_key=sepolicy_key,
- sepolicy_cert=sepolicy_cert,
- fsverity_tool=fsverity_tool)
+ sign_tool=sign_tool)
common.ZipWrite(output_tf_zip, signed_apex, filename)
else:
@@ -927,7 +881,7 @@
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
elif key == "ro.build.description":
- pieces = value.split(" ")
+ pieces = value.split()
assert pieces[-1].endswith("-keys")
pieces[-1] = EditTags(pieces[-1])
value = " ".join(pieces)
@@ -1144,7 +1098,7 @@
tokens = []
changed = False
- for token in args.split(' '):
+ for token in args.split():
fingerprint_key = 'com.android.build.{}.fingerprint'.format(partition)
if not token.startswith(fingerprint_key):
tokens.append(token)
@@ -1253,24 +1207,20 @@
def ReadApexKeysInfo(tf_zip):
"""Parses the APEX keys info from a given target-files zip.
- Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns
- two dicts, the first one contains the mapping from APEX names
- (e.g. com.android.tzdata) to a tuple of (payload_key, container_key,
- sign_tool). The second one maps the sepolicy APEX name to a tuple containing
- (sepolicy_key, sepolicy_cert, fsverity_tool).
+ Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns a
+ dict that contains the mapping from APEX names (e.g. com.android.tzdata) to a
+ tuple of (payload_key, container_key, sign_tool).
Args:
tf_zip: The input target_files ZipFile (already open).
Returns:
- name : (payload_key, container_key, sign_tool)
+ (payload_key, container_key, sign_tool):
- payload_key contains the path to the payload signing key
- container_key contains the path to the container signing key
- sign_tool is an apex-specific signing tool for its payload contents
- name : (sepolicy_key, sepolicy_cert, fsverity_tool)
"""
keys = {}
- sepolicy_keys = {}
for line in tf_zip.read('META/apexkeys.txt').decode().split('\n'):
line = line.strip()
if not line:
@@ -1281,9 +1231,6 @@
r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*?)"'
- r'(\s+sepolicy_key="(?P<SEPOLICY_KEY>.*?)")?'
- r'(\s+sepolicy_certificate="(?P<SEPOLICY_CERT>.*?)")?'
- r'(\s+fsverity_tool="(?P<FSVERITY_TOOL>.*?)")?'
r'(\s+partition="(?P<PARTITION>.*?)")?'
r'(\s+sign_tool="(?P<SIGN_TOOL>.*?)")?$',
line)
@@ -1312,18 +1259,12 @@
container_private_key, OPTIONS.private_key_suffix):
container_key = container_cert[:-len(OPTIONS.public_key_suffix)]
else:
- raise ValueError("Failed to parse container keys: \n{} **** {}".format(container_cert, container_private_key))
+ raise ValueError("Failed to parse container keys: \n{}".format(line))
sign_tool = matches.group("SIGN_TOOL")
keys[name] = (payload_private_key, container_key, sign_tool)
- if IsSepolicyApex(name):
- sepolicy_key = matches.group('SEPOLICY_KEY')
- sepolicy_cert = matches.group('SEPOLICY_CERT')
- fsverity_tool = matches.group('FSVERITY_TOOL')
- sepolicy_keys[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
-
- return keys, sepolicy_keys
+ return keys
def BuildVendorPartitions(output_zip_path):
@@ -1354,12 +1295,13 @@
vendor_misc_info["no_boot"] = "true" # boot
vendor_misc_info["vendor_boot"] = "false" # vendor_boot
vendor_misc_info["no_recovery"] = "true" # recovery
+ vendor_misc_info["avb_enable"] = "false" # vbmeta
vendor_misc_info["board_bpt_enable"] = "false" # partition-table
vendor_misc_info["has_dtbo"] = "false" # dtbo
vendor_misc_info["has_pvmfw"] = "false" # pvmfw
vendor_misc_info["avb_custom_images_partition_list"] = "" # custom images
- vendor_misc_info["avb_enable"] = "false" # vbmeta
+ vendor_misc_info["avb_building_vbmeta_image"] = "false" # skip building vbmeta
vendor_misc_info["use_dynamic_partitions"] = "false" # super_empty
vendor_misc_info["build_super_partition"] = "false" # super split
with open(vendor_misc_info_path, "w") as output:
@@ -1413,9 +1355,14 @@
img_file_path = "IMAGES/{}.img".format(p)
map_file_path = "IMAGES/{}.map".format(p)
common.ZipWrite(output_zip, os.path.join(vendor_tempdir, img_file_path), img_file_path)
- common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
- # copy recovery patch & install.sh
+ if os.path.exists(os.path.join(vendor_tempdir, map_file_path)):
+ common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
+ # copy recovery.img, boot.img, recovery patch & install.sh
if OPTIONS.rebuild_recovery:
+ recovery_img = "IMAGES/recovery.img"
+ boot_img = "IMAGES/boot.img"
+ common.ZipWrite(output_zip, os.path.join(vendor_tempdir, recovery_img), recovery_img)
+ common.ZipWrite(output_zip, os.path.join(vendor_tempdir, boot_img), boot_img)
recovery_patch_path = "VENDOR/recovery-from-boot.p"
recovery_sh_path = "VENDOR/bin/install-recovery.sh"
common.ZipWrite(output_zip, os.path.join(vendor_tempdir, recovery_patch_path), recovery_patch_path)
@@ -1535,12 +1482,6 @@
OPTIONS.vendor_otatools = a
elif o == "--vendor_partitions":
OPTIONS.vendor_partitions = set(a.split(","))
- elif o == '--sepolicy_key':
- OPTIONS.sepolicy_key = a
- elif o == '--sepolicy_cert':
- OPTIONS.sepolicy_cert = a
- elif o == '--fsverity_tool':
- OPTIONS.fsverity_tool = a
elif o == "--allow_gsi_debug_sepolicy":
OPTIONS.allow_gsi_debug_sepolicy = True
else:
@@ -1595,9 +1536,6 @@
"gki_signing_extra_args=",
"vendor_partitions=",
"vendor_otatools=",
- "sepolicy_key=",
- "sepolicy_cert=",
- "fsverity_tool=",
"allow_gsi_debug_sepolicy",
],
extra_option_handler=option_handler)
@@ -1620,9 +1558,8 @@
apk_keys_info, compressed_extension = common.ReadApkCerts(input_zip)
apk_keys = GetApkCerts(apk_keys_info)
- apex_keys_info, sepolicy_keys_info = ReadApexKeysInfo(input_zip)
+ apex_keys_info = ReadApexKeysInfo(input_zip)
apex_keys = GetApexKeys(apex_keys_info, apk_keys)
- sepolicy_keys = GetSepolicyKeys(sepolicy_keys_info)
# TODO(xunchang) check for the apks inside the apex files, and abort early if
# the keys are not available.
@@ -1640,7 +1577,7 @@
ProcessTargetFiles(input_zip, output_zip, misc_info,
apk_keys, apex_keys, key_passwords,
platform_api_level, codename_to_api_level_map,
- compressed_extension, sepolicy_keys)
+ compressed_extension)
common.ZipClose(input_zip)
common.ZipClose(output_zip)
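For reference, a line of META/apexkeys.txt in the post-change format, parsed with a regex equivalent to the one above; the leading name/public_key portion is assumed from context, since only the tail of the pattern appears in this diff, and the paths are placeholders:

```python
import re

line = ('name="com.android.tzdata.apex" '
        'public_key="testdata/com.android.tzdata.avbpubkey" '
        'private_key="testdata/com.android.tzdata.pem" '
        'container_certificate="testkey.x509.pem" '
        'container_private_key="testkey.pk8"')

matches = re.match(
    r'^name="(?P<NAME>.*)"\s+public_key="(?P<PAYLOAD_PUBLIC_KEY>.*)"\s+'
    r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
    r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
    r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*?)"'
    r'(\s+partition="(?P<PARTITION>.*?)")?'
    r'(\s+sign_tool="(?P<SIGN_TOOL>.*?)")?$',
    line)
assert matches.group('NAME') == 'com.android.tzdata.apex'
assert matches.group('SEPOLICY_KEY') if False else True  # sepolicy_* fields are gone
```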
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index c344e22..7723de7 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -59,6 +59,21 @@
self.assertTrue(os.path.exists(signed_test_apex))
@test_utils.SkipIfExternalToolsUnavailable()
+ def test_SignSepolicyApex(self):
+ test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
+ payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+ container_key = os.path.join(self.testdata_dir, 'testkey')
+ apk_keys = {'SEPolicy-33.zip': os.path.join(self.testdata_dir, 'testkey')}
+ signed_test_apex = sign_apex.SignApexFile(
+ 'avbtool',
+ test_apex,
+ payload_key,
+ container_key,
+ False,
+ None)
+ self.assertTrue(os.path.exists(signed_test_apex))
+
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_SignCompressedApexFile(self):
apex = os.path.join(test_utils.get_current_dir(), 'com.android.apex.compressed.v1.capex')
payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
@@ -71,21 +86,3 @@
False,
codename_to_api_level_map={'S': 31, 'Tiramisu' : 32})
self.assertTrue(os.path.exists(signed_apex))
-
- @test_utils.SkipIfExternalToolsUnavailable()
- def test_SignApexWithSepolicy(self):
- test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
- payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
- container_key = os.path.join(self.testdata_dir, 'testkey')
- sepolicy_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
- sepolicy_cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
- signed_test_apex = sign_apex.SignApexFile(
- 'avbtool',
- test_apex,
- payload_key,
- container_key,
- False,
- None,
- sepolicy_key=sepolicy_key,
- sepolicy_cert=sepolicy_cert)
- self.assertTrue(os.path.exists(signed_test_apex))
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 144a3cd..0f13add 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -476,7 +476,7 @@
target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
- keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+ keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
'apex.apexd_test.apex': (
@@ -486,7 +486,6 @@
'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
'build/make/target/product/security/testkey', None),
}, keys_info)
- self.assertEqual({}, sepolicy_keys_info)
def test_ReadApexKeysInfo_mismatchingContainerKeys(self):
# Mismatching payload public / private keys.
@@ -516,7 +515,7 @@
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
- keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+ keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
'apex.apexd_test.apex': (
@@ -526,7 +525,6 @@
'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
'build/make/target/product/security/testkey', None),
}, keys_info)
- self.assertEqual({}, sepolicy_keys_info)
def test_ReadApexKeysInfo_missingPayloadPublicKey(self):
# Invalid lines will be skipped.
@@ -540,7 +538,7 @@
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
- keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+ keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
'apex.apexd_test.apex': (
@@ -550,7 +548,6 @@
'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
'build/make/target/product/security/testkey', None),
}, keys_info)
- self.assertEqual({}, sepolicy_keys_info)
def test_ReadApexKeysInfo_presignedKeys(self):
apex_keys = self.APEX_KEYS_TXT + (
@@ -564,7 +561,7 @@
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
- keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+ keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
'apex.apexd_test.apex': (
@@ -574,7 +571,6 @@
'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
'build/make/target/product/security/testkey', None),
}, keys_info)
- self.assertEqual({}, sepolicy_keys_info)
def test_ReadApexKeysInfo_presignedKeys(self):
apex_keys = self.APEX_KEYS_TXT + (
@@ -588,7 +584,7 @@
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
- keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+ keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
'apex.apexd_test.apex': (
@@ -598,72 +594,6 @@
'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
'build/make/target/product/security/testkey', None),
}, keys_info)
- self.assertEqual({}, sepolicy_keys_info)
-
- def test_ReadApexKeysInfo_withSepolicyKeys(self):
- apex_keys = self.APEX_KEYS_TXT + (
- 'name="sepolicy.apex" '
- 'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
- 'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
- 'container_certificate="build/make/target/product/security/testkey.x509.pem" '
- 'container_private_key="build/make/target/product/security/testkey.pk8" '
- 'sepolicy_key="build/make/target/product/security/testkey.key" '
- 'sepolicy_certificate="build/make/target/product/security/testkey.x509.pem" '
- 'fsverity_tool="fsverity"')
- target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
- target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
- with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
- keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
- self.assertEqual({
- 'apex.apexd_test.apex': (
- 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
- 'build/make/target/product/security/testkey', None),
- 'apex.apexd_test_different_app.apex': (
- 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
- 'build/make/target/product/security/testkey', None),
- 'sepolicy.apex': (
- 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
- 'build/make/target/product/security/testkey', None),
- }, keys_info)
- self.assertEqual({'sepolicy.apex': (
- 'build/make/target/product/security/testkey.key',
- 'build/make/target/product/security/testkey.x509.pem',
- 'fsverity'),
- }, sepolicy_keys_info)
-
- def test_ReadApexKeysInfo_withSepolicyApex(self):
- apex_keys = self.APEX_KEYS_TXT + (
- 'name="sepolicy.apex" '
- 'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
- 'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
- 'container_certificate="build/make/target/product/security/testkey.x509.pem" '
- 'container_private_key="build/make/target/product/security/testkey.pk8" ')
- target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
- target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
- with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
- keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
- self.assertEqual({
- 'apex.apexd_test.apex': (
- 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
- 'build/make/target/product/security/testkey', None),
- 'apex.apexd_test_different_app.apex': (
- 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
- 'build/make/target/product/security/testkey', None),
- 'sepolicy.apex': (
- 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
- 'build/make/target/product/security/testkey', None),
- }, keys_info)
- self.assertEqual({'sepolicy.apex': (
- None,
- None,
- None),
- }, sepolicy_keys_info)
def test_ReplaceGkiSigningKey(self):
common.OPTIONS.gki_signing_key = 'release_gki_key'
diff --git a/tools/releasetools/testdata/sepolicy.apex b/tools/releasetools/testdata/sepolicy.apex
index f7d267d..2c646cd 100644
--- a/tools/releasetools/testdata/sepolicy.apex
+++ b/tools/releasetools/testdata/sepolicy.apex
Binary files differ
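The updated tests above expect `ReadApexKeysInfo` to return a single mapping rather than a `(keys_info, sepolicy_keys_info)` pair. A minimal sketch of the shape being asserted, assuming only what the test data shows (the meaning of the third tuple element, `None` in every case here, is not visible in this diff):

```
# Shape sketch only: the single mapping the tests above now assert against,
# keyed by APEX file name. The third tuple element is None in every test
# shown here; this diff does not reveal what a non-None value would carry.
keys_info = {
    'apex.apexd_test.apex': (
        'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
        'build/make/target/product/security/testkey',
        None),
}
payload_key, container_key, extra = keys_info['apex.apexd_test.apex']
```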
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index c127dbe..36a220c 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -901,7 +901,7 @@
* Tries to load a JSE Provider by class name. This is for custom PrivateKey
* types that might be stored in PKCS#11-like storage.
*/
- private static void loadProviderIfNecessary(String providerClassName) {
+ private static void loadProviderIfNecessary(String providerClassName, String providerArg) {
if (providerClassName == null) {
return;
}
@@ -920,27 +920,41 @@
return;
}
- Constructor<?> constructor = null;
- for (Constructor<?> c : klass.getConstructors()) {
- if (c.getParameterTypes().length == 0) {
- constructor = c;
- break;
+ Constructor<?> constructor;
+ Object o = null;
+ if (providerArg == null) {
+ try {
+ constructor = klass.getConstructor();
+ o = constructor.newInstance();
+ } catch (ReflectiveOperationException e) {
+ e.printStackTrace();
+ System.err.println("Unable to instantiate " + providerClassName
+ + " with a zero-arg constructor");
+ System.exit(1);
+ }
+ } else {
+ try {
+ constructor = klass.getConstructor(String.class);
+ o = constructor.newInstance(providerArg);
+ } catch (ReflectiveOperationException e) {
+ // This is expected from JDK 9+; the single-arg constructor accepting the
+ // configuration has been replaced with a configure(String) method to be invoked
+ // after instantiating the Provider with the zero-arg constructor.
+ try {
+ constructor = klass.getConstructor();
+ o = constructor.newInstance();
+ // The configure method will return either the modified Provider or a new
+ // Provider if this one cannot be configured in-place.
+ o = klass.getMethod("configure", String.class).invoke(o, providerArg);
+ } catch (ReflectiveOperationException roe) {
+ roe.printStackTrace();
+ System.err.println("Unable to instantiate " + providerClassName
+ + " with the provided argument " + providerArg);
+ System.exit(1);
+ }
}
}
- if (constructor == null) {
- System.err.println("No zero-arg constructor found for " + providerClassName);
- System.exit(1);
- return;
- }
- final Object o;
- try {
- o = constructor.newInstance();
- } catch (Exception e) {
- e.printStackTrace();
- System.exit(1);
- return;
- }
if (!(o instanceof Provider)) {
System.err.println("Not a Provider class: " + providerClassName);
System.exit(1);
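The refactored loader above tries the single-argument constructor first and, when reflection fails, falls back to the zero-argument constructor followed by `configure(String)`, matching the JDK 9+ `Provider` API. A rough Python rendering of that control flow, purely illustrative (`cls` stands in for the reflected provider class and is hypothetical here):

```
# Illustrative Python rendering of the Java fallback above, not the Java code.
def instantiate_provider(cls, provider_arg=None):
    if provider_arg is None:
        return cls()  # zero-arg construction, as before this change
    try:
        # Pre-JDK-9 style: a single-arg constructor takes the configuration.
        return cls(provider_arg)
    except TypeError:
        # JDK 9+ style: zero-arg constructor, then configure(arg).
        # configure() may return a new, configured instance.
        obj = cls()
        configured = obj.configure(provider_arg)
        return configured if configured is not None else obj
```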
@@ -1049,6 +1063,7 @@
"[-a <alignment>] " +
"[--align-file-size] " +
"[-providerClass <className>] " +
+ "[-providerArg <configureArg>] " +
"[-loadPrivateKeysFromKeyStore <keyStoreName>]" +
"[-keyStorePin <pin>]" +
"[--min-sdk-version <n>] " +
@@ -1073,6 +1088,7 @@
boolean signWholeFile = false;
String providerClass = null;
+ String providerArg = null;
String keyStoreName = null;
String keyStorePin = null;
int alignment = 4;
@@ -1093,6 +1109,12 @@
}
providerClass = args[++argstart];
++argstart;
+ } else if ("-providerArg".equals(args[argstart])) {
+ if (argstart + 1 >= args.length) {
+ usage();
+ }
+ providerArg = args[++argstart];
+ ++argstart;
} else if ("-loadPrivateKeysFromKeyStore".equals(args[argstart])) {
if (argstart + 1 >= args.length) {
usage();
@@ -1153,7 +1175,7 @@
System.exit(2);
}
- loadProviderIfNecessary(providerClass);
+ loadProviderIfNecessary(providerClass, providerArg);
String inputFilename = args[numArgsExcludeV4FilePath - 2];
String outputFilename = args[numArgsExcludeV4FilePath - 1];
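For PKCS#11-backed keys the two flags are meant to be used together, e.g. `-providerClass sun.security.pkcs11.SunPKCS11 -providerArg /path/to/pkcs11.cfg` (the config path is a placeholder). `SunPKCS11` is the motivating case for the fallback above: JDK 9 removed its single-argument constructor in favor of `configure(String)`.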
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index 3fa822a..46ba253 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -56,6 +56,7 @@
from __future__ import print_function
import csv
+import datetime
import html
import sys
@@ -258,7 +259,7 @@
def dump_stats(writer, warn_patterns):
- """Dump some stats about total number of warnings and such."""
+ """Dump some stats about total number of warnings and date."""
known = 0
skipped = 0
@@ -279,6 +280,8 @@
if total < 1000:
extra_msg = ' (low count may indicate incremental build)'
writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
+ date_time_str = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
+ writer('<p>(generated on ' + date_time_str + ')')
# New base table of warnings, [severity, warn_id, project, warning_message]
@@ -662,15 +665,26 @@
var warningsOfFiles = {};
var warningsOfDirs = {};
var subDirs = {};
- function addOneWarning(map, key) {
- map[key] = 1 + ((key in map) ? map[key] : 0);
+ function addOneWarning(map, key, type, unique) {
+ function increaseCounter(idx) {
+ map[idx] = 1 + ((idx in map) ? map[idx] : 0);
+ }
+ increaseCounter(key)
+ if (type != "") {
+ increaseCounter(type + " " + key)
+ if (unique) {
+ increaseCounter(type + " *")
+ }
+ }
}
for (var i = 0; i < numWarnings; i++) {
- var file = WarningMessages[i].replace(/:.*/, "");
- addOneWarning(warningsOfFiles, file);
+ var message = WarningMessages[i]
+ var file = message.replace(/:.*/, "");
+ var warningType = message.endsWith("]") ? message.replace(/.*\[/, "[") : "";
+ addOneWarning(warningsOfFiles, file, warningType, true);
var dirs = file.split("/");
var dir = dirs[0];
- addOneWarning(warningsOfDirs, dir);
+ addOneWarning(warningsOfDirs, dir, warningType, true);
for (var d = 1; d < dirs.length - 1; d++) {
var subDir = dir + "/" + dirs[d];
if (!(dir in subDirs)) {
@@ -678,7 +692,7 @@
}
subDirs[dir][subDir] = 1;
dir = subDir;
- addOneWarning(warningsOfDirs, dir);
+ addOneWarning(warningsOfDirs, dir, warningType, false);
}
}
var minDirWarnings = numWarnings*(LimitPercentWarnings/100);
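The extended `addOneWarning` above maintains three counter families: one per file or directory key, one per `type + " " + key` pair, and, when the warning is being counted for the first time (`unique`), one per-type total under `type + " *"`. A small Python equivalent of the aggregation, with illustrative names:

```
# Illustrative Python equivalent of the JavaScript counting above.
from collections import defaultdict

def add_one_warning(counts, key, warn_type='', unique=False):
    counts[key] += 1                          # per-file or per-directory count
    if warn_type:
        counts[warn_type + ' ' + key] += 1    # count for this (type, key) pair
        if unique:
            counts[warn_type + ' *'] += 1     # per-type total, counted once

counts = defaultdict(int)
add_one_warning(counts, 'external', '[-Wunused-variable]', unique=True)
```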
@@ -725,27 +739,33 @@
document.getElementById(divName));
table.draw(view, {allowHtml: true, alternatingRowStyle: true});
}
- addTable("Directory", "top_dirs_table", TopDirs, "selectDir");
- addTable("File", "top_files_table", TopFiles, "selectFile");
+ addTable("[Warning Type] Directory", "top_dirs_table", TopDirs, "selectDir");
+ addTable("[Warning Type] File", "top_files_table", TopFiles, "selectFile");
}
function selectDirFile(idx, rows, dirFile) {
if (rows.length <= idx) {
return;
}
var name = rows[idx][2];
+ var type = "";
+ if (name.startsWith("[")) {
+ type = " " + name.replace(/ .*/, "");
+ name = name.replace(/.* /, "");
+ }
var spanName = "selected_" + dirFile + "_name";
- document.getElementById(spanName).innerHTML = name;
+ document.getElementById(spanName).innerHTML = name + type;
var divName = "selected_" + dirFile + "_warnings";
var numWarnings = rows[idx][1].v;
var prefix = name.replace(/\\.\\.\\.$/, "");
var data = new google.visualization.DataTable();
- data.addColumn('string', numWarnings + ' warnings in ' + name);
+ data.addColumn('string', numWarnings + type + ' warnings in ' + name);
var getWarningMessage = (FlagPlatform == "chrome")
? ((x) => addURLToLine(WarningMessages[Warnings[x][2]],
WarningLinks[Warnings[x][3]]))
: ((x) => addURL(WarningMessages[Warnings[x][2]]));
for (var i = 0; i < Warnings.length; i++) {
- if (WarningMessages[Warnings[i][2]].startsWith(prefix)) {
+ if ((prefix.startsWith("*") || WarningMessages[Warnings[i][2]].startsWith(prefix)) &&
+ (type == "" || WarningMessages[Warnings[i][2]].endsWith(type))) {
data.addRow([getWarningMessage(i)]);
}
}
@@ -827,14 +847,14 @@
def section2():
dump_dir_file_section(
writer, 'directory', 'top_dirs_table',
- 'Directories with at least ' +
- str(LIMIT_PERCENT_WARNINGS) + '% warnings')
+ 'Directories/Warnings with at least ' +
+ str(LIMIT_PERCENT_WARNINGS) + '% of all warnings')
def section3():
dump_dir_file_section(
writer, 'file', 'top_files_table',
- 'Files with at least ' +
- str(LIMIT_PERCENT_WARNINGS) + '% or ' +
- str(LIMIT_WARNINGS_PER_FILE) + ' warnings')
+ 'Files/Warnings with at least ' +
+ str(LIMIT_PERCENT_WARNINGS) + '% of all warnings or ' +
+ str(LIMIT_WARNINGS_PER_FILE) + ' warnings')
def section4():
writer('<script>')
emit_js_data(writer, flags, warning_messages, warning_links,
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 61c8676..aa68313 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -64,6 +64,10 @@
from . import tidy_warn_patterns as tidy_patterns
+# The location of this file is used to guess the root of the Android source tree.
+THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'
+
+
def parse_args(use_google3):
"""Define and parse the args. Return the parse_args() result."""
parser = argparse.ArgumentParser(
@@ -217,17 +221,27 @@
return link
-def find_warn_py_and_android_root(path):
- """Return android source root path if warn.py is found."""
+def find_this_file_and_android_root(path):
+ """Return android source root path if this file is found."""
parts = path.split('/')
for idx in reversed(range(2, len(parts))):
root_path = '/'.join(parts[:idx])
# Android root directory should contain this script.
- if os.path.exists(root_path + '/build/make/tools/warn.py'):
+ if os.path.exists(root_path + '/' + THIS_FILE_PATH):
return root_path
return ''
+def find_android_root_top_dirs(root_dir):
+ """Return a list of directories under the root_dir, if it exists."""
+ # Root directory should contain at least build/make and build/soong.
+ if (not os.path.isdir(root_dir + '/build/make') or
+ not os.path.isdir(root_dir + '/build/soong')):
+ return None
+ return list(filter(lambda d: os.path.isdir(root_dir + '/' + d),
+ os.listdir(root_dir)))
+
+
def find_android_root(buildlog):
"""Guess android source root from common prefix of file paths."""
# Use the longest common prefix of the absolute file paths
@@ -239,8 +253,8 @@
# We want to find android_root of a local build machine.
# Do not use RBE warning lines, which has '/b/f/w/' path prefix.
# Do not use /tmp/ file warnings.
- if warning_pattern.match(line) and (
- '/b/f/w' not in line and not line.startswith('/tmp/')):
+ if ('/b/f/w' not in line and not line.startswith('/tmp/') and
+ warning_pattern.match(line)):
warning_lines.append(line)
count += 1
if count > 9999:
@@ -249,17 +263,26 @@
# the source tree root.
if count < 100:
path = os.path.normpath(re.sub(':.*$', '', line))
- android_root = find_warn_py_and_android_root(path)
+ android_root = find_this_file_and_android_root(path)
if android_root:
- return android_root
+ return android_root, find_android_root_top_dirs(android_root)
# Do not use common prefix of a small number of paths.
+ android_root = ''
if count > 10:
# pytype: disable=wrong-arg-types
root_path = os.path.commonprefix(warning_lines)
# pytype: enable=wrong-arg-types
if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
- return root_path[:-1]
- return ''
+ android_root = root_path[:-1]
+ if android_root and os.path.isdir(android_root):
+ return android_root, find_android_root_top_dirs(android_root)
+ # When the build.log file has been moved to a different machine where
+ # android_root cannot be found, use the location of this script
+ # to find the android source tree subdirectories.
+ if __file__.endswith('/' + THIS_FILE_PATH):
+ script_root = __file__.replace('/' + THIS_FILE_PATH, '')
+ return android_root, find_android_root_top_dirs(script_root)
+ return android_root, None
def remove_android_root_prefix(path, android_root):
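`find_android_root` now returns an `(android_root, top_dirs)` pair; the top-level directory names feed the prefix-stripping pattern introduced further below. The root lookup itself walks a warning's file path upward until a prefix containing this script is found, condensed here into a standalone sketch:

```
import os

THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'

def guess_android_root(path):
    # Walk the absolute path upward until a prefix that contains this
    # script is found, as in find_this_file_and_android_root() above.
    parts = path.split('/')
    for idx in reversed(range(2, len(parts))):
        root = '/'.join(parts[:idx])
        if os.path.exists(root + '/' + THIS_FILE_PATH):
            return root
    return ''
```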
@@ -310,8 +333,6 @@
warning_pattern = re.compile(chrome_warning_pattern)
# Collect all unique warning lines
- # Remove the duplicated warnings save ~8% of time when parsing
- # one typical build log than before
unique_warnings = dict()
for line in infile:
if warning_pattern.match(line):
@@ -353,8 +374,7 @@
target_product = 'unknown'
target_variant = 'unknown'
build_id = 'unknown'
- use_rbe = False
- android_root = find_android_root(infile)
+ android_root, root_top_dirs = find_android_root(infile)
infile.seek(0)
# rustc warning messages have two lines that should be combined:
@@ -367,24 +387,39 @@
# C/C++ compiler warning messages have line and column numbers:
# some/path/file.c:line_number:column_number: warning: description
warning_pattern = re.compile('(^[^ ]*/[^ ]*: warning: .*)|(^warning: .*)')
- warning_without_file = re.compile('^warning: .*')
rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
- # If RBE was used, try to reclaim some warning lines mixed with some
- # leading chars from other concurrent job's stderr output .
+ # If RBE was used, try to reclaim some warning lines (from stdout)
+ # that contain leading characters from stderr.
# The leading characters can be any character, including digits and spaces.
- # It's impossible to correctly identify the starting point of the source
- # file path without the file directory name knowledge.
- # Here we can only be sure to recover lines containing "/b/f/w/".
- rbe_warning_pattern = re.compile('.*/b/f/w/[^ ]*: warning: .*')
- # Collect all unique warning lines
- # Remove the duplicated warnings save ~8% of time when parsing
- # one typical build log than before
+ # If a warning line's source file path contains the special RBE prefix
+ # /b/f/w/, we can remove all leading chars up to and including the "/b/f/w/".
+ bfw_warning_pattern = re.compile('.*/b/f/w/([^ ]*: warning: .*)')
+
+ # When android_root is known and available, we find its top directories
+ # and remove all leading chars before a top directory name.
+ # We assume that the leading chars from stderr do not contain "/".
+ # For example,
+ # 10external/...
+ # 12 warningsexternal/...
+ # 413 warningexternal/...
+ # 5 warnings generatedexternal/...
+ # Suppressed 1000 warnings (packages/modules/...
+ if root_top_dirs:
+ extra_warning_pattern = re.compile(
+ '^.[^/]*((' + '|'.join(root_top_dirs) +
+ ')/[^ ]*: warning: .*)')
+ else:
+ extra_warning_pattern = re.compile('^[^/]* ([^ /]*/[^ ]*: warning: .*)')
+
+ # Collect all unique warning lines
unique_warnings = dict()
+ checked_warning_lines = dict()
line_counter = 0
prev_warning = ''
for line in infile:
+ line_counter += 1
if prev_warning:
if rustc_file_position.match(line):
# must be a rustc warning, combine 2 lines into one warning
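With the top directories known, `extra_warning_pattern` anchors on the first top-level directory name and captures only the genuine warning text that follows any glued-on stderr prefix. A quick demonstration of the capture (the pattern is copied from the code above; the sample line follows the commented examples):

```
import re

root_top_dirs = ['external', 'packages']
extra_warning_pattern = re.compile(
    '^.[^/]*((' + '|'.join(root_top_dirs) + ')/[^ ]*: warning: .*)')

line = '12 warningsexternal/foo/bar.c:3:1: warning: unused variable'
m = extra_warning_pattern.search(line)
if m:
    print(m.group(1))  # -> 'external/foo/bar.c:3:1: warning: unused variable'
```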
@@ -399,14 +434,31 @@
prev_warning, flags, android_root, unique_warnings)
prev_warning = ''
- if use_rbe and rbe_warning_pattern.match(line):
- cleaned_up_line = re.sub('.*/b/f/w/', '', line)
- unique_warnings = add_normalized_line_to_warnings(
- cleaned_up_line, flags, android_root, unique_warnings)
+ # re.match is slow when it runs several warning-line patterns
+ # against long input lines such as "TIMEOUT: ...".
+ # We save significant time by skipping non-warning lines early.
+ # But do not skip the first 100 lines, because we still want to
+ # catch the build variables they contain.
+ if line_counter > 100 and line.find('warning: ') < 0:
continue
+ # A large clean-build log can consist of up to 90% duplicated
+ # "warning:" lines. Skipping those quickly speeds up
+ # this for-loop by 3X to 5X.
+ if line in checked_warning_lines:
+ continue
+ checked_warning_lines[line] = True
+
+ # Clean up extra prefix that could be introduced when RBE was used.
+ if '/b/f/w/' in line:
+ result = bfw_warning_pattern.search(line)
+ else:
+ result = extra_warning_pattern.search(line)
+ if result is not None:
+ line = result.group(1)
+
if warning_pattern.match(line):
- if warning_without_file.match(line):
+ if line.startswith('warning: '):
# save this line and combine it with the next line
prev_warning = line
else:
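Two fast paths make this loop cheap: a substring test that rejects non-warning lines before any regex runs, and a seen-line table that drops exact duplicates. A standalone sketch of the pair of checks (a set would serve equally well; the code above uses a dict):

```
# Illustrative sketch of the two fast-path checks in the loop above.
checked_warning_lines = {}

def should_process(line, line_counter):
    # Cheap substring test first: most lines are not warnings at all,
    # and rejecting them here avoids several slow regex matches.
    if line_counter > 100 and 'warning: ' not in line:
        return False
    # Then drop exact-duplicate warning lines, which can dominate the log.
    if line in checked_warning_lines:
        return False
    checked_warning_lines[line] = True
    return True
```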
@@ -416,7 +468,6 @@
if line_counter < 100:
# save a little bit of time by only doing this for the first few lines
- line_counter += 1
result = re.search('(?<=^PLATFORM_VERSION=).*', line)
if result is not None:
platform_version = result.group(0)
@@ -433,13 +484,6 @@
if result is not None:
build_id = result.group(0)
continue
- result = re.search('(?<=^TOP=).*', line)
- if result is not None:
- android_root = result.group(1)
- continue
- if re.search('USE_RBE=', line) is not None:
- use_rbe = True
- continue
if android_root:
new_unique_warnings = dict()