Merge "support pre-defined timestamp and uuid when build EROFS images"
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
new file mode 100644
index 0000000..ce75150
--- /dev/null
+++ b/PREUPLOAD.cfg
@@ -0,0 +1,2 @@
+[Hook Scripts]
+do_not_use_DO_NOT_MERGE = ${REPO_ROOT}/build/soong/scripts/check_do_not_merge.sh ${PREUPLOAD_COMMIT}
diff --git a/core/Makefile b/core/Makefile
index a7553f0..cb5e44c 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1014,6 +1014,15 @@
INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_cmdline "$(INTERNAL_KERNEL_CMDLINE)"
endif
+ifdef INTERNAL_BOOTCONFIG
+INTERNAL_VENDOR_BOOTCONFIG_TARGET := $(PRODUCT_OUT)/vendor-bootconfig.img
+$(INTERNAL_VENDOR_BOOTCONFIG_TARGET):
+ rm -f $@
+ $(foreach param,$(INTERNAL_BOOTCONFIG), \
+ printf "%s\n" $(param) >> $@;)
+INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_bootconfig $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
+endif
+
# $(1): Build target name
# $(2): Staging dir to be compressed
# $(3): Build dependencies
@@ -1061,7 +1070,7 @@
INSTALLED_VENDOR_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot.img
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DTBIMAGE_TARGET)
-$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS) $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
ifeq ($(BOARD_AVB_ENABLE),true)
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOTIMAGE_KEY_PATH)
$(call pretty,"Target vendor_boot image: $@")
@@ -4136,6 +4145,7 @@
mkuserimg_mke2fs \
ota_from_target_files \
repack_bootimg \
+ secilc \
sefcontext_compile \
sgdisk \
shflags \
@@ -4598,11 +4608,15 @@
ifdef BUILDING_VENDOR_BOOT_IMAGE
$(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
$(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+ # The vendor ramdisk may be built from the recovery ramdisk.
+ ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
+ endif
endif
ifdef BUILDING_RECOVERY_IMAGE
# TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other
- # BUILD_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm
+ # BUILT_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm
# commands in build-recoveryimage-target, which would touch the files under
# TARGET_RECOVERY_OUT and race with packaging target-files.zip.
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
@@ -4793,6 +4807,9 @@
ifdef INSTALLED_DTBIMAGE_TARGET
cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/VENDOR_BOOT/dtb
endif
+ifdef INTERNAL_VENDOR_BOOTCONFIG_TARGET
+ cp $(INTERNAL_VENDOR_BOOTCONFIG_TARGET) $(zip_root)/VENDOR_BOOT/vendor_bootconfig
+endif
ifdef BOARD_KERNEL_BASE
echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/VENDOR_BOOT/base
endif
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index 5767996..ad96b5b 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -104,17 +104,19 @@
my_enforced_uses_libraries :=
ifdef LOCAL_ENFORCE_USES_LIBRARIES
- my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.timestamp
+ my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.status
$(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(LOCAL_USES_LIBRARIES)
$(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(LOCAL_OPTIONAL_USES_LIBRARIES)
+ $(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(RELAX_USES_LIBRARY_CHECK)
$(my_enforced_uses_libraries): $(BUILD_SYSTEM)/verify_uses_libraries.sh $(AAPT)
$(my_enforced_uses_libraries): $(my_prebuilt_src_file)
@echo Verifying uses-libraries: $<
+ rm -f $@
aapt_binary=$(AAPT) \
uses_library_names="$(strip $(PRIVATE_USES_LIBRARIES))" \
optional_uses_library_names="$(strip $(PRIVATE_OPTIONAL_USES_LIBRARIES))" \
- $(BUILD_SYSTEM)/verify_uses_libraries.sh $<
- touch $@
+ relax_check="$(strip $(PRIVATE_RELAX_CHECK))" \
+ $(BUILD_SYSTEM)/verify_uses_libraries.sh $< $@
$(built_module) : $(my_enforced_uses_libraries)
endif
@@ -237,6 +239,7 @@
$(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(ZIP2ZIP) $(SIGNAPK_JAR)
$(transform-prebuilt-to-target)
$(uncompress-prebuilt-embedded-jni-libs)
+ $(remove-unwanted-prebuilt-embedded-jni-libs)
ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
$(uncompress-dexs)
endif # LOCAL_UNCOMPRESS_DEX
diff --git a/core/board_config.mk b/core/board_config.mk
index 183bdbb..245a639 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -25,6 +25,7 @@
_board_strip_readonly_list += BOARD_HAVE_BLUETOOTH
_board_strip_readonly_list += BOARD_INSTALLER_CMDLINE
_board_strip_readonly_list += BOARD_KERNEL_CMDLINE
+_board_strip_readonly_list += BOARD_BOOTCONFIG
_board_strip_readonly_list += BOARD_KERNEL_BASE
_board_strip_readonly_list += BOARD_USES_GENERIC_AUDIO
_board_strip_readonly_list += BOARD_USES_RECOVERY_AS_BOOT
@@ -222,6 +223,7 @@
.KATI_READONLY := $(_board_strip_readonly_list)
INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
+INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG)
ifneq ($(filter %64,$(TARGET_ARCH)),)
TARGET_IS_64_BIT := true
diff --git a/core/config.mk b/core/config.mk
index ed6429a..3bd3622 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -149,14 +149,14 @@
2ND_TARGET_PROJECT_SYSTEM_INCLUDES \
,Project include variables have been removed)
$(KATI_obsolete_var TARGET_PREFER_32_BIT TARGET_PREFER_32_BIT_APPS TARGET_PREFER_32_BIT_EXECUTABLES)
-$(KATI_obsolete_var PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST.)
-$(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST.)
+$(KATI_obsolete_var PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST)
+$(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST)
$(KATI_obsolete_var COVERAGE_PATHS,Use NATIVE_COVERAGE_PATHS instead)
$(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
-$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported.)
-$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.)
-$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead.)
-$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead.)
+$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported)
+$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead)
+$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead)
+$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead)
$(KATI_obsolete_var TARGET_NO_VENDOR_BOOT,Use PRODUCT_BUILD_VENDOR_BOOT_IMAGE instead)
# Used to force goals to build. Only use for conditionally defined goals.
@@ -479,6 +479,17 @@
USE_D8 := true
.KATI_READONLY := USE_D8
+# Whether to fail immediately if verify_uses_libraries check fails, or to keep
+# going and restrict dexpreopt to not compile any code for the failed module.
+#
+# The intended use case for this flag is to have a smoother migration path for
+# the Java modules that need to add <uses-library> information in their build
+# files. The flag allows to quickly silence build errors. This flag should be
+# used with caution and only as a temporary measure, as it masks real errors
+# and affects performance.
+RELAX_USES_LIBRARY_CHECK ?= false
+.KATI_READONLY := RELAX_USES_LIBRARY_CHECK
+
#
# Tools that are prebuilts for TARGET_BUILD_USE_PREBUILT_SDKS
#
diff --git a/core/definitions.mk b/core/definitions.mk
index 5f0bf55..2883f0d 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -556,7 +556,7 @@
$(foreach m, $(ALL_MODULES), \
$(eval ALL_MODULES.$(m).NOTICE_DEPS := \
$(sort \
- $(foreach d,$(ALL_MODULES.$(m).NOTICE_DEPS), \
+ $(foreach d,$(sort $(ALL_MODULES.$(m).NOTICE_DEPS)), \
$(_lookup.$(d)) \
) \
) \
@@ -578,7 +578,9 @@
define license-metadata-rule
$(strip $(eval _dir := $(call license-metadata-dir)))
$(strip $(eval _deps := $(sort $(filter-out $(_dir)/$(1).meta_lic,$(foreach d,$(ALL_MODULES.$(1).NOTICE_DEPS), $(_dir)/$(d).meta_lic)))))
-$(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)),
+$(strip $(eval _notices := $(sort $(ALL_MODULES.$(1).NOTICES))))
+$(strip $(eval _tgts := $(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED))))
+$(foreach b,$(_tgts),
$(_dir)/$(b).meta_module ::
mkdir -p $$(dir $$@)
echo $(_dir)/$(1).meta_lic >> $$@
@@ -587,31 +589,46 @@
)
$(_dir)/$(1).meta_lic: PRIVATE_KINDS := $(sort $(ALL_MODULES.$(1).LICENSE_KINDS))
$(_dir)/$(1).meta_lic: PRIVATE_CONDITIONS := $(sort $(ALL_MODULES.$(1).LICENSE_CONDITIONS))
-$(_dir)/$(1).meta_lic: PRIVATE_NOTICES := $(sort $(ALL_MODULES.$(1).NOTICES))
+$(_dir)/$(1).meta_lic: PRIVATE_NOTICES := $(_notices)
$(_dir)/$(1).meta_lic: PRIVATE_NOTICE_DEPS := $(_deps)
-$(_dir)/$(1).meta_lic: PRIVATE_TARGETS := $(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED))
-$(_dir)/$(1).meta_lic: PRIVATE_IS_CONTAINER := $(sort $(ALL_MODULES.$(1).IS_CONTAINER))
-$(_dir)/$(1).meta_lic: PRIVATE_PACKAGE_NAME := $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME)
+$(_dir)/$(1).meta_lic: PRIVATE_TARGETS := $(_tgts)
+$(_dir)/$(1).meta_lic: PRIVATE_IS_CONTAINER := $(ALL_MODULES.$(1).IS_CONTAINER)
+$(_dir)/$(1).meta_lic: PRIVATE_PACKAGE_NAME := $(strip $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME))
$(_dir)/$(1).meta_lic: PRIVATE_INSTALL_MAP := $(sort $(ALL_MODULES.$(1).LICENSE_INSTALL_MAP))
-$(_dir)/$(1).meta_lic : $(_deps) $(ALL_MODULES.$(1).NOTICES) $(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
+$(_dir)/$(1).meta_lic : $(_deps) $(_notices) $(foreach b,$(_tgts), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
rm -f $$@
mkdir -p $$(dir $$@)
- build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(filter-out false,$$(PRIVATE_IS_CONTAINER)),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
-
-$(1) : $(_dir)/$(1).meta_lic
-
-$(if $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE),$(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE) : $(_dir)/$(1).meta_lic)
+ build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(PRIVATE_IS_CONTAINER),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
.PHONY: $(1).meta_lic
$(1).meta_lic : $(_dir)/$(1).meta_lic
+$(strip $(eval _mifs := $(sort $(ALL_MODULES.$(1).MODULE_INSTALLED_FILENAMES))))
+$(strip $(eval _infs := $(sort $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE))))
+
+# Emit each installed notice file rule if it references the current module
+$(if $(_infs),$(foreach inf,$(_infs),
+$(if $(strip $(filter $(1),$(INSTALLED_NOTICE_FILES.$(inf).MODULE))),
+$(strip $(eval _mif := $(firstword $(foreach m,$(_mifs),$(if $(filter %/src/$(m).txt,$(inf)),$(m))))))
+
+$(inf) : $(_dir)/$(1).meta_lic
+$(inf): PRIVATE_INSTALLED_MODULE := $(_mif)
+$(inf) : PRIVATE_NOTICES := $(_notices)
+
+$(inf): $(_notices)
+ @echo Notice file: $$< -- $$@
+ mkdir -p $$(dir $$@)
+ awk 'FNR==1 && NR > 1 {print "\n"} {print}' $$(PRIVATE_NOTICES) > $$@
+
+)))
+
endef
###########################################################
## Declares a license metadata build rule for ALL_MODULES
###########################################################
define build-license-metadata
-$(foreach m,$(ALL_MODULES),$(eval $(call license-metadata-rule,$(m))))
+$(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m))))
endef
###########################################################
@@ -2396,14 +2413,19 @@
#
define uncompress-prebuilt-embedded-jni-libs
if (zipinfo $@ 'lib/*.so' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
- $(ZIP2ZIP) -i $@ -o $@.tmp -0 'lib/**/*.so' \
- $(if $(PRIVATE_EMBEDDED_JNI_LIBS), \
- -x 'lib/**/*.so' \
- $(addprefix -X ,$(PRIVATE_EMBEDDED_JNI_LIBS))) && \
- mv -f $@.tmp $@ ; \
+ $(ZIP2ZIP) -i $@ -o $@.tmp -0 'lib/**/*.so' && mv -f $@.tmp $@ ; \
fi
endef
+# Remove unwanted shared JNI libraries embedded in an apk.
+#
+define remove-unwanted-prebuilt-embedded-jni-libs
+ $(if $(PRIVATE_EMBEDDED_JNI_LIBS), \
+ $(ZIP2ZIP) -i $@ -o $@.tmp \
+ -x 'lib/**/*.so' $(addprefix -X ,$(PRIVATE_EMBEDDED_JNI_LIBS)) && \
+ mv -f $@.tmp $@)
+endef
+
# TODO(joeo): If we can ever upgrade to post 3.81 make and get the
# new prebuilt rules to work, we should change this to copy the
# resources to the out directory and then copy the resources.
@@ -2765,7 +2787,8 @@
$(R8_DEBUG_MODE) \
$(PRIVATE_PROGUARD_FLAGS) \
$(addprefix -injars , $(PRIVATE_EXTRA_INPUT_JAR)) \
- $(PRIVATE_DX_FLAGS)
+ $(PRIVATE_DX_FLAGS) \
+ -ignorewarnings
$(hide) touch $(PRIVATE_PROGUARD_DICTIONARY)
endef
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 06e2fb7..dda7de0 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -105,6 +105,7 @@
$(call add_json_bool, IsEng, $(filter eng,$(TARGET_BUILD_VARIANT)))
$(call add_json_bool, SanitizeLite, $(SANITIZE_LITE))
$(call add_json_bool, DefaultAppImages, $(WITH_DEX_PREOPT_APP_IMAGE))
+ $(call add_json_bool, RelaxUsesLibraryCheck, $(filter true,$(RELAX_USES_LIBRARY_CHECK)))
$(call add_json_str, Dex2oatXmx, $(DEX2OAT_XMX))
$(call add_json_str, Dex2oatXms, $(DEX2OAT_XMS))
$(call add_json_str, EmptyDirectory, $(OUT_DIR)/empty)
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index f9a9ba7..cbd3069 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -278,6 +278,7 @@
$(call add_json_list, PreoptFlags, $(LOCAL_DEX_PREOPT_FLAGS))
$(call add_json_str, ProfileClassListing, $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE)))
$(call add_json_bool, ProfileIsTextListing, $(my_profile_is_text_listing))
+ $(call add_json_str, EnforceUsesLibrariesStatusFile, $(intermediates.COMMON)/enforce_uses_libraries.status)
$(call add_json_bool, EnforceUsesLibraries, $(LOCAL_ENFORCE_USES_LIBRARIES))
$(call add_json_str, ProvidesUsesLibrary, $(firstword $(LOCAL_PROVIDES_USES_LIBRARY) $(LOCAL_MODULE)))
$(call add_json_map, ClassLoaderContexts)
@@ -345,6 +346,9 @@
$(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
my_dexpreopt_deps += $(my_dexpreopt_images_deps)
my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
+ ifeq ($(LOCAL_ENFORCE_USES_LIBRARIES),true)
+ my_dexpreopt_deps += $(intermediates.COMMON)/enforce_uses_libraries.status
+ endif
$(my_dexpreopt_zip): PRIVATE_MODULE := $(LOCAL_MODULE)
$(my_dexpreopt_zip): $(my_dexpreopt_deps)
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
new file mode 100644
index 0000000..9b1f2c2
--- /dev/null
+++ b/core/dumpconfig.mk
@@ -0,0 +1,144 @@
+# Read and dump the product configuration.
+
+# Called from the product-config tool, not from the main build system.
+
+#
+# Ensure we are being called correctly
+#
+ifndef KATI
+ $(warning Kati must be used to call dumpconfig.mk, not make.)
+ $(error stopping)
+endif
+
+ifdef DEFAULT_GOAL
+ $(warning Calling dumpconfig.mk from inside the make build system is not)
+  $(warning supported. It is only meant to be called via kati by product-config.)
+ $(error stopping)
+endif
+
+ifndef TARGET_PRODUCT
+ $(warning dumpconfig.mk requires TARGET_PRODUCT to be set)
+ $(error stopping)
+endif
+
+ifndef TARGET_BUILD_VARIANT
+ $(warning dumpconfig.mk requires TARGET_BUILD_VARIANT to be set)
+ $(error stopping)
+endif
+
+ifneq (build/make/core/config.mk,$(wildcard build/make/core/config.mk))
+ $(warning dumpconfig must be called from the root of the source tree)
+ $(error stopping)
+endif
+
+ifeq (,$(DUMPCONFIG_FILE))
+ $(warning dumpconfig requires DUMPCONFIG_FILE to be set)
+ $(error stopping)
+endif
+
+# Skip the second inclusion of all of the product config files, because
+# we will do these checks in the product_config tool.
+SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK := true
+
+# Before we do anything else output the format version.
+$(file > $(DUMPCONFIG_FILE),dumpconfig_version,1)
+$(file >> $(DUMPCONFIG_FILE),dumpconfig_file,$(DUMPCONFIG_FILE))
+
+# Default goal for dumpconfig
+dumpconfig:
+ $(file >> $(DUMPCONFIG_FILE),***DONE***)
+ @echo ***DONE***
+
+# TODO(Remove): These need to be set externally
+OUT_DIR := out
+TMPDIR = /tmp/build-temp
+BUILD_DATETIME_FILE := $(OUT_DIR)/build_date.txt
+
+# Escape quotation marks for CSV, and wraps in quotation marks.
+define escape-for-csv
+"$(subst ","",$1)"
+endef
+
+# Args:
+# $(1): include stack
+define dump-import-start
+$(eval $(file >> $(DUMPCONFIG_FILE),import,$(strip $(1))))
+endef
+
+# Args:
+# $(1): include stack
+define dump-import-done
+$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1))))
+endef
+
+# Args:
+# $(1): Current file
+# $(2): Inherited file
+define dump-inherit
+$(eval $(file >> $(DUMPCONFIG_FILE),inherit,$(strip $(1)),$(strip $(2))))
+endef
+
+# Args:
+# $(1): Config phase (PRODUCT, EXPAND, or DEVICE)
+# $(2): Root nodes to import
+# $(3): All variable names
+# $(4): Single-value variables
+# $(5): Makefile being processed
+define dump-phase-start
+$(eval $(file >> $(DUMPCONFIG_FILE),phase,$(strip $(1)),$(strip $(2)))) \
+$(foreach var,$(3), \
+ $(eval $(file >> $(DUMPCONFIG_FILE),var,$(if $(filter $(4),$(var)),single,list),$(var))) \
+) \
+$(call dump-config-vals,$(strip $(5)),initial)
+endef
+
+# Args:
+# $(1): Makefile being processed
+define dump-phase-end
+$(call dump-config-vals,$(strip $(1)),final)
+endef
+
+define dump-debug
+$(eval $(file >> $(DUMPCONFIG_FILE),debug,$(1)))
+endef
+
+# Skip these when dumping. They're not used and they cause a lot of noise in the dump.
+DUMPCONFIG_SKIP_VARS := \
+ .VARIABLES \
+ .KATI_SYMBOLS \
+ 1 \
+ 2 \
+ 3 \
+ 4 \
+ 5 \
+ 6 \
+ 7 \
+ 8 \
+ 9 \
+ LOCAL_PATH \
+ MAKEFILE_LIST \
+ PARENT_PRODUCT_FILES \
+ current_mk \
+ _eiv_ev \
+ _eiv_i \
+ _eiv_sv \
+ _eiv_tv \
+ inherit_var \
+ np \
+ _node_import_context \
+ _included \
+ _include_stack \
+ _in \
+ _nic.%
+
+# Args:
+# $(1): Makefile that was included
+# $(2): block (before,import,after,initial,final)
+define dump-config-vals
+$(foreach var,$(filter-out $(DUMPCONFIG_SKIP_VARS),$(.KATI_SYMBOLS)),\
+ $(eval $(file >> $(DUMPCONFIG_FILE),val,$(call escape-for-csv,$(1)),$(2),$(call escape-for-csv,$(var)),$(call escape-for-csv,$($(var))),$(call escape-for-csv,$(KATI_variable_location $(var))))) \
+)
+endef
+
+include build/make/core/config.mk
+
diff --git a/core/node_fns.mk b/core/node_fns.mk
index b81d60c..8d20160 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -195,7 +195,11 @@
$(call clear-var-list, $(3))
$(eval LOCAL_PATH := $(patsubst %/,%,$(dir $(2))))
$(eval MAKEFILE_LIST :=)
+ $(call dump-import-start,$(_include_stack))
+ $(call dump-config-vals,$(2),before)
$(eval include $(2))
+ $(call dump-import-done,$(_include_stack))
+ $(call dump-config-vals,$(2),after)
$(eval _included := $(filter-out $(2),$(MAKEFILE_LIST)))
$(eval MAKEFILE_LIST :=)
$(eval LOCAL_PATH :=)
@@ -250,6 +254,7 @@
# of the default list semantics
#
define import-nodes
+$(call dump-phase-start,$(1),$(2),$(3),$(4),build/make/core/node_fns.mk) \
$(if \
$(foreach _in,$(2), \
$(eval _node_import_context := _nic.$(1).[[$(_in)]]) \
@@ -263,5 +268,6 @@
$(if $(_include_stack),$(eval $(error ASSERTION FAILED: _include_stack \
should be empty here: $(_include_stack))),) \
) \
-,)
+,) \
+$(call dump-phase-end,build/make/core/node_fns.mk)
endef
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 89f822b..9678380 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -80,7 +80,7 @@
ifeq (true,$(is_container))
# Include shared libraries' notices for "container" types, but not for binaries etc.
notice_deps := \
- $(sort \
+ $(strip \
$(LOCAL_REQUIRED_MODULES) \
$(LOCAL_STATIC_LIBRARIES) \
$(LOCAL_WHOLE_STATIC_LIBRARIES) \
@@ -95,7 +95,7 @@
)
else
notice_deps := \
- $(sort \
+ $(strip \
$(LOCAL_REQUIRED_MODULES) \
$(LOCAL_STATIC_LIBRARIES) \
$(LOCAL_WHOLE_STATIC_LIBRARIES) \
@@ -106,24 +106,24 @@
)
endif
ifeq ($(LOCAL_IS_HOST_MODULE),true)
-notice_deps := $(sort $(notice_deps) $(LOCAL_HOST_REQUIRED_MODULES))
+notice_deps := $(strip $(notice_deps) $(LOCAL_HOST_REQUIRED_MODULES))
else
-notice_deps := $(sort $(notice_deps) $(LOCAL_TARGET_REQUIRED_MODULES))
+notice_deps := $(strip $(notice_deps) $(LOCAL_TARGET_REQUIRED_MODULES))
endif
ifdef my_register_name
ALL_MODULES.$(my_register_name).LICENSE_PACKAGE_NAME := $(strip $(license_package_name))
-ALL_MODULES.$(my_register_name).LICENSE_KINDS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_KINDS) $(license_kinds))
-ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS) $(license_conditions))
-ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP) $(install_map))
-ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(sort $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps))
-ALL_MODULES.$(my_register_name).IS_CONTAINER := $(sort $(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container))
+ALL_MODULES.$(my_register_name).LICENSE_KINDS := $(ALL_MODULES.$(my_register_name).LICENSE_KINDS) $(license_kinds)
+ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS := $(ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS) $(license_conditions)
+ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP := $(ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP) $(install_map)
+ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps)
+ALL_MODULES.$(my_register_name).IS_CONTAINER := $(strip $(filter-out false,$(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container)))
endif
ifdef notice_file
ifdef my_register_name
-ALL_MODULES.$(my_register_name).NOTICES := $(sort $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file))
+ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
endif
# This relies on the name of the directory in PRODUCT_OUT matching where
@@ -180,9 +180,10 @@
installed_notice_file := $($(my_prefix)OUT_NOTICE_FILES)/src/$(module_installed_filename).txt
ifdef my_register_name
-ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(installed_notice_file)
-endif
-
+ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE) $(installed_notice_file)
+ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES := $(ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES) $(module_installed_filename)
+INSTALLED_NOTICE_FILES.$(installed_notice_file).MODULE := $(my_register_name)
+else
$(installed_notice_file): PRIVATE_INSTALLED_MODULE := $(module_installed_filename)
$(installed_notice_file) : PRIVATE_NOTICES := $(notice_file)
@@ -190,6 +191,7 @@
@echo Notice file: $< -- $@
$(hide) mkdir -p $(dir $@)
$(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $(PRIVATE_NOTICES) > $@
+endif
ifdef LOCAL_INSTALLED_MODULE
# Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 1b40624..346ca24 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -101,7 +101,6 @@
enforce_rro_enabled :=
ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
# * means all system and system_ext APKs, so enable conditionally based on module path.
- # Note that modules in PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS are excluded even if it is '*'
# Note that base_rules.mk has not yet been included, so it's likely that only
# one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
@@ -120,12 +119,6 @@
enforce_rro_enabled := true
endif
-# TODO(b/150820813) Some modules depend on static overlay, remove this after eliminating the dependency.
-ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS)))
- enforce_rro_enabled :=
-endif
-
-
product_package_overlays := $(strip \
$(wildcard $(foreach dir, $(PRODUCT_PACKAGE_OVERLAYS), \
$(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
diff --git a/core/product.mk b/core/product.mk
index ce9bacf..8976dd9 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -195,9 +195,6 @@
# Package list to apply enforcing RRO.
_product_list_vars += PRODUCT_ENFORCE_RRO_TARGETS
-# Packages to skip auto-generating RROs for when PRODUCT_ENFORCE_RRO_TARGETS is set to *.
-_product_list_vars += PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
-
_product_list_vars += PRODUCT_SDK_ATREE_FILES
_product_list_vars += PRODUCT_SDK_ADDON_NAME
_product_list_vars += PRODUCT_SDK_ADDON_COPY_FILES
@@ -460,7 +457,9 @@
$(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
$(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
$(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
- $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk)))
+ $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
+ $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
+ $(call dump-config-vals,$(current_mk),inherit)
endef
# Specifies a number of path prefixes, relative to PRODUCT_OUT, where the
@@ -607,6 +606,8 @@
# to a shorthand that is more convenient to read from elsewhere.
#
define strip-product-vars
+$(call dump-phase-start,PRODUCT-EXPAND,,$(_product_var_list),$(_product_single_value_vars), \
+ build/make/core/product.mk) \
$(foreach v,\
$(_product_var_list) \
PRODUCT_ENFORCE_PACKAGES_EXIST \
@@ -614,7 +615,8 @@
$(eval $(v) := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).$(v)))) \
$(eval get-product-var = $$(if $$(filter $$(1),$$(INTERNAL_PRODUCT)),$$($$(2)),$$(PRODUCTS.$$(strip $$(1)).$$(2)))) \
$(KATI_obsolete_var PRODUCTS.$(INTERNAL_PRODUCT).$(v),Use $(v) instead) \
-)
+) \
+$(call dump-phase-end,build/make/core/product.mk)
endef
define add-to-product-copy-files-if-exists
diff --git a/core/product_config.mk b/core/product_config.mk
index 7b72b5e..11ffade 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -146,6 +146,11 @@
endif
endif
+ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
+_product_config_saved_KATI_ALLOW_RULES := $(.KATI_ALLOW_RULES)
+.KATI_ALLOW_RULES := $(ALLOW_RULES_IN_PRODUCT_CONFIG)
+endif
+
ifeq ($(load_all_product_makefiles),true)
# Import all product makefiles.
$(call import-products, $(all_product_makefiles))
@@ -163,12 +168,19 @@
# Quick check
$(check-all-products)
+ifeq ($(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
# Import all the products that have made artifact path requirements, so that we can verify
# the artifacts they produce.
# These are imported after check-all-products because some of them might not be real products.
$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
$(if $(filter-out $(makefile),$(PRODUCTS)),$(eval $(call import-products,$(makefile))))\
)
+endif
+
+ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
+.KATI_ALLOW_RULES := $(_product_config_saved_KATI_ALLOW_RULES)
+_product_config_saved_KATI_ALLOW_RULES :=
+endif
ifneq ($(filter dump-products, $(MAKECMDGOALS)),)
$(dump-products)
@@ -181,14 +193,16 @@
ifneq ($(current_product_makefile),$(INTERNAL_PRODUCT))
$(error PRODUCT_NAME inconsistent in $(current_product_makefile) and $(INTERNAL_PRODUCT))
endif
-current_product_makefile :=
-all_product_makefiles :=
-all_product_configs :=
+
############################################################################
# Strip and assign the PRODUCT_ variables.
$(call strip-product-vars)
+current_product_makefile :=
+all_product_makefiles :=
+all_product_configs :=
+
#############################################################################
# Quick check and assign default values
@@ -401,6 +415,11 @@
$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+ifdef PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
+ $(error PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS is deprecated, consider using RRO for \
+ $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
+endif
+
define product-overrides-config
$$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
$$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 1569300..9fdf7b8 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -80,7 +80,6 @@
$(call add_json_list, DeviceResourceOverlays, $(DEVICE_PACKAGE_OVERLAYS))
$(call add_json_list, ProductResourceOverlays, $(PRODUCT_PACKAGE_OVERLAYS))
$(call add_json_list, EnforceRROTargets, $(PRODUCT_ENFORCE_RRO_TARGETS))
-$(call add_json_list, EnforceRROExemptedTargets, $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
$(call add_json_list, EnforceRROExcludedOverlays, $(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS))
$(call add_json_str, AAPTCharacteristics, $(TARGET_AAPT_CHARACTERISTICS))
diff --git a/core/verify_uses_libraries.sh b/core/verify_uses_libraries.sh
index dde0447..1bd0a2c 100755
--- a/core/verify_uses_libraries.sh
+++ b/core/verify_uses_libraries.sh
@@ -21,6 +21,7 @@
set -e
local_apk=$1
+status_file=$2
badging=$(${aapt_binary} dump badging "${local_apk}")
export sdk_version=$(echo "${badging}" | grep "sdkVersion" | sed -n "s/sdkVersion:'\(.*\)'/\1/p")
# Export target_sdk_version to the caller.
@@ -28,20 +29,28 @@
uses_libraries=$(echo "${badging}" | grep "uses-library" | sed -n "s/uses-library:'\(.*\)'/\1/p")
optional_uses_libraries=$(echo "${badging}" | grep "uses-library-not-required" | sed -n "s/uses-library-not-required:'\(.*\)'/\1/p")
+errmsg=
+
# Verify that the uses libraries match exactly.
# Currently we validate the ordering of the libraries since it matters for resolution.
single_line_libs=$(echo "${uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
if [[ "${single_line_libs}" != "${uses_library_names}" ]]; then
- echo "LOCAL_USES_LIBRARIES (${uses_library_names})" \
- "do not match (${single_line_libs}) in manifest for ${local_apk}"
- exit 1
+ errmsg="LOCAL_USES_LIBRARIES (${uses_library_names}) do not match (${single_line_libs}) in manifest for ${local_apk}"
fi
# Verify that the optional uses libraries match exactly.
single_line_optional_libs=$(echo "${optional_uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
if [[ "${single_line_optional_libs}" != "${optional_uses_library_names}" ]]; then
- echo "LOCAL_OPTIONAL_USES_LIBRARIES (${optional_uses_library_names}) " \
- "do not match (${single_line_optional_libs}) in manifest for ${local_apk}"
- exit 1
+ errmsg="LOCAL_OPTIONAL_USES_LIBRARIES (${optional_uses_library_names}) do not match (${single_line_optional_libs}) in manifest for ${local_apk}"
fi
+if [[ ! -z "${errmsg}" ]]; then
+ echo "${errmsg}" > "${status_file}"
+ if [[ "${relax_check}" != true ]]; then
+ # fail immediately
+ echo "${errmsg}"
+ exit 1
+ fi
+else
+ touch "${status_file}"
+fi
diff --git a/envsetup.sh b/envsetup.sh
index c03e2cb..58fcd3b 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -27,6 +27,7 @@
- mangrep: Greps on all local AndroidManifest.xml files.
- mgrep: Greps on all local Makefiles and *.bp files.
- owngrep: Greps on all local OWNERS files.
+- rsgrep: Greps on all local Rust files.
- sepgrep: Greps on all local sepolicy files.
- sgrep: Greps on all local source files.
- godir: Go to the directory containing a file.
@@ -34,6 +35,7 @@
- gomod: Go to the directory containing a module.
- pathmod: Get the directory containing a module.
- outmod: Gets the location of a module's installed outputs with a certain extension.
+- dirmods: Gets the modules defined in a given directory.
- installmod: Adb installs a module's built APK.
- refreshmod: Refresh list of modules for allmod/gomod/pathmod/outmod/installmod.
- syswrite: Remount partitions (e.g. system.img) as writable, rebooting if necessary.
@@ -1037,6 +1039,12 @@
-exec grep --color -n "$@" {} +
}
+function rsgrep()
+{
+ find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.rs" \
+ -exec grep --color -n "$@" {} +
+}
+
function cgrep()
{
find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f \( -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) \
@@ -1404,8 +1412,9 @@
python -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
}
-# Get the path of a specific module in the android tree, as cached in module-info.json. If any build change
-# is made, and it should be reflected in the output, you should run 'refreshmod' first.
+# Get the path of a specific module in the android tree, as cached in module-info.json.
+# If any build change is made, and it should be reflected in the output, you should run
+# 'refreshmod' first. Note: This is the inverse of dirmods.
function pathmod() {
if [[ $# -ne 1 ]]; then
echo "usage: pathmod <module>" >&2
@@ -1429,6 +1438,36 @@
fi
}
+# Get the path of a specific module in the android tree, as cached in module-info.json.
+# If any build change is made, and it should be reflected in the output, you should run
+# 'refreshmod' first. Note: This is the inverse of pathmod.
+function dirmods() {
+ if [[ $# -ne 1 ]]; then
+ echo "usage: dirmods <path>" >&2
+ return 1
+ fi
+
+ verifymodinfo || return 1
+
+ python -c "import json, os
+dir = '$1'
+while dir.endswith('/'):
+ dir = dir[:-1]
+prefix = dir + '/'
+module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
+results = set()
+for m in module_info.values():
+ for path in m.get(u'path', []):
+ if path == dir or path.startswith(prefix):
+ name = m.get(u'module_name')
+ if name:
+ results.add(name)
+for name in sorted(results):
+ print(name)
+"
+}
+
+
# Go to a specific module in the android tree, as cached in module-info.json. If any build change
# is made, and it should be reflected in the output, you should run 'refreshmod' first.
function gomod() {
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
index fe0293b..342abd7 100644
--- a/target/board/BoardConfigEmuCommon.mk
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -90,6 +90,3 @@
DEVICE_MATRIX_FILE := device/generic/goldfish/compatibility_matrix.xml
BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
-
-# b/176210699: remove this
-BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE := true
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 5238d40..09864bc 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -214,6 +214,7 @@
ndc \
netd \
NetworkStackNext \
+ odsign \
org.apache.http.legacy \
otacerts \
PackageInstaller \
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 717d990..c753e6c 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -19,12 +19,8 @@
LLNDK: libvndksupport.so
LLNDK: libvulkan.so
VNDK-SP: android.hardware.common-V2-ndk_platform.so
-VNDK-SP: android.hardware.common-unstable-ndk_platform.so
VNDK-SP: android.hardware.common.fmq-V1-ndk_platform.so
-VNDK-SP: android.hardware.common.fmq-ndk_platform.so
-VNDK-SP: android.hardware.common.fmq-unstable-ndk_platform.so
VNDK-SP: android.hardware.graphics.common-V2-ndk_platform.so
-VNDK-SP: android.hardware.graphics.common-unstable-ndk_platform.so
VNDK-SP: android.hardware.graphics.common@1.0.so
VNDK-SP: android.hardware.graphics.common@1.1.so
VNDK-SP: android.hardware.graphics.common@1.2.so
@@ -62,10 +58,7 @@
VNDK-SP: libz.so
VNDK-core: android.hardware.audio.common@2.0.so
VNDK-core: android.hardware.authsecret-V1-ndk_platform.so
-VNDK-core: android.hardware.authsecret-ndk_platform.so
-VNDK-core: android.hardware.authsecret-unstable-ndk_platform.so
VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
-VNDK-core: android.hardware.automotive.occupant_awareness-ndk_platform.so
VNDK-core: android.hardware.configstore-utils.so
VNDK-core: android.hardware.configstore@1.0.so
VNDK-core: android.hardware.configstore@1.1.so
@@ -76,52 +69,28 @@
VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
VNDK-core: android.hardware.health.storage-V1-ndk_platform.so
-VNDK-core: android.hardware.health.storage-ndk_platform.so
-VNDK-core: android.hardware.health.storage-unstable-ndk_platform.so
VNDK-core: android.hardware.identity-V2-ndk_platform.so
-VNDK-core: android.hardware.identity-ndk_platform.so
VNDK-core: android.hardware.keymaster-V2-ndk_platform.so
-VNDK-core: android.hardware.keymaster-ndk_platform.so
VNDK-core: android.hardware.light-V1-ndk_platform.so
-VNDK-core: android.hardware.light-ndk_platform.so
VNDK-core: android.hardware.media.bufferpool@2.0.so
VNDK-core: android.hardware.media.omx@1.0.so
VNDK-core: android.hardware.media@1.0.so
VNDK-core: android.hardware.memtrack-V1-ndk_platform.so
-VNDK-core: android.hardware.memtrack-ndk_platform.so
-VNDK-core: android.hardware.memtrack-unstable-ndk_platform.so
VNDK-core: android.hardware.memtrack@1.0.so
VNDK-core: android.hardware.oemlock-V1-ndk_platform.so
-VNDK-core: android.hardware.oemlock-ndk_platform.so
-VNDK-core: android.hardware.oemlock-unstable-ndk_platform.so
VNDK-core: android.hardware.power-V1-ndk_platform.so
-VNDK-core: android.hardware.power-ndk_platform.so
VNDK-core: android.hardware.power.stats-V1-ndk_platform.so
-VNDK-core: android.hardware.power.stats-ndk_platform.so
-VNDK-core: android.hardware.power.stats-unstable-ndk_platform.so
VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
-VNDK-core: android.hardware.rebootescrow-ndk_platform.so
VNDK-core: android.hardware.security.keymint-V1-ndk_platform.so
-VNDK-core: android.hardware.security.keymint-ndk_platform.so
-VNDK-core: android.hardware.security.keymint-unstable-ndk_platform.so
VNDK-core: android.hardware.security.secureclock-V1-ndk_platform.so
-VNDK-core: android.hardware.security.secureclock-ndk_platform.so
-VNDK-core: android.hardware.security.secureclock-unstable-ndk_platform.so
VNDK-core: android.hardware.security.sharedsecret-V1-ndk_platform.so
-VNDK-core: android.hardware.security.sharedsecret-ndk_platform.so
-VNDK-core: android.hardware.security.sharedsecret-unstable-ndk_platform.so
VNDK-core: android.hardware.soundtrigger@2.0-core.so
VNDK-core: android.hardware.soundtrigger@2.0.so
VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
-VNDK-core: android.hardware.vibrator-ndk_platform.so
VNDK-core: android.hardware.weaver-V1-ndk_platform.so
-VNDK-core: android.hardware.weaver-ndk_platform.so
-VNDK-core: android.hardware.weaver-unstable-ndk_platform.so
VNDK-core: android.hidl.token@1.0-utils.so
VNDK-core: android.hidl.token@1.0.so
VNDK-core: android.system.keystore2-V1-ndk_platform.so
-VNDK-core: android.system.keystore2-ndk_platform.so
-VNDK-core: android.system.keystore2-unstable-ndk_platform.so
VNDK-core: android.system.suspend@1.0.so
VNDK-core: libaudioroute.so
VNDK-core: libaudioutils.so
diff --git a/tools/compare_builds.py b/tools/compare_builds.py
new file mode 100755
index 0000000..838a628
--- /dev/null
+++ b/tools/compare_builds.py
@@ -0,0 +1,661 @@
+#!/usr/bin/env -S python3 -u
+
+"""
+This script helps find various build behaviors that make builds less hermetic
+and repeatable. Depending on the flags, it runs a sequence of builds and looks
+for files that have changed or have been improperly regenerated, updating
+their timestamps incorrectly. It also looks for changes that the build has
+done to the source tree, and for files whose contents are dependent on the
+location of the out directory.
+
+This utility has two major modes, full and incremental. By default, this tool
+runs in full mode. To run in incremental mode, pass the --incremental flag.
+
+
+FULL MODE
+
+In full mode, this tool helps verify BUILD CORRECTNESS by examining its
+REPEATABILITY. In full mode, this tool runs two complete builds in different
+directories and compares the CONTENTS of the two directories. Lists of any
+files that are added, removed or changed are printed, sorted by the timestamp
+of that file, to aid finding which dependencies trigger the rebuilding of
+other files.
+
+
+INCREMENTAL MODE
+
+In incremental mode, this tool helps verify the SPEED of the build. It runs two
+builds and looks at the TIMESTAMPS of the generated files, and reports files
+that were changed by the second build. In theory, an incremental build with no
+source files touched should not have any generated targets changed. As in full
+builds, the file list is returned sorted by timestamp.
+
+
+OTHER CHECKS
+
+In both full and incremental mode, this tool looks at the timestamps of all
+source files in the tree, and reports on files that have been touched. In the
+output, these are labeled with the header "Source files touched after start of
+build."
+
+In addition, by default, this tool sets the OUT_DIR environment variable to
+something other than "out" in order to find build rules that are not respecting
+the OUT_DIR. If you see these, you should fix them, but if your build can not
+complete for some reason because of this, you can pass the --no-check-out-dir
+flag to suppress this check.
+
+
+OTHER FLAGS
+
+In full mode, the --detect-embedded-paths flag does the two builds in different
+directories, to help in finding rules that embed the out directory path into
+the targets.
+
+The --hide-build-output flag hides the output of successful builds, to make
+script output cleaner. The output of builds that fail is still shown.
+
+The --no-build flag is useful if you have already done a build and would
+just like to re-run the analysis.
+
+The --target flag lets you specify a build target other than the default
+full build (droid). You can pass "nothing" as in the example below, or a
+specific target, to reduce the scope of the checks performed.
+
+The --touch flag lets you specify a list of source files to touch between
+the builds, to examine the consequences of editing a particular file.
+
+
+EXAMPLE COMMANDLINES
+
+Please run build/make/tools/compare_builds.py --help for a full listing
+of the commandline flags. Here are a sampling of useful combinations.
+
+ 1. Find files changed during an incremental build that doesn't build
+ any targets.
+
+ build/make/tools/compare_builds.py --incremental --target nothing
+
+ Long incremental build times, or consecutive builds that re-run build actions
+ are usually caused by files being touched as part of loading the makefiles.
+
+ The nothing build (m nothing) loads the make and blueprint files, generates
+ the dependency graph, but then doesn't actually build any targets. Checking
+ against this build is the fastest and easiest way to find files that are
+ modified while makefiles are read, for example with $(shell) invocations.
+
+ 2. Find packaging targets that are different, ignoring intermediate files.
+
+ build/make/tools/compare_builds.py --subdirs --detect-embedded-paths
+
+ These flags will compare the final staging directories for partitions,
+ as well as the APKs, apexes, testcases, and the like (the full directory
+ list is in the DEFAULT_DIRS variable below). Since these are the files
+ that are ultimately released, it is more important that these files be
+ replicable, even if the intermediates that went into them are not (for
+ example, when debugging symbols are stripped).
+
+ 3. Check that all targets are repeatable.
+
+ build/make/tools/compare_builds.py --detect-embedded-paths
+
+ This check will list all of the differences in built targets that it can
+ find. Be aware that the AOSP tree still has quite a few targets that
+ are flagged by this check, so OEM changes might be lost in that list.
+ That said, each file shown here is a potential blocker for a repeatable
+ build.
+
+ 4. See what targets are rebuilt when a file is touched between builds.
+
+ build/make/tools/compare_builds.py --incremental \
+ --touch frameworks/base/core/java/android/app/Activity.java
+
+ This check simulates the common engineer workflow of touching a single
+ file and rebuilding the whole system. To see a restricted view, consider
+ also passing a --target option for a common use case. For example:
+
+ build/make/tools/compare_builds.py --incremental --target framework \
+ --touch frameworks/base/core/java/android/app/Activity.java
+"""
+
+import argparse
+import itertools
+import os
+import shutil
+import stat
+import subprocess
+import sys
+
+
+# Soong
+SOONG_UI = "build/soong/soong_ui.bash"
+
+
+# Which directories to use if no --subdirs is supplied without explicit directories.
+DEFAULT_DIRS = (
+ "apex",
+ "data",
+ "product",
+ "ramdisk",
+ "recovery",
+ "root",
+ "system",
+ "system_ext",
+ "system_other",
+ "testcases",
+ "vendor",
+)
+
+
+# Files to skip for incremental timestamp checking
+BUILD_INTERNALS_PREFIX_SKIP = (
+ "soong/.glob/",
+ ".path/",
+)
+
+
+BUILD_INTERNALS_SUFFIX_SKIP = (
+ "/soong/soong_build_metrics.pb",
+ "/.installable_test_files",
+ "/files.db",
+ "/.blueprint.bootstrap",
+ "/build_number.txt",
+ "/build.ninja",
+ "/.out-dir",
+ "/build_fingerprint.txt",
+ "/build_thumbprint.txt",
+ "/.copied_headers_list",
+ "/.installable_files",
+)
+
+
+class DiffType(object):
+ def __init__(self, code, message):
+ self.code = code
+ self.message = message
+
+DIFF_NONE = DiffType("DIFF_NONE", "Files are the same")
+DIFF_MODE = DiffType("DIFF_MODE", "Stat mode bits differ")
+DIFF_SIZE = DiffType("DIFF_SIZE", "File size differs")
+DIFF_SYMLINK = DiffType("DIFF_SYMLINK", "Symlinks point to different locations")
+DIFF_CONTENTS = DiffType("DIFF_CONTENTS", "File contents differ")
+
+
+def main():
+ argparser = argparse.ArgumentParser(description="Diff build outputs from two builds.",
+ epilog="Run this command from the root of the tree."
+ + " Before running this command, the build environment"
+ + " must be set up, including sourcing build/envsetup.sh"
+ + " and running lunch.")
+ argparser.add_argument("--detect-embedded-paths", action="store_true",
+ help="Use unique out dirs to detect paths embedded in binaries.")
+ argparser.add_argument("--incremental", action="store_true",
+ help="Compare which files are touched in two consecutive builds without a clean in between.")
+ argparser.add_argument("--hide-build-output", action="store_true",
+ help="Don't print the build output for successful builds")
+ argparser.add_argument("--no-build", dest="run_build", action="store_false",
+ help="Don't build or clean, but do everything else.")
+ argparser.add_argument("--no-check-out-dir", dest="check_out_dir", action="store_false",
+ help="Don't check for rules not honoring movable out directories.")
+ argparser.add_argument("--subdirs", nargs="*",
+ help="Only scan these subdirs of $PRODUCT_OUT instead of the whole out directory."
+ + " The --subdirs argument with no listed directories will give a default list.")
+ argparser.add_argument("--target", default="droid",
+ help="Make target to run. The default is droid")
+ argparser.add_argument("--touch", nargs="+", default=[],
+ help="Files to touch between builds. Must pair with --incremental.")
+ args = argparser.parse_args(sys.argv[1:])
+
+ if args.detect_embedded_paths and args.incremental:
+ sys.stderr.write("Can't pass --detect-embedded-paths and --incremental together.\n")
+ sys.exit(1)
+ if args.detect_embedded_paths and not args.check_out_dir:
+ sys.stderr.write("Can't pass --detect-embedded-paths and --no-check-out-dir together.\n")
+ sys.exit(1)
+ if args.touch and not args.incremental:
+ sys.stderr.write("The --incremental flag is required if the --touch flag is passed.")
+ sys.exit(1)
+
+ AssertAtTop()
+ RequireEnvVar("TARGET_PRODUCT")
+ RequireEnvVar("TARGET_BUILD_VARIANT")
+
+ # Out dir file names:
+ # - dir_prefix - The directory we'll put everything in (except for maybe the top level
+ # out/ dir).
+ # - *work_dir - The directory that we will build directly into. This is in dir_prefix
+ # unless --no-check-out-dir is set.
+ # - *out_dir - After building, if work_dir is different from out_dir, we move the out
+  #               directory to here so we can do the comparisons.
+ # - timestamp_* - Files we touch so we know the various phases between the builds, so we
+ # can compare timestamps of files.
+ if args.incremental:
+ dir_prefix = "out_incremental"
+ if args.check_out_dir:
+ first_work_dir = first_out_dir = dir_prefix + "/out"
+ second_work_dir = second_out_dir = dir_prefix + "/out"
+ else:
+ first_work_dir = first_out_dir = "out"
+ second_work_dir = second_out_dir = "out"
+ else:
+ dir_prefix = "out_full"
+ first_out_dir = dir_prefix + "/out_1"
+ second_out_dir = dir_prefix + "/out_2"
+ if not args.check_out_dir:
+ first_work_dir = second_work_dir = "out"
+ elif args.detect_embedded_paths:
+ first_work_dir = first_out_dir
+ second_work_dir = second_out_dir
+ else:
+ first_work_dir = dir_prefix + "/work"
+ second_work_dir = dir_prefix + "/work"
+ timestamp_start = dir_prefix + "/timestamp_start"
+ timestamp_between = dir_prefix + "/timestamp_between"
+ timestamp_end = dir_prefix + "/timestamp_end"
+
+ if args.run_build:
+ # Initial clean, if necessary
+ print("Cleaning " + dir_prefix + "/")
+ Clean(dir_prefix)
+ print("Cleaning out/")
+ Clean("out")
+ CreateEmptyFile(timestamp_start)
+ print("Running the first build in " + first_work_dir)
+ RunBuild(first_work_dir, first_out_dir, args.target, args.hide_build_output)
+ for f in args.touch:
+ print("Touching " + f)
+ TouchFile(f)
+ CreateEmptyFile(timestamp_between)
+ print("Running the second build in " + second_work_dir)
+ RunBuild(second_work_dir, second_out_dir, args.target, args.hide_build_output)
+ CreateEmptyFile(timestamp_end)
+ print("Done building")
+ print()
+
+ # Which out directories to scan
+ if args.subdirs is not None:
+ if args.subdirs:
+ subdirs = args.subdirs
+ else:
+ subdirs = DEFAULT_DIRS
+ first_files = ProductFiles(RequireBuildVar(first_out_dir, "PRODUCT_OUT"), subdirs)
+ second_files = ProductFiles(RequireBuildVar(second_out_dir, "PRODUCT_OUT"), subdirs)
+ else:
+ first_files = OutFiles(first_out_dir)
+ second_files = OutFiles(second_out_dir)
+
+ printer = Printer()
+
+ if args.incremental:
+ # Find files that were rebuilt unnecessarily
+ touched_incrementally = FindOutFilesTouchedAfter(first_files,
+ GetFileTimestamp(timestamp_between))
+ printer.PrintList("Touched in incremental build", touched_incrementally)
+ else:
+ # Compare the two out dirs
+ added, removed, changed = DiffFileList(first_files, second_files)
+ printer.PrintList("Added", added)
+ printer.PrintList("Removed", removed)
+ printer.PrintList("Changed", changed, "%s %s")
+
+ # Find files in the source tree that were touched
+ touched_during = FindSourceFilesTouchedAfter(GetFileTimestamp(timestamp_start))
+ printer.PrintList("Source files touched after start of build", touched_during)
+
+ # Find files and dirs that were output to "out" and didn't respect $OUT_DIR
+ if args.check_out_dir:
+ bad_out_dir_contents = FindFilesAndDirectories("out")
+ printer.PrintList("Files and directories created by rules that didn't respect $OUT_DIR",
+ bad_out_dir_contents)
+
+ # If we didn't find anything, print success message
+ if not printer.printed_anything:
+ print("No bad behaviors found.")
+
+
+def AssertAtTop():
+ """If the current directory is not the top of an android source tree, print an error
+ message and exit."""
+ if not os.access(SOONG_UI, os.X_OK):
+ sys.stderr.write("FAILED: Please run from the root of the tree.\n")
+ sys.exit(1)
+
+
+def RequireEnvVar(name):
+ """Gets an environment variable. If that fails, then print an error message and exit."""
+ result = os.environ.get(name)
+ if not result:
+ sys.stderr.write("error: Can't determine %s. Please run lunch first.\n" % name)
+ sys.exit(1)
+ return result
+
+
+def RunSoong(out_dir, args, capture_output):
+ env = dict(os.environ)
+ env["OUT_DIR"] = out_dir
+ args = [SOONG_UI,] + args
+ if capture_output:
+ proc = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    combined_output, _ = proc.communicate()
+ return proc.returncode, combined_output
+ else:
+ result = subprocess.run(args, env=env)
+ return result.returncode, None
+
+
+def GetBuildVar(out_dir, name):
+ """Gets a variable from the build system."""
+ returncode, output = RunSoong(out_dir, ["--dumpvar-mode", name], True)
+ if returncode != 0:
+ return None
+ else:
+ return output.decode("utf-8").strip()
+
+
+def RequireBuildVar(out_dir, name):
+ """Gets a variable from the builds system. If that fails, then print an error
+ message and exit."""
+ value = GetBuildVar(out_dir, name)
+ if not value:
+ sys.stderr.write("error: Can't determine %s. Please run lunch first.\n" % name)
+ sys.exit(1)
+ return value
+
+
+def Clean(directory):
+ """"Deletes the supplied directory."""
+ try:
+ shutil.rmtree(directory)
+ except FileNotFoundError:
+ pass
+
+
+def RunBuild(work_dir, out_dir, target, hide_build_output):
+ """Runs a build. If the build fails, prints a message and exits."""
+ returncode, output = RunSoong(work_dir,
+ ["--build-mode", "--all-modules", "--dir=" + os.getcwd(), target],
+ hide_build_output)
+ if work_dir != out_dir:
+ os.replace(work_dir, out_dir)
+ if returncode != 0:
+ if hide_build_output:
+ # The build output was hidden, so print it now for debugging
+ sys.stderr.buffer.write(output)
+ sys.stderr.write("FAILED: Build failed. Stopping.\n")
+ sys.exit(1)
+
+
+def DiffFileList(first_files, second_files):
+ """Examines the files.
+
+ Returns:
+ Filenames of files in first_filelist but not second_filelist (added files)
+ Filenames of files in second_filelist but not first_filelist (removed files)
+ 2-Tuple of filenames for the files that are in both but are different (changed files)
+ """
+ # List of files, relative to their respective PRODUCT_OUT directories
+ first_filelist = sorted([x for x in first_files], key=lambda x: x[1])
+ second_filelist = sorted([x for x in second_files], key=lambda x: x[1])
+
+ added = []
+ removed = []
+ changed = []
+
+ first_index = 0
+ second_index = 0
+
+ while first_index < len(first_filelist) and second_index < len(second_filelist):
+ # Path relative to source root and path relative to PRODUCT_OUT
+ first_full_filename, first_relative_filename = first_filelist[first_index]
+ second_full_filename, second_relative_filename = second_filelist[second_index]
+
+ if first_relative_filename < second_relative_filename:
+ # Removed
+ removed.append(first_full_filename)
+ first_index += 1
+ elif first_relative_filename > second_relative_filename:
+ # Added
+ added.append(second_full_filename)
+ second_index += 1
+ else:
+ # Both present
+ diff_type = DiffFiles(first_full_filename, second_full_filename)
+ if diff_type != DIFF_NONE:
+ changed.append((first_full_filename, second_full_filename))
+ first_index += 1
+ second_index += 1
+
+ while first_index < len(first_filelist):
+ first_full_filename, first_relative_filename = first_filelist[first_index]
+ removed.append(first_full_filename)
+ first_index += 1
+
+ while second_index < len(second_filelist):
+ second_full_filename, second_relative_filename = second_filelist[second_index]
+ added.append(second_full_filename)
+ second_index += 1
+
+ return (SortByTimestamp(added),
+ SortByTimestamp(removed),
+ SortByTimestamp(changed, key=lambda item: item[1]))
+
+
+def FindOutFilesTouchedAfter(files, timestamp):
+ """Find files in the given file iterator that were touched after timestamp."""
+ result = []
+ for full, relative in files:
+ ts = GetFileTimestamp(full)
+ if ts > timestamp:
+ result.append(TouchedFile(full, ts))
+ return [f.filename for f in sorted(result, key=lambda f: f.timestamp)]
+
+
+def GetFileTimestamp(filename):
+ """Get timestamp for a file (just wraps stat)."""
+ st = os.stat(filename, follow_symlinks=False)
+ return st.st_mtime
+
+
+def SortByTimestamp(items, key=lambda item: item):
+ """Sort the list by timestamp of files.
+ Args:
+ items - the list of items to sort
+ key - a function to extract a filename from each element in items
+ """
+ return [x[0] for x in sorted([(item, GetFileTimestamp(key(item))) for item in items],
+ key=lambda y: y[1])]
+
+
+def FindSourceFilesTouchedAfter(timestamp):
+ """Find files in the source tree that have changed after timestamp. Ignores
+ the out directory."""
+ result = []
+ for root, dirs, files in os.walk(".", followlinks=False):
+ if root == ".":
+ RemoveItemsFromList(dirs, (".repo", "out", "out_full", "out_incremental"))
+ for f in files:
+ full = os.path.sep.join((root, f))[2:]
+ ts = GetFileTimestamp(full)
+ if ts > timestamp:
+ result.append(TouchedFile(full, ts))
+ return [f.filename for f in sorted(result, key=lambda f: f.timestamp)]
+
+
+def FindFilesAndDirectories(directory):
+ """Finds all files and directories inside a directory."""
+ result = []
+ for root, dirs, files in os.walk(directory, followlinks=False):
+ result += [os.path.sep.join((root, x, "")) for x in dirs]
+ result += [os.path.sep.join((root, x)) for x in files]
+ return result
+
+
+def CreateEmptyFile(filename):
+ """Create an empty file with now as the timestamp at filename."""
+ try:
+ os.makedirs(os.path.dirname(filename))
+ except FileExistsError:
+ pass
+ open(filename, "w").close()
+ os.utime(filename)
+
+
+def TouchFile(filename):
+ os.utime(filename)
+
+
+def DiffFiles(first_filename, second_filename):
+ def AreFileContentsSame(remaining, first_filename, second_filename):
+ """Compare the file contents. They must be known to be the same size."""
+ CHUNK_SIZE = 32*1024
+ with open(first_filename, "rb") as first_file:
+ with open(second_filename, "rb") as second_file:
+ while remaining > 0:
+ size = min(CHUNK_SIZE, remaining)
+          if first_file.read(size) != second_file.read(size):
+ return False
+ remaining -= size
+ return True
+
+ first_stat = os.stat(first_filename, follow_symlinks=False)
+  second_stat = os.stat(second_filename, follow_symlinks=False)
+
+ # Mode bits
+ if first_stat.st_mode != second_stat.st_mode:
+ return DIFF_MODE
+
+ # File size
+ if first_stat.st_size != second_stat.st_size:
+ return DIFF_SIZE
+
+ # Contents
+ if stat.S_ISLNK(first_stat.st_mode):
+ if os.readlink(first_filename) != os.readlink(second_filename):
+ return DIFF_SYMLINK
+ elif stat.S_ISREG(first_stat.st_mode):
+ if not AreFileContentsSame(first_stat.st_size, first_filename, second_filename):
+ return DIFF_CONTENTS
+
+ return DIFF_NONE
+
+
+class FileIterator(object):
+ """Object that produces an iterator containing all files in a given directory.
+
+ Each iteration yields a tuple containing:
+
+ [0] (full) Path to file relative to source tree.
+ [1] (relative) Path to the file relative to the base directory given in the
+ constructor.
+ """
+
+ def __init__(self, base_dir):
+ self._base_dir = base_dir
+
+ def __iter__(self):
+ return self._Iterator(self, self._base_dir)
+
+ def ShouldIncludeFile(self, root, path):
+ return False
+
+ class _Iterator(object):
+ def __init__(self, parent, base_dir):
+ self._parent = parent
+ self._base_dir = base_dir
+ self._walker = os.walk(base_dir, followlinks=False)
+ self._current_index = 0
+ self._current_dir = []
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ # os.walk's iterator will eventually terminate by raising StopIteration
+ while True:
+ if self._current_index >= len(self._current_dir):
+ root, dirs, files = self._walker.__next__()
+ full_paths = [os.path.sep.join((root, f)) for f in files]
+ pairs = [(f, f[len(self._base_dir)+1:]) for f in full_paths]
+ self._current_dir = [(full, relative) for full, relative in pairs
+ if self._parent.ShouldIncludeFile(root, relative)]
+ self._current_index = 0
+ if not self._current_dir:
+ continue
+ index = self._current_index
+ self._current_index += 1
+ return self._current_dir[index]
+
+
+class OutFiles(FileIterator):
+ """Object that produces an iterator containing all files in a given out directory,
+ except for files which are known to be touched as part of build setup.
+ """
+ def __init__(self, out_dir):
+ super().__init__(out_dir)
+ self._out_dir = out_dir
+
+ def ShouldIncludeFile(self, root, relative):
+ # Skip files in root, although note that this could actually skip
+ # files that are sadly generated directly into that directory.
+ if root == self._out_dir:
+ return False
+ # Skiplist
+ for skip in BUILD_INTERNALS_PREFIX_SKIP:
+ if relative.startswith(skip):
+ return False
+ for skip in BUILD_INTERNALS_SUFFIX_SKIP:
+ if relative.endswith(skip):
+ return False
+ return True
+
+
+class ProductFiles(FileIterator):
+ """Object that produces an iterator containing files in listed subdirectories of $PRODUCT_OUT.
+ """
+ def __init__(self, product_out, subdirs):
+ super().__init__(product_out)
+ self._subdirs = subdirs
+
+ def ShouldIncludeFile(self, root, relative):
+ for subdir in self._subdirs:
+ if relative.startswith(subdir):
+ return True
+ return False
+
+
+class TouchedFile(object):
+ """A file in the out directory with a timestamp."""
+ def __init__(self, filename, timestamp):
+ self.filename = filename
+ self.timestamp = timestamp
+
+
+def RemoveItemsFromList(haystack, needles):
+ for needle in needles:
+ try:
+ haystack.remove(needle)
+ except ValueError:
+ pass
+
+
+class Printer(object):
+ def __init__(self):
+ self.printed_anything = False
+
+ def PrintList(self, title, items, fmt="%s"):
+ if items:
+ if self.printed_anything:
+ sys.stdout.write("\n")
+ sys.stdout.write("%s:\n" % title)
+ for item in items:
+ sys.stdout.write(" %s\n" % fmt % item)
+ self.printed_anything = True
+
+
+if __name__ == "__main__":
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+
+
+# vim: ts=2 sw=2 sts=2 nocindent
diff --git a/tools/exercise_compare_builds b/tools/exercise_compare_builds
new file mode 100755
index 0000000..38e8405
--- /dev/null
+++ b/tools/exercise_compare_builds
@@ -0,0 +1,66 @@
+#!/bin/bash
+# Tests for compare_builds.py
+# usage (from root of source tree):
+# build/make/tools/exercise_compare_builds
+
+HIDE_BUILD_OUTPUT=--hide-build-output
+
+function run()
+{
+ echo
+ echo
+ echo ============================================================
+ echo $1
+ shift
+ echo ./build/make/tools/compare_builds.py $HIDE_BUILD_OUTPUT --target incidentd $@
+ echo ============================================================
+ time ./build/make/tools/compare_builds.py $HIDE_BUILD_OUTPUT --target incidentd $@
+}
+
+function run_tests()
+{
+ # These should error out
+
+ run "Incremental build, Separate work dirs (invalid flag combo, should error out)" \
+ --incremental --detect-embedded-paths
+ run "Use out/ as work dir, Separate work dirs (invalid flag combo, should error out)" \
+ --no-check-out-dir --detect-embedded-paths
+
+ # Each grouping starts with a build, and the following ones use --no-build to save time
+
+ run "REBUILD: Full builds, Same work dir, Whole out dir"
+ run "Full builds, Same work dir, Default subdirs" \
+ --no-build --subdirs
+ run "Full builds, Same work dir, Only $PRODUCT_OUT/system" \
+ --no-build --subdirs system
+
+ run "REBUILD: Full builds, Use out/ as work dir, Whole out dir" \
+ --no-check-out-dir
+ run "Full builds, Use out/ as work dir, Default subdirs" \
+ --no-build --no-check-out-dir --subdirs
+ run "Full builds, Use out/ as work dir, Only $PRODUCT_OUT/system" \
+ --no-build --no-check-out-dir --subdirs system
+
+ run "REBUILD: Full builds, Separate work dirs, Whole out dir" \
+ --detect-embedded-paths
+ run "Full builds, Separate work dirs, Default subdirs" \
+ --no-build --detect-embedded-paths --subdirs
+ run "Full builds, Separate work dirs, Only $PRODUCT_OUT/system" \
+ --no-build --detect-embedded-paths --subdirs system
+
+ run "REBUILD: Incremental build, Same work dir, Whole out dir" \
+ --incremental
+ run "Incremental build, Same work dir, Default subdirs" \
+ --no-build --incremental --subdirs
+ run "Incremental build, Same work dir, Only $PRODUCT_OUT/system" \
+ --no-build --incremental --subdirs system
+
+ run "REBUILD: Incremental build, Use out/ as work dir, Whole out dir" \
+ --incremental --no-check-out-dir
+ run "Incremental build, Use out/ as work dir, Default subdirs" \
+ --no-build --incremental --no-check-out-dir --subdirs
+ run "Incremental build, Use out/ as work dir, Only $PRODUCT_OUT/system" \
+ --no-build --incremental --no-check-out-dir --subdirs system
+}
+
+time run_tests 2>&1 | tee exercise_compare_builds.txt
diff --git a/tools/product_config/inherit_tree.py b/tools/product_config/inherit_tree.py
new file mode 100755
index 0000000..ae8a275
--- /dev/null
+++ b/tools/product_config/inherit_tree.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+
+#
+# Run from the root of the tree, after product-config has been run to see
+# the product inheritance hierarchy for the current lunch target.
+#
+
+import csv
+import sys
+
+def PrintNodes(graph, node, prefix):
+ sys.stdout.write("%s%s" % (prefix, node))
+ children = graph.get(node, [])
+ if children:
+ sys.stdout.write(" {\n")
+ for child in sorted(graph.get(node, [])):
+ PrintNodes(graph, child, prefix + " ")
+ sys.stdout.write("%s}\n" % prefix);
+ else:
+ sys.stdout.write("\n")
+
+def main(argv):
+ if len(argv) != 2:
+ print("usage: inherit_tree.py out/$TARGET_PRODUCT-$TARGET_BUILD_VARIANT/dumpconfig.csv")
+ sys.exit(1)
+
+ root = None
+ graph = {}
+ with open(argv[1], newline='') as csvfile:
+ for line in csv.reader(csvfile):
+ if not root:
+ # Look for PRODUCTS
+ if len(line) < 3 or line[0] != "phase" or line[1] != "PRODUCTS":
+ continue
+ root = line[2]
+ else:
+ # Everything else
+ if len(line) < 3 or line[0] != "inherit":
+ continue
+ graph.setdefault(line[1], list()).append(line[2])
+
+ PrintNodes(graph, root, "")
+
+
+if __name__ == "__main__":
+ main(sys.argv)
+
+# vim: set expandtab ts=2 sw=2 sts=2:
+
diff --git a/tools/product_config/src/com/android/build/config/ConfigBase.java b/tools/product_config/src/com/android/build/config/ConfigBase.java
new file mode 100644
index 0000000..9a81011
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ConfigBase.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Common parts between MakeConfig and the to-be-added GenericConfig, BazelConfig and SoongConfig.
+ */
+public class ConfigBase {
+ protected String mPhase;
+ protected List<String> mRootNodes;
+
+ /**
+     * State of the make variable environment from before the first config file.
+ */
+ protected Map<String, Str> mInitialVariables = new HashMap();
+
+ /**
+     * State of the make variable environment from after the first config file.
+ */
+ protected Map<String, Str> mFinalVariables = new HashMap();
+
+
+ /**
+ * The variables that are handled specially.
+ */
+ protected final TreeMap<String, VarType> mProductVars = new TreeMap();
+
+ public void setPhase(String phase) {
+ mPhase = phase;
+ }
+
+ public String getPhase() {
+ return mPhase;
+ }
+
+ public void setRootNodes(List<String> filenames) {
+ mRootNodes = new ArrayList(filenames);
+ }
+
+ public List<String> getRootNodes() {
+ return mRootNodes;
+ }
+
+ public void addProductVar(String name, VarType type) {
+ mProductVars.put(name, type);
+ }
+
+ public TreeMap<String, VarType> getProductVars() {
+ return mProductVars;
+ }
+
+ public VarType getVarType(String name) {
+ final VarType t = mProductVars.get(name);
+ if (t != null) {
+ return t;
+ } else {
+ return VarType.UNKNOWN;
+ }
+ }
+
+ public boolean isProductVar(String name) {
+ return mProductVars.get(name) != null;
+ }
+
+ /**
+     * Return the state of the make variable environment from before the first config file.
+ */
+ public Map<String, Str> getInitialVariables() {
+ return mInitialVariables;
+ }
+
+ /**
+     * Return the state of the make variable environment from after the last config file.
+ */
+ public Map<String, Str> getFinalVariables() {
+ return mFinalVariables;
+ }
+
+ /**
+ * Copy common base class fields from that to this.
+ */
+ public void copyFrom(ConfigBase that) {
+ setPhase(that.getPhase());
+ setRootNodes(that.getRootNodes());
+ for (Map.Entry<String, VarType> entry: that.getProductVars().entrySet()) {
+ addProductVar(entry.getKey(), entry.getValue());
+ }
+ mInitialVariables = new HashMap(that.getInitialVariables());
+ mFinalVariables = new HashMap(that.getFinalVariables());
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java b/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java
new file mode 100644
index 0000000..39bd5df
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Converts a MakeConfig into a Generic config by applying heuristics about
+ * the types of variable assignments that we do.
+ */
+public class ConvertMakeToGenericConfig {
+ private final Errors mErrors;
+
+ public ConvertMakeToGenericConfig(Errors errors) {
+ mErrors = errors;
+ }
+
+ public GenericConfig convert(Map<String, MakeConfig> make) {
+ final GenericConfig result = new GenericConfig();
+
+ final MakeConfig products = make.get("PRODUCTS");
+ if (products == null) {
+ mErrors.ERROR_DUMPCONFIG.add("Could not find PRODUCTS phase in dumpconfig output.");
+ return null;
+ }
+
+ // Base class fields
+ result.copyFrom(products);
+
+ // Each file
+ for (MakeConfig.ConfigFile f: products.getConfigFiles()) {
+ final GenericConfig.ConfigFile genericFile
+ = new GenericConfig.ConfigFile(f.getFilename());
+ result.addConfigFile(genericFile);
+
+ final List<MakeConfig.Block> blocks = f.getBlocks();
+
+ // Some assertions:
+ // TODO: Include better context for these errors.
+ // There should always be at least a BEGIN and an AFTER, so assert this.
+ if (blocks.size() < 2) {
+                throw new RuntimeException("expected at least blocks.size() >= 2. Actual size: "
+ + blocks.size());
+ }
+ if (blocks.get(0).getBlockType() != MakeConfig.BlockType.BEFORE) {
+ throw new RuntimeException("expected first block to be BEFORE");
+ }
+ if (blocks.get(blocks.size() - 1).getBlockType() != MakeConfig.BlockType.AFTER) {
+                throw new RuntimeException("expected last block to be AFTER");
+ }
+ // Everything in between should be an INHERIT block.
+ for (int index = 1; index < blocks.size() - 1; index++) {
+ if (blocks.get(index).getBlockType() != MakeConfig.BlockType.INHERIT) {
+ throw new RuntimeException("expected INHERIT at block " + index);
+ }
+ }
+
+ // Each block represents a snapshot of the interpreter variable state (minus a few big
+ // sets of variables which we don't export because they're used in the internals
+ // of node_fns.mk, so we know they're not necessary here). The first (BEFORE) one
+ // is everything that is set before the file is included, so it forms the base
+ // for everything else.
+ MakeConfig.Block prevBlock = blocks.get(0);
+
+ for (int index = 1; index < blocks.size(); index++) {
+ final MakeConfig.Block block = blocks.get(index);
+ for (final Map.Entry<String, Str> entry: block.getVars().entrySet()) {
+ final String varName = entry.getKey();
+ final GenericConfig.Assign assign = convertAssignment(block.getBlockType(),
+ block.getInheritedFile(), products.getVarType(varName), varName,
+ entry.getValue(), prevBlock.getVar(varName));
+ if (assign != null) {
+ genericFile.addStatement(assign);
+ }
+ }
+ // Handle variables that are in prevBlock but not block -- they were
+ // deleted. Is this even possible, or do they show up as ""? We will
+            // treat them as positive assignments to empty string
+ for (String prevName: prevBlock.getVars().keySet()) {
+ if (!block.getVars().containsKey(prevName)) {
+ genericFile.addStatement(
+ new GenericConfig.Assign(prevName, new Str("")));
+ }
+ }
+ if (block.getBlockType() == MakeConfig.BlockType.INHERIT) {
+ genericFile.addStatement(
+ new GenericConfig.Inherit(block.getInheritedFile()));
+ }
+ // For next iteration
+ prevBlock = block;
+ }
+ }
+
+        // Overwrite the final variables with the ones that come from the PRODUCT-EXPAND phase.
+ // Drop the ones that were newly defined between the two phases, but leave values
+ // that were modified between. We do need to reproduce that logic in this tool.
+ final MakeConfig expand = make.get("PRODUCT-EXPAND");
+ if (expand == null) {
+ mErrors.ERROR_DUMPCONFIG.add("Could not find PRODUCT-EXPAND phase in dumpconfig"
+ + " output.");
+ return null;
+ }
+ final Map<String, Str> productsFinal = products.getFinalVariables();
+ final Map<String, Str> expandInitial = expand.getInitialVariables();
+ final Map<String, Str> expandFinal = expand.getFinalVariables();
+ final Map<String, Str> finalFinal = result.getFinalVariables();
+ finalFinal.clear();
+ for (Map.Entry<String, Str> var: expandFinal.entrySet()) {
+ final String varName = var.getKey();
+ if (expandInitial.containsKey(varName) && !productsFinal.containsKey(varName)) {
+ continue;
+ }
+ finalFinal.put(varName, var.getValue());
+ }
+
+ return result;
+ }
+
+ /**
+ * Converts one variable from a MakeConfig Block into a GenericConfig Assignment.
+ */
+ GenericConfig.Assign convertAssignment(MakeConfig.BlockType blockType, Str inheritedFile,
+ VarType varType, String varName, Str varVal, Str prevVal) {
+ if (prevVal == null) {
+ // New variable.
+ return new GenericConfig.Assign(varName, varVal);
+ } else if (!varVal.equals(prevVal)) {
+ // The value changed from the last block.
+ if (varVal.length() == 0) {
+ // It was set to empty
+ return new GenericConfig.Assign(varName, varVal);
+ } else {
+ // Product vars have the @inherit processing. Other vars we
+ // will just ignore and put in one section at the end, based
+ // on the difference between the BEFORE and AFTER blocks.
+ if (varType == VarType.UNKNOWN) {
+ if (blockType == MakeConfig.BlockType.AFTER) {
+ // For UNKNOWN variables, we don't worry about the
+ // intermediate steps, just take the final value.
+ return new GenericConfig.Assign(varName, varVal);
+ } else {
+ return null;
+ }
+ } else {
+ return convertInheritedVar(blockType, inheritedFile,
+ varName, varVal, prevVal);
+ }
+ }
+ } else {
+ // Variable not touched
+ return null;
+ }
+ }
+
+ /**
+ * Handle the special inherited values, where the inherit-product puts in the
+ * @inherit:... markers, adding Statements to the ConfigFile.
+ */
+ GenericConfig.Assign convertInheritedVar(MakeConfig.BlockType blockType, Str inheritedFile,
+ String varName, Str varVal, Str prevVal) {
+ String varText = varVal.toString();
+ String prevText = prevVal.toString().trim();
+ if (blockType == MakeConfig.BlockType.INHERIT) {
+ // inherit-product appends @inherit:... so drop that.
+ final String marker = "@inherit:" + inheritedFile;
+ if (varText.endsWith(marker)) {
+ varText = varText.substring(0, varText.length() - marker.length()).trim();
+ } else {
+ mErrors.ERROR_IMPROPER_PRODUCT_VAR_MARKER.add(varVal.getPosition(),
+ "Variable didn't end with marker \"" + marker + "\": " + varText);
+ }
+ }
+
+ if (!varText.equals(prevText)) {
+ // If the variable value was actually changed.
+ final ArrayList<String> words = split(varText, prevText);
+ if (words.size() == 0) {
+ // Pure Assignment, none of the previous value is present.
+ return new GenericConfig.Assign(varName, new Str(varVal.getPosition(), varText));
+ } else {
+ // Self referential value (prepend, append, both).
+ if (words.size() > 2) {
+ // This is indicative of a construction that might not be quite
+ // what we want. The above code will do something that works if it was
+                    // of the form "VAR := a $(VAR) b $(VAR) c", but if the original code was
+                    // something else this won't work. This doesn't happen in AOSP, but
+                    // it's a theoretical possibility, so someone might do it.
+ mErrors.WARNING_VARIABLE_RECURSION.add(varVal.getPosition(),
+ "Possible unsupported variable recursion: "
+ + varName + " = " + varVal + " (prev=" + prevVal + ")");
+ }
+ return new GenericConfig.Assign(varName, Str.toList(varVal.getPosition(), words));
+ }
+ } else {
+ // Variable not touched
+ return null;
+ }
+ }
+
+ /**
+ * Split 'haystack' on occurrences of 'needle'. Trims each string of whitespace
+ * to preserve make list semantics.
+ */
+ private static ArrayList<String> split(String haystack, String needle) {
+ final ArrayList<String> result = new ArrayList();
+ final int needleLen = needle.length();
+ if (needleLen == 0) {
+ return result;
+ }
+ int start = 0;
+ int end;
+ while ((end = haystack.indexOf(needle, start)) >= 0) {
+ result.add(haystack.substring(start, end).trim());
+ start = end + needleLen;
+ }
+ result.add(haystack.substring(start).trim());
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/CsvParser.java b/tools/product_config/src/com/android/build/config/CsvParser.java
new file mode 100644
index 0000000..1c8b9c3
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/CsvParser.java
@@ -0,0 +1,242 @@
+
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A CSV parser.
+ */
+public class CsvParser {
+ /**
+ * Internal string buffer grows by this amount.
+ */
+ private static final int CHUNK_SIZE = 64 * 1024;
+
+ /**
+ * Error parsing.
+ */
+ public static class ParseException extends Exception {
+ private int mLine;
+ private int mColumn;
+
+ public ParseException(int line, int column, String message) {
+ super(message);
+ mLine = line;
+ mColumn = column;
+ }
+
+ /**
+ * Line number in source file.
+ */
+ public int getLine() {
+ return mLine;
+ }
+
+ /**
+ * Column in source file.
+ */
+ public int getColumn() {
+ return mColumn;
+ }
+ }
+
+ public static class Line {
+ private final int mLineNumber;
+ private final List<String> mFields;
+
+ Line(int lineno, List<String> fields) {
+ mLineNumber = lineno;
+ mFields = fields;
+ }
+
+ public int getLine() {
+ return mLineNumber;
+ }
+
+ public List<String> getFields() {
+ return mFields;
+ }
+ }
+
+ // Parser States
+ private static final int STATE_START_LINE = 0;
+ private static final int STATE_START_FIELD = 1;
+ private static final int STATE_INSIDE_QUOTED_FIELD = 2;
+ private static final int STATE_FIRST_QUOTATION_MARK = 3;
+ private static final int STATE_INSIDE_UNQUOTED_FIELD = 4;
+ private static final int STATE_DONE = 5;
+
+ // Parser Actions
+ private static final int ACTION_APPEND_CHAR = 1;
+ private static final int ACTION_FIELD_COMPLETE = 2;
+ private static final int ACTION_LINE_COMPLETE = 4;
+
+ /**
+ * Constructor.
+ */
+ private CsvParser() {
+ }
+
+ /**
+ * Reads CSV and returns a list of Line objects.
+ *
+ * Handles newlines inside fields quoted with double quotes (").
+ *
+ * Doesn't report blank lines, but does include empty fields.
+ */
+ public static List<Line> parse(Reader reader)
+ throws ParseException, IOException {
+ ArrayList<Line> result = new ArrayList();
+ int line = 1;
+ int column = 1;
+ int pos = 0;
+ char[] buf = new char[CHUNK_SIZE];
+ HashMap<String,String> stringPool = new HashMap();
+ ArrayList<String> fields = new ArrayList();
+
+ int state = STATE_START_LINE;
+ while (state != STATE_DONE) {
+ int c = reader.read();
+ int action = 0;
+
+ if (state == STATE_START_LINE) {
+ if (c <= 0) {
+ // No data, skip ACTION_LINE_COMPLETE.
+ state = STATE_DONE;
+ } else if (c == '"') {
+ state = STATE_INSIDE_QUOTED_FIELD;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ // Consume the newline, state stays STATE_START_LINE.
+ } else {
+ action = ACTION_APPEND_CHAR;
+ state = STATE_INSIDE_UNQUOTED_FIELD;
+ }
+ } else if (state == STATE_START_FIELD) {
+ if (c <= 0) {
+ // Field will be empty
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_DONE;
+ } else if (c == '"') {
+ state = STATE_INSIDE_QUOTED_FIELD;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_START_LINE;
+ } else {
+ action = ACTION_APPEND_CHAR;
+ state = STATE_INSIDE_UNQUOTED_FIELD;
+ }
+ } else if (state == STATE_INSIDE_QUOTED_FIELD) {
+ if (c <= 0) {
+ throw new ParseException(line, column,
+ "Bad input: End of input inside quoted field.");
+ } else if (c == '"') {
+ state = STATE_FIRST_QUOTATION_MARK;
+ } else {
+ action = ACTION_APPEND_CHAR;
+ }
+ } else if (state == STATE_FIRST_QUOTATION_MARK) {
+ if (c <= 0) {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_DONE;
+ } else if (c == '"') {
+ action = ACTION_APPEND_CHAR;
+ state = STATE_INSIDE_QUOTED_FIELD;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_START_LINE;
+ } else {
+ throw new ParseException(line, column,
+ "Bad input: Character after field ended or unquoted '\"'.");
+ }
+ } else if (state == STATE_INSIDE_UNQUOTED_FIELD) {
+ if (c <= 0) {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_DONE;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_START_LINE;
+ } else {
+ action = ACTION_APPEND_CHAR;
+ }
+ }
+
+ if ((action & ACTION_APPEND_CHAR) != 0) {
+ // Reallocate buffer if necessary. Hopefully not often because CHUNK_SIZE is big.
+ if (pos >= buf.length) {
+ char[] old = buf;
+ buf = new char[old.length + CHUNK_SIZE];
+ System.arraycopy(old, 0, buf, 0, old.length);
+ }
+ // Store the character
+ buf[pos] = (char)c;
+ pos++;
+ }
+ if ((action & ACTION_FIELD_COMPLETE) != 0) {
+ // A lot of the strings are duplicated, so pool them to reduce peak memory
+ // usage. This could be made slightly better by having a custom key class
+ // that does the lookup without making a new String that gets immediately
+ // thrown away.
+ String field = new String(buf, 0, pos);
+ final String cached = stringPool.get(field);
+ if (cached == null) {
+ stringPool.put(field, field);
+ } else {
+ field = cached;
+ }
+ fields.add(field);
+ pos = 0;
+ }
+ if ((action & ACTION_LINE_COMPLETE) != 0) {
+ // Only report lines with any contents
+ if (fields.size() > 0) {
+ result.add(new Line(line, fields));
+ fields = new ArrayList();
+ }
+ }
+
+ if (c == '\n') {
+ line++;
+ column = 1;
+ } else {
+ column++;
+ }
+ }
+
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/DumpConfigParser.java b/tools/product_config/src/com/android/build/config/DumpConfigParser.java
new file mode 100644
index 0000000..c4cd963
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/DumpConfigParser.java
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * Parses the output of ckati building build/make/core/dumpconfig.mk.
+ *
+ * The format is as follows:
+ * - All processed lines are colon (':') separated fields.
+ * - Lines before the dumpconfig_version line are dropped for forward compatibility
+ * - Lines where the first field is config_var describe variables declared in makefiles
+ * (implemented by the dump-config-vals macro)
+ * Field Description
+ * 0 "config_var" row type
+ * 1 Product makefile being processed
+ * 2 The variable name
+ * 3 The value of the variable
+ * 4 The location of the variable, as best tracked by kati
+ */
+public class DumpConfigParser {
+ private static final boolean DEBUG = false;
+
+ private final Errors mErrors;
+ private final String mFilename;
+ private final Reader mReader;
+
+ private final Map<String,MakeConfig> mResults = new HashMap();
+
+ private static final Pattern LIST_SEPARATOR = Pattern.compile("\\s+");
+
+ /**
+ * Constructor.
+ */
+ private DumpConfigParser(Errors errors, String filename, Reader reader) {
+ mErrors = errors;
+ mFilename = filename;
+ mReader = reader;
+ }
+
+ /**
+ * Parse the text into a map of the phase names to MakeConfig objects.
+ */
+ public static Map<String,MakeConfig> parse(Errors errors, String filename, Reader reader)
+ throws CsvParser.ParseException, IOException {
+ DumpConfigParser parser = new DumpConfigParser(errors, filename, reader);
+ parser.parseImpl();
+ return parser.mResults;
+ }
+
+ /**
+ * Parse the input.
+ */
+ private void parseImpl() throws CsvParser.ParseException, IOException {
+ final List<CsvParser.Line> lines = CsvParser.parse(mReader);
+ final int lineCount = lines.size();
+ int index = 0;
+
+ int dumpconfigVersion = 0;
+
+        // Ignore lines until we get a dumpconfig_version line for forward compatibility.
+ // In a previous life, this loop parsed from all of kati's stdout, not just the file
+ // that dumpconfig.mk writes, but it's harmless to leave this loop in. It gives us a
+        // little bit of flexibility which we probably won't need anyway, since this tool
+ // won't diverge from dumpconfig.mk anyway.
+ for (; index < lineCount; index++) {
+ final CsvParser.Line line = lines.get(index);
+ final List<String> fields = line.getFields();
+
+ if (matchLineType(line, "dumpconfig_version", 1)) {
+ try {
+ dumpconfigVersion = Integer.parseInt(fields.get(1));
+ } catch (NumberFormatException ex) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Couldn't parse dumpconfig_version: " + fields.get(1));
+ }
+ break;
+ }
+ }
+
+ // If we never saw dumpconfig_version, there's a problem with the command, so stop.
+ if (dumpconfigVersion == 0) {
+ mErrors.ERROR_DUMPCONFIG.fatal(
+ new Position(mFilename),
+ "Never saw a valid dumpconfig_version line.");
+ }
+
+ // Any lines before the start signal will be dropped. We create garbage objects
+ // here to avoid having to check for null everywhere.
+ MakeConfig makeConfig = new MakeConfig();
+ MakeConfig.ConfigFile configFile = new MakeConfig.ConfigFile("<ignored>");
+ MakeConfig.Block block = new MakeConfig.Block(MakeConfig.BlockType.UNSET);
+ Map<String, Str> initialVariables = new HashMap();
+ Map<String, Str> finalVariables = new HashMap();
+
+ // Number of "phases" we've seen so far.
+ for (; index < lineCount; index++) {
+ final CsvParser.Line line = lines.get(index);
+ final List<String> fields = line.getFields();
+ final String lineType = fields.get(0);
+
+ if (matchLineType(line, "phase", 2)) {
+ // Start the new one
+ makeConfig = new MakeConfig();
+ makeConfig.setPhase(fields.get(1));
+ makeConfig.setRootNodes(splitList(fields.get(2)));
+ // If there is a duplicate phase of the same name, continue parsing, but
+ // don't add it. Emit a warning.
+ if (!mResults.containsKey(makeConfig.getPhase())) {
+ mResults.put(makeConfig.getPhase(), makeConfig);
+ } else {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Duplicate phase: " + makeConfig.getPhase()
+ + ". This one will be dropped.");
+ }
+ initialVariables = makeConfig.getInitialVariables();
+ finalVariables = makeConfig.getFinalVariables();
+
+ if (DEBUG) {
+ System.out.println("PHASE:");
+ System.out.println(" " + makeConfig.getPhase());
+ System.out.println(" " + makeConfig.getRootNodes());
+ }
+ } else if (matchLineType(line, "var", 2)) {
+ final VarType type = "list".equals(fields.get(1)) ? VarType.LIST : VarType.SINGLE;
+ makeConfig.addProductVar(fields.get(2), type);
+
+ if (DEBUG) {
+ System.out.println(" VAR: " + type + " " + fields.get(2));
+ }
+ } else if (matchLineType(line, "import", 1)) {
+ final List<String> importStack = splitList(fields.get(1));
+ if (importStack.size() == 0) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "'import' line with empty include stack.");
+ continue;
+ }
+
+ // The beginning of importing a new file.
+ configFile = new MakeConfig.ConfigFile(importStack.get(0));
+ if (makeConfig.addConfigFile(configFile) != null) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Duplicate file imported in section: " + configFile.getFilename());
+ }
+ // We expect a Variable block next.
+ block = new MakeConfig.Block(MakeConfig.BlockType.BEFORE);
+ configFile.addBlock(block);
+
+ if (DEBUG) {
+ System.out.println(" IMPORT: " + configFile.getFilename());
+ }
+ } else if (matchLineType(line, "inherit", 2)) {
+ final String currentFile = fields.get(1);
+ final String inheritedFile = fields.get(2);
+ if (!configFile.getFilename().equals(currentFile)) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Unexpected current file in 'inherit' line '" + currentFile
+ + "' while processing '" + configFile.getFilename() + "'");
+ continue;
+ }
+
+ // There is already a file in progress, so add another var block to that.
+ block = new MakeConfig.Block(MakeConfig.BlockType.INHERIT);
+ // TODO: Make dumpconfig.mk also output a Position for inherit-product
+ block.setInheritedFile(new Str(inheritedFile));
+ configFile.addBlock(block);
+
+ if (DEBUG) {
+ System.out.println(" INHERIT: " + inheritedFile);
+ }
+ } else if (matchLineType(line, "imported", 1)) {
+ final List<String> importStack = splitList(fields.get(1));
+ if (importStack.size() == 0) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "'imported' line with empty include stack.");
+ continue;
+ }
+ final String currentFile = importStack.get(0);
+ if (!configFile.getFilename().equals(currentFile)) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Unexpected current file in 'imported' line '" + currentFile
+ + "' while processing '" + configFile.getFilename() + "'");
+ continue;
+ }
+
+ // There is already a file in progress, so add another var block to that.
+ // This will be the last one, but will check that after parsing.
+ block = new MakeConfig.Block(MakeConfig.BlockType.AFTER);
+ configFile.addBlock(block);
+
+ if (DEBUG) {
+ System.out.println(" AFTER: " + currentFile);
+ }
+ } else if (matchLineType(line, "val", 5)) {
+ final String productMakefile = fields.get(1);
+ final String blockTypeString = fields.get(2);
+ final String varName = fields.get(3);
+ final String varValue = fields.get(4);
+ final Position pos = Position.parse(fields.get(5));
+ final Str str = new Str(pos, varValue);
+
+ if (blockTypeString.equals("initial")) {
+ initialVariables.put(varName, str);
+ } else if (blockTypeString.equals("final")) {
+ finalVariables.put(varName, str);
+ } else {
+ if (!productMakefile.equals(configFile.getFilename())) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Mismatched 'val' product makefile."
+ + " Expected: " + configFile.getFilename()
+ + " Saw: " + productMakefile);
+ continue;
+ }
+
+ final MakeConfig.BlockType blockType = parseBlockType(line, blockTypeString);
+ if (blockType == null) {
+ continue;
+ }
+ if (blockType != block.getBlockType()) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Mismatched 'val' block type."
+ + " Expected: " + block.getBlockType()
+ + " Saw: " + blockType);
+ }
+
+ // Add the variable to the block in progress
+ block.addVar(varName, str);
+ }
+ } else {
+ if (DEBUG) {
+ System.out.print("# ");
+ for (int d = 0; d < fields.size(); d++) {
+ System.out.print(fields.get(d));
+ if (d != fields.size() - 1) {
+ System.out.print(",");
+ }
+ }
+ System.out.println();
+ }
+ }
+ }
+ }
+
+ /**
+ * Return true if the line type matches 'lineType' and there are at least 'fieldCount'
+ * fields (not including the first field which is the line type).
+ */
+ private boolean matchLineType(CsvParser.Line line, String lineType, int fieldCount) {
+ final List<String> fields = line.getFields();
+ if (!lineType.equals(fields.get(0))) {
+ return false;
+ }
+ if (fields.size() < (fieldCount + 1)) {
+ mErrors.WARNING_DUMPCONFIG.add(new Position(mFilename, line.getLine()),
+ fields.get(0) + " line has " + fields.size() + " fields. Expected at least "
+ + (fieldCount + 1) + " fields.");
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Split a string with space separated items (i.e. the make list format) into a List<String>.
+ */
+ private static List<String> splitList(String text) {
+ // Arrays.asList returns a fixed-length List, so we copy it into an ArrayList to not
+ // propagate that surprise detail downstream.
+ return new ArrayList(Arrays.asList(LIST_SEPARATOR.split(text.trim())));
+ }
+
+ /**
+     * Parse a BlockType or issue a warning if it can't be parsed.
+ */
+ private MakeConfig.BlockType parseBlockType(CsvParser.Line line, String text) {
+ if ("before".equals(text)) {
+ return MakeConfig.BlockType.BEFORE;
+ } else if ("inherit".equals(text)) {
+ return MakeConfig.BlockType.INHERIT;
+ } else if ("after".equals(text)) {
+ return MakeConfig.BlockType.AFTER;
+ } else {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Invalid block type: " + text);
+ return null;
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/ErrorReporter.java b/tools/product_config/src/com/android/build/config/ErrorReporter.java
index 5d87636..0a0c9f4 100644
--- a/tools/product_config/src/com/android/build/config/ErrorReporter.java
+++ b/tools/product_config/src/com/android/build/config/ErrorReporter.java
@@ -171,7 +171,7 @@
/**
* An instance of an error happening.
*/
- public class Entry {
+ public static class Entry {
private final Category mCategory;
private final Position mPosition;
private final String mMessage;
diff --git a/tools/product_config/src/com/android/build/config/Errors.java b/tools/product_config/src/com/android/build/config/Errors.java
index 63792c8..b333e78 100644
--- a/tools/product_config/src/com/android/build/config/Errors.java
+++ b/tools/product_config/src/com/android/build/config/Errors.java
@@ -30,7 +30,7 @@
* <b>Naming Convention:</b>
* <ul>
* <li>ERROR_ for Categories with isLevelSettable false and Level.ERROR
- * <li>WARNING_ for Categories with isLevelSettable false and default WARNING or HIDDEN
+ * <li>WARNING_ for Categories with isLevelSettable true and default WARNING or HIDDEN
* <li>Don't have isLevelSettable true and not ERROR. (The constructor asserts this).
* </ul>
*/
@@ -42,4 +42,33 @@
public final Category WARNING_UNKNOWN_COMMAND_LINE_ERROR = new Category(2, true, Level.HIDDEN,
"Passing unknown errors on the command line. Hidden by default for\n"
+ "forward compatibility.");
+
+ public final Category ERROR_KATI = new Category(3, false, Level.ERROR,
+ "Error executing or reading from Kati.");
+
+ public final Category WARNING_DUMPCONFIG = new Category(4, true, Level.WARNING,
+ "Anomaly parsing the output of kati and dumpconfig.mk.");
+
+ public final Category ERROR_DUMPCONFIG = new Category(5, false, Level.ERROR,
+ "Error parsing the output of kati and dumpconfig.mk.");
+
+ public final Category WARNING_VARIABLE_RECURSION = new Category(6, true, Level.WARNING,
+ "Possible unsupported variable recursion.");
+
+ // This could be a warning, but it's very likely that the data is corrupted somehow
+ // if we're seeing this.
+ public final Category ERROR_IMPROPER_PRODUCT_VAR_MARKER = new Category(7, true, Level.ERROR,
+ "Bad input from dumpvars causing corrupted product variables.");
+
+ public final Category ERROR_MISSING_CONFIG_FILE = new Category(8, true, Level.ERROR,
+ "Unable to find config file.");
+
+ public final Category ERROR_INFINITE_RECURSION = new Category(9, true, Level.ERROR,
+ "A file tries to inherit-product from itself or its own inherited products.");
+
+ // TODO: This will become obsolete when it is possible to have starlark-based product
+ // config files.
+ public final Category WARNING_DIFFERENT_FROM_KATI = new Category(1000, true, Level.WARNING,
+ "The cross-check with the original kati implementation failed.");
+
}
diff --git a/tools/product_config/src/com/android/build/config/FlatConfig.java b/tools/product_config/src/com/android/build/config/FlatConfig.java
new file mode 100644
index 0000000..6f277fe
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/FlatConfig.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Flattened configuration -- set of variables after all assignments and inherits have
+ * been executed.
+ */
+public class FlatConfig extends ConfigBase {
+
+ private final TreeMap<String, Value> mValues = new TreeMap();
+
+ public TreeMap<String, Value> getValues() {
+ return mValues;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/FlattenConfig.java b/tools/product_config/src/com/android/build/config/FlattenConfig.java
new file mode 100644
index 0000000..a19802b
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/FlattenConfig.java
@@ -0,0 +1,474 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.regex.Pattern;
+
+public class FlattenConfig {
+ private static final Pattern RE_SPACE = Pattern.compile("\\p{Space}+");
+ private static final String PRODUCTS_PREFIX = "PRODUCTS";
+
+ private final Errors mErrors;
+ private final GenericConfig mGenericConfig;
+ private final Map<String, GenericConfig.ConfigFile> mGenericConfigs;
+ private final FlatConfig mResult = new FlatConfig();
+ private final Map<String, Value> mVariables;
+ /**
+ * Files that have been visited, to prevent infinite recursion. There are no
+ * conditionals at this point in the processing, so we don't need a stack, just
+ * a single set.
+ */
+ private final Set<Str> mStack = new HashSet();
+
+
+ private FlattenConfig(Errors errors, GenericConfig genericConfig) {
+ mErrors = errors;
+ mGenericConfig = genericConfig;
+ mGenericConfigs = genericConfig.getFiles();
+ mVariables = mResult.getValues();
+
+ // Base class fields
+ mResult.copyFrom(genericConfig);
+ }
+
+ /**
+ * Flatten a GenericConfig to a FlatConfig.
+ *
+ * Makes three passes through the genericConfig, one to flatten the single variables,
+ * one to flatten the list variables, and one to flatten the unknown variables. Each
+ * has a slightly different algorithm.
+ */
+ public static FlatConfig flatten(Errors errors, GenericConfig genericConfig) {
+ final FlattenConfig flattener = new FlattenConfig(errors, genericConfig);
+ return flattener.flattenImpl();
+ }
+
+ private FlatConfig flattenImpl() {
+ final List<String> rootNodes = mGenericConfig.getRootNodes();
+ if (rootNodes.size() == 0) {
+ mErrors.ERROR_DUMPCONFIG.add("No root nodes in PRODUCTS phase.");
+ return null;
+ } else if (rootNodes.size() != 1) {
+ final StringBuilder msg = new StringBuilder(
+ "Ignoring extra root nodes in PRODUCTS phase. All nodes are:");
+ for (final String rn: rootNodes) {
+ msg.append(' ');
+ msg.append(rn);
+ }
+ mErrors.WARNING_DUMPCONFIG.add(msg.toString());
+ }
+ final String root = rootNodes.get(0);
+
+ // TODO: Do we need to worry about the initial state of variables? Anything
+ // that comes from the product config
+
+ flattenListVars(root);
+ flattenSingleVars(root);
+ flattenUnknownVars(root);
+ flattenInheritsFrom(root);
+
+ setDefaultKnownVars();
+
+ // TODO: This only supports the single product mode of import-nodes, which is all the
+ // real build does. m product-graph and friends will have to be rewritten.
+ mVariables.put("PRODUCTS", new Value(VarType.UNKNOWN, new Str(root)));
+
+ return mResult;
+ }
+
+ interface AssignCallback {
+ void onAssignStatement(GenericConfig.Assign assign);
+ }
+
+ interface InheritCallback {
+ void onInheritStatement(GenericConfig.Inherit assign);
+ }
+
+ /**
+ * Do a bunch of validity checks, and then iterate through each of the statements
+ * in the given file. For Assignments, the callback is only called for variables
+ * matching varType.
+ *
+ * Adds makefiles which have been traversed to the 'seen' set, and will not traverse
+ * into an inherit statement if its makefile has already been seen.
+ */
+ private void forEachStatement(Str filename, VarType varType, Set<String> seen,
+ AssignCallback assigner, InheritCallback inheriter) {
+ if (mStack.contains(filename)) {
+ mErrors.ERROR_INFINITE_RECURSION.add(filename.getPosition(),
+ "File is already in the inherit-product stack: " + filename);
+ return;
+ }
+
+ mStack.add(filename);
+ try {
+ final GenericConfig.ConfigFile genericFile = mGenericConfigs.get(filename.toString());
+
+ if (genericFile == null) {
+ mErrors.ERROR_MISSING_CONFIG_FILE.add(filename.getPosition(),
+ "Unable to find config file: " + filename);
+ return;
+ }
+
+ for (final GenericConfig.Statement statement: genericFile.getStatements()) {
+ if (statement instanceof GenericConfig.Assign) {
+ if (assigner != null) {
+ final GenericConfig.Assign assign = (GenericConfig.Assign)statement;
+ final String varName = assign.getName();
+
+ // Assert that we're not stomping on another variable, which
+ // really should be impossible at this point.
+ assertVarType(filename, varName);
+
+ if (mGenericConfig.getVarType(varName) == varType) {
+ assigner.onAssignStatement(assign);
+ }
+ }
+ } else if (statement instanceof GenericConfig.Inherit) {
+ if (inheriter != null) {
+ final GenericConfig.Inherit inherit = (GenericConfig.Inherit)statement;
+ if (seen != null) {
+ if (seen.contains(inherit.getFilename().toString())) {
+ continue;
+ }
+ seen.add(inherit.getFilename().toString());
+ }
+ inheriter.onInheritStatement(inherit);
+ }
+ }
+ }
+ } finally {
+ // Also executes after return statements, so we always remove this.
+ mStack.remove(filename);
+ }
+ }
+
+ /**
+ * Call 'inheriter' for each child of 'filename' in alphabetical order.
+ */
+ private void forEachInheritAlpha(final Str filename, VarType varType, Set<String> seen,
+ InheritCallback inheriter) {
+ final TreeMap<Str, GenericConfig.Inherit> alpha = new TreeMap();
+ forEachStatement(filename, varType, null, null,
+ (inherit) -> {
+ alpha.put(inherit.getFilename(), inherit);
+ });
+ for (final GenericConfig.Inherit inherit: alpha.values()) {
+ // Handle 'seen' here where we actually call back, not before, so that
+ // the proper traversal order is preserved.
+ if (seen != null) {
+ if (seen.contains(inherit.getFilename().toString())) {
+ continue;
+ }
+ seen.add(inherit.getFilename().toString());
+ }
+ inheriter.onInheritStatement(inherit);
+ }
+ }
+
+ /**
+ * Traverse the inheritance hierarchy, setting list-value product config variables.
+ */
+ private void flattenListVars(final String filename) {
+ Map<String, Value> vars = flattenListVars(new Str(filename), new HashSet());
+ // Add the result of the recursion to mVariables. We know there will be
+ // no collisions because this function only handles list variables.
+ for (Map.Entry<String, Value> entry: vars.entrySet()) {
+ mVariables.put(entry.getKey(), entry.getValue());
+ }
+ }
+
+ /**
+ * Return the variables defined, recursively, by 'filename.' The 'seen' set
+ * accumulates which nodes have been visited, as each is only done once.
+ *
+ * This convoluted algorithm isn't ideal, but it matches what is in node_fns.mk.
+ */
+ private Map<String, Value> flattenListVars(final Str filename, Set<String> seen) {
+ Map<String, Value> result = new HashMap();
+
+ // Recurse into our children first in alphabetical order, building a map of
+ // that filename to its flattened values. The order matters here because
+ // we will only look at each child once, and when a file appears multiple
+ // times, its variables must have the right set, based on whether it's been
+ // seen before. This preserves the order from node_fns.mk.
+
+ // Child filename --> { varname --> value }
+ final Map<Str, Map<String, Value>> children = new HashMap();
+ forEachInheritAlpha(filename, VarType.LIST, seen,
+ (inherit) -> {
+ final Str child = inherit.getFilename();
+ children.put(child, flattenListVars(child, seen));
+ });
+
+ // Now, traverse the values again in the original source order to concatenate the values.
+ // Note that the concatenation order is *different* from the inherit order above.
+ forEachStatement(filename, VarType.LIST, null,
+ (assign) -> {
+ assignToListVar(result, assign.getName(), assign.getValue());
+ },
+ (inherit) -> {
+ final Map<String, Value> child = children.get(inherit.getFilename());
+ // child == null happens if this node has been visited before.
+ if (child != null) {
+ for (Map.Entry<String, Value> entry: child.entrySet()) {
+ final String varName = entry.getKey();
+ final Value varVal = entry.getValue();
+ appendToListVar(result, varName, varVal.getList());
+ }
+ }
+ });
+
+ return result;
+ }
+
+ /**
+ * Traverse the inheritance hierarchy, setting single-value product config variables.
+ */
+ private void flattenSingleVars(final String filename) {
+ flattenSingleVars(new Str(filename), new HashSet(), new HashSet());
+ }
+
+ private void flattenSingleVars(final Str filename, Set<String> seen1, Set<String> seen2) {
+ // flattenSingleVars has two loops. The first sets all variables that are
+ // defined for *this* file. The second traverses through the inheritance,
+ // to fill in values that weren't defined in this file. The first appearance of
+ // the variable is the one that wins.
+
+ forEachStatement(filename, VarType.SINGLE, seen1,
+ (assign) -> {
+ final String varName = assign.getName();
+ Value v = mVariables.get(varName);
+ // Only take the first value that we see for single variables.
+ Value value = mVariables.get(varName);
+ if (!mVariables.containsKey(varName)) {
+ final List<Str> valueList = assign.getValue();
+ // There should never be more than one item in this list, because
+ // SINGLE values should never be appended to.
+ if (valueList.size() != 1) {
+ final StringBuilder positions = new StringBuilder("[");
+ for (Str s: valueList) {
+ positions.append(s.getPosition());
+ }
+ positions.append(" ]");
+ throw new RuntimeException("Value list found for SINGLE variable "
+ + varName + " size=" + valueList.size()
+ + "positions=" + positions.toString());
+ }
+ mVariables.put(varName,
+ new Value(VarType.SINGLE,
+ valueList.get(0)));
+ }
+ }, null);
+
+ forEachInheritAlpha(filename, VarType.SINGLE, seen2,
+ (inherit) -> {
+ flattenSingleVars(inherit.getFilename(), seen1, seen2);
+ });
+ }
+
+ /**
+ * Traverse the inheritance hierarchy and flatten the values
+ */
+ private void flattenUnknownVars(String filename) {
+ flattenUnknownVars(new Str(filename), new HashSet());
+ }
+
+ private void flattenUnknownVars(final Str filename, Set<String> seen) {
+ // flattenUnknownVars has two loops: First to attempt to set the variable from
+ // this file, and then a second loop to handle the inheritance. This is odd
+ // but it matches the order the files are included in node_fns.mk. The last appearance
+ // of the value is the one that wins.
+
+ forEachStatement(filename, VarType.UNKNOWN, null,
+ (assign) -> {
+ // Overwrite the current value with whatever is now in the file.
+ mVariables.put(assign.getName(),
+ new Value(VarType.UNKNOWN,
+ flattenAssignList(assign, new Str(""))));
+ }, null);
+
+ forEachInheritAlpha(filename, VarType.UNKNOWN, seen,
+ (inherit) -> {
+ flattenUnknownVars(inherit.getFilename(), seen);
+ });
+ }
+
+ String prefix = "";
+
+ /**
+ * Sets the PRODUCTS.<filename>.INHERITS_FROM variables.
+ */
+ private void flattenInheritsFrom(final String filename) {
+ flattenInheritsFrom(new Str(filename));
+ }
+
+ /**
+ * This flatten function, unlike the others, visits all of the nodes regardless
+ * of whether they have been seen before, because that's what the make code does.
+ */
+ private void flattenInheritsFrom(final Str filename) {
+ // Recurse, and gather the list of our children
+ final TreeSet<Str> children = new TreeSet();
+ forEachStatement(filename, VarType.LIST, null, null,
+ (inherit) -> {
+ children.add(inherit.getFilename());
+ flattenInheritsFrom(inherit.getFilename());
+ });
+
+ final String varName = "PRODUCTS." + filename + ".INHERITS_FROM";
+ if (children.size() > 0) {
+ // Build the space separated list.
+ boolean first = true;
+ final StringBuilder val = new StringBuilder();
+ for (Str child: children) {
+ if (first) {
+ first = false;
+ } else {
+ val.append(' ');
+ }
+ val.append(child);
+ }
+ mVariables.put(varName, new Value(VarType.UNKNOWN, new Str(val.toString())));
+ } else {
+ // Clear whatever flattenUnknownVars happened to have put in.
+ mVariables.remove(varName);
+ }
+ }
+
+ /**
+ * Throw an exception if there's an existing variable with a different type.
+ */
+ private void assertVarType(Str filename, String varName) {
+ if (mGenericConfig.getVarType(varName) == VarType.UNKNOWN) {
+ final Value prevValue = mVariables.get(varName);
+ if (prevValue != null
+ && prevValue.getVarType() != VarType.UNKNOWN) {
+ throw new RuntimeException("Mismatched var types:"
+ + " filename=" + filename
+ + " varType=" + mGenericConfig.getVarType(varName)
+ + " varName=" + varName
+ + " prevValue=" + Value.debugString(prevValue));
+ }
+ }
+ }
+
+ /**
+ * Depending on whether the assignment is prepending, appending, setting, etc.,
+ * update the value. We can infer which of those operations it is by the length
+ * and contents of the values. Each value in the list was originally separated
+ * by the previous value.
+ */
+ private void assignToListVar(Map<String, Value> vars, String varName, List<Str> items) {
+ final Value value = vars.get(varName);
+ final List<Str> orig = value == null ? new ArrayList() : value.getList();
+ final List<Str> result = new ArrayList();
+ if (items.size() > 0) {
+ for (int i = 0; i < items.size(); i++) {
+ if (i != 0) {
+ result.addAll(orig);
+ }
+ final Str item = items.get(i);
+ addWords(result, item);
+ }
+ }
+ vars.put(varName, new Value(result));
+ }
+
+ /**
+ * Appends all of the words in 'items' to an entry in vars keyed by 'varName',
+ * creating one if necessary.
+ */
+ private static void appendToListVar(Map<String, Value> vars, String varName, List<Str> items) {
+ Value value = vars.get(varName);
+ if (value == null) {
+ value = new Value(new ArrayList());
+ vars.put(varName, value);
+ }
+ final List<Str> out = value.getList();
+ for (Str item: items) {
+ addWords(out, item);
+ }
+ }
+
+ /**
+ * Split 'item' on spaces, and add each of them as a word to 'out'.
+ */
+ private static void addWords(List<Str> out, Str item) {
+ for (String word: RE_SPACE.split(item.toString().trim())) {
+ if (word.length() > 0) {
+ out.add(new Str(item.getPosition(), word));
+ }
+ }
+ }
+
+ /**
+ * Flatten the list of strings in an Assign statement, using the previous value
+ * as a separator.
+ */
+ private Str flattenAssignList(GenericConfig.Assign assign, Str previous) {
+ final StringBuilder result = new StringBuilder();
+ Position position = previous.getPosition();
+ final List<Str> list = assign.getValue();
+ final int size = list.size();
+ for (int i = 0; i < size; i++) {
+ final Str item = list.get(i);
+ result.append(item.toString());
+ if (i != size - 1) {
+ result.append(previous);
+ }
+ final Position pos = item.getPosition();
+ if (pos != null && pos.getFile() != null) {
+ position = pos;
+ }
+ }
+ return new Str(position, result.toString());
+ }
+
+ /**
+ * Make sure that each of the product config variables has a default value.
+ */
+ private void setDefaultKnownVars() {
+ for (Map.Entry<String, VarType> entry: mGenericConfig.getProductVars().entrySet()) {
+ final String varName = entry.getKey();
+ final VarType varType = entry.getValue();
+
+ final Value val = mVariables.get(varName);
+ if (val == null) {
+ mVariables.put(varName, new Value(varType));
+ }
+ }
+
+
+ // TODO: These two for now as well, until we can rewrite the enforce packages exist
+ // handling.
+ if (!mVariables.containsKey("PRODUCT_ENFORCE_PACKAGES_EXIST")) {
+ mVariables.put("PRODUCT_ENFORCE_PACKAGES_EXIST", new Value(VarType.UNKNOWN));
+ }
+ if (!mVariables.containsKey("PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST")) {
+ mVariables.put("PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST", new Value(VarType.UNKNOWN));
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/GenericConfig.java b/tools/product_config/src/com/android/build/config/GenericConfig.java
new file mode 100644
index 0000000..2ee2735
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/GenericConfig.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Language-agnostic representation of a configuration statement.
+ */
+public class GenericConfig extends ConfigBase {
+ /**
+ * The config files that were imported in this config pass.
+ */
+ protected final TreeMap<String, ConfigFile> mConfigFiles = new TreeMap();
+
+ /**
+ * A configuration file.
+ */
+ public static class ConfigFile {
+ /**
+ * The name of the file, relative to the tree root.
+ */
+ private final String mFilename;
+
+ /**
+ * Sections of variable definitions and import statements. Product config
+ * files will always have at least one block.
+ */
+ private final ArrayList<Statement> mStatements = new ArrayList();
+
+ public ConfigFile(String filename) {
+ mFilename = filename;
+ }
+
+ public String getFilename() {
+ return mFilename;
+ }
+
+ public void addStatement(Statement statement) {
+ mStatements.add(statement);
+ }
+
+ public ArrayList<Statement> getStatements() {
+ return mStatements;
+ }
+ }
+
+ /**
+ * Base class for statements that appear in config files.
+ */
+ public static class Statement {
+ }
+
+ /**
+ * A variable assignment.
+ */
+ public static class Assign extends Statement {
+ private final String mVarName;
+ private final List<Str> mValue;
+
+ /**
+ * Assignment of a single value
+ */
+ public Assign(String varName, Str value) {
+ mVarName = varName;
+ mValue = new ArrayList();
+ mValue.add(value);
+ }
+
+ /**
+ * Assignment referencing a previous value.
+ * VAR := $(1) $(VAR) $(2) $(VAR) $(3)
+ */
+ public Assign(String varName, List<Str> value) {
+ mVarName = varName;
+ mValue = value;
+ }
+
+ public String getName() {
+ return mVarName;
+ }
+
+ public List<Str> getValue() {
+ return mValue;
+ }
+ }
+
+ /**
+ * An $(inherit-product FILENAME) statement
+ */
+ public static class Inherit extends Statement {
+ private final Str mFilename;
+
+ public Inherit(Str filename) {
+ mFilename = filename;
+ }
+
+ public Str getFilename() {
+ return mFilename;
+ }
+ }
+
+ /**
+ * Adds the given config file. Returns any one previously added, or null.
+ */
+ public ConfigFile addConfigFile(ConfigFile file) {
+ return mConfigFiles.put(file.getFilename(), file);
+ }
+
+ public TreeMap<String, ConfigFile> getFiles() {
+ return mConfigFiles;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Kati.java b/tools/product_config/src/com/android/build/config/Kati.java
new file mode 100644
index 0000000..4fa2297
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Kati.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Map;
+
+/**
+ * Wrapper for invoking kati.
+ */
+public interface Kati {
+ public Map<String, MakeConfig> loadProductConfig();
+}
diff --git a/tools/product_config/src/com/android/build/config/KatiCommand.java b/tools/product_config/src/com/android/build/config/KatiCommand.java
new file mode 100644
index 0000000..f3c71d2
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiCommand.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Arrays;
+import java.util.List;
+
+public interface KatiCommand {
+ public static class KatiException extends Exception {
+ private String mStderr;
+
+ public KatiException(List<String> cmd, String stderr) {
+ super("Error running kati: " + Arrays.toString(cmd.toArray()));
+ mStderr = stderr;
+ }
+
+ public String getStderr() {
+ return mStderr;
+ }
+ }
+
+ /**
+ * Run kati directly. Returns stdout data.
+ *
+ * @throws KatiException if there is an error. KatiException will contain
+ * the stderr from the kati invocation.
+ */
+ public String run(String[] args) throws KatiException;
+}
diff --git a/tools/product_config/src/com/android/build/config/KatiCommandImpl.java b/tools/product_config/src/com/android/build/config/KatiCommandImpl.java
new file mode 100644
index 0000000..53480d4
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiCommandImpl.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.nio.charset.StandardCharsets;
+
+public class KatiCommandImpl implements KatiCommand {
+ final Errors mErrors;
+ final Options mOptions;
+
+ /**
+ * Runnable that consumes all of an InputStream until EOF, writes the contents
+ * into a StringBuilder, and then closes the stream.
+ */
+ class OutputReader implements Runnable {
+ private final InputStream mStream;
+ private final StringBuilder mOutput;
+
+ OutputReader(InputStream stream, StringBuilder output) {
+ mStream = stream;
+ mOutput = output;
+ }
+
+ @Override
+ public void run() {
+ final char[] buf = new char[16*1024];
+ final InputStreamReader reader = new InputStreamReader(mStream, StandardCharsets.UTF_8);
+ try {
+ int amt;
+ while ((amt = reader.read(buf, 0, buf.length)) >= 0) {
+ mOutput.append(buf, 0, amt);
+ }
+ } catch (IOException ex) {
+ mErrors.ERROR_KATI.add("Error reading from kati: " + ex.getMessage());
+ } finally {
+ try {
+ reader.close();
+ } catch (IOException ex) {
+ // Close doesn't throw
+ }
+ }
+ }
+ }
+
+ public KatiCommandImpl(Errors errors, Options options) {
+ mErrors = errors;
+ mOptions = options;
+ }
+
+ /**
+ * Run kati directly. Returns stdout data.
+ *
+ * @throws KatiException if there is an error. KatiException will contain
+ * the stderr from the kati invocation.
+ */
+ public String run(String[] args) throws KatiException {
+ final ArrayList<String> cmd = new ArrayList();
+ cmd.add(mOptions.getCKatiBin());
+ for (String arg: args) {
+ cmd.add(arg);
+ }
+
+ final ProcessBuilder builder = new ProcessBuilder(cmd);
+ builder.redirectOutput(ProcessBuilder.Redirect.PIPE);
+ builder.redirectError(ProcessBuilder.Redirect.PIPE);
+
+ Process process = null;
+
+ try {
+ process = builder.start();
+ } catch (IOException ex) {
+ throw new KatiException(cmd, "IOException running process: " + ex.getMessage());
+ }
+
+ final StringBuilder stdout = new StringBuilder();
+ final Thread stdoutThread = new Thread(new OutputReader(process.getInputStream(), stdout),
+ "kati_stdout_reader");
+ stdoutThread.start();
+
+ final StringBuilder stderr = new StringBuilder();
+ final Thread stderrThread = new Thread(new OutputReader(process.getErrorStream(), stderr),
+ "kati_stderr_reader");
+ stderrThread.start();
+
+ int returnCode = waitForProcess(process);
+ joinThread(stdoutThread);
+ joinThread(stderrThread);
+
+ if (returnCode != 0) {
+ throw new KatiException(cmd, stderr.toString());
+ }
+
+ return stdout.toString();
+ }
+
+ /**
+ * Wrap Process.waitFor() because it throws InterruptedException.
+ */
+ private static int waitForProcess(Process proc) {
+ while (true) {
+ try {
+ return proc.waitFor();
+ } catch (InterruptedException ex) {
+ }
+ }
+ }
+
+ /**
+ * Wrap Thread.join() because it throws InterruptedException.
+ */
+ private static void joinThread(Thread thread) {
+ while (true) {
+ try {
+ thread.join();
+ return;
+ } catch (InterruptedException ex) {
+ }
+ }
+ }
+}
+
diff --git a/tools/product_config/src/com/android/build/config/KatiImpl.java b/tools/product_config/src/com/android/build/config/KatiImpl.java
new file mode 100644
index 0000000..de11f36
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiImpl.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class KatiImpl implements Kati {
+ // Subdirectory inside out for config stuff.
+ private static final String CONFIG_SUBDIR = "config";
+
+ private final Errors mErrors;
+ private final Options mOptions;
+ private final KatiCommand mCommand;
+
+ // TODO: Do we need to consider the whole or a greater subset of the
+ // environment (or a hash of it?). In theory product-variant is enough, but we know
+ // people use stuff from the environment, even though we're trying to get rid of that.
+ private String getWorkDirPath() {
+ return Paths.get(mOptions.getOutDir(), CONFIG_SUBDIR,
+ mOptions.getProduct() + '-' + mOptions.getVariant()).toString();
+ }
+
+ private String getDumpConfigCsvPath() {
+ return Paths.get(getWorkDirPath(), "dumpconfig.csv").toString();
+ }
+
+ public KatiImpl(Errors errors, Options options) {
+ this(errors, options, new KatiCommandImpl(errors, options));
+ }
+
+ // VisibleForTesting
+ public KatiImpl(Errors errors, Options options, KatiCommand command) {
+ mErrors = errors;
+ mOptions = options;
+ mCommand = command;
+ }
+
+ @Override
+ public Map<String, MakeConfig> loadProductConfig() {
+ final String csvPath = getDumpConfigCsvPath();
+ try {
+ File workDir = new File(getWorkDirPath());
+
+ if ((workDir.exists() && !workDir.isDirectory()) || !workDir.mkdirs()) {
+ mErrors.ERROR_KATI.add("Unable to create directory: " + workDir);
+ return null; // TODO: throw exception?
+ }
+
+ String out = mCommand.run(new String[] {
+ "-f", "build/make/core/dumpconfig.mk",
+ "DUMPCONFIG_FILE=" + csvPath
+ });
+
+ if (!out.contains("***DONE***")) {
+ mErrors.ERROR_KATI.add(
+ "Unknown error with kati, but it didn't print ***DONE*** message");
+ return null; // TODO: throw exception?
+ }
+ // TODO: Check that output was good.
+ } catch (KatiCommand.KatiException ex) {
+ mErrors.ERROR_KATI.add("Error running kati:\n" + ex.getStderr());
+ return null;
+ }
+
+ if (!(new File(csvPath)).canRead()) {
+ mErrors.ERROR_KATI.add("Kati ran but did not create " + csvPath);
+ return null;
+ }
+
+ try (FileReader reader = new FileReader(csvPath)) {
+ Map<String, MakeConfig> makeConfigs = DumpConfigParser.parse(mErrors, csvPath, reader);
+
+ if (makeConfigs.size() == 0) {
+ // TODO: Issue error?
+ return null;
+ }
+
+ return makeConfigs;
+ } catch (CsvParser.ParseException ex) {
+ mErrors.ERROR_KATI.add(new Position(csvPath, ex.getLine()),
+ "Unable to parse output of dumpconfig.mk: " + ex.getMessage());
+ return null; // TODO: throw exception?
+ } catch (IOException ex) {
+ System.out.println(ex);
+ mErrors.ERROR_KATI.add("Unable to read " + csvPath + ": " + ex.getMessage());
+ return null; // TODO: throw exception?
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Main.java b/tools/product_config/src/com/android/build/config/Main.java
index b792193..5cec55e 100644
--- a/tools/product_config/src/com/android/build/config/Main.java
+++ b/tools/product_config/src/com/android/build/config/Main.java
@@ -16,6 +16,11 @@
package com.android.build.config;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
public class Main {
private final Errors mErrors;
private final Options mOptions;
@@ -26,11 +31,44 @@
}
void run() {
- System.out.println("Hello World");
-
// TODO: Check the build environment to make sure we're running in a real
// build environment, e.g. actually inside a source tree, with TARGET_PRODUCT
// and TARGET_BUILD_VARIANT defined, etc.
+ Kati kati = new KatiImpl(mErrors, mOptions);
+ Map<String, MakeConfig> makeConfigs = kati.loadProductConfig();
+ if (makeConfigs == null || mErrors.hadError()) {
+ return;
+ }
+ if (false) {
+ for (MakeConfig makeConfig: (new TreeMap<String, MakeConfig>(makeConfigs)).values()) {
+ System.out.println();
+ System.out.println("=======================================");
+ System.out.println("PRODUCT CONFIG FILES : " + makeConfig.getPhase());
+ System.out.println("=======================================");
+ makeConfig.printToStream(System.out);
+ }
+ }
+
+ ConvertMakeToGenericConfig m2g = new ConvertMakeToGenericConfig(mErrors);
+ GenericConfig generic = m2g.convert(makeConfigs);
+ if (false) {
+ System.out.println("======================");
+ System.out.println("REGENERATED MAKE FILES");
+ System.out.println("======================");
+ MakeWriter.write(System.out, generic, 0);
+ }
+
+ // TODO: Lookup shortened name as used in PRODUCT_NAME / TARGET_PRODUCT
+ FlatConfig flat = FlattenConfig.flatten(mErrors, generic);
+ if (false) {
+ System.out.println("=======================");
+ System.out.println("FLATTENED VARIABLE LIST");
+ System.out.println("=======================");
+ MakeWriter.write(System.out, flat, 0);
+ }
+
+ OutputChecker checker = new OutputChecker(flat);
+ checker.reportErrors(mErrors);
// TODO: Run kati and extract the variables and convert all that into starlark files.
@@ -38,8 +76,6 @@
// TODO: Get the variables that were defined in starlark and use that to write
// out the make, soong and bazel input files.
- mErrors.ERROR_COMMAND_LINE.add("asdf");
- throw new RuntimeException("poop");
}
public static void main(String[] args) {
@@ -47,7 +83,7 @@
int exitCode = 0;
try {
- Options options = Options.parse(errors, args);
+ Options options = Options.parse(errors, args, System.getenv());
if (errors.hadError()) {
Options.printHelp(System.err);
System.err.println();
@@ -62,7 +98,7 @@
Options.printHelp(System.out);
return;
}
- } catch (CommandException ex) {
+ } catch (CommandException | Errors.FatalException ex) {
// These are user errors, so don't show a stack trace
exitCode = 1;
} catch (Throwable ex) {
@@ -76,7 +112,10 @@
} finally {
// Print errors and warnings
errors.printErrors(System.err);
+ if (errors.hadError()) {
+ exitCode = 1;
+ }
+ System.exit(exitCode);
}
- System.exit(exitCode);
}
}
diff --git a/tools/product_config/src/com/android/build/config/MakeConfig.java b/tools/product_config/src/com/android/build/config/MakeConfig.java
new file mode 100644
index 0000000..dda0db9
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/MakeConfig.java
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class MakeConfig extends ConfigBase {
+ /**
+ * The config files that were imported in this config pass.
+ */
+ protected final ArrayList<ConfigFile> mConfigFiles = new ArrayList();
+
+ public enum BlockType {
+ UNSET,
+ BEFORE,
+ INHERIT,
+ AFTER
+ }
+
+ public static class ConfigFile {
+ /**
+ * The name of the file, relative to the tree root.
+ */
+ private final String mFilename;
+
+ /**
+ * Sections of variable definitions and import statements. Product config
+ * files will always have at least one block.
+ */
+ private final ArrayList<Block> mBlocks = new ArrayList();
+
+ public ConfigFile(String filename) {
+ mFilename = filename;
+ }
+
+ public String getFilename() {
+ return mFilename;
+ }
+
+ public void addBlock(Block block) {
+ mBlocks.add(block);
+ }
+
+ public ArrayList<Block> getBlocks() {
+ return mBlocks;
+ }
+ }
+
+ /**
+ * A set of variables that were defined.
+ */
+ public static class Block {
+ private final BlockType mBlockType;
+ private final TreeMap<String, Str> mValues = new TreeMap();
+ private Str mInheritedFile;
+
+ public Block(BlockType blockType) {
+ mBlockType = blockType;
+ }
+
+ public BlockType getBlockType() {
+ return mBlockType;
+ }
+
+ public void addVar(String varName, Str varValue) {
+ mValues.put(varName, varValue);
+ }
+
+ public Str getVar(String varName) {
+ return mValues.get(varName);
+ }
+
+ public TreeMap<String, Str> getVars() {
+ return mValues;
+ }
+
+ public void setInheritedFile(Str filename) {
+ mInheritedFile = filename;
+ }
+
+ public Str getInheritedFile() {
+ return mInheritedFile;
+ }
+ }
+
+ /**
+ * Adds the given config file. Returns any one previously added, or null.
+ */
+ public ConfigFile addConfigFile(ConfigFile file) {
+ ConfigFile prev = null;
+ for (ConfigFile f: mConfigFiles) {
+ if (f.getFilename().equals(file.getFilename())) {
+ prev = f;
+ break;
+ }
+ }
+ mConfigFiles.add(file);
+ return prev;
+ }
+
+ public List<ConfigFile> getConfigFiles() {
+ return mConfigFiles;
+ }
+
+ public void printToStream(PrintStream out) {
+ out.println("MakeConfig {");
+ out.println(" phase: " + mPhase);
+ out.println(" rootNodes: " + mRootNodes);
+ out.print(" singleVars: [ ");
+ for (Map.Entry<String,VarType> entry: mProductVars.entrySet()) {
+ if (entry.getValue() == VarType.SINGLE) {
+ out.print(entry.getKey());
+ out.print(" ");
+ }
+ }
+ out.println("]");
+ out.print(" listVars: [ ");
+ for (Map.Entry<String,VarType> entry: mProductVars.entrySet()) {
+ if (entry.getValue() == VarType.LIST) {
+ out.print(entry.getKey());
+ out.print(" ");
+ }
+ }
+ out.println("]");
+ out.println(" configFiles: [");
+ for (final ConfigFile configFile: mConfigFiles) {
+ out.println(" ConfigFile {");
+ out.println(" filename: " + configFile.getFilename());
+ out.println(" blocks: [");
+ for (Block block: configFile.getBlocks()) {
+ out.println(" Block {");
+ out.println(" type: " + block.getBlockType());
+ if (block.getBlockType() == BlockType.INHERIT) {
+ out.println(" inherited: " + block.getInheritedFile());
+ }
+ out.println(" values: {");
+ for (Map.Entry<String,Str> var: block.getVars().entrySet()) {
+ if (!var.getKey().equals("PRODUCT_PACKAGES")) {
+ continue;
+ }
+ out.println(" " + var.getKey() + ": " + var.getValue());
+ }
+ out.println(" }");
+ out.println(" }");
+ }
+ out.println(" ]");
+ out.println(" }");
+ }
+ out.println(" ] // configFiles");
+ out.println("} // MakeConfig");
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/MakeWriter.java b/tools/product_config/src/com/android/build/config/MakeWriter.java
new file mode 100644
index 0000000..15fd095
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/MakeWriter.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class MakeWriter {
+ public static final int FLAG_WRITE_HEADER = 1;
+ public static final int FLAG_WRITE_ANNOTATIONS = 1 << 1;
+
+ private final boolean mWriteHeader;
+ private final boolean mWriteAnnotations;
+
+ public static void write(PrintStream out, GenericConfig config, int flags) {
+ (new MakeWriter(flags)).writeGeneric(out, config);
+ }
+
+ public static void write(PrintStream out, FlatConfig config, int flags) {
+ (new MakeWriter(flags)).writeFlat(out, config);
+ }
+
+
+ private MakeWriter(int flags) {
+ mWriteHeader = (flags & FLAG_WRITE_HEADER) != 0;
+ mWriteAnnotations = (flags & FLAG_WRITE_ANNOTATIONS) != 0;
+ }
+
+ private void writeGeneric(PrintStream out, GenericConfig config) {
+ for (GenericConfig.ConfigFile file: config.getFiles().values()) {
+ out.println("---------------------------------------------------------");
+ out.println("FILE: " + file.getFilename());
+ out.println("---------------------------------------------------------");
+ writeFile(out, config, file);
+ out.println();
+ }
+ out.println("---------------------------------------------------------");
+ out.println("VARIABLES TOUCHED BY MAKE BASED CONFIG:");
+ out.println("---------------------------------------------------------");
+ writeStrVars(out, OutputChecker.getModifiedVars(config.getInitialVariables(),
+ config.getFinalVariables()), config);
+ }
+
+ private void writeFile(PrintStream out, GenericConfig config, GenericConfig.ConfigFile file) {
+ if (mWriteHeader) {
+ out.println("# This file is generated by the product_config tool");
+ }
+ for (GenericConfig.Statement statement: file.getStatements()) {
+ if (statement instanceof GenericConfig.Assign) {
+ writeAssign(out, config, (GenericConfig.Assign)statement);
+ } else if (statement instanceof GenericConfig.Inherit) {
+ writeInherit(out, (GenericConfig.Inherit)statement);
+ } else {
+ throw new RuntimeException("Unexpected Statement: " + statement);
+ }
+ }
+ }
+
+ private void writeAssign(PrintStream out, GenericConfig config,
+ GenericConfig.Assign statement) {
+ final List<Str> values = statement.getValue();
+ final int size = values.size();
+ final String varName = statement.getName();
+ Position pos = null;
+ if (size == 0) {
+ return;
+ } else if (size == 1) {
+ // Plain :=
+ final Str value = values.get(0);
+ out.print(varName + " := " + value);
+ pos = value.getPosition();
+ } else if (size == 2 && values.get(0).toString().length() == 0) {
+ // Plain +=
+ final Str value = values.get(1);
+ out.print(varName + " += " + value);
+ pos = value.getPosition();
+ } else {
+ // Write it out the long way
+ out.print(varName + " := " + values.get(0));
+ for (int i = 1; i < size; i++) {
+ out.print("$(" + varName + ") " + values.get(i));
+ pos = values.get(i).getPosition();
+ }
+ }
+ if (mWriteAnnotations) {
+ out.print(" # " + config.getVarType(varName) + " " + pos);
+ }
+ out.println();
+ }
+
+ private void writeInherit(PrintStream out, GenericConfig.Inherit statement) {
+ final Str filename = statement.getFilename();
+ out.print("$(call inherit-product, " + filename + ")");
+ if (mWriteAnnotations) {
+ out.print(" # " + filename.getPosition());
+ }
+ out.println();
+ }
+
+ private static class Var {
+ Var(String name, Str val) {
+ this.name = name;
+ this.val = val;
+ }
+ final String name;
+ final Str val;
+ }
+
+ private static void writeStrVars(PrintStream out, Map<String, Str> vars, ConfigBase config) {
+ // Sort by file name and var name
+ TreeMap<String, Var> sorted = new TreeMap();
+ for (Map.Entry<String, Str> entry: vars.entrySet()) {
+ sorted.put(entry.getValue().getPosition().toString() + " " + entry.getKey(),
+ new Var(entry.getKey(), entry.getValue()));
+ }
+ // Print it
+ for (Var var: sorted.values()) {
+ out.println(var.val.getPosition() + var.name + " := " + var.val);
+ }
+ }
+
+ private void writeFlat(PrintStream out, FlatConfig config) {
+ // TODO: Print positions.
+ for (Map.Entry<String, Value> entry: config.getValues().entrySet()) {
+ out.print(entry.getKey());
+ out.print(" := ");
+
+ final Value value = entry.getValue();
+ if (value.getVarType() == VarType.LIST) {
+ final List<Str> list = value.getList();
+ final int size = list.size();
+ for (int i = 0; i < size; i++) {
+ out.print(list.get(i).toString());
+ if (i != size - 1) {
+ out.print(" \\\n ");
+ }
+ }
+ } else {
+ out.print(value.getStr().toString());
+ }
+ out.println();
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Options.java b/tools/product_config/src/com/android/build/config/Options.java
index 48146cb..ed544dc 100644
--- a/tools/product_config/src/com/android/build/config/Options.java
+++ b/tools/product_config/src/com/android/build/config/Options.java
@@ -17,6 +17,7 @@
package com.android.build.config;
import java.io.PrintStream;
+import java.util.Map;
import java.util.TreeMap;
public class Options {
@@ -27,19 +28,50 @@
private Action mAction = Action.DEFAULT;
+ private String mProduct;
+ private String mVariant;
+ private String mOutDir;
+ private String mCKatiBin;
+
public Action getAction() {
return mAction;
}
+ public String getProduct() {
+ return mProduct;
+ }
+
+ public String getVariant() {
+ return mVariant;
+ }
+
+ public String getOutDir() {
+ return mOutDir != null ? mOutDir : "out";
+ }
+
+ public String getCKatiBin() {
+ return mCKatiBin;
+ }
+
public static void printHelp(PrintStream out) {
out.println("usage: product_config");
out.println();
- out.println("OPTIONS");
+ out.println("REQUIRED FLAGS");
+ out.println(" --ckati_bin CKATI Kati binary to use.");
+ out.println();
+ out.println("OPTIONAL FLAGS");
out.println(" --hide ERROR_ID Suppress this error.");
out.println(" --error ERROR_ID Make this ERROR_ID a fatal error.");
out.println(" --help -h This message.");
out.println(" --warning ERROR_ID Make this ERROR_ID a warning.");
out.println();
+ out.println("REQUIRED ENVIRONMENT");
+ out.println(" TARGET_PRODUCT Product to build from lunch command.");
+ out.println(" TARGET_BUILD_VARIANT Build variant from lunch command.");
+ out.println();
+ out.println("OPTIONAL ENVIRONMENT");
+ out.println(" OUT_DIR Build output directory. Defaults to \"out\".");
+ out.println();
out.println("ERRORS");
out.println(" The following are the errors that can be controlled on the");
out.println(" commandline with the --hide --warning --error flags.");
@@ -55,7 +87,7 @@
}
static class Parser {
- private class ParseException extends Exception {
+ private static class ParseException extends Exception {
public ParseException(String message) {
super(message);
}
@@ -63,20 +95,26 @@
private Errors mErrors;
private String[] mArgs;
+ private Map<String,String> mEnv;
private Options mResult = new Options();
private int mIndex;
+ private boolean mSkipRequiredArgValidation;
- public Parser(Errors errors, String[] args) {
+ public Parser(Errors errors, String[] args, Map<String,String> env) {
mErrors = errors;
mArgs = args;
+ mEnv = env;
}
public Options parse() {
+ // Args
try {
while (mIndex < mArgs.length) {
final String arg = mArgs[mIndex];
- if ("--hide".equals(arg)) {
+ if ("--ckati_bin".equals(arg)) {
+ mResult.mCKatiBin = requireNextStringArg(arg);
+ } else if ("--hide".equals(arg)) {
handleErrorCode(arg, Errors.Level.HIDDEN);
} else if ("--error".equals(arg)) {
handleErrorCode(arg, Errors.Level.ERROR);
@@ -99,11 +137,45 @@
mErrors.ERROR_COMMAND_LINE.add(ex.getMessage());
}
+ // Environment
+ mResult.mProduct = mEnv.get("TARGET_PRODUCT");
+ mResult.mVariant = mEnv.get("TARGET_BUILD_VARIANT");
+ mResult.mOutDir = mEnv.get("OUT_DIR");
+
+ validateArgs();
+
return mResult;
}
- private void addWarning(Errors.Category category, String message) {
- category.add(message);
+ /**
+ * For testing; don't generate errors about missing arguments
+ */
+ public void setSkipRequiredArgValidation() {
+ mSkipRequiredArgValidation = true;
+ }
+
+ private void validateArgs() {
+ if (!mSkipRequiredArgValidation) {
+ if (mResult.mCKatiBin == null || "".equals(mResult.mCKatiBin)) {
+ addMissingArgError("--ckati_bin");
+ }
+ if (mResult.mProduct == null) {
+ addMissingEnvError("TARGET_PRODUCT");
+ }
+ if (mResult.mVariant == null) {
+ addMissingEnvError("TARGET_BUILD_VARIANT");
+ }
+ }
+ }
+
+ private void addMissingArgError(String argName) {
+ mErrors.ERROR_COMMAND_LINE.add("Required command line argument missing: "
+ + argName);
+ }
+
+ private void addMissingEnvError(String envName) {
+ mErrors.ERROR_COMMAND_LINE.add("Required environment variable missing: "
+ + envName);
}
private String getNextNonFlagArg() {
@@ -117,6 +189,14 @@
return mArgs[mIndex];
}
+ private String requireNextStringArg(String arg) throws ParseException {
+ final String val = getNextNonFlagArg();
+ if (val == null) {
+ throw new ParseException(arg + " requires a string argument.");
+ }
+ return val;
+ }
+
private int requireNextNumberArg(String arg) throws ParseException {
final String val = getNextNonFlagArg();
if (val == null) {
@@ -151,7 +231,7 @@
* <p>
* Adds errors encountered to Errors object.
*/
- public static Options parse(Errors errors, String[] args) {
- return (new Parser(errors, args)).parse();
+ public static Options parse(Errors errors, String[] args, Map<String, String> env) {
+ return (new Parser(errors, args, env)).parse();
}
}
diff --git a/tools/product_config/src/com/android/build/config/OutputChecker.java b/tools/product_config/src/com/android/build/config/OutputChecker.java
new file mode 100644
index 0000000..d982dba
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/OutputChecker.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Compares the make-based configuration as reported by dumpconfig.mk
+ * with what was computed from the new tool.
+ */
+public class OutputChecker {
+ // Differences that we know about, either know issues to be fixed or intentional.
+ private static final RegexSet IGNORED_VARIABLES = new RegexSet(
+ // TODO: Rewrite the enforce packages exist logic into this tool.
+ "PRODUCT_ENFORCE_PACKAGES_EXIST",
+ "PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST",
+ "PRODUCTS\\..*\\.PRODUCT_ENFORCE_PACKAGES_EXIST",
+ "PRODUCTS\\..*\\.PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST",
+
+ // This is generated by this tool, but comes later in the make build system.
+ "INTERNAL_PRODUCT",
+
+ // This can be set temporarily by product_config.mk
+ ".KATI_ALLOW_RULES"
+ );
+
+ private final FlatConfig mConfig;
+ private final TreeMap<String, Variable> mVariables;
+
+ /**
+ * Represents the before and after state of a variable.
+ */
+ public static class Variable {
+ public final String name;
+ public final VarType type;
+ public final Str original;
+ public final Value updated;
+
+ public Variable(String name, VarType type, Str original) {
+ this(name, type, original, null);
+ }
+
+ public Variable(String name, VarType type, Str original, Value updated) {
+ this.name = name;
+ this.type = type;
+ this.original = original;
+ this.updated = updated;
+ }
+
+ /**
+ * Return copy of this Variable with the updated field also set.
+ */
+ public Variable addUpdated(Value updated) {
+ return new Variable(name, type, original, updated);
+ }
+
+ /**
+ * Return whether normalizedOriginal and normalizedUpdate are equal.
+ */
+ public boolean isSame() {
+ final Str normalizedOriginal = Value.normalize(original);
+ final Str normalizedUpdated = Value.normalize(updated);
+ if (normalizedOriginal == null && normalizedUpdated == null) {
+ return true;
+ } else if (normalizedOriginal != null) {
+ return normalizedOriginal.equals(normalizedUpdated);
+ } else {
+ return false;
+ }
+ }
+ }
+
+ /**
+ * Construct OutputChecker with the config it will check.
+ */
+ public OutputChecker(FlatConfig config) {
+ mConfig = config;
+ mVariables = getVariables(config);
+ }
+
+ /**
+ * Add a WARNING_DIFFERENT_FROM_KATI for each of the variables which have changed.
+ */
+ public void reportErrors(Errors errors) {
+ for (Variable var: getDifferences()) {
+ if (IGNORED_VARIABLES.matches(var.name)) {
+ continue;
+ }
+ errors.WARNING_DIFFERENT_FROM_KATI.add("product_config processing differs from"
+ + " kati processing for " + var.type + " variable " + var.name + ".\n"
+ + "original: "
+ + Value.oneLinePerWord(var.original, "<null>") + "\n"
+ + "updated: "
+ + Value.oneLinePerWord(var.updated, "<null>"));
+ }
+ }
+
+ /**
+ * Get the Variables that are different between the normalized form of the original
+ * and updated. If one is null and the other is not, even if one is an empty string,
+ * the values are considered different.
+ */
+ public List<Variable> getDifferences() {
+ final ArrayList<Variable> result = new ArrayList();
+ for (Variable var: mVariables.values()) {
+ if (!var.isSame()) {
+ result.add(var);
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Get all of the variables for this config.
+ *
+ * VisibleForTesting
+ */
+ static TreeMap<String, Variable> getVariables(FlatConfig config) {
+ final TreeMap<String, Variable> result = new TreeMap();
+
+ // Add the original values to mAll
+ for (Map.Entry<String, Str> entry: getModifiedVars(config.getInitialVariables(),
+ config.getFinalVariables()).entrySet()) {
+ final String name = entry.getKey();
+ result.put(name, new Variable(name, config.getVarType(name), entry.getValue()));
+ }
+
+ // Add the updated values to mAll
+ for (Map.Entry<String, Value> entry: config.getValues().entrySet()) {
+ final String name = entry.getKey();
+ final Value value = entry.getValue();
+ Variable var = result.get(name);
+ if (var == null) {
+ result.put(name, new Variable(name, config.getVarType(name), null, value));
+ } else {
+ result.put(name, var.addUpdated(value));
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Get the entries that are different in the two maps.
+ */
+ public static Map<String, Str> getModifiedVars(Map<String, Str> before,
+ Map<String, Str> after) {
+ final HashMap<String, Str> result = new HashMap();
+
+ // Entries that were added or changed.
+ for (Map.Entry<String, Str> afterEntry: after.entrySet()) {
+ final String varName = afterEntry.getKey();
+ final Str afterValue = afterEntry.getValue();
+ final Str beforeValue = before.get(varName);
+ if (beforeValue == null || !beforeValue.equals(afterValue)) {
+ result.put(varName, afterValue);
+ }
+ }
+
+ // Entries that were removed are treated as an empty string.
+ for (Map.Entry<String, Str> beforeEntry: before.entrySet()) {
+ final String varName = beforeEntry.getKey();
+ if (!after.containsKey(varName)) {
+ result.put(varName, new Str(""));
+ }
+ }
+
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Position.java b/tools/product_config/src/com/android/build/config/Position.java
index 7953942..266021d 100644
--- a/tools/product_config/src/com/android/build/config/Position.java
+++ b/tools/product_config/src/com/android/build/config/Position.java
@@ -16,6 +16,9 @@
package com.android.build.config;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
* Position in a source file.
*/
@@ -25,6 +28,9 @@
*/
public static final int NO_LINE = -1;
+ private static final Pattern REGEX = Pattern.compile("([^:]*)(?::(\\d+))?:?\\s*");
+ public static final String UNKNOWN = "<unknown>";
+
private final String mFile;
private final int mLine;
@@ -63,12 +69,39 @@
return mLine;
}
+ /**
+ * Return a Position object from a string containing <filename>:<line>, or the default
+ * Position(null, NO_LINE) if the string can't be parsed.
+ */
+ public static Position parse(String str) {
+ final Matcher m = REGEX.matcher(str);
+ if (!m.matches()) {
+ return new Position();
+ }
+ String filename = m.group(1);
+ if (filename.length() == 0 || UNKNOWN.equals(filename)) {
+ filename = null;
+ }
+ String lineString = m.group(2);
+ int line;
+ if (lineString == null || lineString.length() == 0) {
+ line = NO_LINE;
+ } else {
+ try {
+ line = Integer.parseInt(lineString);
+ } catch (NumberFormatException ex) {
+ line = NO_LINE;
+ }
+ }
+ return new Position(filename, line);
+ }
+
@Override
public String toString() {
if (mFile == null && mLine == NO_LINE) {
return "";
} else if (mFile == null && mLine != NO_LINE) {
- return "<unknown>:" + mLine + ": ";
+ return UNKNOWN + ":" + mLine + ": ";
} else if (mFile != null && mLine == NO_LINE) {
return mFile + ": ";
} else { // if (mFile != null && mLine != NO_LINE)
diff --git a/tools/product_config/src/com/android/build/config/RegexSet.java b/tools/product_config/src/com/android/build/config/RegexSet.java
new file mode 100644
index 0000000..70fcd29
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/RegexSet.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.regex.Pattern;
+
+/**
+ * Returns whether a string matches one of a set of presupplied regexes.
+ */
+public class RegexSet {
+ private final Pattern[] mPatterns;
+
+ public RegexSet(String... patterns) {
+ mPatterns = new Pattern[patterns.length];
+ for (int i = 0; i < patterns.length; i++) {
+ mPatterns[i] = Pattern.compile(patterns[i]);
+ }
+ }
+
+ public boolean matches(String s) {
+ for (Pattern p: mPatterns) {
+ if (p.matcher(s).matches()) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
+
diff --git a/tools/product_config/src/com/android/build/config/Str.java b/tools/product_config/src/com/android/build/config/Str.java
new file mode 100644
index 0000000..2516b76
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Str.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A String and a Position, where it came from in source code.
+ */
+public class Str implements Comparable<Str> {
+ private String mValue;
+ private Position mPosition;
+
+ public Str(String s) {
+ mValue = s;
+ mPosition = new Position();
+ }
+
+ public Str(Position pos, String s) {
+ mValue = s;
+ mPosition = pos;
+ }
+
+ public int length() {
+ return mValue.length();
+ }
+
+ @Override
+ public String toString() {
+ return mValue;
+ }
+
+ public Position getPosition() {
+ return mPosition;
+ }
+
+ /**
+ * Str is equal if the string value is equal, regardless of whether the position
+ * is the same.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof Str)) {
+ return false;
+ }
+ final Str that = (Str)o;
+ return mValue.equals(that.mValue);
+ }
+
+ @Override
+ public int hashCode() {
+ return mValue.hashCode();
+ }
+
+ @Override
+ public int compareTo(Str that) {
+ return this.mValue.compareTo(that.mValue);
+ }
+
+ public static ArrayList<Str> toList(Position pos, List<String> list) {
+ final ArrayList<Str> result = new ArrayList(list.size());
+ for (String s: list) {
+ result.add(new Str(pos, s));
+ }
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Value.java b/tools/product_config/src/com/android/build/config/Value.java
new file mode 100644
index 0000000..9bd6401
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Value.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+/**
+ * Class to hold the two types of variables we support, strings and lists of strings.
+ */
+public class Value {
+ private static final Pattern SPACES = Pattern.compile("\\s+");
+
+ private final VarType mVarType;
+ private final Str mStr;
+ private final ArrayList<Str> mList;
+
+ /**
+ * Construct an appropriately typed empty value.
+ */
+ public Value(VarType varType) {
+ mVarType = varType;
+ if (varType == VarType.LIST) {
+ mStr = null;
+ mList = new ArrayList();
+ mList.add(new Str(""));
+ } else {
+ mStr = new Str("");
+ mList = null;
+ }
+ }
+
+ public Value(VarType varType, Str str) {
+ mVarType = varType;
+ mStr = str;
+ mList = null;
+ }
+
+ public Value(List<Str> list) {
+ mVarType = VarType.LIST;
+ mStr = null;
+ mList = new ArrayList(list);
+ }
+
+ public VarType getVarType() {
+ return mVarType;
+ }
+
+ public Str getStr() {
+ return mStr;
+ }
+
+ public List<Str> getList() {
+ return mList;
+ }
+
+ /**
+ * Normalize a string that is behaving as a list.
+ */
+ public static String normalize(String str) {
+ if (str == null) {
+ return null;
+ }
+ return SPACES.matcher(str.trim()).replaceAll(" ").trim();
+ }
+
+ /**
+ * Normalize a string that is behaving as a list.
+ */
+ public static Str normalize(Str str) {
+ if (str == null) {
+ return null;
+ }
+ return new Str(str.getPosition(), normalize(str.toString()));
+ }
+
+ /**
+ * Normalize this Value into the same format as normalize(Str).
+ */
+ public static Str normalize(Value val) {
+ if (val == null) {
+ return null;
+ }
+ if (val.mStr != null) {
+ return normalize(val.mStr);
+ }
+
+ if (val.mList.size() == 0) {
+ return new Str("");
+ }
+
+ StringBuilder result = new StringBuilder();
+ final int size = val.mList.size();
+ boolean first = true;
+ for (int i = 0; i < size; i++) {
+ String s = val.mList.get(i).toString().trim();
+ if (s.length() > 0) {
+ if (!first) {
+ result.append(" ");
+ } else {
+ first = false;
+ }
+ result.append(s);
+ }
+ }
+
+ // Just use the first item's position.
+ return new Str(val.mList.get(0).getPosition(), result.toString());
+ }
+
+ /**
+ * Put each word in 'val' on its own line in make format. If 'val' is null,
+ * 'nullValue' is returned.
+ */
+ public static String oneLinePerWord(Value val, String nullValue) {
+ if (val == null) {
+ return nullValue;
+ }
+ final String s = normalize(val).toString();
+ final Matcher m = SPACES.matcher(s);
+ final StringBuilder result = new StringBuilder();
+ if (s.length() > 0 && (val.mVarType == VarType.LIST || m.find())) {
+ result.append("\\\n ");
+ }
+ result.append(m.replaceAll(" \\\\\n "));
+ return result.toString();
+ }
+
+ /**
+ * Put each word in 'str' on its own line in make format. If 'str' is null,
+ * nullValue is returned.
+ */
+ public static String oneLinePerWord(Str str, String nullValue) {
+ if (str == null) {
+ return nullValue;
+ }
+ final Matcher m = SPACES.matcher(normalize(str.toString()));
+ final StringBuilder result = new StringBuilder();
+ if (m.find()) {
+ result.append("\\\n ");
+ }
+ result.append(m.replaceAll(" \\\\\n "));
+ return result.toString();
+ }
+
+ /**
+ * Return a string representing this value with detailed debugging information.
+ */
+ public static String debugString(Value val) {
+ if (val == null) {
+ return "null";
+ }
+
+ final StringBuilder str = new StringBuilder("Value(");
+ if (val.mStr != null) {
+ str.append("mStr=");
+ str.append("\"");
+ str.append(val.mStr.toString());
+ str.append("\"");
+ if (false) {
+ str.append(" (");
+ str.append(val.mStr.getPosition().toString());
+ str.append(")");
+ }
+ }
+ if (val.mList != null) {
+ str.append("mList=");
+ str.append("[");
+ for (Str s: val.mList) {
+ str.append(" \"");
+ str.append(s.toString());
+ if (false) {
+ str.append("\" (");
+ str.append(s.getPosition().toString());
+ str.append(")");
+ } else {
+ str.append("\"");
+ }
+ }
+ str.append(" ]");
+ }
+ str.append(")");
+ return str.toString();
+ }
+
+ /**
+ * Get the Positions of all of the parts of this Value.
+ */
+ public List<Position> getPositions() {
+ List<Position> result = new ArrayList();
+ if (mStr != null) {
+ result.add(mStr.getPosition());
+ }
+ if (mList != null) {
+ for (Str str: mList) {
+ result.add(str.getPosition());
+ }
+ }
+ return result;
+ }
+}
+
diff --git a/tools/product_config/src/com/android/build/config/VarType.java b/tools/product_config/src/com/android/build/config/VarType.java
new file mode 100644
index 0000000..43e9366
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/VarType.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+/**
+ * Whether a product config variable is a list or single-value variable.
+ */
+public enum VarType {
+ /**
+ * A product config variable that is a list of space separated strings.
+ * These are defined by _product_list_vars in product.mk.
+ */
+ LIST,
+
+ /**
+ * A product config variable that is a single string.
+ * These are defined by _product_single_value_vars in product.mk.
+ */
+ SINGLE,
+
+ /**
+ * A variable that is given the special product config handling but is
+ * nonetheless defined by product config makefiles.
+ */
+ UNKNOWN
+}
+
diff --git a/tools/product_config/test.sh b/tools/product_config/test.sh
new file mode 100755
index 0000000..ee9ed5c
--- /dev/null
+++ b/tools/product_config/test.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+
+#
+# This script runs the full set of tests for product config:
+# 1. Build the product-config tool.
+# 2. Run the unit tests.
+# 3. Run the product config for every product available in the current
+# source tree, for each of user, userdebug and eng.
+# - To restrict which products or variants are run, set the
+# PRODUCTS or VARIANTS environment variables.
+# - Products for which the make based product config fails are
+# skipped.
+#
+
+# The PRODUCTS variable is used by the build, and setting it in the environment
+# interferes with that, so unset it. (That should probably be fixed)
+products=$PRODUCTS
+variants=$VARIANTS
+unset PRODUCTS
+unset VARIANTS
+
+# Don't use lunch from the user's shell
+unset TARGET_PRODUCT
+unset TARGET_BUILD_VARIANT
+
+function die() {
+ format=$1
+ shift
+ printf "$format\nStopping...\n" $@ >&2
+ exit 1;
+}
+
+[[ -f build/make/envsetup.sh ]] || die "Run this script from the root of the tree."
+: ${products:=$(build/soong/soong_ui.bash --dumpvar-mode all_named_products | sed -e "s/ /\n/g" | sort -u )}
+: ${variants:="user userdebug eng"}
+: ${CKATI_BIN:=prebuilts/build-tools/$(build/soong/soong_ui.bash --dumpvar-mode HOST_PREBUILT_TAG)/bin/ckati}
+
+function if_signal_exit() {
+ [[ $1 -lt 128 ]] || exit $1
+}
+
+build/soong/soong_ui.bash --build-mode --all-modules --dir="$(pwd)" product-config-test product-config \
+ || die "Build failed."
+
+echo
+echo Running unit tests
+java -jar out/host/linux-x86/testcases/product-config-test/product-config-test.jar
+unit_tests=$?
+if_signal_exit $unit_tests
+
+failed_baseline_checks=
+for product in $products ; do
+ for variant in $variants ; do
+ echo
+ echo "Checking: lunch $product-$variant"
+
+ TARGET_PRODUCT=$product \
+ TARGET_BUILD_VARIANT=$variant \
+ build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT &> /dev/null
+ exit_status=$?
+ if_signal_exit $exit_status
+ if [ $exit_status -ne 0 ] ; then
+ echo "*** Combo fails with make, skipping product-config test run for $product-$variant"
+ else
+ rm -rf out/config/$product-$variant
+ TARGET_PRODUCT=$product TARGET_BUILD_VARIANT=$variant product-config \
+ --ckati_bin $CKATI_BIN \
+ --error 1000
+ exit_status=$?
+ if_signal_exit $exit_status
+ if [ $exit_status -ne 0 ] ; then
+ failed_baseline_checks="$failed_baseline_checks $product-$variant"
+ fi
+ if [ "$CHECK_FOR_RULES" != "" ] ; then
+ # This is a little bit of sleight of hand for good output formatting at the
+ # expense of speed. We've already run the command once without
+ # ALLOW_RULES_IN_PRODUCT_CONFIG, so we know it passes there. We run it again
+ # with ALLOW_RULES_IN_PRODUCT_CONFIG=error to see if it fails, but that will
+ # cause it to only print the first error. But we want to see all of them,
+ # so if it fails we run it a third time with ALLOW_RULES_IN_PRODUCT_CONFIG=warning,
+ # so we can see all the warnings.
+ TARGET_PRODUCT=$product \
+ TARGET_BUILD_VARIANT=$variant \
+ ALLOW_RULES_IN_PRODUCT_CONFIG=error \
+ build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT &> /dev/null
+ exit_status=$?
+ if_signal_exit $exit_status
+ if [ $exit_status -ne 0 ] ; then
+ TARGET_PRODUCT=$product \
+ TARGET_BUILD_VARIANT=$variant \
+ ALLOW_RULES_IN_PRODUCT_CONFIG=warning \
+ build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT > /dev/null
+ failed_rule_checks="$failed_rule_checks $product-$variant"
+ fi
+ fi
+ fi
+ done
+done
+
+echo
+echo
+echo "------------------------------"
+echo SUMMARY
+echo "------------------------------"
+
+echo -n "Unit tests "
+if [ $unit_tests -eq 0 ] ; then echo PASSED ; else echo FAILED ; fi
+
+echo -n "Baseline checks "
+if [ "$failed_baseline_checks" = "" ] ; then echo PASSED ; else echo FAILED ; fi
+for combo in $failed_baseline_checks ; do
+ echo " ... $combo"
+done
+
+echo -n "Rules checks "
+if [ "$failed_rule_checks" = "" ] ; then echo PASSED ; else echo FAILED ; fi
+for combo in $failed_rule_checks ; do
+ echo " ... $combo"
+done
+
diff --git a/tools/product_config/test/com/android/build/config/CsvParserTest.java b/tools/product_config/test/com/android/build/config/CsvParserTest.java
new file mode 100644
index 0000000..6f38d68
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/CsvParserTest.java
@@ -0,0 +1,148 @@
+
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.StringReader;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Test for CSV parser class.
+ */
+public class CsvParserTest {
+ public String listsToStrings(String[] expected, List<String> actual) {
+ return "expected=" + Arrays.toString(expected)
+ + " actual=" + Arrays.toString(actual.toArray());
+ }
+
+ public void assertLineEquals(CsvParser.Line actual, int lineno, String... fields) {
+ if (actual.getLine() != lineno) {
+ throw new RuntimeException("lineno mismatch: expected=" + lineno
+ + " actual=" + actual.getLine());
+ }
+ if (fields.length != actual.getFields().size()) {
+ throw new RuntimeException("getFields().size() mismatch: expected=" + fields.length
+ + " actual=" + actual.getFields().size()
+ + " values: " + listsToStrings(fields, actual.getFields()));
+ }
+ for (int i = 0; i < fields.length; i++) {
+ if (!fields[i].equals(actual.getFields().get(i))) {
+ throw new RuntimeException("getFields().get(" + i + ") mismatch: expected="
+ + fields[i] + " actual=" + actual.getFields().get(i)
+ + " values: " + listsToStrings(fields, actual.getFields()));
+
+ }
+ }
+ }
+
+ @Test
+ public void testEmptyString() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ ""));
+
+ Assert.assertEquals(0, lines.size());
+ }
+
+ @Test
+ public void testLexerOneCharacter() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a");
+ }
+
+ @Test
+ public void testLexerTwoFieldsNoNewline() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a,b"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a", "b");
+ }
+
+ @Test
+ public void testLexerTwoFieldsNewline() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a,b\n"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a", "b");
+ }
+
+ @Test
+ public void testEndsWithTwoNewlines() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a,b\n\n"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a", "b");
+ }
+
+ @Test
+ public void testOnlyNewlines() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "\n\n\n\n"));
+
+ Assert.assertEquals(0, lines.size());
+ }
+
+
+ @Test
+ public void testLexerComplex() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ ",\"ab\"\"\nc\",,de\n"
+ + "fg,\n"
+ + "\n"
+ + ",\n"
+ + "hijk"));
+
+ Assert.assertEquals(4, lines.size());
+ assertLineEquals(lines.get(0), 2, "", "ab\"\nc", "", "de");
+ assertLineEquals(lines.get(1), 3, "fg", "");
+ assertLineEquals(lines.get(2), 5, "", "");
+ assertLineEquals(lines.get(3), 6, "hijk");
+ }
+
+ @Test
+ public void testEndInsideQuoted() throws Exception {
+ try {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "\"asd"));
+ throw new RuntimeException("Didn't throw ParseException");
+ } catch (CsvParser.ParseException ex) {
+ System.out.println("Caught: " + ex);
+ }
+ }
+
+ @Test
+ public void testCharacterAfterQuotedField() throws Exception {
+ try {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "\"\"a"));
+ throw new RuntimeException("Didn't throw ParseException");
+ } catch (CsvParser.ParseException ex) {
+ System.out.println("Caught: " + ex);
+ }
+ }
+}
+
diff --git a/tools/product_config/test/com/android/build/config/OptionsTest.java b/tools/product_config/test/com/android/build/config/OptionsTest.java
index 2c36322..459efa5 100644
--- a/tools/product_config/test/com/android/build/config/OptionsTest.java
+++ b/tools/product_config/test/com/android/build/config/OptionsTest.java
@@ -19,12 +19,24 @@
import org.junit.Assert;
import org.junit.Test;
+import java.util.HashMap;
+
public class OptionsTest {
+
+ private Options parse(Errors errors, String[] args) {
+ final HashMap<String, String> env = new HashMap();
+ env.put("TARGET_PRODUCT", "test_product");
+ env.put("TARGET_BUILD_VARIANT", "user");
+ final Options.Parser parser = new Options.Parser(errors, args, env);
+ parser.setSkipRequiredArgValidation();
+ return parser.parse();
+ }
+
@Test
public void testErrorMissingLast() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error"
});
@@ -37,7 +49,7 @@
public void testErrorMissingNotLast() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "--warning", "2"
});
@@ -50,7 +62,7 @@
public void testErrorNotNumeric() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "notgood"
});
@@ -63,7 +75,7 @@
public void testErrorInvalidError() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "50000"
});
@@ -76,7 +88,7 @@
public void testErrorOne() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "2"
});
@@ -89,7 +101,7 @@
public void testWarningOne() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--warning", "2"
});
@@ -102,7 +114,7 @@
public void testHideOne() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--hide", "2"
});
@@ -110,5 +122,16 @@
Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
Assert.assertFalse(errors.hadWarningOrError());
}
+
+ @Test
+ public void testEnv() {
+ final Errors errors = new Errors();
+
+ final Options options = parse(errors, new String[0]);
+
+ Assert.assertEquals("test_product", options.getProduct());
+ Assert.assertEquals("user", options.getVariant());
+ Assert.assertFalse(errors.hadWarningOrError());
+ }
}
diff --git a/tools/product_config/test/com/android/build/config/PositionTest.java b/tools/product_config/test/com/android/build/config/PositionTest.java
new file mode 100644
index 0000000..82b5dd4
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/PositionTest.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashMap;
+
+public class PositionTest {
+
+ @Test
+ public void testParseEmpty() {
+ final Position pos = Position.parse("");
+
+ Assert.assertEquals(null, pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+ @Test
+ public void testParseOnlyFile() {
+ final Position pos = Position.parse("asdf");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+ @Test
+ public void testParseBoth() {
+ final Position pos = Position.parse("asdf:1");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(1, pos.getLine());
+ }
+
+ @Test
+ public void testParseEndsWithColon() {
+ final Position pos = Position.parse("asdf:");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+ @Test
+ public void testParseEndsWithSpace() {
+ final Position pos = Position.parse("asdf: ");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+
+}
+
diff --git a/tools/product_config/test/com/android/build/config/TestRunner.java b/tools/product_config/test/com/android/build/config/TestRunner.java
index 9a5ee69..546518f 100644
--- a/tools/product_config/test/com/android/build/config/TestRunner.java
+++ b/tools/product_config/test/com/android/build/config/TestRunner.java
@@ -39,8 +39,10 @@
System.out.println(failure.getTrace());
}
});
- Result result = junit.run(ErrorReporterTest.class,
- OptionsTest.class);
+ Result result = junit.run(CsvParserTest.class,
+ ErrorReporterTest.class,
+ OptionsTest.class,
+ PositionTest.class);
if (!result.wasSuccessful()) {
System.out.println("\n*** FAILED ***");
}
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 81528ae..6d88249 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -451,6 +451,7 @@
required: [
"checkvintf",
"host_init_verifier",
+ "secilc",
],
target: {
darwin: {
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index da189f3..3ede4c5 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1691,6 +1691,11 @@
cmd.extend(["--vendor_ramdisk", ramdisk_img.name])
cmd.extend(["--vendor_boot", img.name])
+ fn = os.path.join(sourcedir, "vendor_bootconfig")
+ if os.access(fn, os.F_OK):
+ cmd.append("--vendor_bootconfig")
+ cmd.append(fn)
+
ramdisk_fragment_imgs = []
fn = os.path.join(sourcedir, "vendor_ramdisk_fragments")
if os.access(fn, os.F_OK):
@@ -1934,12 +1939,13 @@
# filename listed in system.map may contain an additional leading slash
# (i.e. "//system/framework/am.jar"). Using lstrip to get consistent
# results.
- arcname = entry.replace(which, which.upper(), 1).lstrip('/')
-
- # Special handling another case, where files not under /system
+ # And handle another special case, where files not under /system
# (e.g. "/sbin/charger") are packed under ROOT/ in a target_files.zip.
- if which == 'system' and not arcname.startswith('SYSTEM'):
+ arcname = entry.lstrip('/')
+ if which == 'system' and not arcname.startswith('system'):
arcname = 'ROOT/' + arcname
+ else:
+ arcname = arcname.replace(which, which.upper(), 1)
assert arcname in input_zip.namelist(), \
"Failed to find the ZIP entry for {}".format(entry)
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 3d9c717..16cab4f 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -93,6 +93,7 @@
import subprocess
import sys
import zipfile
+from xml.etree import ElementTree
import add_img_to_target_files
import build_super_image
@@ -658,6 +659,80 @@
os.path.join(output_target_files_dir, 'META', 'vendor_file_contexts.bin'))
+def compile_split_sepolicy(product_out, partition_map, output_policy):
+ """Uses secilc to compile a split sepolicy file.
+
+ Depends on various */etc/selinux/* and */etc/vintf/* files within partitions.
+
+ Args:
+ product_out: PRODUCT_OUT directory, containing partition directories.
+ partition_map: A map of partition name -> relative path within product_out.
+ output_policy: The name of the output policy created by secilc.
+
+ Returns:
+ A command list that can be executed to create the compiled sepolicy.
+ """
+
+ def get_file(partition, path):
+ if partition not in partition_map:
+ logger.warning('Cannot load SEPolicy files for missing partition %s',
+ partition)
+ return None
+ return os.path.join(product_out, partition_map[partition], path)
+
+ # Load the kernel sepolicy version from the FCM. This is normally provided
+ # directly to selinux.cpp as a build flag, but is also available in this file.
+ fcm_file = get_file('system', 'etc/vintf/compatibility_matrix.device.xml')
+ if not fcm_file or not os.path.exists(fcm_file):
+ raise ExternalError('Missing required file for loading sepolicy: %s', fcm)
+ kernel_sepolicy_version = ElementTree.parse(fcm_file).getroot().find(
+ 'sepolicy/kernel-sepolicy-version').text
+
+ # Load the vendor's plat sepolicy version. This is the version used for
+ # locating sepolicy mapping files.
+ vendor_plat_version_file = get_file('vendor',
+ 'etc/selinux/plat_sepolicy_vers.txt')
+ if not vendor_plat_version_file or not os.path.exists(
+ vendor_plat_version_file):
+ raise ExternalError('Missing required sepolicy file %s',
+ vendor_plat_version_file)
+ with open(vendor_plat_version_file) as f:
+ vendor_plat_version = f.read().strip()
+
+ # Use the same flags and arguments as selinux.cpp OpenSplitPolicy().
+ cmd = ['secilc', '-m', '-M', 'true', '-G', '-N']
+ cmd.extend(['-c', kernel_sepolicy_version])
+ cmd.extend(['-o', output_policy])
+ cmd.extend(['-f', '/dev/null'])
+
+ required_policy_files = (
+ ('system', 'etc/selinux/plat_sepolicy.cil'),
+ ('system', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+ ('vendor', 'etc/selinux/vendor_sepolicy.cil'),
+ ('vendor', 'etc/selinux/plat_pub_versioned.cil'),
+ )
+ for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+ required_policy_files)):
+ if not policy or not os.path.exists(policy):
+ raise ExternalError('Missing required sepolicy file %s', policy)
+ cmd.append(policy)
+
+ optional_policy_files = (
+ ('system', 'etc/selinux/mapping/%s.compat.cil' % vendor_plat_version),
+ ('system_ext', 'etc/selinux/system_ext_sepolicy.cil'),
+ ('system_ext', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+ ('product', 'etc/selinux/product_sepolicy.cil'),
+ ('product', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+ ('odm', 'etc/selinux/odm_sepolicy.cil'),
+ )
+ for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+ optional_policy_files)):
+ if policy and os.path.exists(policy):
+ cmd.append(policy)
+
+ return cmd
+
+
def process_special_cases(framework_target_files_temp_dir,
vendor_target_files_temp_dir,
output_target_files_temp_dir,
@@ -977,17 +1052,28 @@
raise ValueError('sharedUserId APK error. See %s' %
shareduid_violation_modules)
- # Run host_init_verifier on the combined init rc files.
+ # host_init_verifier and secilc check only the following partitions:
filtered_partitions = {
partition: path
for partition, path in partition_map.items()
- # host_init_verifier checks only the following partitions:
if partition in ['system', 'system_ext', 'product', 'vendor', 'odm']
}
+
+ # Run host_init_verifier on the combined init rc files.
common.RunHostInitVerifier(
product_out=output_target_files_temp_dir,
partition_map=filtered_partitions)
+ # Check that the split sepolicy from the multiple builds can compile.
+ split_sepolicy_cmd = compile_split_sepolicy(
+ product_out=output_target_files_temp_dir,
+ partition_map=filtered_partitions,
+ output_policy=os.path.join(output_target_files_temp_dir,
+ 'META/combined.policy'))
+ logger.info('Compiling split sepolicy: %s', ' '.join(split_sepolicy_cmd))
+ common.RunAndCheckOutput(split_sepolicy_cmd)
+ # TODO(b/178864050): Run tests on the combined.policy file.
+
generate_images(output_target_files_temp_dir, rebuild_recovery)
generate_super_empty_image(output_target_files_temp_dir, output_super_empty)
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 2cbaf37..3dcabd5 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -230,7 +230,7 @@
import common
import ota_utils
from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
- PropertyFiles)
+ PropertyFiles, SECURITY_PATCH_LEVEL_PROP_NAME)
import target_files_diff
from check_target_files_vintf import CheckVintfIfTrebleEnabled
from non_ab_ota import GenerateNonAbOtaPackage
@@ -292,7 +292,7 @@
'system_ext', 'vbmeta', 'vbmeta_system', 'vbmeta_vendor', 'vendor',
'vendor_boot']
-SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
+
class PayloadSigner(object):
@@ -1418,14 +1418,20 @@
target_build_prop = OPTIONS.target_info_dict["build.prop"]
source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
- if target_spl < source_spl and not OPTIONS.spl_downgrade:
+ is_spl_downgrade = target_spl < source_spl
+ if is_spl_downgrade and not OPTIONS.spl_downgrade:
raise common.ExternalError(
"Target security patch level {} is older than source SPL {} applying "
- "such OTA will likely cause device fail to boot. Pass --spl-downgrade "
+ "such OTA will likely cause device fail to boot. Pass --spl_downgrade "
"to override this check. This script expects security patch level to "
"be in format yyyy-mm-dd (e.x. 2021-02-05). It's possible to use "
"separators other than -, so as long as it's used consistenly across "
"all SPL dates".format(target_spl, source_spl))
+ elif not is_spl_downgrade and OPTIONS.spl_downgrade:
+ raise ValueError("--spl_downgrade specified but no actual SPL downgrade"
+ " detected. Please only pass in this flag if you want a"
+ " SPL downgrade. Target SPL: {} Source SPL: {}"
+ .format(target_spl, source_spl))
if generate_ab:
GenerateAbOtaPackage(
target_file=args[0],
diff --git a/tools/releasetools/ota_metadata.proto b/tools/releasetools/ota_metadata.proto
index 5da8b84..7aaca6f 100644
--- a/tools/releasetools/ota_metadata.proto
+++ b/tools/releasetools/ota_metadata.proto
@@ -105,4 +105,7 @@
bool retrofit_dynamic_partitions = 7;
// The required size of the cache partition, only valid for non-A/B update.
int64 required_cache = 8;
+
+ // True iff security patch level downgrade is permitted on this OTA.
+ bool spl_downgrade = 9;
}
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
index 27cc930..2552464 100644
--- a/tools/releasetools/ota_metadata_pb2.py
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: ota_metadata.proto
-"""Generated protocol buffer code."""
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -17,9 +19,8 @@
name='ota_metadata.proto',
package='build.tools.releasetools',
syntax='proto3',
- serialized_options=b'H\003',
- create_key=_descriptor._internal_create_key,
- serialized_pb=b'\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\x98\x04\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x35\n\tapex_info\x18\t \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3'
+ serialized_options=_b('H\003'),
+ serialized_pb=_b('\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\xf8\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x15\n\rspl_downgrade\x18\t \x01(\x08\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3')
)
@@ -29,33 +30,28 @@
full_name='build.tools.releasetools.OtaMetadata.OtaType',
filename=None,
file=DESCRIPTOR,
- create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
_descriptor.EnumValueDescriptor(
name='AB', index=1, number=1,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
_descriptor.EnumValueDescriptor(
name='BLOCK', index=2, number=2,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
_descriptor.EnumValueDescriptor(
name='BRICK', index=3, number=3,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
],
containing_type=None,
serialized_options=None,
- serialized_start=1004,
- serialized_end=1056,
+ serialized_start=972,
+ serialized_end=1024,
)
_sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
@@ -66,36 +62,35 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='partition_name', full_name='build.tools.releasetools.PartitionState.partition_name', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='device', full_name='build.tools.releasetools.PartitionState.device', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build', full_name='build.tools.releasetools.PartitionState.build', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='build.tools.releasetools.PartitionState.version', index=3,
number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -119,7 +114,6 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='device', full_name='build.tools.releasetools.DeviceState.device', index=0,
@@ -127,49 +121,49 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build', full_name='build.tools.releasetools.DeviceState.build', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build_incremental', full_name='build.tools.releasetools.DeviceState.build_incremental', index=2,
number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='build.tools.releasetools.DeviceState.timestamp', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sdk_level', full_name='build.tools.releasetools.DeviceState.sdk_level', index=4,
number=5, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_patch_level', full_name='build.tools.releasetools.DeviceState.security_patch_level', index=5,
number=6, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='partition_state', full_name='build.tools.releasetools.DeviceState.partition_state', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -193,36 +187,35 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='package_name', full_name='build.tools.releasetools.ApexInfo.package_name', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='build.tools.releasetools.ApexInfo.version', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_compressed', full_name='build.tools.releasetools.ApexInfo.is_compressed', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='decompressed_size', full_name='build.tools.releasetools.ApexInfo.decompressed_size', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -246,7 +239,6 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='apex_info', full_name='build.tools.releasetools.ApexMetadata.apex_info', index=0,
@@ -254,7 +246,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -278,36 +270,35 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
- serialized_options=b'8\001',
+ serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
- serialized_start=950,
- serialized_end=1002,
+ serialized_start=918,
+ serialized_end=970,
)
_OTAMETADATA = _descriptor.Descriptor(
@@ -316,7 +307,6 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='build.tools.releasetools.OtaMetadata.type', index=0,
@@ -324,63 +314,63 @@
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='wipe', full_name='build.tools.releasetools.OtaMetadata.wipe', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='downgrade', full_name='build.tools.releasetools.OtaMetadata.downgrade', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='property_files', full_name='build.tools.releasetools.OtaMetadata.property_files', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='precondition', full_name='build.tools.releasetools.OtaMetadata.precondition', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='postcondition', full_name='build.tools.releasetools.OtaMetadata.postcondition', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='retrofit_dynamic_partitions', full_name='build.tools.releasetools.OtaMetadata.retrofit_dynamic_partitions', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='required_cache', full_name='build.tools.releasetools.OtaMetadata.required_cache', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='apex_info', full_name='build.tools.releasetools.OtaMetadata.apex_info', index=8,
- number=9, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
+ name='spl_downgrade', full_name='build.tools.releasetools.OtaMetadata.spl_downgrade', index=8,
+ number=9, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -395,7 +385,7 @@
oneofs=[
],
serialized_start=520,
- serialized_end=1056,
+ serialized_end=1024,
)
_DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
@@ -405,7 +395,6 @@
_OTAMETADATA.fields_by_name['property_files'].message_type = _OTAMETADATA_PROPERTYFILESENTRY
_OTAMETADATA.fields_by_name['precondition'].message_type = _DEVICESTATE
_OTAMETADATA.fields_by_name['postcondition'].message_type = _DEVICESTATE
-_OTAMETADATA.fields_by_name['apex_info'].message_type = _APEXINFO
_OTAMETADATA_OTATYPE.containing_type = _OTAMETADATA
DESCRIPTOR.message_types_by_name['PartitionState'] = _PARTITIONSTATE
DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 6bbcc92..104f02f 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -39,6 +39,8 @@
METADATA_NAME = 'META-INF/com/android/metadata'
METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
+SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
+
def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
"""Finalizes the metadata and signs an A/B OTA package.
@@ -168,7 +170,7 @@
build_info_set = ComputeRuntimeBuildInfos(build_info,
boot_variable_values)
assert "ab_partitions" in build_info.info_dict,\
- "ab_partitions property required for ab update."
+ "ab_partitions property required for ab update."
ab_partitions = set(build_info.info_dict.get("ab_partitions"))
# delta_generator will error out on unused timestamps,
@@ -317,6 +319,8 @@
metadata_dict['pre-build'] = separator.join(pre_build.build)
metadata_dict['pre-build-incremental'] = pre_build.build_incremental
+ if metadata_proto.spl_downgrade:
+ metadata_dict['spl-downgrade'] = 'yes'
metadata_dict.update(metadata_proto.property_files)
return metadata_dict
@@ -330,6 +334,9 @@
pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
is_downgrade = int(post_timestamp) < int(pre_timestamp)
+ if OPTIONS.spl_downgrade:
+ metadata_proto.spl_downgrade = True
+
if OPTIONS.downgrade:
if not is_downgrade:
raise RuntimeError(
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 890cb51..05a085b 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -329,9 +329,8 @@
"""
unknown_files = []
for info in input_tf_zip.infolist():
- # Handle APEXes first, e.g. SYSTEM/apex/com.android.tzdata.apex.
- if (info.filename.startswith('SYSTEM/apex') and
- info.filename.endswith('.apex')):
+ # Handle APEXes on all partitions
+ if info.filename.endswith('.apex'):
name = os.path.basename(info.filename)
if name not in known_keys:
unknown_files.append(name)
@@ -363,8 +362,7 @@
invalid_apexes = []
for info in input_tf_zip.infolist():
- if (not info.filename.startswith('SYSTEM/apex') or
- not info.filename.endswith('.apex')):
+ if not info.filename.endswith('.apex'):
continue
name = os.path.basename(info.filename)
@@ -444,6 +442,7 @@
return data
+
def IsBuildPropFile(filename):
return filename in (
"SYSTEM/etc/prop.default",
@@ -462,6 +461,7 @@
# path here for clarity.
"RECOVERY/RAMDISK/default.prop") or filename.endswith("build.prop")
+
def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
apk_keys, apex_keys, key_passwords,
platform_api_level, codename_to_api_level_map,
@@ -514,8 +514,8 @@
" (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
- # Sign bundled APEX files.
- elif filename.startswith("SYSTEM/apex") and filename.endswith(".apex"):
+ # Sign bundled APEX files on all partitions
+ elif filename.endswith(".apex"):
name = os.path.basename(filename)
payload_key, container_key = apex_keys[name]
@@ -545,11 +545,6 @@
" (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
- # AVB public keys for the installed APEXes, which will be updated later.
- elif (os.path.dirname(filename) == 'SYSTEM/etc/security/apex' and
- filename != 'SYSTEM/etc/security/apex/'):
- continue
-
# System properties.
elif IsBuildPropFile(filename):
print("Rewriting %s:" % (filename,))
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index 7ea7f96..072bb01 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -18,12 +18,11 @@
import common
import test_utils
-from merge_target_files import (validate_config_lists,
- DEFAULT_FRAMEWORK_ITEM_LIST,
- DEFAULT_VENDOR_ITEM_LIST,
- DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
- item_list_to_partition_set,
- process_apex_keys_apk_certs_common)
+from merge_target_files import (
+ validate_config_lists, DEFAULT_FRAMEWORK_ITEM_LIST,
+ DEFAULT_VENDOR_ITEM_LIST, DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
+ item_list_to_partition_set, process_apex_keys_apk_certs_common,
+ compile_split_sepolicy)
class MergeTargetFilesTest(test_utils.ReleaseToolsTestCase):
@@ -235,3 +234,43 @@
]
partition_set = item_list_to_partition_set(item_list)
self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
+
+ def test_compile_split_sepolicy(self):
+ product_out_dir = common.MakeTempDir()
+
+ def write_temp_file(path, data=''):
+ full_path = os.path.join(product_out_dir, path)
+ if not os.path.exists(os.path.dirname(full_path)):
+ os.makedirs(os.path.dirname(full_path))
+ with open(full_path, 'w') as f:
+ f.write(data)
+
+ write_temp_file(
+ 'system/etc/vintf/compatibility_matrix.device.xml', """
+ <compatibility-matrix>
+ <sepolicy>
+ <kernel-sepolicy-version>30</kernel-sepolicy-version>
+ </sepolicy>
+ </compatibility-matrix>""")
+ write_temp_file('vendor/etc/selinux/plat_sepolicy_vers.txt', '30.0')
+
+ write_temp_file('system/etc/selinux/plat_sepolicy.cil')
+ write_temp_file('system/etc/selinux/mapping/30.0.cil')
+ write_temp_file('product/etc/selinux/mapping/30.0.cil')
+ write_temp_file('vendor/etc/selinux/vendor_sepolicy.cil')
+ write_temp_file('vendor/etc/selinux/plat_pub_versioned.cil')
+
+ cmd = compile_split_sepolicy(product_out_dir, {
+ 'system': 'system',
+ 'product': 'product',
+ 'vendor': 'vendor',
+ }, os.path.join(product_out_dir, 'policy'))
+ self.assertEqual(' '.join(cmd),
+ ('secilc -m -M true -G -N -c 30 '
+ '-o {OTP}/policy -f /dev/null '
+ '{OTP}/system/etc/selinux/plat_sepolicy.cil '
+ '{OTP}/system/etc/selinux/mapping/30.0.cil '
+ '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
+ '{OTP}/vendor/etc/selinux/plat_pub_versioned.cil '
+ '{OTP}/product/etc/selinux/mapping/30.0.cil').format(
+ OTP=product_out_dir))
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 8266908..9f64849 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -290,11 +290,11 @@
self.assertEqual(apex_infos[0].is_compressed, True)
# Compare the decompressed APEX size with the original uncompressed APEX
original_apex_name = 'com.android.apex.compressed.v1_original.apex'
- original_apex_filepath = os.path.join(test_utils.get_current_dir(), original_apex_name)
+ original_apex_filepath = os.path.join(
+ test_utils.get_current_dir(), original_apex_name)
uncompressed_apex_size = os.path.getsize(original_apex_filepath)
self.assertEqual(apex_infos[0].decompressed_size, uncompressed_apex_size)
-
def test_GetPackageMetadata_retrofitDynamicPartitions(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.retrofit_dynamic_partitions = True
@@ -343,7 +343,10 @@
common.OPTIONS.incremental_source = ''
common.OPTIONS.downgrade = True
common.OPTIONS.wipe_user_data = True
+ common.OPTIONS.spl_downgrade = True
metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+ # Reset spl_downgrade so other tests are unaffected
+ common.OPTIONS.spl_downgrade = False
self.assertDictEqual(
{
@@ -359,6 +362,7 @@
'pre-device': 'product-device',
'pre-build': 'build-fingerprint-source',
'pre-build-incremental': 'build-version-incremental-source',
+ 'spl-downgrade': 'yes',
},
metadata)