Merge "support multiple kernels, boot.img output"
diff --git a/Deprecation.md b/Deprecation.md
index 5e26492..2f936e3 100644
--- a/Deprecation.md
+++ b/Deprecation.md
@@ -16,13 +16,13 @@
| -------------------------------- | --------- |
| `BUILD_AUX_EXECUTABLE` | Error |
| `BUILD_AUX_STATIC_LIBRARY` | Error |
-| `BUILD_COPY_HEADERS` | Warning |
-| `BUILD_HOST_EXECUTABLE` | Warning |
+| `BUILD_COPY_HEADERS` | Error |
+| `BUILD_HOST_EXECUTABLE` | Error |
| `BUILD_HOST_FUZZ_TEST` | Error |
| `BUILD_HOST_NATIVE_TEST` | Error |
-| `BUILD_HOST_SHARED_LIBRARY` | Warning |
+| `BUILD_HOST_SHARED_LIBRARY` | Error |
| `BUILD_HOST_SHARED_TEST_LIBRARY` | Error |
-| `BUILD_HOST_STATIC_LIBRARY` | Warning |
+| `BUILD_HOST_STATIC_LIBRARY` | Error |
| `BUILD_HOST_STATIC_TEST_LIBRARY` | Error |
| `BUILD_HOST_TEST_CONFIG` | Error |
| `BUILD_NATIVE_BENCHMARK` | Error |
diff --git a/OWNERS b/OWNERS
index e89a6a1..05f8b3d 100644
--- a/OWNERS
+++ b/OWNERS
@@ -8,4 +8,4 @@
hansson@google.com
# For version updates
-per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com
+per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
diff --git a/core/Makefile b/core/Makefile
index 57fd097..e47d423 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -501,6 +501,12 @@
INSTALLED_VENDOR_BUILD_PROP_TARGET := $(TARGET_OUT_VENDOR)/build.prop
ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_VENDOR_BUILD_PROP_TARGET)
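+# Collect vendor property files: use TARGET_VENDOR_PROP if the board defines it,
+# otherwise pick up an optional vendor.prop from the device directory.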
+ifdef TARGET_VENDOR_PROP
+vendor_prop_files := $(TARGET_VENDOR_PROP)
+else
+vendor_prop_files := $(wildcard $(TARGET_DEVICE_DIR)/vendor.prop)
+endif
+
ifdef property_overrides_split_enabled
FINAL_VENDOR_BUILD_PROPERTIES += \
$(call collapse-pairs, $(PRODUCT_PROPERTY_OVERRIDES))
@@ -508,7 +514,7 @@
$(FINAL_VENDOR_BUILD_PROPERTIES),=)
endif # property_overrides_split_enabled
-$(INSTALLED_VENDOR_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS) $(intermediate_system_build_prop)
+$(INSTALLED_VENDOR_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS) $(intermediate_system_build_prop) $(vendor_prop_files)
@echo Target vendor buildinfo: $@
@mkdir -p $(dir $@)
$(hide) echo > $@
@@ -544,6 +550,16 @@
echo "#" >> $@;
$(hide) cat $(INSTALLED_ANDROID_INFO_TXT_TARGET) | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' >> $@
ifdef property_overrides_split_enabled
+ $(hide) $(foreach file,$(vendor_prop_files), \
+ if [ -f "$(file)" ]; then \
+ echo Target vendor properties from: "$(file)"; \
+ echo "" >> $@; \
+ echo "#" >> $@; \
+ echo "# from $(file)" >> $@; \
+ echo "#" >> $@; \
+ cat $(file) >> $@; \
+ echo "# end of $(file)" >> $@; \
+ fi;)
$(hide) $(foreach line,$(FINAL_VENDOR_BUILD_PROPERTIES), \
echo "$(line)" >> $@;)
endif # property_overrides_split_enabled
@@ -595,12 +611,18 @@
INSTALLED_ODM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM)/etc/build.prop
ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_ODM_BUILD_PROP_TARGET)
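+# Collect ODM property files: use TARGET_ODM_PROP if the board defines it,
+# otherwise pick up an optional odm.prop from the device directory.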
+ifdef TARGET_ODM_PROP
+odm_prop_files := $(TARGET_ODM_PROP)
+else
+odm_prop_files := $(wildcard $(TARGET_DEVICE_DIR)/odm.prop)
+endif
+
FINAL_ODM_BUILD_PROPERTIES += \
$(call collapse-pairs, $(PRODUCT_ODM_PROPERTIES))
FINAL_ODM_BUILD_PROPERTIES := $(call uniq-pairs-by-first-component, \
$(FINAL_ODM_BUILD_PROPERTIES),=)
-$(INSTALLED_ODM_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS)
+$(INSTALLED_ODM_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS) $(odm_prop_files)
@echo Target odm buildinfo: $@
@mkdir -p $(dir $@)
$(hide) echo > $@
@@ -608,6 +630,16 @@
$(hide) echo ro.odm.product.cpu.abilist32="$(TARGET_CPU_ABI_LIST_32_BIT)">>$@
$(hide) echo ro.odm.product.cpu.abilist64="$(TARGET_CPU_ABI_LIST_64_BIT)">>$@
$(hide) $(call generate-common-build-props,odm,$@)
+ $(hide) $(foreach file,$(odm_prop_files), \
+ if [ -f "$(file)" ]; then \
+ echo Target odm properties from: "$(file)"; \
+ echo "" >> $@; \
+ echo "#" >> $@; \
+ echo "# from $(file)" >> $@; \
+ echo "#" >> $@; \
+ cat $(file) >> $@; \
+ echo "# end of $(file)" >> $@; \
+ fi;)
$(hide) echo "#" >> $@; \
echo "# ADDITIONAL ODM BUILD PROPERTIES" >> $@; \
echo "#" >> $@;
@@ -734,7 +766,6 @@
$(if $(6),\
unzip -qo -d $$(PRIVATE_MODULE_DIR) $$(PRIVATE_MODULE_ARCHIVE); \
mkdir -p $$(PRIVATE_OUTPUT_DIR)/lib; \
- rm -rf $$(PRIVATE_OUTPUT_DIR)/lib/modules; \
cp -r $$(PRIVATE_MODULE_DIR) $$(PRIVATE_OUTPUT_DIR)/lib/; \
find $$(PRIVATE_MODULE_DIR) -type f -name *.ko | xargs basename -a > $$(PRIVATE_LOAD_FILE); \
)
@@ -1293,6 +1324,7 @@
# $(5) - Directory to use. Notice files are all $(5)/src. Other
# directories in there will be used for scratch
# $(6) - Dependencies for the output files
+# $(7) - Directories to exclude
#
# The algorithm here is that we go collect a hash for each of the notice
# files and write the names of the files that match that hash. Then
@@ -1310,7 +1342,7 @@
$(2) $(3): PRIVATE_DIR := $(5)
$(2) : $(3)
$(3) : $(6) $(BUILD_SYSTEM)/Makefile build/make/tools/generate-notice-files.py
- build/make/tools/generate-notice-files.py --text-output $(2) \
+ build/make/tools/generate-notice-files.py --text-output $(2) $(foreach xdir, $(7), -e $(xdir) )\
$(if $(filter $(1),xml_excluded_vendor_product_odm),-e vendor -e product -e system_ext -e odm --xml-output, \
$(if $(filter $(1),xml_excluded_system_product_odm),-e system -e product -e system_ext -e odm --xml-output, \
$(if $(filter $(1),xml_product),-i product --xml-output, \
@@ -1335,6 +1367,11 @@
winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
pdk_fusion_notice_files := $(filter $(TARGET_OUT_NOTICE_FILES)/%, $(ALL_PDK_FUSION_FILES))
+# Some targets get included under $(PRODUCT_OUT) for debug symbols or other
+# reasons--not to be flashed onto any device. Targets under these directories
+# need no associated notice file on the device UI.
+exclude_target_dirs := apex
+
# TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior.
ifneq ($(PRODUCT_NOTICE_SPLIT),true)
target_notice_file_html := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html
@@ -1345,7 +1382,8 @@
$(target_notice_file_html), \
"Notices for files contained in the filesystem images in this directory:", \
$(TARGET_OUT_NOTICE_FILES), \
- $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)))
+ $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files), \
+ $(exclude_target_dirs)))
$(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
$(hide) $(MINIGZIP) -9 < $< > $@
$(installed_notice_html_or_xml_gz): $(target_notice_file_html_gz)
@@ -1379,10 +1417,13 @@
# being built. A notice xml file must depend on all modules that could potentially
# install a license file relevant to it.
license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)
+# Only files copied to a system image need system image notices.
+license_modules := $(filter $(PRODUCT_OUT)/%,$(license_modules))
# Phonys/fakes don't have notice files (though their deps might)
license_modules := $(filter-out $(TARGET_OUT_FAKE)/%,$(license_modules))
# testcases are not relevant to the system image.
license_modules := $(filter-out $(TARGET_OUT_TESTCASES)/%,$(license_modules))
+# filesystem images: system, vendor, product, system_ext, and odm
license_modules_system := $(filter $(TARGET_OUT)/%,$(license_modules))
license_modules_vendor := $(filter $(TARGET_OUT_VENDOR)/%,$(license_modules))
license_modules_product := $(filter $(TARGET_OUT_PRODUCT)/%,$(license_modules))
@@ -1393,16 +1434,44 @@
$(license_modules_product) \
$(license_modules_system_ext) \
$(license_modules_odm)
+# targets used only for debug symbols; they do not get copied to the device
+license_modules_symbols_only := $(filter $(PRODUCT_OUT)/apex/%,$(license_modules))
+
license_modules_rest := $(filter-out $(license_modules_agg),$(license_modules))
+license_modules_rest := $(filter-out $(license_modules_symbols_only),$(license_modules_rest))
+
+# Identify the other targets we expect to have notices for:
+# targets that are copied to the device but are not readable by the UI (e.g. they
+# require booting into a different partition to read, or have no associated /etc
+# directory) must have their notices built somewhere readable.
+license_modules_rehomed := $(filter-out $(PRODUCT_OUT)/%/%,$(license_modules_rest)) # files in root have no /etc
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/recovery/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/root/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/data/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/debug_ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/vendor-ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist.img,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/system_other/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/kernel%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/%.img,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/%.bin,$(license_modules_rest))
+
+# After removing targets in filesystem images, targets whose notices are rehomed to
+# those images, and debug-symbol-only targets that need no notices, nothing must remain.
+license_modules_rest := $(filter-out $(license_modules_rehomed),$(license_modules_rest))
+$(call maybe-print-list-and-error, $(license_modules_rest), \
+ "Targets added under $(PRODUCT_OUT)/ unaccounted for notice handling.")
# If we are building in a configuration that includes a prebuilt vendor.img, we can't
# update its notice file, so include those notices in the system partition instead
ifdef BOARD_PREBUILT_VENDORIMAGE
-license_modules_system += $(license_modules_rest)
+license_modules_system += $(license_modules_rehomed)
system_xml_directories := xml_excluded_vendor_product_odm
system_notice_file_message := "Notices for files contained in all filesystem images except vendor/system_ext/product/odm in this directory:"
else
-license_modules_vendor += $(license_modules_rest)
+license_modules_vendor += $(license_modules_rehomed)
system_xml_directories := xml_system
system_notice_file_message := "Notices for files contained in the system filesystem image in this directory:"
endif
@@ -1412,31 +1481,36 @@
$(target_notice_file_xml), \
$(system_notice_file_message), \
$(TARGET_OUT_NOTICE_FILES), \
- $(license_modules_system)))
+ $(license_modules_system), \
+ $(exclude_target_dirs)))
$(eval $(call combine-notice-files, xml_excluded_system_product_odm, \
$(target_vendor_notice_file_txt), \
$(target_vendor_notice_file_xml), \
"Notices for files contained in all filesystem images except system/system_ext/product/odm in this directory:", \
$(TARGET_OUT_NOTICE_FILES), \
- $(license_modules_vendor)))
+ $(license_modules_vendor), \
+ $(exclude_target_dirs)))
$(eval $(call combine-notice-files, xml_product, \
$(target_product_notice_file_txt), \
$(target_product_notice_file_xml), \
"Notices for files contained in the product filesystem image in this directory:", \
$(TARGET_OUT_NOTICE_FILES), \
- $(license_modules_product)))
+ $(license_modules_product), \
+ $(exclude_target_dirs)))
$(eval $(call combine-notice-files, xml_system_ext, \
$(target_system_ext_notice_file_txt), \
$(target_system_ext_notice_file_xml), \
"Notices for files contained in the system_ext filesystem image in this directory:", \
$(TARGET_OUT_NOTICE_FILES), \
- $(license_modules_system_ext)))
+ $(license_modules_system_ext), \
+ $(exclude_target_dirs)))
$(eval $(call combine-notice-files, xml_odm, \
$(target_odm_notice_file_txt), \
$(target_odm_notice_file_xml), \
"Notices for files contained in the odm filesystem image in this directory:", \
$(TARGET_OUT_NOTICE_FILES), \
- $(license_modules_odm)))
+ $(license_modules_odm), \
+ $(exclude_target_dirs)))
$(target_notice_file_xml_gz): $(target_notice_file_xml) | $(MINIGZIP)
$(hide) $(MINIGZIP) -9 < $< > $@
@@ -1474,7 +1548,8 @@
"Notices for files contained in the tools directory:", \
$(HOST_OUT_NOTICE_FILES), \
$(ALL_DEFAULT_INSTALLED_MODULES) \
- $(winpthreads_notice_file)))
+ $(winpthreads_notice_file), \
+ $(exclude_target_dirs)))
endif # TARGET_BUILD_APPS
@@ -2349,6 +2424,81 @@
endif # TARGET_NO_KERNEL
+ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
+ifeq ($(BUILDING_RAMDISK_IMAGE),true)
+# -----------------------------------------------------------------
+# vendor debug ramdisk
+# Combines vendor ramdisk files and debug ramdisk files to build the vendor debug ramdisk.
+INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor-ramdisk-debug.cpio.gz
+$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_FILES := $(INTERNAL_DEBUG_RAMDISK_FILES)
+$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_RAMDISK_DIR := $(TARGET_VENDOR_RAMDISK_OUT)
+
+INTERNAL_VENDOR_DEBUG_RAMDISK_FILES := $(filter $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/%, \
+ $(ALL_GENERATED_SOURCES) \
+ $(ALL_DEFAULT_INSTALLED_MODULES))
+
+# Note: TARGET_VENDOR_DEBUG_RAMDISK_OUT will be $(PRODUCT_OUT)/vendor_debug_ramdisk/first_stage_ramdisk,
+# if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/vendor_debug_ramdisk.
+# But the path of $(VENDOR_DEBUG_RAMDISK_DIR) used to build the vendor debug ramdisk is always
+# $(PRODUCT_OUT)/vendor_debug_ramdisk.
+$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
+$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) | $(MINIGZIP)
+ $(call pretty,"Target vendor debug ram disk: $@")
+ mkdir -p $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)
+ touch $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/force_debuggable
+ $(foreach debug_file,$(DEBUG_RAMDISK_FILES), \
+ cp -f $(debug_file) $(subst $(PRODUCT_OUT)/debug_ramdisk,$(PRODUCT_OUT)/vendor_debug_ramdisk,$(debug_file)) &&) true
+ rsync -a $(VENDOR_RAMDISK_DIR)/ $(VENDOR_DEBUG_RAMDISK_DIR)
+ $(MKBOOTFS) -d $(TARGET_OUT) $(VENDOR_DEBUG_RAMDISK_DIR) | $(MINIGZIP) > $@
+
+INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk-debug.txt
+INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK:.txt=.json)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
+
+# The vendor debug ramdisk is built by copying files from $(TARGET_VENDOR_RAMDISK_OUT) and $(INTERNAL_DEBUG_RAMDISK_FILES),
+# so we have to wait for the vendor debug ramdisk to be built before generating the installed file list.
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
+ echo Installed file list: $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(VENDOR_DEBUG_RAMDISK_DIR) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+
+# -----------------------------------------------------------------
+# vendor_boot-debug.img.
+INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot-debug.img
+
+# The test key and avbtool arguments used to sign vendor_boot-debug.img.
+BOARD_AVB_VENDOR_BOOT_TEST_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
+INTERNAL_AVB_VENDOR_BOOT_TEST_SIGNING_ARGS := --algorithm SHA256_RSA2048 --key $(BOARD_AVB_VENDOR_BOOT_TEST_KEY_PATH)
+# $(1): the vendor bootimage to sign
+define test-key-sign-vendor-bootimage
+$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)))
+$(AVBTOOL) add_hash_footer \
+ --image $(1) \
+ --partition_size $(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE) \
+ --partition_name vendor_boot $(INTERNAL_AVB_VENDOR_BOOT_TEST_SIGNING_ARGS) \
+ $(BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS)
+$(call assert-max-image-size,$(1),$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
+endef
+
+ifneq ($(BOARD_AVB_VENDOR_BOOT_KEY_PATH),)
+$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOT_TEST_KEY_PATH)
+endif
+
+# Depends on vendor_boot.img and vendor-ramdisk-debug.cpio.gz to build the new vendor_boot-debug.img
+$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+ $(call pretty,"Target vendor_boot debug image: $@")
+ $(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) --vendor_boot $@
+ $(call assert-max-image-size,$@,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
+ $(if $(BOARD_AVB_VENDOR_BOOT_KEY_PATH),$(call test-key-sign-vendor-bootimage,$@))
+
+endif # BUILDING_RAMDISK_IMAGE
+endif # BUILDING_VENDOR_BOOT_IMAGE
+
# -----------------------------------------------------------------
# The test harness ramdisk, which is based off debug_ramdisk, plus a
# few additional test-harness-specific properties in adb_debug.prop.
@@ -2598,11 +2748,8 @@
@echo "make $@: ignoring dependencies"
$(call build-systemimage-target,$(INSTALLED_SYSTEMIMAGE_TARGET))
$(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
-
-ifneq (,$(filter systemimage-nodeps snod, $(MAKECMDGOALS)))
ifeq (true,$(WITH_DEXPREOPT))
-$(warning Warning: with dexpreopt enabled, you may need a full rebuild.)
-endif
+ $(warning Warning: with dexpreopt enabled, you may need a full rebuild.)
endif
endif # BUILDING_SYSTEM_IMAGE
@@ -3568,6 +3715,13 @@
check_vintf_has_system :=
check_vintf_has_vendor :=
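+# The EMPTY_*_SKU_PLACEHOLDER names are used below to stand in for the empty SKU case,
+# so they must not be used as real SKU names.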
+ifneq (,$(filter EMPTY_ODM_SKU_PLACEHOLDER,$(ODM_MANIFEST_SKUS)))
+$(error EMPTY_ODM_SKU_PLACEHOLDER is an internal variable and cannot be used for ODM_MANIFEST_SKUS)
+endif
+ifneq (,$(filter EMPTY_VENDOR_SKU_PLACEHOLDER,$(DEVICE_MANIFEST_SKUS)))
+$(error EMPTY_VENDOR_SKU_PLACEHOLDER is an internal variable and cannot be used for DEVICE_MANIFEST_SKUS)
+endif
+
# -- Check system manifest / matrix including fragments (excluding other framework manifests / matrices, e.g. product);
check_vintf_system_deps := $(filter $(TARGET_OUT)/etc/vintf/%, $(check_vintf_common_srcs))
ifneq ($(check_vintf_system_deps),)
@@ -3586,8 +3740,18 @@
check_vintf_has_vendor := true
check_vintf_vendor_log := $(intermediates)/check_vintf_vendor_log
check_vintf_all_deps += $(check_vintf_vendor_log)
+# Check vendor SKU=(empty) case when:
+# - DEVICE_MANIFEST_FILE is not empty; OR
+# - DEVICE_MANIFEST_FILE is empty AND DEVICE_MANIFEST_SKUS is empty (only vendor manifest fragments are used)
+$(check_vintf_vendor_log): PRIVATE_VENDOR_SKUS := \
+ $(if $(DEVICE_MANIFEST_FILE),EMPTY_VENDOR_SKU_PLACEHOLDER,\
+ $(if $(DEVICE_MANIFEST_SKUS),,EMPTY_VENDOR_SKU_PLACEHOLDER)) \
+ $(DEVICE_MANIFEST_SKUS)
$(check_vintf_vendor_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_vendor_deps)
- @( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
+ $(foreach vendor_sku,$(PRIVATE_VENDOR_SKUS), \
+ ( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) \
+ --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
+ > $@ 2>&1 ) || ( cat $@ && exit 1 ); )
check_vintf_vendor_log :=
endif # check_vintf_vendor_deps
check_vintf_vendor_deps :=
@@ -3681,17 +3845,31 @@
$(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_ARGS := $(check_vintf_compatible_args)
$(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_DEPS := $(check_vintf_compatible_deps)
+# Check ODM SKU=(empty) case when:
+# - ODM_MANIFEST_FILES is not empty; OR
+# - ODM_MANIFEST_FILES is empty AND ODM_MANIFEST_SKUS is empty (only ODM manifest fragments are used)
+$(check_vintf_compatible_log): PRIVATE_ODM_SKUS := \
+ $(if $(ODM_MANIFEST_FILES),EMPTY_ODM_SKU_PLACEHOLDER,\
+ $(if $(ODM_MANIFEST_SKUS),,EMPTY_ODM_SKU_PLACEHOLDER)) \
+ $(ODM_MANIFEST_SKUS)
+# Check vendor SKU=(empty) case when:
+# - DEVICE_MANIFEST_FILE is not empty; OR
+# - DEVICE_MANIFEST_FILE is empty AND DEVICE_MANIFEST_SKUS is empty (only vendor manifest fragments are used)
+$(check_vintf_compatible_log): PRIVATE_VENDOR_SKUS := \
+ $(if $(DEVICE_MANIFEST_FILE),EMPTY_VENDOR_SKU_PLACEHOLDER,\
+ $(if $(DEVICE_MANIFEST_SKUS),,EMPTY_VENDOR_SKU_PLACEHOLDER)) \
+ $(DEVICE_MANIFEST_SKUS)
$(check_vintf_compatible_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_compatible_deps)
@echo -n -e 'Deps: \n ' > $@
@sed 's/ /\n /g' <<< "$(PRIVATE_CHECK_VINTF_DEPS)" >> $@
@echo -n -e 'Args: \n ' >> $@
@cat <<< "$(PRIVATE_CHECK_VINTF_ARGS)" >> $@
- @echo -n -e 'For empty SKU:' >> $@
- @( $< --check-compat $(PRIVATE_CHECK_VINTF_ARGS) >> $@ 2>&1 ) || ( cat $@ && exit 1 )
- $(foreach sku,$(ODM_MANIFEST_SKUS), \
- echo "For SKU = $(sku):" >> $@; \
+ $(foreach odm_sku,$(PRIVATE_ODM_SKUS), $(foreach vendor_sku,$(PRIVATE_VENDOR_SKUS), \
+ echo "For ODM SKU = $(odm_sku), vendor SKU = $(vendor_sku)" >> $@; \
( $< --check-compat $(PRIVATE_CHECK_VINTF_ARGS) \
- --property ro.boot.product.hardware.sku=$(sku) >> $@ 2>&1 ) || ( cat $@ && exit 1 ); )
+ --property ro.boot.product.hardware.sku=$(filter-out EMPTY_ODM_SKU_PLACEHOLDER,$(odm_sku)) \
+ --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
+ >> $@ 2>&1 ) || (cat $@ && exit 1); ))
check_vintf_compatible_log :=
check_vintf_compatible_args :=
@@ -4090,6 +4268,15 @@
ifdef ODM_MANIFEST_SKUS
$(hide) echo "vintf_odm_manifest_skus=$(ODM_MANIFEST_SKUS)" >> $@
endif
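+# Also record the vendor manifest SKUs and whether the empty ODM / vendor SKU cases apply.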
+ifdef ODM_MANIFEST_FILES
+ $(hide) echo "vintf_include_empty_odm_sku=true" >> $@
+endif
+ifdef DEVICE_MANIFEST_SKUS
+ $(hide) echo "vintf_vendor_manifest_skus=$(DEVICE_MANIFEST_SKUS)" >> $@
+endif
+ifdef DEVICE_MANIFEST_FILE
+ $(hide) echo "vintf_include_empty_vendor_sku=true" >> $@
+endif
.PHONY: misc_info
misc_info: $(INSTALLED_MISC_INFO_TARGET)
@@ -4751,13 +4938,8 @@
JACOCO_REPORT_CLASSES_ALL := $(PRODUCT_OUT)/jacoco-report-classes-all.jar
$(JACOCO_REPORT_CLASSES_ALL) :
@echo "Collecting uninstrumented classes"
- $(hide) find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" | \
- zip -@ -0 -q -X $@
-# Meaning of these options:
-# -@ scan stdin for file paths to add to the zip
-# -0 don't do any compression
-# -q supress most output
-# -X skip storing extended file attributes
+ find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" 2>/dev/null | sort > $@.list
+ $(SOONG_ZIP) -o $@ -L 0 -C $(OUT_DIR) -P out -l $@.list
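+# soong_zip options: -o sets the output zip, -L 0 stores entries uncompressed,
+# -C/-P map paths under $(OUT_DIR) to an out/ prefix inside the zip, and -l reads
+# the list of files to add.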
endif # EMMA_INSTRUMENT=true
@@ -5074,7 +5256,6 @@
$(ALL_DEFAULT_INSTALLED_MODULES) \
$(INSTALLED_RAMDISK_TARGET) \
$(ALL_DOCS) \
- $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/api-stubs-docs_annotations.zip \
$(ALL_SDK_FILES)
endif
@@ -5248,3 +5429,7 @@
.PHONY: haiku
haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
$(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
+
+# -----------------------------------------------------------------
+# The makefile for haiku line coverage.
+include $(BUILD_SYSTEM)/line_coverage.mk
diff --git a/core/base_rules.mk b/core/base_rules.mk
index cce6ec1..f70911e 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -705,13 +705,19 @@
ifeq ($(use_testcase_folder),true)
ifneq ($(my_test_data_file_pairs),)
+# Filter out existing installed test data paths when collecting test data files to be installed
+# and indexed, as they cause build rule conflicts. Instead, put them in a separate list that is
+# only used for indexing.
$(foreach pair, $(my_test_data_file_pairs), \
$(eval parts := $(subst :,$(space),$(pair))) \
$(eval src_path := $(word 1,$(parts))) \
$(eval file := $(word 2,$(parts))) \
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
- $(call filter-copy-pair,$(src_path),$(call append-path,$(dir),$(file)),$(my_installed_test_data))))))
+ $(call filter-copy-pair,$(src_path),$(call append-path,$(dir),$(file)),$(my_installed_test_data)))) \
+ $(eval my_compat_dist_test_data_$(suite) += \
+ $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
+ $(filter $(my_installed_test_data),$(call append-path,$(dir),$(file)))))))
endif
else
ifneq ($(my_test_data_file_pairs),)
@@ -732,7 +738,8 @@
$(call create-suite-dependencies)
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
- $(eval my_compat_dist_config_$(suite) := ))
+ $(eval my_compat_dist_config_$(suite) := ) \
+ $(eval my_compat_dist_test_data_$(suite) := ))
endif # LOCAL_COMPATIBILITY_SUITE
diff --git a/core/binary.mk b/core/binary.mk
index a62f76c..e021b7d 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -90,32 +90,6 @@
my_cflags += -DDO_NOT_CHECK_MANUAL_BINDER_INTERFACES
endif
-ifneq ($(strip $(ENABLE_XOM)),false)
- ifndef LOCAL_IS_HOST_MODULE
- my_xom := true
- # Disable XOM in excluded paths.
- combined_xom_exclude_paths := $(XOM_EXCLUDE_PATHS) \
- $(PRODUCT_XOM_EXCLUDE_PATHS)
- ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_xom_exclude_paths)),\
- $(filter $(dir)%,$(LOCAL_PATH)))),)
- my_xom := false
- endif
-
- # Allow LOCAL_XOM to override the above
- ifdef LOCAL_XOM
- my_xom := $(LOCAL_XOM)
- endif
-
- ifeq ($(strip $(my_xom)),true)
- ifeq (arm64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
- ifeq ($(my_use_clang_lld),true)
- my_ldflags += -Wl,--execute-only -Wl,-z,separate-code
- endif
- endif
- endif
- endif
-endif
-
my_allow_undefined_symbols := $(strip $(LOCAL_ALLOW_UNDEFINED_SYMBOLS))
ifdef SANITIZE_HOST
ifdef LOCAL_IS_HOST_MODULE
@@ -136,19 +110,8 @@
# Make sure we've built the NDK.
my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
- # mips32r6 is not supported by the NDK. No released NDK contains these
- # libraries, but the r10 in prebuilts/ndk had a local hack to add them :(
- #
- # We need to find a real solution to this problem, but until we do just drop
- # mips32r6 things back to r10 to get the tree building again.
- ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
- ifeq ($(LOCAL_NDK_VERSION), current)
- LOCAL_NDK_VERSION := r10
- endif
- endif
-
my_arch := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
- ifneq (,$(filter arm64 mips64 x86_64,$(my_arch)))
+ ifneq (,$(filter arm64 x86_64,$(my_arch)))
my_min_sdk_version := 21
else
my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
@@ -182,17 +145,11 @@
$(my_built_ndk)/sysroot/usr/include/$(my_ndk_triple) \
$(my_ndk_sysroot)/usr/include \
- # x86_64 and and mips64 are both multilib toolchains, so their libraries are
+ # x86_64 is a multilib toolchain, so its libraries are
# installed in /usr/lib64. Aarch64, on the other hand, is not a multilib
# compiler, so its libraries are in /usr/lib.
- #
- # Mips32r6 is yet another variation, with libraries installed in libr6.
- #
- # For the rest, the libraries are installed simply to /usr/lib.
- ifneq (,$(filter x86_64 mips64,$(my_arch)))
+ ifneq (,$(filter x86_64,$(my_arch)))
my_ndk_libdir_name := lib64
- else ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
- my_ndk_libdir_name := libr6
else
my_ndk_libdir_name := lib
endif
@@ -206,11 +163,7 @@
# hashes (which are much faster!), but shipping to older devices requires
# the old style hash. Fortunately, we can build with both and it'll work
# anywhere.
- #
- # This is not currently supported on MIPS architectures.
- ifeq (,$(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
- my_ldflags += -Wl,--hash-style=both
- endif
+ my_ldflags += -Wl,--hash-style=both
# We don't want to expose the relocation packer to the NDK just yet.
LOCAL_PACK_MODULE_RELOCATIONS := false
@@ -221,9 +174,6 @@
my_ndk_stl_shared_lib_fullpath :=
my_ndk_stl_static_lib :=
my_cpu_variant := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)CPU_ABI)
- ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
- my_cpu_variant := mips32r6
- endif
LOCAL_NDK_STL_VARIANT := $(strip $(LOCAL_NDK_STL_VARIANT))
ifeq (,$(LOCAL_NDK_STL_VARIANT))
LOCAL_NDK_STL_VARIANT := system
@@ -299,6 +249,9 @@
# If PLATFORM_VNDK_VERSION has a CODENAME, it will return
# __ANDROID_API_FUTURE__.
my_api_level := $(call codename-or-sdk-to-sdk,$(PLATFORM_VNDK_VERSION))
+ else
+ # Build with current BOARD_VNDK_VERSION.
+ my_api_level := $(call codename-or-sdk-to-sdk,$(BOARD_VNDK_VERSION))
endif
my_cflags += -D__ANDROID_VNDK__
endif
diff --git a/core/board_config.mk b/core/board_config.mk
index 4efe10a..8ff16e2 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -282,6 +282,7 @@
# Now we can substitute with the real value of TARGET_COPY_OUT_DEBUG_RAMDISK
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk/first_stage_ramdisk
+TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK := vendor_debug_ramdisk/first_stage_ramdisk
TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk/first_stage_ramdisk
endif
@@ -584,9 +585,8 @@
ifdef BOARD_VNDK_VERSION
ifneq ($(BOARD_VNDK_VERSION),current)
- $(error BOARD_VNDK_VERSION: Only "current" is implemented)
+ $(call check_vndk_version,$(BOARD_VNDK_VERSION))
endif
-
TARGET_VENDOR_TEST_SUFFIX := /vendor
else
TARGET_VENDOR_TEST_SUFFIX :=
@@ -624,16 +624,16 @@
###########################################
# Handle BUILD_BROKEN_USES_BUILD_*
-$(foreach m,$(filter-out BUILD_COPY_HEADERS,$(DEFAULT_WARNING_BUILD_MODULE_TYPES)),\
+$(foreach m,$(DEFAULT_WARNING_BUILD_MODULE_TYPES),\
$(if $(filter false,$(BUILD_BROKEN_USES_$(m))),\
$(KATI_obsolete_var $(m),Please convert to Soong),\
$(KATI_deprecated_var $(m),Please convert to Soong)))
-$(if $(filter false,$(BUILD_BROKEN_USES_BUILD_COPY_HEADERS)),\
- $(KATI_obsolete_var BUILD_COPY_HEADERS,See $(CHANGES_URL)#copy_headers),\
- $(KATI_deprecated_var BUILD_COPY_HEADERS,See $(CHANGES_URL)#copy_headers))
+$(if $(filter true,$(BUILD_BROKEN_USES_BUILD_COPY_HEADERS)),\
+ $(KATI_deprecated_var BUILD_COPY_HEADERS,See $(CHANGES_URL)#copy_headers),\
+ $(KATI_obsolete_var BUILD_COPY_HEADERS,See $(CHANGES_URL)#copy_headers))
-$(foreach m,$(DEFAULT_ERROR_BUILD_MODULE_TYPES),\
+$(foreach m,$(filter-out BUILD_COPY_HEADERS,$(DEFAULT_ERROR_BUILD_MODULE_TYPES)),\
$(if $(filter true,$(BUILD_BROKEN_USES_$(m))),\
$(KATI_deprecated_var $(m),Please convert to Soong),\
$(KATI_obsolete_var $(m),Please convert to Soong)))
diff --git a/core/build-system.html b/core/build-system.html
index cc242d9..9cd7b0b 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -467,8 +467,6 @@
<b>TARGET_ARCH</b><br/>
arm<br/>
arm64<br/>
- mips<br/>
- mips64<br/>
x86<br/>
x86_64
</td>
diff --git a/core/clang/TARGET_mips.mk b/core/clang/TARGET_mips.mk
deleted file mode 100644
index 3e54a66..0000000
--- a/core/clang/TARGET_mips.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-$(clang_2nd_arch_prefix)RS_TRIPLE := renderscript32-linux-androideabi
-$(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS :=
-RS_COMPAT_TRIPLE := mipsel-linux-android
-
-$(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-mipsel-android.a
-
-# Address sanitizer clang config
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
diff --git a/core/clang/TARGET_mips64.mk b/core/clang/TARGET_mips64.mk
deleted file mode 100644
index cb6a3cd..0000000
--- a/core/clang/TARGET_mips64.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-RS_TRIPLE := renderscript64-linux-android
-RS_TRIPLE_CFLAGS :=
-RS_COMPAT_TRIPLE := mips64el-linux-android
-
-TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-mips64el-android.a
-
-# Address sanitizer clang config
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index bbc117f..e27d91c 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -314,7 +314,6 @@
LOCAL_VTS_MODE:=
LOCAL_WARNINGS_ENABLE:=
LOCAL_WHOLE_STATIC_LIBRARIES:=
-LOCAL_XOM:=
LOCAL_YACCFLAGS:=
LOCAL_CHECK_ELF_FILES:=
# TODO: deprecate, it does nothing
diff --git a/core/combo/TARGET_linux-mips.mk b/core/combo/TARGET_linux-mips.mk
deleted file mode 100644
index 9f14aa2..0000000
--- a/core/combo/TARGET_linux-mips.mk
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Copyright (C) 2010 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Configuration for Linux on MIPS.
-# Included by combo/select.mk
-
-# You can set TARGET_ARCH_VARIANT to use an arch version other
-# than mips32r2-fp. Each value should correspond to a file named
-# $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions. Their
-# purpose is to allow module Android.mk files to selectively compile
-# different versions of code based upon the funtionality and
-# instructions available in a given architecture version.
-#
-# The blocks also define specific arch_variant_cflags, which
-# include defines, and compiler settings for the given architecture
-# version.
-#
-ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)),)
-TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := mips32r2-fp
-endif
-
-include $(BUILD_SYSTEM)/combo/fdo.mk
-
-define $(combo_var_prefix)transform-shared-lib-to-toc
-$(call _gen_toc_command_for_elf,$(1),$(2))
-endef
-
-$(combo_2nd_arch_prefix)TARGET_PACK_MODULE_RELOCATIONS := true
-
-$(combo_2nd_arch_prefix)TARGET_LINKER := /system/bin/linker
diff --git a/core/combo/TARGET_linux-mips64.mk b/core/combo/TARGET_linux-mips64.mk
deleted file mode 100644
index ae17e46..0000000
--- a/core/combo/TARGET_linux-mips64.mk
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Configuration for Linux on MIPS64.
-# Included by combo/select.mk
-
-# You can set TARGET_ARCH_VARIANT to use an arch version other
-# than mips64r6. Each value should correspond to a file named
-# $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions. Their
-# purpose is to allow module Android.mk files to selectively compile
-# different versions of code based upon the funtionality and
-# instructions available in a given architecture version.
-#
-# The blocks also define specific arch_variant_cflags, which
-# include defines, and compiler settings for the given architecture
-# version.
-#
-ifeq ($(strip $(TARGET_ARCH_VARIANT)),)
-TARGET_ARCH_VARIANT := mips64r6
-endif
-
-include $(BUILD_SYSTEM)/combo/fdo.mk
-
-define $(combo_var_prefix)transform-shared-lib-to-toc
-$(call _gen_toc_command_for_elf,$(1),$(2))
-endef
-
-TARGET_PACK_MODULE_RELOCATIONS := true
-
-TARGET_LINKER := /system/bin/linker64
diff --git a/core/config.mk b/core/config.mk
index 844d7d6..b329b0d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -820,7 +820,7 @@
MAINLINE_SEPOLICY_DEV_CERTIFICATES := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))
endif
-BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_number.txt)
BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
# SEPolicy versions
@@ -1219,23 +1219,6 @@
# These goals don't need to collect and include Android.mks/CleanSpec.mks
# in the source tree.
dont_bother_goals := out \
- snod systemimage-nodeps \
- userdataimage-nodeps \
- cacheimage-nodeps \
- bptimage-nodeps \
- vnod vendorimage-nodeps \
- pnod productimage-nodeps \
- senod systemextimage-nodeps \
- onod odmimage-nodeps \
- systemotherimage-nodeps \
- ramdisk-nodeps \
- ramdisk_debug-nodeps \
- ramdisk_test_harness-nodeps \
- bootimage-nodeps \
- bootimage_debug-nodeps \
- bootimage_test_harness-nodeps \
- recoveryimage-nodeps \
- vbmetaimage-nodeps \
product-graph dump-products
ifeq ($(CALLED_FROM_SETUP),true)
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index efb21e7..8c76a8d 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -134,12 +134,6 @@
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
endif
-# CFI needs gold linker, and mips toolchain does not have one.
-ifneq ($(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
- my_sanitize := $(filter-out cfi,$(my_sanitize))
- my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
-endif
-
# Disable sanitizers which need the UBSan runtime for host targets.
ifdef LOCAL_IS_HOST_MODULE
my_sanitize := $(filter-out cfi,$(my_sanitize))
diff --git a/core/definitions.mk b/core/definitions.mk
index fd785b8..3499da9 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2197,17 +2197,19 @@
define transform-classes.jar-to-dex
@echo "target Dex: $(PRIVATE_MODULE)"
-@mkdir -p $(dir $@)
+@mkdir -p $(dir $@)tmp
$(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
$(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
$(hide) $(D8_WRAPPER) $(DX_COMMAND) $(DEX_FLAGS) \
- --output $(dir $@) \
+ --output $(dir $@)tmp \
$(addprefix --lib ,$(PRIVATE_D8_LIBS)) \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
$(subst --main-dex-list=, --main-dex-list , \
$(filter-out --core-library --multi-dex --minimal-main-dex,$(PRIVATE_DX_FLAGS))) \
$(dir $@)d8_input.jar
+$(hide) mv $(dir $@)tmp/* $(dir $@)
$(hide) rm -f $(dir $@)d8_input.jar
+$(hide) rm -rf $(dir $@)tmp
endef
# We need the extra blank line, so that the command will be on a separate line.
@@ -2895,7 +2897,8 @@
$(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,$(eval ALL_COMPATIBILITY_SUITES += $(suite))) \
$(eval COMPATIBILITY.$(suite).FILES := \
$$(COMPATIBILITY.$(suite).FILES) $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
- $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f)))) \
+ $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
+ $$(my_compat_dist_test_data_$(suite))) \
$(eval COMPATIBILITY.$(suite).MODULES := \
$$(COMPATIBILITY.$(suite).MODULES) $$(my_register_name))) \
$(eval $(my_all_targets) : $(call copy-many-files, \
diff --git a/core/deprecation.mk b/core/deprecation.mk
index cc620a3..19bd4cf 100644
--- a/core/deprecation.mk
+++ b/core/deprecation.mk
@@ -24,10 +24,6 @@
# relevant BUILD_BROKEN_USES_BUILD_* variables, then these would move to
# DEFAULT_ERROR_BUILD_MODULE_TYPES.
DEFAULT_WARNING_BUILD_MODULE_TYPES :=$= \
- BUILD_COPY_HEADERS \
- BUILD_HOST_EXECUTABLE \
- BUILD_HOST_SHARED_LIBRARY \
- BUILD_HOST_STATIC_LIBRARY \
# These are BUILD_* variables that are errors to reference, but you can set
# BUILD_BROKEN_USES_BUILD_* in your BoardConfig.mk in order to turn them back
@@ -35,8 +31,12 @@
DEFAULT_ERROR_BUILD_MODULE_TYPES :=$= \
BUILD_AUX_EXECUTABLE \
BUILD_AUX_STATIC_LIBRARY \
+ BUILD_COPY_HEADERS \
+ BUILD_HOST_EXECUTABLE \
BUILD_HOST_FUZZ_TEST \
BUILD_HOST_NATIVE_TEST \
+ BUILD_HOST_SHARED_LIBRARY \
+ BUILD_HOST_STATIC_LIBRARY \
BUILD_HOST_STATIC_TEST_LIBRARY \
BUILD_HOST_TEST_CONFIG \
BUILD_NATIVE_BENCHMARK \
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 55eeec6..20b4051 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -18,9 +18,35 @@
ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED),$(PRODUCT_OUT))
# Install boot images. Note that there can be multiple.
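+# These are the input variables consumed by dex_preopt_libart.mk (see its header);
+# they are set below once per architecture and install location.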
+my_boot_image_arch := TARGET_ARCH
+my_boot_image_out := $(PRODUCT_OUT)
+my_boot_image_syms := $(TARGET_OUT_UNSTRIPPED)
+my_boot_image_root := DEFAULT_DEX_PREOPT_INSTALLED_IMAGE
DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
-$(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
$(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+ifdef TARGET_2ND_ARCH
+ my_boot_image_arch := TARGET_2ND_ARCH
+ my_boot_image_root := 2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE
+ 2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
+ $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+endif
+# Install boot images for testing on the host. We exclude the framework image as it is not part of the ART manifest.
+my_boot_image_arch := HOST_ARCH
+my_boot_image_out := $(HOST_OUT)
+my_boot_image_syms := $(HOST_OUT)/symbols
+my_boot_image_root := HOST_BOOT_IMAGE
+HOST_BOOT_IMAGE :=
+$(foreach my_boot_image_name,art_host,$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+ifdef HOST_2ND_ARCH
+ my_boot_image_arch := HOST_2ND_ARCH
+ my_boot_image_root := 2ND_HOST_BOOT_IMAGE
+ 2ND_HOST_BOOT_IMAGE :=
+ $(foreach my_boot_image_name,art_host,$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+endif
+my_boot_image_arch :=
+my_boot_image_out :=
+my_boot_image_syms :=
+my_boot_image_root :=
# Build the boot.zip which contains the boot jars and their compilation output
# We can do this only if preopt is enabled and if the product uses libart config (which sets the
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index ccf53f5..598ac2d 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -56,16 +56,6 @@
DEX2OAT_XMS := $(call get-product-default-property,dalvik.vm.dex2oat-Xms)
DEX2OAT_XMX := $(call get-product-default-property,dalvik.vm.dex2oat-Xmx)
-ifeq ($(TARGET_ARCH),$(filter $(TARGET_ARCH),mips mips64))
-# MIPS specific overrides.
-# For MIPS the ART image is loaded at a lower address. This causes issues
-# with the image overlapping with memory on the host cross-compiling and
-# building the image. We therefore limit the Xmx value. This isn't done
-# via a property as we want the larger Xmx value if we're running on a
-# MIPS device.
-DEX2OAT_XMX := 128m
-endif
-
ifeq ($(WRITE_SOONG_VARIABLES),true)
$(call json_start)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 79d5f8c..12b29f4 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -1,45 +1,42 @@
####################################
# ART boot image installation
-# Input variable:
+# Input variables:
# my_boot_image_name: the boot image to install
+# my_boot_image_arch: the architecture to install (e.g. TARGET_ARCH, not expanded)
+# my_boot_image_out: the install directory (e.g. $(PRODUCT_OUT))
+# my_boot_image_syms: the symbols directory (e.g. $(TARGET_OUT_UNSTRIPPED))
+# my_boot_image_root: the make variable used to store the installed image path
#
####################################
-# Install primary arch vdex files into a shared location, and then symlink them to both the primary
-# and secondary arch directories.
-my_vdex_copy_pairs := $(DEXPREOPT_IMAGE_VDEX_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_ARCH))
-my_installed := $(foreach v,$(my_vdex_copy_pairs),$(PRODUCT_OUT)$(call word-colon,2,$(v)))
+# Install $(1) to $(2) so that it is shared between architectures.
+define copy-vdex-file
+my_vdex_shared := $$(dir $$(patsubst %/,%,$$(dir $(2))))$$(notdir $(2)) # Remove the arch dir.
+ifneq ($(my_boot_image_arch),$(filter $(my_boot_image_arch), TARGET_2ND_ARCH HOST_2ND_ARCH))
+$$(my_vdex_shared): $(1) # Copy $(1) to directory one level up (i.e. with the arch dir removed).
+ @echo "Install: $$@"
+ $$(copy-file-to-target)
+endif
+$(2): $$(my_vdex_shared) # Create symlink at $(2) which points to the actual physical copy.
+ @echo "Symlink: $$@"
+ mkdir -p $$(dir $$@)
+ ln -sfn ../$$(notdir $$@) $$@
+my_vdex_shared :=
+endef
+
+# Same as 'copy-many-files' but it uses the vdex-specific helper above.
+define copy-vdex-files
+$(foreach v,$(1),$(eval $(call copy-vdex-file, $(call word-colon,1,$(v)), $(2)$(call word-colon,2,$(v)))))
+$(foreach v,$(1),$(2)$(call word-colon,2,$(v)))
+endef
+
+# Install the boot images compiled by Soong.
+# The first file is saved in $(my_boot_image_root) and the rest are added as its dependencies.
+my_suffix := BUILT_INSTALLED_$(my_boot_image_name)_$($(my_boot_image_arch))
+my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_$(my_suffix)),$(my_boot_image_out))
+my_installed += $(call copy-many-files,$(DEXPREOPT_IMAGE_UNSTRIPPED_$(my_suffix)),$(my_boot_image_syms))
+my_installed += $(call copy-vdex-files,$(DEXPREOPT_IMAGE_VDEX_$(my_suffix)),$(my_boot_image_out))
+$(my_boot_image_root) += $(firstword $(my_installed))
$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
-
-my_built_vdex_dir := $(dir $(call word-colon,1,$(firstword $(my_vdex_copy_pairs))))
-my_installed_vdex_dir := $(PRODUCT_OUT)$(dir $(call word-colon,2,$(firstword $(my_vdex_copy_pairs))))
-
-$(my_installed): $(my_installed_vdex_dir)% : $(my_built_vdex_dir)%
- @echo "Install: $@"
- @rm -f $@
- $(copy-file-to-target)
- mkdir -p $(dir $@)/$(TARGET_ARCH)
- ln -sfn ../$(notdir $@) $(dir $@)/$(TARGET_ARCH)
-ifdef TARGET_2ND_ARCH
- mkdir -p $(dir $@)/$(TARGET_2ND_ARCH)
- ln -sfn ../$(notdir $@) $(dir $@)/$(TARGET_2ND_ARCH)
-endif
-
-my_dexpreopt_image_extra_deps := $(firstword $(my_installed))
-
-my_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-
-ifdef TARGET_2ND_ARCH
- my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
- include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-endif
-
-my_2nd_arch_prefix :=
-
-
-my_vdex_copy_pairs :=
my_installed :=
-my_built_vdex_dir :=
-my_installed_vdex_dir :=
-my_dexpreopt_image_extra_deps :=
+my_suffix :=
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
deleted file mode 100644
index 34b8526..0000000
--- a/core/dex_preopt_libart_boot.mk
+++ /dev/null
@@ -1,25 +0,0 @@
-# Rules to install a boot image built by dexpreopt_bootjars.go
-# Input variables:
-# my_boot_image_name: the boot image to install
-# my_2nd_arch_prefix: indicates if this is to build for the 2nd arch.
-# my_dexpreopt_image_extra_deps: extra dependencies to add on the installed boot.art
-
-# Install the boot images compiled by Soong
-# The first file (generally boot.art) is saved as DEFAULT_DEX_PREOPT_INSTALLED_IMAGE,
-# and the rest are added as dependencies of the first.
-
-my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_$(my_2nd_arch_prefix)ARCH)),$(PRODUCT_OUT))
-$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
-$(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE += $(firstword $(my_installed))
-
-# Install the unstripped boot images compiled by Soong into the symbols directory
-# The first file (generally boot.art) made a dependency of DEFAULT_DEX_PREOPT_INSTALLED_IMAGE,
-# and the rest are added as dependencies of the first.
-my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_$(my_2nd_arch_prefix)ARCH)),$(TARGET_OUT_UNSTRIPPED))
-$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(firstword $(my_installed))
-
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(my_dexpreopt_image_extra_deps)
-
-my_installed :=
-my_built_installed :=
diff --git a/core/envsetup.mk b/core/envsetup.mk
index d35cb7e..ac3d5cf 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -101,7 +101,7 @@
# ---------------------------------------------------------------
# Set up configuration for host machine. We don't do cross-
-# compiles except for arm/mips, so the HOST is whatever we are
+# compiles except for arm, so the HOST is whatever we are
# running on
# HOST_OS
@@ -228,6 +228,7 @@
TARGET_COPY_OUT_OEM := oem
TARGET_COPY_OUT_RAMDISK := ramdisk
TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk
+TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK := vendor_debug_ramdisk
TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk
TARGET_COPY_OUT_ROOT := root
TARGET_COPY_OUT_RECOVERY := recovery
@@ -258,8 +259,6 @@
# Jars present in the ART apex. These should match exactly the list of
# Java libraries in the ART apex build rule.
ART_APEX_JARS := core-oj core-libart core-icu4j okhttp bouncycastle apache-xml
-TARGET_CORE_JARS := $(ART_APEX_JARS) conscrypt
-HOST_CORE_JARS := $(addsuffix -hostdex,$(TARGET_CORE_JARS))
#################################################################
# Read the product specs so we can get TARGET_DEVICE and other
@@ -818,6 +817,7 @@
TARGET_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_RAMDISK)
TARGET_RAMDISK_OUT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)
TARGET_DEBUG_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DEBUG_RAMDISK)
+TARGET_VENDOR_DEBUG_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK)
TARGET_TEST_HARNESS_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_TEST_HARNESS_RAMDISK)
TARGET_VENDOR_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR_RAMDISK)
diff --git a/core/goma.mk b/core/goma.mk
index c265259..2b51d8b 100644
--- a/core/goma.mk
+++ b/core/goma.mk
@@ -27,7 +27,8 @@
# use both ccache and gomacc.
CC_WRAPPER := $(strip $(CC_WRAPPER) $(GOMA_CC))
CXX_WRAPPER := $(strip $(CXX_WRAPPER) $(GOMA_CC))
- JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(GOMA_CC))
+ # b/143658984: goma can't handle the --system argument to javac
+ #JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(GOMA_CC))
goma_dir :=
endif
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 882fe3a..5021510 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -79,7 +79,8 @@
$(java_source_list_file): $(java_sources_deps)
$(write-java-source-list)
-$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+# TODO(b/143658984): goma can't handle the --system argument to javac.
+#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index beaea2a..8998d52 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -70,7 +70,8 @@
$(java_source_list_file): $(java_sources_deps)
$(write-java-source-list)
-$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+# TODO(b/143658984): goma can't handle the --system argument to javac.
+#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
diff --git a/core/host_shared_library.mk b/core/host_shared_library.mk
index c22af97..fbe6442 100644
--- a/core/host_shared_library.mk
+++ b/core/host_shared_library.mk
@@ -38,6 +38,8 @@
## Copy headers to the install tree
###########################################################
ifdef LOCAL_COPY_HEADERS
-$(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers)
+$(if $(filter true,$(BUILD_BROKEN_USES_BUILD_COPY_HEADERS)),\
+ $(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers),\
+ $(call pretty-error,LOCAL_COPY_HEADERS is obsolete. See $(CHANGES_URL)#copy_headers))
include $(BUILD_SYSTEM)/copy_headers.mk
endif
diff --git a/core/host_static_library.mk b/core/host_static_library.mk
index 3dbd144..23d809c 100644
--- a/core/host_static_library.mk
+++ b/core/host_static_library.mk
@@ -38,6 +38,8 @@
## Copy headers to the install tree
###########################################################
ifdef LOCAL_COPY_HEADERS
-$(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers)
+$(if $(filter true,$(BUILD_BROKEN_USES_BUILD_COPY_HEADERS)),\
+ $(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers),\
+ $(call pretty-error,LOCAL_COPY_HEADERS is obsolete. See $(CHANGES_URL)#copy_headers))
include $(BUILD_SYSTEM)/copy_headers.mk
endif
diff --git a/core/java.mk b/core/java.mk
index a041321..44e005d 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -274,7 +274,8 @@
endif # TURBINE_ENABLED != false
-$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+# TODO(b/143658984): goma can't handle the --system argument to javac.
+#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
diff --git a/core/java_renderscript.mk b/core/java_renderscript.mk
index 672863b..bfcf59e 100644
--- a/core/java_renderscript.mk
+++ b/core/java_renderscript.mk
@@ -129,7 +129,7 @@
endif
my_arch := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
-ifneq (,$(filter arm64 mips64 x86_64,$(my_arch)))
+ifneq (,$(filter arm64 x86_64,$(my_arch)))
my_min_sdk_version := 21
else
my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
diff --git a/core/line_coverage.mk b/core/line_coverage.mk
new file mode 100644
index 0000000..a32eea6
--- /dev/null
+++ b/core/line_coverage.mk
@@ -0,0 +1,93 @@
+# -----------------------------------------------------------------
+# Make target for line coverage. This target generates a zip file
+# called `line_coverage_profiles.zip` that contains a large set of
+# zip files, one for each fuzz target/critical component. Each zip
+# file contains a set of profile files (*.gcno) that we will use
+# to generate line coverage reports. Furthermore, the target compiles
+# all fuzz targets with line coverage instrumentation enabled and
+# packs them into another zip file called `line_coverage_fuzz_targets.zip`.
+#
+# To run the make target set the coverage related envvars first:
+# NATIVE_LINE_COVERAGE=true NATIVE_COVERAGE=true \
+# COVERAGE_PATHS=* make haiku-line-coverage
+# -----------------------------------------------------------------
+
+# TODO(b/148306195): Due to this issue, some fuzz targets cannot be built with
+# line coverage instrumentation. For now we just blacklist them.
+blacklisted_fuzz_targets := libneuralnetworks_fuzzer
+
+fuzz_targets := $(ALL_FUZZ_TARGETS)
+fuzz_targets := $(filter-out $(blacklisted_fuzz_targets),$(fuzz_targets))
+
+
+# Android components that are considered critical.
+# Please note that adding/removing critical components is very rare.
+critical_components_static := \
+ lib-bt-packets \
+ libbt-stack \
+ libffi \
+ libhevcdec \
+ libhevcenc \
+ libmpeg2dec \
+ libosi \
+ libpdx \
+ libselinux \
+ libvold \
+ libyuv
+
+critical_components_shared := \
+ libaudioprocessing \
+ libbinder \
+ libbluetooth_gd \
+ libbrillo \
+ libcameraservice \
+ libcurl \
+ libhardware \
+ libinputflinger \
+ libopus \
+ libstagefright \
+ libunwind \
+ libvixl
+
+# Use the intermediates directory to avoid installing libraries to the device.
+intermediates := $(call intermediates-dir-for,PACKAGING,haiku-line-coverage)
+
+
+# We want the profile files for all fuzz targets + critical components.
+line_coverage_profiles := $(intermediates)/line_coverage_profiles.zip
+
+critical_components_static_inputs := $(foreach lib,$(critical_components_static), \
+ $(call intermediates-dir-for,STATIC_LIBRARIES,$(lib))/$(lib).a)
+
+critical_components_shared_inputs := $(foreach lib,$(critical_components_shared), \
+ $(call intermediates-dir-for,SHARED_LIBRARIES,$(lib))/$(lib).so)
+
+fuzz_target_inputs := $(foreach fuzz,$(fuzz_targets), \
+ $(call intermediates-dir-for,EXECUTABLES,$(fuzz))/$(fuzz))
+
+# When line coverage is enabled (NATIVE_LINE_COVERAGE is set), make creates
+# a "coverage" directory and stores all profile (*.gcno) files in inside.
+# We need everything that is stored inside this directory.
+$(line_coverage_profiles): $(fuzz_target_inputs)
+$(line_coverage_profiles): $(critical_components_static_inputs)
+$(line_coverage_profiles): $(critical_components_shared_inputs)
+$(line_coverage_profiles): $(SOONG_ZIP)
+ $(SOONG_ZIP) -o $@ -D $(PRODUCT_OUT)/coverage
+
+
+# Zip all fuzz targets compiled with line coverage.
+line_coverage_fuzz_targets := $(intermediates)/line_coverage_fuzz_targets.zip
+
+$(line_coverage_fuzz_targets): $(fuzz_target_inputs)
+$(line_coverage_fuzz_targets): $(SOONG_ZIP)
+ $(SOONG_ZIP) -o $@ -j $(addprefix -f ,$(fuzz_target_inputs))
+
+
+.PHONY: haiku-line-coverage
+haiku-line-coverage: $(line_coverage_profiles) $(line_coverage_fuzz_targets)
+$(call dist-for-goals, haiku-line-coverage, \
+ $(line_coverage_profiles):line_coverage_profiles.zip \
+ $(line_coverage_fuzz_targets):line_coverage_fuzz_targets.zip)
+
+line_coverage_profiles :=
+line_coverage_fuzz_targets :=
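For illustration, a minimal shell sketch of driving the new goal. The use of the dist goal and the out/dist location follow from the dist-for-goals call above but are assumptions; exact paths may differ per checkout.

    # Hypothetical lunch target; any target works.
    source build/envsetup.sh && lunch aosp_arm64-userdebug
    # Build the coverage artifacts and copy them to the dist directory.
    NATIVE_LINE_COVERAGE=true NATIVE_COVERAGE=true COVERAGE_PATHS='*' \
        make haiku-line-coverage dist
    # The two zips produced by this makefile should now be in dist.
    ls "${DIST_DIR:-out/dist}"/line_coverage_profiles.zip \
       "${DIST_DIR:-out/dist}"/line_coverage_fuzz_targets.zip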
diff --git a/core/main.mk b/core/main.mk
index ec1effd..277ef7d 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -38,11 +38,13 @@
# Write the build number to a file so it can be read back in
# without changing the command line every time. Avoids rebuilds
# when using ninja.
-$(shell mkdir -p $(OUT_DIR) && \
- echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt)
-BUILD_NUMBER_FILE := $(OUT_DIR)/build_number.txt
+$(shell mkdir -p $(SOONG_OUT_DIR) && \
+ echo -n $(BUILD_NUMBER) > $(SOONG_OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FILE := $(SOONG_OUT_DIR)/build_number.txt
.KATI_READONLY := BUILD_NUMBER_FILE
$(KATI_obsolete_var BUILD_NUMBER,See https://android.googlesource.com/platform/build/+/master/Changes.md#BUILD_NUMBER)
+$(BUILD_NUMBER_FILE):
+ touch $@
DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
.KATI_READONLY := DATE_FROM_FILE
@@ -220,9 +222,6 @@
ADDITIONAL_BUILD_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
endif
-# Define ro.sanitize.<name> properties for all global sanitizers.
-ADDITIONAL_BUILD_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
-
# Sets the default value of ro.postinstall.fstab.prefix to /system.
# Device board config should override the value to /product when needed by:
#
@@ -1031,6 +1030,7 @@
define auto-included-modules
$(if $(BOARD_VNDK_VERSION),vndk_package) \
$(if $(DEVICE_MANIFEST_FILE),vendor_manifest.xml) \
+ $(if $(DEVICE_MANIFEST_SKUS),$(foreach sku, $(DEVICE_MANIFEST_SKUS),vendor_manifest_$(sku).xml)) \
$(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \
$(if $(ODM_MANIFEST_SKUS),$(foreach sku, $(ODM_MANIFEST_SKUS),odm_manifest_$(sku).xml)) \
@@ -1252,6 +1252,7 @@
$(CUSTOM_MODULES) \
)
+ifdef FULL_BUILD
#
# Used by the cleanup logic in soong_ui to remove files that should no longer
# be installed.
@@ -1272,6 +1273,7 @@
$(test_files)))))
test_files :=
+endif
# Don't include any GNU General Public License shared objects or static
@@ -1378,6 +1380,9 @@
.PHONY: ramdisk_test_harness
ramdisk_test_harness: $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+.PHONY: vendor_ramdisk_debug
+vendor_ramdisk_debug: $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+
.PHONY: userdataimage
userdataimage: $(INSTALLED_USERDATAIMAGE_TARGET)
@@ -1397,6 +1402,9 @@
.PHONY: vendorbootimage
vendorbootimage: $(INSTALLED_VENDOR_BOOTIMAGE_TARGET)
+.PHONY: vendorbootimage_debug
+vendorbootimage_debug: $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET)
+
.PHONY: productimage
productimage: $(INSTALLED_PRODUCTIMAGE_TARGET)
@@ -1443,6 +1451,8 @@
$(INSTALLED_BPTIMAGE_TARGET) \
$(INSTALLED_VENDORIMAGE_TARGET) \
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
$(INSTALLED_ODMIMAGE_TARGET) \
$(INSTALLED_SUPERIMAGE_EMPTY_TARGET) \
$(INSTALLED_PRODUCTIMAGE_TARGET) \
@@ -1463,6 +1473,8 @@
$(INSTALLED_FILES_JSON_RAMDISK) \
$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
$(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
$(INSTALLED_FILES_FILE_ROOT) \
$(INSTALLED_FILES_JSON_ROOT) \
$(INSTALLED_FILES_FILE_RECOVERY) \
@@ -1586,8 +1598,12 @@
$(INSTALLED_FILES_JSON_RAMDISK) \
$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
$(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
$(INSTALLED_DEBUG_RAMDISK_TARGET) \
$(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
)
$(call dist-for-goals, bootimage_test_harness, \
$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index b1f4b03..c2d2a5b 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -40,9 +40,7 @@
sdk \
sdk_addon \
sdk_repo \
- snod \
stnod \
- systemimage-nodeps \
target-files-package \
test-art% \
user \
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 9df1c11..77fb8d4 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -6,7 +6,7 @@
ifneq ($(LOCAL_NOTICE_FILE),)
notice_file:=$(strip $(LOCAL_NOTICE_FILE))
else
-notice_file:=$(strip $(wildcard $(LOCAL_PATH)/NOTICE))
+notice_file:=$(strip $(wildcard $(LOCAL_PATH)/LICENSE $(LOCAL_PATH)/LICENCE $(LOCAL_PATH)/NOTICE))
endif
ifeq ($(LOCAL_MODULE_CLASS),GYP)
@@ -101,7 +101,7 @@
$(installed_notice_file): $(notice_file)
@echo Notice file: $< -- $@
$(hide) mkdir -p $(dir $@)
- $(hide) cat $< > $@
+ $(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $^ > $@
ifdef LOCAL_INSTALLED_MODULE
# Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
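The new awk invocation concatenates every notice file matched by the widened wildcard (LICENSE, LICENCE, NOTICE) into the single installed notice, emitting a blank separator before each file after the first. A throwaway shell sketch of the behaviour (file contents are made up):

    printf 'Apache 2.0 text\n' > /tmp/LICENSE
    printf 'Third-party notices\n' > /tmp/NOTICE
    # FNR==1 && NR > 1 is true only on the first line of every file after
    # the first, so a blank separator is printed between the inputs.
    awk 'FNR==1 && NR > 1 {print "\n"} {print}' /tmp/LICENSE /tmp/NOTICE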
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 7bbaeb6..e680df7 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -100,19 +100,19 @@
# Determine whether auto-RRO is enabled for this package.
enforce_rro_enabled :=
ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
- # * means all system APKs, so enable conditionally based on module path.
+ # * means all system and system_ext APKs, so enable conditionally based on module path.
+ # Note that modules in PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS are excluded even when PRODUCT_ENFORCE_RRO_TARGETS is '*'.
# Note that base_rules.mk has not yet been included, so it's likely that only
# one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
ifeq (,$(LOCAL_MODULE_PATH))
- non_system_module := $(filter true,\
+ non_rro_target_module := $(filter true,\
$(LOCAL_ODM_MODULE) \
$(LOCAL_OEM_MODULE) \
$(LOCAL_PRODUCT_MODULE) \
- $(LOCAL_SYSTEM_EXT_MODULE) \
$(LOCAL_PROPRIETARY_MODULE) \
$(LOCAL_VENDOR_MODULE))
- enforce_rro_enabled := $(if $(non_system_module),,true)
+ enforce_rro_enabled := $(if $(non_rro_target_module),,true)
else ifneq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
enforce_rro_enabled := true
endif
@@ -120,6 +120,12 @@
enforce_rro_enabled := true
endif
+# TODO(b/150820813) Some modules depend on static overlay, remove this after eliminating the dependency.
+ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS)))
+ enforce_rro_enabled :=
+endif
+
+
product_package_overlays := $(strip \
$(wildcard $(foreach dir, $(PRODUCT_PACKAGE_OVERLAYS), \
$(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
diff --git a/core/product.mk b/core/product.mk
index 55cbc5e..364f0d8 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -193,6 +193,9 @@
# Package list to apply enforcing RRO.
_product_list_vars += PRODUCT_ENFORCE_RRO_TARGETS
+# Packages to skip auto-generating RROs for when PRODUCT_ENFORCE_RRO_TARGETS is set to *.
+_product_list_vars += PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
+
_product_list_vars += PRODUCT_SDK_ATREE_FILES
_product_list_vars += PRODUCT_SDK_ADDON_NAME
_product_list_vars += PRODUCT_SDK_ADDON_COPY_FILES
@@ -350,8 +353,6 @@
# system.img), so devices need to install the package in a system-only OTA manner.
_product_single_value_vars += PRODUCT_BUILD_GENERIC_OTA_PACKAGE
-# Whether any paths are excluded from being set XOM when ENABLE_XOM=true
-_product_list_vars += PRODUCT_XOM_EXCLUDE_PATHS
_product_list_vars += PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES
_product_list_vars += PRODUCT_PACKAGE_NAME_OVERRIDES
_product_list_vars += PRODUCT_CERTIFICATE_OVERRIDES
diff --git a/core/product_config.mk b/core/product_config.mk
index eba72e4..c4361d0 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -265,9 +265,9 @@
endif
$(foreach pair,$(PRODUCT_UPDATABLE_BOOT_JARS), \
- $(if $(findstring $(call word-colon,2,$(pair)),$(PRODUCT_BOOT_JARS)),, \
- $(error Every jar in PRODUCT_UPDATABLE_BOOT_JARS must also be in PRODUCT_BOOT_JARS, \
- $(call word-colon,2,$(pair)) is not) \
+ $(if $(findstring $(call word-colon,2,$(pair)),$(PRODUCT_BOOT_JARS)), \
+ $(error A jar in PRODUCT_UPDATABLE_BOOT_JARS must not be in PRODUCT_BOOT_JARS, \
+ but $(call word-colon,2,$(pair)) is) \
) \
)
diff --git a/core/shared_library.mk b/core/shared_library.mk
index ca17151..29d8276 100644
--- a/core/shared_library.mk
+++ b/core/shared_library.mk
@@ -57,6 +57,8 @@
## Copy headers to the install tree
###########################################################
ifdef LOCAL_COPY_HEADERS
-$(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers)
+$(if $(filter true,$(BUILD_BROKEN_USES_BUILD_COPY_HEADERS)),\
+ $(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers),\
+ $(call pretty-error,LOCAL_COPY_HEADERS is obsolete. See $(CHANGES_URL)#copy_headers))
include $(BUILD_SYSTEM)/copy_headers.mk
endif
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index 190a7ed..6317b53 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -112,7 +112,9 @@
my_check_same_vndk_variants :=
ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
- my_check_same_vndk_variants := true
+ ifneq ($(CLANG_COVERAGE),true)
+ my_check_same_vndk_variants := true
+ endif
endif
endif
@@ -128,10 +130,11 @@
$(same_vndk_variants_stamp): PRIVATE_TOOLS_PREFIX := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)TOOLS_PREFIX)
$(same_vndk_variants_stamp): $(my_core_shared_lib) $(LOCAL_PREBUILT_MODULE_FILE)
- $(call verify-vndk-libs-identical,\
- $(PRIVATE_CORE_VARIANT),\
- $(PRIVATE_VENDOR_VARIANT),\
- $(PRIVATE_TOOLS_PREFIX))
+ $(call verify-vndk-libs-identical,\
+ $(PRIVATE_CORE_VARIANT),\
+ $(PRIVATE_VENDOR_VARIANT),\
+ $(PRIVATE_TOOLS_PREFIX))
+ touch $@
$(LOCAL_BUILT_MODULE): $(same_vndk_variants_stamp)
endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index e247a91..631846a 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -25,7 +25,7 @@
$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT))
$(call add_json_str, BuildId, $(BUILD_ID))
-$(call add_json_str, BuildNumberFromFile, $(BUILD_NUMBER_FROM_FILE))
+$(call add_json_str, BuildNumberFile, build_number.txt)
$(call add_json_str, Platform_version_name, $(PLATFORM_VERSION))
$(call add_json_val, Platform_sdk_version, $(PLATFORM_SDK_VERSION))
@@ -81,6 +81,7 @@
$(call add_json_list, DeviceResourceOverlays, $(DEVICE_PACKAGE_OVERLAYS))
$(call add_json_list, ProductResourceOverlays, $(PRODUCT_PACKAGE_OVERLAYS))
$(call add_json_list, EnforceRROTargets, $(PRODUCT_ENFORCE_RRO_TARGETS))
+$(call add_json_list, EnforceRROExemptedTargets, $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
$(call add_json_list, EnforceRROExcludedOverlays, $(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS))
$(call add_json_str, AAPTCharacteristics, $(TARGET_AAPT_CHARACTERISTICS))
@@ -101,8 +102,6 @@
$(call add_json_bool, EnableCFI, $(call invert_bool,$(filter false,$(ENABLE_CFI))))
$(call add_json_list, CFIExcludePaths, $(CFI_EXCLUDE_PATHS) $(PRODUCT_CFI_EXCLUDE_PATHS))
$(call add_json_list, CFIIncludePaths, $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
-$(call add_json_bool, EnableXOM, $(call invert_bool,$(filter false,$(ENABLE_XOM))))
-$(call add_json_list, XOMExcludePaths, $(XOM_EXCLUDE_PATHS) $(PRODUCT_XOM_EXCLUDE_PATHS))
$(call add_json_list, IntegerOverflowExcludePaths, $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
$(call add_json_bool, Experimental_mte, $(filter true,$(TARGET_EXPERIMENTAL_MTE)))
@@ -138,6 +137,7 @@
$(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
$(call add_json_list, BootJars, $(PRODUCT_BOOT_JARS))
+$(call add_json_list, UpdatableBootJars, $(PRODUCT_UPDATABLE_BOOT_JARS))
$(call add_json_bool, VndkUseCoreVariant, $(TARGET_VNDK_USE_CORE_VARIANT))
$(call add_json_bool, VndkSnapshotBuildArtifacts, $(VNDK_SNAPSHOT_BUILD_ARTIFACTS))
diff --git a/core/static_library.mk b/core/static_library.mk
index 78908cf..a450092 100644
--- a/core/static_library.mk
+++ b/core/static_library.mk
@@ -42,6 +42,8 @@
## Copy headers to the install tree
###########################################################
ifdef LOCAL_COPY_HEADERS
-$(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers)
+$(if $(filter true,$(BUILD_BROKEN_USES_BUILD_COPY_HEADERS)),\
+ $(call pretty-warning,LOCAL_COPY_HEADERS is deprecated. See $(CHANGES_URL)#copy_headers),\
+ $(call pretty-error,LOCAL_COPY_HEADERS is obsolete. See $(CHANGES_URL)#copy_headers))
include $(BUILD_SYSTEM)/copy_headers.mk
endif
diff --git a/core/tasks/boot_jars_package_check.mk b/core/tasks/boot_jars_package_check.mk
index ceaff54..05243e5 100644
--- a/core/tasks/boot_jars_package_check.mk
+++ b/core/tasks/boot_jars_package_check.mk
@@ -32,11 +32,7 @@
#TODO(jiyong) merge art_boot_jars into updatable_boot_jars
art_boot_jars := $(addsuffix .com.android.art.release,$(filter $(ART_APEX_JARS),$(PRODUCT_BOOT_JARS)))
-platform_boot_jars := $(filter-out \
- $(ART_APEX_JARS)\
- $(foreach pair,$(PRODUCT_UPDATABLE_BOOT_JARS),$(call word-colon,2,$(pair))),\
- $(PRODUCT_BOOT_JARS)\
-)
+platform_boot_jars := $(filter-out $(ART_APEX_JARS),$(PRODUCT_BOOT_JARS))
built_boot_jars := $(foreach j, $(updatable_boot_jars) $(art_boot_jars) $(platform_boot_jars), \
$(call intermediates-dir-for, JAVA_LIBRARIES, $(j),,COMMON)/classes.jar)
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index 7bcc915..53ebddc 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -14,10 +14,12 @@
.PHONY: general-tests
+# TODO(b/149249068): Remove vts-tradefed.jar after all VTS tests are converted
general_tests_tools := \
$(HOST_OUT_JAVA_LIBRARIES)/cts-tradefed.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
$(HOST_OUT_JAVA_LIBRARIES)/vts-core-tradefed.jar \
+ $(HOST_OUT_JAVA_LIBRARIES)/vts-tradefed.jar
intermediates_dir := $(call intermediates-dir-for,PACKAGING,general-tests)
general_tests_zip := $(PRODUCT_OUT)/general-tests.zip
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index 95b729a..f67d722 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -12,45 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-.PHONY: vts-core
-
-vts-core-zip := $(PRODUCT_OUT)/vts-core-tests.zip
-# Create an artifact to include a list of test config files in vts-core.
-vts-core-list-zip := $(PRODUCT_OUT)/vts-core_list.zip
-# Create an artifact to include all test config files in vts-core.
-vts-core-configs-zip := $(PRODUCT_OUT)/vts-core_configs.zip
-my_host_shared_lib_for_vts_core := $(call copy-many-files,$(COMPATIBILITY.vts-core.HOST_SHARED_LIBRARY.FILES))
-$(vts-core-zip) : .KATI_IMPLICIT_OUTPUTS := $(vts-core-list-zip) $(vts-core-configs-zip)
-$(vts-core-zip) : PRIVATE_vts_core_list := $(PRODUCT_OUT)/vts-core_list
-$(vts-core-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_vts_core)
-$(vts-core-zip) : $(COMPATIBILITY.vts-core.FILES) $(my_host_shared_lib_for_vts_core) $(SOONG_ZIP)
- echo $(sort $(COMPATIBILITY.vts-core.FILES)) | tr " " "\n" > $@.list
- grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
- grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
- $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
- echo $$shared_lib >> $@-host.list; \
- done
- grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
- grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
- $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
- $(hide) $(SOONG_ZIP) -d -o $(vts-core-configs-zip) \
- -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
- -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
- rm -f $(PRIVATE_vts_core_list)
- $(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_vts_core_list)
- $(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_vts_core_list)
- $(hide) $(SOONG_ZIP) -d -o $(vts-core-list-zip) -C $(dir $@) -f $(PRIVATE_vts_core_list)
- rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
- $(PRIVATE_vts_core_list)
-
-vts-core: $(vts-core-zip)
-
test_suite_name := vts-core
test_suite_tradefed := vts-core-tradefed
test_suite_readme := test/vts/tools/vts-core-tradefed/README
-include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
-vts-core: $(compatibility_zip)
-$(call dist-for-goals, vts-core, $(vts-core-zip) $(vts-core-list-zip) $(vts-core-configs-zip) $(compatibility_zip))
+# TODO(b/149249068): Clean up after all VTS tests are converted.
+vts_test_artifact_paths :=
+# Some repos may not include the vts project.
+-include test/vts/tools/build/tasks/framework/vts_for_core_suite.mk
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+.PHONY: vts-core
+$(compatibility_zip): $(vts_test_artifact_paths)
+vts-core: $(compatibility_zip)
+$(call dist-for-goals, vts-core, $(compatibility_zip))
tests: vts-core
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 57268ce..ab7783f 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -250,7 +250,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2020-02-05
+ PLATFORM_SECURITY_PATCH := 2020-03-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
diff --git a/envsetup.sh b/envsetup.sh
index 793f4b6..791a43d 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -8,7 +8,7 @@
Selects <product_name> as the product to build, and <build_variant> as the variant to
build, and stores those selections in the environment to be read by subsequent
invocations of 'm' etc.
-- tapas: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
+- tapas: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
- croot: Changes directory to the top of the tree, or a subdirectory thereof.
- m: Makes from the top of the tree.
- mm: Builds and installs all of the modules in the current directory, and their
@@ -218,8 +218,6 @@
arm64) toolchaindir=aarch64/aarch64-linux-android-$targetgccversion/bin;
toolchaindir2=arm/arm-linux-androideabi-$targetgccversion2/bin
;;
- mips|mips64) toolchaindir=mips/mips64el-linux-android-$targetgccversion/bin
- ;;
*)
echo "Can't find toolchain for unknown architecture: $ARCH"
toolchaindir=xxxxxxxxx
@@ -599,7 +597,12 @@
{
local answer
- if [ "$1" ] ; then
+ if [[ $# -gt 1 ]]; then
+ echo "usage: lunch [target]" >&2
+ return 1
+ fi
+
+ if [ "$1" ]; then
answer=$1
else
print_lunch_menu
@@ -696,10 +699,10 @@
function tapas()
{
local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
- local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|arm64|x86_64|mips64)$' | xargs)"
+ local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|arm64|x86_64)$' | xargs)"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
- local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+ local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|arm64|x86_64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
if [ "$showHelp" != "" ]; then
$(gettop)/build/make/tapasHelp.sh
@@ -722,10 +725,8 @@
local product=aosp_arm
case $arch in
x86) product=aosp_x86;;
- mips) product=aosp_mips;;
arm64) product=aosp_arm64;;
x86_64) product=aosp_x86_64;;
- mips64) product=aosp_mips64;;
esac
if [ -z "$variant" ]; then
variant=eng
@@ -949,7 +950,7 @@
Darwin)
function sgrep()
{
- find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|xml|sh|mk|aidl|vts)' \
+ find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|xml|sh|mk|aidl|vts|proto)' \
-exec grep --color -n "$@" {} +
}
@@ -957,7 +958,7 @@
*)
function sgrep()
{
- find . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\)' \
+ find . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
-exec grep --color -n "$@" {} +
}
;;
diff --git a/rbesetup.sh b/rbesetup.sh
new file mode 100644
index 0000000..7e9b2ea
--- /dev/null
+++ b/rbesetup.sh
@@ -0,0 +1,25 @@
+source build/envsetup.sh
+
+# This function prefixes the given command with appropriate variables needed
+# for the build to be executed with RBE.
+function use_rbe() {
+ local RBE_LOG_DIR="/tmp"
+ local RBE_BINARIES_DIR="prebuilts/remoteexecution-client/latest/"
+ local DOCKER_IMAGE="gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62"
+
+ # Do not set an invocation-ID and let reproxy auto-generate one.
+ USE_RBE="true" \
+ FLAG_server_address="unix:///tmp/reproxy_$RANDOM.sock" \
+ FLAG_exec_root="$(gettop)" \
+ FLAG_platform="container-image=docker://${DOCKER_IMAGE}" \
+ RBE_use_application_default_credentials="true" \
+ RBE_log_dir="${RBE_LOG_DIR}" \
+ RBE_reproxy_wait_seconds="20" \
+ RBE_output_dir="${RBE_LOG_DIR}" \
+ RBE_log_path="text://${RBE_LOG_DIR}/reproxy_log.txt" \
+ RBE_CXX_EXEC_STRATEGY="remote_local_fallback" \
+ RBE_cpp_dependency_scanner_plugin="${RBE_BINARIES_DIR}/dependency_scanner_go_plugin.so" \
+ RBE_DIR=${RBE_BINARIES_DIR} \
+ RBE_re_proxy="${RBE_BINARIES_DIR}/reproxy" \
+ $@
+}
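A hypothetical usage sketch for the new helper; the source path and lunch target are illustrative, and the RBE prebuilts referenced above are assumed to be present in the tree.

    # rbesetup.sh sources envsetup.sh itself, so one source is enough.
    source build/rbesetup.sh   # may be build/make/rbesetup.sh in some checkouts
    lunch aosp_arm64-userdebug
    # Wrap any build command; the RBE variables apply only to that invocation.
    use_rbe m -j"$(nproc)"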
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 38b3e34..0f46130 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
cd ../..
TOP="${PWD}"
-message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
tapas selects individual apps to be built by the Android build system. Unlike
"lunch", "tapas" does not request the building of images for a device.
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 2c7d2da..9edc85c 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -52,6 +52,43 @@
include $(BUILD_PREBUILT)
endif
+# DEVICE_MANIFEST_SKUS: a list of SKUs for which DEVICE_MANIFEST_<sku>_FILES is defined.
+ifdef DEVICE_MANIFEST_SKUS
+
+# Install /vendor/etc/vintf/manifest_$(sku).xml
+# $(1): sku
+define _add_device_sku_manifest
+my_fragment_files_var := DEVICE_MANIFEST_$$(call to-upper,$(1))_FILES
+ifndef $$(my_fragment_files_var)
+$$(error $(1) is in DEVICE_MANIFEST_SKUS but $$(my_fragment_files_var) is not defined)
+endif
+my_fragment_files := $$($$(my_fragment_files_var))
+include $$(CLEAR_VARS)
+LOCAL_MODULE := vendor_manifest_$(1).xml
+LOCAL_MODULE_STEM := manifest_$(1).xml
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc/vintf
+
+GEN := $$(local-generated-sources-dir)/manifest_$(1).xml
+$$(GEN): PRIVATE_SRC_FILES := $$(my_fragment_files)
+$$(GEN): $$(my_fragment_files) $$(HOST_OUT_EXECUTABLES)/assemble_vintf
+ BOARD_SEPOLICY_VERS=$$(BOARD_SEPOLICY_VERS) \
+ PRODUCT_ENFORCE_VINTF_MANIFEST=$$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
+ PRODUCT_SHIPPING_API_LEVEL=$$(PRODUCT_SHIPPING_API_LEVEL) \
+ $$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $$@ \
+ -i $$(call normalize-path-list,$$(PRIVATE_SRC_FILES))
+
+LOCAL_PREBUILT_MODULE_FILE := $$(GEN)
+include $$(BUILD_PREBUILT)
+my_fragment_files_var :=
+my_fragment_files :=
+endef
+
+$(foreach sku, $(DEVICE_MANIFEST_SKUS), $(eval $(call _add_device_sku_manifest,$(sku))))
+_add_device_sku_manifest :=
+
+endif # DEVICE_MANIFEST_SKUS
+
# ODM manifest
ifdef ODM_MANIFEST_FILES
# ODM_MANIFEST_FILES is a list of files that is combined and installed as the default ODM manifest.
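With this block, each SKU listed in DEVICE_MANIFEST_SKUS has its DEVICE_MANIFEST_<SKU>_FILES fragments assembled by assemble_vintf into /vendor/etc/vintf/manifest_<sku>.xml. A quick shell check on a finished build (the SKU names are hypothetical):

    # Assuming the board config defines, e.g., DEVICE_MANIFEST_SKUS := foo bar
    # plus DEVICE_MANIFEST_FOO_FILES / DEVICE_MANIFEST_BAR_FILES, the per-SKU
    # manifests land next to the default vendor manifest.
    ls "$ANDROID_PRODUCT_OUT"/vendor/etc/vintf/manifest_*.xml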
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 20fd189..d9eb7c3 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -27,6 +27,7 @@
android.test.base \
android.test.mock \
android.test.runner \
+ ANGLE \
apexd \
appops \
app_process \
@@ -59,7 +60,6 @@
com.android.resolv \
com.android.neuralnetworks \
com.android.sdkext \
- com.android.telephony \
com.android.tethering \
com.android.tzdata \
ContactsProvider \
@@ -312,26 +312,21 @@
tz_version_host \
tz_version_host_tzdata_apex \
-ifeq ($(TARGET_CORE_JARS),)
-$(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
+ifeq ($(ART_APEX_JARS),)
+$(error ART_APEX_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
endif
# The order matters for runtime class lookup performance.
PRODUCT_BOOT_JARS := \
- $(TARGET_CORE_JARS) \
+ $(ART_APEX_JARS) \
framework-minus-apex \
ext \
telephony-common \
voip-common \
ims-common \
- framework-sdkextensions \
- ike \
- updatable-media \
- framework-tethering
PRODUCT_UPDATABLE_BOOT_JARS := \
com.android.conscrypt:conscrypt \
- com.android.ipsec:ike \
com.android.media:updatable-media \
com.android.sdkext:framework-sdkextensions \
com.android.tethering:framework-tethering
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index 6847bfa..b67549a 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -17,4 +17,5 @@
# Base modules and settings for the system_ext partition.
PRODUCT_PACKAGES += \
group_system_ext \
+ system_ext_manifest.xml \
passwd_system_ext \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 469e231..b3368d6 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -67,11 +67,11 @@
selinux_policy_nonsystem \
shell_and_utilities_vendor \
vndservice \
- vndservicemanager \
# Base module when shipping api level is less than or equal to 29
PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \
android.hardware.configstore@1.1-service \
+ vndservicemanager \
# VINTF data for vendor image
PRODUCT_PACKAGES += \
diff --git a/target/product/developer_gsi_keys.mk b/target/product/developer_gsi_keys.mk
index 79451ad..a7e3d62 100644
--- a/target/product/developer_gsi_keys.mk
+++ b/target/product/developer_gsi_keys.mk
@@ -27,3 +27,5 @@
#
PRODUCT_PACKAGES += \
q-developer-gsi.avbpubkey \
+ r-developer-gsi.avbpubkey \
+ s-developer-gsi.avbpubkey \
diff --git a/target/product/emulator_system.mk b/target/product/emulator_system.mk
new file mode 100644
index 0000000..4b6987c
--- /dev/null
+++ b/target/product/emulator_system.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This file lists experimental emulator modules added to PRODUCT_PACKAGES,
+# included only by the sdk_phone_x86/64 and sdk_gphone_x86/64 targets
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST := \
+ system/lib/libemulator_multidisplay_jni.so \
+ system/lib64/libemulator_multidisplay_jni.so \
+ system/priv-app/MultiDisplayProvider/MultiDisplayProvider.apk \
+
+PRODUCT_PACKAGES += MultiDisplayProvider
diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp
new file mode 100644
index 0000000..b7ce86e
--- /dev/null
+++ b/target/product/gsi/Android.bp
@@ -0,0 +1,20 @@
+// Copyright 2020 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+filegroup {
+ name: "vndk_lib_lists",
+ srcs: [
+ "*.txt",
+ ],
+}
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index bd8450b..c491d4a 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -156,10 +156,6 @@
ifneq ($(TARGET_SKIP_CURRENT_VNDK),true)
LOCAL_REQUIRED_MODULES += \
- llndk.libraries.txt \
- vndksp.libraries.txt \
- vndkcore.libraries.txt \
- vndkprivate.libraries.txt \
vndkcorevariant.libraries.txt \
$(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
$(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
@@ -169,24 +165,15 @@
include $(BUILD_PHONY_PACKAGE)
include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_snapshot_package
-_binder32 :=
-ifneq ($(TARGET_USES_64_BIT_BINDER),true)
-ifneq ($(TARGET_IS_64_BIT),true)
-_binder32 := _binder32
+_vndk_versions := $(PRODUCT_EXTRA_VNDK_VERSIONS)
+ifneq ($(BOARD_VNDK_VERSION),current)
+ _vndk_versions += $(BOARD_VNDK_VERSION)
endif
-endif
-# Phony targets are installed for **.libraries.txt files.
-# TODO(b/141450808): remove following VNDK phony targets when **.libraries.txt files are provided by apexes.
-LOCAL_REQUIRED_MODULES := \
- $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH)$(_binder32))
-_binder32 :=
+LOCAL_MODULE := vndk_apex_snapshot_package
+LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(_vndk_versions),com.android.vndk.v$(vndk_ver))
include $(BUILD_PHONY_PACKAGE)
-include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_apex_snapshot_package
-LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver))
-include $(BUILD_PHONY_PACKAGE)
+_vndk_versions :=
endif # BOARD_VNDK_VERSION is set
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 82c2f3c..0c1f788 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -128,10 +128,11 @@
VNDK-core: android.hardware.health@1.0.so
VNDK-core: android.hardware.health@2.0.so
VNDK-core: android.hardware.health@2.1.so
-VNDK-core: android.hardware.identity@1.0.so
+VNDK-core: android.hardware.identity-V1-ndk_platform.so
VNDK-core: android.hardware.input.classifier@1.0.so
VNDK-core: android.hardware.input.common@1.0.so
VNDK-core: android.hardware.ir@1.0.so
+VNDK-core: android.hardware.keymaster-V1-ndk_platform.so
VNDK-core: android.hardware.keymaster@3.0.so
VNDK-core: android.hardware.keymaster@4.0.so
VNDK-core: android.hardware.keymaster@4.1.so
@@ -160,7 +161,6 @@
VNDK-core: android.hardware.radio.config@1.0.so
VNDK-core: android.hardware.radio.config@1.1.so
VNDK-core: android.hardware.radio.config@1.2.so
-VNDK-core: android.hardware.radio.config@1.3.so
VNDK-core: android.hardware.radio.deprecated@1.0.so
VNDK-core: android.hardware.radio@1.0.so
VNDK-core: android.hardware.radio@1.1.so
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
index 36b740a..e03c212 100644
--- a/target/product/handheld_product.mk
+++ b/target/product/handheld_product.mk
@@ -29,16 +29,10 @@
DeskClock \
Gallery2 \
LatinIME \
- Launcher3QuickStep \
Music \
OneTimeInitializer \
- Provision \
QuickSearchBox \
- Settings \
SettingsIntelligence \
- StorageManager \
- SystemUI \
- WallpaperCropper \
frameworks-base-overlays
PRODUCT_PACKAGES_DEBUG += \
diff --git a/target/product/handheld_system_ext.mk b/target/product/handheld_system_ext.mk
index bda4be6..d935fbf 100644
--- a/target/product/handheld_system_ext.mk
+++ b/target/product/handheld_system_ext.mk
@@ -22,3 +22,9 @@
# /system_ext packages
PRODUCT_PACKAGES += \
+ Launcher3QuickStep \
+ Provision \
+ Settings \
+ StorageManager \
+ SystemUI \
+ WallpaperCropper \
diff --git a/target/product/mainline_arm64.mk b/target/product/mainline_arm64.mk
index 52b3222..850c775 100644
--- a/target/product/mainline_arm64.mk
+++ b/target/product/mainline_arm64.mk
@@ -29,7 +29,8 @@
PRODUCT_SHIPPING_API_LEVEL := 29
-PRODUCT_RESTRICT_VENDOR_FILES := all
+# TODO(b/137033385): change this back to "all"
+PRODUCT_RESTRICT_VENDOR_FILES := owner
PRODUCT_NAME := mainline_arm64
PRODUCT_DEVICE := mainline_arm64
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index ccbc907..a8b75e0 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -125,6 +125,9 @@
PRODUCT_ENFORCE_RRO_TARGETS := *
+# TODO(b/150820813) Settings depends on static overlay, remove this after eliminating the dependency.
+PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS := Settings
+
PRODUCT_NAME := mainline_system
PRODUCT_BRAND := generic
diff --git a/target/product/mainline_system_arm64.mk b/target/product/mainline_system_arm64.mk
index 772c687..60035c1 100644
--- a/target/product/mainline_system_arm64.mk
+++ b/target/product/mainline_system_arm64.mk
@@ -36,7 +36,8 @@
PRODUCT_SHIPPING_API_LEVEL := 29
-PRODUCT_RESTRICT_VENDOR_FILES := all
+# TODO(b/137033385): change this back to "all"
+PRODUCT_RESTRICT_VENDOR_FILES := owner
PRODUCT_NAME := mainline_system_arm64
PRODUCT_DEVICE := mainline_arm64
diff --git a/target/product/mainline_system_x86.mk b/target/product/mainline_system_x86.mk
index 05e51a9..a30a1fc 100644
--- a/target/product/mainline_system_x86.mk
+++ b/target/product/mainline_system_x86.mk
@@ -35,7 +35,8 @@
PRODUCT_SHIPPING_API_LEVEL := 29
-PRODUCT_RESTRICT_VENDOR_FILES := all
+# TODO(b/137033385): change this back to "all"
+PRODUCT_RESTRICT_VENDOR_FILES := owner
PRODUCT_NAME := mainline_system_x86
PRODUCT_DEVICE := mainline_x86
diff --git a/target/product/mainline_system_x86_arm.mk b/target/product/mainline_system_x86_arm.mk
index cc11c55..2e01cde 100644
--- a/target/product/mainline_system_x86_arm.mk
+++ b/target/product/mainline_system_x86_arm.mk
@@ -35,7 +35,8 @@
PRODUCT_SHIPPING_API_LEVEL := 29
-PRODUCT_RESTRICT_VENDOR_FILES := all
+# TODO(b/137033385): change this back to "all"
+PRODUCT_RESTRICT_VENDOR_FILES := owner
PRODUCT_NAME := mainline_system_x86_arm
PRODUCT_DEVICE := mainline_x86_arm
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index c2c9762..a83e609 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -36,7 +36,6 @@
make_f2fs \
requestsync \
StatementService \
- vndk_snapshot_package \
PRODUCT_HOST_PACKAGES += \
fsck.f2fs \
@@ -51,15 +50,15 @@
# The order here is the same order they end up on the classpath, so it matters.
PRODUCT_SYSTEM_SERVER_JARS := \
+ com.android.location.provider \
services \
ethernet-service \
wifi-service \
- com.android.location.provider \
# system server jars which are updated via apex modules.
# The values should be of the format <apex name>:<jar name>
PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS := \
- # Ex: com.android.wifi:wifi-service
+ com.android.ipsec:android.net.ipsec.ike \
PRODUCT_COPY_FILES += \
system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index b8cb2ff..5184016 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -16,10 +16,6 @@
# Provides a functioning ART environment without Android frameworks
-ifeq ($(TARGET_CORE_JARS),)
-$(error TARGET_CORE_JARS is empty; cannot update PRODUCT_PACKAGES variable)
-endif
-
# Additional mixins to the boot classpath.
PRODUCT_PACKAGES += \
android.test.base \
@@ -33,8 +29,7 @@
# ART APEX module.
# Note that this package includes the minimal boot classpath JARs (listed in
-# TARGET_CORE_JARS), which should no longer be added directly to
-# PRODUCT_PACKAGES.
+# ART_APEX_JARS), which should no longer be added directly to PRODUCT_PACKAGES.
PRODUCT_PACKAGES += com.android.art
PRODUCT_HOST_PACKAGES += com.android.art
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 9df26a9..e8c60b4 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -44,8 +44,6 @@
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_x86.mk)
-
# Define the host tools and libs that are parts of the SDK.
-include sdk/build/product_sdk.mk
-include development/build/product_sdk.mk
diff --git a/target/product/telephony_product.mk b/target/product/telephony_product.mk
index a4c7e31..3ec954f 100644
--- a/target/product/telephony_product.mk
+++ b/target/product/telephony_product.mk
@@ -19,6 +19,4 @@
# /product packages
PRODUCT_PACKAGES += \
- CarrierConfig \
Dialer \
- EmergencyInfo \
diff --git a/target/product/telephony_system_ext.mk b/target/product/telephony_system_ext.mk
index 1b9ee98..f81a607 100644
--- a/target/product/telephony_system_ext.mk
+++ b/target/product/telephony_system_ext.mk
@@ -19,3 +19,5 @@
# /system_ext packages
PRODUCT_PACKAGES += \
+ CarrierConfig \
+ EmergencyInfo \
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 8c69417..1dd5e4a 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -52,6 +52,7 @@
cc_library_headers {
name: "oemaids_headers",
+ vendor_available: true,
generated_headers: ["oemaids_header_gen"],
export_generated_headers: ["oemaids_header_gen"],
}
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 8249915..cc05c64 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -338,7 +338,7 @@
# Use repeatable ext4 FS UUID and hash_seed UUID (based on partition name and
# build fingerprint).
build_info = common.BuildInfo(info_dict)
- uuid_seed = what + "-" + build_info.fingerprint
+ uuid_seed = what + "-" + build_info.GetPartitionFingerprint(what)
image_props["uuid"] = str(uuid.uuid5(uuid.NAMESPACE_URL, uuid_seed))
hash_seed = "hash_seed-" + uuid_seed
image_props["hash_seed"] = str(uuid.uuid5(uuid.NAMESPACE_URL, hash_seed))
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 4fac6f3..9e27d29 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -27,6 +27,8 @@
OPTIONS = common.OPTIONS
+APEX_PAYLOAD_IMAGE = 'apex_payload.img'
+
class ApexInfoError(Exception):
"""An Exception raised during Apex Information command."""
@@ -50,15 +52,11 @@
self.key_passwords = key_passwords
self.codename_to_api_level_map = codename_to_api_level_map
- def ProcessApexFile(self, apk_keys, payload_key, payload_public_key,
- signing_args=None):
+ def ProcessApexFile(self, apk_keys, payload_key):
"""Scans and signs the apk files and repack the apex
Args:
apk_keys: A dict that holds the signing keys for apk files.
- payload_key: The path to the apex payload signing key.
- payload_public_key: The path to the public key corresponding to the
- payload signing key.
Returns:
The repacked apex file containing the signed apk files.
@@ -89,8 +87,7 @@
logger.info('No apk file has been signed in %s', self.apex_path)
return self.apex_path
- return self.RepackApexPayload(payload_dir, payload_key, payload_public_key,
- signing_args)
+ return self.RepackApexPayload(payload_dir, payload_key)
def ExtractApexPayloadAndSignApks(self, apk_entries, apk_keys):
"""Extracts the payload image and signs the containing apk files."""
@@ -118,27 +115,15 @@
has_signed_apk = True
return payload_dir, has_signed_apk
- def RepackApexPayload(self, payload_dir, payload_key, payload_public_key,
- signing_args=None):
+ def RepackApexPayload(self, payload_dir, payload_key):
"""Rebuilds the apex file with the updated payload directory."""
apex_dir = common.MakeTempDir()
# Extract the apex file and reuse its meta files as repack parameters.
common.UnzipToDir(self.apex_path, apex_dir)
-
- android_jar_path = common.OPTIONS.android_jar_path
- if not android_jar_path:
- android_jar_path = os.path.join(os.environ.get('ANDROID_BUILD_TOP', ''),
- 'prebuilts', 'sdk', 'current', 'public',
- 'android.jar')
- logger.warning('android_jar_path not found in options, falling back to'
- ' use %s', android_jar_path)
-
arguments_dict = {
'manifest': os.path.join(apex_dir, 'apex_manifest.pb'),
'build_info': os.path.join(apex_dir, 'apex_build_info.pb'),
- 'android_jar_path': android_jar_path,
'key': payload_key,
- 'pubkey': payload_public_key,
}
for filename in arguments_dict.values():
assert os.path.exists(filename), 'file {} not found'.format(filename)
@@ -151,29 +136,30 @@
elif os.path.isdir(path):
shutil.rmtree(path)
- repacked_apex = common.MakeTempFile(suffix='.apex')
- repack_cmd = ['apexer', '--force', '--include_build_info',
- '--do_not_check_keyname', '--apexer_tool_path',
- os.getenv('PATH')]
+ # TODO(xunchang) the signing process can be improved by using
+ # '--unsigned_payload_only'. But we need to parse the vbmeta earlier for
+ # the signing arguments, e.g. algorithm, salt, etc.
+ payload_img = os.path.join(apex_dir, APEX_PAYLOAD_IMAGE)
+ generate_image_cmd = ['apexer', '--force', '--payload_only',
+ '--do_not_check_keyname', '--apexer_tool_path',
+ os.getenv('PATH')]
for key, val in arguments_dict.items():
- repack_cmd.extend(['--' + key, val])
- # Add quote to the signing_args as we will pass
- # --signing_args "--signing_helper_with_files=%path" to apexer
- if signing_args:
- repack_cmd.extend(['--signing_args', '"{}"'.format(signing_args)])
+ generate_image_cmd.extend(['--' + key, val])
# optional arguments for apex repacking
manifest_json = os.path.join(apex_dir, 'apex_manifest.json')
if os.path.exists(manifest_json):
- repack_cmd.extend(['--manifest_json', manifest_json])
- assets_dir = os.path.join(apex_dir, 'assets')
- if os.path.isdir(assets_dir):
- repack_cmd.extend(['--assets_dir', assets_dir])
- repack_cmd.extend([payload_dir, repacked_apex])
+ generate_image_cmd.extend(['--manifest_json', manifest_json])
+ generate_image_cmd.extend([payload_dir, payload_img])
if OPTIONS.verbose:
- repack_cmd.append('-v')
- common.RunAndCheckOutput(repack_cmd)
+ generate_image_cmd.append('-v')
+ common.RunAndCheckOutput(generate_image_cmd)
- return repacked_apex
+ # Add the payload image back to the apex file.
+ common.ZipDelete(self.apex_path, APEX_PAYLOAD_IMAGE)
+ with zipfile.ZipFile(self.apex_path, 'a') as output_apex:
+ common.ZipWrite(output_apex, payload_img, APEX_PAYLOAD_IMAGE,
+ compress_type=zipfile.ZIP_STORED)
+ return self.apex_path
def SignApexPayload(avbtool, payload_file, payload_key_path, payload_key_name,
@@ -311,16 +297,13 @@
with open(apex_file, 'wb') as apex_fp:
apex_fp.write(apex_data)
- APEX_PAYLOAD_IMAGE = 'apex_payload.img'
APEX_PUBKEY = 'apex_pubkey'
# 1. Extract the apex payload image and sign the containing apk files. Repack
# the apex file after signing.
- payload_public_key = common.ExtractAvbPublicKey(avbtool, payload_key)
apk_signer = ApexApkSigner(apex_file, container_pw,
codename_to_api_level_map)
- apex_file = apk_signer.ProcessApexFile(apk_keys, payload_key,
- payload_public_key, signing_args)
+ apex_file = apk_signer.ProcessApexFile(apk_keys, payload_key)
# 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
# payload_key.
@@ -341,7 +324,7 @@
signing_args)
# 2b. Update the embedded payload public key.
-
+ payload_public_key = common.ExtractAvbPublicKey(avbtool, payload_key)
common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
if APEX_PUBKEY in zip_items:
common.ZipDelete(apex_file, APEX_PUBKEY)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index e424b6f..54bb857 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -287,7 +287,7 @@
build_command.extend(["-U", prop_dict["uuid"]])
if "hash_seed" in prop_dict:
build_command.extend(["-S", prop_dict["hash_seed"]])
- if "ext4_share_dup_blocks" in prop_dict:
+ if prop_dict.get("ext4_share_dup_blocks") == "true":
build_command.append("-c")
if (needs_projid):
build_command.extend(["--inode_size", "512"])
@@ -540,7 +540,6 @@
"verity_disable",
"avb_enable",
"avb_avbtool",
- "avb_salt",
"use_dynamic_partition_size",
)
for p in common_props:
@@ -553,6 +552,7 @@
"avb_add_hashtree_footer_args")
copy_prop("avb_system_key_path", "avb_key_path")
copy_prop("avb_system_algorithm", "avb_algorithm")
+ copy_prop("avb_system_salt", "avb_salt")
copy_prop("fs_type", "fs_type")
# Copy the generic system fs type first, override with specific one if
# available.
@@ -584,6 +584,7 @@
"avb_add_hashtree_footer_args")
copy_prop("avb_system_other_key_path", "avb_key_path")
copy_prop("avb_system_other_algorithm", "avb_algorithm")
+ copy_prop("avb_system_other_salt", "avb_salt")
copy_prop("fs_type", "fs_type")
copy_prop("system_fs_type", "fs_type")
copy_prop("system_other_size", "partition_size")
@@ -619,6 +620,7 @@
"avb_add_hashtree_footer_args")
copy_prop("avb_vendor_key_path", "avb_key_path")
copy_prop("avb_vendor_algorithm", "avb_algorithm")
+ copy_prop("avb_vendor_salt", "avb_salt")
copy_prop("vendor_fs_type", "fs_type")
copy_prop("vendor_size", "partition_size")
if not copy_prop("vendor_journal_size", "journal_size"):
@@ -641,6 +643,7 @@
"avb_add_hashtree_footer_args")
copy_prop("avb_product_key_path", "avb_key_path")
copy_prop("avb_product_algorithm", "avb_algorithm")
+ copy_prop("avb_product_salt", "avb_salt")
copy_prop("product_fs_type", "fs_type")
copy_prop("product_size", "partition_size")
if not copy_prop("product_journal_size", "journal_size"):
@@ -663,6 +666,7 @@
"avb_add_hashtree_footer_args")
copy_prop("avb_system_ext_key_path", "avb_key_path")
copy_prop("avb_system_ext_algorithm", "avb_algorithm")
+ copy_prop("avb_system_ext_salt", "avb_salt")
copy_prop("system_ext_fs_type", "fs_type")
copy_prop("system_ext_size", "partition_size")
if not copy_prop("system_ext_journal_size", "journal_size"):
@@ -687,6 +691,7 @@
"avb_add_hashtree_footer_args")
copy_prop("avb_odm_key_path", "avb_key_path")
copy_prop("avb_odm_algorithm", "avb_algorithm")
+ copy_prop("avb_odm_salt", "avb_salt")
copy_prop("odm_fs_type", "fs_type")
copy_prop("odm_size", "partition_size")
if not copy_prop("odm_journal_size", "journal_size"):
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index f41df37..b3d491f 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -67,13 +67,18 @@
def GetArgsForSkus(info_dict):
- skus = info_dict.get('vintf_odm_manifest_skus', '').strip().split()
- if not skus:
- logger.info("ODM_MANIFEST_SKUS is not defined. Check once without SKUs.")
- skus = ['']
- return [['--property', 'ro.boot.product.hardware.sku=' + sku]
- for sku in skus]
+ odm_skus = info_dict.get('vintf_odm_manifest_skus', '').strip().split()
+ if info_dict.get('vintf_include_empty_odm_sku', '') == "true" or not odm_skus:
+ odm_skus += ['']
+ vendor_skus = info_dict.get('vintf_vendor_manifest_skus', '').strip().split()
+ if info_dict.get('vintf_include_empty_vendor_sku', '') == "true" or \
+ not vendor_skus:
+ vendor_skus += ['']
+
+ return [['--property', 'ro.boot.product.hardware.sku=' + odm_sku,
+ '--property', 'ro.boot.product.vendor.sku=' + vendor_sku]
+ for odm_sku in odm_skus for vendor_sku in vendor_skus]
def GetArgsForShippingApiLevel(info_dict):
shipping_api_level = info_dict['vendor.build.prop'].get(
@@ -89,7 +94,7 @@
config_path = os.path.join(input_tmp, 'META/kernel_configs.txt')
if not os.path.isfile(version_path) or not os.path.isfile(config_path):
- logger.info('Skipping kernel config checks because ' +
+ logger.info('Skipping kernel config checks because '
'PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS is not set')
return []
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 2e235ee..3276b29 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -319,7 +319,7 @@
OEM-specific properties, some of them will be calculated from two info dicts.
Users can query properties similarly as using a dict() (e.g. info['fstab']),
- or to query build properties via GetBuildProp() or GetVendorBuildProp().
+ or to query build properties via GetBuildProp() or GetPartitionBuildProp().
Attributes:
info_dict: The build-time info dict.
@@ -362,16 +362,31 @@
if self._oem_props:
assert oem_dicts, "OEM source required for this build"
+ def check_fingerprint(fingerprint):
+ if (" " in fingerprint or any(ord(ch) > 127 for ch in fingerprint)):
+ raise ValueError(
+ 'Invalid build fingerprint: "{}". See the requirement in Android CDD '
+ "3.2.2. Build Parameters.".format(fingerprint))
+
+
+ self._partition_fingerprints = {}
+ for partition in PARTITIONS_WITH_CARE_MAP:
+ try:
+ fingerprint = self.CalculatePartitionFingerprint(partition)
+ check_fingerprint(fingerprint)
+ self._partition_fingerprints[partition] = fingerprint
+ except ExternalError:
+ continue
+ if "system" in self._partition_fingerprints:
+ # system_other is not included in PARTITIONS_WITH_CARE_MAP, but does
+ # need a fingerprint when creating the image.
+ self._partition_fingerprints[
+ "system_other"] = self._partition_fingerprints["system"]
+
# These two should be computed only after setting self._oem_props.
self._device = self.GetOemProperty("ro.product.device")
self._fingerprint = self.CalculateFingerprint()
-
- # Sanity check the build fingerprint.
- if (' ' in self._fingerprint or
- any(ord(ch) > 127 for ch in self._fingerprint)):
- raise ValueError(
- 'Invalid build fingerprint: "{}". See the requirement in Android CDD '
- '3.2.2. Build Parameters.'.format(self._fingerprint))
+ check_fingerprint(self._fingerprint)
@property
def is_ab(self):
@@ -386,28 +401,6 @@
return self._fingerprint
@property
- def vendor_fingerprint(self):
- return self._fingerprint_of("vendor")
-
- @property
- def product_fingerprint(self):
- return self._fingerprint_of("product")
-
- @property
- def odm_fingerprint(self):
- return self._fingerprint_of("odm")
-
- def _fingerprint_of(self, partition):
- if partition + ".build.prop" not in self.info_dict:
- return None
- build_prop = self.info_dict[partition + ".build.prop"]
- if "ro." + partition + ".build.fingerprint" in build_prop:
- return build_prop["ro." + partition + ".build.fingerprint"]
- if "ro." + partition + ".build.thumbprint" in build_prop:
- return build_prop["ro." + partition + ".build.thumbprint"]
- return None
-
- @property
def oem_props(self):
return self._oem_props
@@ -423,8 +416,22 @@
def items(self):
return self.info_dict.items()
+ def GetPartitionBuildProp(self, prop, partition):
+ """Returns the inquired build property for the provided partition."""
+ # If provided a partition for this property, only look within that
+ # partition's build.prop.
+ if prop in BuildInfo._RO_PRODUCT_RESOLVE_PROPS:
+ prop = prop.replace("ro.product", "ro.product.{}".format(partition))
+ else:
+ prop = prop.replace("ro.", "ro.{}.".format(partition))
+ try:
+ return self.info_dict.get("{}.build.prop".format(partition), {})[prop]
+ except KeyError:
+ raise ExternalError("couldn't find %s in %s.build.prop" %
+ (prop, partition))
+
def GetBuildProp(self, prop):
- """Returns the inquired build property."""
+ """Returns the inquired build property from the standard build.prop file."""
if prop in BuildInfo._RO_PRODUCT_RESOLVE_PROPS:
return self._ResolveRoProductBuildProp(prop)
@@ -462,19 +469,28 @@
raise ExternalError("couldn't resolve {}".format(prop))
- def GetVendorBuildProp(self, prop):
- """Returns the inquired vendor build property."""
- try:
- return self.info_dict.get("vendor.build.prop", {})[prop]
- except KeyError:
- raise ExternalError(
- "couldn't find %s in vendor.build.prop" % (prop,))
-
def GetOemProperty(self, key):
if self.oem_props is not None and key in self.oem_props:
return self.oem_dicts[0][key]
return self.GetBuildProp(key)
+ def GetPartitionFingerprint(self, partition):
+ return self._partition_fingerprints.get(partition, None)
+
+ def CalculatePartitionFingerprint(self, partition):
+ try:
+ return self.GetPartitionBuildProp("ro.build.fingerprint", partition)
+ except ExternalError:
+ return "{}/{}/{}:{}/{}/{}:{}/{}".format(
+ self.GetPartitionBuildProp("ro.product.brand", partition),
+ self.GetPartitionBuildProp("ro.product.name", partition),
+ self.GetPartitionBuildProp("ro.product.device", partition),
+ self.GetPartitionBuildProp("ro.build.version.release", partition),
+ self.GetPartitionBuildProp("ro.build.id", partition),
+ self.GetPartitionBuildProp("ro.build.version.incremental", partition),
+ self.GetPartitionBuildProp("ro.build.type", partition),
+ self.GetPartitionBuildProp("ro.build.tags", partition))
+
def CalculateFingerprint(self):
if self.oem_props is None:
try:
@@ -644,7 +660,10 @@
# hash / hashtree footers.
if d.get("avb_enable") == "true":
build_info = BuildInfo(d)
- d["avb_salt"] = sha256(build_info.fingerprint).hexdigest()
+ for partition in PARTITIONS_WITH_CARE_MAP:
+ fingerprint = build_info.GetPartitionFingerprint(partition)
+ if fingerprint:
+ d["avb_{}_salt".format(partition)] = sha256(fingerprint).hexdigest()
return d
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 7a0d994..2126d11 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -631,7 +631,7 @@
partition_target_info = target_info["fstab"]["/" + name]
disable_imgdiff = (partition_source_info.fs_type == "squashfs" or
partition_target_info.fs_type == "squashfs")
- return common.BlockDifference(name, partition_src, partition_tgt,
+ return common.BlockDifference(name, partition_tgt, partition_src,
check_first_block,
version=blockimgdiff_version,
disable_imgdiff=disable_imgdiff)
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index cce771c..5b7c2ac 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -91,6 +91,14 @@
Replace the veritykeyid in BOOT/cmdline of input_target_file_zip
with keyid of the cert pointed by <path_to_X509_PEM_cert_file>.
+ --remove_avb_public_keys <key1>,<key2>,...
+ Remove AVB public keys from the first-stage ramdisk. The key file to
+ remove is located at either of the following dirs:
+ - BOOT/RAMDISK/avb/ or
+ - BOOT/RAMDISK/first_stage_ramdisk/avb/
+ The second dir will be used for lookup if BOARD_USES_RECOVERY_AS_BOOT is
+ set to true.
+
--avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
vbmeta_vendor}_algorithm <algorithm>
--avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
@@ -150,6 +158,7 @@
OPTIONS.replace_verity_public_key = False
OPTIONS.replace_verity_private_key = False
OPTIONS.replace_verity_keyid = False
+OPTIONS.remove_avb_public_keys = None
OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
OPTIONS.avb_keys = {}
OPTIONS.avb_algorithms = {}
@@ -585,6 +594,18 @@
filename in ("BOOT/RAMDISK/verity_key",
"ROOT/verity_key")):
pass
+ elif (OPTIONS.remove_avb_public_keys and
+ (filename.startswith("BOOT/RAMDISK/avb/") or
+ filename.startswith("BOOT/RAMDISK/first_stage_ramdisk/avb/"))):
+ matched_removal = False
+ for key_to_remove in OPTIONS.remove_avb_public_keys:
+ if filename.endswith(key_to_remove):
+ matched_removal = True
+ print("Removing AVB public key from ramdisk: %s" % filename)
+ break
+ if not matched_removal:
+ # Copy it verbatim if we don't want to remove it.
+ common.ZipWriteStr(output_tf_zip, out_info, data)
# Skip verity keyid (for system_root_image use) if we will replace it.
elif OPTIONS.replace_verity_keyid and filename == "BOOT/cmdline":
@@ -610,8 +631,7 @@
# Should NOT sign boot-debug.img.
elif filename in (
"BOOT/RAMDISK/force_debuggable",
- "RECOVERY/RAMDISK/force_debuggable"
- "RECOVERY/RAMDISK/first_stage_ramdisk/force_debuggable"):
+ "BOOT/RAMDISK/first_stage_ramdisk/force_debuggable"):
raise common.ExternalError("debuggable boot.img cannot be signed")
# A non-APK file; copy it verbatim.
@@ -1135,6 +1155,8 @@
OPTIONS.replace_verity_private_key = (True, a)
elif o == "--replace_verity_keyid":
OPTIONS.replace_verity_keyid = (True, a)
+ elif o == "--remove_avb_public_keys":
+ OPTIONS.remove_avb_public_keys = a.split(",")
elif o == "--avb_vbmeta_key":
OPTIONS.avb_keys['vbmeta'] = a
elif o == "--avb_vbmeta_algorithm":
@@ -1203,6 +1225,7 @@
"replace_verity_public_key=",
"replace_verity_private_key=",
"replace_verity_keyid=",
+ "remove_avb_public_keys=",
"avb_apex_extra_args=",
"avb_vbmeta_algorithm=",
"avb_vbmeta_key=",
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index a249081..524c0f2 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -250,7 +250,17 @@
with open(fn) as f:
for line in f:
fn, ranges_text = line.rstrip().split(None, 1)
- ranges = rangelib.RangeSet.parse(ranges_text)
+ raw_ranges = rangelib.RangeSet.parse(ranges_text)
+
+ # Note: e2fsdroid records holes in the extent tree as "0" blocks.
+ # This causes confusion because clobbered_blocks always includes
+ # the superblock (physical block #0). Since the 0 blocks here do
+ # not represent actual physical blocks, remove them from the set.
+ ranges = raw_ranges.subtract(rangelib.RangeSet("0"))
+      # b/150334561: we need to preserve the monotonic property of the raw
+      # range. Otherwise, the validation script will read the blocks in the
+      # wrong order when pulling files from the image.
+ ranges.monotonic = raw_ranges.monotonic
ranges.extra['text_str'] = ranges_text
if allow_shared_blocks:
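
(The sparse_img.py change above strips e2fsdroid's "0" hole markers from each file's block list, then copies the monotonic flag from the raw range so readers still know whether the blocks appear in file order. A tiny illustration, assuming the releasetools rangelib module is on the import path and using a made-up block map entry:)

    import rangelib

    # "0" here is e2fsdroid's hole marker, not a real physical block.
    ranges_text = '5-9 0 10-14'
    raw_ranges = rangelib.RangeSet.parse(ranges_text)

    # Drop block 0, then carry over the monotonic flag computed from the raw
    # text so the block-order information is not lost.
    ranges = raw_ranges.subtract(rangelib.RangeSet('0'))
    ranges.monotonic = raw_ranges.monotonic
    print(ranges.monotonic)
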
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
index 07284ad..e19bc90 100644
--- a/tools/releasetools/test_apex_utils.py
+++ b/tools/releasetools/test_apex_utils.py
@@ -14,8 +14,10 @@
# limitations under the License.
#
+import re
import os
import os.path
+import shutil
import zipfile
import apex_utils
@@ -32,6 +34,7 @@
self.testdata_dir = test_utils.get_testdata_dir()
# The default payload signing key.
self.payload_key = os.path.join(self.testdata_dir, 'testkey.key')
+ self.apex_with_apk = os.path.join(self.testdata_dir, 'has_apk.apex')
common.OPTIONS.search_path = test_utils.get_search_path()
@@ -134,35 +137,43 @@
def test_ApexApkSigner_noApkPresent(self):
apex_path = os.path.join(self.testdata_dir, 'foo.apex')
signer = apex_utils.ApexApkSigner(apex_path, None, None)
- processed_apex = signer.ProcessApexFile({}, self.payload_key,
- None)
+ processed_apex = signer.ProcessApexFile({}, self.payload_key)
self.assertEqual(apex_path, processed_apex)
@test_utils.SkipIfExternalToolsUnavailable()
def test_ApexApkSigner_apkKeyNotPresent(self):
- apex_path = os.path.join(self.testdata_dir, 'has_apk.apex')
+ apex_path = common.MakeTempFile(suffix='.apex')
+ shutil.copy(self.apex_with_apk, apex_path)
signer = apex_utils.ApexApkSigner(apex_path, None, None)
- self.assertRaises(apex_utils.ApexSigningError, signer.ProcessApexFile, {},
- self.payload_key, None)
+ self.assertRaises(apex_utils.ApexSigningError, signer.ProcessApexFile,
+ {}, self.payload_key)
@test_utils.SkipIfExternalToolsUnavailable()
def test_ApexApkSigner_signApk(self):
- apex_path = os.path.join(self.testdata_dir, 'has_apk.apex')
+ apex_path = common.MakeTempFile(suffix='.apex')
+ shutil.copy(self.apex_with_apk, apex_path)
signer = apex_utils.ApexApkSigner(apex_path, None, None)
apk_keys = {'wifi-service-resources.apk': os.path.join(
self.testdata_dir, 'testkey')}
self.payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
- payload_pubkey = common.ExtractAvbPublicKey('avbtool',
- self.payload_key)
- signer.ProcessApexFile(apk_keys, self.payload_key, payload_pubkey)
+ apex_file = signer.ProcessApexFile(apk_keys, self.payload_key)
+ package_name_extract_cmd = ['aapt', 'dump', 'badging', apex_file]
+ output = common.RunAndCheckOutput(package_name_extract_cmd)
+ for line in output.splitlines():
+ # Sample output from aapt: "package: name='com.google.android.wifi'
+ # versionCode='1' versionName='' platformBuildVersionName='R'
+ # compileSdkVersion='29' compileSdkVersionCodename='R'"
+ match = re.search(r"^package:.* name='([\w|\.]+)'", line, re.IGNORECASE)
+ if match:
+ package_name = match.group(1)
+ self.assertEquals('com.google.android.wifi', package_name)
@test_utils.SkipIfExternalToolsUnavailable()
def test_ApexApkSigner_noAssetDir(self):
- apex_path = os.path.join(self.testdata_dir, 'has_apk.apex')
no_asset = common.MakeTempFile(suffix='.apex')
with zipfile.ZipFile(no_asset, 'w') as output_zip:
- with zipfile.ZipFile(apex_path, 'r') as input_zip:
+ with zipfile.ZipFile(self.apex_with_apk, 'r') as input_zip:
name_list = input_zip.namelist()
for name in name_list:
if not name.startswith('assets'):
@@ -173,23 +184,4 @@
self.testdata_dir, 'testkey')}
self.payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
- payload_pubkey = common.ExtractAvbPublicKey('avbtool',
- self.payload_key)
- signer.ProcessApexFile(apk_keys, self.payload_key, payload_pubkey)
-
- @test_utils.SkipIfExternalToolsUnavailable()
- def test_ApexApkSigner_withSignerHelper(self):
- apex_path = os.path.join(self.testdata_dir, 'has_apk.apex')
- signer = apex_utils.ApexApkSigner(apex_path, None, None)
- apk_keys = {'wifi-service-resources.apk': os.path.join(
- self.testdata_dir, 'testkey')}
-
- self.payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
- payload_pubkey = common.ExtractAvbPublicKey('avbtool', self.payload_key)
-
- signing_helper = os.path.join(self.testdata_dir, 'signing_helper.sh')
- os.chmod(signing_helper, 0o700)
- payload_signer_args = '--signing_helper_with_files={}'.format(
- signing_helper)
- signer.ProcessApexFile(apk_keys, self.payload_key, payload_pubkey,
- payload_signer_args)
+ signer.ProcessApexFile(apk_keys, self.payload_key)
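
(test_ApexApkSigner_signApk above verifies the re-signed APEX by grepping the package name out of `aapt dump badging` output. The extraction is just a regex over that output; here is a standalone sketch run against the sample line quoted in the test's comment:)

    import re

    sample = ("package: name='com.google.android.wifi' versionCode='1' "
              "versionName='' platformBuildVersionName='R'")
    for line in sample.splitlines():
        match = re.search(r"^package:.* name='([\w|\.]+)'", line, re.IGNORECASE)
        if match:
            print(match.group(1))  # prints: com.google.android.wifi
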
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 53b5b76..da92163 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -53,8 +53,26 @@
'ro.build.fingerprint' : 'build-fingerprint',
'ro.build.foo' : 'build-foo',
},
+ 'system.build.prop' : {
+ 'ro.product.system.brand' : 'product-brand',
+ 'ro.product.system.name' : 'product-name',
+ 'ro.product.system.device' : 'product-device',
+ 'ro.system.build.version.release' : 'version-release',
+ 'ro.system.build.id' : 'build-id',
+ 'ro.system.build.version.incremental' : 'version-incremental',
+ 'ro.system.build.type' : 'build-type',
+ 'ro.system.build.tags' : 'build-tags',
+ 'ro.system.build.foo' : 'build-foo',
+ },
'vendor.build.prop' : {
- 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+ 'ro.product.vendor.brand' : 'vendor-product-brand',
+ 'ro.product.vendor.name' : 'vendor-product-name',
+ 'ro.product.vendor.device' : 'vendor-product-device',
+ 'ro.vendor.build.version.release' : 'vendor-version-release',
+ 'ro.vendor.build.id' : 'vendor-build-id',
+ 'ro.vendor.build.version.incremental' : 'vendor-version-incremental',
+ 'ro.vendor.build.type' : 'vendor-build-type',
+ 'ro.vendor.build.tags' : 'vendor-build-tags',
},
'property1' : 'value1',
'property2' : 4096,
@@ -186,39 +204,27 @@
self.assertRaises(common.ExternalError, target_info.GetBuildProp,
'ro.build.nonexistent')
- def test_GetVendorBuildProp(self):
+ def test_GetPartitionFingerprint(self):
target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('vendor-build-fingerprint',
- target_info.GetVendorBuildProp(
- 'ro.vendor.build.fingerprint'))
- self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
- 'ro.build.nonexistent')
+ self.assertEqual(
+ target_info.GetPartitionFingerprint('vendor'),
+ 'vendor-product-brand/vendor-product-name/vendor-product-device'
+ ':vendor-version-release/vendor-build-id/vendor-version-incremental'
+ ':vendor-build-type/vendor-build-tags')
- def test_GetVendorBuildProp_with_oem_props(self):
- target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- self.assertEqual('vendor-build-fingerprint',
- target_info.GetVendorBuildProp(
- 'ro.vendor.build.fingerprint'))
- self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
- 'ro.build.nonexistent')
-
- def test_vendor_fingerprint(self):
+ def test_GetPartitionFingerprint_system_other_uses_system(self):
target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('vendor-build-fingerprint',
- target_info.vendor_fingerprint)
+ self.assertEqual(
+ target_info.GetPartitionFingerprint('system_other'),
+ target_info.GetPartitionFingerprint('system'))
- def test_vendor_fingerprint_blacklisted(self):
- target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
- del target_info_dict['vendor.build.prop']['ro.vendor.build.fingerprint']
- target_info = common.BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
- self.assertIsNone(target_info.vendor_fingerprint)
-
- def test_vendor_fingerprint_without_vendor_build_prop(self):
- target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
- del target_info_dict['vendor.build.prop']
- target_info = common.BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
- self.assertIsNone(target_info.vendor_fingerprint)
+ def test_GetPartitionFingerprint_uses_fingerprint_prop_if_available(self):
+ info_dict = copy.deepcopy(self.TEST_INFO_DICT)
+ info_dict['vendor.build.prop']['ro.vendor.build.fingerprint'] = 'vendor:fingerprint'
+ target_info = common.BuildInfo(info_dict, None)
+ self.assertEqual(
+ target_info.GetPartitionFingerprint('vendor'),
+ 'vendor:fingerprint')
def test_WriteMountOemScript(self):
target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
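
(For reference, the expected string in test_GetPartitionFingerprint above follows the usual fingerprint layout, brand/name/device:release/id/incremental:type/tags, filled in from the vendor props added to TEST_INFO_DICT. A quick standalone sketch that rebuilds the expected value from those prop entries:)

    props = {
        'ro.product.vendor.brand': 'vendor-product-brand',
        'ro.product.vendor.name': 'vendor-product-name',
        'ro.product.vendor.device': 'vendor-product-device',
        'ro.vendor.build.version.release': 'vendor-version-release',
        'ro.vendor.build.id': 'vendor-build-id',
        'ro.vendor.build.version.incremental': 'vendor-version-incremental',
        'ro.vendor.build.type': 'vendor-build-type',
        'ro.vendor.build.tags': 'vendor-build-tags',
    }
    fingerprint = '{}/{}/{}:{}/{}/{}:{}/{}'.format(
        props['ro.product.vendor.brand'],
        props['ro.product.vendor.name'],
        props['ro.product.vendor.device'],
        props['ro.vendor.build.version.release'],
        props['ro.vendor.build.id'],
        props['ro.vendor.build.version.incremental'],
        props['ro.vendor.build.type'],
        props['ro.vendor.build.tags'])
    assert fingerprint == (
        'vendor-product-brand/vendor-product-name/vendor-product-device'
        ':vendor-version-release/vendor-build-id/vendor-version-incremental'
        ':vendor-build-type/vendor-build-tags')
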
diff --git a/tools/warn/chrome_project_list.py b/tools/warn/chrome_project_list.py
new file mode 100644
index 0000000..6096522
--- /dev/null
+++ b/tools/warn/chrome_project_list.py
@@ -0,0 +1,686 @@
+# python3
+"""Clang_Tidy_Warn Project List data for Chrome.
+
+This file stores the Chrome project_list used in warn.py and
+its dependencies. It has been put into this file for easier navigation and
+unification of the Chrome and Android warn.py.
+"""
+
+
+def create_pattern(pattern):
+ return [pattern, '(^|.*/)' + pattern + '/.*: warning:']
+
+
+# A list of [project_name, file_path_pattern].
+project_list = [
+ create_pattern('android_webview'),
+ create_pattern('apps'),
+ create_pattern('ash/app_list'),
+ create_pattern('ash/public'),
+ create_pattern('ash/assistant'),
+ create_pattern('ash/display'),
+ create_pattern('ash/resources'),
+ create_pattern('ash/login'),
+ create_pattern('ash/system'),
+ create_pattern('ash/wm'),
+ create_pattern('ash/shelf'),
+ create_pattern('ash'),
+ create_pattern('base/trace_event'),
+ create_pattern('base/debug'),
+ create_pattern('base/third_party'),
+ create_pattern('base/files'),
+ create_pattern('base/test'),
+ create_pattern('base/util'),
+ create_pattern('base/task'),
+ create_pattern('base/metrics'),
+ create_pattern('base/strings'),
+ create_pattern('base/memory'),
+ create_pattern('base'),
+ create_pattern('build'),
+ create_pattern('build_overrides'),
+ create_pattern('buildtools'),
+ create_pattern('cc'),
+ create_pattern('chrome/services'),
+ create_pattern('chrome/app'),
+ create_pattern('chrome/renderer'),
+ create_pattern('chrome/test'),
+ create_pattern('chrome/common/safe_browsing'),
+ create_pattern('chrome/common/importer'),
+ create_pattern('chrome/common/media_router'),
+ create_pattern('chrome/common/extensions'),
+ create_pattern('chrome/common'),
+ create_pattern('chrome/browser/sync_file_system'),
+ create_pattern('chrome/browser/safe_browsing'),
+ create_pattern('chrome/browser/download'),
+ create_pattern('chrome/browser/ui'),
+ create_pattern('chrome/browser/supervised_user'),
+ create_pattern('chrome/browser/search'),
+ create_pattern('chrome/browser/browsing_data'),
+ create_pattern('chrome/browser/predictors'),
+ create_pattern('chrome/browser/net'),
+ create_pattern('chrome/browser/devtools'),
+ create_pattern('chrome/browser/resource_coordinator'),
+ create_pattern('chrome/browser/page_load_metrics'),
+ create_pattern('chrome/browser/extensions'),
+ create_pattern('chrome/browser/ssl'),
+ create_pattern('chrome/browser/printing'),
+ create_pattern('chrome/browser/profiles'),
+ create_pattern('chrome/browser/chromeos'),
+ create_pattern('chrome/browser/performance_manager'),
+ create_pattern('chrome/browser/metrics'),
+ create_pattern('chrome/browser/component_updater'),
+ create_pattern('chrome/browser/media'),
+ create_pattern('chrome/browser/notifications'),
+ create_pattern('chrome/browser/web_applications'),
+ create_pattern('chrome/browser/media_galleries'),
+ create_pattern('chrome/browser'),
+ create_pattern('chrome'),
+ create_pattern('chromecast'),
+ create_pattern('chromeos/services'),
+ create_pattern('chromeos/dbus'),
+ create_pattern('chromeos/assistant'),
+ create_pattern('chromeos/components'),
+ create_pattern('chromeos/settings'),
+ create_pattern('chromeos/constants'),
+ create_pattern('chromeos/network'),
+ create_pattern('chromeos'),
+ create_pattern('cloud_print'),
+ create_pattern('components/crash'),
+ create_pattern('components/subresource_filter'),
+ create_pattern('components/invalidation'),
+ create_pattern('components/autofill'),
+ create_pattern('components/onc'),
+ create_pattern('components/arc'),
+ create_pattern('components/safe_browsing'),
+ create_pattern('components/services'),
+ create_pattern('components/cast_channel'),
+ create_pattern('components/download'),
+ create_pattern('components/feed'),
+ create_pattern('components/offline_pages'),
+ create_pattern('components/bookmarks'),
+ create_pattern('components/cloud_devices'),
+ create_pattern('components/mirroring'),
+ create_pattern('components/spellcheck'),
+ create_pattern('components/viz'),
+ create_pattern('components/gcm_driver'),
+ create_pattern('components/ntp_snippets'),
+ create_pattern('components/translate'),
+ create_pattern('components/search_engines'),
+ create_pattern('components/background_task_scheduler'),
+ create_pattern('components/signin'),
+ create_pattern('components/chromeos_camera'),
+ create_pattern('components/reading_list'),
+ create_pattern('components/assist_ranker'),
+ create_pattern('components/payments'),
+ create_pattern('components/feedback'),
+ create_pattern('components/ui_devtools'),
+ create_pattern('components/password_manager'),
+ create_pattern('components/omnibox'),
+ create_pattern('components/content_settings'),
+ create_pattern('components/dom_distiller'),
+ create_pattern('components/nacl'),
+ create_pattern('components/metrics'),
+ create_pattern('components/policy'),
+ create_pattern('components/optimization_guide'),
+ create_pattern('components/exo'),
+ create_pattern('components/update_client'),
+ create_pattern('components/data_reduction_proxy'),
+ create_pattern('components/sync'),
+ create_pattern('components/drive'),
+ create_pattern('components/variations'),
+ create_pattern('components/history'),
+ create_pattern('components/webcrypto'),
+ create_pattern('components'),
+ create_pattern('content/public'),
+ create_pattern('content/renderer'),
+ create_pattern('content/test'),
+ create_pattern('content/common'),
+ create_pattern('content/browser'),
+ create_pattern('content/zygote'),
+ create_pattern('content'),
+ create_pattern('courgette'),
+ create_pattern('crypto'),
+ create_pattern('dbus'),
+ create_pattern('device/base'),
+ create_pattern('device/vr'),
+ create_pattern('device/gamepad'),
+ create_pattern('device/test'),
+ create_pattern('device/fido'),
+ create_pattern('device/bluetooth'),
+ create_pattern('device'),
+ create_pattern('docs'),
+ create_pattern('extensions/docs'),
+ create_pattern('extensions/components'),
+ create_pattern('extensions/buildflags'),
+ create_pattern('extensions/renderer'),
+ create_pattern('extensions/test'),
+ create_pattern('extensions/common'),
+ create_pattern('extensions/shell'),
+ create_pattern('extensions/browser'),
+ create_pattern('extensions/strings'),
+ create_pattern('extensions'),
+ create_pattern('fuchsia'),
+ create_pattern('gin'),
+ create_pattern('google_apis'),
+ create_pattern('google_update'),
+ create_pattern('gpu/perftests'),
+ create_pattern('gpu/GLES2'),
+ create_pattern('gpu/command_buffer'),
+ create_pattern('gpu/tools'),
+ create_pattern('gpu/gles2_conform_support'),
+ create_pattern('gpu/ipc'),
+ create_pattern('gpu/khronos_glcts_support'),
+ create_pattern('gpu'),
+ create_pattern('headless'),
+ create_pattern('infra'),
+ create_pattern('ipc'),
+ create_pattern('jingle'),
+ create_pattern('media'),
+ create_pattern('mojo'),
+ create_pattern('native_client'),
+    create_pattern('native_client_sdk'),
+ create_pattern('net'),
+ create_pattern('out'),
+ create_pattern('pdf'),
+ create_pattern('ppapi'),
+ create_pattern('printing'),
+ create_pattern('remoting'),
+ create_pattern('rlz'),
+ create_pattern('sandbox'),
+ create_pattern('services/audio'),
+ create_pattern('services/content'),
+ create_pattern('services/data_decoder'),
+ create_pattern('services/device'),
+ create_pattern('services/file'),
+ create_pattern('services/identity'),
+ create_pattern('services/image_annotation'),
+ create_pattern('services/media_session'),
+ create_pattern('services/metrics'),
+ create_pattern('services/network'),
+ create_pattern('services/preferences'),
+ create_pattern('services/proxy_resolver'),
+ create_pattern('services/resource_coordinator'),
+ create_pattern('services/service_manager'),
+ create_pattern('services/shape_detection'),
+ create_pattern('services/strings'),
+ create_pattern('services/test'),
+ create_pattern('services/tracing'),
+ create_pattern('services/video_capture'),
+ create_pattern('services/viz'),
+ create_pattern('services/ws'),
+ create_pattern('services'),
+ create_pattern('skia/config'),
+ create_pattern('skia/ext'),
+ create_pattern('skia/public'),
+ create_pattern('skia/tools'),
+ create_pattern('skia'),
+ create_pattern('sql'),
+ create_pattern('storage'),
+ create_pattern('styleguide'),
+ create_pattern('testing'),
+ create_pattern('third_party/Python-Markdown'),
+ create_pattern('third_party/SPIRV-Tools'),
+ create_pattern('third_party/abseil-cpp'),
+ create_pattern('third_party/accessibility-audit'),
+ create_pattern('third_party/accessibility_test_framework'),
+ create_pattern('third_party/adobe'),
+ create_pattern('third_party/afl'),
+ create_pattern('third_party/android_build_tools'),
+ create_pattern('third_party/android_crazy_linker'),
+ create_pattern('third_party/android_data_chart'),
+ create_pattern('third_party/android_deps'),
+ create_pattern('third_party/android_media'),
+ create_pattern('third_party/android_ndk'),
+ create_pattern('third_party/android_opengl'),
+ create_pattern('third_party/android_platform'),
+ create_pattern('third_party/android_protobuf'),
+ create_pattern('third_party/android_sdk'),
+ create_pattern('third_party/android_support_test_runner'),
+ create_pattern('third_party/android_swipe_refresh'),
+ create_pattern('third_party/android_system_sdk'),
+ create_pattern('third_party/android_tools'),
+ create_pattern('third_party/angle'),
+ create_pattern('third_party/apache-mac'),
+ create_pattern('third_party/apache-portable-runtime'),
+ create_pattern('third_party/apache-win32'),
+ create_pattern('third_party/apk-patch-size-estimator'),
+ create_pattern('third_party/apple_apsl'),
+ create_pattern('third_party/arcore-android-sdk'),
+ create_pattern('third_party/ashmem'),
+ create_pattern('third_party/auto'),
+ create_pattern('third_party/axe-core'),
+ create_pattern('third_party/bazel'),
+ create_pattern('third_party/binutils'),
+ create_pattern('third_party/bison'),
+ create_pattern('third_party/blanketjs'),
+ create_pattern('third_party/blink/common'),
+ create_pattern('third_party/blink/manual_tests'),
+ create_pattern('third_party/blink/perf_tests'),
+ create_pattern('third_party/blink/public/common'),
+ create_pattern('third_party/blink/public/default_100_percent'),
+ create_pattern('third_party/blink/public/default_200_percent'),
+ create_pattern('third_party/blink/public/platform'),
+ create_pattern('third_party/blink/public/mojom/ad_tagging'),
+ create_pattern('third_party/blink/public/mojom/app_banner'),
+ create_pattern('third_party/blink/public/mojom/appcache'),
+ create_pattern('third_party/blink/public/mojom/array_buffer'),
+ create_pattern('third_party/blink/public/mojom/associated_interfaces'),
+ create_pattern('third_party/blink/public/mojom/autoplay'),
+ create_pattern('third_party/blink/public/mojom/background_fetch'),
+ create_pattern('third_party/blink/public/mojom/background_sync'),
+ create_pattern('third_party/blink/public/mojom/badging'),
+ create_pattern('third_party/blink/public/mojom/blob'),
+ create_pattern('third_party/blink/public/mojom/bluetooth'),
+ create_pattern('third_party/blink/public/mojom/broadcastchannel'),
+ create_pattern('third_party/blink/public/mojom/cache_storage'),
+ create_pattern('third_party/blink/public/mojom/choosers'),
+ create_pattern('third_party/blink/public/mojom/clipboard'),
+ create_pattern('third_party/blink/public/mojom/commit_result'),
+ create_pattern('third_party/blink/public/mojom/contacts'),
+ create_pattern('third_party/blink/public/mojom/cookie_store'),
+ create_pattern('third_party/blink/public/mojom/crash'),
+ create_pattern('third_party/blink/public/mojom/credentialmanager'),
+ create_pattern('third_party/blink/public/mojom/csp'),
+ create_pattern('third_party/blink/public/mojom/devtools'),
+ create_pattern('third_party/blink/public/mojom/document_metadata'),
+ create_pattern('third_party/blink/public/mojom/dom_storage'),
+ create_pattern('third_party/blink/public/mojom/dwrite_font_proxy'),
+ create_pattern('third_party/blink/public/mojom/feature_policy'),
+ create_pattern('third_party/blink/public/mojom/fetch'),
+ create_pattern('third_party/blink/public/mojom/file'),
+ create_pattern('third_party/blink/public/mojom/filesystem'),
+ create_pattern('third_party/blink/public/mojom/font_unique_name_lookup'),
+ create_pattern('third_party/blink/public/mojom/frame'),
+ create_pattern('third_party/blink/public/mojom/frame_sinks'),
+ create_pattern('third_party/blink/public/mojom/geolocation'),
+ create_pattern('third_party/blink/public/mojom/hyphenation'),
+ create_pattern('third_party/blink/public/mojom/idle'),
+ create_pattern('third_party/blink/public/mojom/indexeddb'),
+ create_pattern('third_party/blink/public/mojom/input'),
+ create_pattern('third_party/blink/public/mojom/insecure_input'),
+ create_pattern('third_party/blink/public/mojom/installation'),
+ create_pattern('third_party/blink/public/mojom/installedapp'),
+ create_pattern('third_party/blink/public/mojom/keyboard_lock'),
+ create_pattern('third_party/blink/public/mojom/leak_detector'),
+ create_pattern('third_party/blink/public/mojom/loader'),
+ create_pattern('third_party/blink/public/mojom/locks'),
+ create_pattern('third_party/blink/public/mojom/manifest'),
+ create_pattern('third_party/blink/public/mojom/media_controls'),
+ create_pattern('third_party/blink/public/mojom/mediasession'),
+ create_pattern('third_party/blink/public/mojom/mediastream'),
+ create_pattern('third_party/blink/public/mojom/messaging'),
+ create_pattern('third_party/blink/public/mojom/mime'),
+ create_pattern('third_party/blink/public/mojom/native_file_system'),
+ create_pattern('third_party/blink/public/mojom/net'),
+ create_pattern('third_party/blink/public/mojom/notifications'),
+ create_pattern('third_party/blink/public/mojom/oom_intervention'),
+ create_pattern('third_party/blink/public/mojom/page'),
+ create_pattern('third_party/blink/public/mojom/payments'),
+ create_pattern('third_party/blink/public/mojom/permissions'),
+ create_pattern('third_party/blink/public/mojom/picture_in_picture'),
+ create_pattern('third_party/blink/public/mojom/plugins'),
+ create_pattern('third_party/blink/public/mojom/portal'),
+ create_pattern('third_party/blink/public/mojom/presentation'),
+ create_pattern('third_party/blink/public/mojom/push_messaging'),
+ create_pattern('third_party/blink/public/mojom/quota'),
+ create_pattern('third_party/blink/public/mojom/remote_objects'),
+ create_pattern('third_party/blink/public/mojom/reporting'),
+ create_pattern('third_party/blink/public/mojom/script'),
+ create_pattern('third_party/blink/public/mojom/selection_menu'),
+ create_pattern('third_party/blink/public/mojom/serial'),
+ create_pattern('third_party/blink/public/mojom/service_worker'),
+ create_pattern('third_party/blink/public/mojom/site_engagement'),
+ create_pattern('third_party/blink/public/mojom/sms'),
+ create_pattern('third_party/blink/public/mojom/speech'),
+ create_pattern('third_party/blink/public/mojom/ukm'),
+ create_pattern('third_party/blink/public/mojom/unhandled_tap_notifier'),
+ create_pattern('third_party/blink/public/mojom/usb'),
+ create_pattern('third_party/blink/public/mojom/use_counter'),
+ create_pattern('third_party/blink/public/mojom/user_agent'),
+ create_pattern('third_party/blink/public/mojom/wake_lock'),
+ create_pattern('third_party/blink/public/mojom/web_client_hints'),
+ create_pattern('third_party/blink/public/mojom/web_feature'),
+ create_pattern('third_party/blink/public/mojom/webaudio'),
+ create_pattern('third_party/blink/public/mojom/webauthn'),
+ create_pattern('third_party/blink/public/mojom/webdatabase'),
+ create_pattern('third_party/blink/public/mojom/webshare'),
+ create_pattern('third_party/blink/public/mojom/window_features'),
+ create_pattern('third_party/blink/public/mojom/worker'),
+ create_pattern('third_party/blink/public/web'),
+ create_pattern('third_party/blink/renderer/bindings'),
+ create_pattern('third_party/blink/renderer/build'),
+ create_pattern('third_party/blink/renderer/controller'),
+ create_pattern('third_party/blink/renderer/core/accessibility'),
+ create_pattern('third_party/blink/renderer/core/animation'),
+ create_pattern('third_party/blink/renderer/core/aom'),
+ create_pattern('third_party/blink/renderer/core/clipboard'),
+ create_pattern('third_party/blink/renderer/core/content_capture'),
+ create_pattern('third_party/blink/renderer/core/context_features'),
+ create_pattern('third_party/blink/renderer/core/css'),
+ create_pattern('third_party/blink/renderer/core/display_lock'),
+ create_pattern('third_party/blink/renderer/core/dom'),
+ create_pattern('third_party/blink/renderer/core/editing'),
+ create_pattern('third_party/blink/renderer/core/events'),
+ create_pattern('third_party/blink/renderer/core/execution_context'),
+ create_pattern('third_party/blink/renderer/core/exported'),
+ create_pattern('third_party/blink/renderer/core/feature_policy'),
+ create_pattern('third_party/blink/renderer/core/fetch'),
+ create_pattern('third_party/blink/renderer/core/fileapi'),
+ create_pattern('third_party/blink/renderer/core/frame'),
+ create_pattern('third_party/blink/renderer/core/fullscreen'),
+ create_pattern('third_party/blink/renderer/core/geometry'),
+ create_pattern('third_party/blink/renderer/core/html'),
+ create_pattern('third_party/blink/renderer/core/imagebitmap'),
+ create_pattern('third_party/blink/renderer/core/input'),
+ create_pattern('third_party/blink/renderer/core/inspector'),
+ create_pattern('third_party/blink/renderer/core/intersection_observer'),
+ create_pattern('third_party/blink/renderer/core/invisible_dom'),
+ create_pattern('third_party/blink/renderer/core/layout'),
+ create_pattern('third_party/blink/renderer/core/loader'),
+ create_pattern('third_party/blink/renderer/core/messaging'),
+ create_pattern('third_party/blink/renderer/core/mojo'),
+ create_pattern('third_party/blink/renderer/core/offscreencanvas'),
+ create_pattern('third_party/blink/renderer/core/origin_trials'),
+ create_pattern('third_party/blink/renderer/core/page'),
+ create_pattern('third_party/blink/renderer/core/paint'),
+ create_pattern('third_party/blink/renderer/core/probe'),
+ create_pattern('third_party/blink/renderer/core/resize_observer'),
+ create_pattern('third_party/blink/renderer/core/scheduler'),
+ create_pattern('third_party/blink/renderer/core/script'),
+ create_pattern('third_party/blink/renderer/core/scroll'),
+ create_pattern('third_party/blink/renderer/core/streams'),
+ create_pattern('third_party/blink/renderer/core/style'),
+ create_pattern('third_party/blink/renderer/core/svg'),
+ create_pattern('third_party/blink/renderer/core/testing'),
+ create_pattern('third_party/blink/renderer/core/timezone'),
+ create_pattern('third_party/blink/renderer/core/timing'),
+ create_pattern('third_party/blink/renderer/core/trustedtypes'),
+ create_pattern('third_party/blink/renderer/core/typed_arrays'),
+ create_pattern('third_party/blink/renderer/core/url'),
+ create_pattern('third_party/blink/renderer/core/win'),
+ create_pattern('third_party/blink/renderer/core/workers'),
+ create_pattern('third_party/blink/renderer/core/xml'),
+ create_pattern('third_party/blink/renderer/core/xmlhttprequest'),
+ create_pattern('third_party/blink/renderer/devtools'),
+ create_pattern('third_party/blink/renderer/modules'),
+ create_pattern('third_party/blink/renderer/platform'),
+ create_pattern('third_party/blink/tools'),
+ create_pattern('third_party/blink/web_tests'),
+ create_pattern('third_party/boringssl'),
+ create_pattern('third_party/bouncycastle'),
+ create_pattern('third_party/breakpad'),
+ create_pattern('third_party/brotli'),
+ create_pattern('third_party/bspatch'),
+ create_pattern('third_party/byte_buddy'),
+ create_pattern('third_party/cacheinvalidation'),
+ create_pattern('third_party/catapult'),
+ create_pattern('third_party/cct_dynamic_module'),
+ create_pattern('third_party/ced'),
+ create_pattern('third_party/chaijs'),
+ create_pattern('third_party/checkstyle'),
+ create_pattern('third_party/chromevox'),
+ create_pattern('third_party/chromite'),
+ create_pattern('third_party/cld_3'),
+ create_pattern('third_party/closure_compiler'),
+ create_pattern('third_party/colorama'),
+ create_pattern('third_party/crashpad'),
+ create_pattern('third_party/crc32c'),
+ create_pattern('third_party/cros_system_api'),
+ create_pattern('third_party/custom_tabs_client'),
+ create_pattern('third_party/d3'),
+ create_pattern('third_party/dav1d'),
+ create_pattern('third_party/dawn'),
+ create_pattern('third_party/decklink'),
+ create_pattern('third_party/depot_tools'),
+ create_pattern('third_party/devscripts'),
+ create_pattern('third_party/devtools-node-modules'),
+ create_pattern('third_party/dom_distiller_js'),
+ create_pattern('third_party/elfutils'),
+ create_pattern('third_party/emoji-segmenter'),
+ create_pattern('third_party/errorprone'),
+ create_pattern('third_party/espresso'),
+ create_pattern('third_party/expat'),
+ create_pattern('third_party/feed'),
+ create_pattern('third_party/ffmpeg'),
+ create_pattern('third_party/flac'),
+ create_pattern('third_party/flatbuffers'),
+ create_pattern('third_party/flot'),
+ create_pattern('third_party/fontconfig'),
+ create_pattern('third_party/freetype'),
+ create_pattern('third_party/fuchsia-sdk'),
+ create_pattern('third_party/gestures'),
+ create_pattern('third_party/gif_player'),
+ create_pattern('third_party/glfw'),
+ create_pattern('third_party/glslang'),
+ create_pattern('third_party/gnu_binutils'),
+ create_pattern('third_party/google-truth'),
+ create_pattern('third_party/google_android_play_core'),
+ create_pattern('third_party/google_appengine_cloudstorage'),
+ create_pattern('third_party/google_input_tools'),
+ create_pattern('third_party/google_toolbox_for_mac'),
+ create_pattern('third_party/google_trust_services'),
+ create_pattern('third_party/googletest'),
+ create_pattern('third_party/gperf'),
+ create_pattern('third_party/gradle_wrapper'),
+ create_pattern('third_party/grpc'),
+ create_pattern('third_party/gson'),
+ create_pattern('third_party/guava'),
+ create_pattern('third_party/gvr-android-keyboard'),
+ create_pattern('third_party/gvr-android-sdk'),
+ create_pattern('third_party/hamcrest'),
+ create_pattern('third_party/harfbuzz-ng'),
+ create_pattern('third_party/hunspell'),
+ create_pattern('third_party/hunspell_dictionaries'),
+ create_pattern('third_party/iaccessible2'),
+ create_pattern('third_party/iccjpeg'),
+ create_pattern('third_party/icu/android'),
+ create_pattern('third_party/icu/android_small'),
+ create_pattern('third_party/icu/cast'),
+ create_pattern('third_party/icu/chromeos'),
+ create_pattern('third_party/icu/common'),
+ create_pattern('third_party/icu/filters'),
+ create_pattern('third_party/icu/flutter'),
+ create_pattern('third_party/icu/fuzzers'),
+ create_pattern('third_party/icu/ios'),
+ create_pattern('third_party/icu/patches'),
+ create_pattern('third_party/icu/scripts'),
+ create_pattern('third_party/icu/source'),
+ create_pattern('third_party/icu/tzres'),
+ create_pattern('third_party/icu4j'),
+ create_pattern('third_party/ijar'),
+ create_pattern('third_party/ink'),
+ create_pattern('third_party/inspector_protocol'),
+ create_pattern('third_party/instrumented_libraries'),
+ create_pattern('third_party/intellij'),
+ create_pattern('third_party/isimpledom'),
+ create_pattern('third_party/jacoco'),
+ create_pattern('third_party/jinja2'),
+ create_pattern('third_party/jsoncpp'),
+ create_pattern('third_party/jsr-305'),
+ create_pattern('third_party/jstemplate'),
+ create_pattern('third_party/junit'),
+ create_pattern('third_party/khronos'),
+ create_pattern('third_party/lcov'),
+ create_pattern('third_party/leveldatabase'),
+ create_pattern('third_party/libFuzzer'),
+ create_pattern('third_party/libXNVCtrl'),
+ create_pattern('third_party/libaddressinput'),
+ create_pattern('third_party/libaom'),
+ create_pattern('third_party/libcxx-pretty-printers'),
+ create_pattern('third_party/libdrm'),
+ create_pattern('third_party/libevdev'),
+ create_pattern('third_party/libjingle_xmpp'),
+ create_pattern('third_party/libjpeg'),
+ create_pattern('third_party/libjpeg_turbo'),
+ create_pattern('third_party/liblouis'),
+ create_pattern('third_party/libovr'),
+ create_pattern('third_party/libphonenumber'),
+ create_pattern('third_party/libpng'),
+ create_pattern('third_party/libprotobuf-mutator'),
+ create_pattern('third_party/libsecret'),
+ create_pattern('third_party/libsrtp'),
+ create_pattern('third_party/libsync'),
+ create_pattern('third_party/libudev'),
+ create_pattern('third_party/libusb'),
+ create_pattern('third_party/libvpx'),
+ create_pattern('third_party/libwebm'),
+ create_pattern('third_party/libwebp'),
+ create_pattern('third_party/libxml'),
+ create_pattern('third_party/libxslt'),
+ create_pattern('third_party/libyuv'),
+ create_pattern('third_party/lighttpd'),
+ create_pattern('third_party/logilab'),
+ create_pattern('third_party/lss'),
+ create_pattern('third_party/lzma_sdk'),
+ create_pattern('third_party/mach_override'),
+ create_pattern('third_party/markdown'),
+ create_pattern('third_party/markupsafe'),
+ create_pattern('third_party/material_design_icons'),
+ create_pattern('third_party/mesa_headers'),
+ create_pattern('third_party/metrics_proto'),
+ create_pattern('third_party/microsoft_webauthn'),
+ create_pattern('third_party/mingw-w64'),
+ create_pattern('third_party/minigbm'),
+ create_pattern('third_party/minizip'),
+ create_pattern('third_party/mocha'),
+ create_pattern('third_party/mockito'),
+ create_pattern('third_party/modp_b64'),
+ create_pattern('third_party/motemplate'),
+ create_pattern('third_party/mozilla'),
+ create_pattern('third_party/nacl_sdk_binaries'),
+ create_pattern('third_party/nasm'),
+ create_pattern('third_party/netty-tcnative'),
+ create_pattern('third_party/netty4'),
+ create_pattern('third_party/node'),
+ create_pattern('third_party/nvml'),
+ create_pattern('third_party/objenesis'),
+ create_pattern('third_party/ocmock'),
+ create_pattern('third_party/openh264'),
+ create_pattern('third_party/openscreen'),
+ create_pattern('third_party/openvr'),
+ create_pattern('third_party/opus'),
+ create_pattern('third_party/ots'),
+ create_pattern('third_party/ow2_asm'),
+ create_pattern('third_party/pdfium'),
+ create_pattern('third_party/pefile'),
+ create_pattern('third_party/perfetto'),
+ create_pattern('third_party/perl'),
+ create_pattern('third_party/pexpect'),
+ create_pattern('third_party/pffft'),
+ create_pattern('third_party/ply'),
+ create_pattern('third_party/polymer'),
+ create_pattern('third_party/proguard'),
+ create_pattern('third_party/protobuf'),
+ create_pattern('third_party/protoc_javalite'),
+ create_pattern('third_party/pycoverage'),
+ create_pattern('third_party/pyelftools'),
+ create_pattern('third_party/pyjson5'),
+ create_pattern('third_party/pylint'),
+ create_pattern('third_party/pymock'),
+ create_pattern('third_party/pystache'),
+ create_pattern('third_party/pywebsocket'),
+ create_pattern('third_party/qcms'),
+ create_pattern('third_party/quic_trace'),
+ create_pattern('third_party/qunit'),
+ create_pattern('third_party/r8'),
+ create_pattern('third_party/re2'),
+ create_pattern('third_party/requests'),
+ create_pattern('third_party/rnnoise'),
+ create_pattern('third_party/robolectric'),
+ create_pattern('third_party/s2cellid'),
+ create_pattern('third_party/sfntly'),
+ create_pattern('third_party/shaderc'),
+ create_pattern('third_party/simplejson'),
+ create_pattern('third_party/sinonjs'),
+ create_pattern('third_party/skia'),
+ create_pattern('third_party/smhasher'),
+ create_pattern('third_party/snappy'),
+ create_pattern('third_party/speech-dispatcher'),
+ create_pattern('third_party/spirv-cross'),
+ create_pattern('third_party/spirv-headers'),
+ create_pattern('third_party/sqlite'),
+ create_pattern('third_party/sqlite4java'),
+ create_pattern('third_party/sudden_motion_sensor'),
+ create_pattern('third_party/swiftshader'),
+ create_pattern('third_party/tcmalloc'),
+ create_pattern('third_party/test_fonts'),
+ create_pattern('third_party/tlslite'),
+ create_pattern('third_party/ub-uiautomator'),
+ create_pattern('third_party/unrar'),
+ create_pattern('third_party/usb_ids'),
+ create_pattern('third_party/usrsctp'),
+ create_pattern('third_party/v4l-utils'),
+ create_pattern('third_party/vulkan'),
+ create_pattern('third_party/wayland'),
+ create_pattern('third_party/wayland-protocols'),
+ create_pattern('third_party/wds'),
+ create_pattern('third_party/web-animations-js'),
+ create_pattern('third_party/webdriver'),
+ create_pattern('third_party/webgl'),
+ create_pattern('third_party/webrtc'),
+ create_pattern('third_party/webrtc_overrides'),
+ create_pattern('third_party/webxr_test_pages'),
+ create_pattern('third_party/widevine'),
+ create_pattern('third_party/win_build_output'),
+ create_pattern('third_party/woff2'),
+ create_pattern('third_party/wtl'),
+ create_pattern('third_party/xdg-utils'),
+ create_pattern('third_party/xstream'),
+ create_pattern('third_party/yasm'),
+ create_pattern('third_party/zlib'),
+ create_pattern('tools'),
+ create_pattern('ui/accelerated_widget_mac'),
+ create_pattern('ui/accessibility'),
+ create_pattern('ui/android'),
+ create_pattern('ui/aura'),
+ create_pattern('ui/aura_extra'),
+ create_pattern('ui/base'),
+ create_pattern('ui/chromeos'),
+ create_pattern('ui/compositor'),
+ create_pattern('ui/compositor_extra'),
+ create_pattern('ui/content_accelerators'),
+ create_pattern('ui/display'),
+ create_pattern('ui/events'),
+ create_pattern('ui/file_manager'),
+ create_pattern('ui/gfx'),
+ create_pattern('ui/gl'),
+ create_pattern('ui/latency'),
+ create_pattern('ui/login'),
+ create_pattern('ui/message_center'),
+ create_pattern('ui/native_theme'),
+ create_pattern('ui/ozone'),
+ create_pattern('ui/platform_window'),
+ create_pattern('ui/resources'),
+ create_pattern('ui/shell_dialogs'),
+ create_pattern('ui/snapshot'),
+ create_pattern('ui/strings'),
+ create_pattern('ui/surface'),
+ create_pattern('ui/touch_selection'),
+ create_pattern('ui/views'),
+ create_pattern('ui/views_bridge_mac'),
+ create_pattern('ui/views_content_client'),
+ create_pattern('ui/web_dialogs'),
+ create_pattern('ui/webui'),
+ create_pattern('ui/wm'),
+ create_pattern('url'),
+ create_pattern('v8/benchmarks'),
+ create_pattern('v8/build_overrides'),
+ create_pattern('v8/custom_deps'),
+ create_pattern('v8/docs'),
+ create_pattern('v8/gni'),
+ create_pattern('v8/include'),
+ create_pattern('v8/infra'),
+ create_pattern('v8/samples'),
+ create_pattern('v8/src'),
+ create_pattern('v8/test'),
+ create_pattern('v8/testing'),
+ create_pattern('v8/third_party'),
+ create_pattern('v8/tools'),
+
+ # keep out/obj and other patterns at the end.
+ [
+ 'out/obj', '.*/(gen|obj[^/]*)/(include|EXECUTABLES|SHARED_LIBRARIES|'
+ 'STATIC_LIBRARIES|NATIVE_TESTS)/.*: warning:'
+ ],
+ ['other', '.*'] # all other unrecognized patterns
+]
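
(Each entry produced by create_pattern above is a [project_name, regex] pair, where the regex matches compiler warning lines whose path falls under that directory. A short check of that behaviour, using made-up warning lines:)

    import re

    def create_pattern(pattern):
        return [pattern, '(^|.*/)' + pattern + '/.*: warning:']

    name, regex = create_pattern('base/files')
    print(name)  # base/files
    print(bool(re.match(regex, 'base/files/file_util.cc:10:2: warning: unused variable')))  # True
    print(bool(re.match(regex, '../../base/files/file_path.cc:5:1: warning: shadowed')))    # True
    print(bool(re.match(regex, 'net/socket/socket.cc:7:3: warning: shadowed')))             # False
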
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
new file mode 100644
index 0000000..b8d3fe6
--- /dev/null
+++ b/tools/warn/html_writer.py
@@ -0,0 +1,673 @@
+# Lint as: python3
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Emit warning messages to html or csv files."""
+
+# To emit html page of warning messages:
+# flags: --byproject, --url, --separator
+# Old stuff for static html components:
+# html_script_style: static html scripts and styles
+# htmlbig:
+# dump_stats, dump_html_prologue, dump_html_epilogue:
+# emit_buttons:
+# dump_fixed
+# sort_warnings:
+# emit_stats_by_project:
+# all_patterns,
+# findproject, classify_warning
+# dump_html
+#
+# New dynamic HTML page's static JavaScript data:
+# Some data are copied from Python to JavaScript, to generate HTML elements.
+# FlagPlatform flags.platform
+# FlagURL flags.url, used by 'android'
+# FlagSeparator flags.separator, used by 'android'
+# SeverityColors: list of colors for all severity levels
+# SeverityHeaders: list of headers for all severity levels
+# SeverityColumnHeaders: list of column_headers for all severity levels
+# ProjectNames: project_names, or project_list[*][0]
+# WarnPatternsSeverity: warn_patterns[*]['severity']
+# WarnPatternsDescription: warn_patterns[*]['description']
+# WarningMessages: warning_messages
+# Warnings: warning_records
+# StatsHeader: warning count table header row
+# StatsRows: array of warning count table rows
+#
+# New dynamic HTML page's dynamic JavaScript data:
+#
+# New dynamic HTML related function to emit data:
+# escape_string, strip_escape_string, emit_warning_arrays
+# emit_js_data():
+
+from __future__ import print_function
+import cgi
+import csv
+import sys
+
+# pylint:disable=relative-beyond-top-level
+# pylint:disable=g-importing-member
+from .severity import Severity
+
+
+html_head_scripts = """\
+ <script type="text/javascript">
+ function expand(id) {
+ var e = document.getElementById(id);
+ var f = document.getElementById(id + "_mark");
+ if (e.style.display == 'block') {
+ e.style.display = 'none';
+ f.innerHTML = '⊕';
+ }
+ else {
+ e.style.display = 'block';
+ f.innerHTML = '⊖';
+ }
+ };
+ function expandCollapse(show) {
+ for (var id = 1; ; id++) {
+ var e = document.getElementById(id + "");
+ var f = document.getElementById(id + "_mark");
+ if (!e || !f) break;
+ e.style.display = (show ? 'block' : 'none');
+ f.innerHTML = (show ? '⊖' : '⊕');
+ }
+ };
+ </script>
+ <style type="text/css">
+ th,td{border-collapse:collapse; border:1px solid black;}
+ .button{color:blue;font-size:110%;font-weight:bolder;}
+ .bt{color:black;background-color:transparent;border:none;outline:none;
+ font-size:140%;font-weight:bolder;}
+ .c0{background-color:#e0e0e0;}
+ .c1{background-color:#d0d0d0;}
+ .t1{border-collapse:collapse; width:100%; border:1px solid black;}
+ </style>
+ <script src="https://www.gstatic.com/charts/loader.js"></script>
+"""
+
+
+def make_writer(output_stream):
+
+ def writer(text):
+ return output_stream.write(text + '\n')
+
+ return writer
+
+
+def html_big(param):
+ return '<font size="+2">' + param + '</font>'
+
+
+def dump_html_prologue(title, writer, warn_patterns, project_names):
+ writer('<html>\n<head>')
+ writer('<title>' + title + '</title>')
+ writer(html_head_scripts)
+ emit_stats_by_project(writer, warn_patterns, project_names)
+ writer('</head>\n<body>')
+ writer(html_big(title))
+ writer('<p>')
+
+
+def dump_html_epilogue(writer):
+  writer('</body>\n</html>')
+
+
+def sort_warnings(warn_patterns):
+ for i in warn_patterns:
+ i['members'] = sorted(set(i['members']))
+
+
+def create_warnings(warn_patterns, project_names):
+ """Creates warnings s.t.
+
+ warnings[p][s] is as specified in above docs.
+
+ Args:
+ warn_patterns: list of warning patterns for specified platform
+ project_names: list of project names
+
+ Returns:
+ 2D warnings array where warnings[p][s] is # of warnings in project name p of
+ severity level s
+ """
+ # pylint:disable=g-complex-comprehension
+ warnings = {p: {s.value: 0 for s in Severity.levels} for p in project_names}
+ for i in warn_patterns:
+ s = i['severity'].value
+ for p in i['projects']:
+ warnings[p][s] += i['projects'][p]
+ return warnings
+
+
+def get_total_by_project(warnings, project_names):
+ """Returns dict, project as key and # warnings for that project as value."""
+ # pylint:disable=g-complex-comprehension
+ return {
+ p: sum(warnings[p][s.value] for s in Severity.levels)
+ for p in project_names
+ }
+
+
+def get_total_by_severity(warnings, project_names):
+ """Returns dict, severity as key and # warnings of that severity as value."""
+ # pylint:disable=g-complex-comprehension
+ return {
+ s.value: sum(warnings[p][s.value] for p in project_names)
+ for s in Severity.levels
+ }
+
+
+def emit_table_header(total_by_severity):
+ """Returns list of HTML-formatted content for severity stats."""
+
+ stats_header = ['Project']
+ for s in Severity.levels:
+ if total_by_severity[s.value]:
+ stats_header.append(
+ '<span style=\'background-color:{}\'>{}</span>'.format(
+ s.color, s.column_header))
+ stats_header.append('TOTAL')
+ return stats_header
+
+
+def emit_row_counts_per_project(warnings, total_by_project, total_by_severity,
+ project_names):
+ """Returns total project warnings and row of stats for each project.
+
+ Args:
+ warnings: output of create_warnings(warn_patterns, project_names)
+ total_by_project: output of get_total_by_project(project_names)
+ total_by_severity: output of get_total_by_severity(project_names)
+ project_names: list of project names
+
+ Returns:
+ total_all_projects, the total number of warnings over all projects
+ stats_rows, a 2d list where each row is [Project Name, <severity counts>,
+ total # warnings for this project]
+ """
+
+ total_all_projects = 0
+ stats_rows = []
+ for p in project_names:
+ if total_by_project[p]:
+ one_row = [p]
+ for s in Severity.levels:
+ if total_by_severity[s.value]:
+ one_row.append(warnings[p][s.value])
+ one_row.append(total_by_project[p])
+ stats_rows.append(one_row)
+ total_all_projects += total_by_project[p]
+ return total_all_projects, stats_rows
+
+
+def emit_row_counts_per_severity(total_by_severity, stats_header, stats_rows,
+ total_all_projects, writer):
+ """Emits stats_header and stats_rows as specified above.
+
+ Args:
+ total_by_severity: output of get_total_by_severity()
+ stats_header: output of emit_table_header()
+ stats_rows: output of emit_row_counts_per_project()
+ total_all_projects: output of emit_row_counts_per_project()
+ writer: writer returned by make_writer(output_stream)
+ """
+
+ total_all_severities = 0
+ one_row = ['<b>TOTAL</b>']
+ for s in Severity.levels:
+ if total_by_severity[s.value]:
+ one_row.append(total_by_severity[s.value])
+ total_all_severities += total_by_severity[s.value]
+ one_row.append(total_all_projects)
+ stats_rows.append(one_row)
+ writer('<script>')
+ emit_const_string_array('StatsHeader', stats_header, writer)
+ emit_const_object_array('StatsRows', stats_rows, writer)
+ writer(draw_table_javascript)
+ writer('</script>')
+
+
+def emit_stats_by_project(writer, warn_patterns, project_names):
+ """Dump a google chart table of warnings per project and severity."""
+
+ warnings = create_warnings(warn_patterns, project_names)
+ total_by_project = get_total_by_project(warnings, project_names)
+ total_by_severity = get_total_by_severity(warnings, project_names)
+ stats_header = emit_table_header(total_by_severity)
+ total_all_projects, stats_rows = \
+ emit_row_counts_per_project(warnings, total_by_project, total_by_severity, project_names)
+ emit_row_counts_per_severity(total_by_severity, stats_header, stats_rows,
+ total_all_projects, writer)
+
+
+def dump_stats(writer, warn_patterns):
+ """Dump some stats about total number of warnings and such."""
+
+ known = 0
+ skipped = 0
+ unknown = 0
+ sort_warnings(warn_patterns)
+ for i in warn_patterns:
+ if i['severity'] == Severity.UNMATCHED:
+ unknown += len(i['members'])
+ elif i['severity'] == Severity.SKIP:
+ skipped += len(i['members'])
+ else:
+ known += len(i['members'])
+ writer('Number of classified warnings: <b>' + str(known) + '</b><br>')
+ writer('Number of skipped warnings: <b>' + str(skipped) + '</b><br>')
+ writer('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
+ total = unknown + known + skipped
+ extra_msg = ''
+ if total < 1000:
+ extra_msg = ' (low count may indicate incremental build)'
+ writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
+
+
+# New base table of warnings, [severity, warn_id, project, warning_message]
+# Need buttons to show warnings in different grouping options.
+# (1) Current, group by severity, id for each warning pattern
+# sort by severity, warn_id, warning_message
+# (2) Current --byproject, group by severity,
+# id for each warning pattern + project name
+# sort by severity, warn_id, project, warning_message
+# (3) New, group by project + severity,
+# id for each warning pattern
+# sort by project, severity, warn_id, warning_message
+def emit_buttons(writer):
+ writer('<button class="button" onclick="expandCollapse(1);">'
+ 'Expand all warnings</button>\n'
+ '<button class="button" onclick="expandCollapse(0);">'
+ 'Collapse all warnings</button>\n'
+ '<button class="button" onclick="groupBySeverity();">'
+ 'Group warnings by severity</button>\n'
+ '<button class="button" onclick="groupByProject();">'
+ 'Group warnings by project</button><br>')
+
+
+def all_patterns(category):
+ patterns = ''
+ for i in category['patterns']:
+ patterns += i
+ patterns += ' / '
+ return patterns
+
+
+def dump_fixed(writer, warn_patterns):
+ """Show which warnings no longer occur."""
+ anchor = 'fixed_warnings'
+ mark = anchor + '_mark'
+ writer('\n<br><p style="background-color:lightblue"><b>'
+ '<button id="' + mark + '" '
+ 'class="bt" onclick="expand(\'' + anchor + '\');">'
+ '⊕</button> Fixed warnings. '
+ 'No more occurrences. Please consider turning these into '
+           'errors if possible, before they are reintroduced into the build'
+ ':</b></p>')
+ writer('<blockquote>')
+ fixed_patterns = []
+ for i in warn_patterns:
+ if not i['members']:
+ fixed_patterns.append(i['description'] + ' (' + all_patterns(i) + ')')
+ fixed_patterns = sorted(fixed_patterns)
+ writer('<div id="' + anchor + '" style="display:none;"><table>')
+ cur_row_class = 0
+ for text in fixed_patterns:
+ cur_row_class = 1 - cur_row_class
+ # remove last '\n'
+ t = text[:-1] if text[-1] == '\n' else text
+ writer('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
+ writer('</table></div>')
+ writer('</blockquote>')
+
+
+def write_severity(csvwriter, sev, kind, warn_patterns):
+ """Count warnings of given severity and write CSV entries to writer."""
+ total = 0
+ for pattern in warn_patterns:
+ if pattern['severity'] == sev and pattern['members']:
+ n = len(pattern['members'])
+ total += n
+ warning = kind + ': ' + (pattern['description'] or '?')
+ csvwriter.writerow([n, '', warning])
+ # print number of warnings for each project, ordered by project name
+ projects = sorted(pattern['projects'].keys())
+ for project in projects:
+ csvwriter.writerow([pattern['projects'][project], project, warning])
+ csvwriter.writerow([total, '', kind + ' warnings'])
+ return total
+
+
+def dump_csv(csvwriter, warn_patterns):
+ """Dump number of warnings in CSV format to writer."""
+ sort_warnings(warn_patterns)
+ total = 0
+ for s in Severity.levels:
+ total += write_severity(csvwriter, s, s.column_header, warn_patterns)
+ csvwriter.writerow([total, '', 'All warnings'])
+
+
+# Return s with escaped backslash and quotation characters.
+def escape_string(s):
+ return s.replace('\\', '\\\\').replace('"', '\\"')
+
+
+# Return s without trailing '\n' and escape the quotation characters.
+def strip_escape_string(s):
+ if not s:
+ return s
+ s = s[:-1] if s[-1] == '\n' else s
+ return escape_string(s)
+
+
+def emit_warning_array(name, writer, warn_patterns):
+ writer('var warning_{} = ['.format(name))
+ for w in warn_patterns:
+ if name == 'severity':
+ writer('{},'.format(w[name].value))
+ else:
+ writer('{},'.format(w[name]))
+ writer('];')
+
+
+def emit_warning_arrays(writer, warn_patterns):
+ emit_warning_array('severity', writer, warn_patterns)
+ writer('var warning_description = [')
+ for w in warn_patterns:
+ if w['members']:
+ writer('"{}",'.format(escape_string(w['description'])))
+ else:
+ writer('"",') # no such warning
+ writer('];')
+
+
+scripts_for_warning_groups = """
+ function compareMessages(x1, x2) { // of the same warning type
+ return (WarningMessages[x1[2]] <= WarningMessages[x2[2]]) ? -1 : 1;
+ }
+ function byMessageCount(x1, x2) {
+ return x2[2] - x1[2]; // reversed order
+ }
+ function bySeverityMessageCount(x1, x2) {
+    // order by severity first
+ if (x1[1] != x2[1])
+ return x1[1] - x2[1];
+ return byMessageCount(x1, x2);
+ }
+ const ParseLinePattern = /^([^ :]+):(\\d+):(.+)/;
+ function addURL(line) { // used by Android
+ if (FlagURL == "") return line;
+ if (FlagSeparator == "") {
+ return line.replace(ParseLinePattern,
+ "<a target='_blank' href='" + FlagURL + "/$1'>$1</a>:$2:$3");
+ }
+ return line.replace(ParseLinePattern,
+ "<a target='_blank' href='" + FlagURL + "/$1" + FlagSeparator +
+ "$2'>$1:$2</a>:$3");
+ }
+ function addURLToLine(line, link) { // used by Chrome
+ let line_split = line.split(":");
+ let path = line_split.slice(0,3).join(":");
+ let msg = line_split.slice(3).join(":");
+ let html_link = `<a target="_blank" href="${link}">${path}</a>${msg}`;
+ return html_link;
+ }
+ function createArrayOfDictionaries(n) {
+ var result = [];
+ for (var i=0; i<n; i++) result.push({});
+ return result;
+ }
+ function groupWarningsBySeverity() {
+ // groups is an array of dictionaries,
+ // each dictionary maps from warning type to array of warning messages.
+ var groups = createArrayOfDictionaries(SeverityColors.length);
+ for (var i=0; i<Warnings.length; i++) {
+ var w = Warnings[i][0];
+ var s = WarnPatternsSeverity[w];
+ var k = w.toString();
+ if (!(k in groups[s]))
+ groups[s][k] = [];
+ groups[s][k].push(Warnings[i]);
+ }
+ return groups;
+ }
+ function groupWarningsByProject() {
+ var groups = createArrayOfDictionaries(ProjectNames.length);
+ for (var i=0; i<Warnings.length; i++) {
+ var w = Warnings[i][0];
+ var p = Warnings[i][1];
+ var k = w.toString();
+ if (!(k in groups[p]))
+ groups[p][k] = [];
+ groups[p][k].push(Warnings[i]);
+ }
+ return groups;
+ }
+ var GlobalAnchor = 0;
+ function createWarningSection(header, color, group) {
+ var result = "";
+ var groupKeys = [];
+ var totalMessages = 0;
+ for (var k in group) {
+ totalMessages += group[k].length;
+ groupKeys.push([k, WarnPatternsSeverity[parseInt(k)], group[k].length]);
+ }
+ groupKeys.sort(bySeverityMessageCount);
+ for (var idx=0; idx<groupKeys.length; idx++) {
+ var k = groupKeys[idx][0];
+ var messages = group[k];
+ var w = parseInt(k);
+ var wcolor = SeverityColors[WarnPatternsSeverity[w]];
+ var description = WarnPatternsDescription[w];
+ if (description.length == 0)
+ description = "???";
+ GlobalAnchor += 1;
+ result += "<table class='t1'><tr bgcolor='" + wcolor + "'><td>" +
+ "<button class='bt' id='" + GlobalAnchor + "_mark" +
+ "' onclick='expand(\\"" + GlobalAnchor + "\\");'>" +
+ "⊕</button> " +
+ description + " (" + messages.length + ")</td></tr></table>";
+ result += "<div id='" + GlobalAnchor +
+ "' style='display:none;'><table class='t1'>";
+ var c = 0;
+ messages.sort(compareMessages);
+ if (FlagPlatform == "chrome") {
+ for (var i=0; i<messages.length; i++) {
+ result += "<tr><td class='c" + c + "'>" +
+ addURLToLine(WarningMessages[messages[i][2]], WarningLinks[messages[i][3]]) + "</td></tr>";
+ c = 1 - c;
+ }
+ } else {
+ for (var i=0; i<messages.length; i++) {
+ result += "<tr><td class='c" + c + "'>" +
+ addURL(WarningMessages[messages[i][2]]) + "</td></tr>";
+ c = 1 - c;
+ }
+ }
+ result += "</table></div>";
+ }
+ if (result.length > 0) {
+ return "<br><span style='background-color:" + color + "'><b>" +
+ header + ": " + totalMessages +
+ "</b></span><blockquote><table class='t1'>" +
+ result + "</table></blockquote>";
+
+ }
+ return ""; // empty section
+ }
+ function generateSectionsBySeverity() {
+ var result = "";
+ var groups = groupWarningsBySeverity();
+ for (s=0; s<SeverityColors.length; s++) {
+ result += createWarningSection(SeverityHeaders[s], SeverityColors[s],
+ groups[s]);
+ }
+ return result;
+ }
+ function generateSectionsByProject() {
+ var result = "";
+ var groups = groupWarningsByProject();
+ for (i=0; i<groups.length; i++) {
+ result += createWarningSection(ProjectNames[i], 'lightgrey', groups[i]);
+ }
+ return result;
+ }
+ function groupWarnings(generator) {
+ GlobalAnchor = 0;
+ var e = document.getElementById("warning_groups");
+ e.innerHTML = generator();
+ }
+ function groupBySeverity() {
+ groupWarnings(generateSectionsBySeverity);
+ }
+ function groupByProject() {
+ groupWarnings(generateSectionsByProject);
+ }
+"""
+
+
+# Emit a JavaScript const string.
+def emit_const_string(name, value, writer):
+ writer('const ' + name + ' = "' + escape_string(value) + '";')
+
+
+# Emit a JavaScript const integer array.
+def emit_const_int_array(name, array, writer):
+ writer('const ' + name + ' = [')
+ for n in array:
+ writer(str(n) + ',')
+ writer('];')
+
+
+# Emit a JavaScript const string array.
+def emit_const_string_array(name, array, writer):
+ writer('const ' + name + ' = [')
+ for s in array:
+ writer('"' + strip_escape_string(s) + '",')
+ writer('];')
+
+
+# Emit a JavaScript const string array for HTML.
+def emit_const_html_string_array(name, array, writer):
+ writer('const ' + name + ' = [')
+ for s in array:
+ # Not using html.escape yet, to work for both python 2 and 3,
+ # until all users switch to python 3.
+ # pylint:disable=deprecated-method
+ writer('"' + cgi.escape(strip_escape_string(s)) + '",')
+ writer('];')
+
+
+# Emit a JavaScript const object array.
+def emit_const_object_array(name, array, writer):
+ writer('const ' + name + ' = [')
+ for x in array:
+ writer(str(x) + ',')
+ writer('];')
+
+
+def emit_js_data(writer, flags, warning_messages, warning_links,
+ warning_records, warn_patterns, project_names):
+ """Dump dynamic HTML page's static JavaScript data."""
+ emit_const_string('FlagPlatform', flags.platform, writer)
+ emit_const_string('FlagURL', flags.url, writer)
+ emit_const_string('FlagSeparator', flags.separator, writer)
+ emit_const_string_array('SeverityColors', [s.color for s in Severity.levels],
+ writer)
+ emit_const_string_array('SeverityHeaders',
+ [s.header for s in Severity.levels], writer)
+ emit_const_string_array('SeverityColumnHeaders',
+ [s.column_header for s in Severity.levels], writer)
+ emit_const_string_array('ProjectNames', project_names, writer)
+ # pytype: disable=attribute-error
+ emit_const_int_array('WarnPatternsSeverity',
+ [w['severity'].value for w in warn_patterns], writer)
+ # pytype: enable=attribute-error
+ emit_const_html_string_array('WarnPatternsDescription',
+ [w['description'] for w in warn_patterns],
+ writer)
+ emit_const_html_string_array('WarningMessages', warning_messages, writer)
+ emit_const_object_array('Warnings', warning_records, writer)
+ if flags.platform == 'chrome':
+ emit_const_html_string_array('WarningLinks', warning_links, writer)
+
+
+draw_table_javascript = """
+google.charts.load('current', {'packages':['table']});
+google.charts.setOnLoadCallback(drawTable);
+function drawTable() {
+ var data = new google.visualization.DataTable();
+ data.addColumn('string', StatsHeader[0]);
+ for (var i=1; i<StatsHeader.length; i++) {
+ data.addColumn('number', StatsHeader[i]);
+ }
+ data.addRows(StatsRows);
+ for (var i=0; i<StatsRows.length; i++) {
+ for (var j=0; j<StatsHeader.length; j++) {
+ data.setProperty(i, j, 'style', 'border:1px solid black;');
+ }
+ }
+ var table = new google.visualization.Table(
+ document.getElementById('stats_table'));
+ table.draw(data, {allowHtml: true, alternatingRowStyle: true});
+}
+"""
+
+
+def dump_html(flags, output_stream, warning_messages, warning_links,
+ warning_records, header_str, warn_patterns, project_names):
+ """Dump the flags output to output_stream."""
+ writer = make_writer(output_stream)
+ dump_html_prologue('Warnings for ' + header_str, writer, warn_patterns,
+ project_names)
+ dump_stats(writer, warn_patterns)
+ writer('<br><div id="stats_table"></div><br>')
+ writer('\n<script>')
+ emit_js_data(writer, flags, warning_messages, warning_links, warning_records,
+ warn_patterns, project_names)
+ writer(scripts_for_warning_groups)
+ writer('</script>')
+ emit_buttons(writer)
+ # Warning messages are grouped by severities or project names.
+ writer('<br><div id="warning_groups"></div>')
+ if flags.byproject:
+ writer('<script>groupByProject();</script>')
+ else:
+ writer('<script>groupBySeverity();</script>')
+ dump_fixed(writer, warn_patterns)
+ dump_html_epilogue(writer)
+
+
+def write_html(flags, project_names, warn_patterns, html_path, warning_messages,
+ warning_links, warning_records, header_str):
+ """Write warnings html file."""
+ if html_path:
+ with open(html_path, 'w') as f:
+ dump_html(flags, f, warning_messages, warning_links, warning_records,
+ header_str, warn_patterns, project_names)
+
+
+def write_out_csv(flags, warn_patterns, warning_messages, warning_links,
+ warning_records, header_str, project_names):
+ """Write warnings csv file."""
+ if flags.csvpath:
+ with open(flags.csvpath, 'w') as f:
+ dump_csv(csv.writer(f, lineterminator='\n'), warn_patterns)
+
+ if flags.gencsv:
+ dump_csv(csv.writer(sys.stdout, lineterminator='\n'), warn_patterns)
+ else:
+ dump_html(flags, sys.stdout, warning_messages, warning_links,
+ warning_records, header_str, warn_patterns, project_names)
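As a rough illustration of the html_writer flow above (not part of the patch): each emit_const_* helper writes one JavaScript declaration per line through a simple line-writer closure, and emit_js_data chains them inside the <script> block emitted by dump_html. A minimal, self-contained sketch, with emit_const_int_array copied from the diff and the writer redefined locally so the snippet runs on its own:

import io

def make_line_writer(stream):
  # Same shape as the writer used throughout html_writer: append one line.
  return lambda text: stream.write(text + '\n')

def emit_const_int_array(name, array, writer):
  writer('const ' + name + ' = [')
  for n in array:
    writer(str(n) + ',')
  writer('];')

buf = io.StringIO()
emit_const_int_array('WarnPatternsSeverity', [2, 3, 3], make_line_writer(buf))
print(buf.getvalue())
# const WarnPatternsSeverity = [
# 2,
# 3,
# 3,
# ];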
diff --git a/tools/warn/java_warn_patterns.py b/tools/warn/java_warn_patterns.py
index 80e2e1d..17e3864 100644
--- a/tools/warn/java_warn_patterns.py
+++ b/tools/warn/java_warn_patterns.py
@@ -16,8 +16,8 @@
"""Warning patterns for Java compiler tools."""
# pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
# pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
from .severity import Severity
@@ -485,16 +485,24 @@
java_medium('Static method should be qualified',
[r'.*\.java:.*: warning: \[static\] static method should be qualified']),
medium('AbstractInner'),
+ medium('BothPackageInfoAndHtml'),
medium('CallbackName'),
medium('ExecutorRegistration'),
+ medium('HiddenTypeParameter'),
medium('JavaApiUsedByMainlineModule'),
medium('ListenerLast'),
+ medium('MinMaxConstant'),
medium('MissingBuildMethod'),
medium('NoByteOrShort'),
medium('OverlappingConstants'),
medium('SetterReturnsThis'),
+ medium('StreamFiles'),
medium('Typo'),
medium('UseIcu'),
+ medium('fallthrough'),
+ medium('overrides'),
+ medium('serial'),
+ medium('try'),
high('AndroidInjectionBeforeSuper',
'AndroidInjection.inject() should always be invoked before calling super.lifecycleMethod()'),
high('AndroidJdkLibsChecker',
@@ -783,6 +791,8 @@
# Other javac tool warnings
java_medium('addNdkApiCoverage failed to getPackage',
[r".*: warning: addNdkApiCoverage failed to getPackage"]),
+ java_medium('bad path element',
+ [r".*: warning: \[path\] bad path element .*\.jar"]),
java_medium('Supported version from annotation processor',
[r".*: warning: Supported source version .+ from annotation processor"]),
]
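For context on the new 'bad path element' entry above, a small sketch (not from the patch; the sample javac line is made up) of the kind of warning the added regex is meant to catch:

import re

pattern = re.compile(r".*: warning: \[path\] bad path element .*\.jar")
line = ('frameworks/base/Foo.java:1: warning: [path] bad path element '
        '"out/missing.jar": no such file or directory')
print(bool(pattern.match(line)))  # True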
diff --git a/tools/warn/make_warn_patterns.py b/tools/warn/make_warn_patterns.py
index dd6a1b0..4b20493 100644
--- a/tools/warn/make_warn_patterns.py
+++ b/tools/warn/make_warn_patterns.py
@@ -16,8 +16,8 @@
"""Warning patterns for build make tools."""
# pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
# pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
from .severity import Severity
warn_patterns = [
diff --git a/tools/warn/other_warn_patterns.py b/tools/warn/other_warn_patterns.py
index 1350936..318c3d4 100644
--- a/tools/warn/other_warn_patterns.py
+++ b/tools/warn/other_warn_patterns.py
@@ -16,8 +16,8 @@
"""Warning patterns from other tools."""
# pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
# pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
from .severity import Severity
@@ -42,14 +42,20 @@
return warn('asm', Severity.MEDIUM, description, pattern_list)
-def kotlin(description, pattern_list):
- return warn('Kotlin', Severity.MEDIUM, description, pattern_list)
+def kotlin(description, pattern):
+ return warn('Kotlin', Severity.MEDIUM, description,
+ [r'.*\.kt:.*: warning: ' + pattern])
def yacc(description, pattern_list):
return warn('yacc', Severity.MEDIUM, description, pattern_list)
+def rust(severity, description, pattern):
+ return warn('Rust', severity, description,
+ [r'.*\.rs:.*: warning: ' + pattern])
+
+
warn_patterns = [
# pylint:disable=line-too-long,g-inconsistent-quotes
# aapt warnings
@@ -109,26 +115,31 @@
'description': 'Proto: Import not used',
'patterns': [r".*: warning: Import .*/.*\.proto but not used.$"]},
# Kotlin warnings
- kotlin('never used parameter or variable',
- [r".*\.kt:.*: warning: (parameter|variable) '.*' is never used$",
- r".*\.kt:.*: warning: (parameter|variable) '.*' is never used, could be renamed to _$"]),
- kotlin('initializer is redundant',
- [r".*\.kt:.*: warning: .* initializer is redundant$"]),
+ kotlin('never used parameter or variable', '.+ \'.*\' is never used'),
+ kotlin('multiple labels', '.+ more than one label .+ in this scope'),
+ kotlin('type mismatch', 'type mismatch: '),
+ kotlin('is always true', '.+ is always \'true\''),
+ kotlin('no effect', '.+ annotation has no effect for '),
+ kotlin('no cast needed', 'no cast needed'),
+ kotlin('accessor not generated', 'an accessor will not be generated '),
+ kotlin('initializer is redundant', '.* initializer is redundant$'),
kotlin('elvis operator always returns ...',
- [r".*\.kt:.*: warning: elvis operator \(\?:\) always returns .+"]),
- kotlin('shadowed name',
- [r".*\.kt:.*: warning: name shadowed: .+"]),
- kotlin('unchecked cast',
- [r".*\.kt:.*: warning: unchecked cast: .* to .*$"]),
+           r'elvis operator \(\?:\) always returns .+'),
+ kotlin('shadowed name', 'name shadowed: .+'),
+ kotlin('unchecked cast', 'unchecked cast: .* to .*$'),
+ kotlin('unreachable code', 'unreachable code'),
+ kotlin('unnecessary assertion', 'unnecessary .+ assertion .+'),
kotlin('unnecessary safe call on a non-null receiver',
- [r".*\.kt:.*: warning: unnecessary safe call on a non-null receiver"]),
+ 'unnecessary safe call on a non-null receiver'),
kotlin('Deprecated in Java',
- [r".*\.kt:.*: warning: '.*' is deprecated. Deprecated in Java"]),
+ '\'.*\' is deprecated. Deprecated in Java'),
kotlin('Replacing Handler for Executor',
- [r".*\.kt:.*: warning: .+ Replacing Handler for Executor in "]),
+ '.+ Replacing Handler for Executor in '),
kotlin('library has Kotlin runtime',
- [r".*: warning: library has Kotlin runtime bundled into it",
- r".*: warning: some JAR files .* have the Kotlin Runtime library"]),
+ '.+ has Kotlin runtime (bundled|library)'),
+ warn('Kotlin', Severity.MEDIUM, 'bundled Kotlin runtime',
+ ['.*warning: .+ (has|have the) Kotlin (runtime|Runtime library) bundled']),
+ kotlin('other warnings', '.+'), # catch all other Kotlin warnings
# Yacc warnings
yacc('deprecate directive',
[r".*\.yy?:.*: warning: deprecated directive: "]),
@@ -138,15 +149,20 @@
'description': 'yacc: fix-its can be applied',
'patterns': [r".*\.yy?: warning: fix-its can be applied."]},
# Rust warnings
- {'category': 'Rust', 'severity': Severity.HIGH,
- 'description': 'Rust: Does not derive Copy',
- 'patterns': [r".*: warning: .+ does not derive Copy"]},
- {'category': 'Rust', 'severity': Severity.MEDIUM,
- 'description': 'Rust: Deprecated range pattern',
- 'patterns': [r".*: warning: .+ range patterns are deprecated"]},
- {'category': 'Rust', 'severity': Severity.MEDIUM,
- 'description': 'Rust: Deprecated missing explicit \'dyn\'',
- 'patterns': [r".*: warning: .+ without an explicit `dyn` are deprecated"]},
+ rust(Severity.HIGH, 'Does not derive Copy', '.+ does not derive Copy'),
+ rust(Severity.MEDIUM, '... are deprecated',
+ ('(.+ are deprecated$|' +
+ 'use of deprecated item .* (use .* instead|is now preferred))')),
+ rust(Severity.MEDIUM, 'never used', '.* is never used:'),
+ rust(Severity.MEDIUM, 'unused import', 'unused import: '),
+ rust(Severity.MEDIUM, 'unnecessary attribute',
+ '.+ no longer requires an attribute'),
+ rust(Severity.MEDIUM, 'unnecessary parentheses',
+ 'unnecessary parentheses around'),
+ # Catch all RenderScript warnings
+ {'category': 'RenderScript', 'severity': Severity.LOW,
+ 'description': 'RenderScript warnings',
+ 'patterns': [r'.*\.rscript:.*: warning: ']},
# Broken/partial warning messages will be skipped.
{'category': 'Misc', 'severity': Severity.SKIP,
'description': 'skip, ,',
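To make the refactor above concrete: kotlin() and rust() now take a bare message pattern and prepend the file-suffix-specific warning prefix themselves. A toy sketch (not from the patch) that assumes warn() builds the usual category/severity/description/patterns dict used throughout these files; a plain string stands in for Severity.MEDIUM so the snippet is self-contained:

import re

def kotlin(description, pattern):
  # Stand-in for warn('Kotlin', Severity.MEDIUM, description, [...]).
  return {'category': 'Kotlin', 'severity': 'MEDIUM',
          'description': description,
          'patterns': [r'.*\.kt:.*: warning: ' + pattern]}

entry = kotlin('unchecked cast', 'unchecked cast: .* to .*$')
line = 'foo/Bar.kt:10:5: warning: unchecked cast: Any to List<String>'
print(any(re.match(p, line) for p in entry['patterns']))  # True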
diff --git a/tools/warn/severity.py b/tools/warn/severity.py
index b1c38e4..b4c03c9 100644
--- a/tools/warn/severity.py
+++ b/tools/warn/severity.py
@@ -20,24 +20,26 @@
# pylint:disable=old-style-class
+class SeverityInfo:
+
+ def __init__(self, value, color, column_header, header):
+ self.value = value
+ self.color = color
+ self.column_header = column_header
+ self.header = header
+
+
+# pylint:disable=old-style-class
class Severity:
"""Class of Severity levels where each level is a SeverityInfo."""
- class SeverityInfo:
-
- def __init__(self, value, color, column_header, header):
- self.value = value
- self.color = color
- self.column_header = column_header
- self.header = header
-
# SEVERITY_UNKNOWN should never occur since every warn_pattern listed has
# a specified severity. It exists for protobuf, the other values must
# map to non-zero values (since 0 is reserved for a default UNKNOWN), but
# logic in clang_tidy_warn.py assumes severity level values are consecutive
# ints starting with 0.
- SEVERITY_UNKNOWN = SeverityInfo(0, 'blueviolet', 'Errors of unknown severity',
- 'Unknown severity (should not occur)')
+ SEVERITY_UNKNOWN = SeverityInfo(0, 'blueviolet', 'Unknown',
+                                  'Unknown-severity warnings')
FIXMENOW = SeverityInfo(1, 'fuschia', 'FixNow',
'Critical warnings, fix me now')
HIGH = SeverityInfo(2, 'red', 'High', 'High severity warnings')
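A brief sketch of how the hoisted SeverityInfo class is consumed (not part of the patch): other modules iterate Severity.levels to build per-level color and header arrays, so levels is assumed here to be a list of the SeverityInfo instances, truncated to two entries for brevity:

class SeverityInfo:
  def __init__(self, value, color, column_header, header):
    self.value = value
    self.color = color
    self.column_header = column_header
    self.header = header

class Severity:
  SEVERITY_UNKNOWN = SeverityInfo(0, 'blueviolet', 'Unknown',
                                  'Unknown-severity warnings')
  HIGH = SeverityInfo(2, 'red', 'High', 'High severity warnings')
  levels = [SEVERITY_UNKNOWN, HIGH]  # assumed shape; the real list is longer

print([s.column_header for s in Severity.levels])  # ['Unknown', 'High']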
diff --git a/tools/warn/tidy_warn_patterns.py b/tools/warn/tidy_warn_patterns.py
index 2c5ab79..5416cb2 100644
--- a/tools/warn/tidy_warn_patterns.py
+++ b/tools/warn/tidy_warn_patterns.py
@@ -16,8 +16,8 @@
"""Warning patterns for clang-tidy."""
# pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
# pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
from .severity import Severity
@@ -78,6 +78,7 @@
group_tidy_warn_pattern('android'),
simple_tidy_warn_pattern('abseil-string-find-startswith'),
simple_tidy_warn_pattern('bugprone-argument-comment'),
+ simple_tidy_warn_pattern('bugprone-branch-clone'),
simple_tidy_warn_pattern('bugprone-copy-constructor-init'),
simple_tidy_warn_pattern('bugprone-fold-init-type'),
simple_tidy_warn_pattern('bugprone-forward-declaration-namespace'),
@@ -89,6 +90,9 @@
simple_tidy_warn_pattern('bugprone-macro-parentheses'),
simple_tidy_warn_pattern('bugprone-misplaced-widening-cast'),
simple_tidy_warn_pattern('bugprone-move-forwarding-reference'),
+ simple_tidy_warn_pattern('bugprone-parent-virtual-call'),
+ simple_tidy_warn_pattern('bugprone-posix-return'),
+ simple_tidy_warn_pattern('bugprone-sizeof-container'),
simple_tidy_warn_pattern('bugprone-sizeof-expression'),
simple_tidy_warn_pattern('bugprone-string-constructor'),
simple_tidy_warn_pattern('bugprone-string-integer-assignment'),
@@ -96,10 +100,25 @@
simple_tidy_warn_pattern('bugprone-suspicious-missing-comma'),
simple_tidy_warn_pattern('bugprone-suspicious-string-compare'),
simple_tidy_warn_pattern('bugprone-suspicious-semicolon'),
+ simple_tidy_warn_pattern('bugprone-terminating-continue'),
+ simple_tidy_warn_pattern('bugprone-too-small-loop-variable'),
simple_tidy_warn_pattern('bugprone-undefined-memory-manipulation'),
+ simple_tidy_warn_pattern('bugprone-unhandled-self-assignment'),
simple_tidy_warn_pattern('bugprone-unused-raii'),
+ simple_tidy_warn_pattern('bugprone-unused-return-value'),
simple_tidy_warn_pattern('bugprone-use-after-move'),
group_tidy_warn_pattern('bugprone'),
+ simple_tidy_warn_pattern('cert-dcl16-c'),
+ simple_tidy_warn_pattern('cert-dcl21-cpp'),
+ simple_tidy_warn_pattern('cert-dcl50-cpp'),
+ simple_tidy_warn_pattern('cert-dcl54-cpp'),
+ simple_tidy_warn_pattern('cert-dcl59-cpp'),
+ simple_tidy_warn_pattern('cert-env33-c'),
+ simple_tidy_warn_pattern('cert-err34-c'),
+ simple_tidy_warn_pattern('cert-err52-cpp'),
+ simple_tidy_warn_pattern('cert-msc30-c'),
+ simple_tidy_warn_pattern('cert-msc50-cpp'),
+ simple_tidy_warn_pattern('cert-oop54-cpp'),
group_tidy_warn_pattern('cert'),
group_tidy_warn_pattern('clang-diagnostic'),
group_tidy_warn_pattern('cppcoreguidelines'),
diff --git a/tools/warn/warn.py b/tools/warn/warn.py
index bdfd489..56e8787 100755
--- a/tools/warn/warn.py
+++ b/tools/warn/warn.py
@@ -17,21 +17,51 @@
"""Simple wrapper to run warn_common with Python standard Pool."""
import multiprocessing
+import signal
+import sys
# pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
-from .warn_common import common_main
+from . import warn_common as common
-# This parallel_process could be changed depending on platform
-# and availability of multi-process library functions.
-def parallel_process(num_cpu, classify_warnings, groups):
+def classify_warnings(args):
+ """Classify a list of warning lines.
+
+ Args:
+ args: dictionary {
+ 'group': list of (warning, link),
+ 'project_patterns': re.compile(project_list[p][1]),
+ 'warn_patterns': list of warn_pattern,
+ 'num_processes': number of processes being used for multiprocessing }
+ Returns:
+ results: a list of the classified warnings.
+ """
+ results = []
+ for line, link in args['group']:
+ common.classify_one_warning(line, link, results, args['project_patterns'],
+ args['warn_patterns'])
+
+ # After the main work, ignore all other signals to a child process,
+ # to avoid bad warning/error messages from the exit clean-up process.
+ if args['num_processes'] > 1:
+ signal.signal(signal.SIGTERM, lambda *args: sys.exit(-signal.SIGTERM))
+ return results
+
+
+def create_and_launch_subprocesses(num_cpu, classify_warnings_fn, arg_groups,
+ group_results):
pool = multiprocessing.Pool(num_cpu)
- return pool.map(classify_warnings, groups)
+ for cpu in range(num_cpu):
+ proc_result = pool.map(classify_warnings_fn, arg_groups[cpu])
+ if proc_result is not None:
+ group_results.append(proc_result)
+ return group_results
def main():
- common_main(parallel_process)
+ use_google3 = False
+ common.common_main(use_google3, create_and_launch_subprocesses,
+ classify_warnings)
if __name__ == '__main__':
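For reference, a toy illustration (not from the patch) of the args dictionary that the new classify_warnings(args) expects, matching its docstring above; the warning line, project regex, and code-search URL are made up, and plain compiled regexes stand in for the real warn_patterns entries:

import re

args = {
    'group': [('foo/a.c:12:3: warning: unused variable x',
               'https://cs.example.com/foo/a.c')],
    'project_patterns': [re.compile(r'^foo/.*')],
    'warn_patterns': [re.compile(r'.*: warning: unused variable .*')],
    'num_processes': 1,
}
for line, link in args['group']:
  matched = any(p.match(line) for p in args['warn_patterns'])
  project_idx = next(
      (i for i, p in enumerate(args['project_patterns']) if p.match(line)), -1)
  print(matched, project_idx, link)  # True 0 https://cs.example.com/foo/a.c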
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 0c9d9ef..68ed995 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -18,6 +18,8 @@
Default is to output warnings in HTML tables grouped by warning severity.
Use option --byproject to output tables grouped by source file projects.
Use option --gencsv to output warning counts in CSV format.
+
+Default input file is build.log, which can be changed with the --log flag.
"""
# List of important data structures and functions in this script.
@@ -36,431 +38,203 @@
# project_patterns[p] re.compile(project_list[p][1])
# project_names[p] project_list[p][0]
# warning_messages array of each warning message, without source url
+# warning_links array of each warning code search link; for 'chrome'
# warning_records array of [idx to warn_patterns,
# idx to project_names,
-# idx to warning_messages]
-# android_root
-# platform_version
-# target_product
-# target_variant
+# idx to warning_messages,
+# idx to warning_links]
# parse_input_file
#
-# To emit html page of warning messages:
-# flags: --byproject, --url, --separator
-# Old stuff for static html components:
-# html_script_style: static html scripts and styles
-# htmlbig:
-# dump_stats, dump_html_prologue, dump_html_epilogue:
-# emit_buttons:
-# dump_fixed
-# sort_warnings:
-# emit_stats_by_project:
-# all_patterns,
-# findproject, classify_warning
-# dump_html
-#
-# New dynamic HTML page's static JavaScript data:
-# Some data are copied from Python to JavaScript, to generate HTML elements.
-# FlagURL args.url
-# FlagSeparator args.separator
-# SeverityColors: list of colors for all severity levels
-# SeverityHeaders: list of headers for all severity levels
-# SeverityColumnHeaders: list of column_headers for all severity levels
-# ProjectNames: project_names, or project_list[*][0]
-# WarnPatternsSeverity: warn_patterns[*]['severity']
-# WarnPatternsDescription: warn_patterns[*]['description']
-# WarningMessages: warning_messages
-# Warnings: warning_records
-# StatsHeader: warning count table header row
-# StatsRows: array of warning count table rows
-#
-# New dynamic HTML page's dynamic JavaScript data:
-#
-# New dynamic HTML related function to emit data:
-# escape_string, strip_escape_string, emit_warning_arrays
-# emit_js_data():
-
-from __future__ import print_function
import argparse
-import cgi
-import csv
import io
import multiprocessing
import os
import re
-import signal
import sys
# pylint:disable=relative-beyond-top-level
-from . import cpp_warn_patterns
-from . import java_warn_patterns
-from . import make_warn_patterns
-from . import other_warn_patterns
-from . import tidy_warn_patterns
# pylint:disable=g-importing-member
-from .android_project_list import project_list
-from .severity import Severity
-
-parser = argparse.ArgumentParser(description='Convert a build log into HTML')
-parser.add_argument('--csvpath',
- help='Save CSV warning file to the passed absolute path',
- default=None)
-parser.add_argument('--gencsv',
- help='Generate a CSV file with number of various warnings',
- action='store_true',
- default=False)
-parser.add_argument('--byproject',
- help='Separate warnings in HTML output by project names',
- action='store_true',
- default=False)
-parser.add_argument('--url',
- help='Root URL of an Android source code tree prefixed '
- 'before files in warnings')
-parser.add_argument('--separator',
- help='Separator between the end of a URL and the line '
- 'number argument. e.g. #')
-parser.add_argument('--processes',
- type=int,
- default=multiprocessing.cpu_count(),
- help='Number of parallel processes to process warnings')
-parser.add_argument(dest='buildlog', metavar='build.log',
- help='Path to build.log file')
-args = parser.parse_args()
-
-warn_patterns = make_warn_patterns.warn_patterns
-warn_patterns.extend(cpp_warn_patterns.warn_patterns)
-warn_patterns.extend(java_warn_patterns.warn_patterns)
-warn_patterns.extend(tidy_warn_patterns.warn_patterns)
-warn_patterns.extend(other_warn_patterns.warn_patterns)
-
-project_patterns = []
-project_names = []
-warning_messages = []
-warning_records = []
+from . import android_project_list
+from . import chrome_project_list
+from . import cpp_warn_patterns as cpp_patterns
+from . import html_writer
+from . import java_warn_patterns as java_patterns
+from . import make_warn_patterns as make_patterns
+from . import other_warn_patterns as other_patterns
+from . import tidy_warn_patterns as tidy_patterns
-def initialize_arrays():
- """Complete global arrays before they are used."""
- global project_names, project_patterns
- project_names = [p[0] for p in project_list]
- project_patterns = [re.compile(p[1]) for p in project_list]
- for w in warn_patterns:
- w['members'] = []
- # Each warning pattern has a 'projects' dictionary, that
- # maps a project name to number of warnings in that project.
- w['projects'] = {}
+def parse_args(use_google3):
+ """Define and parse the args. Return the parse_args() result."""
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument('--capacitor_path', default='',
+ help='Save capacitor warning file to the passed absolute'
+ ' path')
+  # csvpath is named differently from the path flag above because,
+  # historically, the original Android script used csvpath and other
+  # scripts rely on that name.
+ parser.add_argument('--csvpath', default='',
+ help='Save CSV warning file to the passed path')
+ parser.add_argument('--gencsv', action='store_true',
+ help='Generate CSV file with number of various warnings')
+ parser.add_argument('--byproject', action='store_true',
+ help='Separate warnings in HTML output by project names')
+ parser.add_argument('--url', default='',
+ help='Root URL of an Android source code tree prefixed '
+ 'before files in warnings')
+ parser.add_argument('--separator', default='?l=',
+ help='Separator between the end of a URL and the line '
+ 'number argument. e.g. #')
+ parser.add_argument('--processes', default=multiprocessing.cpu_count(),
+ type=int,
+ help='Number of parallel processes to process warnings')
+ # Old Android build scripts call warn.py without --platform,
+ # so the default platform is set to 'android'.
+ parser.add_argument('--platform', default='android',
+ choices=['chrome', 'android'],
+ help='Platform of the build log')
+ # Old Android build scripts call warn.py with only a build.log file path.
+ parser.add_argument('--log', help='Path to build log file')
+ parser.add_argument(dest='buildlog', metavar='build.log',
+ default='build.log', nargs='?',
+ help='Path to build.log file')
+ flags = parser.parse_args()
+ if not flags.log:
+ flags.log = flags.buildlog
+ if not use_google3 and not os.path.exists(flags.log):
+ sys.exit('Cannot find log file: ' + flags.log)
+ return flags
-initialize_arrays()
+def get_project_names(project_list):
+ """Get project_names from project_list."""
+ return [p[0] for p in project_list]
-android_root = ''
-platform_version = 'unknown'
-target_product = 'unknown'
-target_variant = 'unknown'
-
-
-##### Data and functions to dump html file. ##################################
-
-html_head_scripts = """\
- <script type="text/javascript">
- function expand(id) {
- var e = document.getElementById(id);
- var f = document.getElementById(id + "_mark");
- if (e.style.display == 'block') {
- e.style.display = 'none';
- f.innerHTML = '⊕';
- }
- else {
- e.style.display = 'block';
- f.innerHTML = '⊖';
- }
- };
- function expandCollapse(show) {
- for (var id = 1; ; id++) {
- var e = document.getElementById(id + "");
- var f = document.getElementById(id + "_mark");
- if (!e || !f) break;
- e.style.display = (show ? 'block' : 'none');
- f.innerHTML = (show ? '⊖' : '⊕');
- }
- };
- </script>
- <style type="text/css">
- th,td{border-collapse:collapse; border:1px solid black;}
- .button{color:blue;font-size:110%;font-weight:bolder;}
- .bt{color:black;background-color:transparent;border:none;outline:none;
- font-size:140%;font-weight:bolder;}
- .c0{background-color:#e0e0e0;}
- .c1{background-color:#d0d0d0;}
- .t1{border-collapse:collapse; width:100%; border:1px solid black;}
- </style>
- <script src="https://www.gstatic.com/charts/loader.js"></script>
-"""
-
-
-def make_writer(output_stream):
-
- def writer(text):
- return output_stream.write(text + '\n')
-
- return writer
-
-
-def html_big(param):
- return '<font size="+2">' + param + '</font>'
-
-
-def dump_html_prologue(title, writer):
- writer('<html>\n<head>')
- writer('<title>' + title + '</title>')
- writer(html_head_scripts)
- emit_stats_by_project(writer)
- writer('</head>\n<body>')
- writer(html_big(title))
- writer('<p>')
-
-
-def dump_html_epilogue(writer):
- writer('</body>\n</head>\n</html>')
-
-
-def sort_warnings():
- for i in warn_patterns:
- i['members'] = sorted(set(i['members']))
-
-
-def emit_stats_by_project(writer):
- """Dump a google chart table of warnings per project and severity."""
- # warnings[p][s] is number of warnings in project p of severity s.
- # pylint:disable=g-complex-comprehension
- warnings = {p: {s.value: 0 for s in Severity.levels} for p in project_names}
- for i in warn_patterns:
- # pytype: disable=attribute-error
- s = i['severity'].value
- # pytype: enable=attribute-error
- for p in i['projects']:
- warnings[p][s] += i['projects'][p]
-
- # total_by_project[p] is number of warnings in project p.
- total_by_project = {
- p: sum(warnings[p][s.value] for s in Severity.levels)
- for p in project_names
- }
-
- # total_by_severity[s] is number of warnings of severity s.
- total_by_severity = {
- s.value: sum(warnings[p][s.value] for p in project_names)
- for s in Severity.levels
- }
-
- # emit table header
- stats_header = ['Project']
- for s in Severity.levels:
- if total_by_severity[s.value]:
- stats_header.append(
- '<span style=\'background-color:{}\'>{}</span>'.format(
- s.color, s.column_header))
- stats_header.append('TOTAL')
-
- # emit a row of warning counts per project, skip no-warning projects
- total_all_projects = 0
- stats_rows = []
- for p in project_names:
- if total_by_project[p]:
- one_row = [p]
- for s in Severity.levels:
- if total_by_severity[s.value]:
- one_row.append(warnings[p][s.value])
- one_row.append(total_by_project[p])
- stats_rows.append(one_row)
- total_all_projects += total_by_project[p]
-
- # emit a row of warning counts per severity
- total_all_severities = 0
- one_row = ['<b>TOTAL</b>']
- for s in Severity.levels:
- if total_by_severity[s.value]:
- one_row.append(total_by_severity[s.value])
- total_all_severities += total_by_severity[s.value]
- one_row.append(total_all_projects)
- stats_rows.append(one_row)
- writer('<script>')
- emit_const_string_array('StatsHeader', stats_header, writer)
- emit_const_object_array('StatsRows', stats_rows, writer)
- writer(draw_table_javascript)
- writer('</script>')
-
-
-def dump_stats(writer):
- """Dump some stats about total number of warnings and such."""
- known = 0
- skipped = 0
- unknown = 0
- sort_warnings()
- for i in warn_patterns:
- if i['severity'] == Severity.UNMATCHED:
- unknown += len(i['members'])
- elif i['severity'] == Severity.SKIP:
- skipped += len(i['members'])
- else:
- known += len(i['members'])
- writer('Number of classified warnings: <b>' + str(known) + '</b><br>')
- writer('Number of skipped warnings: <b>' + str(skipped) + '</b><br>')
- writer('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
- total = unknown + known + skipped
- extra_msg = ''
- if total < 1000:
- extra_msg = ' (low count may indicate incremental build)'
- writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
-
-
-# New base table of warnings, [severity, warn_id, project, warning_message]
-# Need buttons to show warnings in different grouping options.
-# (1) Current, group by severity, id for each warning pattern
-# sort by severity, warn_id, warning_message
-# (2) Current --byproject, group by severity,
-# id for each warning pattern + project name
-# sort by severity, warn_id, project, warning_message
-# (3) New, group by project + severity,
-# id for each warning pattern
-# sort by project, severity, warn_id, warning_message
-def emit_buttons(writer):
- writer('<button class="button" onclick="expandCollapse(1);">'
- 'Expand all warnings</button>\n'
- '<button class="button" onclick="expandCollapse(0);">'
- 'Collapse all warnings</button>\n'
- '<button class="button" onclick="groupBySeverity();">'
- 'Group warnings by severity</button>\n'
- '<button class="button" onclick="groupByProject();">'
- 'Group warnings by project</button><br>')
-
-
-def all_patterns(category):
- patterns = ''
- for i in category['patterns']:
- patterns += i
- patterns += ' / '
- return patterns
-
-
-def dump_fixed(writer):
- """Show which warnings no longer occur."""
- anchor = 'fixed_warnings'
- mark = anchor + '_mark'
- writer('\n<br><p style="background-color:lightblue"><b>'
- '<button id="' + mark + '" '
- 'class="bt" onclick="expand(\'' + anchor + '\');">'
- '⊕</button> Fixed warnings. '
- 'No more occurrences. Please consider turning these into '
- 'errors if possible, before they are reintroduced in to the build'
- ':</b></p>')
- writer('<blockquote>')
- fixed_patterns = []
- for i in warn_patterns:
- if not i['members']:
- fixed_patterns.append(i['description'] + ' (' + all_patterns(i) + ')')
- fixed_patterns = sorted(fixed_patterns)
- writer('<div id="' + anchor + '" style="display:none;"><table>')
- cur_row_class = 0
- for text in fixed_patterns:
- cur_row_class = 1 - cur_row_class
- # remove last '\n'
- t = text[:-1] if text[-1] == '\n' else text
- writer('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
- writer('</table></div>')
- writer('</blockquote>')
-
-
-def find_project_index(line):
- for p in range(len(project_patterns)):
- if project_patterns[p].match(line):
- return p
+def find_project_index(line, project_patterns):
+ for i, p in enumerate(project_patterns):
+ if p.match(line):
+ return i
return -1
-def classify_one_warning(line, results):
+def classify_one_warning(warning, link, results, project_patterns,
+ warn_patterns):
"""Classify one warning line."""
- for i in range(len(warn_patterns)):
- w = warn_patterns[i]
+ for i, w in enumerate(warn_patterns):
for cpat in w['compiled_patterns']:
- # pytype: disable=attribute-error
- if cpat.match(line):
- p = find_project_index(line)
- results.append([line, i, p])
+ if cpat.match(warning):
+ p = find_project_index(warning, project_patterns)
+ results.append([warning, link, i, p])
return
else:
# If we end up here, there was a problem parsing the log
# probably caused by 'make -j' mixing the output from
# 2 or more concurrent compiles
pass
- # pytype: enable=attribute-error
-def classify_warnings(lines):
- results = []
- for line in lines:
- classify_one_warning(line, results)
- # After the main work, ignore all other signals to a child process,
- # to avoid bad warning/error messages from the exit clean-up process.
- if args.processes > 1:
- signal.signal(signal.SIGTERM, lambda *args: sys.exit(-signal.SIGTERM))
- return results
+def remove_prefix(s, sub):
+ """Remove everything before last occurrence of substring sub in string s."""
+ if sub in s:
+ inc_sub = s.rfind(sub)
+ return s[inc_sub:]
+ return s
-def parallel_classify_warnings(warning_lines, parallel_process):
- """Classify all warning lines with num_cpu parallel processes."""
- num_cpu = args.processes
- if num_cpu > 1:
- groups = [[] for x in range(num_cpu)]
- i = 0
- for x in warning_lines:
- groups[i].append(x)
- i = (i + 1) % num_cpu
- group_results = parallel_process(num_cpu, classify_warnings, groups)
- else:
- group_results = [classify_warnings(warning_lines)]
+# TODO(emmavukelj): Don't have any generate_*_cs_link functions call
+# normalize_path a second time (the first time being in parse_input_file)
+def generate_cs_link(warning_line, flags, android_root=None):
+ if flags.platform == 'chrome':
+ return generate_chrome_cs_link(warning_line, flags)
+ if flags.platform == 'android':
+ return generate_android_cs_link(warning_line, flags, android_root)
+ return 'https://cs.corp.google.com/'
- for result in group_results:
- for line, pattern_idx, project_idx in result:
- pattern = warn_patterns[pattern_idx]
- pattern['members'].append(line)
- message_idx = len(warning_messages)
- warning_messages.append(line)
- warning_records.append([pattern_idx, project_idx, message_idx])
- pname = '???' if project_idx < 0 else project_names[project_idx]
- # Count warnings by project.
- if pname in pattern['projects']:
- pattern['projects'][pname] += 1
- else:
- pattern['projects'][pname] = 1
+
+def generate_android_cs_link(warning_line, flags, android_root):
+ """Generate the code search link for a warning line in Android."""
+ # max_splits=2 -> only 3 items
+ raw_path, line_number_str, _ = warning_line.split(':', 2)
+ normalized_path = normalize_path(raw_path, flags, android_root)
+ if not flags.url:
+ return normalized_path
+ link_path = flags.url + '/' + normalized_path
+ if line_number_str.isdigit():
+ link_path += flags.separator + line_number_str
+ return link_path
+
+
+def generate_chrome_cs_link(warning_line, flags):
+ """Generate the code search link for a warning line in Chrome."""
+ split_line = warning_line.split(':')
+ raw_path = split_line[0]
+ normalized_path = normalize_path(raw_path, flags)
+ link_base = 'https://cs.chromium.org/'
+ link_add = 'chromium'
+ link_path = None
+
+  # Go through a few specific directory cases and specify the proper behavior
+  # for each. This list of cases was accumulated through trial and error while
+  # manually going through the warnings.
+ #
+ # This code pattern of using case-specific "if"s instead of "elif"s looks
+ # possibly accidental and mistaken but it is intentional because some paths
+ # fall under several cases (e.g. third_party/lib/nghttp2_frame.c) and for
+  # those we want the most specific case to be applied. If there were reliable
+  # knowledge of exactly which paths fall under multiple cases, this could be
+  # changed to "elif"s, but no such reliable list exists at the moment.
+ if '/src/third_party' in raw_path:
+ link_path = remove_prefix(raw_path, '/src/third_party/')
+ if '/chrome_root/src_internal/' in raw_path:
+ link_path = remove_prefix(raw_path, '/chrome_root/src_internal/')
+ link_path = link_path[len('/chrome_root'):] # remove chrome_root
+ if '/chrome_root/src/' in raw_path:
+ link_path = remove_prefix(raw_path, '/chrome_root/src/')
+ link_path = link_path[len('/chrome_root'):] # remove chrome_root
+ if '/libassistant/' in raw_path:
+ link_add = 'eureka_internal/chromium/src'
+ link_base = 'https://cs.corp.google.com/' # internal data
+ link_path = remove_prefix(normalized_path, '/libassistant/')
+ if raw_path.startswith('gen/'):
+ link_path = '/src/out/Debug/gen/' + normalized_path
+ if '/gen/' in raw_path:
+ return '%s?q=file:%s' % (link_base, remove_prefix(normalized_path, '/gen/'))
+
+ if not link_path and (raw_path.startswith('src/') or
+ raw_path.startswith('src_internal/')):
+ link_path = '/%s' % raw_path
+
+ if not link_path: # can't find specific link, send a query
+ return '%s?q=file:%s' % (link_base, normalized_path)
+
+ line_number = int(split_line[1])
+ link = '%s%s%s?l=%d' % (link_base, link_add, link_path, line_number)
+ return link
def find_warn_py_and_android_root(path):
- """Set and return android_root path if it is found."""
- global android_root
+ """Return android source root path if warn.py is found."""
parts = path.split('/')
for idx in reversed(range(2, len(parts))):
root_path = '/'.join(parts[:idx])
# Android root directory should contain this script.
if os.path.exists(root_path + '/build/make/tools/warn.py'):
- android_root = root_path
- return True
- return False
+ return root_path
+ return ''
-def find_android_root():
- """Guess android_root from common prefix of file paths."""
+def find_android_root(buildlog):
+ """Guess android source root from common prefix of file paths."""
# Use the longest common prefix of the absolute file paths
# of the first 10000 warning messages as the android_root.
- global android_root
- warning_lines = set()
+ warning_lines = []
warning_pattern = re.compile('^/[^ ]*/[^ ]*: warning: .*')
count = 0
- infile = io.open(args.buildlog, mode='r', encoding='utf-8')
- for line in infile:
+ for line in buildlog:
if warning_pattern.match(line):
- warning_lines.add(line)
+ warning_lines.append(line)
count += 1
if count > 9999:
break
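A quick illustration of the link helpers added in this hunk (not part of the patch; the paths and URL below are made up). remove_prefix keeps everything from the last occurrence of the substring onward, and generate_android_cs_link joins --url, the normalized path, and the default '?l=' separator with the line number:

def remove_prefix(s, sub):
  # Copied from the hunk above: keep from the last occurrence of sub onward.
  if sub in s:
    return s[s.rfind(sub):]
  return s

print(remove_prefix('/b/work/src/third_party/zlib/inflate.c',
                    '/src/third_party/'))
# /src/third_party/zlib/inflate.c

# With flags.url='https://cs.android.com' and the default flags.separator='?l=',
# generate_android_cs_link('frameworks/base/foo.cc:42: warning: ...', flags, '')
# would return 'https://cs.android.com/frameworks/base/foo.cc?l=42'.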
@@ -468,56 +242,110 @@
# the source tree root.
if count < 100:
path = os.path.normpath(re.sub(':.*$', '', line))
- if find_warn_py_and_android_root(path):
- return
+ android_root = find_warn_py_and_android_root(path)
+ if android_root:
+ return android_root
# Do not use common prefix of a small number of paths.
if count > 10:
# pytype: disable=wrong-arg-types
root_path = os.path.commonprefix(warning_lines)
# pytype: enable=wrong-arg-types
if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
- android_root = root_path[:-1]
+ return root_path[:-1]
+ return ''
-def remove_android_root_prefix(path):
+def remove_android_root_prefix(path, android_root):
"""Remove android_root prefix from path if it is found."""
if path.startswith(android_root):
return path[1 + len(android_root):]
+ return path
+
+
+def normalize_path(path, flags, android_root=None):
+ """Normalize file path relative to src/ or src-internal/ directory."""
+ path = os.path.normpath(path)
+
+ if flags.platform == 'android':
+ if android_root:
+ return remove_android_root_prefix(path, android_root)
+ return path
+
+ # Remove known prefix of root path and normalize the suffix.
+ idx = path.find('chrome_root/')
+ if idx >= 0:
+ # remove chrome_root/, we want path relative to that
+ return path[idx + len('chrome_root/'):]
else:
return path
-def normalize_path(path):
- """Normalize file path relative to android_root."""
- # If path is not an absolute path, just normalize it.
- path = os.path.normpath(path)
- # Remove known prefix of root path and normalize the suffix.
- if path[0] == '/' and android_root:
- return remove_android_root_prefix(path)
- return path
-
-
-def normalize_warning_line(line):
- """Normalize file path relative to android_root in a warning line."""
- # replace fancy quotes with plain ol' quotes
+def normalize_warning_line(line, flags, android_root=None):
+ """Normalize file path relative to src directory in a warning line."""
line = re.sub(u'[\u2018\u2019]', '\'', line)
# replace non-ASCII chars to spaces
line = re.sub(u'[^\x00-\x7f]', ' ', line)
line = line.strip()
first_column = line.find(':')
- if first_column > 0:
- return normalize_path(line[:first_column]) + line[first_column:]
- else:
- return line
+ return normalize_path(line[:first_column], flags,
+ android_root) + line[first_column:]
-def parse_input_file(infile):
- """Parse input file, collect parameters and warning lines."""
- global android_root
- global platform_version
- global target_product
- global target_variant
- line_counter = 0
+def parse_input_file_chrome(infile, flags):
+ """Parse Chrome input file, collect parameters and warning lines."""
+ platform_version = 'unknown'
+ board_name = 'unknown'
+ architecture = 'unknown'
+
+ # only handle warning lines of format 'file_path:line_no:col_no: warning: ...'
+ chrome_warning_pattern = r'^[^ ]*/[^ ]*:[0-9]+:[0-9]+: warning: .*'
+
+ warning_pattern = re.compile(chrome_warning_pattern)
+
+ # Collect all unique warning lines
+  # Removing duplicate warnings saves ~8% of the time when parsing
+  # a typical build log.
+ unique_warnings = dict()
+ for line in infile:
+ if warning_pattern.match(line):
+ normalized_line = normalize_warning_line(line, flags)
+ if normalized_line not in unique_warnings:
+ unique_warnings[normalized_line] = generate_cs_link(line, flags)
+ elif (platform_version == 'unknown' or board_name == 'unknown' or
+ architecture == 'unknown'):
+ m = re.match(r'.+Package:.+chromeos-base/chromeos-chrome-', line)
+ if m is not None:
+ platform_version = 'R' + line.split('chrome-')[1].split('_')[0]
+ continue
+ m = re.match(r'.+Source\sunpacked\sin\s(.+)', line)
+ if m is not None:
+ board_name = m.group(1).split('/')[2]
+ continue
+ m = re.match(r'.+USE:\s*([^\s]*).*', line)
+ if m is not None:
+ architecture = m.group(1)
+ continue
+
+ header_str = '%s - %s - %s' % (platform_version, board_name, architecture)
+ return unique_warnings, header_str
+
+
+def add_normalized_line_to_warnings(line, flags, android_root, unique_warnings):
+ """Parse/normalize path, updating warning line and add to warnings dict."""
+ normalized_line = normalize_warning_line(line, flags, android_root)
+ if normalized_line not in unique_warnings:
+ unique_warnings[normalized_line] = generate_cs_link(line, flags,
+ android_root)
+ return unique_warnings
+
+
+def parse_input_file_android(infile, flags):
+ """Parse Android input file, collect parameters and warning lines."""
+ platform_version = 'unknown'
+ target_product = 'unknown'
+ target_variant = 'unknown'
+ android_root = find_android_root(infile)
+ infile.seek(0)
# rustc warning messages have two lines that should be combined:
# warning: description
@@ -532,20 +360,25 @@
warning_without_file = re.compile('^warning: .*')
rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
- # Collect all warnings into the warning_lines set.
- warning_lines = set()
+ # Collect all unique warning lines
+  # Removing duplicate warnings saves ~8% of the time when parsing
+  # a typical build log.
+ unique_warnings = dict()
+ line_counter = 0
prev_warning = ''
for line in infile:
if prev_warning:
if rustc_file_position.match(line):
# must be a rustc warning, combine 2 lines into one warning
line = line.strip().replace('--> ', '') + ': ' + prev_warning
- warning_lines.add(normalize_warning_line(line))
+ unique_warnings = add_normalized_line_to_warnings(
+ line, flags, android_root, unique_warnings)
prev_warning = ''
continue
# add prev_warning, and then process the current line
prev_warning = 'unknown_source_file: ' + prev_warning
- warning_lines.add(normalize_warning_line(prev_warning))
+ unique_warnings = add_normalized_line_to_warnings(
+ prev_warning, flags, android_root, unique_warnings)
prev_warning = ''
if warning_pattern.match(line):
@@ -553,7 +386,8 @@
# save this line and combine it with the next line
prev_warning = line
else:
- warning_lines.add(normalize_warning_line(line))
+ unique_warnings = add_normalized_line_to_warnings(
+ line, flags, android_root, unique_warnings)
continue
if line_counter < 100:
@@ -568,336 +402,182 @@
m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
if m is not None:
target_variant = m.group(0)
- m = re.search('.* TOP=([^ ]*) .*', line)
+ m = re.search('(?<=^TOP=).*', line)
if m is not None:
android_root = m.group(1)
- return warning_lines
+
+ if android_root:
+ new_unique_warnings = dict()
+ for warning_line in unique_warnings:
+ normalized_line = normalize_warning_line(warning_line, flags,
+ android_root)
+ new_unique_warnings[normalized_line] = generate_android_cs_link(
+ warning_line, flags, android_root)
+ unique_warnings = new_unique_warnings
+
+ header_str = '%s - %s - %s' % (platform_version, target_product,
+ target_variant)
+ return unique_warnings, header_str
-# Return s with escaped backslash and quotation characters.
-def escape_string(s):
- # pytype: disable=attribute-error
- return s.replace('\\', '\\\\').replace('"', '\\"')
- # pytype: enable=attribute-error
+def parse_input_file(infile, flags):
+ if flags.platform == 'chrome':
+ return parse_input_file_chrome(infile, flags)
+ if flags.platform == 'android':
+ return parse_input_file_android(infile, flags)
+ raise RuntimeError('parse_input_file not defined for platform %s' %
+ flags.platform)
-# Return s without trailing '\n' and escape the quotation characters.
-def strip_escape_string(s):
- if not s:
- return s
- s = s[:-1] if s[-1] == '\n' else s
- return escape_string(s)
+def parse_compiler_output(compiler_output):
+ """Parse compiler output for relevant info."""
+ split_output = compiler_output.split(':', 3) # 3 = max splits
+ file_path = split_output[0]
+ line_number = int(split_output[1])
+ col_number = int(split_output[2].split(' ')[0])
+ warning_message = split_output[3]
+ return file_path, line_number, col_number, warning_message
-def emit_warning_array(name, writer):
- writer('var warning_{} = ['.format(name))
- for i in range(len(warn_patterns)):
- writer('{},'.format(warn_patterns[i][name]))
- writer('];')
-
-
-def emit_warning_arrays(writer):
- emit_warning_array('severity', writer)
- writer('var warning_description = [')
- for i in range(len(warn_patterns)):
- if warn_patterns[i]['members']:
- writer('"{}",'.format(escape_string(warn_patterns[i]['description'])))
- else:
- writer('"",') # no such warning
- writer('];')
-
-
-scripts_for_warning_groups = """
- function compareMessages(x1, x2) { // of the same warning type
- return (WarningMessages[x1[2]] <= WarningMessages[x2[2]]) ? -1 : 1;
- }
- function byMessageCount(x1, x2) {
- return x2[2] - x1[2]; // reversed order
- }
- function bySeverityMessageCount(x1, x2) {
- // orer by severity first
- if (x1[1] != x2[1])
- return x1[1] - x2[1];
- return byMessageCount(x1, x2);
- }
- const ParseLinePattern = /^([^ :]+):(\\d+):(.+)/;
- function addURL(line) {
- if (FlagURL == "") return line;
- if (FlagSeparator == "") {
- return line.replace(ParseLinePattern,
- "<a target='_blank' href='" + FlagURL + "/$1'>$1</a>:$2:$3");
- }
- return line.replace(ParseLinePattern,
- "<a target='_blank' href='" + FlagURL + "/$1" + FlagSeparator +
- "$2'>$1:$2</a>:$3");
- }
- function createArrayOfDictionaries(n) {
- var result = [];
- for (var i=0; i<n; i++) result.push({});
- return result;
- }
- function groupWarningsBySeverity() {
- // groups is an array of dictionaries,
- // each dictionary maps from warning type to array of warning messages.
- var groups = createArrayOfDictionaries(SeverityColors.length);
- for (var i=0; i<Warnings.length; i++) {
- var w = Warnings[i][0];
- var s = WarnPatternsSeverity[w];
- var k = w.toString();
- if (!(k in groups[s]))
- groups[s][k] = [];
- groups[s][k].push(Warnings[i]);
- }
- return groups;
- }
- function groupWarningsByProject() {
- var groups = createArrayOfDictionaries(ProjectNames.length);
- for (var i=0; i<Warnings.length; i++) {
- var w = Warnings[i][0];
- var p = Warnings[i][1];
- var k = w.toString();
- if (!(k in groups[p]))
- groups[p][k] = [];
- groups[p][k].push(Warnings[i]);
- }
- return groups;
- }
- var GlobalAnchor = 0;
- function createWarningSection(header, color, group) {
- var result = "";
- var groupKeys = [];
- var totalMessages = 0;
- for (var k in group) {
- totalMessages += group[k].length;
- groupKeys.push([k, WarnPatternsSeverity[parseInt(k)], group[k].length]);
- }
- groupKeys.sort(bySeverityMessageCount);
- for (var idx=0; idx<groupKeys.length; idx++) {
- var k = groupKeys[idx][0];
- var messages = group[k];
- var w = parseInt(k);
- var wcolor = SeverityColors[WarnPatternsSeverity[w]];
- var description = WarnPatternsDescription[w];
- if (description.length == 0)
- description = "???";
- GlobalAnchor += 1;
- result += "<table class='t1'><tr bgcolor='" + wcolor + "'><td>" +
- "<button class='bt' id='" + GlobalAnchor + "_mark" +
- "' onclick='expand(\\"" + GlobalAnchor + "\\");'>" +
- "⊕</button> " +
- description + " (" + messages.length + ")</td></tr></table>";
- result += "<div id='" + GlobalAnchor +
- "' style='display:none;'><table class='t1'>";
- var c = 0;
- messages.sort(compareMessages);
- for (var i=0; i<messages.length; i++) {
- result += "<tr><td class='c" + c + "'>" +
- addURL(WarningMessages[messages[i][2]]) + "</td></tr>";
- c = 1 - c;
- }
- result += "</table></div>";
- }
- if (result.length > 0) {
- return "<br><span style='background-color:" + color + "'><b>" +
- header + ": " + totalMessages +
- "</b></span><blockquote><table class='t1'>" +
- result + "</table></blockquote>";
-
- }
- return ""; // empty section
- }
- function generateSectionsBySeverity() {
- var result = "";
- var groups = groupWarningsBySeverity();
- for (s=0; s<SeverityColors.length; s++) {
- result += createWarningSection(SeverityHeaders[s], SeverityColors[s],
- groups[s]);
- }
- return result;
- }
- function generateSectionsByProject() {
- var result = "";
- var groups = groupWarningsByProject();
- for (i=0; i<groups.length; i++) {
- result += createWarningSection(ProjectNames[i], 'lightgrey', groups[i]);
- }
- return result;
- }
- function groupWarnings(generator) {
- GlobalAnchor = 0;
- var e = document.getElementById("warning_groups");
- e.innerHTML = generator();
- }
- function groupBySeverity() {
- groupWarnings(generateSectionsBySeverity);
- }
- function groupByProject() {
- groupWarnings(generateSectionsByProject);
- }
-"""
-
-
-# Emit a JavaScript const string
-def emit_const_string(name, value, writer):
- writer('const ' + name + ' = "' + escape_string(value) + '";')
-
-
-# Emit a JavaScript const integer array.
-def emit_const_int_array(name, array, writer):
- writer('const ' + name + ' = [')
- for n in array:
- writer(str(n) + ',')
- writer('];')
-
-
-# Emit a JavaScript const string array.
-def emit_const_string_array(name, array, writer):
- writer('const ' + name + ' = [')
- for s in array:
- writer('"' + strip_escape_string(s) + '",')
- writer('];')
-
-
-# Emit a JavaScript const string array for HTML.
-def emit_const_html_string_array(name, array, writer):
- writer('const ' + name + ' = [')
- for s in array:
- # Not using html.escape yet, to work for both python 2 and 3,
- # until all users switch to python 3.
- # pylint:disable=deprecated-method
- writer('"' + cgi.escape(strip_escape_string(s)) + '",')
- writer('];')
-
-
-# Emit a JavaScript const object array.
-def emit_const_object_array(name, array, writer):
- writer('const ' + name + ' = [')
- for x in array:
- writer(str(x) + ',')
- writer('];')
-
-
-def emit_js_data(writer):
- """Dump dynamic HTML page's static JavaScript data."""
- emit_const_string('FlagURL',
- args.url if args.url else '', writer)
- emit_const_string('FlagSeparator',
- args.separator if args.separator else '', writer)
- emit_const_string_array('SeverityColors',
- [s.color for s in Severity.levels], writer)
- emit_const_string_array('SeverityHeaders',
- [s.header for s in Severity.levels], writer)
- emit_const_string_array('SeverityColumnHeaders',
- [s.column_header for s in Severity.levels], writer)
- emit_const_string_array('ProjectNames', project_names, writer)
- # pytype: disable=attribute-error
- emit_const_int_array('WarnPatternsSeverity',
- [w['severity'].value for w in warn_patterns], writer)
- # pytype: enable=attribute-error
- emit_const_html_string_array('WarnPatternsDescription',
- [w['description'] for w in warn_patterns],
- writer)
- emit_const_html_string_array('WarningMessages', warning_messages, writer)
- emit_const_object_array('Warnings', warning_records, writer)
-
-draw_table_javascript = """
-google.charts.load('current', {'packages':['table']});
-google.charts.setOnLoadCallback(drawTable);
-function drawTable() {
- var data = new google.visualization.DataTable();
- data.addColumn('string', StatsHeader[0]);
- for (var i=1; i<StatsHeader.length; i++) {
- data.addColumn('number', StatsHeader[i]);
- }
- data.addRows(StatsRows);
- for (var i=0; i<StatsRows.length; i++) {
- for (var j=0; j<StatsHeader.length; j++) {
- data.setProperty(i, j, 'style', 'border:1px solid black;');
- }
- }
- var table = new google.visualization.Table(
- document.getElementById('stats_table'));
- table.draw(data, {allowHtml: true, alternatingRowStyle: true});
-}
-"""
-
-
-def dump_html(output_stream):
- """Dump the html output to output_stream."""
- writer = make_writer(output_stream)
- dump_html_prologue('Warnings for ' + platform_version + ' - ' +
- target_product + ' - ' + target_variant, writer)
- dump_stats(writer)
- writer('<br><div id="stats_table"></div><br>')
- writer('\n<script>')
- emit_js_data(writer)
- writer(scripts_for_warning_groups)
- writer('</script>')
- emit_buttons(writer)
- # Warning messages are grouped by severities or project names.
- writer('<br><div id="warning_groups"></div>')
- if args.byproject:
- writer('<script>groupByProject();</script>')
+def get_warn_patterns(platform):
+ """Get and initialize warn_patterns."""
+ warn_patterns = []
+ if platform == 'chrome':
+ warn_patterns = cpp_patterns.warn_patterns
+ elif platform == 'android':
+    warn_patterns = (make_patterns.warn_patterns + cpp_patterns.warn_patterns +
+                     java_patterns.warn_patterns + tidy_patterns.warn_patterns +
+                     other_patterns.warn_patterns)
else:
- writer('<script>groupBySeverity();</script>')
- dump_fixed(writer)
- dump_html_epilogue(writer)
+ raise Exception('platform name %s is not valid' % platform)
+ for w in warn_patterns:
+ w['members'] = []
+ # Each warning pattern has a 'projects' dictionary, that
+ # maps a project name to number of warnings in that project.
+ w['projects'] = {}
+ return warn_patterns
-##### Functions to count warnings and dump csv file. #########################
+def get_project_list(platform):
+ """Return project list for appropriate platform."""
+ if platform == 'chrome':
+ return chrome_project_list.project_list
+ if platform == 'android':
+ return android_project_list.project_list
+ raise Exception('platform name %s is not valid' % platform)
-def description_for_csv(category):
- if not category['description']:
- return '?'
- return category['description']
+def parallel_classify_warnings(warning_data, args, project_names,
+ project_patterns, warn_patterns,
+ use_google3, create_launch_subprocs_fn,
+ classify_warnings_fn):
+ """Classify all warning lines with num_cpu parallel processes."""
+ num_cpu = args.processes
+ group_results = []
+ if num_cpu > 1:
+    # Distribute the warning lines round-robin across num_cpu groups and
+    # classify each group in a parallel subprocess.
+ warning_groups = [[] for _ in range(num_cpu)]
+ i = 0
+ for warning, link in warning_data.items():
+ warning_groups[i].append((warning, link))
+ i = (i + 1) % num_cpu
+ arg_groups = [[] for _ in range(num_cpu)]
+ for i, group in enumerate(warning_groups):
+ arg_groups[i] = [{
+ 'group': group,
+ 'project_patterns': project_patterns,
+ 'warn_patterns': warn_patterns,
+ 'num_processes': num_cpu
+ }]
-def count_severity(writer, sev, kind):
- """Count warnings of given severity."""
- total = 0
- for i in warn_patterns:
- if i['severity'] == sev and i['members']:
- n = len(i['members'])
- total += n
- warning = kind + ': ' + description_for_csv(i)
- writer.writerow([n, '', warning])
- # print number of warnings for each project, ordered by project name.
- # pytype: disable=attribute-error
- projects = sorted(i['projects'].keys())
- # pytype: enable=attribute-error
- for p in projects:
- writer.writerow([i['projects'][p], p, warning])
- writer.writerow([total, '', kind + ' warnings'])
-
- return total
-
-
-# dump number of warnings in csv format to stdout
-def dump_csv(writer):
- """Dump number of warnings in csv format to stdout."""
- sort_warnings()
- total = 0
- for s in Severity.levels:
- if s != Severity.SEVERITY_UNKNOWN:
- total += count_severity(writer, s, s.column_header)
- writer.writerow([total, '', 'All warnings'])
-
-
-def common_main(parallel_process):
- """Real main function to classify warnings and generate .html file."""
- find_android_root()
- # We must use 'utf-8' codec to parse some non-ASCII code in warnings.
- warning_lines = parse_input_file(
- io.open(args.buildlog, mode='r', encoding='utf-8'))
- parallel_classify_warnings(warning_lines, parallel_process)
-  # If the user passes a csv path, save the file output to that path.
-  # If the user also passed gencsv, write the output to stdout.
-  # If the user did not pass the gencsv flag, dump the html report to stdout.
- if args.csvpath:
- with open(args.csvpath, 'w') as f:
- dump_csv(csv.writer(f, lineterminator='\n'))
- if args.gencsv:
- dump_csv(csv.writer(sys.stdout, lineterminator='\n'))
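+      # Assumed launcher contract: create_launch_subprocs_fn runs
+      # classify_warnings_fn on each of the num_cpu arg_groups in its own
+      # subprocess and returns the collected per-process result lists
+      # (group_results is passed in so a launcher may append to it).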
+ group_results = create_launch_subprocs_fn(num_cpu,
+ classify_warnings_fn,
+ arg_groups,
+ group_results)
else:
- dump_html(sys.stdout)
+ group_results = []
+ for warning, link in warning_data.items():
+ classify_one_warning(warning, link, group_results,
+ project_patterns, warn_patterns)
+ group_results = [group_results]
+
+ warning_messages = []
+ warning_links = []
+ warning_records = []
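+  # Each warning record is [pattern_idx, project_idx, message_idx, link_idx].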
+ if use_google3:
+ group_results = [group_results]
+ for group_result in group_results:
+ for result in group_result:
+ for line, link, pattern_idx, project_idx in result:
+ pattern = warn_patterns[pattern_idx]
+ pattern['members'].append(line)
+ message_idx = len(warning_messages)
+ warning_messages.append(line)
+ link_idx = len(warning_links)
+ warning_links.append(link)
+ warning_records.append([pattern_idx, project_idx, message_idx,
+ link_idx])
+ pname = '???' if project_idx < 0 else project_names[project_idx]
+ # Count warnings by project.
+ if pname in pattern['projects']:
+ pattern['projects'][pname] += 1
+ else:
+ pattern['projects'][pname] = 1
+ return warning_messages, warning_links, warning_records
+
+
+def process_log(logfile, flags, project_names, project_patterns, warn_patterns,
+ html_path, use_google3, create_launch_subprocs_fn,
+ classify_warnings_fn, logfile_object):
+ # pylint: disable=g-doc-args
+ # pylint: disable=g-doc-return-or-yield
+ """Function that handles processing of a log.
+
+  This is isolated into its own function (rather than inlined in main) so that
+  it can be shared by warn.py and the borg job process_gs_logs.py without
+  duplicating code.
+ Note that if the arguments to this function change, process_gs_logs.py must
+ be updated accordingly.
+ """
+ if logfile_object is None:
+ with io.open(logfile, encoding='utf-8') as log:
+ warning_lines_and_links, header_str = parse_input_file(log, flags)
+ else:
+ warning_lines_and_links, header_str = parse_input_file(
+ logfile_object, flags)
+ warning_messages, warning_links, warning_records = parallel_classify_warnings(
+ warning_lines_and_links, flags, project_names, project_patterns,
+ warn_patterns, use_google3, create_launch_subprocs_fn,
+ classify_warnings_fn)
+
+ html_writer.write_html(flags, project_names, warn_patterns, html_path,
+ warning_messages, warning_links, warning_records,
+ header_str)
+
+ return warning_messages, warning_links, warning_records, header_str
+
+
+def common_main(use_google3, create_launch_subprocs_fn, classify_warnings_fn,
+ logfile_object=None):
+ """Shared main function for Google3 and non-Google3 versions of warn.py."""
+ flags = parse_args(use_google3)
+ warn_patterns = get_warn_patterns(flags.platform)
+ project_list = get_project_list(flags.platform)
+
+ project_names = get_project_names(project_list)
+ project_patterns = [re.compile(p[1]) for p in project_list]
+
+  # html_path=None: when csv output is not requested, the html report is
+  # written to stdout by write_out_csv below.
+ warning_messages, warning_links, warning_records, header_str = process_log(
+ logfile=flags.log, flags=flags, project_names=project_names,
+ project_patterns=project_patterns, warn_patterns=warn_patterns,
+ html_path=None, use_google3=use_google3,
+ create_launch_subprocs_fn=create_launch_subprocs_fn,
+ classify_warnings_fn=classify_warnings_fn,
+ logfile_object=logfile_object)
+
+ html_writer.write_out_csv(flags, warn_patterns, warning_messages,
+ warning_links, warning_records, header_str,
+ project_names)
+
+  # Return these values so that the caller can use them if desired.
+ return flags, warning_messages, warning_records, warn_patterns
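+
+
+# A minimal sketch of how a non-Google3 caller might wire this up; the helper
+# names below are hypothetical and the real warn.py supplies its own launcher
+# and classifier (requires "import multiprocessing"):
+#
+#   def create_launch_subprocs(num_cpu, classify_fn, arg_groups, group_results):
+#     pool = multiprocessing.Pool(num_cpu)
+#     for cpu in range(num_cpu):
+#       group_results.append(pool.map(classify_fn, arg_groups[cpu]))
+#     return group_results
+#
+#   common_main(use_google3=False,
+#               create_launch_subprocs_fn=create_launch_subprocs,
+#               classify_warnings_fn=classify_group)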