Merge "ATest: GTest auto gen config support run-test-as."
diff --git a/Changes.md b/Changes.md
index baa5e6e..4aa7ea2 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,31 @@
# Build System Changes for Android.mk Writers
+## `DIST_DIR`, `dist_goal`, and `dist-for-goals` {#dist}
+
+`DIST_DIR` and `dist_goal` are no longer available when reading Android.mk
+files (or other build tasks). Always use `dist-for-goals` instead, which takes
+a PHONY goal, and a list of files to copy to `$DIST_DIR`. Whenever `dist` is
+specified, and the goal would be built (either explicitly on the command line,
+or as a dependency of something on the command line), that file will be copied
+into `$DIST_DIR`. For example,
+
+``` make
+$(call dist-for-goals,foo,bar/baz)
+```
+
+will copy `bar/baz` into `$DIST_DIR/baz` when `m foo dist` is run.
+
+### Renames during copy
+
+Instead of specifying just a file, a destination name can be specified,
+including subdirectories:
+
+``` make
+$(call dist-for-goals,foo,bar/baz:logs/foo.log)
+```
+
+will copy `bar/baz` into `$DIST_DIR/logs/foo.log` when `m foo dist` is run.
+
## `.PHONY` rule enforcement {#phony_targets}
There are several new warnings/errors meant to ensure the proper use of
diff --git a/CleanSpec.mk b/CleanSpec.mk
index a96dd83..39441e1 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -509,6 +509,12 @@
$(HOST_CROSS_OUT_INTERMEDIATES) $(2ND_HOST_CROSS_OUT_INTERMEDIATES) \
$(TARGET_OUT_INTERMEDIATES) $(2ND_TARGET_OUT_INTERMEDIATES)))
+# Remove strip.sh intermediates to save space
+$(call add-clean-step, find $(OUT_DIR) \( -name "*.so.debug" -o -name "*.so.dynsyms" -o -name "*.so.funcsyms" -o -name "*.so.keep_symbols" -o -name "*.so.mini_debuginfo.xz" \) -print0 | xargs -0 rm -f)
+
+# Clean up old ninja files
+$(call add-clean-step, rm -f $(OUT_DIR)/build-*-dist*.ninja)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/common/core.mk b/common/core.mk
new file mode 100644
index 0000000..e5264b0
--- /dev/null
+++ b/common/core.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Only use ANDROID_BUILD_SHELL to wrap around bash.
+# DO NOT use other shells such as zsh.
+ifdef ANDROID_BUILD_SHELL
+SHELL := $(ANDROID_BUILD_SHELL)
+else
+# Use bash, not whatever shell somebody has installed as /bin/sh
+# This is repeated from main.mk, since envsetup.sh runs this file
+# directly.
+SHELL := /bin/bash
+endif
+
+# Utility variables.
+empty :=
+space := $(empty) $(empty)
+comma := ,
+# Note that make will eat the newline just before endef.
+define newline
+
+
+endef
+# The pound character "#"
+define pound
+#
+endef
+# Unfortunately you can't simply define backslash as \ or \\.
+backslash := \a
+backslash := $(patsubst %a,%,$(backslash))
+
+# Prevent accidentally changing these variables
+.KATI_READONLY := SHELL empty space comma newline pound backslash
+
+# Basic warning/error wrappers. These will be redefined to include the local
+# module information when reading Android.mk files.
+define pretty-warning
+$(warning $(1))
+endef
+
+define pretty-error
+$(error $(1))
+endef
diff --git a/core/math.mk b/common/math.mk
similarity index 100%
rename from core/math.mk
rename to common/math.mk
diff --git a/core/strings.mk b/common/strings.mk
similarity index 100%
rename from core/strings.mk
rename to common/strings.mk
diff --git a/core/Makefile b/core/Makefile
index 2df67e9..c21c517 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -42,11 +42,14 @@
$(eval unique_product_copy_files_destinations += $(_dest))))
# Dump a list of overriden (and ignored PRODUCT_COPY_FILES entries)
-$(file >$(PRODUCT_OUT)/product_copy_files_ignored.txt,$(subst $(space),$(newline),$(strip $(product_copy_files_ignored))))
-ifdef dist_goal
-$(file >$(DIST_DIR)/logs/product_copy_files_ignored.txt,$(subst $(space),$(newline),$(strip $(product_copy_files_ignored))))
-endif
+pcf_ignored_file := $(PRODUCT_OUT)/product_copy_files_ignored.txt
+$(pcf_ignored_file): PRIVATE_IGNORED := $(sort $(product_copy_files_ignored))
+$(pcf_ignored_file):
+ echo "$(PRIVATE_IGNORED)" | tr " " "\n" >$@
+$(call dist-for-goals,droidcore,$(pcf_ignored_file):logs/$(notdir $(pcf_ignored_file)))
+
+pcf_ignored_file :=
product_copy_files_ignored :=
unique_product_copy_files_pairs :=
unique_product_copy_files_destinations :=
@@ -1635,15 +1638,13 @@
# Generate a file containing the keys that will be read by the
# recovery binary.
RECOVERY_INSTALL_OTA_KEYS := \
- $(call intermediates-dir-for,PACKAGING,ota_keys)/keys
-DUMPKEY_JAR := $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar
+ $(call intermediates-dir-for,PACKAGING,ota_keys)/otacerts.zip
$(RECOVERY_INSTALL_OTA_KEYS): PRIVATE_OTA_PUBLIC_KEYS := $(OTA_PUBLIC_KEYS)
$(RECOVERY_INSTALL_OTA_KEYS): extra_keys := $(patsubst %,%.x509.pem,$(PRODUCT_EXTRA_RECOVERY_KEYS))
-$(RECOVERY_INSTALL_OTA_KEYS): $(OTA_PUBLIC_KEYS) $(DUMPKEY_JAR) $(extra_keys)
- @echo "DumpPublicKey: $@ <= $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys)"
- @rm -rf $@
- @mkdir -p $(dir $@)
- $(JAVA) -jar $(DUMPKEY_JAR) $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys) > $@
+$(RECOVERY_INSTALL_OTA_KEYS): $(SOONG_ZIP) $(OTA_PUBLIC_KEYS) $(extra_keys)
+ $(hide) rm -f $@
+ $(hide) mkdir -p $(dir $@)
+ $(hide) $(SOONG_ZIP) -o $@ $(foreach key_file, $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys), -C $(dir $(key_file)) -f $(key_file))
RECOVERYIMAGE_ID_FILE := $(PRODUCT_OUT)/recovery.id
@@ -1674,7 +1675,8 @@
cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.fstab)
$(if $(strip $(recovery_wipe)), \
$(hide) cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.wipe)
- $(hide) cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/res/keys
+ $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
+ $(hide) cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security/otacerts.zip
$(hide) ln -sf prop.default $(TARGET_RECOVERY_ROOT_OUT)/default.prop
$(BOARD_RECOVERY_IMAGE_PREPARE)
$(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
@@ -1918,7 +1920,7 @@
build/make/tools/releasetools/build_image.py \
$(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) $(TARGET_OUT) \
$(systemimage_intermediates)/generated_system_image_info.txt \
- || ( mkdir -p $(DIST_DIR); cp $(INSTALLED_FILES_FILE) $(DIST_DIR)/installed-files-rescued.txt; \
+ || ( mkdir -p $${DIST_DIR}; cp $(INSTALLED_FILES_FILE) $${DIST_DIR}/installed-files-rescued.txt; \
exit 1 )
endef
@@ -2657,12 +2659,12 @@
endif
INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES := \
- $(BOARD_AVB_VBMETA_MAINLINE) \
+ $(BOARD_AVB_VBMETA_SYSTEM) \
$(BOARD_AVB_VBMETA_VENDOR)
# Not allowing the same partition to appear in multiple groups.
ifneq ($(words $(sort $(INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES))),$(words $(INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES)))
- $(error BOARD_AVB_VBMETA_MAINLINE and BOARD_AVB_VBMETA_VENDOR cannot have duplicates)
+ $(error BOARD_AVB_VBMETA_SYSTEM and BOARD_AVB_VBMETA_VENDOR cannot have duplicates)
endif
BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
@@ -2675,7 +2677,7 @@
ODM_FOOTER_ARGS := BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS
# Helper function that checks and sets required build variables for an AVB chained partition.
-# $(1): the partition to enable AVB chain, e.g., boot or system or vbmeta_mainline.
+# $(1): the partition to enable AVB chain, e.g., boot or system or vbmeta_system.
define _check-and-set-avb-chain-args
$(eval part := $(1))
$(eval PART=$(call to-upper,$(part)))
@@ -2698,7 +2700,7 @@
--chain_partition $(part):$($(_rollback_index_location)):$(AVB_CHAIN_KEY_DIR)/$(part).avbpubkey)
# Set rollback_index via footer args for non-chained vbmeta image. Chained vbmeta image will pick up
-# the index via a separate flag (e.g. BOARD_AVB_VBMETA_MAINLINE_ROLLBACK_INDEX).
+# the index via a separate flag (e.g. BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX).
$(if $(filter $(part),$(part:vbmeta_%=%)),\
$(eval _footer_args := $(PART)_FOOTER_ARGS) \
$(eval $($(_footer_args)) += --rollback_index $($(_rollback_index))))
@@ -2750,9 +2752,9 @@
$(eval $(call check-and-set-avb-args,recovery))
endif
-# Not using INSTALLED_VBMETA_MAINLINEIMAGE_TARGET as it won't be set yet.
-ifdef BOARD_AVB_VBMETA_MAINLINE
-$(eval $(call check-and-set-avb-args,vbmeta_mainline))
+# Not using INSTALLED_VBMETA_SYSTEMIMAGE_TARGET as it won't be set yet.
+ifdef BOARD_AVB_VBMETA_SYSTEM
+$(eval $(call check-and-set-avb-args,vbmeta_system))
endif
ifdef BOARD_AVB_VBMETA_VENDOR
@@ -2772,22 +2774,21 @@
endif
BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
-BOARD_AVB_MAKE_VBMETA_MAINLINE_IMAGE_ARGS += --padding_size 4096
+BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS += --padding_size 4096
BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS += --padding_size 4096
ifeq (eng,$(filter eng, $(TARGET_BUILD_VARIANT)))
+# We only need the flag in top-level vbmeta.img.
BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --set_hashtree_disabled_flag
-BOARD_AVB_MAKE_VBMETA_MAINLINE_IMAGE_ARGS += --set_hashtree_disabled_flag
-BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS += --set_hashtree_disabled_flag
endif
ifdef BOARD_AVB_ROLLBACK_INDEX
BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --rollback_index $(BOARD_AVB_ROLLBACK_INDEX)
endif
-ifdef BOARD_AVB_VBMETA_MAINLINE_ROLLBACK_INDEX
-BOARD_AVB_MAKE_VBMETA_MAINLINE_IMAGE_ARGS += \
- --rollback_index $(BOARD_AVB_VBMETA_MAINLINE_ROLLBACK_INDEX)
+ifdef BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX
+BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS += \
+ --rollback_index $(BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX)
endif
ifdef BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX
@@ -2821,9 +2822,9 @@
$(if $(BOARD_AVB_RECOVERY_KEY_PATH),\
$(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_RECOVERY_KEY_PATH) \
--output $(1)/recovery.avbpubkey)
- $(if $(BOARD_AVB_VBMETA_MAINLINE_KEY_PATH),\
- $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_MAINLINE_KEY_PATH) \
- --output $(1)/vbmeta_mainline.avbpubkey)
+ $(if $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH),\
+ $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH) \
+ --output $(1)/vbmeta_system.avbpubkey)
$(if $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH),\
$(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH) \
--output $(1)/vbmeta_vendor.avbpubkey)
@@ -2831,11 +2832,11 @@
# Builds a chained VBMeta image. This VBMeta image will contain the descriptors for the partitions
# specified in BOARD_AVB_VBMETA_<NAME>. The built VBMeta image will be included into the top-level
-# vbmeta image as a chained partition. For example, if a target defines `BOARD_AVB_VBMETA_MAINLINE
-# := system product_services`, `vbmeta_mainline.img` will be created that includes the descriptors
-# for `system.img` and `product_services.img`. `vbmeta_mainline.img` itself will be included into
+# vbmeta image as a chained partition. For example, if a target defines `BOARD_AVB_VBMETA_SYSTEM
+# := system product_services`, `vbmeta_system.img` will be created that includes the descriptors
+# for `system.img` and `product_services.img`. `vbmeta_system.img` itself will be included into
# `vbmeta.img` as a chained partition.
-# $(1): VBMeta image name, such as "vbmeta_mainline", "vbmeta_vendor" etc.
+# $(1): VBMeta image name, such as "vbmeta_system", "vbmeta_vendor" etc.
# $(2): Output filename.
define build-chained-vbmeta-image
$(call pretty,"Target chained vbmeta image: $@")
@@ -2847,13 +2848,13 @@
--output $@
endef
-ifdef BOARD_AVB_VBMETA_MAINLINE
-INSTALLED_VBMETA_MAINLINEIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta_mainline.img
-$(INSTALLED_VBMETA_MAINLINEIMAGE_TARGET): \
+ifdef BOARD_AVB_VBMETA_SYSTEM
+INSTALLED_VBMETA_SYSTEMIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta_system.img
+$(INSTALLED_VBMETA_SYSTEMIMAGE_TARGET): \
$(AVBTOOL) \
- $(call images-for-partitions,$(BOARD_AVB_VBMETA_MAINLINE)) \
- $(BOARD_AVB_VBMETA_MAINLINE_KEY_PATH)
- $(call build-chained-vbmeta-image,vbmeta_mainline)
+ $(call images-for-partitions,$(BOARD_AVB_VBMETA_SYSTEM)) \
+ $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH)
+ $(call build-chained-vbmeta-image,vbmeta_system)
endif
ifdef BOARD_AVB_VBMETA_VENDOR
@@ -2891,9 +2892,9 @@
$(INSTALLED_ODMIMAGE_TARGET) \
$(INSTALLED_DTBOIMAGE_TARGET) \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(INSTALLED_VBMETA_MAINLINEIMAGE_TARGET) \
+ $(INSTALLED_VBMETA_SYSTEMIMAGE_TARGET) \
$(INSTALLED_VBMETA_VENDORIMAGE_TARGET) \
- $(BOARD_AVB_VBMETA_MAINLINE_KEY_PATH) \
+ $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH) \
$(BOARD_AVB_VBMETA_VENDOR_KEY_PATH) \
$(BOARD_AVB_KEY_PATH)
$(build-vbmetaimage-target)
@@ -2939,18 +2940,20 @@
--metadata-size 65536 \
--metadata-slots $(if $(1),2,1) \
--device-size $(BOARD_SUPER_PARTITION_SIZE) \
- $(foreach name,$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
- --partition $(name)$(1):readonly:$(if $(2),$(call read-size-of-partitions,$(name)),0) \
- $(if $(2), --image $(name)$(1)=$(call images-for-partitions,$(name))) \
- $(if $(1), --partition $(name)_b:readonly:0) \
- )
+ $(foreach group,$(BOARD_SUPER_PARTITION_GROUPS), \
+ --group $(group):$(BOARD_$(call to-upper,$(group))_SIZE) \
+ $(foreach name,$(BOARD_$(call to-upper,$(group))_PARTITION_LIST), \
+ --partition $(name)$(1):readonly:$(if $(2),$(call read-size-of-partitions,$(name)),0):$(group) \
+ $(if $(2), --image $(name)$(1)=$(call images-for-partitions,$(name))) \
+ $(if $(1), --partition $(name)_b:readonly:0:$(group)) \
+ ))
endef
# $(1): output image path
# $(2): slot A suffix (_a or empty)
# $(3): include images or not (true or empty)
define build-superimage-target
- $(HOST_OUT_EXECUTABLES)/lpmake \
+ $(LPMAKE) \
$(call build-superimage-target-args,$(2),$(3)) \
--output $(1)
endef
@@ -3090,7 +3093,6 @@
$(HOST_OUT_EXECUTABLES)/zipalign \
$(HOST_OUT_EXECUTABLES)/bsdiff \
$(HOST_OUT_EXECUTABLES)/imgdiff \
- $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar \
$(HOST_OUT_JAVA_LIBRARIES)/signapk.jar \
$(HOST_OUT_JAVA_LIBRARIES)/BootSignature.jar \
$(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar \
@@ -3520,16 +3522,16 @@
$(hide) echo "avb_recovery_algorithm=$(BOARD_AVB_RECOVERY_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "avb_recovery_rollback_index_location=$(BOARD_AVB_RECOVERY_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
endif # BOARD_AVB_RECOVERY_KEY_PATH
-ifneq (,$(strip $(BOARD_AVB_VBMETA_MAINLINE)))
- $(hide) echo "avb_vbmeta_mainline=$(BOARD_AVB_VBMETA_MAINLINE)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "avb_vbmeta_mainline_args=$(BOARD_AVB_MAKE_VBMETA_MAINLINE_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "avb_vbmeta_mainline_key_path=$(BOARD_AVB_VBMETA_MAINLINE_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "avb_vbmeta_mainline_algorithm=$(BOARD_AVB_VBMETA_MAINLINE_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "avb_vbmeta_mainline_rollback_index_location=$(BOARD_AVB_VBMETA_MAINLINE_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
-endif # BOARD_AVB_VBMETA_MAINLINE
+ifneq (,$(strip $(BOARD_AVB_VBMETA_SYSTEM)))
+ $(hide) echo "avb_vbmeta_system=$(BOARD_AVB_VBMETA_SYSTEM)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_args=$(BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_key_path=$(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_algorithm=$(BOARD_AVB_VBMETA_SYSTEM_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_system_rollback_index_location=$(BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
+endif # BOARD_AVB_VBMETA_SYSTEM
ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
$(hide) echo "avb_vbmeta_vendor=$(BOARD_AVB_VBMETA_VENDOR)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_MAINLINE_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "avb_vbmeta_vendor_key_path=$(BOARD_AVB_VBMETA_VENDOR_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "avb_vbmeta_vendor_algorithm=$(BOARD_AVB_VBMETA_VENDOR_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "avb_vbmeta_vendor_rollback_index_location=$(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $(zip_root)/META/misc_info.txt
@@ -3940,7 +3942,7 @@
MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_verified_boot_params.sh
$(QEMU_VERIFIED_BOOT_PARAMS): $(INSTALLED_QEMU_SYSTEMIMAGE) $(MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH) $(INSTALLED_VBMETAIMAGE_TARGET) $(SGDISK_HOST) $(AVBTOOL)
@echo Creating $@
- (export SGDISK=$(SGDISK_HOST) AVBTOOL=$(AVBTOOL); $(MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH) $(INSTALLED_SYSTEMIMAGE_TARGET) $(INSTALLED_QEMU_SYSTEMIMAGE) $(QEMU_VERIFIED_BOOT_PARAMS))
+ (export SGDISK=$(SGDISK_HOST) AVBTOOL=$(AVBTOOL); $(MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH) $(INSTALLED_VBMETAIMAGE_TARGET) $(INSTALLED_QEMU_SYSTEMIMAGE) $(QEMU_VERIFIED_BOOT_PARAMS))
systemimage: $(QEMU_VERIFIED_BOOT_PARAMS)
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 8e8bfec..8608ca1 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -72,8 +72,15 @@
my_manifest_fixer_flags += --uses-non-sdk-api
endif
$(fixed_android_manifest): PRIVATE_MANIFEST_FIXER_FLAGS := $(my_manifest_fixer_flags)
+# These two libs are added as optional dependencies (<uses-library> with
+# android:required set to false). This is because they did not exist on pre-P
+# devices, but classes in them were in bootclasspath jars, etc. So making them
+# hard dependencies (android:required=true) would prevent apps from being
+# installed on such legacy devices.
+$(fixed_android_manifest): PRIVATE_OPTIONAL_SDK_LIB_NAMES := android.test.base android.test.mock
$(fixed_android_manifest): $(MANIFEST_FIXER)
$(fixed_android_manifest): $(main_android_manifest)
+ echo $(PRIVATE_OPTIONAL_SDK_LIB_NAMES) | tr ' ' '\n' > $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
@echo "Fix manifest: $@"
$(MANIFEST_FIXER) \
--minSdkVersion $(PRIVATE_MIN_SDK_VERSION) \
@@ -81,5 +88,8 @@
--raise-min-sdk-version \
$(PRIVATE_MANIFEST_FIXER_FLAGS) \
$(if (PRIVATE_EXPORTED_SDK_LIBS_FILE),\
- $$(cat $(PRIVATE_EXPORTED_SDK_LIBS_FILE) | sort -u | sed -e 's/^/\ --uses-library\ /' | tr '\n' ' ')) \
+ $$(cat $(PRIVATE_EXPORTED_SDK_LIBS_FILE) | grep -v -f $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional | sort -u | sed -e 's/^/\ --uses-library\ /' | tr '\n' ' ') \
+ $$(cat $(PRIVATE_EXPORTED_SDK_LIBS_FILE) | grep -f $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional | sort -u | sed -e 's/^/\ --optional-uses-library\ /' | tr '\n' ' ') \
+ ) \
$< $@
+ rm $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
diff --git a/core/config.mk b/core/config.mk
index b9174b3..0e4e1fb 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -17,40 +17,20 @@
$(error done)
endif
-# Only use ANDROID_BUILD_SHELL to wrap around bash.
-# DO NOT use other shells such as zsh.
-ifdef ANDROID_BUILD_SHELL
-SHELL := $(ANDROID_BUILD_SHELL)
-else
-# Use bash, not whatever shell somebody has installed as /bin/sh
-# This is repeated from main.mk, since envsetup.sh runs this file
-# directly.
-SHELL := /bin/bash
-endif
+BUILD_SYSTEM :=$= build/make/core
+BUILD_SYSTEM_COMMON :=$= build/make/common
-# Utility variables.
-empty :=
-space := $(empty) $(empty)
-comma := ,
-# Note that make will eat the newline just before endef.
-define newline
-
-
-endef
-# The pound character "#"
-define pound
-#
-endef
-# Unfortunately you can't simply define backslash as \ or \\.
-backslash := \a
-backslash := $(patsubst %a,%,$(backslash))
-
-# Prevent accidentally changing these variables
-.KATI_READONLY := SHELL empty space comma newline pound backslash
+include $(BUILD_SYSTEM_COMMON)/core.mk
# Mark variables that should be coming as environment variables from soong_ui
# as readonly
.KATI_READONLY := OUT_DIR TMPDIR BUILD_DATETIME_FILE
+ifdef CALLED_FROM_SETUP
+ .KATI_READONLY := CALLED_FROM_SETUP
+endif
+ifdef KATI_PACKAGE_MK_DIR
+ .KATI_READONLY := KATI_PACKAGE_MK_DIR
+endif
# Mark variables deprecated/obsolete
CHANGES_URL := https://android.googlesource.com/platform/build/+/master/Changes.md
@@ -109,6 +89,7 @@
TARGET_NDK_GCC_VERSION 2ND_TARGET_NDK_GCC_VERSION \
GLOBAL_CFLAGS_NO_OVERRIDE GLOBAL_CPPFLAGS_NO_OVERRIDE \
,GCC support has been removed. Use Clang instead)
+$(KATI_obsolete_var DIST_DIR dist_goal,Use dist-for-goals instead. See $(CHANGES_URL)#dist)
# This is marked as obsolete in envsetup.mk after reading the BoardConfig.mk
$(KATI_deprecate_export It is a global setting. See $(CHANGES_URL)#export_keyword)
@@ -121,9 +102,6 @@
ORIGINAL_MAKECMDGOALS := $(MAKECMDGOALS)
-dist_goal := $(strip $(filter dist,$(MAKECMDGOALS)))
-MAKECMDGOALS := $(strip $(filter-out dist,$(MAKECMDGOALS)))
-
UNAME := $(shell uname -sm)
SRC_TARGET_DIR := $(TOPDIR)build/target
@@ -138,9 +116,9 @@
# Set up efficient math functions which are used in make.
# Here since this file is included by envsetup as well as during build.
-include $(BUILD_SYSTEM)/math.mk
+include $(BUILD_SYSTEM_COMMON)/math.mk
-include $(BUILD_SYSTEM)/strings.mk
+include $(BUILD_SYSTEM_COMMON)/strings.mk
# Various mappings to avoid hard-coding paths all over the place
include $(BUILD_SYSTEM)/pathmap.mk
@@ -958,7 +936,6 @@
requirements := \
PRODUCT_USE_DYNAMIC_PARTITION_SIZE \
PRODUCT_BUILD_SUPER_PARTITION \
- PRODUCT_USE_FASTBOOTD \
$(foreach req,$(requirements),$(if $(filter false,$($(req))),\
$(error PRODUCT_USE_LOGICAL_PARTITIONS requires $(req) to be true)))
@@ -1169,6 +1146,7 @@
INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-light-greylist.txt
INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-dark-greylist.txt
INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-blacklist.txt
+INTERNAL_PLATFORM_HIDDENAPI_GREYLIST_METADATA := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-greylist.csv
# Missing optional uses-libraries so that the platform doesn't create build rules that depend on
# them. See setup_one_odex.mk.
diff --git a/core/definitions.mk b/core/definitions.mk
index 5a14826..baa2342 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -806,29 +806,13 @@
echo -e "$(ESC_BOLD)$(1): $(ESC_ERROR)error:$(ESC_RESET)$(ESC_BOLD)" $(2) "$(ESC_RESET)" >&2
endef
-# $(1): message to print
-define pretty-warning
-$(shell $(call echo-warning,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
-endef
-
-# $(1): message to print
-define pretty-error
-$(shell $(call echo-error,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
-$(error done)
-endef
-
###########################################################
-## Output the command lines, or not
+## Legacy showcommands compatibility
###########################################################
-ifeq ($(strip $(SHOW_COMMANDS)),)
define pretty
@echo $1
endef
-else
-define pretty
-endef
-endif
###########################################################
## Commands for including the dependency files the compiler generates
@@ -2679,6 +2663,10 @@
# Copy dex files, invoking $(HIDDENAPI) on them in the process.
# Also make the source dex file an input of the hiddenapi singleton rule in dex_preopt.mk.
+# Users can set UNSAFE_DISABLE_HIDDENAPI_FLAGS=true to skip this step. This is
+meant to speed up local incremental builds. Note that skipping this step changes
+Java semantics of the resulting dex bytecode. Use at your own risk.
+ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
define hiddenapi-copy-dex-files
$(2): $(1) $(HIDDENAPI) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
@@ -2692,9 +2680,17 @@
--blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
$(INTERNAL_PLATFORM_HIDDENAPI_PRIVATE_LIST): $(1)
-$(INTERNAL_PLATFORM_HIDDENAPI_PRIVATE_LIST): \
- PRIVATE_DEX_INPUTS := $$(PRIVATE_DEX_INPUTS) $(1)
+$(INTERNAL_PLATFORM_HIDDENAPI_PRIVATE_LIST): PRIVATE_DEX_INPUTS := $$(PRIVATE_DEX_INPUTS) $(1)
endef
+else # UNSAFE_DISABLE_HIDDENAPI_FLAGS
+define hiddenapi-copy-dex-files
+$(2): $(1)
+ echo "WARNING: skipping hiddenapi post-processing for $(1)" 1>&2
+ @rm -rf $(dir $(2))
+ @mkdir -p $(dir $(2))
+ find $(dir $(1)) -maxdepth 1 -name "classes*.dex" | xargs -I{} cp -f {} $(dir $(2))/
+endef
+endif # UNSAFE_DISABLE_HIDDENAPI_FLAGS
# Generate a greylist.txt from a classes.jar
define hiddenapi-generate-greylist-txt
@@ -2705,8 +2701,12 @@
$(3): $(1) $(CLASS2GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_PUBLIC_LIST)
$(CLASS2GREYLIST) --public-api-list $(INTERNAL_PLATFORM_HIDDENAPI_PUBLIC_LIST) $(1) \
--write-whitelist $(2) \
- --write-greylist $(3) \
- --write-greylist 26,28:$(4)
+ --write-greylist none,28:$(3) \
+ --write-greylist 26:$(4)
+
+$(5): $(1) $(CLASS2GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_PUBLIC_LIST)
+ $(CLASS2GREYLIST) --public-api-list $(INTERNAL_PLATFORM_HIDDENAPI_PUBLIC_LIST) $(1) \
+ --write-metadata-csv $(5)
$(INTERNAL_PLATFORM_HIDDENAPI_WHITELIST): $(2) $(3) $(4)
$(INTERNAL_PLATFORM_HIDDENAPI_WHITELIST): \
@@ -2714,6 +2714,10 @@
$(INTERNAL_PLATFORM_HIDDENAPI_WHITELIST): \
PRIVATE_GREYLIST_INPUTS := $$(PRIVATE_GREYLIST_INPUTS) $(3)
PRIVATE_DARKGREYLIST_INPUTS := $$(PRIVATE_DARKGREYLIST_INPUTS) $(4)
+$(INTERNAL_PLATFORM_HIDDENAPI_GREYLIST_METADATA): $(5)
+$(INTERNAL_PLATFORM_HIDDENAPI_GREYLIST_METADATA): \
+ PRIVATE_METADATA_INPUTS := $$(PRIVATE_METADATA_INPUTS) $(5)
+
endif
endef
diff --git a/core/distdir.mk b/core/distdir.mk
index c074186..5f40407 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -17,52 +17,53 @@
# When specifying "dist", the user has asked that we copy the important
# files from this build into DIST_DIR.
-ifdef dist_goal
-
-# $(1): source file
-# $(2): destination file
-# $(3): goals that should copy the file
-#
-define copy-one-dist-file
-$(3): $(2)
-$(2): $(1)
- @echo "Dist: $$@"
- $$(copy-file-to-new-target-with-cp)
-endef
-
-# A global variable to remember all dist'ed src:dst pairs.
-# So if a src:dst is already dist'ed by another goal,
-# we should just establish the dependency and don't really call the
-# copy-one-dist-file to avoid multiple rules for the same target.
+# list of all goals that depend on any dist files
+_all_dist_goals :=
+# pairs of goal:distfile
+_all_dist_goal_output_pairs :=
+# pairs of srcfile:distfile
_all_dist_src_dst_pairs :=
+
# Other parts of the system should use this function to associate
# certain files with certain goals. When those goals are built
# and "dist" is specified, the marked files will be copied to DIST_DIR.
#
-# $(1): a list of goals (e.g. droid, sdk, pdk, ndk)
+# $(1): a list of goals (e.g. droid, sdk, pdk, ndk). These must be PHONY
# $(2): the dist files to add to those goals. If the file contains ':',
# the text following the colon is the name that the file is copied
# to under the dist directory. Subdirs are ok, and will be created
# at copy time if necessary.
define dist-for-goals
+$(if $(strip $(2)), \
+ $(eval _all_dist_goals += $$(1))) \
$(foreach file,$(2), \
- $(eval fw := $(subst :,$(space),$(file))) \
- $(eval src := $(word 1,$(fw))) \
- $(eval dst := $(word 2,$(fw))) \
- $(eval dst := $(if $(dst),$(dst),$(notdir $(src)))) \
- $(if $(filter $(_all_dist_src_dst_pairs),$(src):$(dst)),\
- $(eval $(call add-dependency,$(1),$(DIST_DIR)/$(dst))),\
- $(eval $(call copy-one-dist-file,\
- $(src),$(DIST_DIR)/$(dst),$(1)))\
- $(eval _all_dist_src_dst_pairs += $(src):$(dst))\
- )\
-)
+ $(eval src := $(call word-colon,1,$(file))) \
+ $(eval dst := $(call word-colon,2,$(file))) \
+ $(if $(dst),,$(eval dst := $$(notdir $$(src)))) \
+ $(eval _all_dist_src_dst_pairs += $$(src):$$(dst)) \
+ $(foreach goal,$(1), \
+ $(eval _all_dist_goal_output_pairs += $$(goal):$$(dst))))
endef
-else # !dist_goal
+#------------------------------------------------------------------
+# To be used at the end of the build to collect all the uses of
+# dist-for-goals, and write them into a file for the packaging step to use.
-# empty definition when not building dist
-define dist-for-goals
+# $(1): The file to write
+define dist-write-file
+$(strip \
+ $(KATI_obsolete_var dist-for-goals,Cannot be used after dist-write-file) \
+ $(foreach goal,$(sort $(_all_dist_goals)), \
+ $(eval $$(goal): _dist_$$(goal))) \
+ $(shell mkdir -p $(dir $(1))) \
+ $(file >$(1).tmp, \
+ DIST_GOAL_OUTPUT_PAIRS := $(sort $(_all_dist_goal_output_pairs)) \
+ $(newline)DIST_SRC_DST_PAIRS := $(sort $(_all_dist_src_dst_pairs))) \
+ $(shell if ! cmp -s $(1).tmp $(1); then \
+ mv $(1).tmp $(1); \
+ else \
+ rm $(1).tmp; \
+ fi))
endef
-endif # !dist_goal
+.KATI_READONLY := dist-for-goals dist-write-file
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 96e7e2c..f5babb6 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -1036,11 +1036,6 @@
PER_ARCH_MODULE_CLASSES := SHARED_LIBRARIES STATIC_LIBRARIES EXECUTABLES GYP RENDERSCRIPT_BITCODE NATIVE_TESTS HEADER_LIBRARIES
.KATI_READONLY := COMMON_MODULE_CLASSES PER_ARCH_MODULE_CLASSES
-ifeq (,$(strip $(DIST_DIR)))
- DIST_DIR := $(OUT_DIR)/dist
-endif
-.KATI_READONLY := DIST_DIR
-
ifeq ($(CALLED_FROM_SETUP),true)
PRINT_BUILD_CONFIG ?= true
endif
diff --git a/core/java.mk b/core/java.mk
index c015e4a..30571b7 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -77,6 +77,7 @@
hiddenapi_whitelist_txt := $(intermediates.COMMON)/hiddenapi/whitelist.txt
hiddenapi_greylist_txt := $(intermediates.COMMON)/hiddenapi/greylist.txt
hiddenapi_darkgreylist_txt := $(intermediates.COMMON)/hiddenapi/darkgreylist.txt
+hiddenapi_greylist_metadata_csv := $(intermediates.COMMON)/hiddenapi/greylist.csv
ifeq ($(LOCAL_MODULE_CLASS)$(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),APPS)
# If this is an apk without any Java code (e.g. framework-res), we should skip compiling Java.
@@ -507,8 +508,8 @@
# dex later on. The difference is academic currently, as we don't proguard any
# bootclasspath code at the moment. If we were to do that, we should add keep
# rules for all members with the @UnsupportedAppUsage annotation.
- $(eval $(call hiddenapi-generate-greylist-txt, $(full_classes_pre_proguard_jar),$(hiddenapi_whitelist_txt),$(hiddenapi_greylist_txt),$(hiddenapi_darkgreylist_txt)))
- LOCAL_INTERMEDIATE_TARGETS += $(hiddenapi_whitelist_txt) $(hiddenapi_greylist_txt) $(hiddenapi_darkgreylist_txt)
+ $(eval $(call hiddenapi-generate-greylist-txt, $(full_classes_pre_proguard_jar),$(hiddenapi_whitelist_txt),$(hiddenapi_greylist_txt),$(hiddenapi_darkgreylist_txt),$(hiddenapi_greylist_metadata_csv)))
+ LOCAL_INTERMEDIATE_TARGETS += $(hiddenapi_whitelist_txt) $(hiddenapi_greylist_txt) $(hiddenapi_darkgreylist_txt) $(hiddenapi_greylist_metadata_csv)
$(eval $(call hiddenapi-copy-dex-files,$(built_dex_intermediate),$(built_dex_hiddenapi)))
built_dex_copy_from := $(built_dex_hiddenapi)
else # !is_boot_jar
diff --git a/core/main.mk b/core/main.mk
index 7f673e9..6ff5f93 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -36,8 +36,6 @@
TOP := .
TOPDIR :=
-BUILD_SYSTEM := $(TOPDIR)build/make/core
-
# This is the default target. It must be the first declared target.
.PHONY: droid
DEFAULT_GOAL := droid
@@ -48,7 +46,7 @@
# Set up various standard variables based on configuration
# and host information.
-include $(BUILD_SYSTEM)/config.mk
+include build/make/core/config.mk
ifneq ($(filter $(dont_bother_goals), $(MAKECMDGOALS)),)
dont_bother := true
@@ -419,6 +417,19 @@
ENFORCE_RRO_SOURCES :=
endif
+# Color-coded warnings including current module info
+# $(1): message to print
+define pretty-warning
+$(shell $(call echo-warning,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
+endef
+
+# Color-coded errors including current module info
+# $(1): message to print
+define pretty-error
+$(shell $(call echo-error,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
+$(error done)
+endef
+
subdir_makefiles_inc := .
FULL_BUILD :=
@@ -493,6 +504,18 @@
# -------------------------------------------------------------------
# -------------------------------------------------------------------
+# Use basic warning/error messages now that LOCAL_MODULE_MAKEFILE
+# and LOCAL_MODULE aren't useful anymore.
+# -------------------------------------------------------------------
+define pretty-warning
+$(warning $(1))
+endef
+
+define pretty-error
+$(error $(1))
+endef
+
+# -------------------------------------------------------------------
# Enforce to generate all RRO packages for modules having resource
# overlays.
# -------------------------------------------------------------------
@@ -1010,8 +1033,22 @@
ifdef FULL_BUILD
product_FILES := $(call product-installed-files, $(INTERNAL_PRODUCT))
+ # WARNING: The product_MODULES variable is depended on by external files.
+ product_MODULES := $(_pif_modules)
# Verify the artifact path requirements made by included products.
+
+ # Fakes don't get installed, and host files are irrelevant.
+ static_whitelist_patterns := $(TARGET_OUT_FAKE)/% $(HOST_OUT)/%
+ # RROs become REQUIRED by the source module, but are always placed on the vendor partition.
+ static_whitelist_patterns += %__auto_generated_rro.apk
+ ifeq (true,$(BOARD_USES_SYSTEM_OTHER_ODEX))
+ # Allow system_other odex space optimization.
+ static_whitelist_patterns += \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.odex \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.vdex \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.art
+ endif
all_offending_files :=
$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
$(eval requirements := $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENTS)) \
@@ -1020,10 +1057,7 @@
$(eval path_patterns := $(call resolve-product-relative-paths,$(requirements),%)) \
$(eval whitelist_patterns := $(call resolve-product-relative-paths,$(whitelist))) \
$(eval files := $(call product-installed-files, $(makefile))) \
- $(eval files := $(filter-out $(TARGET_OUT_FAKE)/% $(HOST_OUT)/%,$(files))) \
- $(eval # RROs become REQUIRED by the source module, but are always placed on the vendor partition.) \
- $(eval files := $(filter-out %__auto_generated_rro.apk,$(files))) \
- $(eval offending_files := $(filter-out $(path_patterns) $(whitelist_patterns),$(files))) \
+ $(eval offending_files := $(filter-out $(path_patterns) $(whitelist_patterns) $(static_whitelist_patterns),$(files))) \
$(call maybe-print-list-and-error,$(offending_files),$(makefile) produces files outside its artifact path requirement.) \
$(eval unused_whitelist := $(filter-out $(files),$(whitelist_patterns))) \
$(call maybe-print-list-and-error,$(unused_whitelist),$(makefile) includes redundant whitelist entries in its artifact path requirement.) \
@@ -1034,9 +1068,13 @@
$(eval whitelist := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST)) \
$(eval whitelist_patterns := $(call resolve-product-relative-paths,$(whitelist))) \
$(eval offending_files := $(filter-out $(whitelist_patterns),$(files_in_requirement))) \
- $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS),\
- $(call maybe-print-list-and-error,$(offending_files),$(INTERNAL_PRODUCT) produces files inside $(makefile)s artifact path requirement.) \
- $(eval unused_whitelist := $(filter-out $(extra_files),$(whitelist_patterns))) \
+ $(eval enforcement := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS)) \
+ $(if $(enforcement),\
+ $(call maybe-print-list-and-error,$(offending_files),\
+ $(INTERNAL_PRODUCT) produces files inside $(makefile)s artifact path requirement. \
+ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT)) \
+ $(eval unused_whitelist := $(if $(filter true strict,$(enforcement)),\
+ $(foreach p,$(whitelist_patterns),$(if $(filter $(p),$(extra_files)),,$(p))))) \
$(call maybe-print-list-and-error,$(unused_whitelist),$(INTERNAL_PRODUCT) includes redundant artifact path requirement whitelist entries.) \
) \
)
@@ -1454,6 +1492,8 @@
ndk: $(SOONG_OUT_DIR)/ndk.timestamp
.PHONY: ndk
+$(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
+
$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
endif # KATI
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index ca2dcee..684ab9f 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -7,7 +7,7 @@
KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
# Modifier goals we don't need to pass to Ninja.
-NINJA_EXCLUDE_GOALS := all dist APP-% PRODUCT-%
+NINJA_EXCLUDE_GOALS := all APP-% PRODUCT-%
# A list of goals which affect parsing of makefiles and we need to pass to Kati.
PARSE_TIME_MAKE_GOALS := \
@@ -28,7 +28,6 @@
custom_images \
deps-license \
dicttool_aosp \
- dist \
dump-products \
eng \
fusion \
diff --git a/core/product.mk b/core/product.mk
index d1c74e7..f9f8d60 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -205,10 +205,10 @@
PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE \
PRODUCT_USE_LOGICAL_PARTITIONS \
PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS \
+ PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT \
PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST \
PRODUCT_USE_DYNAMIC_PARTITION_SIZE \
PRODUCT_BUILD_SUPER_PARTITION \
- PRODUCT_USE_FASTBOOTD \
PRODUCT_FORCE_PRODUCT_MODULES_TO_SYSTEM_PARTITION \
define dump-product
diff --git a/core/product_config.mk b/core/product_config.mk
index 7cbea91..27af09e 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -525,10 +525,6 @@
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BUILD_SUPER_PARTITION)),\
$(PRODUCT_USE_LOGICAL_PARTITIONS))
.KATI_READONLY := PRODUCT_BUILD_SUPER_PARTITION
-PRODUCT_USE_FASTBOOTD := $(or \
- $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_USE_FASTBOOTD)),\
- $(PRODUCT_USE_LOGICAL_PARTITIONS))
-.KATI_READONLY := PRODUCT_USE_FASTBOOTD
# List of modules that should be forcefully unmarked from being LOCAL_PRODUCT_MODULE, and hence
# installed on /system directory by default.
diff --git a/core/soong_config.mk b/core/soong_config.mk
index e61aad0..2f978fa 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -102,7 +102,6 @@
$(call add_json_list, CFIIncludePaths, $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
$(call add_json_list, IntegerOverflowExcludePaths, $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
-$(call add_json_bool, UseClangLld, $(call invert_bool,$(filter 0 false,$(USE_CLANG_LLD))))
$(call add_json_bool, ClangTidy, $(filter 1 true,$(WITH_TIDY)))
$(call add_json_str, TidyChecks, $(WITH_TIDY_CHECKS))
@@ -141,8 +140,6 @@
$(call add_json_bool, UseGoma, $(filter-out false,$(USE_GOMA)))
$(call add_json_bool, Arc, $(filter true,$(TARGET_ARC)))
-$(call add_json_str, DistDir, $(if $(dist_goal), $(DIST_DIR)))
-
$(call add_json_list, NamespacesToExport, $(PRODUCT_SOONG_NAMESPACES))
$(call add_json_list, PgoAdditionalProfileDirs, $(PGO_ADDITIONAL_PROFILE_DIRS))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 18a09fb..20bfc66 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -22,6 +22,7 @@
hiddenapi_whitelist_txt := $(intermediates.COMMON)/hiddenapi/whitelist.txt
hiddenapi_greylist_txt := $(intermediates.COMMON)/hiddenapi/greylist.txt
hiddenapi_darkgreylist_txt := $(intermediates.COMMON)/hiddenapi/darkgreylist.txt
+hiddenapi_greylist_metadata_csv := $(intermediates.COMMON)/hiddenapi/greylist.csv
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_jar)))
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_pre_proguard_jar)))
@@ -79,7 +80,7 @@
# We use full_classes_jar here, which is the post-proguard jar (on the basis that we also
# have a full_classes_pre_proguard_jar). This is consistent with the equivalent code in
# java.mk.
- $(eval $(call hiddenapi-generate-greylist-txt,$(full_classes_jar),$(hiddenapi_whitelist_txt),$(hiddenapi_greylist_txt),$(hiddenapi_darkgreylist_txt)))
+ $(eval $(call hiddenapi-generate-greylist-txt,$(full_classes_jar),$(hiddenapi_whitelist_txt),$(hiddenapi_greylist_txt),$(hiddenapi_darkgreylist_txt),$(hiddenapi_greylist_metadata_csv)))
$(eval $(call hiddenapi-copy-soong-jar,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
else # !is_boot_jar
$(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
diff --git a/core/tasks/check_emu_boot.mk b/core/tasks/check_emu_boot.mk
deleted file mode 100644
index 4870677..0000000
--- a/core/tasks/check_emu_boot.mk
+++ /dev/null
@@ -1,23 +0,0 @@
-check_emu_boot0 := $(DIST_DIR)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)-emulator-boot-test-result.txt
-$(check_emu_boot0) : PRIVATE_PREFIX := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
-$(check_emu_boot0) : PRIVATE_EMULATOR_BOOT_TEST_SH := device/generic/goldfish/tools/emulator_boot_test.sh
-$(check_emu_boot0) : PRIVATE_BOOT_COMPLETE_STRING := "emulator: INFO: boot completed"
-$(check_emu_boot0) : PRIVATE_BOOT_FAIL_STRING := "emulator: ERROR: fail to boot after"
-$(check_emu_boot0) : PRIVATE_SUCCESS_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-SUCCESS.txt
-$(check_emu_boot0) : PRIVATE_FAIL_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-FAIL.txt
-$(check_emu_boot0) : $(INSTALLED_QEMU_SYSTEMIMAGE) $(INSTALLED_QEMU_VENDORIMAGE) \
- $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(PRODUCT_OUT)/userdata.img) \
- $(PRODUCT_OUT)/ramdisk.img device/generic/goldfish/tools/emulator_boot_test.sh
- @mkdir -p $(dir $(check_emu_boot0))
- $(hide) rm -f $(check_emu_boot0)
- $(hide) rm -f $(PRIVATE_SUCCESS_FILE)
- $(hide) rm -f $(PRIVATE_FAIL_FILE)
- (export ANDROID_PRODUCT_OUT=$$(cd $(PRODUCT_OUT);pwd);\
- export ANDROID_BUILD_TOP=$$(pwd);\
- $(PRIVATE_EMULATOR_BOOT_TEST_SH) > $(check_emu_boot0))
- (if grep -q $(PRIVATE_BOOT_COMPLETE_STRING) $(check_emu_boot0);\
- then echo boot_succeeded > $(PRIVATE_SUCCESS_FILE); fi)
- (if grep -q $(PRIVATE_BOOT_FAIL_STRING) $(check_emu_boot0);\
- then echo boot_failed > $(PRIVATE_FAIL_FILE); fi)
-.PHONY: check_emu_boot
-check_emu_boot: $(check_emu_boot0)
diff --git a/core/tasks/collect_gpl_sources.mk b/core/tasks/collect_gpl_sources.mk
index fdbf6c9..acbe9be 100644
--- a/core/tasks/collect_gpl_sources.mk
+++ b/core/tasks/collect_gpl_sources.mk
@@ -12,12 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-ifdef dist_goal
-
# The rule below doesn't have dependencies on the files that it copies,
-# so manually generate directly into the DIST_DIR directory that is always
-# wiped between dist builds.
-gpl_source_tgz := $(DIST_DIR)/gpl_source.tgz
+# so manually generate into a PACKAGING intermediate dir, which is wiped
+# in installclean between incremental builds on build servers.
+gpl_source_tgz := $(call intermediates-dir-for,PACKAGING,gpl_source)/gpl_source.tgz
# FORCE since we can't know whether any of the sources changed
$(gpl_source_tgz): PRIVATE_PATHS := $(sort $(patsubst %/, %, $(dir $(ALL_GPL_MODULE_LICENSE_FILES))))
@@ -26,8 +24,4 @@
$(hide) tar cfz $@ --exclude ".git*" $(PRIVATE_PATHS)
# Dist the tgz only if we are doing a full build
-ifeq (,$(TARGET_BUILD_APPS))
-droidcore: $(gpl_source_tgz)
-endif
-
-endif # dist_goal
+$(call dist-for-goals,droidcore,$(gpl_source_tgz))
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index 8baac5a..122161b 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -12,10 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-ifndef ONE_SHOT_MAKEFILE
-
.PHONY: sdk_addon
+ifndef ONE_SHOT_MAKEFILE
+
# If they didn't define PRODUCT_SDK_ADDON_NAME, then we won't define
# any of these rules.
addon_name := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_NAME))
@@ -70,6 +70,7 @@
$(addon_dir_img):$(INSTALLED_QEMU_VENDORIMAGE):images/$(TARGET_CPU_ABI)/vendor.img \
$(addon_dir_img):$(BUILT_RAMDISK_TARGET):images/$(TARGET_CPU_ABI)/ramdisk.img \
$(addon_dir_img):$(PRODUCT_OUT)/system/build.prop:images/$(TARGET_CPU_ABI)/build.prop \
+ $(addon_dir_img):device/generic/goldfish/data/etc/userdata.img:images/$(TARGET_CPU_ABI)/userdata.img \
$(addon_dir_img):$(target_notice_file_txt):images/$(TARGET_CPU_ABI)/NOTICE.txt \
$(addon_dir_img):$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP):images/source.properties
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index d2433ea..57a5cf9 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -35,7 +35,6 @@
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util-tests.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-common-util-tests.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-tradefed-tests.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/host-libprotobuf-java-full.jar \
$(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed).jar \
$(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed)-tests.jar \
$(HOST_OUT_EXECUTABLES)/$(test_suite_tradefed) \
diff --git a/core/use_lld_setup.mk b/core/use_lld_setup.mk
index 5f0f412..2026f31 100644
--- a/core/use_lld_setup.mk
+++ b/core/use_lld_setup.mk
@@ -1,21 +1,16 @@
#############################################################
-## Set up flags based on USE_CLANG_LLD and LOCAL_USE_CLANG_LLD.
-## Input variables: USE_CLANG_LLD,LOCAL_USE_CLANG_LLD.
+## Set up flags based on LOCAL_USE_CLANG_LLD.
+## Input variables: LOCAL_USE_CLANG_LLD
## Output variables: my_use_clang_lld
#############################################################
# Use LLD by default.
-# Do not use LLD if LOCAL_USE_CLANG_LLD is false or 0,
-# of if LOCAL_USE_CLANG_LLD is not set and USE_CLANG_LLD is 0 or false.
+# Do not use LLD if LOCAL_USE_CLANG_LLD is false or 0
my_use_clang_lld := true
ifneq (,$(LOCAL_USE_CLANG_LLD))
ifneq (,$(filter 0 false,$(LOCAL_USE_CLANG_LLD)))
my_use_clang_lld := false
endif
-else
- ifneq (,$(filter 0 false,$(USE_CLANG_LLD)))
- my_use_clang_lld := false
- endif
endif
# Do not use LLD for Darwin host executables or shared libraries. See
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index e3cf13d..42a3bea 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -249,7 +249,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2018-08-05
+ PLATFORM_SECURITY_PATCH := 2018-09-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
diff --git a/envsetup.sh b/envsetup.sh
index 4579bef..a4d950e 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1574,9 +1574,12 @@
}
# Zsh needs bashcompinit called to support bash-style completion.
-function add_zsh_completion() {
- autoload -U compinit && compinit
- autoload -U bashcompinit && bashcompinit
+function enable_zsh_completion() {
+ # Don't override user's options if bash-style completion is already enabled.
+ if ! declare -f complete >/dev/null; then
+ autoload -U compinit && compinit
+ autoload -U bashcompinit && bashcompinit
+ fi
}
function validate_current_shell() {
@@ -1587,7 +1590,7 @@
;;
*zsh*)
function check_type() { type "$1"; }
- add_zsh_completion ;;
+ enable_zsh_completion ;;
*)
echo -e "WARNING: Only bash and zsh are supported.\nUse of other shell would lead to erroneous results."
;;
diff --git a/packaging/distdir.mk b/packaging/distdir.mk
new file mode 100644
index 0000000..264a8b0
--- /dev/null
+++ b/packaging/distdir.mk
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# From the Android.mk pass:
+DIST_GOAL_OUTPUT_PAIRS :=
+DIST_SRC_DST_PAIRS :=
+include $(KATI_PACKAGE_MK_DIR)/dist.mk
+
+$(foreach pair,$(DIST_GOAL_OUTPUT_PAIRS), \
+ $(eval goal := $(call word-colon,1,$(pair))) \
+ $(eval output := $(call word-colon,2,$(pair))) \
+ $(eval .PHONY: _dist_$$(goal)) \
+ $(if $(call streq,$(DIST),true),\
+ $(eval _dist_$$(goal): $$(DIST_DIR)/$$(output)), \
+ $(eval _dist_$$(goal):)))
+
+define copy-one-dist-file
+$(2): $(1)
+ @echo "Dist: $$@"
+ rm -f $$@
+ cp $$< $$@
+endef
+
+ifeq ($(DIST),true)
+ $(foreach pair,$(DIST_SRC_DST_PAIRS), \
+ $(eval src := $(call word-colon,1,$(pair))) \
+ $(eval dst := $(DIST_DIR)/$(call word-colon,2,$(pair))) \
+ $(eval $(call copy-one-dist-file,$(src),$(dst))))
+endif
+
+copy-one-dist-file :=
+DIST_GOAL_OUTPUT_PAIRS :=
+DIST_SRC_DST_PAIRS :=
diff --git a/packaging/main.mk b/packaging/main.mk
new file mode 100644
index 0000000..0b746a8
--- /dev/null
+++ b/packaging/main.mk
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Create a default rule. This is unused currently, as the real default rule is
+# still in the Kati build step.
+.PHONY: _packaging_default_rule_
+_packaging_default_rule_:
+
+ifndef KATI
+$(error Only Kati is supported.)
+endif
+
+$(info [1/3] initializing packaging system ...)
+
+.KATI_READONLY := KATI_PACKAGE_MK_DIR
+
+include build/make/common/core.mk
+include build/make/common/strings.mk
+
+$(info [2/3] including distdir.mk ...)
+
+include build/make/packaging/distdir.mk
+
+$(info [3/3] writing packaging rules ...)
diff --git a/target/board/generic/device.mk b/target/board/generic/device.mk
index a75bd07..0a32415 100644
--- a/target/board/generic/device.mk
+++ b/target/board/generic/device.mk
@@ -14,17 +14,6 @@
# limitations under the License.
#
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/camera/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_audio.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_telephony.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_video.xml \
- device/generic/goldfish/camera/media_codecs.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs.xml \
- hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
-
# NFC:
# Provide default libnfc-nci.conf file for devices that does not have one in
# vendor/etc because aosp system image (of aosp_$arch products) is going to
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 25e51ba..1b6429c 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -61,7 +61,7 @@
# cleaned up all device specific directories under root!
# TODO(b/111434759, b/111287060) SoC specific hacks
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/persist:/persist
BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt:/firmware
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index 8bd6a8b..2004624 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -14,16 +14,6 @@
# limitations under the License.
#
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/camera/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_audio.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_telephony.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_video.xml \
- device/generic/goldfish/camera/media_codecs.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs.xml
-
# NFC:
# Provide default libnfc-nci.conf file for devices that does not have one in
# vendor/etc because aosp system image (of aosp_$arch products) is going to
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index fc6b582..88b90a8 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -34,7 +34,9 @@
# TODO(jiyong) These might be SoC specific.
BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
# Set this to create /cache mount point for non-A/B devices that mounts /cache.
# The partition size doesn't matter, just to make build pass.
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
index 7d9ea9c..3d14842 100644
--- a/target/board/generic_arm_ab/BoardConfig.mk
+++ b/target/board/generic_arm_ab/BoardConfig.mk
@@ -28,7 +28,9 @@
# TODO(jiyong) These might be SoC specific.
BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
# Set this to create /cache mount point for non-A/B devices that mounts /cache.
# The partition size doesn't matter, just to make build pass.
diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk
index fa2d472..0a32415 100644
--- a/target/board/generic_x86/device.mk
+++ b/target/board/generic_x86/device.mk
@@ -14,16 +14,6 @@
# limitations under the License.
#
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
- device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
-
# NFC:
# Provide default libnfc-nci.conf file for devices that does not have one in
# vendor/etc because aosp system image (of aosp_$arch products) is going to
@@ -32,7 +22,3 @@
# NFC configuration file should be in vendor/etc, instead of system/etc
PRODUCT_COPY_FILES += \
device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
-
-PRODUCT_PACKAGES := \
- audio.primary.goldfish \
- vibrator.goldfish
diff --git a/target/board/generic_x86_64/device.mk b/target/board/generic_x86_64/device.mk
index fa2d472..0a32415 100755
--- a/target/board/generic_x86_64/device.mk
+++ b/target/board/generic_x86_64/device.mk
@@ -14,16 +14,6 @@
# limitations under the License.
#
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
- frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
- device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
-
# NFC:
# Provide default libnfc-nci.conf file for devices that does not have one in
# vendor/etc because aosp system image (of aosp_$arch products) is going to
@@ -32,7 +22,3 @@
# NFC configuration file should be in vendor/etc, instead of system/etc
PRODUCT_COPY_FILES += \
device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
-
-PRODUCT_PACKAGES := \
- audio.primary.goldfish \
- vibrator.goldfish
diff --git a/target/board/generic_x86_arm/BoardConfig.mk b/target/board/generic_x86_arm/BoardConfig.mk
index d1e4884..8e70b25 100644
--- a/target/board/generic_x86_arm/BoardConfig.mk
+++ b/target/board/generic_x86_arm/BoardConfig.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2016 The Android Open Source Project
+# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,11 +13,7 @@
# limitations under the License.
#
-# Configuration for generic_x86 + arm libraries needed by binary translation.
-
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
+# x86 emulator specific definitions
TARGET_CPU_ABI := x86
TARGET_ARCH := x86
TARGET_ARCH_VARIANT := x86
@@ -28,39 +24,27 @@
TARGET_2ND_ARCH_VARIANT := armv7-a
TARGET_2ND_CPU_VARIANT := generic
-# Tell the build system this isn't a typical 64bit+32bit multilib configuration.
+TARGET_CPU_ABI_LIST := x86 armeabi-v7a armeabi
TARGET_TRANSLATE_2ND_ARCH := true
BUILD_BROKEN_DUP_RULES := true
-# no hardware camera
-USE_CAMERA_STUB := true
-# Enable dex-preoptimization to speed up the first boot sequence
-# of an SDK AVD. Note that this operation only works on Linux for now
-ifeq ($(HOST_OS),linux)
- ifeq ($(WITH_DEXPREOPT),)
- WITH_DEXPREOPT := true
- WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY := false
- endif
-endif
+include build/make/target/board/BoardConfigEmuCommon.mk
+include build/make/target/board/BoardConfigGsiCommon.mk
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
+# Resize to 4G to accommodate ASAN and CTS
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
-# Build OpenGLES emulation host and guest libraries
-BUILD_EMULATOR_OPENGL := true
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
-
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192 # 1.75 GB
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
+# Wifi.
+BOARD_WLAN_DEVICE := emulator
+BOARD_HOSTAPD_DRIVER := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP := "/dev/null"
diff --git a/target/board/generic_x86_arm/README.txt b/target/board/generic_x86_arm/README.txt
new file mode 100644
index 0000000..05f7ca2
--- /dev/null
+++ b/target/board/generic_x86_arm/README.txt
@@ -0,0 +1,10 @@
+The "generic_x86_arm" product defines a non-hardware-specific IA target
+without a kernel or bootloader.
+
+It can be used to build the entire user-level system, and
+will work with the IA version of the emulator.
+
+It is not a product "base class"; no other products inherit
+from it or use it in any way.
+
+Third party arm to x86 translator has to be installed as well
diff --git a/target/board/generic_x86_arm/device.mk b/target/board/generic_x86_arm/device.mk
new file mode 100644
index 0000000..0a32415
--- /dev/null
+++ b/target/board/generic_x86_arm/device.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# NFC:
+# Provide default libnfc-nci.conf file for devices that does not have one in
+# vendor/etc because aosp system image (of aosp_$arch products) is going to
+# be used as GSI.
+# May need to remove the following for newly launched devices in P since this
+# NFC configuration file should be in vendor/etc, instead of system/etc
+PRODUCT_COPY_FILES += \
+ device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
diff --git a/target/board/generic_x86_arm/system.prop b/target/board/generic_x86_arm/system.prop
new file mode 100644
index 0000000..64829f3
--- /dev/null
+++ b/target/board/generic_x86_arm/system.prop
@@ -0,0 +1,5 @@
+#
+# system.prop for generic sdk
+#
+
+rild.libpath=/vendor/lib/libreference-ril.so
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index 19f57e8..b921c97 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -17,27 +17,32 @@
# aosp_x86 with arm libraries needed by binary translation.
+# The system image of aosp_x86-userdebug is a GSI for the devices with:
+# - x86 32 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
+-include device/generic/goldfish/x86-vendor.mk
+
include $(SRC_TARGET_DIR)/product/full_x86.mk
-# arm libraries. This is the list of shared libraries included in the NDK.
-# Their dependency libraries will be automatically pulled in.
+# Enable dynamic partition size
+PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+
+# Enable A/B update
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS := system
PRODUCT_PACKAGES += \
- libandroid_arm \
- libaaudio_arm \
- libc_arm \
- libdl_arm \
- libEGL_arm \
- libGLESv1_CM_arm \
- libGLESv2_arm \
- libGLESv3_arm \
- libjnigraphics_arm \
- liblog_arm \
- libm_arm \
- libmediandk_arm \
- libOpenMAXAL_arm \
- libstdc++_arm \
- libOpenSLES_arm \
- libz_arm \
+ update_engine \
+ update_verifier
+
+# Needed by Pi newly launched device to pass VtsTrebleSysProp on GSI
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
+
+# Support additional P vendor interface
+PRODUCT_EXTRA_VNDK_VERSIONS := 28
PRODUCT_NAME := aosp_x86_arm
PRODUCT_DEVICE := generic_x86_arm
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 11f5fe4..a3c9ac7 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -74,6 +74,8 @@
fsck_msdos \
fs_config_files_system \
fs_config_dirs_system \
+ heapprofd \
+ heapprofd_client \
gatekeeperd \
healthd \
hid \
@@ -85,9 +87,9 @@
incidentd \
incident_helper \
incident_report \
- init \
init.environ.rc \
init.rc \
+ init_system \
input \
installd \
iorapd \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 1b25f27..9bb45d1 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -34,6 +34,7 @@
fs_config_dirs_nonsystem \
gralloc.default \
group \
+ init_vendor \
libbundlewrapper \
libclearkeycasplugin \
libdownmix \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index 8d0611f..ed6dcc9 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -27,6 +27,7 @@
DMService \
LiveWallpapersPicker \
PartnerBookmarksProvider \
+ PresencePolling \
RcsService \
SafetyRegulatoryInfo \
Stk \
@@ -40,6 +41,10 @@
PRODUCT_PACKAGES += \
netutils-wrapper-1.0 \
+# Charger images
+PRODUCT_PACKAGES += \
+ charger_res_images \
+
# system_other support
PRODUCT_PACKAGES += \
cppreopts.sh \
@@ -50,22 +55,28 @@
audio.a2dp.default \
audio.hearing_aid.default \
+PRODUCT_PACKAGES_DEBUG += \
+ avbctl \
+ bootctl \
+ tinyplay \
+ tinycap \
+ tinymix \
+ tinypcminfo \
+ update_engine_client \
+
# Enable dynamic partition size
PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
PRODUCT_NAME := mainline_system
PRODUCT_BRAND := generic
-PRODUCT_SHIPPING_API_LEVEL := 28
_base_mk_whitelist :=
_my_whitelist := $(_base_mk_whitelist)
-# Both /system and / are in system.img when PRODUCT_SHIPPING_API_LEVEL>=28.
-# Though we do have a new ramdisk partition for logical partitions.
+# For mainline, system.img should be mounted at /, so we include ROOT here.
_my_paths := \
- $(TARGET_COPY_OUT_ROOT) \
- $(TARGET_COPY_OUT_SYSTEM) \
- $(TARGET_COPY_OUT_RAMDISK) \
+ $(TARGET_COPY_OUT_ROOT)/ \
+ $(TARGET_COPY_OUT_SYSTEM)/ \
$(call require-artifacts-in-path, $(_my_paths), $(_my_whitelist))
diff --git a/target/product/vndk/current.txt b/target/product/vndk/current.txt
index 7d8409b..6120e9d 100644
--- a/target/product/vndk/current.txt
+++ b/target/product/vndk/current.txt
@@ -245,8 +245,6 @@
VNDK-core: libtinyxml2.so
VNDK-core: libui.so
VNDK-core: libusbhost.so
-VNDK-core: libvixl-arm.so
-VNDK-core: libvixl-arm64.so
VNDK-core: libvorbisidec.so
VNDK-core: libwifi-system-iface.so
VNDK-core: libxml2.so
diff --git a/tools/atree/files.cpp b/tools/atree/files.cpp
index d5c8a97..b90f8b3 100644
--- a/tools/atree/files.cpp
+++ b/tools/atree/files.cpp
@@ -81,7 +81,7 @@
state = TEXT;
break;
}
- // otherwise fall-through to TEXT case
+ [[fallthrough]];
case TEXT:
if (state != IN_QUOTE && isspace(*p)) {
if (q != p) {
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 2fa5f52..ddc50be 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -46,6 +46,7 @@
from __future__ import print_function
import datetime
+import logging
import os
import shlex
import shutil
@@ -62,8 +63,9 @@
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
-OPTIONS = common.OPTIONS
+logger = logging.getLogger(__name__)
+OPTIONS = common.OPTIONS
OPTIONS.add_missing = False
OPTIONS.rebuild_recovery = False
OPTIONS.replace_updated_files_list = []
@@ -127,7 +129,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.img")
if os.path.exists(img.input_name):
- print("system.img already exists; no need to rebuild...")
+ logger.info("system.img already exists; no need to rebuild...")
return img.input_name
def output_sink(fn, data):
@@ -142,7 +144,7 @@
common.ZipWrite(output_zip, ofile.name, arc_name)
if OPTIONS.rebuild_recovery:
- print("Building new recovery patch")
+ logger.info("Building new recovery patch")
common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
boot_img, info_dict=OPTIONS.info_dict)
@@ -159,7 +161,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system_other.img")
if os.path.exists(img.input_name):
- print("system_other.img already exists; no need to rebuild...")
+ logger.info("system_other.img already exists; no need to rebuild...")
return
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system_other", img)
@@ -171,7 +173,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.img")
if os.path.exists(img.input_name):
- print("vendor.img already exists; no need to rebuild...")
+ logger.info("vendor.img already exists; no need to rebuild...")
return img.input_name
block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.map")
@@ -186,7 +188,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "product.img")
if os.path.exists(img.input_name):
- print("product.img already exists; no need to rebuild...")
+ logger.info("product.img already exists; no need to rebuild...")
return img.input_name
block_list = OutputFile(
@@ -204,7 +206,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES",
"product_services.img")
if os.path.exists(img.input_name):
- print("product_services.img already exists; no need to rebuild...")
+ logger.info("product_services.img already exists; no need to rebuild...")
return img.input_name
block_list = OutputFile(
@@ -220,7 +222,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "odm.img")
if os.path.exists(img.input_name):
- print("odm.img already exists; no need to rebuild...")
+ logger.info("odm.img already exists; no need to rebuild...")
return img.input_name
block_list = OutputFile(
@@ -239,7 +241,7 @@
"""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "dtbo.img")
if os.path.exists(img.input_name):
- print("dtbo.img already exists; no need to rebuild...")
+ logger.info("dtbo.img already exists; no need to rebuild...")
return img.input_name
dtbo_prebuilt_path = os.path.join(
@@ -269,7 +271,7 @@
def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
- print("creating " + what + ".img...")
+ logger.info("creating " + what + ".img...")
image_props = build_image.ImagePropFromGlobalDict(info_dict, what)
fstab = info_dict["fstab"]
@@ -340,7 +342,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "userdata.img")
if os.path.exists(img.input_name):
- print("userdata.img already exists; no need to rebuild...")
+ logger.info("userdata.img already exists; no need to rebuild...")
return
# Skip userdata.img if no size.
@@ -348,7 +350,7 @@
if not image_props.get("partition_size"):
return
- print("creating userdata.img...")
+ logger.info("creating userdata.img...")
image_props["timestamp"] = FIXED_FILE_TIMESTAMP
@@ -399,10 +401,13 @@
partitions: A dict that's keyed by partition names with image paths as
values. Only valid partition names are accepted, as listed in
common.AVB_PARTITIONS.
- name: Name of the VBMeta partition, e.g. 'vbmeta', 'vbmeta_mainline'.
+ name: Name of the VBMeta partition, e.g. 'vbmeta', 'vbmeta_system'.
needed_partitions: Partitions whose descriptors should be included into the
generated VBMeta image.
+ Returns:
+ Path to the created image.
+
Raises:
AssertionError: On invalid input args.
"""
@@ -411,7 +416,7 @@
img = OutputFile(
output_zip, OPTIONS.input_tmp, "IMAGES", "{}.img".format(name))
if os.path.exists(img.input_name):
- print("{}.img already exists; not rebuilding...".format(name))
+ logger.info("%s.img already exists; not rebuilding...", name)
return img.input_name
avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
@@ -421,7 +426,8 @@
for partition, path in partitions.items():
if partition not in needed_partitions:
continue
- assert partition in common.AVB_PARTITIONS, \
+ assert (partition in common.AVB_PARTITIONS or
+ partition.startswith('vbmeta_')), \
'Unknown partition: {}'.format(partition)
assert os.path.exists(path), \
'Failed to find {} for {}'.format(path, partition)
@@ -456,6 +462,7 @@
assert proc.returncode == 0, \
"avbtool make_vbmeta_image failed:\n{}".format(stdoutdata)
img.Write()
+ return img.name
def AddPartitionTable(output_zip):
@@ -495,7 +502,7 @@
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "cache.img")
if os.path.exists(img.input_name):
- print("cache.img already exists; no need to rebuild...")
+ logger.info("cache.img already exists; no need to rebuild...")
return
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "cache")
@@ -503,7 +510,7 @@
if "fs_type" not in image_props:
return
- print("creating cache.img...")
+ logger.info("creating cache.img...")
image_props["timestamp"] = FIXED_FILE_TIMESTAMP
@@ -580,8 +587,7 @@
present_props = [x for x in prop_name_list if x in build_props]
if not present_props:
- print("Warning: fingerprint is not present for partition {}".
- format(partition))
+ logger.warning("fingerprint is not present for partition %s", partition)
property_id, fingerprint = "unknown", "unknown"
else:
property_id = present_props[0]
@@ -633,7 +639,7 @@
prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
if os.path.exists(prebuilt_path):
- print("%s already exists, no need to overwrite..." % (img_name,))
+ logger.info("%s already exists, no need to overwrite...", img_name)
continue
img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
@@ -698,7 +704,7 @@
if not OPTIONS.add_missing:
if os.path.isdir(os.path.join(OPTIONS.input_tmp, "IMAGES")):
- print("target_files appears to already contain images.")
+ logger.warning("target_files appears to already contain images.")
sys.exit(1)
OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, repacking=True)
@@ -748,7 +754,7 @@
partitions = dict()
def banner(s):
- print("\n\n++++ " + s + " ++++\n\n")
+ logger.info("\n\n++++ " + s + " ++++\n\n")
banner("boot")
# common.GetBootableImage() returns the image directly if present.
@@ -832,20 +838,20 @@
# chained VBMeta image plus the chained VBMeta images themselves.
vbmeta_partitions = common.AVB_PARTITIONS[:]
- vbmeta_mainline = OPTIONS.info_dict.get("avb_vbmeta_mainline", "").strip()
- if vbmeta_mainline:
- banner("vbmeta_mainline")
- AddVBMeta(
- output_zip, partitions, "vbmeta_mainline", vbmeta_mainline.split())
+ vbmeta_system = OPTIONS.info_dict.get("avb_vbmeta_system", "").strip()
+ if vbmeta_system:
+ banner("vbmeta_system")
+ partitions["vbmeta_system"] = AddVBMeta(
+ output_zip, partitions, "vbmeta_system", vbmeta_system.split())
vbmeta_partitions = [
item for item in vbmeta_partitions
- if item not in vbmeta_mainline.split()]
- vbmeta_partitions.append("vbmeta_mainline")
+ if item not in vbmeta_system.split()]
+ vbmeta_partitions.append("vbmeta_system")
vbmeta_vendor = OPTIONS.info_dict.get("avb_vbmeta_vendor", "").strip()
if vbmeta_vendor:
banner("vbmeta_vendor")
- AddVBMeta(
+ partitions["vbmeta_vendor"] = AddVBMeta(
output_zip, partitions, "vbmeta_vendor", vbmeta_vendor.split())
vbmeta_partitions = [
item for item in vbmeta_partitions
@@ -912,20 +918,21 @@
"is_signing"],
extra_option_handler=option_handler)
-
if len(args) != 1:
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
AddImagesToTargetFiles(args[0])
- print("done.")
+ logger.info("done.")
if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
- except common.ExternalError as e:
- print("\n ERROR: %s\n" % (e,))
+ except common.ExternalError:
+ logger.exception("\n ERROR:\n")
sys.exit(1)
finally:
common.Cleanup()
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 189dba2..2d20e23 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -19,6 +19,7 @@
import functools
import heapq
import itertools
+import logging
import multiprocessing
import os
import os.path
@@ -33,6 +34,8 @@
__all__ = ["EmptyImage", "DataImage", "BlockImageDiff"]
+logger = logging.getLogger(__name__)
+
def compute_patch(srcfile, tgtfile, imgdiff=False):
patchfile = common.MakeTempFile(prefix='patch-')
@@ -304,8 +307,8 @@
"""Prints a report of the collected imgdiff stats."""
def print_header(header, separator):
- print(header)
- print(separator * len(header) + '\n')
+ logger.info(header)
+ logger.info(separator * len(header) + '\n')
print_header(' Imgdiff Stats Report ', '=')
for key in self.REASONS:
@@ -314,7 +317,7 @@
values = self.stats[key]
section_header = ' {} (count: {}) '.format(key, len(values))
print_header(section_header, '-')
- print(''.join([' {}\n'.format(name) for name in values]))
+ logger.info(''.join([' {}\n'.format(name) for name in values]))
class BlockImageDiff(object):
@@ -482,7 +485,7 @@
self.WriteTransfers(prefix)
# Report the imgdiff stats.
- if common.OPTIONS.verbose and not self.disable_imgdiff:
+ if not self.disable_imgdiff:
self.imgdiff_stats.Report()
def WriteTransfers(self, prefix):
@@ -692,16 +695,17 @@
OPTIONS = common.OPTIONS
if OPTIONS.cache_size is not None:
max_allowed = OPTIONS.cache_size * OPTIONS.stash_threshold
- print("max stashed blocks: %d (%d bytes), "
- "limit: %d bytes (%.2f%%)\n" % (
- max_stashed_blocks, self._max_stashed_size, max_allowed,
- self._max_stashed_size * 100.0 / max_allowed))
+ logger.info(
+ "max stashed blocks: %d (%d bytes), limit: %d bytes (%.2f%%)\n",
+ max_stashed_blocks, self._max_stashed_size, max_allowed,
+ self._max_stashed_size * 100.0 / max_allowed)
else:
- print("max stashed blocks: %d (%d bytes), limit: <unknown>\n" % (
- max_stashed_blocks, self._max_stashed_size))
+ logger.info(
+ "max stashed blocks: %d (%d bytes), limit: <unknown>\n",
+ max_stashed_blocks, self._max_stashed_size)
def ReviseStashSize(self):
- print("Revising stash size...")
+ logger.info("Revising stash size...")
stash_map = {}
# Create the map between a stash and its def/use points. For example, for a
@@ -746,7 +750,7 @@
# that will use this stash and replace the command with "new".
use_cmd = stash_map[stash_raw_id][2]
replaced_cmds.append(use_cmd)
- print("%10d %9s %s" % (sr.size(), "explicit", use_cmd))
+ logger.info("%10d %9s %s", sr.size(), "explicit", use_cmd)
else:
# Update the stashes map.
if sh in stashes:
@@ -762,7 +766,7 @@
if xf.src_ranges.overlaps(xf.tgt_ranges):
if stashed_blocks + xf.src_ranges.size() > max_allowed:
replaced_cmds.append(xf)
- print("%10d %9s %s" % (xf.src_ranges.size(), "implicit", xf))
+ logger.info("%10d %9s %s", xf.src_ranges.size(), "implicit", xf)
# Replace the commands in replaced_cmds with "new"s.
for cmd in replaced_cmds:
@@ -788,28 +792,29 @@
stashes.pop(sh)
num_of_bytes = new_blocks * self.tgt.blocksize
- print(" Total %d blocks (%d bytes) are packed as new blocks due to "
- "insufficient cache size." % (new_blocks, num_of_bytes))
+ logger.info(
+ " Total %d blocks (%d bytes) are packed as new blocks due to "
+ "insufficient cache size.", new_blocks, num_of_bytes)
return new_blocks
def ComputePatches(self, prefix):
- print("Reticulating splines...")
+ logger.info("Reticulating splines...")
diff_queue = []
patch_num = 0
with open(prefix + ".new.dat", "wb") as new_f:
for index, xf in enumerate(self.transfers):
if xf.style == "zero":
tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
- print("%10d %10d (%6.2f%%) %7s %s %s" % (
- tgt_size, tgt_size, 100.0, xf.style, xf.tgt_name,
- str(xf.tgt_ranges)))
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s", tgt_size, tgt_size, 100.0,
+ xf.style, xf.tgt_name, str(xf.tgt_ranges))
elif xf.style == "new":
self.tgt.WriteRangeDataToFd(xf.tgt_ranges, new_f)
tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
- print("%10d %10d (%6.2f%%) %7s %s %s" % (
- tgt_size, tgt_size, 100.0, xf.style,
- xf.tgt_name, str(xf.tgt_ranges)))
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s", tgt_size, tgt_size, 100.0,
+ xf.style, xf.tgt_name, str(xf.tgt_ranges))
elif xf.style == "diff":
# We can't compare src and tgt directly because they may have
@@ -827,11 +832,12 @@
xf.patch = None
tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
if xf.src_ranges != xf.tgt_ranges:
- print("%10d %10d (%6.2f%%) %7s %s %s (from %s)" % (
- tgt_size, tgt_size, 100.0, xf.style,
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s (from %s)", tgt_size, tgt_size,
+ 100.0, xf.style,
xf.tgt_name if xf.tgt_name == xf.src_name else (
xf.tgt_name + " (from " + xf.src_name + ")"),
- str(xf.tgt_ranges), str(xf.src_ranges)))
+ str(xf.tgt_ranges), str(xf.src_ranges))
else:
if xf.patch:
# We have already generated the patch with imgdiff, while
@@ -850,9 +856,9 @@
if diff_queue:
if self.threads > 1:
- print("Computing patches (using %d threads)..." % (self.threads,))
+ logger.info("Computing patches (using %d threads)...", self.threads)
else:
- print("Computing patches...")
+ logger.info("Computing patches...")
diff_total = len(diff_queue)
patches = [None] * diff_total
@@ -874,13 +880,6 @@
xf_index, imgdiff, patch_index = diff_queue.pop()
xf = self.transfers[xf_index]
- if sys.stdout.isatty():
- diff_left = len(diff_queue)
- progress = (diff_total - diff_left) * 100 / diff_total
- # '\033[K' is to clear to EOL.
- print(' [%3d%%] %s\033[K' % (progress, xf.tgt_name), end='\r')
- sys.stdout.flush()
-
patch = xf.patch
if not patch:
src_ranges = xf.src_ranges
@@ -918,13 +917,10 @@
while threads:
threads.pop().join()
- if sys.stdout.isatty():
- print('\n')
-
if error_messages:
- print('ERROR:')
- print('\n'.join(error_messages))
- print('\n\n\n')
+ logger.error('ERROR:')
+ logger.error('\n'.join(error_messages))
+ logger.error('\n\n\n')
sys.exit(1)
else:
patches = []
@@ -938,14 +934,13 @@
offset += xf.patch_len
patch_fd.write(patch)
- if common.OPTIONS.verbose:
- tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
- print("%10d %10d (%6.2f%%) %7s %s %s %s" % (
- xf.patch_len, tgt_size, xf.patch_len * 100.0 / tgt_size,
- xf.style,
- xf.tgt_name if xf.tgt_name == xf.src_name else (
- xf.tgt_name + " (from " + xf.src_name + ")"),
- xf.tgt_ranges, xf.src_ranges))
+ tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
+ logger.info(
+ "%10d %10d (%6.2f%%) %7s %s %s %s", xf.patch_len, tgt_size,
+ xf.patch_len * 100.0 / tgt_size, xf.style,
+ xf.tgt_name if xf.tgt_name == xf.src_name else (
+ xf.tgt_name + " (from " + xf.src_name + ")"),
+ xf.tgt_ranges, xf.src_ranges)
def AssertSha1Good(self):
"""Check the SHA-1 of the src & tgt blocks in the transfer list.
@@ -1005,7 +1000,7 @@
assert touched[i] == 1
def ImproveVertexSequence(self):
- print("Improving vertex order...")
+ logger.info("Improving vertex order...")
# At this point our digraph is acyclic; we reversed any edges that
# were backwards in the heuristically-generated sequence. The
@@ -1057,7 +1052,7 @@
blocks will be written to the same stash slot in WriteTransfers().
"""
- print("Reversing backward edges...")
+ logger.info("Reversing backward edges...")
in_order = 0
out_of_order = 0
stash_raw_id = 0
@@ -1089,15 +1084,15 @@
xf.goes_after[u] = None # value doesn't matter
u.goes_before[xf] = None
- print((" %d/%d dependencies (%.2f%%) were violated; "
- "%d source blocks stashed.") %
- (out_of_order, in_order + out_of_order,
- (out_of_order * 100.0 / (in_order + out_of_order))
- if (in_order + out_of_order) else 0.0,
- stash_size))
+ logger.info(
+ " %d/%d dependencies (%.2f%%) were violated; %d source blocks "
+ "stashed.", out_of_order, in_order + out_of_order,
+ (out_of_order * 100.0 / (in_order + out_of_order)) if (
+ in_order + out_of_order) else 0.0,
+ stash_size)
def FindVertexSequence(self):
- print("Finding vertex sequence...")
+ logger.info("Finding vertex sequence...")
# This is based on "A Fast & Effective Heuristic for the Feedback
# Arc Set Problem" by P. Eades, X. Lin, and W.F. Smyth. Think of
@@ -1210,7 +1205,7 @@
self.transfers = new_transfers
def GenerateDigraph(self):
- print("Generating digraph...")
+ logger.info("Generating digraph...")
# Each item of source_ranges will be:
# - None, if that block is not used as a source,
@@ -1376,9 +1371,9 @@
if tgt_changed < tgt_size * crop_threshold:
assert tgt_changed + tgt_skipped.size() == tgt_size
- print('%10d %10d (%6.2f%%) %s' % (
- tgt_skipped.size(), tgt_size,
- tgt_skipped.size() * 100.0 / tgt_size, tgt_name))
+ logger.info(
+ '%10d %10d (%6.2f%%) %s', tgt_skipped.size(), tgt_size,
+ tgt_skipped.size() * 100.0 / tgt_size, tgt_name)
AddSplitTransfers(
"%s-skipped" % (tgt_name,),
"%s-skipped" % (src_name,),
@@ -1519,7 +1514,7 @@
split_src_ranges,
patch_content))
- print("Finding transfers...")
+ logger.info("Finding transfers...")
large_apks = []
split_large_apks = []
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 43c91da..4a013c2 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -26,6 +26,7 @@
from __future__ import print_function
+import logging
import os
import os.path
import re
@@ -35,6 +36,8 @@
import common
import verity_utils
+logger = logging.getLogger(__name__)
+
OPTIONS = common.OPTIONS
BLOCK_SIZE = common.BLOCK_SIZE
BYTES_IN_MB = 1024 * 1024
@@ -228,8 +231,8 @@
"partition_size" not in prop_dict):
# If partition_size is not defined, use output of `du' + reserved_size.
size = GetDiskUsage(in_dir)
- if OPTIONS.verbose:
- print("The tree size of %s is %d MB." % (in_dir, size // BYTES_IN_MB))
+ logger.info(
+ "The tree size of %s is %d MB.", in_dir, size // BYTES_IN_MB)
size += int(prop_dict.get("partition_reserved_size", 0))
# Round this up to a multiple of 4K so that avbtool works
size = common.RoundUpTo4K(size)
@@ -241,8 +244,8 @@
lambda x: verity_utils.AVBCalcMaxImageSize(
avbtool, avb_footer_type, x, avb_signing_args))
prop_dict["partition_size"] = str(size)
- if OPTIONS.verbose:
- print("Allocating %d MB for %s." % (size // BYTES_IN_MB, out_file))
+ logger.info(
+ "Allocating %d MB for %s.", size // BYTES_IN_MB, out_file)
prop_dict["image_size"] = prop_dict["partition_size"]
@@ -350,8 +353,8 @@
du_str = "{} bytes ({} MB)".format(du, du // BYTES_IN_MB)
# Suppress any errors from GetDiskUsage() to avoid hiding the real errors
# from common.RunAndCheckOutput().
- except Exception as e: # pylint: disable=broad-except
- print(e, file=sys.stderr)
+ except Exception: # pylint: disable=broad-except
+ logger.exception("Failed to compute disk usage with du")
du_str = "unknown"
print(
"Out of space? The tree size of {} is {}, with reserved space of {} "
@@ -664,6 +667,8 @@
print(__doc__)
sys.exit(1)
+ common.InitLogging()
+
in_dir = argv[0]
glob_dict_file = argv[1]
out_file = argv[2]
@@ -697,7 +702,7 @@
elif image_filename == "product_services.img":
mount_point = "product_services"
else:
- print("error: unknown image file name ", image_filename, file=sys.stderr)
+ logger.error("Unknown image file name %s", image_filename)
sys.exit(1)
image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
@@ -705,14 +710,14 @@
try:
BuildImage(in_dir, image_properties, out_file, target_out)
except:
- print("Error: Failed to build {} from {}".format(out_file, in_dir),
- file=sys.stderr)
+ logger.error("Failed to build %s from %s", out_file, in_dir)
raise
if prop_file_out:
glob_dict_out = GlobalDictFromImageProp(image_properties, mount_point)
SaveGlobalDict(prop_file_out, glob_dict_out)
+
if __name__ == '__main__':
try:
main(sys.argv[1:])
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index a580709..7d3424b 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -21,16 +21,18 @@
from __future__ import print_function
import argparse
+import logging
import re
import subprocess
import sys
import zipfile
-
from hashlib import sha1
from hashlib import sha256
import common
+logger = logging.getLogger(__name__)
+
def CertUsesSha256(cert):
"""Check if the cert uses SHA-256 hashing algorithm."""
@@ -181,6 +183,8 @@
parser.add_argument('package', help='The OTA package to be verified.')
args = parser.parse_args()
+ common.InitLogging()
+
VerifyPackage(args.certificate, args.package)
VerifyAbOtaPayload(args.certificate, args.package)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 7cca766..fe63458 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -20,6 +20,9 @@
import getpass
import gzip
import imp
+import json
+import logging
+import logging.config
import os
import platform
import re
@@ -37,6 +40,8 @@
import blockimgdiff
import sparse_img
+logger = logging.getLogger(__name__)
+
class Options(object):
def __init__(self):
@@ -121,13 +126,53 @@
pass
+def InitLogging():
+ DEFAULT_LOGGING_CONFIG = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'formatters': {
+ 'standard': {
+ 'format':
+ '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s',
+ 'datefmt': '%Y-%m-%d %H:%M:%S',
+ },
+ },
+ 'handlers': {
+ 'default': {
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'standard',
+ },
+ },
+ 'loggers': {
+ '': {
+ 'handlers': ['default'],
+ 'level': 'WARNING',
+ 'propagate': True,
+ }
+ }
+ }
+ env_config = os.getenv('LOGGING_CONFIG')
+ if env_config:
+ with open(env_config) as f:
+ config = json.load(f)
+ else:
+ config = DEFAULT_LOGGING_CONFIG
+
+ # Increase the logging level for verbose mode.
+ if OPTIONS.verbose:
+ config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
+ config['loggers']['']['level'] = 'INFO'
+
+ logging.config.dictConfig(config)
+
+
def Run(args, verbose=None, **kwargs):
"""Creates and returns a subprocess.Popen object.
Args:
args: The command represented as a list of strings.
- verbose: Whether the commands should be shown (default to OPTIONS.verbose
- if unspecified).
+ verbose: Whether the commands should be shown. Default to the global
+ verbosity if unspecified.
kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
stdin, etc. stdout and stderr will default to subprocess.PIPE and
subprocess.STDOUT respectively unless caller specifies any of them.
@@ -135,13 +180,12 @@
Returns:
A subprocess.Popen object.
"""
- if verbose is None:
- verbose = OPTIONS.verbose
if 'stdout' not in kwargs and 'stderr' not in kwargs:
kwargs['stdout'] = subprocess.PIPE
kwargs['stderr'] = subprocess.STDOUT
- if verbose:
- print(" Running: \"{}\"".format(" ".join(args)))
+ # Don't log any if caller explicitly says so.
+ if verbose != False:
+ logger.info(" Running: \"%s\"", " ".join(args))
return subprocess.Popen(args, **kwargs)
@@ -150,8 +194,8 @@
Args:
args: The command represented as a list of strings.
- verbose: Whether the commands should be shown (default to OPTIONS.verbose
- if unspecified).
+ verbose: Whether the commands should be shown. Default to the global
+ verbosity if unspecified.
kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
stdin, etc. stdout and stderr will default to subprocess.PIPE and
subprocess.STDOUT respectively unless caller specifies any of them.
@@ -162,12 +206,11 @@
Raises:
ExternalError: On non-zero exit from the command.
"""
- if verbose is None:
- verbose = OPTIONS.verbose
proc = Run(args, verbose=verbose, **kwargs)
output, _ = proc.communicate()
- if verbose:
- print("{}".format(output.rstrip()))
+ # Don't log any if caller explicitly says so.
+ if verbose != False:
+ logger.info("%s", output.rstrip())
if proc.returncode != 0:
raise ExternalError(
"Failed to run command '{}' (exit code {}):\n{}".format(
@@ -277,8 +320,8 @@
if os.path.exists(system_base_fs_file):
d["system_base_fs_file"] = system_base_fs_file
else:
- print("Warning: failed to find system base fs file: %s" % (
- system_base_fs_file,))
+ logger.warning(
+ "Failed to find system base fs file: %s", system_base_fs_file)
del d["system_base_fs_file"]
if "vendor_base_fs_file" in d:
@@ -287,8 +330,8 @@
if os.path.exists(vendor_base_fs_file):
d["vendor_base_fs_file"] = vendor_base_fs_file
else:
- print("Warning: failed to find vendor base fs file: %s" % (
- vendor_base_fs_file,))
+ logger.warning(
+ "Failed to find vendor base fs file: %s", vendor_base_fs_file)
del d["vendor_base_fs_file"]
def makeint(key):
@@ -364,7 +407,7 @@
try:
data = read_helper(prop_file)
except KeyError:
- print("Warning: could not read %s" % (prop_file,))
+ logger.warning("Failed to read %s", prop_file)
data = ""
return LoadDictionaryFromLines(data.split("\n"))
@@ -394,7 +437,7 @@
try:
data = read_helper(recovery_fstab_path)
except KeyError:
- print("Warning: could not find {}".format(recovery_fstab_path))
+ logger.warning("Failed to find %s", recovery_fstab_path)
data = ""
assert fstab_version == 2
@@ -447,7 +490,7 @@
def DumpInfoDict(d):
for k, v in sorted(d.items()):
- print("%-25s = (%s) %s" % (k, type(v).__name__, v))
+ logger.info("%-25s = (%s) %s", k, type(v).__name__, v)
def AppendAVBSigningArgs(cmd, partition):
@@ -657,15 +700,15 @@
prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
if os.path.exists(prebuilt_path):
- print("using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,))
+ logger.info("using prebuilt %s from BOOTABLE_IMAGES...", prebuilt_name)
return File.FromLocalFile(name, prebuilt_path)
prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
if os.path.exists(prebuilt_path):
- print("using prebuilt %s from IMAGES..." % (prebuilt_name,))
+ logger.info("using prebuilt %s from IMAGES...", prebuilt_name)
return File.FromLocalFile(name, prebuilt_path)
- print("building image from target_files %s..." % (tree_subdir,))
+ logger.info("building image from target_files %s...", tree_subdir)
if info_dict is None:
info_dict = OPTIONS.info_dict
@@ -1001,9 +1044,9 @@
if pct >= 99.0:
raise ExternalError(msg)
elif pct >= 95.0:
- print("\n WARNING: %s\n" % (msg,))
- elif OPTIONS.verbose:
- print(" ", msg)
+ logger.warning("\n WARNING: %s\n", msg)
+ else:
+ logger.info(" %s", msg)
def ReadApkCerts(tf_zip):
@@ -1302,13 +1345,13 @@
continue
m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
if not m:
- print("failed to parse password file: ", line)
+ logger.warning("Failed to parse password file: %s", line)
else:
result[m.group(2)] = m.group(1)
f.close()
except IOError as e:
if e.errno != errno.ENOENT:
- print("error reading password file: ", str(e))
+ logger.exception("Error reading password file:")
return result
@@ -1452,10 +1495,10 @@
if x == ".py":
f = b
info = imp.find_module(f, [d])
- print("loaded device-specific extensions from", path)
+ logger.info("loaded device-specific extensions from %s", path)
self.module = imp.load_module("device_specific", *info)
except ImportError:
- print("unable to load device-specific module; assuming none")
+ logger.info("unable to load device-specific module; assuming none")
def _DoCall(self, function_name, *args, **kwargs):
"""Call the named function in the device-specific module, passing
@@ -1597,7 +1640,7 @@
th.start()
th.join(timeout=300) # 5 mins
if th.is_alive():
- print("WARNING: diff command timed out")
+ logger.warning("diff command timed out")
p.terminate()
th.join(5)
if th.is_alive():
@@ -1605,8 +1648,7 @@
th.join()
if p.returncode != 0:
- print("WARNING: failure running %s:\n%s\n" % (
- diff_program, "".join(err)))
+ logger.warning("Failure running %s:\n%s\n", diff_program, "".join(err))
self.patch = None
return None, None, None
diff = ptemp.read()
@@ -1630,7 +1672,7 @@
def ComputeDifferences(diffs):
"""Call ComputePatch on all the Difference objects in 'diffs'."""
- print(len(diffs), "diffs to compute")
+ logger.info("%d diffs to compute", len(diffs))
# Do the largest files first, to try and reduce the long-pole effect.
by_size = [(i.tf.size, i) for i in diffs]
@@ -1656,14 +1698,14 @@
else:
name = "%s (%s)" % (tf.name, sf.name)
if patch is None:
- print(
- "patching failed! %s" % (name,))
+ logger.error("patching failed! %40s", name)
else:
- print("%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
- dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name))
+ logger.info(
+ "%8.2f sec %8d / %8d bytes (%6.2f%%) %s", dur, len(patch),
+ tf.size, 100.0 * len(patch) / tf.size, name)
lock.release()
- except Exception as e:
- print(e)
+ except Exception:
+ logger.exception("Failed to compute diff from worker")
raise
# start worker threads; wait for them all to finish.
@@ -2086,6 +2128,6 @@
# in the L release.
sh_location = "bin/install-recovery.sh"
- print("putting script in", sh_location)
+ logger.info("putting script in %s", sh_location)
output_sink(sh_location, sh)
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 01ff149..0156b72 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -28,6 +28,7 @@
from __future__ import print_function
+import logging
import os
import shutil
import sys
@@ -39,6 +40,7 @@
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
+logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
@@ -72,6 +74,8 @@
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
OPTIONS.input_tmp = common.UnzipTemp(args[0], ["IMAGES/*", "OTA/*"])
output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
CopyInfo(output_zip)
@@ -90,11 +94,11 @@
common.ZipWrite(output_zip, os.path.join(images_path, image), image)
finally:
- print("cleaning up...")
+ logger.info("cleaning up...")
common.ZipClose(output_zip)
shutil.rmtree(OPTIONS.input_tmp)
- print("done.")
+ logger.info("done.")
if __name__ == '__main__':
@@ -102,5 +106,5 @@
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError as e:
- print("\n ERROR: %s\n" % (e,))
+ logger.exception("\n ERROR:\n")
sys.exit(1)
diff --git a/tools/releasetools/make_recovery_patch.py b/tools/releasetools/make_recovery_patch.py
index 7c6007e..725b355 100755
--- a/tools/releasetools/make_recovery_patch.py
+++ b/tools/releasetools/make_recovery_patch.py
@@ -16,24 +16,27 @@
from __future__ import print_function
+import logging
+import os
import sys
+import common
+
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
-import os
-import common
+logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
-def main(argv):
- # def option_handler(o, a):
- # return False
+def main(argv):
args = common.ParseOptions(argv, __doc__)
input_dir, output_dir = args
+ common.InitLogging()
+
OPTIONS.info_dict = common.LoadInfoDict(input_dir)
recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 7ea53f8..2264655 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -164,12 +164,12 @@
from __future__ import print_function
+import logging
import multiprocessing
import os.path
import shlex
import shutil
import struct
-import subprocess
import sys
import tempfile
import zipfile
@@ -182,6 +182,7 @@
print("Python 2.7 or newer is required.", file=sys.stderr)
sys.exit(1)
+logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
OPTIONS.package_key = None
@@ -393,11 +394,7 @@
cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
cmd.extend(["-out", signing_key])
-
- get_signing_key = common.Run(cmd, verbose=False)
- stdoutdata, _ = get_signing_key.communicate()
- assert get_signing_key.returncode == 0, \
- "Failed to get signing key: {}".format(stdoutdata)
+ common.RunAndCheckOutput(cmd, verbose=False)
self.signer = "openssl"
self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
@@ -410,10 +407,7 @@
"""Signs the given input file. Returns the output filename."""
out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
- signing = common.Run(cmd)
- stdoutdata, _ = signing.communicate()
- assert signing.returncode == 0, \
- "Failed to sign the input file: {}".format(stdoutdata)
+ common.RunAndCheckOutput(cmd)
return out_file
@@ -431,8 +425,6 @@
Args:
secondary: Whether it's generating a secondary payload (default: False).
"""
- # The place where the output from the subprocess should go.
- self._log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
self.payload_file = None
self.payload_properties = None
self.secondary = secondary
@@ -457,10 +449,7 @@
if source_file is not None:
cmd.extend(["--source_image", source_file])
cmd.extend(additional_args)
- p = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- stdoutdata, _ = p.communicate()
- assert p.returncode == 0, \
- "brillo_update_payload generate failed: {}".format(stdoutdata)
+ common.RunAndCheckOutput(cmd)
self.payload_file = payload_file
self.payload_properties = None
@@ -484,9 +473,7 @@
"--signature_size", "256",
"--metadata_hash_file", metadata_sig_file,
"--payload_hash_file", payload_sig_file]
- p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- p1.communicate()
- assert p1.returncode == 0, "brillo_update_payload hash failed"
+ common.RunAndCheckOutput(cmd)
# 2. Sign the hashes.
signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
@@ -501,9 +488,7 @@
"--signature_size", "256",
"--metadata_signature_file", signed_metadata_sig_file,
"--payload_signature_file", signed_payload_sig_file]
- p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- p1.communicate()
- assert p1.returncode == 0, "brillo_update_payload sign failed"
+ common.RunAndCheckOutput(cmd)
# 4. Dump the signed payload properties.
properties_file = common.MakeTempFile(prefix="payload-properties-",
@@ -511,9 +496,7 @@
cmd = ["brillo_update_payload", "properties",
"--payload", signed_payload_file,
"--properties_file", properties_file]
- p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
- p1.communicate()
- assert p1.returncode == 0, "brillo_update_payload properties failed"
+ common.RunAndCheckOutput(cmd)
if self.secondary:
with open(properties_file, "a") as f:
@@ -595,11 +578,11 @@
OPTIONS.input_tmp, "RECOVERY")
common.ZipWriteStr(
output_zip, recovery_two_step_img_name, recovery_two_step_img.data)
- print("two-step package: using %s in stage 1/3" % (
- recovery_two_step_img_name,))
+ logger.info(
+ "two-step package: using %s in stage 1/3", recovery_two_step_img_name)
script.WriteRawImage("/boot", recovery_two_step_img_name)
else:
- print("two-step package: using recovery.img in stage 1/3")
+ logger.info("two-step package: using recovery.img in stage 1/3")
# The "recovery.img" entry has been written into package earlier.
script.WriteRawImage("/boot", "recovery.img")
@@ -1363,8 +1346,8 @@
target_api_version = target_info["recovery_api_version"]
source_api_version = source_info["recovery_api_version"]
if source_api_version == 0:
- print("WARNING: generating edify script for a source that "
- "can't install it.")
+ logger.warning(
+ "Generating edify script for a source that can't install it.")
script = edify_generator.EdifyGenerator(
source_api_version, target_info, fstab=source_info["fstab"])
@@ -1542,8 +1525,9 @@
else:
include_full_boot = False
- print("boot target: %d source: %d diff: %d" % (
- target_boot.size, source_boot.size, len(d)))
+ logger.info(
+ "boot target: %d source: %d diff: %d", target_boot.size,
+ source_boot.size, len(d))
common.ZipWriteStr(output_zip, "boot.img.p", d)
@@ -1593,19 +1577,19 @@
if OPTIONS.two_step:
common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
script.WriteRawImage("/boot", "boot.img")
- print("writing full boot image (forced by two-step mode)")
+ logger.info("writing full boot image (forced by two-step mode)")
if not OPTIONS.two_step:
if updating_boot:
if include_full_boot:
- print("boot image changed; including full.")
+ logger.info("boot image changed; including full.")
script.Print("Installing boot image...")
script.WriteRawImage("/boot", "boot.img")
else:
# Produce the boot image by applying a patch to the current
# contents of the boot partition, and write it back to the
# partition.
- print("boot image changed; including patch.")
+ logger.info("boot image changed; including patch.")
script.Print("Patching boot image...")
script.ShowProgress(0.1, 10)
script.PatchPartition(
@@ -1615,7 +1599,7 @@
boot_type, boot_device, source_boot.size, source_boot.sha1),
'boot.img.p')
else:
- print("boot image unchanged; skipping.")
+ logger.info("boot image unchanged; skipping.")
# Do device-specific installation (eg, write radio image).
device_specific.IncrementalOTA_InstallEnd()
@@ -1806,7 +1790,7 @@
common.ZipWriteStr(output_zip, care_map_name, care_map_data,
compress_type=zipfile.ZIP_STORED)
else:
- print("Warning: cannot find care map file in target_file package")
+ logger.warning("Cannot find care map file in target_file package")
AddCompatibilityArchiveIfTrebleEnabled(
target_zip, output_zip, target_info, source_info)
@@ -1922,6 +1906,8 @@
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
if OPTIONS.downgrade:
# We should only allow downgrading incrementals (as opposed to full).
# Otherwise the device may go back from arbitrary build with this full
@@ -1942,9 +1928,8 @@
with zipfile.ZipFile(args[0], 'r') as input_zip:
OPTIONS.info_dict = common.LoadInfoDict(input_zip)
- if OPTIONS.verbose:
- print("--- target info ---")
- common.DumpInfoDict(OPTIONS.info_dict)
+ logger.info("--- target info ---")
+ common.DumpInfoDict(OPTIONS.info_dict)
# Load the source build dict if applicable.
if OPTIONS.incremental_source is not None:
@@ -1952,9 +1937,8 @@
with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
- if OPTIONS.verbose:
- print("--- source info ---")
- common.DumpInfoDict(OPTIONS.source_info_dict)
+ logger.info("--- source info ---")
+ common.DumpInfoDict(OPTIONS.source_info_dict)
# Load OEM dicts if provided.
OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
@@ -1978,7 +1962,7 @@
output_file=args[1],
source_file=OPTIONS.incremental_source)
- print("done.")
+ logger.info("done.")
return
# Sanity check the loaded info dicts first.
@@ -1989,7 +1973,7 @@
# Non-A/B OTAs rely on /cache partition to store temporary files.
cache_size = OPTIONS.info_dict.get("cache_size")
if cache_size is None:
- print("--- can't determine the cache partition size ---")
+ logger.warning("--- can't determine the cache partition size ---")
OPTIONS.cache_size = cache_size
if OPTIONS.extra_script is not None:
@@ -1998,7 +1982,7 @@
if OPTIONS.extracted_input is not None:
OPTIONS.input_tmp = OPTIONS.extracted_input
else:
- print("unzipping target target-files...")
+ logger.info("unzipping target target-files...")
OPTIONS.input_tmp = common.UnzipTemp(args[0], UNZIP_PATTERN)
OPTIONS.target_tmp = OPTIONS.input_tmp
@@ -2010,7 +1994,7 @@
if OPTIONS.device_specific is None:
from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
if os.path.exists(from_input):
- print("(using device-specific extensions from target_files)")
+ logger.info("(using device-specific extensions from target_files)")
OPTIONS.device_specific = from_input
else:
OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
@@ -2027,7 +2011,7 @@
# Generate an incremental OTA.
else:
- print("unzipping source target-files...")
+ logger.info("unzipping source target-files...")
OPTIONS.source_tmp = common.UnzipTemp(
OPTIONS.incremental_source, UNZIP_PATTERN)
with zipfile.ZipFile(args[0], 'r') as input_zip, \
@@ -2043,15 +2027,15 @@
target_files_diff.recursiveDiff(
'', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
- print("done.")
+ logger.info("done.")
if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
- except common.ExternalError as e:
- print("\n ERROR: %s\n" % (e,))
+ except common.ExternalError:
+ logger.exception("\n ERROR:\n")
sys.exit(1)
finally:
common.Cleanup()
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index d35e9e8..de3ead6 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -369,13 +369,13 @@
"SYSTEM/bin/install-recovery.sh"):
OPTIONS.rebuild_recovery = True
- # Don't copy OTA keys if we're replacing them.
+ # Don't copy OTA certs if we're replacing them.
elif (
OPTIONS.replace_ota_keys and
filename in (
- "BOOT/RAMDISK/res/keys",
+ "BOOT/RAMDISK/system/etc/security/otacerts.zip",
"BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
- "RECOVERY/RAMDISK/res/keys",
+ "RECOVERY/RAMDISK/system/etc/security/otacerts.zip",
"SYSTEM/etc/security/otacerts.zip",
"SYSTEM/etc/update_engine/update-payload-key.pub.pem")):
pass
@@ -548,6 +548,27 @@
return "\n".join(output) + "\n"
+def WriteOtacerts(output_zip, filename, keys):
+ """Constructs a zipfile from given keys; and writes it to output_zip.
+
+ Args:
+ output_zip: The output target_files zip.
+ filename: The archive name in the output zip.
+ keys: A list of public keys to use during OTA package verification.
+ """
+
+ try:
+ from StringIO import StringIO
+ except ImportError:
+ from io import StringIO
+ temp_file = StringIO()
+ certs_zip = zipfile.ZipFile(temp_file, "w")
+ for k in keys:
+ common.ZipWrite(certs_zip, k)
+ common.ZipClose(certs_zip)
+ common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
+
+
def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
try:
keylist = input_tf_zip.read("META/otakeys.txt").split()
@@ -585,39 +606,20 @@
print("META/otakeys.txt has no keys; using %s for OTA package"
" verification." % (mapped_keys[0],))
- # recovery uses a version of the key that has been slightly
- # predigested (by DumpPublicKey.java) and put in res/keys.
+ # recovery now uses the same x509.pem version of the keys.
# extra_recovery_keys are used only in recovery.
- cmd = ([OPTIONS.java_path] + OPTIONS.java_args +
- ["-jar",
- os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")] +
- mapped_keys + extra_recovery_keys)
- p = common.Run(cmd, stdout=subprocess.PIPE)
- new_recovery_keys, _ = p.communicate()
- if p.returncode != 0:
- raise common.ExternalError("failed to run dumpkeys")
-
if misc_info.get("recovery_as_boot") == "true":
- recovery_keys_location = "BOOT/RAMDISK/res/keys"
+ recovery_keys_location = "BOOT/RAMDISK/system/etc/security/otacerts.zip"
else:
- recovery_keys_location = "RECOVERY/RAMDISK/res/keys"
- common.ZipWriteStr(output_tf_zip, recovery_keys_location, new_recovery_keys)
+ recovery_keys_location = "RECOVERY/RAMDISK/system/etc/security/otacerts.zip"
+
+ WriteOtacerts(output_tf_zip, recovery_keys_location,
+ mapped_keys + extra_recovery_keys)
# SystemUpdateActivity uses the x509.pem version of the keys, but
# put into a zipfile system/etc/security/otacerts.zip.
# We DO NOT include the extra_recovery_keys (if any) here.
-
- try:
- from StringIO import StringIO
- except ImportError:
- from io import StringIO
- temp_file = StringIO()
- certs_zip = zipfile.ZipFile(temp_file, "w")
- for k in mapped_keys:
- common.ZipWrite(certs_zip, k)
- common.ZipClose(certs_zip)
- common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
- temp_file.getvalue())
+ WriteOtacerts(output_tf_zip, "SYSTEM/etc/security/otacerts.zip", mapped_keys)
# For A/B devices, update the payload verification key.
if misc_info.get("ab_update") == "true":
@@ -638,8 +640,6 @@
"BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
pubkey)
- return new_recovery_keys
-
def ReplaceVerityPublicKey(output_zip, filename, key_path):
"""Replaces the verity public key at the given path in the given zip.
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index ca53ae1..5ebb1f0 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -13,6 +13,7 @@
# limitations under the License.
import bisect
+import logging
import os
import struct
import threading
@@ -20,6 +21,8 @@
import rangelib
+logger = logging.getLogger(__name__)
+
class SparseImage(object):
"""Wraps a sparse image file into an image object.
@@ -61,8 +64,9 @@
raise ValueError("Chunk header size was expected to be 12, but is %u." %
(chunk_hdr_sz,))
- print("Total of %u %u-byte output blocks in %u input chunks."
- % (total_blks, blk_sz, total_chunks))
+ logger.info(
+ "Total of %u %u-byte output blocks in %u input chunks.", total_blks,
+ blk_sz, total_chunks)
if not build_map:
assert not hashtree_info_generator, \
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index f75b3a7..44703db 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -1268,7 +1268,7 @@
target_file = construct_target_files()
common.ZipDelete(target_file, 'IMAGES/vendor.img')
payload = Payload()
- self.assertRaises(AssertionError, payload.Generate, target_file)
+ self.assertRaises(common.ExternalError, payload.Generate, target_file)
def test_Sign_full(self):
payload = self._create_payload_full()
@@ -1316,7 +1316,7 @@
payload = self._create_payload_full()
payload_signer = PayloadSigner()
payload_signer.signer_args.append('bad-option')
- self.assertRaises(AssertionError, payload.Sign, payload_signer)
+ self.assertRaises(common.ExternalError, payload.Sign, payload_signer)
def test_WriteToZip(self):
payload = self._create_payload_full()
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index b9c8dc7..edb3d41 100644
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -18,13 +18,18 @@
Utils for running unittests.
"""
+import logging
import os
import os.path
import struct
+import sys
import unittest
import common
+# Some test runners don't like output on stderr.
+logging.basicConfig(stream=sys.stdout)
+
def get_testdata_dir():
"""Returns the testdata dir, in relative to the script dir."""
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 626a1dd..00af296 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -16,6 +16,7 @@
from __future__ import print_function
+import logging
import os.path
import shlex
import struct
@@ -24,6 +25,8 @@
import sparse_img
from rangelib import RangeSet
+logger = logging.getLogger(__name__)
+
OPTIONS = common.OPTIONS
BLOCK_SIZE = common.BLOCK_SIZE
FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
@@ -71,7 +74,7 @@
def ZeroPadSimg(image_file, pad_size):
blocks = pad_size // BLOCK_SIZE
- print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
+ logger.info("Padding %d blocks (%d bytes)", blocks, pad_size)
simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
simg.AppendFillChunk(0, blocks)
@@ -114,9 +117,9 @@
else:
hi = i
- if OPTIONS.verbose:
- print("Adjusted partition size for verity, partition_size: {},"
- " verity_size: {}".format(result, verity_size))
+ logger.info(
+ "Adjusted partition size for verity, partition_size: %s, verity_size: %s",
+ result, verity_size)
AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
return (result, verity_size)
@@ -326,9 +329,9 @@
else:
lo = mid + BLOCK_SIZE
- if OPTIONS.verbose:
- print("AVBCalcMinPartitionSize({}): partition_size: {}.".format(
- image_size, partition_size))
+ logger.info(
+ "AVBCalcMinPartitionSize(%d): partition_size: %d.",
+ image_size, partition_size)
return partition_size
@@ -514,9 +517,9 @@
salt, self.hashtree_info.salt)
if root_hash != self.hashtree_info.root_hash:
- print(
- "Calculated root hash {} doesn't match the one in metadata {}".format(
- root_hash, self.hashtree_info.root_hash))
+ logger.warning(
+ "Calculated root hash %s doesn't match the one in metadata %s",
+ root_hash, self.hashtree_info.root_hash)
return False
# Reads the generated hash tree and checks if it has the exact same bytes
diff --git a/tools/warn.py b/tools/warn.py
index bcde64a..c710164 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -505,6 +505,11 @@
{'category': 'java',
'severity': Severity.LOW,
'description':
+ 'Java: This class\'s name looks like a Type Parameter.',
+ 'patterns': [r".*: warning: \[ClassNamedLikeTypeParameter\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.LOW,
+ 'description':
'Java: Field name is CONSTANT_CASE, but field is not static and final',
'patterns': [r".*: warning: \[ConstantField\] .+"]},
{'category': 'java',
@@ -515,6 +520,11 @@
{'category': 'java',
'severity': Severity.LOW,
'description':
+ 'Java: Prefer assertThrows to ExpectedException',
+ 'patterns': [r".*: warning: \[ExpectedExceptionRefactoring\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.LOW,
+ 'description':
'Java: This field is only assigned during initialization; consider making it final',
'patterns': [r".*: warning: \[FieldCanBeFinal\] .+"]},
{'category': 'java',
@@ -525,7 +535,12 @@
{'category': 'java',
'severity': Severity.LOW,
'description':
- r'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
+ 'Java: Refactors uses of the JSR 305 @Immutable to Error Prone\'s annotation',
+ 'patterns': [r".*: warning: \[ImmutableRefactoring\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.LOW,
+ 'description':
+ 'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
'patterns': [r".*: warning: \[LambdaFunctionalInterface\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
@@ -560,7 +575,7 @@
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Non-standard parameter comment; prefer `/*paramName=*/ arg`',
+ 'Java: Non-standard parameter comment; prefer `/* paramName= */ arg`',
'patterns': [r".*: warning: \[ParameterComment\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
@@ -600,17 +615,27 @@
{'category': 'java',
'severity': Severity.LOW,
'description':
+ 'Java: Prefer assertThrows to @Test(expected=...)',
+ 'patterns': [r".*: warning: \[TestExceptionRefactoring\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.LOW,
+ 'description':
'Java: Unchecked exceptions do not need to be declared in the method signature.',
'patterns': [r".*: warning: \[ThrowsUncheckedException\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
+ 'Java: Prefer assertThrows to try/fail',
+ 'patterns': [r".*: warning: \[TryFailRefactoring\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.LOW,
+ 'description':
'Java: Type parameters must be a single letter with an optional numeric suffix, or an UpperCamelCase name followed by the letter \'T\'.',
'patterns': [r".*: warning: \[TypeParameterNaming\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Constructors and methods with the same name should appear sequentially with no other code in between',
+ 'Java: Constructors and methods with the same name should appear sequentially with no other code in between. Please re-order or re-name methods.',
'patterns': [r".*: warning: \[UngroupedOverloads\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
@@ -640,11 +665,26 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: This method passes a pair of parameters through to String.format, but the enclosing method wasn\'t annotated @FormatMethod. Doing so gives compile-time rather than run-time protection against malformed format strings.',
+ 'patterns': [r".*: warning: \[AnnotateFormatMethod\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Annotations should be positioned after Javadocs, but before modifiers..',
+ 'patterns': [r".*: warning: \[AnnotationPosition\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Arguments are in the wrong order or could be commented for clarity.',
'patterns': [r".*: warning: \[ArgumentSelectionDefectChecker\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Arrays do not override equals() or hashCode, so comparisons will be done on reference equality only. If neither deduplication nor lookup are needed, consider using a List instead. Otherwise, use IdentityHashMap/Set, a Map from a library that handles object arrays, or an Iterable/List of pairs.',
+ 'patterns': [r".*: warning: \[ArrayAsKeyOfSetOrMap\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Arguments are swapped in assertEquals-like call',
'patterns': [r".*: warning: \[AssertEqualsArgumentOrderChecker\] .+"]},
{'category': 'java',
@@ -655,7 +695,7 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: The lambda passed to assertThows should contain exactly one statement',
+ 'Java: The lambda passed to assertThrows should contain exactly one statement',
'patterns': [r".*: warning: \[AssertThrowsMultipleStatements\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
@@ -670,6 +710,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Make toString(), hashCode() and equals() final in AutoValue classes, so it is clear to readers that AutoValue is not overriding them',
+ 'patterns': [r".*: warning: \[AutoValueFinalMethods\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Classes that implement Annotation must override equals and hashCode. Consider using AutoAnnotation instead of implementing Annotation by hand.',
'patterns': [r".*: warning: \[BadAnnotationImplementation\] .+"]},
{'category': 'java',
@@ -680,7 +725,22 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: BigDecimal(double) and BigDecimal.valueOf(double) may lose precision, prefer BigDecimal(String) or BigDecimal(long)',
+ 'Java: Importing nested classes/static methods/static fields with commonly-used names can make code harder to read, because it may not be clear from the context exactly which type is being referred to. Qualifying the name with that of the containing class can make the code clearer.',
+ 'patterns': [r".*: warning: \[BadImport\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: instanceof used in a way that is equivalent to a null check.',
+ 'patterns': [r".*: warning: \[BadInstanceof\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: BigDecimal#equals has surprising behavior: it also compares scale.',
+ 'patterns': [r".*: warning: \[BigDecimalEquals\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: new BigDecimal(double) loses precision in this case.',
'patterns': [r".*: warning: \[BigDecimalLiteralDouble\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
@@ -735,6 +795,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Providing Closeable resources makes their lifecycle unclear',
+ 'patterns': [r".*: warning: \[CloseableProvides\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: The type of the array parameter of Collection.toArray needs to be compatible with the array type',
'patterns': [r".*: warning: \[CollectionToArraySafeParameter\] .+"]},
{'category': 'java',
@@ -770,6 +835,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Avoid deprecated Thread methods; read the method\'s javadoc for details.',
+ 'patterns': [r".*: warning: \[DeprecatedThreadMethods\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Prefer collection factory methods or builders to the double-brace initialization pattern.',
'patterns': [r".*: warning: \[DoubleBraceInitialization\] .+"]},
{'category': 'java',
@@ -785,6 +855,16 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: equals() implementation may throw NullPointerException when given null',
+ 'patterns': [r".*: warning: \[EqualsBrokenForNull\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Overriding Object#equals in a non-final class by using getClass rather than instanceof breaks substitutability of subclasses.',
+ 'patterns': [r".*: warning: \[EqualsGetClass\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Classes that override equals should also override hashCode.',
'patterns': [r".*: warning: \[EqualsHashCode\] .+"]},
{'category': 'java',
@@ -795,11 +875,26 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: The contract of #equals states that it should return false for incompatible types, while this implementation may throw ClassCastException.',
+ 'patterns': [r".*: warning: \[EqualsUnsafeCast\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Implementing #equals by just comparing hashCodes is fragile. Hashes collide frequently, and this will lead to false positives in #equals.',
+ 'patterns': [r".*: warning: \[EqualsUsingHashCode\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Calls to ExpectedException#expect should always be followed by exactly one statement.',
'patterns': [r".*: warning: \[ExpectedExceptionChecker\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: When only using JUnit Assert\'s static methods, you should import statically instead of extending.',
+ 'patterns': [r".*: warning: \[ExtendingJUnitAssert\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Switch case may fall through',
'patterns': [r".*: warning: \[FallThrough\] .+"]},
{'category': 'java',
@@ -815,6 +910,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: This fuzzy equality check is using a tolerance less than the gap to the next number. You may want a less restrictive tolerance, or to assert equality.',
+ 'patterns': [r".*: warning: \[FloatingPointAssertionWithinEpsilon\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Floating point literal loses precision',
'patterns': [r".*: warning: \[FloatingPointLiteralPrecision\] .+"]},
{'category': 'java',
@@ -875,6 +975,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Including fields in hashCode which are not compared in equals violates the contract of hashCode.',
+ 'patterns': [r".*: warning: \[InconsistentHashCode\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: The ordering of parameters in overloaded methods should be as consistent as possible (when viewed from left to right)',
'patterns': [r".*: warning: \[InconsistentOverloads\] .+"]},
{'category': 'java',
@@ -905,6 +1010,21 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: This @param tag doesn\'t refer to a parameter of the method.',
+ 'patterns': [r".*: warning: \[InvalidParam\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: This tag is invalid.',
+ 'patterns': [r".*: warning: \[InvalidTag\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: The documented method doesn\'t actually throw this checked exception.',
+ 'patterns': [r".*: warning: \[InvalidThrows\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Class should not implement both `Iterable` and `Iterator`',
'patterns': [r".*: warning: \[IterableAndIterator\] .+"]},
{'category': 'java',
@@ -935,11 +1055,21 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Calls to Lock#lock should be immediately followed by a try block which releases the lock.',
+ 'patterns': [r".*: warning: \[LockNotBeforeTry\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Assignment where a boolean expression was expected; use == if this assignment wasn\'t expected or add parentheses for clarity.',
'patterns': [r".*: warning: \[LogicalAssignment\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Math.abs does not always give a positive result. Please consider other methods for positive random numbers.',
+ 'patterns': [r".*: warning: \[MathAbsoluteRandom\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Switches on enum types should either handle all values, or have a default case.',
'patterns': [r".*: warning: \[MissingCasesInEnumSwitch\] .+"]},
{'category': 'java',
@@ -960,6 +1090,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: A collection or proto builder was created, but its values were never accessed.',
+ 'patterns': [r".*: warning: \[ModifiedButNotUsed\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Modifying a collection while iterating over it in a loop may cause a ConcurrentModificationException to be thrown.',
'patterns': [r".*: warning: \[ModifyCollectionInEnhancedForLoop\] .+"]},
{'category': 'java',
@@ -990,6 +1125,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Instead of returning a functional type, return the actual type that the returned function would return and use lambdas at use site.',
+ 'patterns': [r".*: warning: \[NoFunctionalReturnType\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: This update of a volatile variable is non-atomic',
'patterns': [r".*: warning: \[NonAtomicVolatileUpdate\] .+"]},
{'category': 'java',
@@ -1010,6 +1150,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Dereference of possibly-null value',
+ 'patterns': [r".*: warning: \[NullableDereference\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: @Nullable should not be used for primitive types since they cannot be null',
'patterns': [r".*: warning: \[NullablePrimitive\] .+"]},
{'category': 'java',
@@ -1025,6 +1170,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Objects.hashCode(Object o) should not be passed a primitive value',
+ 'patterns': [r".*: warning: \[ObjectsHashCodePrimitive\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Use grouping parenthesis to make the operator precedence explicit',
'patterns': [r".*: warning: \[OperatorPrecedence\] .+"]},
{'category': 'java',
@@ -1070,8 +1220,13 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Protobuf fields cannot be null, so this check is redundant',
- 'patterns': [r".*: warning: \[ProtoFieldPreconditionsCheckNotNull\] .+"]},
+ 'Java: A field on a protocol buffer was set twice in the same chained expression.',
+ 'patterns': [r".*: warning: \[ProtoRedundantSet\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Protos should not be used as a key to a map, in a set, or in a contains method on a descendant of a collection. Protos have non deterministic ordering and proto equality is deep, which is a performance issue.',
+ 'patterns': [r".*: warning: \[ProtosAsKeyOfSetOrMap\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
@@ -1110,7 +1265,12 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- r'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
+ 'Java: Void methods should not have a @return tag.',
+ 'patterns': [r".*: warning: \[ReturnFromVoid\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
'patterns': [r".*: warning: \[ShortCircuitBoolean\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
@@ -1140,11 +1300,21 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: SWIG generated code that can\'t call a C++ destructor will leak memory',
+ 'patterns': [r".*: warning: \[SwigMemoryLeak\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Synchronizing on non-final fields is not safe: if the field is ever updated, different threads may end up locking on different objects.',
'patterns': [r".*: warning: \[SynchronizeOnNonFinalField\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Code that contains System.exit() is untestable.',
+ 'patterns': [r".*: warning: \[SystemExitOutsideMain\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Using @Test(expected=...) is discouraged, since the test will pass if *any* statement in the test method throws the expected exception',
'patterns': [r".*: warning: \[TestExceptionChecker\] .+"]},
{'category': 'java',
@@ -1160,11 +1330,26 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Relying on the thread scheduler is discouraged; see Effective Java Item 72 (2nd edition) / 84 (3rd edition).',
+ 'patterns': [r".*: warning: \[ThreadPriorityCheck\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Three-letter time zone identifiers are deprecated, may be ambiguous, and might not do what you intend; the full IANA time zone ID should be used instead.',
'patterns': [r".*: warning: \[ThreeLetterTimeZoneID\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: An implementation of Object.toString() should never return null.',
+ 'patterns': [r".*: warning: \[ToStringReturnsNull\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: The actual and expected values appear to be swapped, which results in poor assertion failure messages. The actual value should come first.',
+ 'patterns': [r".*: warning: \[TruthAssertExpected\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Truth Library assert is called on a constant.',
'patterns': [r".*: warning: \[TruthConstantAsserts\] .+"]},
{'category': 'java',
@@ -1175,6 +1360,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Type parameter declaration shadows another named type',
+ 'patterns': [r".*: warning: \[TypeNameShadowing\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Type parameter declaration overrides another type parameter already declared',
'patterns': [r".*: warning: \[TypeParameterShadowing\] .+"]},
{'category': 'java',
@@ -1190,21 +1380,46 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Switch handles all enum values; an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
+ 'Java: Collection, Iterable, Multimap, and Queue do not have well-defined equals behavior',
+ 'patterns': [r".*: warning: \[UndefinedEquals\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: Switch handles all enum values: an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
'patterns': [r".*: warning: \[UnnecessaryDefaultInEnumSwitch\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Unnecessary use of grouping parentheses',
+ 'patterns': [r".*: warning: \[UnnecessaryParentheses\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Finalizer may run before native code finishes execution',
'patterns': [r".*: warning: \[UnsafeFinalization\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Prefer `asSubclass` instead of casting the result of `newInstance`, to detect classes of incorrect type before invoking their constructors.This way, if the class is of the incorrect type,it will throw an exception before invoking its constructor.',
+ 'patterns': [r".*: warning: \[UnsafeReflectiveConstructionCast\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Unsynchronized method overrides a synchronized method.',
'patterns': [r".*: warning: \[UnsynchronizedOverridesSynchronized\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: Unused.',
+ 'patterns': [r".*: warning: \[Unused\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
+ 'Java: This catch block catches an exception and re-throws another, but swallows the caught exception rather than setting it as a cause. This can make debugging harder.',
+ 'patterns': [r".*: warning: \[UnusedException\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Java assert is used in test. For testing purposes Assert.* matchers should be used.',
'patterns': [r".*: warning: \[UseCorrectAssertInTests\] .+"]},
{'category': 'java',
@@ -1215,6 +1430,11 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
+ 'Java: variableName and type with the same name would refer to the static field instead of the class',
+ 'patterns': [r".*: warning: \[VariableNameSameAsType\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.MEDIUM,
+ 'description':
'Java: Because of spurious wakeups, Object.wait() and Condition.await() must always be called in a loop',
'patterns': [r".*: warning: \[WaitNotInLoop\] .+"]},
{'category': 'java',
@@ -1230,6 +1450,11 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Use of class, field, or method that is not compatible with legacy Android devices',
+ 'patterns': [r".*: warning: \[AndroidJdkLibsChecker\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Reference equality used to compare arrays',
'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
{'category': 'java',
@@ -1310,11 +1535,16 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
- r'Java: Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
+ 'Java: Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
'patterns': [r".*: warning: \[ComparableType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: this == null is always false, this != null is always true',
+ 'patterns': [r".*: warning: \[ComparingThisWithNull\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: This comparison method violates the contract',
'patterns': [r".*: warning: \[ComparisonContractViolated\] .+"]},
{'category': 'java',
@@ -1395,6 +1625,11 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Comparing different pairs of fields/getters in an equals implementation is probably a mistake.',
+ 'patterns': [r".*: warning: \[EqualsWrongThing\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class, or from an override of the method',
'patterns': [r".*: warning: \[ForOverride\] .+"]},
{'category': 'java',
@@ -1510,6 +1745,11 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Members shouldn\'t be annotated with @Inject if constructor is already annotated @Inject',
+ 'patterns': [r".*: warning: \[InjectOnMemberAndConstructor\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Scope annotation on an interface or abstact class is not allowed',
'patterns': [r".*: warning: \[InjectScopeAnnotationOnInterfaceOrAbstractClass\] .+"]},
{'category': 'java',
@@ -1550,7 +1790,7 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
- r'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
+ 'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
'patterns': [r".*: warning: \[IterablePathParameter\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -1590,6 +1830,11 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Use of class, field, or method that is not compatible with JDK 7',
+ 'patterns': [r".*: warning: \[Java7ApiChecker\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Abstract and default methods are not injectable with javax.inject.Inject',
'patterns': [r".*: warning: \[JavaxInjectOnAbstractMethod\] .+"]},
{'category': 'java',
@@ -1620,6 +1865,11 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: Math.round(Integer) results in truncation',
+ 'patterns': [r".*: warning: \[MathRoundIntLong\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Certain resources in `android.R.string` have names that do not match their content',
'patterns': [r".*: warning: \[MislabeledAndroidString\] .+"]},
{'category': 'java',
@@ -1630,6 +1880,11 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: A terminating method call is required for a test helper to have any effect.',
+ 'patterns': [r".*: warning: \[MissingTestCall\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Use of "YYYY" (week year) in a date pattern without "ww" (week in year). You probably meant to use "yyyy" (year) instead.',
'patterns': [r".*: warning: \[MisusedWeekYear\] .+"]},
{'category': 'java',
@@ -1735,7 +1990,7 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Using ::equals as an incompatible Predicate; the predicate will always return false',
+ 'Java: Using ::equals or ::isInstance as an incompatible Predicate; the predicate will always return false',
'patterns': [r".*: warning: \[PredicateIncompatibleType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -1745,7 +2000,7 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Protobuf fields cannot be null',
+ 'Java: Protobuf fields cannot be null.',
'patterns': [r".*: warning: \[ProtoFieldNullComparison\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -1835,6 +2090,11 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
+ 'Java: String.substring(0) returns the original String',
+ 'patterns': [r".*: warning: \[SubstringOfZero\] .+"]},
+ {'category': 'java',
+ 'severity': Severity.HIGH,
+ 'description':
'Java: Suppressing "deprecated" is probably a typo for "deprecation"',
'patterns': [r".*: warning: \[SuppressWarningsDeprecated\] .+"]},
{'category': 'java',