Merge "Add UNSAFE_DISABLE_HIDDENAPI_FLAGS to skip hiddenapi processing"
diff --git a/Changes.md b/Changes.md
index baa5e6e..4aa7ea2 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,31 @@
 # Build System Changes for Android.mk Writers
 
+## `DIST_DIR`, `dist_goal`, and `dist-for-goals`  {#dist}
+
+`DIST_DIR` and `dist_goal` are no longer available when reading Android.mk
+files (or other build tasks). Always use `dist-for-goals` instead, which takes
+a PHONY goal and a list of files to copy to `$DIST_DIR`. Whenever `dist` is
+specified, and the goal would be built (either explicitly on the command line,
+or as a dependency of something on the command line), those files will be copied
+into `$DIST_DIR`. For example,
+
+``` make
+$(call dist-for-goals,foo,bar/baz)
+```
+
+will copy `bar/baz` into `$DIST_DIR/baz` when `m foo dist` is run.
+
+#### Renames during copy
+
+Instead of specifying just a file, a destination name can be specified,
+including subdirectories:
+
+``` make
+$(call dist-for-goals,foo,bar/baz:logs/foo.log)
+```
+
+will copy `bar/baz` into `$DIST_DIR/logs/foo.log` when `m foo dist` is run.
+
 ## `.PHONY` rule enforcement  {#phony_targets}
 
 There are several new warnings/errors meant to ensure the proper use of
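
For Android.mk authors migrating off the removed variables, a minimal sketch of the before/after is shown below. The names are illustrative (`droid` is an existing PHONY goal, `$(LOCAL_BUILT_MODULE)` the usual built artifact); this is not prescribing a specific module's setup.

``` make
# Old pattern, no longer possible: dist_goal and DIST_DIR are obsolete
# while Android.mk files are being read.
#   ifdef dist_goal
#     $(shell cp $(LOCAL_BUILT_MODULE) $(DIST_DIR)/)
#   endif

# New pattern: register the file against a PHONY goal; the copy into
# $DIST_DIR happens only when "dist" is requested and the goal is built.
$(call dist-for-goals,droid,$(LOCAL_BUILT_MODULE))
```
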
diff --git a/CleanSpec.mk b/CleanSpec.mk
index a96dd83..39441e1 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -509,6 +509,12 @@
   $(HOST_CROSS_OUT_INTERMEDIATES) $(2ND_HOST_CROSS_OUT_INTERMEDIATES) \
   $(TARGET_OUT_INTERMEDIATES) $(2ND_TARGET_OUT_INTERMEDIATES)))
 
+# Remove strip.sh intermediates to save space
+$(call add-clean-step, find $(OUT_DIR) \( -name "*.so.debug" -o -name "*.so.dynsyms" -o -name "*.so.funcsyms" -o -name "*.so.keep_symbols" -o -name "*.so.mini_debuginfo.xz" \) -print0 | xargs -0 rm -f)
+
+# Clean up old ninja files
+$(call add-clean-step, rm -f $(OUT_DIR)/build-*-dist*.ninja)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/common/core.mk b/common/core.mk
new file mode 100644
index 0000000..e5264b0
--- /dev/null
+++ b/common/core.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Only use ANDROID_BUILD_SHELL to wrap around bash.
+# DO NOT use other shells such as zsh.
+ifdef ANDROID_BUILD_SHELL
+SHELL := $(ANDROID_BUILD_SHELL)
+else
+# Use bash, not whatever shell somebody has installed as /bin/sh
+# This is repeated from main.mk, since envsetup.sh runs this file
+# directly.
+SHELL := /bin/bash
+endif
+
+# Utility variables.
+empty :=
+space := $(empty) $(empty)
+comma := ,
+# Note that make will eat the newline just before endef.
+define newline
+
+
+endef
+# The pound character "#"
+define pound
+#
+endef
+# Unfortunately you can't simply define backslash as \ or \\.
+backslash := \a
+backslash := $(patsubst %a,%,$(backslash))
+
+# Prevent accidentally changing these variables
+.KATI_READONLY := SHELL empty space comma newline pound backslash
+
+# Basic warning/error wrappers. These will be redefined to include the local
+# module information when reading Android.mk files.
+define pretty-warning
+$(warning $(1))
+endef
+
+define pretty-error
+$(error $(1))
+endef
diff --git a/core/math.mk b/common/math.mk
similarity index 100%
rename from core/math.mk
rename to common/math.mk
diff --git a/core/strings.mk b/common/strings.mk
similarity index 100%
rename from core/strings.mk
rename to common/strings.mk
diff --git a/core/Makefile b/core/Makefile
index dabb86d..fe728d6 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1921,7 +1921,7 @@
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) $(TARGET_OUT) \
       $(systemimage_intermediates)/generated_system_image_info.txt \
-      || ( mkdir -p $$(DIST_DIR); cp $(INSTALLED_FILES_FILE) $$(DIST_DIR)/installed-files-rescued.txt; \
+      || ( mkdir -p $${DIST_DIR}; cp $(INSTALLED_FILES_FILE) $${DIST_DIR}/installed-files-rescued.txt; \
            exit 1 )
 endef
 
@@ -3945,7 +3945,7 @@
 MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_verified_boot_params.sh
 $(QEMU_VERIFIED_BOOT_PARAMS): $(INSTALLED_QEMU_SYSTEMIMAGE) $(MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH) $(INSTALLED_VBMETAIMAGE_TARGET) $(SGDISK_HOST) $(AVBTOOL)
 	@echo Creating $@
-	(export SGDISK=$(SGDISK_HOST) AVBTOOL=$(AVBTOOL); $(MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH) $(INSTALLED_SYSTEMIMAGE_TARGET) $(INSTALLED_QEMU_SYSTEMIMAGE) $(QEMU_VERIFIED_BOOT_PARAMS))
+	(export SGDISK=$(SGDISK_HOST) AVBTOOL=$(AVBTOOL); $(MK_VERIFIED_BOOT_KERNEL_CMDLINE_SH) $(INSTALLED_VBMETAIMAGE_TARGET) $(INSTALLED_QEMU_SYSTEMIMAGE) $(QEMU_VERIFIED_BOOT_PARAMS))
 
 
 systemimage: $(QEMU_VERIFIED_BOOT_PARAMS)
diff --git a/core/config.mk b/core/config.mk
index aeb8aee..0e4e1fb 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -17,40 +17,20 @@
 $(error done)
 endif
 
-# Only use ANDROID_BUILD_SHELL to wrap around bash.
-# DO NOT use other shells such as zsh.
-ifdef ANDROID_BUILD_SHELL
-SHELL := $(ANDROID_BUILD_SHELL)
-else
-# Use bash, not whatever shell somebody has installed as /bin/sh
-# This is repeated from main.mk, since envsetup.sh runs this file
-# directly.
-SHELL := /bin/bash
-endif
+BUILD_SYSTEM :=$= build/make/core
+BUILD_SYSTEM_COMMON :=$= build/make/common
 
-# Utility variables.
-empty :=
-space := $(empty) $(empty)
-comma := ,
-# Note that make will eat the newline just before endef.
-define newline
-
-
-endef
-# The pound character "#"
-define pound
-#
-endef
-# Unfortunately you can't simply define backslash as \ or \\.
-backslash := \a
-backslash := $(patsubst %a,%,$(backslash))
-
-# Prevent accidentally changing these variables
-.KATI_READONLY := SHELL empty space comma newline pound backslash
+include $(BUILD_SYSTEM_COMMON)/core.mk
 
 # Mark variables that should be coming as environment variables from soong_ui
 # as readonly
-.KATI_READONLY := OUT_DIR TMPDIR BUILD_DATETIME_FILE DIST_DIR
+.KATI_READONLY := OUT_DIR TMPDIR BUILD_DATETIME_FILE
+ifdef CALLED_FROM_SETUP
+  .KATI_READONLY := CALLED_FROM_SETUP
+endif
+ifdef KATI_PACKAGE_MK_DIR
+  .KATI_READONLY := KATI_PACKAGE_MK_DIR
+endif
 
 # Mark variables deprecated/obsolete
 CHANGES_URL := https://android.googlesource.com/platform/build/+/master/Changes.md
@@ -109,6 +89,7 @@
   TARGET_NDK_GCC_VERSION 2ND_TARGET_NDK_GCC_VERSION \
   GLOBAL_CFLAGS_NO_OVERRIDE GLOBAL_CPPFLAGS_NO_OVERRIDE \
   ,GCC support has been removed. Use Clang instead)
+$(KATI_obsolete_var DIST_DIR dist_goal,Use dist-for-goals instead. See $(CHANGES_URL)#dist)
 
 # This is marked as obsolete in envsetup.mk after reading the BoardConfig.mk
 $(KATI_deprecate_export It is a global setting. See $(CHANGES_URL)#export_keyword)
@@ -121,9 +102,6 @@
 
 ORIGINAL_MAKECMDGOALS := $(MAKECMDGOALS)
 
-dist_goal := $(strip $(filter dist,$(MAKECMDGOALS)))
-MAKECMDGOALS := $(strip $(filter-out dist,$(MAKECMDGOALS)))
-
 UNAME := $(shell uname -sm)
 
 SRC_TARGET_DIR := $(TOPDIR)build/target
@@ -138,9 +116,9 @@
 
 # Set up efficient math functions which are used in make.
 # Here since this file is included by envsetup as well as during build.
-include $(BUILD_SYSTEM)/math.mk
+include $(BUILD_SYSTEM_COMMON)/math.mk
 
-include $(BUILD_SYSTEM)/strings.mk
+include $(BUILD_SYSTEM_COMMON)/strings.mk
 
 # Various mappings to avoid hard-coding paths all over the place
 include $(BUILD_SYSTEM)/pathmap.mk
diff --git a/core/definitions.mk b/core/definitions.mk
index bb785ec..fd55fe6 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -806,29 +806,13 @@
 echo -e "$(ESC_BOLD)$(1): $(ESC_ERROR)error:$(ESC_RESET)$(ESC_BOLD)" $(2) "$(ESC_RESET)" >&2
 endef
 
-# $(1): message to print
-define pretty-warning
-$(shell $(call echo-warning,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
-endef
-
-# $(1): message to print
-define pretty-error
-$(shell $(call echo-error,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
-$(error done)
-endef
-
 ###########################################################
-## Output the command lines, or not
+## Legacy showcommands compatibility
 ###########################################################
 
-ifeq ($(strip $(SHOW_COMMANDS)),)
 define pretty
 @echo $1
 endef
-else
-define pretty
-endef
-endif
 
 ###########################################################
 ## Commands for including the dependency files the compiler generates
diff --git a/core/distdir.mk b/core/distdir.mk
index c074186..5f40407 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -17,52 +17,53 @@
 # When specifying "dist", the user has asked that we copy the important
 # files from this build into DIST_DIR.
 
-ifdef dist_goal
-
-# $(1): source file
-# $(2): destination file
-# $(3): goals that should copy the file
-#
-define copy-one-dist-file
-$(3): $(2)
-$(2): $(1)
-	@echo "Dist: $$@"
-	$$(copy-file-to-new-target-with-cp)
-endef
-
-# A global variable to remember all dist'ed src:dst pairs.
-# So if a src:dst is already dist'ed by another goal,
-# we should just establish the dependency and don't really call the
-# copy-one-dist-file to avoid multiple rules for the same target.
+# list of all goals that depend on any dist files
+_all_dist_goals :=
+# pairs of goal:distfile
+_all_dist_goal_output_pairs :=
+# pairs of srcfile:distfile
 _all_dist_src_dst_pairs :=
+
 # Other parts of the system should use this function to associate
 # certain files with certain goals.  When those goals are built
 # and "dist" is specified, the marked files will be copied to DIST_DIR.
 #
-# $(1): a list of goals  (e.g. droid, sdk, pdk, ndk)
+# $(1): a list of goals  (e.g. droid, sdk, pdk, ndk). These must be PHONY
 # $(2): the dist files to add to those goals.  If the file contains ':',
 #       the text following the colon is the name that the file is copied
 #       to under the dist directory.  Subdirs are ok, and will be created
 #       at copy time if necessary.
 define dist-for-goals
+$(if $(strip $(2)), \
+  $(eval _all_dist_goals += $$(1))) \
 $(foreach file,$(2), \
-  $(eval fw := $(subst :,$(space),$(file))) \
-  $(eval src := $(word 1,$(fw))) \
-  $(eval dst := $(word 2,$(fw))) \
-  $(eval dst := $(if $(dst),$(dst),$(notdir $(src)))) \
-  $(if $(filter $(_all_dist_src_dst_pairs),$(src):$(dst)),\
-    $(eval $(call add-dependency,$(1),$(DIST_DIR)/$(dst))),\
-    $(eval $(call copy-one-dist-file,\
-      $(src),$(DIST_DIR)/$(dst),$(1)))\
-      $(eval _all_dist_src_dst_pairs += $(src):$(dst))\
-  )\
-)
+  $(eval src := $(call word-colon,1,$(file))) \
+  $(eval dst := $(call word-colon,2,$(file))) \
+  $(if $(dst),,$(eval dst := $$(notdir $$(src)))) \
+  $(eval _all_dist_src_dst_pairs += $$(src):$$(dst)) \
+  $(foreach goal,$(1), \
+    $(eval _all_dist_goal_output_pairs += $$(goal):$$(dst))))
 endef
 
-else # !dist_goal
+#------------------------------------------------------------------
+# To be used at the end of the build to collect all the uses of
+# dist-for-goals, and write them into a file for the packaging step to use.
 
-# empty definition when not building dist
-define dist-for-goals
+# $(1): The file to write
+define dist-write-file
+$(strip \
+  $(KATI_obsolete_var dist-for-goals,Cannot be used after dist-write-file) \
+  $(foreach goal,$(sort $(_all_dist_goals)), \
+    $(eval $$(goal): _dist_$$(goal))) \
+  $(shell mkdir -p $(dir $(1))) \
+  $(file >$(1).tmp, \
+    DIST_GOAL_OUTPUT_PAIRS := $(sort $(_all_dist_goal_output_pairs)) \
+    $(newline)DIST_SRC_DST_PAIRS := $(sort $(_all_dist_src_dst_pairs))) \
+  $(shell if ! cmp -s $(1).tmp $(1); then \
+            mv $(1).tmp $(1); \
+          else \
+            rm $(1).tmp; \
+          fi))
 endef
 
-endif # !dist_goal
+.KATI_READONLY := dist-for-goals dist-write-file
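
As a rough illustration of the handoff (goals and paths are hypothetical), two calls such as `$(call dist-for-goals,droidcore,out/foo.txt)` and `$(call dist-for-goals,sdk,out/bar.zip:sdk/bar.zip)` would lead `dist-write-file` to emit something along these lines into `$(KATI_PACKAGE_MK_DIR)/dist.mk`, which the packaging step reads back:

``` make
DIST_GOAL_OUTPUT_PAIRS := droidcore:foo.txt sdk:sdk/bar.zip
DIST_SRC_DST_PAIRS := out/bar.zip:sdk/bar.zip out/foo.txt:foo.txt
```
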
diff --git a/core/main.mk b/core/main.mk
index f26b2bb..6ff5f93 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -36,8 +36,6 @@
 TOP := .
 TOPDIR :=
 
-BUILD_SYSTEM := $(TOPDIR)build/make/core
-
 # This is the default target.  It must be the first declared target.
 .PHONY: droid
 DEFAULT_GOAL := droid
@@ -48,7 +46,7 @@
 
 # Set up various standard variables based on configuration
 # and host information.
-include $(BUILD_SYSTEM)/config.mk
+include build/make/core/config.mk
 
 ifneq ($(filter $(dont_bother_goals), $(MAKECMDGOALS)),)
 dont_bother := true
@@ -419,6 +417,19 @@
 ENFORCE_RRO_SOURCES :=
 endif
 
+# Color-coded warnings including current module info
+# $(1): message to print
+define pretty-warning
+$(shell $(call echo-warning,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
+endef
+
+# Color-coded errors including current module info
+# $(1): message to print
+define pretty-error
+$(shell $(call echo-error,$(LOCAL_MODULE_MAKEFILE),$(LOCAL_MODULE): $(1)))
+$(error done)
+endef
+
 subdir_makefiles_inc := .
 FULL_BUILD :=
 
@@ -493,6 +504,18 @@
 # -------------------------------------------------------------------
 
 # -------------------------------------------------------------------
+# Use basic warning/error messages now that LOCAL_MODULE_MAKEFILE
+# and LOCAL_MODULE aren't useful anymore.
+# -------------------------------------------------------------------
+define pretty-warning
+$(warning $(1))
+endef
+
+define pretty-error
+$(error $(1))
+endef
+
+# -------------------------------------------------------------------
 # Enforce to generate all RRO packages for modules having resource
 # overlays.
 # -------------------------------------------------------------------
@@ -1010,8 +1033,22 @@
 
 ifdef FULL_BUILD
   product_FILES := $(call product-installed-files, $(INTERNAL_PRODUCT))
+  # WARNING: The product_MODULES variable is depended on by external files.
+  product_MODULES := $(_pif_modules)
 
   # Verify the artifact path requirements made by included products.
+
+  # Fakes don't get installed, and host files are irrelevant.
+  static_whitelist_patterns := $(TARGET_OUT_FAKE)/% $(HOST_OUT)/%
+  # RROs become REQUIRED by the source module, but are always placed on the vendor partition.
+  static_whitelist_patterns += %__auto_generated_rro.apk
+  ifeq (true,$(BOARD_USES_SYSTEM_OTHER_ODEX))
+    # Allow system_other odex space optimization.
+    static_whitelist_patterns += \
+      $(TARGET_OUT_SYSTEM_OTHER)/%.odex \
+      $(TARGET_OUT_SYSTEM_OTHER)/%.vdex \
+      $(TARGET_OUT_SYSTEM_OTHER)/%.art
+  endif
   all_offending_files :=
   $(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
     $(eval requirements := $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENTS)) \
@@ -1020,10 +1057,7 @@
     $(eval path_patterns := $(call resolve-product-relative-paths,$(requirements),%)) \
     $(eval whitelist_patterns := $(call resolve-product-relative-paths,$(whitelist))) \
     $(eval files := $(call product-installed-files, $(makefile))) \
-    $(eval files := $(filter-out $(TARGET_OUT_FAKE)/% $(HOST_OUT)/%,$(files))) \
-    $(eval # RROs become REQUIRED by the source module, but are always placed on the vendor partition.) \
-    $(eval files := $(filter-out %__auto_generated_rro.apk,$(files))) \
-    $(eval offending_files := $(filter-out $(path_patterns) $(whitelist_patterns),$(files))) \
+    $(eval offending_files := $(filter-out $(path_patterns) $(whitelist_patterns) $(static_whitelist_patterns),$(files))) \
     $(call maybe-print-list-and-error,$(offending_files),$(makefile) produces files outside its artifact path requirement.) \
     $(eval unused_whitelist := $(filter-out $(files),$(whitelist_patterns))) \
     $(call maybe-print-list-and-error,$(unused_whitelist),$(makefile) includes redundant whitelist entries in its artifact path requirement.) \
@@ -1458,6 +1492,8 @@
 ndk: $(SOONG_OUT_DIR)/ndk.timestamp
 .PHONY: ndk
 
+$(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
+
 $(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
 
 endif # KATI
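
The module-aware `pretty-error`/`pretty-warning` wrappers are only in effect while Android.mk files are being read; afterwards they fall back to the plain `$(error)`/`$(warning)` forms from common/core.mk. A hypothetical Android.mk fragment (module name made up) shows how a failure surfaces through `echo-error`:

``` make
# Hypothetical Android.mk fragment.
LOCAL_MODULE := libfoo_example

ifeq ($(strip $(LOCAL_MODULE_CLASS)),)
  # Reported (in color) as roughly "<path>/Android.mk: error: libfoo_example: <message>".
  $(call pretty-error,LOCAL_MODULE_CLASS must be set)
endif
```
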
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index ca2dcee..684ab9f 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -7,7 +7,7 @@
 KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
 
 # Modifier goals we don't need to pass to Ninja.
-NINJA_EXCLUDE_GOALS := all dist APP-% PRODUCT-%
+NINJA_EXCLUDE_GOALS := all APP-% PRODUCT-%
 
 # A list of goals which affect parsing of makefiles and we need to pass to Kati.
 PARSE_TIME_MAKE_GOALS := \
@@ -28,7 +28,6 @@
 	custom_images \
 	deps-license \
 	dicttool_aosp \
-	dist \
 	dump-products \
 	eng \
 	fusion \
diff --git a/core/soong_config.mk b/core/soong_config.mk
index e61aad0..2d7c0d9 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -141,8 +141,6 @@
 $(call add_json_bool, UseGoma,                           $(filter-out false,$(USE_GOMA)))
 $(call add_json_bool, Arc,                               $(filter true,$(TARGET_ARC)))
 
-$(call add_json_str,  DistDir,                           $(if $(dist_goal), $(DIST_DIR)))
-
 $(call add_json_list, NamespacesToExport,                $(PRODUCT_SOONG_NAMESPACES))
 
 $(call add_json_list, PgoAdditionalProfileDirs,          $(PGO_ADDITIONAL_PROFILE_DIRS))
diff --git a/core/tasks/check_emu_boot.mk b/core/tasks/check_emu_boot.mk
deleted file mode 100644
index 4870677..0000000
--- a/core/tasks/check_emu_boot.mk
+++ /dev/null
@@ -1,23 +0,0 @@
-check_emu_boot0 := $(DIST_DIR)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)-emulator-boot-test-result.txt
-$(check_emu_boot0) : PRIVATE_PREFIX := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
-$(check_emu_boot0) : PRIVATE_EMULATOR_BOOT_TEST_SH := device/generic/goldfish/tools/emulator_boot_test.sh
-$(check_emu_boot0) : PRIVATE_BOOT_COMPLETE_STRING := "emulator: INFO: boot completed"
-$(check_emu_boot0) : PRIVATE_BOOT_FAIL_STRING := "emulator: ERROR: fail to boot after"
-$(check_emu_boot0) : PRIVATE_SUCCESS_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-SUCCESS.txt
-$(check_emu_boot0) : PRIVATE_FAIL_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-FAIL.txt
-$(check_emu_boot0) : $(INSTALLED_QEMU_SYSTEMIMAGE)  $(INSTALLED_QEMU_VENDORIMAGE) \
-                 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(PRODUCT_OUT)/userdata.img) \
-                 $(PRODUCT_OUT)/ramdisk.img device/generic/goldfish/tools/emulator_boot_test.sh
-	@mkdir -p $(dir $(check_emu_boot0))
-	$(hide) rm -f $(check_emu_boot0)
-	$(hide) rm -f $(PRIVATE_SUCCESS_FILE)
-	$(hide) rm -f $(PRIVATE_FAIL_FILE)
-	(export ANDROID_PRODUCT_OUT=$$(cd $(PRODUCT_OUT);pwd);\
-		export ANDROID_BUILD_TOP=$$(pwd);\
-		$(PRIVATE_EMULATOR_BOOT_TEST_SH) > $(check_emu_boot0))
-	(if grep -q $(PRIVATE_BOOT_COMPLETE_STRING) $(check_emu_boot0);\
-	then echo boot_succeeded > $(PRIVATE_SUCCESS_FILE); fi)
-	(if grep -q $(PRIVATE_BOOT_FAIL_STRING) $(check_emu_boot0);\
-	then echo boot_failed > $(PRIVATE_FAIL_FILE); fi)
-.PHONY: check_emu_boot
-check_emu_boot: $(check_emu_boot0)
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index 8baac5a..93fea4e 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -70,6 +70,7 @@
 	$(addon_dir_img):$(INSTALLED_QEMU_VENDORIMAGE):images/$(TARGET_CPU_ABI)/vendor.img \
 	$(addon_dir_img):$(BUILT_RAMDISK_TARGET):images/$(TARGET_CPU_ABI)/ramdisk.img \
 	$(addon_dir_img):$(PRODUCT_OUT)/system/build.prop:images/$(TARGET_CPU_ABI)/build.prop \
+	$(addon_dir_img):device/generic/goldfish/data/etc/userdata.img:images/$(TARGET_CPU_ABI)/userdata.img \
 	$(addon_dir_img):$(target_notice_file_txt):images/$(TARGET_CPU_ABI)/NOTICE.txt \
 	$(addon_dir_img):$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP):images/source.properties
 
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index e3cf13d..42a3bea 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -249,7 +249,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2018-08-05
+      PLATFORM_SECURITY_PATCH := 2018-09-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/packaging/distdir.mk b/packaging/distdir.mk
new file mode 100644
index 0000000..264a8b0
--- /dev/null
+++ b/packaging/distdir.mk
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# From the Android.mk pass:
+DIST_GOAL_OUTPUT_PAIRS :=
+DIST_SRC_DST_PAIRS :=
+include $(KATI_PACKAGE_MK_DIR)/dist.mk
+
+$(foreach pair,$(DIST_GOAL_OUTPUT_PAIRS), \
+  $(eval goal := $(call word-colon,1,$(pair))) \
+  $(eval output := $(call word-colon,2,$(pair))) \
+  $(eval .PHONY: _dist_$$(goal)) \
+  $(if $(call streq,$(DIST),true),\
+    $(eval _dist_$$(goal): $$(DIST_DIR)/$$(output)), \
+    $(eval _dist_$$(goal):)))
+
+define copy-one-dist-file
+$(2): $(1)
+	@echo "Dist: $$@"
+	rm -f $$@
+	cp $$< $$@
+endef
+
+ifeq ($(DIST),true)
+  $(foreach pair,$(DIST_SRC_DST_PAIRS), \
+    $(eval src := $(call word-colon,1,$(pair))) \
+    $(eval dst := $(DIST_DIR)/$(call word-colon,2,$(pair))) \
+    $(eval $(call copy-one-dist-file,$(src),$(dst))))
+endif
+
+copy-one-dist-file :=
+DIST_GOAL_OUTPUT_PAIRS :=
+DIST_SRC_DST_PAIRS :=
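
Continuing the hypothetical dist.mk sketched above, with `DIST=true` the packaging step would expand those pairs into rules roughly like the following (the `droidcore: _dist_droidcore` edge itself is added by `dist-write-file` during the Kati step):

``` make
.PHONY: _dist_droidcore
_dist_droidcore: $(DIST_DIR)/foo.txt

$(DIST_DIR)/foo.txt: out/foo.txt
	@echo "Dist: $@"
	rm -f $@
	cp $< $@
```
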
diff --git a/packaging/main.mk b/packaging/main.mk
new file mode 100644
index 0000000..0b746a8
--- /dev/null
+++ b/packaging/main.mk
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Create a default rule. This is unused currently, as the real default rule is
+# still in the Kati build step.
+.PHONY: _packaging_default_rule_
+_packaging_default_rule_:
+
+ifndef KATI
+$(error Only Kati is supported.)
+endif
+
+$(info [1/3] initializing packaging system ...)
+
+.KATI_READONLY := KATI_PACKAGE_MK_DIR
+
+include build/make/common/core.mk
+include build/make/common/strings.mk
+
+$(info [2/3] including distdir.mk ...)
+
+include build/make/packaging/distdir.mk
+
+$(info [3/3] writing packaging rules ...)
diff --git a/target/board/generic/device.mk b/target/board/generic/device.mk
index a75bd07..0a32415 100644
--- a/target/board/generic/device.mk
+++ b/target/board/generic/device.mk
@@ -14,17 +14,6 @@
 # limitations under the License.
 #
 
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
-    device/generic/goldfish/camera/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_audio.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_telephony.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_video.xml \
-    device/generic/goldfish/camera/media_codecs.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs.xml \
-    hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
-
 # NFC:
 #   Provide default libnfc-nci.conf file for devices that does not have one in
 #   vendor/etc because aosp system image (of aosp_$arch products) is going to
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 25e51ba..1b6429c 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -61,7 +61,7 @@
 # cleaned up all device specific directories under root!
 
 # TODO(b/111434759, b/111287060) SoC specific hacks
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
 BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/persist:/persist
 BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt:/firmware
 
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index 8bd6a8b..2004624 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -14,16 +14,6 @@
 # limitations under the License.
 #
 
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
-    device/generic/goldfish/camera/media_profiles.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_profiles.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_audio.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_telephony.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs_google_video.xml \
-    device/generic/goldfish/camera/media_codecs.xml:$(TARGET_COPY_OUT_VENDOR)/etc/media_codecs.xml
-
 # NFC:
 #   Provide default libnfc-nci.conf file for devices that does not have one in
 #   vendor/etc because aosp system image (of aosp_$arch products) is going to
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index fc6b582..88b90a8 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -34,7 +34,9 @@
 
 # TODO(jiyong) These might be SoC specific.
 BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
 
 # Set this to create /cache mount point for non-A/B devices that mounts /cache.
 # The partition size doesn't matter, just to make build pass.
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
index 7d9ea9c..3d14842 100644
--- a/target/board/generic_arm_ab/BoardConfig.mk
+++ b/target/board/generic_arm_ab/BoardConfig.mk
@@ -28,7 +28,9 @@
 
 # TODO(jiyong) These might be SoC specific.
 BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
+BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
 
 # Set this to create /cache mount point for non-A/B devices that mounts /cache.
 # The partition size doesn't matter, just to make build pass.
diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk
index fa2d472..0a32415 100644
--- a/target/board/generic_x86/device.mk
+++ b/target/board/generic_x86/device.mk
@@ -14,16 +14,6 @@
 # limitations under the License.
 #
 
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
-    device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
-    device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
-
 # NFC:
 #   Provide default libnfc-nci.conf file for devices that does not have one in
 #   vendor/etc because aosp system image (of aosp_$arch products) is going to
@@ -32,7 +22,3 @@
 #   NFC configuration file should be in vendor/etc, instead of system/etc
 PRODUCT_COPY_FILES += \
     device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
-
-PRODUCT_PACKAGES := \
-    audio.primary.goldfish \
-    vibrator.goldfish
diff --git a/target/board/generic_x86_64/device.mk b/target/board/generic_x86_64/device.mk
index fa2d472..0a32415 100755
--- a/target/board/generic_x86_64/device.mk
+++ b/target/board/generic_x86_64/device.mk
@@ -14,16 +14,6 @@
 # limitations under the License.
 #
 
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-PRODUCT_COPY_FILES := \
-    device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
-    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
-    device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
-
 # NFC:
 #   Provide default libnfc-nci.conf file for devices that does not have one in
 #   vendor/etc because aosp system image (of aosp_$arch products) is going to
@@ -32,7 +22,3 @@
 #   NFC configuration file should be in vendor/etc, instead of system/etc
 PRODUCT_COPY_FILES += \
     device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
-
-PRODUCT_PACKAGES := \
-    audio.primary.goldfish \
-    vibrator.goldfish
diff --git a/target/board/generic_x86_arm/BoardConfig.mk b/target/board/generic_x86_arm/BoardConfig.mk
index d1e4884..8e70b25 100644
--- a/target/board/generic_x86_arm/BoardConfig.mk
+++ b/target/board/generic_x86_arm/BoardConfig.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2016 The Android Open Source Project
+# Copyright (C) 2018 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,11 +13,7 @@
 # limitations under the License.
 #
 
-# Configuration for generic_x86 + arm libraries needed by binary translation.
-
-# The generic product target doesn't have any hardware-specific pieces.
-TARGET_NO_BOOTLOADER := true
-TARGET_NO_KERNEL := true
+# x86 emulator specific definitions
 TARGET_CPU_ABI := x86
 TARGET_ARCH := x86
 TARGET_ARCH_VARIANT := x86
@@ -28,39 +24,27 @@
 TARGET_2ND_ARCH_VARIANT := armv7-a
 TARGET_2ND_CPU_VARIANT := generic
 
-# Tell the build system this isn't a typical 64bit+32bit multilib configuration.
+TARGET_CPU_ABI_LIST := x86 armeabi-v7a armeabi
 TARGET_TRANSLATE_2ND_ARCH := true
 
 BUILD_BROKEN_DUP_RULES := true
 
-# no hardware camera
-USE_CAMERA_STUB := true
 
-# Enable dex-preoptimization to speed up the first boot sequence
-# of an SDK AVD. Note that this operation only works on Linux for now
-ifeq ($(HOST_OS),linux)
-  ifeq ($(WITH_DEXPREOPT),)
-    WITH_DEXPREOPT := true
-    WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY := false
-  endif
-endif
+include build/make/target/board/BoardConfigEmuCommon.mk
+include build/make/target/board/BoardConfigGsiCommon.mk
 
-TARGET_USES_HWC2 := true
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
+# Resize to 4G to accommodate ASAN and CTS
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
 
-# Build OpenGLES emulation host and guest libraries
-BUILD_EMULATOR_OPENGL := true
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
 
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
-
-TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192  # 1.75 GB
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
diff --git a/target/board/generic_x86_arm/README.txt b/target/board/generic_x86_arm/README.txt
new file mode 100644
index 0000000..05f7ca2
--- /dev/null
+++ b/target/board/generic_x86_arm/README.txt
@@ -0,0 +1,10 @@
+The "generic_x86_arm" product defines a non-hardware-specific IA target
+without a kernel or bootloader.
+
+It can be used to build the entire user-level system, and
+will work with the IA version of the emulator.
+
+It is not a product "base class"; no other products inherit
+from it or use it in any way.
+
+A third-party ARM-to-x86 translator has to be installed as well.
diff --git a/target/board/generic_x86_arm/device.mk b/target/board/generic_x86_arm/device.mk
new file mode 100644
index 0000000..0a32415
--- /dev/null
+++ b/target/board/generic_x86_arm/device.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# NFC:
+#   Provide default libnfc-nci.conf file for devices that does not have one in
+#   vendor/etc because aosp system image (of aosp_$arch products) is going to
+#   be used as GSI.
+#   May need to remove the following for newly launched devices in P since this
+#   NFC configuration file should be in vendor/etc, instead of system/etc
+PRODUCT_COPY_FILES += \
+    device/generic/common/nfc/libnfc-nci.conf:system/etc/libnfc-nci.conf
diff --git a/target/board/generic_x86_arm/system.prop b/target/board/generic_x86_arm/system.prop
new file mode 100644
index 0000000..64829f3
--- /dev/null
+++ b/target/board/generic_x86_arm/system.prop
@@ -0,0 +1,5 @@
+#
+# system.prop for generic sdk
+#
+
+rild.libpath=/vendor/lib/libreference-ril.so
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index 19f57e8..b921c97 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -17,27 +17,32 @@
 
 # aosp_x86 with arm libraries needed by binary translation.
 
+# The system image of aosp_x86-userdebug is a GSI for the devices with:
+# - x86 32 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
+-include device/generic/goldfish/x86-vendor.mk
+
 include $(SRC_TARGET_DIR)/product/full_x86.mk
 
-# arm libraries. This is the list of shared libraries included in the NDK.
-# Their dependency libraries will be automatically pulled in.
+# Enable dynamic partition size
+PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+
+# Enable A/B update
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS := system
 PRODUCT_PACKAGES += \
-  libandroid_arm \
-  libaaudio_arm \
-  libc_arm \
-  libdl_arm \
-  libEGL_arm \
-  libGLESv1_CM_arm \
-  libGLESv2_arm \
-  libGLESv3_arm \
-  libjnigraphics_arm \
-  liblog_arm \
-  libm_arm \
-  libmediandk_arm \
-  libOpenMAXAL_arm \
-  libstdc++_arm \
-  libOpenSLES_arm \
-  libz_arm \
+    update_engine \
+    update_verifier
+
+# Needed by newly launched P devices to pass VtsTrebleSysProp on GSI
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
+
+# Support additional P vendor interface
+PRODUCT_EXTRA_VNDK_VERSIONS := 28
 
 PRODUCT_NAME := aosp_x86_arm
 PRODUCT_DEVICE := generic_x86_arm
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 57e981f..a3c9ac7 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -74,6 +74,8 @@
     fsck_msdos \
     fs_config_files_system \
     fs_config_dirs_system \
+    heapprofd \
+    heapprofd_client \
     gatekeeperd \
     healthd \
     hid \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index d18fd62..ed6dcc9 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -27,6 +27,7 @@
     DMService \
     LiveWallpapersPicker \
     PartnerBookmarksProvider \
+    PresencePolling \
     RcsService \
     SafetyRegulatoryInfo \
     Stk \
@@ -68,15 +69,14 @@
 
 PRODUCT_NAME := mainline_system
 PRODUCT_BRAND := generic
-PRODUCT_SHIPPING_API_LEVEL := 28
 
 _base_mk_whitelist :=
 
 _my_whitelist := $(_base_mk_whitelist)
 
-# Both /system and / are in system.img when PRODUCT_SHIPPING_API_LEVEL>=28.
+# For mainline, system.img should be mounted at /, so we include ROOT here.
 _my_paths := \
-  $(TARGET_COPY_OUT_ROOT) \
-  $(TARGET_COPY_OUT_SYSTEM) \
+  $(TARGET_COPY_OUT_ROOT)/ \
+  $(TARGET_COPY_OUT_SYSTEM)/ \
 
 $(call require-artifacts-in-path, $(_my_paths), $(_my_whitelist))
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 1b1ec20..1e8677c 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -46,6 +46,7 @@
 from __future__ import print_function
 
 import datetime
+import logging
 import os
 import shlex
 import shutil
@@ -62,8 +63,9 @@
   print("Python 2.7 or newer is required.", file=sys.stderr)
   sys.exit(1)
 
-OPTIONS = common.OPTIONS
+logger = logging.getLogger(__name__)
 
+OPTIONS = common.OPTIONS
 OPTIONS.add_missing = False
 OPTIONS.rebuild_recovery = False
 OPTIONS.replace_updated_files_list = []
@@ -127,7 +129,7 @@
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.img")
   if os.path.exists(img.input_name):
-    print("system.img already exists; no need to rebuild...")
+    logger.info("system.img already exists; no need to rebuild...")
     return img.input_name
 
   def output_sink(fn, data):
@@ -142,7 +144,7 @@
       common.ZipWrite(output_zip, ofile.name, arc_name)
 
   if OPTIONS.rebuild_recovery:
-    print("Building new recovery patch")
+    logger.info("Building new recovery patch")
     common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
                              boot_img, info_dict=OPTIONS.info_dict)
 
@@ -159,7 +161,7 @@
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system_other.img")
   if os.path.exists(img.input_name):
-    print("system_other.img already exists; no need to rebuild...")
+    logger.info("system_other.img already exists; no need to rebuild...")
     return
 
   CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system_other", img)
@@ -171,7 +173,7 @@
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.img")
   if os.path.exists(img.input_name):
-    print("vendor.img already exists; no need to rebuild...")
+    logger.info("vendor.img already exists; no need to rebuild...")
     return img.input_name
 
   block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.map")
@@ -186,7 +188,7 @@
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "product.img")
   if os.path.exists(img.input_name):
-    print("product.img already exists; no need to rebuild...")
+    logger.info("product.img already exists; no need to rebuild...")
     return img.input_name
 
   block_list = OutputFile(
@@ -204,7 +206,7 @@
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES",
                    "product_services.img")
   if os.path.exists(img.input_name):
-    print("product_services.img already exists; no need to rebuild...")
+    logger.info("product_services.img already exists; no need to rebuild...")
     return img.input_name
 
   block_list = OutputFile(
@@ -220,7 +222,7 @@
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "odm.img")
   if os.path.exists(img.input_name):
-    print("odm.img already exists; no need to rebuild...")
+    logger.info("odm.img already exists; no need to rebuild...")
     return img.input_name
 
   block_list = OutputFile(
@@ -239,7 +241,7 @@
   """
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "dtbo.img")
   if os.path.exists(img.input_name):
-    print("dtbo.img already exists; no need to rebuild...")
+    logger.info("dtbo.img already exists; no need to rebuild...")
     return img.input_name
 
   dtbo_prebuilt_path = os.path.join(
@@ -269,7 +271,7 @@
 
 
 def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
-  print("creating " + what + ".img...")
+  logger.info("creating " + what + ".img...")
 
   image_props = build_image.ImagePropFromGlobalDict(info_dict, what)
   fstab = info_dict["fstab"]
@@ -340,7 +342,7 @@
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "userdata.img")
   if os.path.exists(img.input_name):
-    print("userdata.img already exists; no need to rebuild...")
+    logger.info("userdata.img already exists; no need to rebuild...")
     return
 
   # Skip userdata.img if no size.
@@ -348,7 +350,7 @@
   if not image_props.get("partition_size"):
     return
 
-  print("creating userdata.img...")
+  logger.info("creating userdata.img...")
 
   image_props["timestamp"] = FIXED_FILE_TIMESTAMP
 
@@ -411,7 +413,7 @@
   img = OutputFile(
       output_zip, OPTIONS.input_tmp, "IMAGES", "{}.img".format(name))
   if os.path.exists(img.input_name):
-    print("{}.img already exists; not rebuilding...".format(name))
+    logger.info("%s.img already exists; not rebuilding...", name)
     return img.input_name
 
   avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
@@ -495,7 +497,7 @@
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "cache.img")
   if os.path.exists(img.input_name):
-    print("cache.img already exists; no need to rebuild...")
+    logger.info("cache.img already exists; no need to rebuild...")
     return
 
   image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "cache")
@@ -503,7 +505,7 @@
   if "fs_type" not in image_props:
     return
 
-  print("creating cache.img...")
+  logger.info("creating cache.img...")
 
   image_props["timestamp"] = FIXED_FILE_TIMESTAMP
 
@@ -580,8 +582,7 @@
 
       present_props = [x for x in prop_name_list if x in build_props]
       if not present_props:
-        print("Warning: fingerprint is not present for partition {}".
-              format(partition))
+        logger.warning("fingerprint is not present for partition %s", partition)
         property_id, fingerprint = "unknown", "unknown"
       else:
         property_id = present_props[0]
@@ -633,7 +634,7 @@
 
     prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
     if os.path.exists(prebuilt_path):
-      print("%s already exists, no need to overwrite..." % (img_name,))
+      logger.info("%s already exists, no need to overwrite...", img_name)
       continue
 
     img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
@@ -698,7 +699,7 @@
 
   if not OPTIONS.add_missing:
     if os.path.isdir(os.path.join(OPTIONS.input_tmp, "IMAGES")):
-      print("target_files appears to already contain images.")
+      logger.warning("target_files appears to already contain images.")
       sys.exit(1)
 
   OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, repacking=True)
@@ -748,7 +749,7 @@
   partitions = dict()
 
   def banner(s):
-    print("\n\n++++ " + s + " ++++\n\n")
+    logger.info("\n\n++++ " + s + " ++++\n\n")
 
   banner("boot")
   # common.GetBootableImage() returns the image directly if present.
@@ -912,20 +913,21 @@
                        "is_signing"],
       extra_option_handler=option_handler)
 
-
   if len(args) != 1:
     common.Usage(__doc__)
     sys.exit(1)
 
+  common.InitLogging()
+
   AddImagesToTargetFiles(args[0])
-  print("done.")
+  logger.info("done.")
 
 if __name__ == '__main__':
   try:
     common.CloseInheritedPipes()
     main(sys.argv[1:])
-  except common.ExternalError as e:
-    print("\n   ERROR: %s\n" % (e,))
+  except common.ExternalError:
+    logger.exception("\n   ERROR:\n")
     sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 189dba2..2d20e23 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -19,6 +19,7 @@
 import functools
 import heapq
 import itertools
+import logging
 import multiprocessing
 import os
 import os.path
@@ -33,6 +34,8 @@
 
 __all__ = ["EmptyImage", "DataImage", "BlockImageDiff"]
 
+logger = logging.getLogger(__name__)
+
 
 def compute_patch(srcfile, tgtfile, imgdiff=False):
   patchfile = common.MakeTempFile(prefix='patch-')
@@ -304,8 +307,8 @@
     """Prints a report of the collected imgdiff stats."""
 
     def print_header(header, separator):
-      print(header)
-      print(separator * len(header) + '\n')
+      logger.info(header)
+      logger.info(separator * len(header) + '\n')
 
     print_header('  Imgdiff Stats Report  ', '=')
     for key in self.REASONS:
@@ -314,7 +317,7 @@
       values = self.stats[key]
       section_header = ' {} (count: {}) '.format(key, len(values))
       print_header(section_header, '-')
-      print(''.join(['  {}\n'.format(name) for name in values]))
+      logger.info(''.join(['  {}\n'.format(name) for name in values]))
 
 
 class BlockImageDiff(object):
@@ -482,7 +485,7 @@
     self.WriteTransfers(prefix)
 
     # Report the imgdiff stats.
-    if common.OPTIONS.verbose and not self.disable_imgdiff:
+    if not self.disable_imgdiff:
       self.imgdiff_stats.Report()
 
   def WriteTransfers(self, prefix):
@@ -692,16 +695,17 @@
     OPTIONS = common.OPTIONS
     if OPTIONS.cache_size is not None:
       max_allowed = OPTIONS.cache_size * OPTIONS.stash_threshold
-      print("max stashed blocks: %d  (%d bytes), "
-            "limit: %d bytes (%.2f%%)\n" % (
-                max_stashed_blocks, self._max_stashed_size, max_allowed,
-                self._max_stashed_size * 100.0 / max_allowed))
+      logger.info(
+          "max stashed blocks: %d  (%d bytes), limit: %d bytes (%.2f%%)\n",
+          max_stashed_blocks, self._max_stashed_size, max_allowed,
+          self._max_stashed_size * 100.0 / max_allowed)
     else:
-      print("max stashed blocks: %d  (%d bytes), limit: <unknown>\n" % (
-          max_stashed_blocks, self._max_stashed_size))
+      logger.info(
+          "max stashed blocks: %d  (%d bytes), limit: <unknown>\n",
+          max_stashed_blocks, self._max_stashed_size)
 
   def ReviseStashSize(self):
-    print("Revising stash size...")
+    logger.info("Revising stash size...")
     stash_map = {}
 
     # Create the map between a stash and its def/use points. For example, for a
@@ -746,7 +750,7 @@
           # that will use this stash and replace the command with "new".
           use_cmd = stash_map[stash_raw_id][2]
           replaced_cmds.append(use_cmd)
-          print("%10d  %9s  %s" % (sr.size(), "explicit", use_cmd))
+          logger.info("%10d  %9s  %s", sr.size(), "explicit", use_cmd)
         else:
           # Update the stashes map.
           if sh in stashes:
@@ -762,7 +766,7 @@
         if xf.src_ranges.overlaps(xf.tgt_ranges):
           if stashed_blocks + xf.src_ranges.size() > max_allowed:
             replaced_cmds.append(xf)
-            print("%10d  %9s  %s" % (xf.src_ranges.size(), "implicit", xf))
+            logger.info("%10d  %9s  %s", xf.src_ranges.size(), "implicit", xf)
 
       # Replace the commands in replaced_cmds with "new"s.
       for cmd in replaced_cmds:
@@ -788,28 +792,29 @@
           stashes.pop(sh)
 
     num_of_bytes = new_blocks * self.tgt.blocksize
-    print("  Total %d blocks (%d bytes) are packed as new blocks due to "
-          "insufficient cache size." % (new_blocks, num_of_bytes))
+    logger.info(
+        "  Total %d blocks (%d bytes) are packed as new blocks due to "
+        "insufficient cache size.", new_blocks, num_of_bytes)
     return new_blocks
 
   def ComputePatches(self, prefix):
-    print("Reticulating splines...")
+    logger.info("Reticulating splines...")
     diff_queue = []
     patch_num = 0
     with open(prefix + ".new.dat", "wb") as new_f:
       for index, xf in enumerate(self.transfers):
         if xf.style == "zero":
           tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
-          print("%10d %10d (%6.2f%%) %7s %s %s" % (
-              tgt_size, tgt_size, 100.0, xf.style, xf.tgt_name,
-              str(xf.tgt_ranges)))
+          logger.info(
+              "%10d %10d (%6.2f%%) %7s %s %s", tgt_size, tgt_size, 100.0,
+              xf.style, xf.tgt_name, str(xf.tgt_ranges))
 
         elif xf.style == "new":
           self.tgt.WriteRangeDataToFd(xf.tgt_ranges, new_f)
           tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
-          print("%10d %10d (%6.2f%%) %7s %s %s" % (
-              tgt_size, tgt_size, 100.0, xf.style,
-              xf.tgt_name, str(xf.tgt_ranges)))
+          logger.info(
+              "%10d %10d (%6.2f%%) %7s %s %s", tgt_size, tgt_size, 100.0,
+              xf.style, xf.tgt_name, str(xf.tgt_ranges))
 
         elif xf.style == "diff":
           # We can't compare src and tgt directly because they may have
@@ -827,11 +832,12 @@
             xf.patch = None
             tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
             if xf.src_ranges != xf.tgt_ranges:
-              print("%10d %10d (%6.2f%%) %7s %s %s (from %s)" % (
-                  tgt_size, tgt_size, 100.0, xf.style,
+              logger.info(
+                  "%10d %10d (%6.2f%%) %7s %s %s (from %s)", tgt_size, tgt_size,
+                  100.0, xf.style,
                   xf.tgt_name if xf.tgt_name == xf.src_name else (
                       xf.tgt_name + " (from " + xf.src_name + ")"),
-                  str(xf.tgt_ranges), str(xf.src_ranges)))
+                  str(xf.tgt_ranges), str(xf.src_ranges))
           else:
             if xf.patch:
               # We have already generated the patch with imgdiff, while
@@ -850,9 +856,9 @@
 
     if diff_queue:
       if self.threads > 1:
-        print("Computing patches (using %d threads)..." % (self.threads,))
+        logger.info("Computing patches (using %d threads)...", self.threads)
       else:
-        print("Computing patches...")
+        logger.info("Computing patches...")
 
       diff_total = len(diff_queue)
       patches = [None] * diff_total
@@ -874,13 +880,6 @@
             xf_index, imgdiff, patch_index = diff_queue.pop()
             xf = self.transfers[xf_index]
 
-            if sys.stdout.isatty():
-              diff_left = len(diff_queue)
-              progress = (diff_total - diff_left) * 100 / diff_total
-              # '\033[K' is to clear to EOL.
-              print(' [%3d%%] %s\033[K' % (progress, xf.tgt_name), end='\r')
-              sys.stdout.flush()
-
           patch = xf.patch
           if not patch:
             src_ranges = xf.src_ranges
@@ -918,13 +917,10 @@
       while threads:
         threads.pop().join()
 
-      if sys.stdout.isatty():
-        print('\n')
-
       if error_messages:
-        print('ERROR:')
-        print('\n'.join(error_messages))
-        print('\n\n\n')
+        logger.error('ERROR:')
+        logger.error('\n'.join(error_messages))
+        logger.error('\n\n\n')
         sys.exit(1)
     else:
       patches = []
@@ -938,14 +934,13 @@
         offset += xf.patch_len
         patch_fd.write(patch)
 
-        if common.OPTIONS.verbose:
-          tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
-          print("%10d %10d (%6.2f%%) %7s %s %s %s" % (
-              xf.patch_len, tgt_size, xf.patch_len * 100.0 / tgt_size,
-              xf.style,
-              xf.tgt_name if xf.tgt_name == xf.src_name else (
-                  xf.tgt_name + " (from " + xf.src_name + ")"),
-              xf.tgt_ranges, xf.src_ranges))
+        tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
+        logger.info(
+            "%10d %10d (%6.2f%%) %7s %s %s %s", xf.patch_len, tgt_size,
+            xf.patch_len * 100.0 / tgt_size, xf.style,
+            xf.tgt_name if xf.tgt_name == xf.src_name else (
+                xf.tgt_name + " (from " + xf.src_name + ")"),
+            xf.tgt_ranges, xf.src_ranges)
 
   def AssertSha1Good(self):
     """Check the SHA-1 of the src & tgt blocks in the transfer list.
@@ -1005,7 +1000,7 @@
         assert touched[i] == 1
 
   def ImproveVertexSequence(self):
-    print("Improving vertex order...")
+    logger.info("Improving vertex order...")
 
     # At this point our digraph is acyclic; we reversed any edges that
     # were backwards in the heuristically-generated sequence.  The
@@ -1057,7 +1052,7 @@
     blocks will be written to the same stash slot in WriteTransfers().
     """
 
-    print("Reversing backward edges...")
+    logger.info("Reversing backward edges...")
     in_order = 0
     out_of_order = 0
     stash_raw_id = 0
@@ -1089,15 +1084,15 @@
           xf.goes_after[u] = None    # value doesn't matter
           u.goes_before[xf] = None
 
-    print(("  %d/%d dependencies (%.2f%%) were violated; "
-           "%d source blocks stashed.") %
-          (out_of_order, in_order + out_of_order,
-           (out_of_order * 100.0 / (in_order + out_of_order))
-           if (in_order + out_of_order) else 0.0,
-           stash_size))
+    logger.info(
+        "  %d/%d dependencies (%.2f%%) were violated; %d source blocks "
+        "stashed.", out_of_order, in_order + out_of_order,
+        (out_of_order * 100.0 / (in_order + out_of_order)) if (
+            in_order + out_of_order) else 0.0,
+        stash_size)
 
   def FindVertexSequence(self):
-    print("Finding vertex sequence...")
+    logger.info("Finding vertex sequence...")
 
     # This is based on "A Fast & Effective Heuristic for the Feedback
     # Arc Set Problem" by P. Eades, X. Lin, and W.F. Smyth.  Think of
@@ -1210,7 +1205,7 @@
     self.transfers = new_transfers
 
   def GenerateDigraph(self):
-    print("Generating digraph...")
+    logger.info("Generating digraph...")
 
     # Each item of source_ranges will be:
     #   - None, if that block is not used as a source,
@@ -1376,9 +1371,9 @@
 
         if tgt_changed < tgt_size * crop_threshold:
           assert tgt_changed + tgt_skipped.size() == tgt_size
-          print('%10d %10d (%6.2f%%) %s' % (
-              tgt_skipped.size(), tgt_size,
-              tgt_skipped.size() * 100.0 / tgt_size, tgt_name))
+          logger.info(
+              '%10d %10d (%6.2f%%) %s', tgt_skipped.size(), tgt_size,
+              tgt_skipped.size() * 100.0 / tgt_size, tgt_name)
           AddSplitTransfers(
               "%s-skipped" % (tgt_name,),
               "%s-skipped" % (src_name,),
@@ -1519,7 +1514,7 @@
                                    split_src_ranges,
                                    patch_content))
 
-    print("Finding transfers...")
+    logger.info("Finding transfers...")
 
     large_apks = []
     split_large_apks = []
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 43c91da..4a013c2 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -26,6 +26,7 @@
 
 from __future__ import print_function
 
+import logging
 import os
 import os.path
 import re
@@ -35,6 +36,8 @@
 import common
 import verity_utils
 
+logger = logging.getLogger(__name__)
+
 OPTIONS = common.OPTIONS
 BLOCK_SIZE = common.BLOCK_SIZE
 BYTES_IN_MB = 1024 * 1024
@@ -228,8 +231,8 @@
       "partition_size" not in prop_dict):
     # If partition_size is not defined, use output of `du' + reserved_size.
     size = GetDiskUsage(in_dir)
-    if OPTIONS.verbose:
-      print("The tree size of %s is %d MB." % (in_dir, size // BYTES_IN_MB))
+    logger.info(
+        "The tree size of %s is %d MB.", in_dir, size // BYTES_IN_MB)
     size += int(prop_dict.get("partition_reserved_size", 0))
     # Round this up to a multiple of 4K so that avbtool works
     size = common.RoundUpTo4K(size)
@@ -241,8 +244,8 @@
           lambda x: verity_utils.AVBCalcMaxImageSize(
               avbtool, avb_footer_type, x, avb_signing_args))
     prop_dict["partition_size"] = str(size)
-    if OPTIONS.verbose:
-      print("Allocating %d MB for %s." % (size // BYTES_IN_MB, out_file))
+    logger.info(
+        "Allocating %d MB for %s.", size // BYTES_IN_MB, out_file)
 
   prop_dict["image_size"] = prop_dict["partition_size"]
 
@@ -350,8 +353,8 @@
       du_str = "{} bytes ({} MB)".format(du, du // BYTES_IN_MB)
     # Suppress any errors from GetDiskUsage() to avoid hiding the real errors
     # from common.RunAndCheckOutput().
-    except Exception as e:  # pylint: disable=broad-except
-      print(e, file=sys.stderr)
+    except Exception:  # pylint: disable=broad-except
+      logger.exception("Failed to compute disk usage with du")
       du_str = "unknown"
     print(
         "Out of space? The tree size of {} is {}, with reserved space of {} "
@@ -664,6 +667,8 @@
     print(__doc__)
     sys.exit(1)
 
+  common.InitLogging()
+
   in_dir = argv[0]
   glob_dict_file = argv[1]
   out_file = argv[2]
@@ -697,7 +702,7 @@
     elif image_filename == "product_services.img":
       mount_point = "product_services"
     else:
-      print("error: unknown image file name ", image_filename, file=sys.stderr)
+      logger.error("Unknown image file name %s", image_filename)
       sys.exit(1)
 
     image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
@@ -705,14 +710,14 @@
   try:
     BuildImage(in_dir, image_properties, out_file, target_out)
   except:
-    print("Error: Failed to build {} from {}".format(out_file, in_dir),
-          file=sys.stderr)
+    logger.error("Failed to build %s from %s", out_file, in_dir)
     raise
 
   if prop_file_out:
     glob_dict_out = GlobalDictFromImageProp(image_properties, mount_point)
     SaveGlobalDict(prop_file_out, glob_dict_out)
 
+
 if __name__ == '__main__':
   try:
     main(sys.argv[1:])
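
build_image.py now reports failures through logger.exception() instead of printing the exception object to stderr; logger.exception() records at ERROR level and appends the current traceback. A small sketch of the difference (the OSError below is a stand-in for a real `du` failure):

``` python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

try:
  raise OSError("du: cannot access '/no/such/dir'")  # stand-in failure
except Exception:  # pylint: disable=broad-except
  # Unlike print(e, file=sys.stderr), logger.exception() logs at ERROR level
  # and automatically includes the traceback of the active exception.
  logger.exception("Failed to compute disk usage with du")
```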
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index a580709..7d3424b 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -21,16 +21,18 @@
 from __future__ import print_function
 
 import argparse
+import logging
 import re
 import subprocess
 import sys
 import zipfile
-
 from hashlib import sha1
 from hashlib import sha256
 
 import common
 
+logger = logging.getLogger(__name__)
+
 
 def CertUsesSha256(cert):
   """Check if the cert uses SHA-256 hashing algorithm."""
@@ -181,6 +183,8 @@
   parser.add_argument('package', help='The OTA package to be verified.')
   args = parser.parse_args()
 
+  common.InitLogging()
+
   VerifyPackage(args.certificate, args.package)
   VerifyAbOtaPayload(args.certificate, args.package)
 
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 7cca766..fe63458 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -20,6 +20,9 @@
 import getpass
 import gzip
 import imp
+import json
+import logging
+import logging.config
 import os
 import platform
 import re
@@ -37,6 +40,8 @@
 import blockimgdiff
 import sparse_img
 
+logger = logging.getLogger(__name__)
+
 
 class Options(object):
   def __init__(self):
@@ -121,13 +126,53 @@
   pass
 
 
+def InitLogging():
+  DEFAULT_LOGGING_CONFIG = {
+      'version': 1,
+      'disable_existing_loggers': False,
+      'formatters': {
+          'standard': {
+              'format':
+                  '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s',
+              'datefmt': '%Y-%m-%d %H:%M:%S',
+          },
+      },
+      'handlers': {
+          'default': {
+              'class': 'logging.StreamHandler',
+              'formatter': 'standard',
+          },
+      },
+      'loggers': {
+          '': {
+              'handlers': ['default'],
+              'level': 'WARNING',
+              'propagate': True,
+          }
+      }
+  }
+  env_config = os.getenv('LOGGING_CONFIG')
+  if env_config:
+    with open(env_config) as f:
+      config = json.load(f)
+  else:
+    config = DEFAULT_LOGGING_CONFIG
+
+    # Log more verbosely (INFO instead of WARNING) in verbose mode.
+    if OPTIONS.verbose:
+      config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
+      config['loggers']['']['level'] = 'INFO'
+
+  logging.config.dictConfig(config)
+
+
 def Run(args, verbose=None, **kwargs):
   """Creates and returns a subprocess.Popen object.
 
   Args:
     args: The command represented as a list of strings.
-    verbose: Whether the commands should be shown (default to OPTIONS.verbose
-        if unspecified).
+    verbose: Whether the commands should be shown. Defaults to the global
+        verbosity if unspecified.
     kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
         stdin, etc. stdout and stderr will default to subprocess.PIPE and
         subprocess.STDOUT respectively unless caller specifies any of them.
@@ -135,13 +180,12 @@
   Returns:
     A subprocess.Popen object.
   """
-  if verbose is None:
-    verbose = OPTIONS.verbose
   if 'stdout' not in kwargs and 'stderr' not in kwargs:
     kwargs['stdout'] = subprocess.PIPE
     kwargs['stderr'] = subprocess.STDOUT
-  if verbose:
-    print("  Running: \"{}\"".format(" ".join(args)))
+  # Don't log anything if the caller explicitly says so (verbose=False).
+  if verbose != False:
+    logger.info("  Running: \"%s\"", " ".join(args))
   return subprocess.Popen(args, **kwargs)
 
 
@@ -150,8 +194,8 @@
 
   Args:
     args: The command represented as a list of strings.
-    verbose: Whether the commands should be shown (default to OPTIONS.verbose
-        if unspecified).
+    verbose: Whether the commands should be shown. Defaults to the global
+        verbosity if unspecified.
     kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
         stdin, etc. stdout and stderr will default to subprocess.PIPE and
         subprocess.STDOUT respectively unless caller specifies any of them.
@@ -162,12 +206,11 @@
   Raises:
     ExternalError: On non-zero exit from the command.
   """
-  if verbose is None:
-    verbose = OPTIONS.verbose
   proc = Run(args, verbose=verbose, **kwargs)
   output, _ = proc.communicate()
-  if verbose:
-    print("{}".format(output.rstrip()))
+  # Don't log anything if the caller explicitly says so (verbose=False).
+  if verbose != False:
+    logger.info("%s", output.rstrip())
   if proc.returncode != 0:
     raise ExternalError(
         "Failed to run command '{}' (exit code {}):\n{}".format(
@@ -277,8 +320,8 @@
       if os.path.exists(system_base_fs_file):
         d["system_base_fs_file"] = system_base_fs_file
       else:
-        print("Warning: failed to find system base fs file: %s" % (
-            system_base_fs_file,))
+        logger.warning(
+            "Failed to find system base fs file: %s", system_base_fs_file)
         del d["system_base_fs_file"]
 
     if "vendor_base_fs_file" in d:
@@ -287,8 +330,8 @@
       if os.path.exists(vendor_base_fs_file):
         d["vendor_base_fs_file"] = vendor_base_fs_file
       else:
-        print("Warning: failed to find vendor base fs file: %s" % (
-            vendor_base_fs_file,))
+        logger.warning(
+            "Failed to find vendor base fs file: %s", vendor_base_fs_file)
         del d["vendor_base_fs_file"]
 
   def makeint(key):
@@ -364,7 +407,7 @@
   try:
     data = read_helper(prop_file)
   except KeyError:
-    print("Warning: could not read %s" % (prop_file,))
+    logger.warning("Failed to read %s", prop_file)
     data = ""
   return LoadDictionaryFromLines(data.split("\n"))
 
@@ -394,7 +437,7 @@
   try:
     data = read_helper(recovery_fstab_path)
   except KeyError:
-    print("Warning: could not find {}".format(recovery_fstab_path))
+    logger.warning("Failed to find %s", recovery_fstab_path)
     data = ""
 
   assert fstab_version == 2
@@ -447,7 +490,7 @@
 
 def DumpInfoDict(d):
   for k, v in sorted(d.items()):
-    print("%-25s = (%s) %s" % (k, type(v).__name__, v))
+    logger.info("%-25s = (%s) %s", k, type(v).__name__, v)
 
 
 def AppendAVBSigningArgs(cmd, partition):
@@ -657,15 +700,15 @@
 
   prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
   if os.path.exists(prebuilt_path):
-    print("using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,))
+    logger.info("using prebuilt %s from BOOTABLE_IMAGES...", prebuilt_name)
     return File.FromLocalFile(name, prebuilt_path)
 
   prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
   if os.path.exists(prebuilt_path):
-    print("using prebuilt %s from IMAGES..." % (prebuilt_name,))
+    logger.info("using prebuilt %s from IMAGES...", prebuilt_name)
     return File.FromLocalFile(name, prebuilt_path)
 
-  print("building image from target_files %s..." % (tree_subdir,))
+  logger.info("building image from target_files %s...", tree_subdir)
 
   if info_dict is None:
     info_dict = OPTIONS.info_dict
@@ -1001,9 +1044,9 @@
     if pct >= 99.0:
       raise ExternalError(msg)
     elif pct >= 95.0:
-      print("\n  WARNING: %s\n" % (msg,))
-    elif OPTIONS.verbose:
-      print("  ", msg)
+      logger.warning("\n  WARNING: %s\n", msg)
+    else:
+      logger.info("  %s", msg)
 
 
 def ReadApkCerts(tf_zip):
@@ -1302,13 +1345,13 @@
           continue
         m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
         if not m:
-          print("failed to parse password file: ", line)
+          logger.warning("Failed to parse password file: %s", line)
         else:
           result[m.group(2)] = m.group(1)
       f.close()
     except IOError as e:
       if e.errno != errno.ENOENT:
-        print("error reading password file: ", str(e))
+        logger.exception("Error reading password file:")
     return result
 
 
@@ -1452,10 +1495,10 @@
           if x == ".py":
             f = b
           info = imp.find_module(f, [d])
-        print("loaded device-specific extensions from", path)
+        logger.info("loaded device-specific extensions from %s", path)
         self.module = imp.load_module("device_specific", *info)
       except ImportError:
-        print("unable to load device-specific module; assuming none")
+        logger.info("unable to load device-specific module; assuming none")
 
   def _DoCall(self, function_name, *args, **kwargs):
     """Call the named function in the device-specific module, passing
@@ -1597,7 +1640,7 @@
       th.start()
       th.join(timeout=300)   # 5 mins
       if th.is_alive():
-        print("WARNING: diff command timed out")
+        logger.warning("diff command timed out")
         p.terminate()
         th.join(5)
         if th.is_alive():
@@ -1605,8 +1648,7 @@
           th.join()
 
       if p.returncode != 0:
-        print("WARNING: failure running %s:\n%s\n" % (
-            diff_program, "".join(err)))
+        logger.warning("Failure running %s:\n%s\n", diff_program, "".join(err))
         self.patch = None
         return None, None, None
       diff = ptemp.read()
@@ -1630,7 +1672,7 @@
 
 def ComputeDifferences(diffs):
   """Call ComputePatch on all the Difference objects in 'diffs'."""
-  print(len(diffs), "diffs to compute")
+  logger.info("%d diffs to compute", len(diffs))
 
   # Do the largest files first, to try and reduce the long-pole effect.
   by_size = [(i.tf.size, i) for i in diffs]
@@ -1656,14 +1698,14 @@
         else:
           name = "%s (%s)" % (tf.name, sf.name)
         if patch is None:
-          print(
-              "patching failed!                                  %s" % (name,))
+          logger.error("patching failed! %40s", name)
         else:
-          print("%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
-              dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name))
+          logger.info(
+              "%8.2f sec %8d / %8d bytes (%6.2f%%) %s", dur, len(patch),
+              tf.size, 100.0 * len(patch) / tf.size, name)
       lock.release()
-    except Exception as e:
-      print(e)
+    except Exception:
+      logger.exception("Failed to compute diff from worker")
       raise
 
   # start worker threads; wait for them all to finish.
@@ -2086,6 +2128,6 @@
   # in the L release.
   sh_location = "bin/install-recovery.sh"
 
-  print("putting script in", sh_location)
+  logger.info("putting script in %s", sh_location)
 
   output_sink(sh_location, sh)
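
With InitLogging() above, every releasetools entry point gets a root logger that writes to stderr at WARNING by default, drops the threshold to INFO when OPTIONS.verbose is set, and can be replaced wholesale by pointing the LOGGING_CONFIG environment variable at a JSON dictConfig file. A standalone sketch of what the default configuration amounts to (the dict mirrors DEFAULT_LOGGING_CONFIG above; the two trailing log calls are only for illustration):

``` python
import logging
import logging.config

# Roughly what InitLogging() installs when LOGGING_CONFIG is unset and
# OPTIONS.verbose is false; verbose mode changes the root level to INFO.
logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'standard': {
            'format': '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S',
        },
    },
    'handlers': {
        'default': {'class': 'logging.StreamHandler', 'formatter': 'standard'},
    },
    'loggers': {
        '': {'handlers': ['default'], 'level': 'WARNING', 'propagate': True},
    },
})

logging.getLogger(__name__).warning("emitted (at or above WARNING)")
logging.getLogger(__name__).info("suppressed under the default threshold")
```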
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 01ff149..0156b72 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -28,6 +28,7 @@
 
 from __future__ import print_function
 
+import logging
 import os
 import shutil
 import sys
@@ -39,6 +40,7 @@
   print("Python 2.7 or newer is required.", file=sys.stderr)
   sys.exit(1)
 
+logger = logging.getLogger(__name__)
 
 OPTIONS = common.OPTIONS
 
@@ -72,6 +74,8 @@
     common.Usage(__doc__)
     sys.exit(1)
 
+  common.InitLogging()
+
   OPTIONS.input_tmp = common.UnzipTemp(args[0], ["IMAGES/*", "OTA/*"])
   output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
   CopyInfo(output_zip)
@@ -90,11 +94,11 @@
       common.ZipWrite(output_zip, os.path.join(images_path, image), image)
 
   finally:
-    print("cleaning up...")
+    logger.info("cleaning up...")
     common.ZipClose(output_zip)
     shutil.rmtree(OPTIONS.input_tmp)
 
-  print("done.")
+  logger.info("done.")
 
 
 if __name__ == '__main__':
@@ -102,5 +106,5 @@
     common.CloseInheritedPipes()
     main(sys.argv[1:])
   except common.ExternalError as e:
-    print("\n   ERROR: %s\n" % (e,))
+    logger.exception("\n   ERROR:\n")
     sys.exit(1)
diff --git a/tools/releasetools/make_recovery_patch.py b/tools/releasetools/make_recovery_patch.py
index 7c6007e..725b355 100755
--- a/tools/releasetools/make_recovery_patch.py
+++ b/tools/releasetools/make_recovery_patch.py
@@ -16,24 +16,27 @@
 
 from __future__ import print_function
 
+import logging
+import os
 import sys
 
+import common
+
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
   sys.exit(1)
 
-import os
-import common
+logger = logging.getLogger(__name__)
 
 OPTIONS = common.OPTIONS
 
-def main(argv):
-  # def option_handler(o, a):
-  #   return False
 
+def main(argv):
   args = common.ParseOptions(argv, __doc__)
   input_dir, output_dir = args
 
+  common.InitLogging()
+
   OPTIONS.info_dict = common.LoadInfoDict(input_dir)
 
   recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index daf959f..2264655 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -164,6 +164,7 @@
 
 from __future__ import print_function
 
+import logging
 import multiprocessing
 import os.path
 import shlex
@@ -181,6 +182,7 @@
   print("Python 2.7 or newer is required.", file=sys.stderr)
   sys.exit(1)
 
+logger = logging.getLogger(__name__)
 
 OPTIONS = common.OPTIONS
 OPTIONS.package_key = None
@@ -576,11 +578,11 @@
         OPTIONS.input_tmp, "RECOVERY")
     common.ZipWriteStr(
         output_zip, recovery_two_step_img_name, recovery_two_step_img.data)
-    print("two-step package: using %s in stage 1/3" % (
-        recovery_two_step_img_name,))
+    logger.info(
+        "two-step package: using %s in stage 1/3", recovery_two_step_img_name)
     script.WriteRawImage("/boot", recovery_two_step_img_name)
   else:
-    print("two-step package: using recovery.img in stage 1/3")
+    logger.info("two-step package: using recovery.img in stage 1/3")
     # The "recovery.img" entry has been written into package earlier.
     script.WriteRawImage("/boot", "recovery.img")
 
@@ -1344,8 +1346,8 @@
   target_api_version = target_info["recovery_api_version"]
   source_api_version = source_info["recovery_api_version"]
   if source_api_version == 0:
-    print("WARNING: generating edify script for a source that "
-          "can't install it.")
+    logger.warning(
+        "Generating edify script for a source that can't install it.")
 
   script = edify_generator.EdifyGenerator(
       source_api_version, target_info, fstab=source_info["fstab"])
@@ -1523,8 +1525,9 @@
     else:
       include_full_boot = False
 
-      print("boot      target: %d  source: %d  diff: %d" % (
-          target_boot.size, source_boot.size, len(d)))
+      logger.info(
+          "boot      target: %d  source: %d  diff: %d", target_boot.size,
+          source_boot.size, len(d))
 
       common.ZipWriteStr(output_zip, "boot.img.p", d)
 
@@ -1574,19 +1577,19 @@
   if OPTIONS.two_step:
     common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
     script.WriteRawImage("/boot", "boot.img")
-    print("writing full boot image (forced by two-step mode)")
+    logger.info("writing full boot image (forced by two-step mode)")
 
   if not OPTIONS.two_step:
     if updating_boot:
       if include_full_boot:
-        print("boot image changed; including full.")
+        logger.info("boot image changed; including full.")
         script.Print("Installing boot image...")
         script.WriteRawImage("/boot", "boot.img")
       else:
         # Produce the boot image by applying a patch to the current
         # contents of the boot partition, and write it back to the
         # partition.
-        print("boot image changed; including patch.")
+        logger.info("boot image changed; including patch.")
         script.Print("Patching boot image...")
         script.ShowProgress(0.1, 10)
         script.PatchPartition(
@@ -1596,7 +1599,7 @@
                 boot_type, boot_device, source_boot.size, source_boot.sha1),
             'boot.img.p')
     else:
-      print("boot image unchanged; skipping.")
+      logger.info("boot image unchanged; skipping.")
 
   # Do device-specific installation (eg, write radio image).
   device_specific.IncrementalOTA_InstallEnd()
@@ -1787,7 +1790,7 @@
       common.ZipWriteStr(output_zip, care_map_name, care_map_data,
                          compress_type=zipfile.ZIP_STORED)
     else:
-      print("Warning: cannot find care map file in target_file package")
+      logger.warning("Cannot find care map file in target_file package")
 
   AddCompatibilityArchiveIfTrebleEnabled(
       target_zip, output_zip, target_info, source_info)
@@ -1903,6 +1906,8 @@
     common.Usage(__doc__)
     sys.exit(1)
 
+  common.InitLogging()
+
   if OPTIONS.downgrade:
     # We should only allow downgrading incrementals (as opposed to full).
     # Otherwise the device may go back from arbitrary build with this full
@@ -1923,9 +1928,8 @@
     with zipfile.ZipFile(args[0], 'r') as input_zip:
       OPTIONS.info_dict = common.LoadInfoDict(input_zip)
 
-  if OPTIONS.verbose:
-    print("--- target info ---")
-    common.DumpInfoDict(OPTIONS.info_dict)
+  logger.info("--- target info ---")
+  common.DumpInfoDict(OPTIONS.info_dict)
 
   # Load the source build dict if applicable.
   if OPTIONS.incremental_source is not None:
@@ -1933,9 +1937,8 @@
     with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
       OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
 
-    if OPTIONS.verbose:
-      print("--- source info ---")
-      common.DumpInfoDict(OPTIONS.source_info_dict)
+    logger.info("--- source info ---")
+    common.DumpInfoDict(OPTIONS.source_info_dict)
 
   # Load OEM dicts if provided.
   OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
@@ -1959,7 +1962,7 @@
         output_file=args[1],
         source_file=OPTIONS.incremental_source)
 
-    print("done.")
+    logger.info("done.")
     return
 
   # Sanity check the loaded info dicts first.
@@ -1970,7 +1973,7 @@
   # Non-A/B OTAs rely on /cache partition to store temporary files.
   cache_size = OPTIONS.info_dict.get("cache_size")
   if cache_size is None:
-    print("--- can't determine the cache partition size ---")
+    logger.warning("--- can't determine the cache partition size ---")
   OPTIONS.cache_size = cache_size
 
   if OPTIONS.extra_script is not None:
@@ -1979,7 +1982,7 @@
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
   else:
-    print("unzipping target target-files...")
+    logger.info("unzipping target target-files...")
     OPTIONS.input_tmp = common.UnzipTemp(args[0], UNZIP_PATTERN)
   OPTIONS.target_tmp = OPTIONS.input_tmp
 
@@ -1991,7 +1994,7 @@
   if OPTIONS.device_specific is None:
     from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
     if os.path.exists(from_input):
-      print("(using device-specific extensions from target_files)")
+      logger.info("(using device-specific extensions from target_files)")
       OPTIONS.device_specific = from_input
     else:
       OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
@@ -2008,7 +2011,7 @@
 
   # Generate an incremental OTA.
   else:
-    print("unzipping source target-files...")
+    logger.info("unzipping source target-files...")
     OPTIONS.source_tmp = common.UnzipTemp(
         OPTIONS.incremental_source, UNZIP_PATTERN)
     with zipfile.ZipFile(args[0], 'r') as input_zip, \
@@ -2024,15 +2027,15 @@
         target_files_diff.recursiveDiff(
             '', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
 
-  print("done.")
+  logger.info("done.")
 
 
 if __name__ == '__main__':
   try:
     common.CloseInheritedPipes()
     main(sys.argv[1:])
-  except common.ExternalError as e:
-    print("\n   ERROR: %s\n" % (e,))
+  except common.ExternalError:
+    logger.exception("\n   ERROR:\n")
     sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index ca53ae1..5ebb1f0 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import bisect
+import logging
 import os
 import struct
 import threading
@@ -20,6 +21,8 @@
 
 import rangelib
 
+logger = logging.getLogger(__name__)
+
 
 class SparseImage(object):
   """Wraps a sparse image file into an image object.
@@ -61,8 +64,9 @@
       raise ValueError("Chunk header size was expected to be 12, but is %u." %
                        (chunk_hdr_sz,))
 
-    print("Total of %u %u-byte output blocks in %u input chunks."
-          % (total_blks, blk_sz, total_chunks))
+    logger.info(
+        "Total of %u %u-byte output blocks in %u input chunks.", total_blks,
+        blk_sz, total_chunks)
 
     if not build_map:
       assert not hashtree_info_generator, \
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index b9c8dc7..edb3d41 100644
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -18,13 +18,18 @@
 Utils for running unittests.
 """
 
+import logging
 import os
 import os.path
 import struct
+import sys
 import unittest
 
 import common
 
+# Some test runners don't like output on stderr.
+logging.basicConfig(stream=sys.stdout)
+
 
 def get_testdata_dir():
   """Returns the testdata dir, in relative to the script dir."""
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 626a1dd..00af296 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -16,6 +16,7 @@
 
 from __future__ import print_function
 
+import logging
 import os.path
 import shlex
 import struct
@@ -24,6 +25,8 @@
 import sparse_img
 from rangelib import RangeSet
 
+logger = logging.getLogger(__name__)
+
 OPTIONS = common.OPTIONS
 BLOCK_SIZE = common.BLOCK_SIZE
 FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
@@ -71,7 +74,7 @@
 
 def ZeroPadSimg(image_file, pad_size):
   blocks = pad_size // BLOCK_SIZE
-  print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
+  logger.info("Padding %d blocks (%d bytes)", blocks, pad_size)
   simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
   simg.AppendFillChunk(0, blocks)
 
@@ -114,9 +117,9 @@
     else:
       hi = i
 
-  if OPTIONS.verbose:
-    print("Adjusted partition size for verity, partition_size: {},"
-          " verity_size: {}".format(result, verity_size))
+  logger.info(
+      "Adjusted partition size for verity, partition_size: %s, verity_size: %s",
+      result, verity_size)
   AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
   return (result, verity_size)
 
@@ -326,9 +329,9 @@
     else:
       lo = mid + BLOCK_SIZE
 
-  if OPTIONS.verbose:
-    print("AVBCalcMinPartitionSize({}): partition_size: {}.".format(
-        image_size, partition_size))
+  logger.info(
+      "AVBCalcMinPartitionSize(%d): partition_size: %d.",
+      image_size, partition_size)
 
   return partition_size
 
@@ -514,9 +517,9 @@
             salt, self.hashtree_info.salt)
 
     if root_hash != self.hashtree_info.root_hash:
-      print(
-          "Calculated root hash {} doesn't match the one in metadata {}".format(
-              root_hash, self.hashtree_info.root_hash))
+      logger.warning(
+          "Calculated root hash %s doesn't match the one in metadata %s",
+          root_hash, self.hashtree_info.root_hash)
       return False
 
     # Reads the generated hash tree and checks if it has the exact same bytes
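
The warn.py hunks below register additional Error Prone checks; each entry pairs a human-readable description with a regex that warn.py matches against compiler output. A small sketch of how one of the newly added patterns matches a warning line (the sample compiler line is fabricated for illustration):

``` python
import re

# Pattern copied from the ClassNamedLikeTypeParameter entry added below.
pattern = r".*: warning: \[ClassNamedLikeTypeParameter\] .+"
line = ("frameworks/base/Example.java:42: warning: "
        "[ClassNamedLikeTypeParameter] This class's name looks like a "
        "Type Parameter.")
print(bool(re.match(pattern, line)))  # True
```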
diff --git a/tools/warn.py b/tools/warn.py
index bcde64a..c710164 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -505,6 +505,11 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: This class\'s name looks like a Type Parameter.',
+     'patterns': [r".*: warning: \[ClassNamedLikeTypeParameter\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Field name is CONSTANT_CASE, but field is not static and final',
      'patterns': [r".*: warning: \[ConstantField\] .+"]},
     {'category': 'java',
@@ -515,6 +520,11 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Prefer assertThrows to ExpectedException',
+     'patterns': [r".*: warning: \[ExpectedExceptionRefactoring\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: This field is only assigned during initialization; consider making it final',
      'patterns': [r".*: warning: \[FieldCanBeFinal\] .+"]},
     {'category': 'java',
@@ -525,7 +535,12 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
-         r'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
+         'Java: Refactors uses of the JSR 305 @Immutable to Error Prone\'s annotation',
+     'patterns': [r".*: warning: \[ImmutableRefactoring\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
      'patterns': [r".*: warning: \[LambdaFunctionalInterface\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
@@ -560,7 +575,7 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
-         'Java: Non-standard parameter comment; prefer `/*paramName=*/ arg`',
+         'Java: Non-standard parameter comment; prefer `/* paramName= */ arg`',
      'patterns': [r".*: warning: \[ParameterComment\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
@@ -600,17 +615,27 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Prefer assertThrows to @Test(expected=...)',
+     'patterns': [r".*: warning: \[TestExceptionRefactoring\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Unchecked exceptions do not need to be declared in the method signature.',
      'patterns': [r".*: warning: \[ThrowsUncheckedException\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Prefer assertThrows to try/fail',
+     'patterns': [r".*: warning: \[TryFailRefactoring\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Type parameters must be a single letter with an optional numeric suffix, or an UpperCamelCase name followed by the letter \'T\'.',
      'patterns': [r".*: warning: \[TypeParameterNaming\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
-         'Java: Constructors and methods with the same name should appear sequentially with no other code in between',
+         'Java: Constructors and methods with the same name should appear sequentially with no other code in between. Please re-order or re-name methods.',
      'patterns': [r".*: warning: \[UngroupedOverloads\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
@@ -640,11 +665,26 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: This method passes a pair of parameters through to String.format, but the enclosing method wasn\'t annotated @FormatMethod. Doing so gives compile-time rather than run-time protection against malformed format strings.',
+     'patterns': [r".*: warning: \[AnnotateFormatMethod\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Annotations should be positioned after Javadocs, but before modifiers..',
+     'patterns': [r".*: warning: \[AnnotationPosition\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Arguments are in the wrong order or could be commented for clarity.',
      'patterns': [r".*: warning: \[ArgumentSelectionDefectChecker\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Arrays do not override equals() or hashCode, so comparisons will be done on reference equality only. If neither deduplication nor lookup are needed, consider using a List instead. Otherwise, use IdentityHashMap/Set, a Map from a library that handles object arrays, or an Iterable/List of pairs.',
+     'patterns': [r".*: warning: \[ArrayAsKeyOfSetOrMap\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Arguments are swapped in assertEquals-like call',
      'patterns': [r".*: warning: \[AssertEqualsArgumentOrderChecker\] .+"]},
     {'category': 'java',
@@ -655,7 +695,7 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: The lambda passed to assertThows should contain exactly one statement',
+         'Java: The lambda passed to assertThrows should contain exactly one statement',
      'patterns': [r".*: warning: \[AssertThrowsMultipleStatements\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
@@ -670,6 +710,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Make toString(), hashCode() and equals() final in AutoValue classes, so it is clear to readers that AutoValue is not overriding them',
+     'patterns': [r".*: warning: \[AutoValueFinalMethods\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Classes that implement Annotation must override equals and hashCode. Consider using AutoAnnotation instead of implementing Annotation by hand.',
      'patterns': [r".*: warning: \[BadAnnotationImplementation\] .+"]},
     {'category': 'java',
@@ -680,7 +725,22 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: BigDecimal(double) and BigDecimal.valueOf(double) may lose precision, prefer BigDecimal(String) or BigDecimal(long)',
+         'Java: Importing nested classes/static methods/static fields with commonly-used names can make code harder to read, because it may not be clear from the context exactly which type is being referred to. Qualifying the name with that of the containing class can make the code clearer.',
+     'patterns': [r".*: warning: \[BadImport\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: instanceof used in a way that is equivalent to a null check.',
+     'patterns': [r".*: warning: \[BadInstanceof\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: BigDecimal#equals has surprising behavior: it also compares scale.',
+     'patterns': [r".*: warning: \[BigDecimalEquals\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: new BigDecimal(double) loses precision in this case.',
      'patterns': [r".*: warning: \[BigDecimalLiteralDouble\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
@@ -735,6 +795,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Providing Closeable resources makes their lifecycle unclear',
+     'patterns': [r".*: warning: \[CloseableProvides\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: The type of the array parameter of Collection.toArray needs to be compatible with the array type',
      'patterns': [r".*: warning: \[CollectionToArraySafeParameter\] .+"]},
     {'category': 'java',
@@ -770,6 +835,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Avoid deprecated Thread methods; read the method\'s javadoc for details.',
+     'patterns': [r".*: warning: \[DeprecatedThreadMethods\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Prefer collection factory methods or builders to the double-brace initialization pattern.',
      'patterns': [r".*: warning: \[DoubleBraceInitialization\] .+"]},
     {'category': 'java',
@@ -785,6 +855,16 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: equals() implementation may throw NullPointerException when given null',
+     'patterns': [r".*: warning: \[EqualsBrokenForNull\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Overriding Object#equals in a non-final class by using getClass rather than instanceof breaks substitutability of subclasses.',
+     'patterns': [r".*: warning: \[EqualsGetClass\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Classes that override equals should also override hashCode.',
      'patterns': [r".*: warning: \[EqualsHashCode\] .+"]},
     {'category': 'java',
@@ -795,11 +875,26 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: The contract of #equals states that it should return false for incompatible types, while this implementation may throw ClassCastException.',
+     'patterns': [r".*: warning: \[EqualsUnsafeCast\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Implementing #equals by just comparing hashCodes is fragile. Hashes collide frequently, and this will lead to false positives in #equals.',
+     'patterns': [r".*: warning: \[EqualsUsingHashCode\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Calls to ExpectedException#expect should always be followed by exactly one statement.',
      'patterns': [r".*: warning: \[ExpectedExceptionChecker\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: When only using JUnit Assert\'s static methods, you should import statically instead of extending.',
+     'patterns': [r".*: warning: \[ExtendingJUnitAssert\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Switch case may fall through',
      'patterns': [r".*: warning: \[FallThrough\] .+"]},
     {'category': 'java',
@@ -815,6 +910,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: This fuzzy equality check is using a tolerance less than the gap to the next number. You may want a less restrictive tolerance, or to assert equality.',
+     'patterns': [r".*: warning: \[FloatingPointAssertionWithinEpsilon\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Floating point literal loses precision',
      'patterns': [r".*: warning: \[FloatingPointLiteralPrecision\] .+"]},
     {'category': 'java',
@@ -875,6 +975,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Including fields in hashCode which are not compared in equals violates the contract of hashCode.',
+     'patterns': [r".*: warning: \[InconsistentHashCode\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: The ordering of parameters in overloaded methods should be as consistent as possible (when viewed from left to right)',
      'patterns': [r".*: warning: \[InconsistentOverloads\] .+"]},
     {'category': 'java',
@@ -905,6 +1010,21 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: This @param tag doesn\'t refer to a parameter of the method.',
+     'patterns': [r".*: warning: \[InvalidParam\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: This tag is invalid.',
+     'patterns': [r".*: warning: \[InvalidTag\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: The documented method doesn\'t actually throw this checked exception.',
+     'patterns': [r".*: warning: \[InvalidThrows\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Class should not implement both `Iterable` and `Iterator`',
      'patterns': [r".*: warning: \[IterableAndIterator\] .+"]},
     {'category': 'java',
@@ -935,11 +1055,21 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Calls to Lock#lock should be immediately followed by a try block which releases the lock.',
+     'patterns': [r".*: warning: \[LockNotBeforeTry\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Assignment where a boolean expression was expected; use == if this assignment wasn\'t expected or add parentheses for clarity.',
      'patterns': [r".*: warning: \[LogicalAssignment\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Math.abs does not always give a positive result. Please consider other methods for positive random numbers.',
+     'patterns': [r".*: warning: \[MathAbsoluteRandom\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Switches on enum types should either handle all values, or have a default case.',
      'patterns': [r".*: warning: \[MissingCasesInEnumSwitch\] .+"]},
     {'category': 'java',
@@ -960,6 +1090,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: A collection or proto builder was created, but its values were never accessed.',
+     'patterns': [r".*: warning: \[ModifiedButNotUsed\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Modifying a collection while iterating over it in a loop may cause a ConcurrentModificationException to be thrown.',
      'patterns': [r".*: warning: \[ModifyCollectionInEnhancedForLoop\] .+"]},
     {'category': 'java',
@@ -990,6 +1125,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Instead of returning a functional type, return the actual type that the returned function would return and use lambdas at use site.',
+     'patterns': [r".*: warning: \[NoFunctionalReturnType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: This update of a volatile variable is non-atomic',
      'patterns': [r".*: warning: \[NonAtomicVolatileUpdate\] .+"]},
     {'category': 'java',
@@ -1010,6 +1150,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Dereference of possibly-null value',
+     'patterns': [r".*: warning: \[NullableDereference\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: @Nullable should not be used for primitive types since they cannot be null',
      'patterns': [r".*: warning: \[NullablePrimitive\] .+"]},
     {'category': 'java',
@@ -1025,6 +1170,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Objects.hashCode(Object o) should not be passed a primitive value',
+     'patterns': [r".*: warning: \[ObjectsHashCodePrimitive\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Use grouping parenthesis to make the operator precedence explicit',
      'patterns': [r".*: warning: \[OperatorPrecedence\] .+"]},
     {'category': 'java',
@@ -1070,8 +1220,13 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Protobuf fields cannot be null, so this check is redundant',
-     'patterns': [r".*: warning: \[ProtoFieldPreconditionsCheckNotNull\] .+"]},
+         'Java: A field on a protocol buffer was set twice in the same chained expression.',
+     'patterns': [r".*: warning: \[ProtoRedundantSet\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Protos should not be used as a key to a map, in a set, or in a contains method on a descendant of a collection. Protos have non deterministic ordering and proto equality is deep, which is a performance issue.',
+     'patterns': [r".*: warning: \[ProtosAsKeyOfSetOrMap\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -1110,7 +1265,12 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         r'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
+         'Java: Void methods should not have a @return tag.',
+     'patterns': [r".*: warning: \[ReturnFromVoid\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
      'patterns': [r".*: warning: \[ShortCircuitBoolean\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
@@ -1140,11 +1300,21 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: SWIG generated code that can\'t call a C++ destructor will leak memory',
+     'patterns': [r".*: warning: \[SwigMemoryLeak\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Synchronizing on non-final fields is not safe: if the field is ever updated, different threads may end up locking on different objects.',
      'patterns': [r".*: warning: \[SynchronizeOnNonFinalField\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Code that contains System.exit() is untestable.',
+     'patterns': [r".*: warning: \[SystemExitOutsideMain\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Using @Test(expected=...) is discouraged, since the test will pass if *any* statement in the test method throws the expected exception',
      'patterns': [r".*: warning: \[TestExceptionChecker\] .+"]},
     {'category': 'java',
@@ -1160,11 +1330,26 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Relying on the thread scheduler is discouraged; see Effective Java Item 72 (2nd edition) / 84 (3rd edition).',
+     'patterns': [r".*: warning: \[ThreadPriorityCheck\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Three-letter time zone identifiers are deprecated, may be ambiguous, and might not do what you intend; the full IANA time zone ID should be used instead.',
      'patterns': [r".*: warning: \[ThreeLetterTimeZoneID\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: An implementation of Object.toString() should never return null.',
+     'patterns': [r".*: warning: \[ToStringReturnsNull\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: The actual and expected values appear to be swapped, which results in poor assertion failure messages. The actual value should come first.',
+     'patterns': [r".*: warning: \[TruthAssertExpected\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Truth Library assert is called on a constant.',
      'patterns': [r".*: warning: \[TruthConstantAsserts\] .+"]},
     {'category': 'java',
@@ -1175,6 +1360,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Type parameter declaration shadows another named type',
+     'patterns': [r".*: warning: \[TypeNameShadowing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Type parameter declaration overrides another type parameter already declared',
      'patterns': [r".*: warning: \[TypeParameterShadowing\] .+"]},
     {'category': 'java',
@@ -1190,21 +1380,46 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Switch handles all enum values; an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
+         'Java: Collection, Iterable, Multimap, and Queue do not have well-defined equals behavior',
+     'patterns': [r".*: warning: \[UndefinedEquals\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Switch handles all enum values: an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
      'patterns': [r".*: warning: \[UnnecessaryDefaultInEnumSwitch\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Unnecessary use of grouping parentheses',
+     'patterns': [r".*: warning: \[UnnecessaryParentheses\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Finalizer may run before native code finishes execution',
      'patterns': [r".*: warning: \[UnsafeFinalization\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Prefer `asSubclass` instead of casting the result of `newInstance`, to detect classes of incorrect type before invoking their constructors.This way, if the class is of the incorrect type,it will throw an exception before invoking its constructor.',
+     'patterns': [r".*: warning: \[UnsafeReflectiveConstructionCast\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Unsynchronized method overrides a synchronized method.',
      'patterns': [r".*: warning: \[UnsynchronizedOverridesSynchronized\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Unused.',
+     'patterns': [r".*: warning: \[Unused\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: This catch block catches an exception and re-throws another, but swallows the caught exception rather than setting it as a cause. This can make debugging harder.',
+     'patterns': [r".*: warning: \[UnusedException\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Java assert is used in test. For testing purposes Assert.* matchers should be used.',
      'patterns': [r".*: warning: \[UseCorrectAssertInTests\] .+"]},
     {'category': 'java',
@@ -1215,6 +1430,11 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: variableName and type with the same name would refer to the static field instead of the class',
+     'patterns': [r".*: warning: \[VariableNameSameAsType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Because of spurious wakeups, Object.wait() and Condition.await() must always be called in a loop',
      'patterns': [r".*: warning: \[WaitNotInLoop\] .+"]},
     {'category': 'java',
@@ -1230,6 +1450,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Use of class, field, or method that is not compatible with legacy Android devices',
+     'patterns': [r".*: warning: \[AndroidJdkLibsChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Reference equality used to compare arrays',
      'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
     {'category': 'java',
@@ -1310,11 +1535,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         r'Java:  Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
+         'Java:  Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
      'patterns': [r".*: warning: \[ComparableType\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: this == null is always false, this != null is always true',
+     'patterns': [r".*: warning: \[ComparingThisWithNull\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: This comparison method violates the contract',
      'patterns': [r".*: warning: \[ComparisonContractViolated\] .+"]},
     {'category': 'java',
@@ -1395,6 +1625,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Comparing different pairs of fields/getters in an equals implementation is probably a mistake.',
+     'patterns': [r".*: warning: \[EqualsWrongThing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class, or from an override of the method',
      'patterns': [r".*: warning: \[ForOverride\] .+"]},
     {'category': 'java',
@@ -1510,6 +1745,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Members shouldn\'t be annotated with @Inject if constructor is already annotated @Inject',
+     'patterns': [r".*: warning: \[InjectOnMemberAndConstructor\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Scope annotation on an interface or abstact class is not allowed',
      'patterns': [r".*: warning: \[InjectScopeAnnotationOnInterfaceOrAbstractClass\] .+"]},
     {'category': 'java',
@@ -1550,7 +1790,7 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         r'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
+         'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
      'patterns': [r".*: warning: \[IterablePathParameter\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1590,6 +1830,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Use of class, field, or method that is not compatible with JDK 7',
+     'patterns': [r".*: warning: \[Java7ApiChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Abstract and default methods are not injectable with javax.inject.Inject',
      'patterns': [r".*: warning: \[JavaxInjectOnAbstractMethod\] .+"]},
     {'category': 'java',
@@ -1620,6 +1865,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Math.round(Integer) results in truncation',
+     'patterns': [r".*: warning: \[MathRoundIntLong\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Certain resources in `android.R.string` have names that do not match their content',
      'patterns': [r".*: warning: \[MislabeledAndroidString\] .+"]},
     {'category': 'java',
@@ -1630,6 +1880,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: A terminating method call is required for a test helper to have any effect.',
+     'patterns': [r".*: warning: \[MissingTestCall\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Use of "YYYY" (week year) in a date pattern without "ww" (week in year). You probably meant to use "yyyy" (year) instead.',
      'patterns': [r".*: warning: \[MisusedWeekYear\] .+"]},
     {'category': 'java',
@@ -1735,7 +1990,7 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Using ::equals as an incompatible Predicate; the predicate will always return false',
+         'Java: Using ::equals or ::isInstance as an incompatible Predicate; the predicate will always return false',
      'patterns': [r".*: warning: \[PredicateIncompatibleType\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1745,7 +2000,7 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Protobuf fields cannot be null',
+         'Java: Protobuf fields cannot be null.',
      'patterns': [r".*: warning: \[ProtoFieldNullComparison\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1835,6 +2090,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: String.substring(0) returns the original String',
+     'patterns': [r".*: warning: \[SubstringOfZero\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Suppressing "deprecated" is probably a typo for "deprecation"',
      'patterns': [r".*: warning: \[SuppressWarningsDeprecated\] .+"]},
     {'category': 'java',