[automerger skipped] Merge "Make init.userspace_reboot.is_supported a rw property" into rvc-dev am: 1524571ff3 am: 5e49215410 am: f97e1791e6 am: 32a816322c -s ours

am skip reason: Change-Id Ie3604b315a5d2a846037064ad9a29258f2cad9d0 with SHA-1 8e12e1be21 is in history

Change-Id: I08eff9d303e01a6f388636d80ba7fde80fa8d489
diff --git a/core/Makefile b/core/Makefile
index c93bb09..db4114d 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -863,10 +863,17 @@
 # -----------------------------------------------------------------
 # Cert-to-package mapping.  Used by the post-build signing tools.
 # Use a macro to add newline to each echo command
+# $1 package name
+# $2 certificate
+# $3 private key
+# $4 compressed
+# $5 partition tag
+# $6 output file
 define _apkcerts_write_line
-$(hide) echo -n 'name="$(1).apk" certificate="$2" private_key="$3"' >> $5
-$(if $(4), $(hide) echo -n ' compressed="$4"' >> $5)
-$(hide) echo '' >> $5
+$(hide) echo -n 'name="$(1).apk" certificate="$2" private_key="$3"' >> $6
+$(if $(4), $(hide) echo -n ' compressed="$4"' >> $6)
+$(if $(5), $(hide) echo -n ' partition="$5"' >> $6)
+$(hide) echo '' >> $6
 
 endef
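For illustration only (not part of the change): with the new partition argument, each apkcerts.txt entry takes the form below; the package name, key paths, and partition value are hypothetical, and the compressed and partition attributes are emitted only when $(4) and $(5) are set, respectively.

    name="Example.apk" certificate="certs/platform.x509.pem" private_key="certs/platform.pk8" compressed="gz" partition="system"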
 
@@ -886,8 +893,8 @@
 	@rm -f $@
 	$(foreach p,$(sort $(PACKAGES)),\
 	  $(if $(PACKAGES.$(p).EXTERNAL_KEY),\
-	    $(call _apkcerts_write_line,$(p),"EXTERNAL","",$(PACKAGES.$(p).COMPRESSED),$@),\
-	    $(call _apkcerts_write_line,$(p),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$@)))
+	    $(call _apkcerts_write_line,$(p),"EXTERNAL","",$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@),\
+	    $(call _apkcerts_write_line,$(p),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@)))
 	# In case value of PACKAGES is empty.
 	$(hide) touch $@
 
@@ -1034,7 +1041,12 @@
   INSTALLED_2NDBOOTLOADER_TARGET :=
 endif # TARGET_NO_BOOTLOADER
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
-  INSTALLED_KERNEL_TARGET := $(PRODUCT_OUT)/kernel
+  ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
+    INSTALLED_KERNEL_TARGET := $(foreach k,$(BOARD_KERNEL_BINARIES), \
+      $(PRODUCT_OUT)/$(k))
+  else
+    INSTALLED_KERNEL_TARGET := $(PRODUCT_OUT)/kernel
+  endif
 else
   INSTALLED_KERNEL_TARGET :=
 endif
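For illustration only (not part of the change): a board shipping multiple kernels might set, in a hypothetical BoardConfig.mk,

    BOARD_KERNEL_BINARIES := kernel kernel-4.19

so INSTALLED_KERNEL_TARGET expands to $(PRODUCT_OUT)/kernel $(PRODUCT_OUT)/kernel-4.19. The hunks below derive the matching boot image names (boot.img, boot-4.19.img) and boot-debug image names (boot-debug.img, boot-debug-4.19.img) from the same list via $(subst kernel,boot,...).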
@@ -1106,7 +1118,12 @@
 
 # This is defined here since we may be building recovery as boot
 # below and only want to define this once
-BUILT_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
+ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
+  BUILT_BOOTIMAGE_TARGET := $(foreach k,$(subst kernel,boot,$(BOARD_KERNEL_BINARIES)), $(PRODUCT_OUT)/$(k).img)
+else
+  BUILT_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
+endif
+
 
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
 INTERNAL_BOOTIMAGE_ARGS := \
@@ -1316,6 +1333,7 @@
 # $(5) - Directory to use.  Notice files are all $(5)/src.  Other
 #		 directories in there will be used for scratch
 # $(6) - Dependencies for the output files
+# $(7) - Directories to exclude
 #
 # The algorithm here is that we go collect a hash for each of the notice
 # files and write the names of the files that match that hash.  Then
@@ -1333,7 +1351,7 @@
 $(2) $(3): PRIVATE_DIR := $(5)
 $(2) : $(3)
 $(3) : $(6) $(BUILD_SYSTEM)/Makefile build/make/tools/generate-notice-files.py
-	build/make/tools/generate-notice-files.py --text-output $(2) \
+	build/make/tools/generate-notice-files.py --text-output $(2) $(foreach xdir, $(7), -e $(xdir) )\
 	    $(if $(filter $(1),xml_excluded_vendor_product_odm),-e vendor -e product -e system_ext -e odm --xml-output, \
 	      $(if $(filter $(1),xml_excluded_system_product_odm),-e system -e product -e system_ext -e odm --xml-output, \
 	        $(if $(filter $(1),xml_product),-i product --xml-output, \
@@ -1358,6 +1376,11 @@
 winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
 pdk_fusion_notice_files := $(filter $(TARGET_OUT_NOTICE_FILES)/%, $(ALL_PDK_FUSION_FILES))
 
+# Some targets get included under $(PRODUCT_OUT) for debug symbols or other
+# reasons and are not flashed onto any device. Targets under these directories
+# need no associated notice file in the device UI.
+exclude_target_dirs := apex
+
 # TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior.
 ifneq ($(PRODUCT_NOTICE_SPLIT),true)
 target_notice_file_html := $(TARGET_OUT_INTERMEDIATES)/NOTICE.html
@@ -1368,7 +1391,8 @@
 	        $(target_notice_file_html), \
 	        "Notices for files contained in the filesystem images in this directory:", \
 	        $(TARGET_OUT_NOTICE_FILES), \
-	        $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)))
+	        $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files), \
+	        $(exclude_target_dirs)))
 $(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
 $(installed_notice_html_or_xml_gz): $(target_notice_file_html_gz)
@@ -1402,10 +1426,13 @@
 # being built. A notice xml file must depend on all modules that could potentially
 # install a license file relevant to it.
 license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)
+# Only files copied to a system image need system image notices.
+license_modules := $(filter $(PRODUCT_OUT)/%,$(license_modules))
 # Phonys/fakes don't have notice files (though their deps might)
 license_modules := $(filter-out $(TARGET_OUT_FAKE)/%,$(license_modules))
 # testcases are not relevant to the system image.
 license_modules := $(filter-out $(TARGET_OUT_TESTCASES)/%,$(license_modules))
+# filesystem images: system, vendor, product, system_ext, and odm
 license_modules_system := $(filter $(TARGET_OUT)/%,$(license_modules))
 license_modules_vendor := $(filter $(TARGET_OUT_VENDOR)/%,$(license_modules))
 license_modules_product := $(filter $(TARGET_OUT_PRODUCT)/%,$(license_modules))
@@ -1416,16 +1443,44 @@
                        $(license_modules_product) \
                        $(license_modules_system_ext) \
                        $(license_modules_odm)
+# Targets used for debug symbols only; they do not get copied to the device.
+license_modules_symbols_only := $(filter $(PRODUCT_OUT)/apex/%,$(license_modules))
+
 license_modules_rest := $(filter-out $(license_modules_agg),$(license_modules))
+license_modules_rest := $(filter-out $(license_modules_symbols_only),$(license_modules_rest))
+
+# Identify the other targets we expect to have notices for:
+# targets that are copied to the device but are not readable by the UI (e.g.
+# one must boot into a different partition to read them, or they have no
+# associated /etc directory) must have their notices built somewhere readable.
+license_modules_rehomed := $(filter-out $(PRODUCT_OUT)/%/%,$(license_modules_rest))  # files in root have no /etc
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/recovery/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/root/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/data/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/debug_ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/vendor-ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist.img,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/system_other/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/kernel%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/%.img,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/%.bin,$(license_modules_rest))
+
+# After removing targets in the filesystem images, targets whose notices are
+# rehomed, and debug-symbol-only targets that need no notices, nothing must remain.
+license_modules_rest := $(filter-out $(license_modules_rehomed),$(license_modules_rest))
+$(call maybe-print-list-and-error, $(license_modules_rest), \
+        "Targets added under $(PRODUCT_OUT)/ are unaccounted for in notice handling.")
 
 # If we are building in a configuration that includes a prebuilt vendor.img, we can't
 # update its notice file, so include those notices in the system partition instead
 ifdef BOARD_PREBUILT_VENDORIMAGE
-license_modules_system += $(license_modules_rest)
+license_modules_system += $(license_modules_rehomed)
 system_xml_directories := xml_excluded_vendor_product_odm
 system_notice_file_message := "Notices for files contained in all filesystem images except vendor/system_ext/product/odm in this directory:"
 else
-license_modules_vendor += $(license_modules_rest)
+license_modules_vendor += $(license_modules_rehomed)
 system_xml_directories := xml_system
 system_notice_file_message := "Notices for files contained in the system filesystem image in this directory:"
 endif
@@ -1435,31 +1490,36 @@
 	        $(target_notice_file_xml), \
 	        $(system_notice_file_message), \
 	        $(TARGET_OUT_NOTICE_FILES), \
-	        $(license_modules_system)))
+	        $(license_modules_system), \
+	        $(exclude_target_dirs)))
 $(eval $(call combine-notice-files, xml_excluded_system_product_odm, \
 	        $(target_vendor_notice_file_txt), \
 	        $(target_vendor_notice_file_xml), \
 	        "Notices for files contained in all filesystem images except system/system_ext/product/odm in this directory:", \
 	        $(TARGET_OUT_NOTICE_FILES), \
-	        $(license_modules_vendor)))
+	        $(license_modules_vendor), \
+	        $(exclude_target_dirs)))
 $(eval $(call combine-notice-files, xml_product, \
 	        $(target_product_notice_file_txt), \
 	        $(target_product_notice_file_xml), \
 	        "Notices for files contained in the product filesystem image in this directory:", \
 	        $(TARGET_OUT_NOTICE_FILES), \
-	        $(license_modules_product)))
+	        $(license_modules_product), \
+	        $(exclude_target_dirs)))
 $(eval $(call combine-notice-files, xml_system_ext, \
 	        $(target_system_ext_notice_file_txt), \
 	        $(target_system_ext_notice_file_xml), \
 	        "Notices for files contained in the system_ext filesystem image in this directory:", \
 	        $(TARGET_OUT_NOTICE_FILES), \
-	        $(license_modules_system_ext)))
+	        $(license_modules_system_ext), \
+	        $(exclude_target_dirs)))
 $(eval $(call combine-notice-files, xml_odm, \
 	        $(target_odm_notice_file_txt), \
 	        $(target_odm_notice_file_xml), \
 	        "Notices for files contained in the odm filesystem image in this directory:", \
 	        $(TARGET_OUT_NOTICE_FILES), \
-	        $(license_modules_odm)))
+	        $(license_modules_odm), \
+	        $(exclude_target_dirs)))
 
 $(target_notice_file_xml_gz): $(target_notice_file_xml) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
@@ -1497,7 +1557,8 @@
 	        "Notices for files contained in the tools directory:", \
 	        $(HOST_OUT_NOTICE_FILES), \
 	        $(ALL_DEFAULT_INSTALLED_MODULES) \
-	        $(winpthreads_notice_file)))
+	        $(winpthreads_notice_file), \
+	        $(exclude_target_dirs)))
 
 endif  # TARGET_BUILD_APPS
 
@@ -2071,7 +2132,7 @@
 else # INSTALLED_VENDOR_BOOTIMAGE_TARGET not defined
   INTERNAL_RECOVERYIMAGE_ARGS := \
       $(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
-      --kernel $(recovery_kernel) --ramdisk $(recovery_ramdisk)
+      --ramdisk $(recovery_ramdisk)
 # Assumes this has already been stripped
 ifdef INTERNAL_KERNEL_CMDLINE
   INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(INTERNAL_KERNEL_CMDLINE)"
@@ -2097,38 +2158,53 @@
 endif
 endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET not defined
 
+$(recovery_ramdisk): $(MKBOOTFS) $(MINIGZIP) \
+	    $(INTERNAL_ROOT_FILES) \
+	    $(INSTALLED_RAMDISK_TARGET) \
+	    $(INTERNAL_RECOVERYIMAGE_FILES) \
+	    $(recovery_sepolicy) \
+	    $(INSTALLED_2NDBOOTLOADER_TARGET) \
+	    $(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
+	    $(recovery_resource_deps) \
+	    $(recovery_fstab)
+	# Making recovery image
+	mkdir -p $(TARGET_RECOVERY_OUT)
+	mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/sdcard $(TARGET_RECOVERY_ROOT_OUT)/tmp
+	# Copying baseline ramdisk...
+	# Use rsync because "cp -Rf" fails to overwrite broken symlinks on Mac.
+	rsync -a --exclude=sdcard $(IGNORE_RECOVERY_SEPOLICY) $(IGNORE_CACHE_LINK) $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
+	# Modifying ramdisk contents...
+	$(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),, \
+	  ln -sf /system/bin/init $(TARGET_RECOVERY_ROOT_OUT)/init)
+	# Removes $(TARGET_RECOVERY_ROOT_OUT)/init*.rc EXCEPT init.recovery*.rc.
+	find $(TARGET_RECOVERY_ROOT_OUT) -maxdepth 1 -name 'init*.rc' -type f -not -name "init.recovery.*.rc" | xargs rm -f
+	cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/ 2> /dev/null || true # Ignore error when the src file doesn't exist.
+	mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/res
+	rm -rf $(TARGET_RECOVERY_ROOT_OUT)/res/*
+	cp -rf $(recovery_resources_common)/* $(TARGET_RECOVERY_ROOT_OUT)/res
+	$(foreach recovery_text_file,$(generated_recovery_text_files), \
+	  cp -rf $(recovery_text_file) $(TARGET_RECOVERY_ROOT_OUT)/res/images/ &&) true
+	cp -f $(recovery_font) $(TARGET_RECOVERY_ROOT_OUT)/res/images/font.png
+	$(foreach item,$(TARGET_PRIVATE_RES_DIRS), \
+	  cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/$(newline))
+	$(foreach item,$(recovery_fstab), \
+	  cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.fstab)
+	$(if $(strip $(recovery_wipe)), \
+	  cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.wipe)
+	ln -sf prop.default $(TARGET_RECOVERY_ROOT_OUT)/default.prop
+	$(BOARD_RECOVERY_IMAGE_PREPARE)
+	$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
+
 # $(1): output file
+# $(2): kernel file
 define build-recoveryimage-target
-  # Making recovery image
-  $(hide) mkdir -p $(TARGET_RECOVERY_OUT)
-  $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/sdcard $(TARGET_RECOVERY_ROOT_OUT)/tmp
-  # Copying baseline ramdisk...
-  # Use rsync because "cp -Rf" fails to overwrite broken symlinks on Mac.
-  $(hide) rsync -a --exclude=sdcard $(IGNORE_RECOVERY_SEPOLICY) $(IGNORE_CACHE_LINK) $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
-  # Modifying ramdisk contents...
-  $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),, \
-    $(hide) ln -sf /system/bin/init $(TARGET_RECOVERY_ROOT_OUT)/init)
-  # Removes $(TARGET_RECOVERY_ROOT_OUT)/init*.rc EXCEPT init.recovery*.rc.
-  $(hide) find $(TARGET_RECOVERY_ROOT_OUT) -maxdepth 1 -name 'init*.rc' -type f -not -name "init.recovery.*.rc" | xargs rm -f
-  $(hide) cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/ 2> /dev/null || true # Ignore error when the src file doesn't exist.
-  $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/res
-  $(hide) rm -rf $(TARGET_RECOVERY_ROOT_OUT)/res/*
-  $(hide) cp -rf $(recovery_resources_common)/* $(TARGET_RECOVERY_ROOT_OUT)/res
-  $(hide) $(foreach recovery_text_file,$(generated_recovery_text_files), \
-    cp -rf $(recovery_text_file) $(TARGET_RECOVERY_ROOT_OUT)/res/images/ &&) true
-  $(hide) cp -f $(recovery_font) $(TARGET_RECOVERY_ROOT_OUT)/res/images/font.png
-  $(hide) $(foreach item,$(TARGET_PRIVATE_RES_DIRS), \
-    cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/$(newline))
-  $(hide) $(foreach item,$(recovery_fstab), \
-    cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.fstab)
-  $(if $(strip $(recovery_wipe)), \
-    $(hide) cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.wipe)
-  $(hide) ln -sf prop.default $(TARGET_RECOVERY_ROOT_OUT)/default.prop
-  $(BOARD_RECOVERY_IMAGE_PREPARE)
-  $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
   $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
-    $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned, \
-    $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1))
+    $(MKBOOTIMG) --kernel $(2) $(MKBOOTIMG_KERNEL_ARG) $(INTERNAL_RECOVERYIMAGE_ARGS) \
+                 $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) \
+                 --output $(1).unsigned, \
+    $(MKBOOTIMG) --kernel $(2) $(MKBOOTIMG_KERNEL_ARG) $(INTERNAL_RECOVERYIMAGE_ARGS) \
+                 $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) \
+                 --output $(1))
   $(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\
     $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
       $(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1),\
@@ -2138,12 +2214,12 @@
   $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
     $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
-    $(hide) $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))), \
-    $(hide) $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))))
+    $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))), \
+    $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))))
   $(if $(filter true,$(BOARD_AVB_ENABLE)), \
     $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
-      $(hide) $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS),\
-      $(hide) $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_RECOVERYIMAGE_PARTITION_SIZE) --partition_name recovery $(INTERNAL_AVB_RECOVERY_SIGNING_ARGS) $(BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS)))
+      $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS),\
+      $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_RECOVERYIMAGE_PARTITION_SIZE) --partition_name recovery $(INTERNAL_AVB_RECOVERY_SIGNING_ARGS) $(BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS)))
 endef
 
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
@@ -2170,18 +2246,10 @@
 $(INSTALLED_BOOTIMAGE_TARGET): $(INSTALLED_DTBIMAGE_TARGET)
 endif
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
-	    $(INTERNAL_ROOT_FILES) \
-	    $(INSTALLED_RAMDISK_TARGET) \
-	    $(INTERNAL_RECOVERYIMAGE_FILES) \
-	    $(recovery_sepolicy) $(recovery_kernel) \
-	    $(INSTALLED_2NDBOOTLOADER_TARGET) \
-	    $(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
-	    $(recovery_resource_deps) \
-	    $(recovery_fstab)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(recovery_ramdisk) \
+	    $(recovery_kernel)
 	$(call pretty,"Target boot image from recovery: $@")
-	$(call build-recoveryimage-target, $@)
-$(INSTALLED_BOOTIMAGE_TARGET): .KATI_IMPLICIT_OUTPUTS += $(recovery_ramdisk)
+	$(call build-recoveryimage-target, $@, $(PRODUCT_OUT)/$(subst .img,,$(subst boot,kernel,$(notdir $@))))
 endif # BOARD_USES_RECOVERY_AS_BOOT
 
 ifdef BOARD_INCLUDE_RECOVERY_DTBO
@@ -2198,17 +2266,9 @@
 $(INSTALLED_RECOVERYIMAGE_TARGET): $(INSTALLED_DTBIMAGE_TARGET)
 endif
 
-$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
-	    $(INTERNAL_ROOT_FILES) \
-	    $(INSTALLED_RAMDISK_TARGET) \
-	    $(INSTALLED_BOOTIMAGE_TARGET) \
-	    $(INTERNAL_RECOVERYIMAGE_FILES) \
-	    $(recovery_sepolicy) $(recovery_kernel) \
-	    $(INSTALLED_2NDBOOTLOADER_TARGET) \
-	    $(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
-	    $(recovery_resource_deps) \
-	    $(recovery_fstab)
-	$(call build-recoveryimage-target, $@)
+$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTIMG) $(recovery_ramdisk) \
+	    $(recovery_kernel)
+	$(call build-recoveryimage-target, $@, $(recovery_kernel))
 
 ifdef RECOVERY_RESOURCE_ZIP
 $(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ZIPTIME)
@@ -2319,8 +2379,12 @@
 # Note: it's intentional to skip signing for boot-debug.img, because it
 # can only be used if the device is unlocked with verification error.
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
-
-INSTALLED_DEBUG_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot-debug.img
+ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
+  INSTALLED_DEBUG_BOOTIMAGE_TARGET := $(foreach k,$(subst kernel,boot-debug,$(BOARD_KERNEL_BINARIES)), \
+         $(PRODUCT_OUT)/$(k).img)
+else
+  INSTALLED_DEBUG_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot-debug.img
+endif
 
 # Replace ramdisk.img in $(MKBOOTIMG) ARGS with ramdisk-debug.img to build boot-debug.img
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
@@ -2350,17 +2414,22 @@
 $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 endef
 
+# $(1): output file
+define build-debug-bootimage-target
+  $(MKBOOTIMG) --kernel $(PRODUCT_OUT)/$(subst .img,,$(subst boot-debug,kernel,$(notdir $(1)))) \
+    $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $1
+  $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$1))
+endef
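For illustration only (not part of the change): for a hypothetical output named boot-debug-4.19.img, the nested subst calls first rewrite boot-debug to kernel (giving kernel-4.19.img) and then strip the .img suffix, so the image is built with --kernel $(PRODUCT_OUT)/kernel-4.19; plain boot-debug.img reduces to $(PRODUCT_OUT)/kernel.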
+
 # Depends on original boot.img and ramdisk-debug.img, to build the new boot-debug.img
 $(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
 	$(call pretty,"Target boot debug image: $@")
-	$(MKBOOTIMG) $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
-	$(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$@))
+	$(call build-debug-bootimage-target, $@)
 
 .PHONY: bootimage_debug-nodeps
 bootimage_debug-nodeps: $(MKBOOTIMG)
 	echo "make $@: ignoring dependencies"
-	$(MKBOOTIMG) $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
-	$(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET)))
+	$(foreach b,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(call build-debug-bootimage-target,$b))
 
 endif # TARGET_NO_KERNEL
 
@@ -2688,11 +2757,8 @@
 	@echo "make $@: ignoring dependencies"
 	$(call build-systemimage-target,$(INSTALLED_SYSTEMIMAGE_TARGET))
 	$(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
-
-ifneq (,$(filter systemimage-nodeps snod, $(MAKECMDGOALS)))
 ifeq (true,$(WITH_DEXPREOPT))
-$(warning Warning: with dexpreopt enabled, you may need a full rebuild.)
-endif
+	$(warning Warning: with dexpreopt enabled, you may need a full rebuild.)
 endif
 
 endif # BUILDING_SYSTEM_IMAGE
@@ -3934,6 +4000,7 @@
 INTERNAL_OTATOOLS_MODULES := \
   aapt2 \
   add_img_to_target_files \
+  aftltool \
   append2simg \
   avbtool \
   blk_alloc_to_base_fs \
@@ -4427,7 +4494,7 @@
 	$(hide) $(call package_files-copy-root, \
 	    $(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
 ifdef INSTALLED_KERNEL_TARGET
-	cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
+	cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/
 endif
 ifdef INSTALLED_VENDOR_BOOTIMAGE_TARGET
 	echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
@@ -4848,6 +4915,19 @@
 	$(hide) find $(TARGET_OUT_COVERAGE) | sort >$(PRIVATE_LIST_FILE)
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(TARGET_OUT_COVERAGE) -l $(PRIVATE_LIST_FILE)
 
+#------------------------------------------------------------------
+# Export the LLVM profile data tool and dependencies for Clang coverage processing
+#
+ifeq (true,$(CLANG_COVERAGE))
+  LLVM_PROFDATA := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-profdata
+  LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib64/libc++.so.1
+  PROFDATA_ZIP := $(PRODUCT_OUT)/llvm-profdata.zip
+  $(PROFDATA_ZIP): $(SOONG_ZIP)
+	$(hide) $(SOONG_ZIP) -d -o $@ -C $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION) -f $(LLVM_PROFDATA) -f $(LIBCXX)
+
+  $(call dist-for-goals,droidcore,$(PROFDATA_ZIP))
+endif
+
 # -----------------------------------------------------------------
 # A zip of the Android Apps. Not keeping full path so that we don't
 # include product names when distributing
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index ce554c9..05d9001 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -167,6 +167,9 @@
 
 include $(BUILD_SYSTEM)/app_certificate_validate.mk
 
+# Set the actual_partition_tag (calculated in base_rules.mk) for the package.
+PACKAGES.$(LOCAL_MODULE).PARTITION := $(actual_partition_tag)
+
 # Disable dex-preopt of prebuilts to save space, if requested.
 ifndef LOCAL_DEX_PREOPT
 ifeq ($(DONT_DEXPREOPT_PREBUILTS),true)
diff --git a/core/base_rules.mk b/core/base_rules.mk
index cce6ec1..f78e509 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -208,23 +208,39 @@
 my_module_relative_path := $(strip $(LOCAL_MODULE_RELATIVE_PATH))
 ifdef LOCAL_IS_HOST_MODULE
   partition_tag :=
+  actual_partition_tag :=
 else
 ifeq (true,$(strip $(LOCAL_VENDOR_MODULE)))
   partition_tag := _VENDOR
+  # A vendor module could be on the vendor partition at "vendor" or the system
+  # partition at "system/vendor".
+  actual_partition_tag := $(if $(filter true,$(BOARD_USES_VENDORIMAGE)),vendor,system)
 else ifeq (true,$(strip $(LOCAL_OEM_MODULE)))
   partition_tag := _OEM
+  actual_partition_tag := oem
 else ifeq (true,$(strip $(LOCAL_ODM_MODULE)))
   partition_tag := _ODM
+  # An ODM module could be on the odm partition at "odm", the vendor partition
+  # at "vendor/odm", or the system partition at "system/vendor/odm".
+  actual_partition_tag := $(if $(filter true,$(BOARD_USES_ODMIMAGE)),odm,$(if $(filter true,$(BOARD_USES_VENDORIMAGE)),vendor,system))
 else ifeq (true,$(strip $(LOCAL_PRODUCT_MODULE)))
   partition_tag := _PRODUCT
+  # A product module could be on the product partition at "product" or the
+  # system partition at "system/product".
+  actual_partition_tag := $(if $(filter true,$(BOARD_USES_PRODUCTIMAGE)),product,system)
 else ifeq (true,$(strip $(LOCAL_SYSTEM_EXT_MODULE)))
   partition_tag := _SYSTEM_EXT
+  # A system_ext-specific module could be on the system_ext partition at
+  # "system_ext" or the system partition at "system/system_ext".
+  actual_partition_tag := $(if $(filter true,$(BOARD_USES_SYSTEM_EXTIMAGE)),system_ext,system)
 else ifeq (NATIVE_TESTS,$(LOCAL_MODULE_CLASS))
   partition_tag := _DATA
+  actual_partition_tag := data
 else
   # The definition of should-install-to-system will be different depending
   # on which goal (e.g., sdk or just droid) is being built.
   partition_tag := $(if $(call should-install-to-system,$(my_module_tags)),,_DATA)
+  actual_partition_tag := $(if $(partition_tag),data,system)
 endif
 endif
 # For test modules that lack a suite tag, set null-suite as the default.
@@ -705,13 +721,19 @@
 
 ifeq ($(use_testcase_folder),true)
 ifneq ($(my_test_data_file_pairs),)
+# Filter out existing installed test data paths when collecting test data files to be installed
+# and indexed, as they cause build rule conflicts. Instead, put them in a separate list that is
+# only used for indexing.
 $(foreach pair, $(my_test_data_file_pairs), \
   $(eval parts := $(subst :,$(space),$(pair))) \
   $(eval src_path := $(word 1,$(parts))) \
   $(eval file := $(word 2,$(parts))) \
   $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
     $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
-      $(call filter-copy-pair,$(src_path),$(call append-path,$(dir),$(file)),$(my_installed_test_data))))))
+      $(call filter-copy-pair,$(src_path),$(call append-path,$(dir),$(file)),$(my_installed_test_data)))) \
+    $(eval my_compat_dist_test_data_$(suite) += \
+      $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
+        $(filter $(my_installed_test_data),$(call append-path,$(dir),$(file)))))))
 endif
 else
 ifneq ($(my_test_data_file_pairs),)
@@ -732,7 +754,8 @@
 
 $(call create-suite-dependencies)
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
-  $(eval my_compat_dist_config_$(suite) := ))
+  $(eval my_compat_dist_config_$(suite) := ) \
+  $(eval my_compat_dist_test_data_$(suite) := ))
 
 endif  # LOCAL_COMPATIBILITY_SUITE
 
diff --git a/core/binary.mk b/core/binary.mk
index 38ff9d6..e021b7d 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -110,19 +110,8 @@
   # Make sure we've built the NDK.
   my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
 
-  # mips32r6 is not supported by the NDK. No released NDK contains these
-  # libraries, but the r10 in prebuilts/ndk had a local hack to add them :(
-  #
-  # We need to find a real solution to this problem, but until we do just drop
-  # mips32r6 things back to r10 to get the tree building again.
-  ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
-    ifeq ($(LOCAL_NDK_VERSION), current)
-      LOCAL_NDK_VERSION := r10
-    endif
-  endif
-
   my_arch := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
-  ifneq (,$(filter arm64 mips64 x86_64,$(my_arch)))
+  ifneq (,$(filter arm64 x86_64,$(my_arch)))
     my_min_sdk_version := 21
   else
     my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
@@ -156,17 +145,11 @@
       $(my_built_ndk)/sysroot/usr/include/$(my_ndk_triple) \
       $(my_ndk_sysroot)/usr/include \
 
-  # x86_64 and and mips64 are both multilib toolchains, so their libraries are
+  # x86_64 is a multilib toolchain, so its libraries are
   # installed in /usr/lib64. Aarch64, on the other hand, is not a multilib
   # compiler, so its libraries are in /usr/lib.
-  #
-  # Mips32r6 is yet another variation, with libraries installed in libr6.
-  #
-  # For the rest, the libraries are installed simply to /usr/lib.
-  ifneq (,$(filter x86_64 mips64,$(my_arch)))
+  ifneq (,$(filter x86_64,$(my_arch)))
     my_ndk_libdir_name := lib64
-  else ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
-    my_ndk_libdir_name := libr6
   else
     my_ndk_libdir_name := lib
   endif
@@ -180,11 +163,7 @@
   # hashes (which are much faster!), but shipping to older devices requires
   # the old style hash. Fortunately, we can build with both and it'll work
   # anywhere.
-  #
-  # This is not currently supported on MIPS architectures.
-  ifeq (,$(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
-    my_ldflags += -Wl,--hash-style=both
-  endif
+  my_ldflags += -Wl,--hash-style=both
 
   # We don't want to expose the relocation packer to the NDK just yet.
   LOCAL_PACK_MODULE_RELOCATIONS := false
@@ -195,9 +174,6 @@
   my_ndk_stl_shared_lib_fullpath :=
   my_ndk_stl_static_lib :=
   my_cpu_variant := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)CPU_ABI)
-  ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
-    my_cpu_variant := mips32r6
-  endif
   LOCAL_NDK_STL_VARIANT := $(strip $(LOCAL_NDK_STL_VARIANT))
   ifeq (,$(LOCAL_NDK_STL_VARIANT))
     LOCAL_NDK_STL_VARIANT := system
diff --git a/core/build-system.html b/core/build-system.html
index cc242d9..9cd7b0b 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -467,8 +467,6 @@
         <b>TARGET_ARCH</b><br/>
         arm<br/>
         arm64<br/>
-        mips<br/>
-        mips64<br/>
         x86<br/>
         x86_64
     </td>
diff --git a/core/clang/TARGET_mips.mk b/core/clang/TARGET_mips.mk
deleted file mode 100644
index 3e54a66..0000000
--- a/core/clang/TARGET_mips.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-$(clang_2nd_arch_prefix)RS_TRIPLE := renderscript32-linux-androideabi
-$(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS :=
-RS_COMPAT_TRIPLE := mipsel-linux-android
-
-$(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-mipsel-android.a
-
-# Address sanitizer clang config
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
diff --git a/core/clang/TARGET_mips64.mk b/core/clang/TARGET_mips64.mk
deleted file mode 100644
index cb6a3cd..0000000
--- a/core/clang/TARGET_mips64.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-RS_TRIPLE := renderscript64-linux-android
-RS_TRIPLE_CFLAGS :=
-RS_COMPAT_TRIPLE := mips64el-linux-android
-
-TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-mips64el-android.a
-
-# Address sanitizer clang config
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
diff --git a/core/combo/TARGET_linux-mips.mk b/core/combo/TARGET_linux-mips.mk
deleted file mode 100644
index 9f14aa2..0000000
--- a/core/combo/TARGET_linux-mips.mk
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Copyright (C) 2010 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Configuration for Linux on MIPS.
-# Included by combo/select.mk
-
-# You can set TARGET_ARCH_VARIANT to use an arch version other
-# than mips32r2-fp. Each value should correspond to a file named
-# $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions. Their
-# purpose is to allow module Android.mk files to selectively compile
-# different versions of code based upon the funtionality and
-# instructions available in a given architecture version.
-#
-# The blocks also define specific arch_variant_cflags, which
-# include defines, and compiler settings for the given architecture
-# version.
-#
-ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)),)
-TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := mips32r2-fp
-endif
-
-include $(BUILD_SYSTEM)/combo/fdo.mk
-
-define $(combo_var_prefix)transform-shared-lib-to-toc
-$(call _gen_toc_command_for_elf,$(1),$(2))
-endef
-
-$(combo_2nd_arch_prefix)TARGET_PACK_MODULE_RELOCATIONS := true
-
-$(combo_2nd_arch_prefix)TARGET_LINKER := /system/bin/linker
diff --git a/core/combo/TARGET_linux-mips64.mk b/core/combo/TARGET_linux-mips64.mk
deleted file mode 100644
index ae17e46..0000000
--- a/core/combo/TARGET_linux-mips64.mk
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Configuration for Linux on MIPS64.
-# Included by combo/select.mk
-
-# You can set TARGET_ARCH_VARIANT to use an arch version other
-# than mips64r6. Each value should correspond to a file named
-# $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions. Their
-# purpose is to allow module Android.mk files to selectively compile
-# different versions of code based upon the funtionality and
-# instructions available in a given architecture version.
-#
-# The blocks also define specific arch_variant_cflags, which
-# include defines, and compiler settings for the given architecture
-# version.
-#
-ifeq ($(strip $(TARGET_ARCH_VARIANT)),)
-TARGET_ARCH_VARIANT := mips64r6
-endif
-
-include $(BUILD_SYSTEM)/combo/fdo.mk
-
-define $(combo_var_prefix)transform-shared-lib-to-toc
-$(call _gen_toc_command_for_elf,$(1),$(2))
-endef
-
-TARGET_PACK_MODULE_RELOCATIONS := true
-
-TARGET_LINKER := /system/bin/linker64
diff --git a/core/config.mk b/core/config.mk
index 844d7d6..b329b0d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -820,7 +820,7 @@
   MAINLINE_SEPOLICY_DEV_CERTIFICATES := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))
 endif
 
-BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_number.txt)
 BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
 
 # SEPolicy versions
@@ -1219,23 +1219,6 @@
 # These goals don't need to collect and include Android.mks/CleanSpec.mks
 # in the source tree.
 dont_bother_goals := out \
-    snod systemimage-nodeps \
-    userdataimage-nodeps \
-    cacheimage-nodeps \
-    bptimage-nodeps \
-    vnod vendorimage-nodeps \
-    pnod productimage-nodeps \
-    senod systemextimage-nodeps \
-    onod odmimage-nodeps \
-    systemotherimage-nodeps \
-    ramdisk-nodeps \
-    ramdisk_debug-nodeps \
-    ramdisk_test_harness-nodeps \
-    bootimage-nodeps \
-    bootimage_debug-nodeps \
-    bootimage_test_harness-nodeps \
-    recoveryimage-nodeps \
-    vbmetaimage-nodeps \
     product-graph dump-products
 
 ifeq ($(CALLED_FROM_SETUP),true)
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index efb21e7..8c76a8d 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -134,12 +134,6 @@
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
 
-# CFI needs gold linker, and mips toolchain does not have one.
-ifneq ($(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
-  my_sanitize := $(filter-out cfi,$(my_sanitize))
-  my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
-endif
-
 # Disable sanitizers which need the UBSan runtime for host targets.
 ifdef LOCAL_IS_HOST_MODULE
   my_sanitize := $(filter-out cfi,$(my_sanitize))
diff --git a/core/definitions.mk b/core/definitions.mk
index 3d56f4c..f88b75a 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2897,7 +2897,8 @@
   $(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,$(eval ALL_COMPATIBILITY_SUITES += $(suite))) \
   $(eval COMPATIBILITY.$(suite).FILES := \
     $$(COMPATIBILITY.$(suite).FILES) $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
-      $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f)))) \
+      $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
+      $$(my_compat_dist_test_data_$(suite))) \
   $(eval COMPATIBILITY.$(suite).MODULES := \
     $$(COMPATIBILITY.$(suite).MODULES) $$(my_register_name))) \
 $(eval $(my_all_targets) : $(call copy-many-files, \
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 55eeec6..20b4051 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -18,9 +18,35 @@
 ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED),$(PRODUCT_OUT))
 
 # Install boot images. Note that there can be multiple.
+my_boot_image_arch := TARGET_ARCH
+my_boot_image_out := $(PRODUCT_OUT)
+my_boot_image_syms := $(TARGET_OUT_UNSTRIPPED)
+my_boot_image_root := DEFAULT_DEX_PREOPT_INSTALLED_IMAGE
 DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
-$(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
 $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+ifdef TARGET_2ND_ARCH
+  my_boot_image_arch := TARGET_2ND_ARCH
+  my_boot_image_root := 2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE
+  2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE :=
+  $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+endif
+# Install boot images for testing on host. We exclude the framework image as it is not part of the ART manifest.
+my_boot_image_arch := HOST_ARCH
+my_boot_image_out := $(HOST_OUT)
+my_boot_image_syms := $(HOST_OUT)/symbols
+my_boot_image_root := HOST_BOOT_IMAGE
+HOST_BOOT_IMAGE :=
+$(foreach my_boot_image_name,art_host,$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+ifdef HOST_2ND_ARCH
+  my_boot_image_arch := HOST_2ND_ARCH
+  my_boot_image_root := 2ND_HOST_BOOT_IMAGE
+  2ND_HOST_BOOT_IMAGE :=
+  $(foreach my_boot_image_name,art_host,$(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk))
+endif
+my_boot_image_arch :=
+my_boot_image_out :=
+my_boot_image_syms :=
+my_boot_image_root :=
 
 # Build the boot.zip which contains the boot jars and their compilation output
 # We can do this only if preopt is enabled and if the product uses libart config (which sets the
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index ccf53f5..598ac2d 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -56,16 +56,6 @@
 DEX2OAT_XMS := $(call get-product-default-property,dalvik.vm.dex2oat-Xms)
 DEX2OAT_XMX := $(call get-product-default-property,dalvik.vm.dex2oat-Xmx)
 
-ifeq ($(TARGET_ARCH),$(filter $(TARGET_ARCH),mips mips64))
-# MIPS specific overrides.
-# For MIPS the ART image is loaded at a lower address. This causes issues
-# with the image overlapping with memory on the host cross-compiling and
-# building the image. We therefore limit the Xmx value. This isn't done
-# via a property as we want the larger Xmx value if we're running on a
-# MIPS device.
-DEX2OAT_XMX := 128m
-endif
-
 ifeq ($(WRITE_SOONG_VARIABLES),true)
 
   $(call json_start)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 79d5f8c..12b29f4 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -1,45 +1,42 @@
 ####################################
 # ART boot image installation
-# Input variable:
+# Input variables:
 #   my_boot_image_name: the boot image to install
+#   my_boot_image_arch: the architecture to install (e.g. TARGET_ARCH, not expanded)
+#   my_boot_image_out:  the install directory (e.g. $(PRODUCT_OUT))
+#   my_boot_image_syms: the symbols directory (e.g. $(TARGET_OUT_UNSTRIPPED))
+#   my_boot_image_root: the make variable used to store the installed image path
 #
 ####################################
 
-# Install primary arch vdex files into a shared location, and then symlink them to both the primary
-# and secondary arch directories.
-my_vdex_copy_pairs := $(DEXPREOPT_IMAGE_VDEX_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_ARCH))
-my_installed := $(foreach v,$(my_vdex_copy_pairs),$(PRODUCT_OUT)$(call word-colon,2,$(v)))
+# Install $(1) to $(2) so that it is shared between architectures.
+define copy-vdex-file
+my_vdex_shared := $$(dir $$(patsubst %/,%,$$(dir $(2))))$$(notdir $(2))  # Remove the arch dir.
+ifneq ($(my_boot_image_arch),$(filter $(my_boot_image_arch), TARGET_2ND_ARCH HOST_2ND_ARCH))
+$$(my_vdex_shared): $(1)  # Copy $(1) to directory one level up (i.e. with the arch dir removed).
+	@echo "Install: $$@"
+	$$(copy-file-to-target)
+endif
+$(2): $$(my_vdex_shared)  # Create symlink at $(2) which points to the actual physical copy.
+	@echo "Symlink: $$@"
+	mkdir -p $$(dir $$@)
+	ln -sfn ../$$(notdir $$@) $$@
+my_vdex_shared :=
+endef
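For illustration only (not part of the change): for a hypothetical vdex installed to $(2) = $(PRODUCT_OUT)/system/framework/arm/boot.vdex, my_vdex_shared resolves to $(PRODUCT_OUT)/system/framework/boot.vdex (the arch directory stripped). The physical copy is created there only on the primary-arch pass, and each per-arch path becomes a relative symlink:

    system/framework/arm/boot.vdex -> ../boot.vdex
    system/framework/arm64/boot.vdex -> ../boot.vdex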
+
+# Same as 'copy-many-files' but it uses the vdex-specific helper above.
+define copy-vdex-files
+$(foreach v,$(1),$(eval $(call copy-vdex-file, $(call word-colon,1,$(v)), $(2)$(call word-colon,2,$(v)))))
+$(foreach v,$(1),$(2)$(call word-colon,2,$(v)))
+endef
+
+# Install the boot images compiled by Soong.
+# The first file is saved in $(my_boot_image_root) and the rest are added as its dependencies.
+my_suffix := BUILT_INSTALLED_$(my_boot_image_name)_$($(my_boot_image_arch))
+my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_$(my_suffix)),$(my_boot_image_out))
+my_installed += $(call copy-many-files,$(DEXPREOPT_IMAGE_UNSTRIPPED_$(my_suffix)),$(my_boot_image_syms))
+my_installed += $(call copy-vdex-files,$(DEXPREOPT_IMAGE_VDEX_$(my_suffix)),$(my_boot_image_out))
+$(my_boot_image_root) += $(firstword $(my_installed))
 $(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
-
-my_built_vdex_dir := $(dir $(call word-colon,1,$(firstword $(my_vdex_copy_pairs))))
-my_installed_vdex_dir := $(PRODUCT_OUT)$(dir $(call word-colon,2,$(firstword $(my_vdex_copy_pairs))))
-
-$(my_installed): $(my_installed_vdex_dir)% : $(my_built_vdex_dir)%
-	@echo "Install: $@"
-	@rm -f $@
-	$(copy-file-to-target)
-	mkdir -p $(dir $@)/$(TARGET_ARCH)
-	ln -sfn ../$(notdir $@) $(dir $@)/$(TARGET_ARCH)
-ifdef TARGET_2ND_ARCH
-	mkdir -p $(dir $@)/$(TARGET_2ND_ARCH)
-	ln -sfn ../$(notdir $@) $(dir $@)/$(TARGET_2ND_ARCH)
-endif
-
-my_dexpreopt_image_extra_deps := $(firstword $(my_installed))
-
-my_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-
-ifdef TARGET_2ND_ARCH
-  my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-  include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-endif
-
-my_2nd_arch_prefix :=
-
-
-my_vdex_copy_pairs :=
 my_installed :=
-my_built_vdex_dir :=
-my_installed_vdex_dir :=
-my_dexpreopt_image_extra_deps :=
+my_suffix :=
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
deleted file mode 100644
index 34b8526..0000000
--- a/core/dex_preopt_libart_boot.mk
+++ /dev/null
@@ -1,25 +0,0 @@
-# Rules to install a boot image built by dexpreopt_bootjars.go
-# Input variables:
-#   my_boot_image_name: the boot image to install
-#   my_2nd_arch_prefix: indicates if this is to build for the 2nd arch.
-#   my_dexpreopt_image_extra_deps: extra dependencies to add on the installed boot.art
-
-# Install the boot images compiled by Soong
-# The first file (generally boot.art) is saved as DEFAULT_DEX_PREOPT_INSTALLED_IMAGE,
-# and the rest are added as dependencies of the first.
-
-my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_$(my_2nd_arch_prefix)ARCH)),$(PRODUCT_OUT))
-$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
-$(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE += $(firstword $(my_installed))
-
-# Install the unstripped boot images compiled by Soong into the symbols directory
-# The first file (generally boot.art) made a dependency of DEFAULT_DEX_PREOPT_INSTALLED_IMAGE,
-# and the rest are added as dependencies of the first.
-my_installed := $(call copy-many-files,$(DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_$(my_boot_image_name)_$(TARGET_$(my_2nd_arch_prefix)ARCH)),$(TARGET_OUT_UNSTRIPPED))
-$(firstword $(my_installed)): $(wordlist 2,9999,$(my_installed))
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(firstword $(my_installed))
-
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(my_dexpreopt_image_extra_deps)
-
-my_installed :=
-my_built_installed :=
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 862d874..ac3d5cf 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -101,7 +101,7 @@
 
 # ---------------------------------------------------------------
 # Set up configuration for host machine.  We don't do cross-
-# compiles except for arm/mips, so the HOST is whatever we are
+# compiles except for arm, so the HOST is whatever we are
 # running on
 
 # HOST_OS
diff --git a/core/goma.mk b/core/goma.mk
index c265259..2b51d8b 100644
--- a/core/goma.mk
+++ b/core/goma.mk
@@ -27,7 +27,8 @@
   # use both ccache and gomacc.
   CC_WRAPPER := $(strip $(CC_WRAPPER) $(GOMA_CC))
   CXX_WRAPPER := $(strip $(CXX_WRAPPER) $(GOMA_CC))
-  JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(GOMA_CC))
+  # b/143658984: goma can't handle the --system argument to javac
+  #JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(GOMA_CC))
 
   goma_dir :=
 endif
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 882fe3a..5021510 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -79,7 +79,8 @@
 $(java_source_list_file): $(java_sources_deps)
 	$(write-java-source-list)
 
-$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+# TODO(b/143658984): goma can't handle the --system argument to javac.
+#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
 $(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index beaea2a..8998d52 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -70,7 +70,8 @@
 $(java_source_list_file): $(java_sources_deps)
 	$(write-java-source-list)
 
-$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+# TODO(b/143658984): goma can't handle the --system argument to javac.
+#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
 $(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
diff --git a/core/java.mk b/core/java.mk
index a041321..44e005d 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -274,7 +274,8 @@
 
 endif # TURBINE_ENABLED != false
 
-$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+# TODO(b/143658984): goma can't handle the --system argument to javac.
+#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
 $(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
diff --git a/core/java_renderscript.mk b/core/java_renderscript.mk
index 672863b..bfcf59e 100644
--- a/core/java_renderscript.mk
+++ b/core/java_renderscript.mk
@@ -129,7 +129,7 @@
 endif
 
 my_arch := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
-ifneq (,$(filter arm64 mips64 x86_64,$(my_arch)))
+ifneq (,$(filter arm64 x86_64,$(my_arch)))
   my_min_sdk_version := 21
 else
   my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
diff --git a/core/main.mk b/core/main.mk
index 4059a78..50cb70f 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -38,11 +38,13 @@
 # Write the build number to a file so it can be read back in
 # without changing the command line every time.  Avoids rebuilds
 # when using ninja.
-$(shell mkdir -p $(OUT_DIR) && \
-    echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt)
-BUILD_NUMBER_FILE := $(OUT_DIR)/build_number.txt
+$(shell mkdir -p $(SOONG_OUT_DIR) && \
+    echo -n $(BUILD_NUMBER) > $(SOONG_OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FILE := $(SOONG_OUT_DIR)/build_number.txt
 .KATI_READONLY := BUILD_NUMBER_FILE
 $(KATI_obsolete_var BUILD_NUMBER,See https://android.googlesource.com/platform/build/+/master/Changes.md#BUILD_NUMBER)
+$(BUILD_NUMBER_FILE):
+	touch $@
 
 DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
 .KATI_READONLY := DATE_FROM_FILE
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 591937c..3f2e5de 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -40,9 +40,7 @@
 	sdk \
 	sdk_addon \
 	sdk_repo \
-	snod \
 	stnod \
-	systemimage-nodeps \
 	target-files-package \
 	test-art% \
 	user \
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 9df1c11..77fb8d4 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -6,7 +6,7 @@
 ifneq ($(LOCAL_NOTICE_FILE),)
 notice_file:=$(strip $(LOCAL_NOTICE_FILE))
 else
-notice_file:=$(strip $(wildcard $(LOCAL_PATH)/NOTICE))
+notice_file:=$(strip $(wildcard $(LOCAL_PATH)/LICENSE $(LOCAL_PATH)/LICENCE $(LOCAL_PATH)/NOTICE))
 endif
 
 ifeq ($(LOCAL_MODULE_CLASS),GYP)
@@ -101,7 +101,7 @@
 $(installed_notice_file): $(notice_file)
 	@echo Notice file: $< -- $@
 	$(hide) mkdir -p $(dir $@)
-	$(hide) cat $< > $@
+	$(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $^ > $@
 
 ifdef LOCAL_INSTALLED_MODULE
 # Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
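For reference (not part of the change): the awk recipe above concatenates every notice prerequisite in $^ instead of only the first one in $<; the FNR==1 && NR > 1 condition prints a blank line before the first line of each file after the first, so a module providing both a LICENSE and a NOTICE file gets them joined with a separating blank line.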
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 7bbaeb6..c6c2cf5 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -100,19 +100,19 @@
 # Determine whether auto-RRO is enabled for this package.
 enforce_rro_enabled :=
 ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
-  # * means all system APKs, so enable conditionally based on module path.
+  # * means all system and system_ext APKs, so enable conditionally based on module path.
+  # Note that modules in PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS are excluded even when '*' is specified.
 
   # Note that base_rules.mk has not yet been included, so it's likely that only
   # one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
   ifeq (,$(LOCAL_MODULE_PATH))
-    non_system_module := $(filter true,\
+    non_rro_target_module := $(filter true,\
         $(LOCAL_ODM_MODULE) \
         $(LOCAL_OEM_MODULE) \
         $(LOCAL_PRODUCT_MODULE) \
-        $(LOCAL_SYSTEM_EXT_MODULE) \
         $(LOCAL_PROPRIETARY_MODULE) \
         $(LOCAL_VENDOR_MODULE))
-    enforce_rro_enabled := $(if $(non_system_module),,true)
+    enforce_rro_enabled := $(if $(non_rro_target_module),,true)
   else ifneq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
     enforce_rro_enabled := true
   endif
@@ -120,6 +120,12 @@
   enforce_rro_enabled := true
 endif
 
+# TODO(b/150820813): Some modules depend on static overlays; remove this after eliminating the dependency.
+ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS)))
+  enforce_rro_enabled :=
+endif
+
+
 product_package_overlays := $(strip \
     $(wildcard $(foreach dir, $(PRODUCT_PACKAGE_OVERLAYS), \
       $(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
@@ -465,6 +471,9 @@
 $(LOCAL_BUILT_MODULE): $(additional_certificates)
 $(LOCAL_BUILT_MODULE): PRIVATE_ADDITIONAL_CERTIFICATES := $(additional_certificates)
 
+# Set the actual_partition_tag (calculated in base_rules.mk) for the package.
+PACKAGES.$(LOCAL_PACKAGE_NAME).PARTITION := $(actual_partition_tag)
+
 # Verify LOCAL_USES_LIBRARIES/LOCAL_OPTIONAL_USES_LIBRARIES
 # If LOCAL_ENFORCE_USES_LIBRARIES is not set, default to true if either of LOCAL_USES_LIBRARIES or
 # LOCAL_OPTIONAL_USES_LIBRARIES are specified.
diff --git a/core/product.mk b/core/product.mk
index 8e82f71..b497abb 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -193,6 +193,9 @@
 # Package list to apply enforcing RRO.
 _product_list_vars += PRODUCT_ENFORCE_RRO_TARGETS
 
+# Packages to skip auto-generating RROs for when PRODUCT_ENFORCE_RRO_TARGETS is set to *.
+_product_list_vars += PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
+
 _product_list_vars += PRODUCT_SDK_ATREE_FILES
 _product_list_vars += PRODUCT_SDK_ADDON_NAME
 _product_list_vars += PRODUCT_SDK_ADDON_COPY_FILES
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index a001e3a..6dc396c 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -157,6 +157,9 @@
 include $(BUILD_SYSTEM)/app_certificate_validate.mk
 PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
 
+# Set the actual_partition_tag (calculated in base_rules.mk) for the package.
+PACKAGES.$(LOCAL_MODULE).PARTITION := $(actual_partition_tag)
+
 ifdef LOCAL_SOONG_BUNDLE
   ALL_MODULES.$(LOCAL_MODULE).BUNDLE := $(LOCAL_SOONG_BUNDLE)
 endif
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index 190a7ed..6317b53 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -112,7 +112,9 @@
 my_check_same_vndk_variants :=
 ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
   ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
-    my_check_same_vndk_variants := true
+    ifneq ($(CLANG_COVERAGE),true)
+      my_check_same_vndk_variants := true
+    endif
   endif
 endif
 
@@ -128,10 +130,11 @@
   $(same_vndk_variants_stamp): PRIVATE_TOOLS_PREFIX := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)TOOLS_PREFIX)
 
   $(same_vndk_variants_stamp): $(my_core_shared_lib) $(LOCAL_PREBUILT_MODULE_FILE)
-		$(call verify-vndk-libs-identical,\
-		    $(PRIVATE_CORE_VARIANT),\
-		    $(PRIVATE_VENDOR_VARIANT),\
-		    $(PRIVATE_TOOLS_PREFIX))
+	$(call verify-vndk-libs-identical,\
+	    $(PRIVATE_CORE_VARIANT),\
+	    $(PRIVATE_VENDOR_VARIANT),\
+	    $(PRIVATE_TOOLS_PREFIX))
+	touch $@
 
   $(LOCAL_BUILT_MODULE): $(same_vndk_variants_stamp)
 endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index f31c9a0..c91639c 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -25,7 +25,7 @@
 $(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT))
 
 $(call add_json_str,  BuildId,                           $(BUILD_ID))
-$(call add_json_str,  BuildNumberFromFile,               $(BUILD_NUMBER_FROM_FILE))
+$(call add_json_str,  BuildNumberFile,                   build_number.txt)
 
 $(call add_json_str,  Platform_version_name,             $(PLATFORM_VERSION))
 $(call add_json_val,  Platform_sdk_version,              $(PLATFORM_SDK_VERSION))
@@ -81,6 +81,7 @@
 $(call add_json_list, DeviceResourceOverlays,            $(DEVICE_PACKAGE_OVERLAYS))
 $(call add_json_list, ProductResourceOverlays,           $(PRODUCT_PACKAGE_OVERLAYS))
 $(call add_json_list, EnforceRROTargets,                 $(PRODUCT_ENFORCE_RRO_TARGETS))
+$(call add_json_list, EnforceRROExemptedTargets,         $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
 $(call add_json_list, EnforceRROExcludedOverlays,        $(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS))
 
 $(call add_json_str,  AAPTCharacteristics,               $(TARGET_AAPT_CHARACTERISTICS))
@@ -116,6 +117,8 @@
 $(call add_json_list, CoveragePaths,                     $(COVERAGE_PATHS))
 $(call add_json_list, CoverageExcludePaths,              $(COVERAGE_EXCLUDE_PATHS))
 
+$(call add_json_bool, SamplingPGO,                       $(filter true,$(SAMPLING_PGO)))
+
 $(call add_json_bool, ArtUseReadBarrier,                 $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
 $(call add_json_bool, Binder32bit,                       $(BINDER32BIT))
 $(call add_json_str,  BtConfigIncludeDir,                $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index f071c7c..73fad7c 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -21,30 +21,38 @@
 # Create an artifact to include all test config files in device-tests.
 device-tests-configs-zip := $(PRODUCT_OUT)/device-tests_configs.zip
 my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES))
-$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip)
+device_tests_host_shared_libs_zip := $(PRODUCT_OUT)/device-tests_host-shared-libs.zip
+
+$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip)
 $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
 $(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
+$(device-tests-zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip)
 $(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
+	rm -f $@-shared-libs.list
 	echo $(sort $(COMPATIBILITY.device-tests.FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $@-host.list; \
+	  echo $$shared_lib >> $@-shared-libs.list; \
 	done
+	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \
 	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
+	$(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-shared-libs.list
 	rm -f $(PRIVATE_device_tests_list)
 	$(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list)
 	$(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list)
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-list-zip) -C $(dir $@) -f $(PRIVATE_device_tests_list)
 	rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
-	  $(PRIVATE_device_tests_list)
+	  $@-shared-libs.list $@-host-shared-libs.list $(PRIVATE_device_tests_list)
 
 device-tests: $(device-tests-zip)
-$(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip))
+$(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip))
 
 tests: device-tests
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 7c4266c..2b43f0f 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -16,7 +16,16 @@
 #
 
 my_makefile := $(lastword $(filter-out $(lastword $(MAKEFILE_LIST)),$(MAKEFILE_LIST)))
-my_staging_dir := $(call intermediates-dir-for,PACKAGING,$(my_package_name))
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := $(my_package_name)
+LOCAL_MODULE_CLASS := PACKAGING
+LOCAL_MODULE_STEM := $(my_package_name).zip
+LOCAL_UNINSTALLABLE_MODULE := true
+include $(BUILD_SYSTEM)/base_rules.mk
+my_staging_dir := $(intermediates)
+my_package_zip := $(LOCAL_BUILT_MODULE)
+
 my_built_modules := $(foreach p,$(my_copy_pairs),$(call word-colon,1,$(p)))
 my_copy_pairs := $(foreach p,$(my_copy_pairs),$(call word-colon,1,$(p)):$(my_staging_dir)/$(call word-colon,2,$(p)))
 my_pickup_files :=
@@ -80,7 +89,6 @@
   $(error done)
 endif
 
-my_package_zip := $(my_staging_dir)/$(my_package_name).zip
 $(my_package_zip): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
 $(my_package_zip): PRIVATE_PICKUP_FILES := $(my_pickup_files)
 $(my_package_zip) : $(my_built_modules)
diff --git a/envsetup.sh b/envsetup.sh
index 793f4b6..791a43d 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -8,7 +8,7 @@
               Selects <product_name> as the product to build, and <build_variant> as the variant to
               build, and stores those selections in the environment to be read by subsequent
               invocations of 'm' etc.
-- tapas:      tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
+- tapas:      tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
 - croot:      Changes directory to the top of the tree, or a subdirectory thereof.
 - m:          Makes from the top of the tree.
 - mm:         Builds and installs all of the modules in the current directory, and their
@@ -218,8 +218,6 @@
         arm64) toolchaindir=aarch64/aarch64-linux-android-$targetgccversion/bin;
                toolchaindir2=arm/arm-linux-androideabi-$targetgccversion2/bin
             ;;
-        mips|mips64) toolchaindir=mips/mips64el-linux-android-$targetgccversion/bin
-            ;;
         *)
             echo "Can't find toolchain for unknown architecture: $ARCH"
             toolchaindir=xxxxxxxxx
@@ -599,7 +597,12 @@
 {
     local answer
 
-    if [ "$1" ] ; then
+    if [[ $# -gt 1 ]]; then
+        echo "usage: lunch [target]" >&2
+        return 1
+    fi
+
+    if [ "$1" ]; then
         answer=$1
     else
         print_lunch_menu
@@ -696,10 +699,10 @@
 function tapas()
 {
     local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
-    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|arm64|x86_64|mips64)$' | xargs)"
+    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|arm64|x86_64)$' | xargs)"
     local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
     local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
-    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|arm64|x86_64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
 
     if [ "$showHelp" != "" ]; then
       $(gettop)/build/make/tapasHelp.sh
@@ -722,10 +725,8 @@
     local product=aosp_arm
     case $arch in
       x86)    product=aosp_x86;;
-      mips)   product=aosp_mips;;
       arm64)  product=aosp_arm64;;
       x86_64) product=aosp_x86_64;;
-      mips64)  product=aosp_mips64;;
     esac
     if [ -z "$variant" ]; then
         variant=eng
@@ -949,7 +950,7 @@
     Darwin)
         function sgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|xml|sh|mk|aidl|vts)' \
+            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|xml|sh|mk|aidl|vts|proto)' \
                 -exec grep --color -n "$@" {} +
         }
 
@@ -957,7 +958,7 @@
     *)
         function sgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\)' \
+            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
                 -exec grep --color -n "$@" {} +
         }
         ;;
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 38b3e34..0f46130 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
 cd ../..
 TOP="${PWD}"
 
-message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
 
 tapas selects individual apps to be built by the Android build system. Unlike
 "lunch", "tapas" does not request the building of images for a device.
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index a01133b..15488fc 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -69,7 +69,6 @@
     com.android.wifi \
     ContactsProvider \
     content \
-    crash_dump \
     debuggerd\
     device_config \
     dmctl \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index f8d85bf..814cb64 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -120,6 +120,9 @@
 
 PRODUCT_ENFORCE_RRO_TARGETS := *
 
+# TODO(b/150820813) Settings depends on static overlay, remove this after eliminating the dependency.
+PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS := Settings
+
 PRODUCT_NAME := mainline_system
 PRODUCT_BRAND := generic
 
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 9df26a9..e8c60b4 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -44,8 +44,6 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_x86.mk)
-
 # Define the host tools and libs that are parts of the SDK.
 -include sdk/build/product_sdk.mk
 -include development/build/product_sdk.mk
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 90a6485..d6f2116 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -188,6 +188,7 @@
         "imgdiff",
         "minigzip",
         "mkbootfs",
+        "signapk",
     ],
 }
 
@@ -237,6 +238,16 @@
             embedded_launcher: false,
         },
     },
+    // TODO (b/140144201) Build imgdiff from releasetools_common
+    required: [
+        "aapt2",
+        "boot_signer",
+        "brotli",
+        "bsdiff",
+        "imgdiff",
+        "minigzip",
+        "mkbootfs",
+    ],
 }
 
 python_binary_host {
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 3276b29..3bbf9d8 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -931,6 +931,37 @@
   return "{}:{}:{}".format(partition, rollback_index_location, pubkey_path)
 
 
+def AddAftlInclusionProof(output_image):
+  """Appends the aftl inclusion proof to the vbmeta image."""
+
+  # Ensure the other AFTL parameters are set as well.
+  assert OPTIONS.aftl_key_path is not None, 'No AFTL key provided.'
+  assert OPTIONS.aftl_manufacturer_key_path is not None, \
+      'No AFTL manufacturer key provided.'
+
+  vbmeta_image = MakeTempFile()
+  os.rename(output_image, vbmeta_image)
+  build_info = BuildInfo(OPTIONS.info_dict)
+  version_incremental = build_info.GetBuildProp("ro.build.version.incremental")
+  aftl_cmd = ["aftltool", "make_icp_from_vbmeta",
+              "--vbmeta_image_path", vbmeta_image,
+              "--output", output_image,
+              "--version_incremental", version_incremental,
+              "--transparency_log_servers", OPTIONS.aftl_server,
+              "--transparency_log_pub_keys", OPTIONS.aftl_key_path,
+              "--manufacturer_key", OPTIONS.aftl_manufacturer_key_path,
+              "--algorithm", "SHA256_RSA4096",
+              "--padding", "4096"]
+  if OPTIONS.aftl_signer_helper:
+    aftl_cmd.extend(shlex.split(OPTIONS.aftl_signer_helper))
+  RunAndCheckOutput(aftl_cmd)
+
+  verify_cmd = ['aftltool', 'verify_image_icp', '--vbmeta_image_path',
+                output_image, '--transparency_log_pub_keys',
+                OPTIONS.aftl_key_path]
+  RunAndCheckOutput(verify_cmd)
+
+
 def BuildVBMeta(image_path, partitions, name, needed_partitions):
   """Creates a VBMeta image.
 
@@ -973,28 +1004,26 @@
       # zip only). For such cases, we additionally scan other locations (e.g.
       # IMAGES/, RADIO/, etc) before bailing out.
       if arg == '--include_descriptors_from_image':
-        image_path = split_args[index + 1]
-        if os.path.exists(image_path):
+        chained_image = split_args[index + 1]
+        if os.path.exists(chained_image):
           continue
         found = False
         for dir_name in ['IMAGES', 'RADIO', 'PREBUILT_IMAGES']:
           alt_path = os.path.join(
-              OPTIONS.input_tmp, dir_name, os.path.basename(image_path))
+              OPTIONS.input_tmp, dir_name, os.path.basename(chained_image))
           if os.path.exists(alt_path):
             split_args[index + 1] = alt_path
             found = True
             break
-        assert found, 'Failed to find {}'.format(image_path)
+        assert found, 'Failed to find {}'.format(chained_image)
     cmd.extend(split_args)
 
   RunAndCheckOutput(cmd)
 
+  # Generate the AFTL inclusion proof.
   if OPTIONS.aftl_server is not None:
-    # Ensure the other AFTL parameters are set as well.
-    assert OPTIONS.aftl_key_path is not None, 'No AFTL key provided.'
-    assert OPTIONS.aftl_manufacturer_key_path is not None, 'No AFTL manufacturer key provided.'
-    assert OPTIONS.aftl_signer_helper is not None, 'No AFTL signer helper provided.'
-    # AFTL inclusion proof generation code will go here.
+    AddAftlInclusionProof(image_path)
+
 
 def _MakeRamdisk(sourcedir, fs_config_file=None):
   ramdisk_img = tempfile.NamedTemporaryFile()
@@ -1754,7 +1783,8 @@
       continue
     m = re.match(
         r'^name="(?P<NAME>.*)"\s+certificate="(?P<CERT>.*)"\s+'
-        r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?$',
+        r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?'
+        r'(\s+partition="(?P<PARTITION>.*)")?$',
         line)
     if not m:
       continue
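
For reference, a minimal standalone sketch (not part of the patch) of how the extended apkcerts regex above behaves once the optional partition tag is present; the package name and certificate paths below are hypothetical:

import re

# Same pattern as in the hunk above: compressed and partition are both optional.
APKCERTS_PATTERN = re.compile(
    r'^name="(?P<NAME>.*)"\s+certificate="(?P<CERT>.*)"\s+'
    r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?'
    r'(\s+partition="(?P<PARTITION>.*)")?$')

line = ('name="Example.apk" certificate="certs/example.x509.pem" '
        'private_key="certs/example.pk8" partition="product"')
m = APKCERTS_PATTERN.match(line)
print(m.group('NAME'), m.group('PARTITION'))  # Example.apk product
print(m.group('COMPRESSED'))                  # None, since the tag is optional
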
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index eb68bc3..8e97509 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -79,6 +79,7 @@
 import fnmatch
 import logging
 import os
+import re
 import shutil
 import subprocess
 import sys
@@ -109,6 +110,27 @@
 OPTIONS.rebuild_recovery = False
 OPTIONS.keep_tmp = False
 
+# In an item list (framework or vendor), we may see entries that select whole
+# partitions. Such an entry might look like this: 'SYSTEM/*' (e.g., for the
+# system partition). The following regex matches this and extracts the
+# partition name.
+
+PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')
+
+# In apexkeys.txt or apkcerts.txt, we may find partition tags on the various
+# entries in the file. We use these partition tags to filter the entries in
+# those files from the two different target files packages to produce a merged
+# apexkeys.txt or apkcerts.txt file. A partition tag (e.g., for the product
+# partition) looks like this: 'partition="_PRODUCT"' or 'partition="product"'.
+# We use the group syntax to grab the value of the tag.
+
+PARTITION_TAG_PATTERN = re.compile(r'partition="(.*)"')
+
+# The sort key used for apexkeys.txt and apkcerts.txt entries omits the ".apex"
+# or ".apk" suffix, so we use the following pattern to extract that key.
+
+MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
+
 # DEFAULT_FRAMEWORK_ITEM_LIST is a list of items to extract from the partial
 # framework target files package as is, meaning these items will land in the
 # output target files package exactly as they appear in the input partial
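
A minimal standalone sketch (not part of the patch) of how the three patterns above classify entries; the sample item and apkcerts line are taken from the testdata added later in this change:

import re

PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')
PARTITION_TAG_PATTERN = re.compile(r'partition="(.*)"')
MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')

# A whole-partition item maps to a lower-case partition name.
print(PARTITION_ITEM_PATTERN.search('SYSTEM_EXT/*').group(1).lower())  # system_ext

line = ('name="TestVendor.apk" certificate="not_selected" '
        'private_key="not_selected" partition="vendor"')
print(MODULE_KEY_PATTERN.search(line).group(1))     # TestVendor (suffix stripped)
print(PARTITION_TAG_PATTERN.search(line).group(1))  # vendor
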
@@ -484,9 +506,40 @@
       path=output_dynamic_partitions_info_txt)
 
 
+def item_list_to_partition_set(item_list):
+  """Converts a target files item list to a partition set.
+
+  The item list contains items that might look like 'SYSTEM/*' or 'VENDOR/*' or
+  'OTA/android-info.txt'. Items that end in '/*' are assumed to match entire
+  directories where 'SYSTEM' or 'VENDOR' is a directory name that identifies the
+  contents of a partition of the same name. Other items in the list, such as the
+  'OTA' example contain metadata. This function iterates such a list, returning
+  a set that contains the partition entries.
+
+  Args:
+    item_list: A list of items in a target files package.
+  Returns:
+    A set of partitions extracted from the list of items.
+  """
+
+  partition_set = set()
+
+  for item in item_list:
+    match = PARTITION_ITEM_PATTERN.search(item.strip())
+    partition_tag = match.group(1).lower() if match else None
+
+    if partition_tag:
+      partition_set.add(partition_tag)
+
+  return partition_set
+
+
 def process_apex_keys_apk_certs_common(framework_target_files_dir,
                                        vendor_target_files_dir,
-                                       output_target_files_dir, file_name):
+                                       output_target_files_dir,
+                                       framework_partition_set,
+                                       vendor_partition_set, file_name):
+
   """Performs special processing for META/apexkeys.txt or META/apkcerts.txt.
 
   This function merges the contents of the META/apexkeys.txt or
@@ -502,6 +555,10 @@
       items extracted from the vendor target files package.
     output_target_files_dir: The name of a directory that will be used to create
       the output target files package after all the special cases are processed.
+    framework_partition_set: Partitions that are considered framework
+      partitions. Used to filter apexkeys.txt and apkcerts.txt.
+    vendor_partition_set: Partitions that are considered vendor partitions. Used
+      to filter apexkeys.txt and apkcerts.txt.
     file_name: The name of the file to merge. One of apkcerts.txt or
       apexkeys.txt.
   """
@@ -512,21 +569,44 @@
     with open(file_path) as f:
       for line in f:
         if line.strip():
-          temp[line.split()[0]] = line.strip()
+          name = line.split()[0]
+          match = MODULE_KEY_PATTERN.search(name)
+          temp[match.group(1)] = line.strip()
     return temp
 
   framework_dict = read_helper(framework_target_files_dir)
   vendor_dict = read_helper(vendor_target_files_dir)
+  merged_dict = {}
 
-  for key in framework_dict:
-    if key in vendor_dict and vendor_dict[key] != framework_dict[key]:
-      raise ValueError('Conflicting entries found in %s:\n %s and\n %s' %
-                       (file_name, framework_dict[key], vendor_dict[key]))
-    vendor_dict[key] = framework_dict[key]
+  def filter_into_merged_dict(item_dict, partition_set):
+    for key, value in item_dict.items():
+      match = PARTITION_TAG_PATTERN.search(value)
+
+      if match is None:
+        raise ValueError('Entry missing partition tag: %s' % value)
+
+      partition_tag = match.group(1)
+
+      if partition_tag in partition_set:
+        if key in merged_dict:
+          raise ValueError('Duplicate key %s' % key)
+
+        merged_dict[key] = value
+
+  filter_into_merged_dict(framework_dict, framework_partition_set)
+  filter_into_merged_dict(vendor_dict, vendor_partition_set)
 
   output_file = os.path.join(output_target_files_dir, 'META', file_name)
 
-  write_sorted_data(data=vendor_dict.values(), path=output_file)
+  # The following code is similar to write_sorted_data, but different enough
+  # that we couldn't use that function. We need the output to be sorted by the
+  # basename of the apex/apk (without the ".apex" or ".apk" suffix). This
+  # allows the sort to be consistent with the framework/vendor input data and
+  # eases comparison of input data with merged data.
+  with open(output_file, 'w') as output:
+    for key in sorted(merged_dict.keys()):
+      out_str = merged_dict[key] + '\n'
+      output.write(out_str)
 
 
 def copy_file_contexts(framework_target_files_dir, vendor_target_files_dir,
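
To make the selection logic above concrete, here is a condensed standalone sketch (with hypothetical entry values) of the routing that filter_into_merged_dict performs: each package contributes only the entries whose partition it owns, so the "not_selected" placeholders from the other package never reach the merged file:

import re

PARTITION_TAG_PATTERN = re.compile(r'partition="(.*)"')

framework_dict = {
    'TestSystem': 'name="TestSystem.apk" certificate="framework_key" partition="system"',
    'TestVendor': 'name="TestVendor.apk" certificate="not_selected" partition="vendor"',
}
vendor_dict = {
    'TestSystem': 'name="TestSystem.apk" certificate="not_selected" partition="system"',
    'TestVendor': 'name="TestVendor.apk" certificate="vendor_key" partition="vendor"',
}

merged = {}
for entries, partitions in ((framework_dict, {'system', 'system_ext', 'product'}),
                            (vendor_dict, {'vendor', 'odm'})):
    for key, value in entries.items():
        if PARTITION_TAG_PATTERN.search(value).group(1) in partitions:
            merged[key] = value  # a duplicate key would be rejected in the real code

for key in sorted(merged):  # sorted by module key, as in the write loop above
    print(merged[key])
# name="TestSystem.apk" certificate="framework_key" partition="system"
# name="TestVendor.apk" certificate="vendor_key" partition="vendor"
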
@@ -559,7 +639,9 @@
 def process_special_cases(framework_target_files_temp_dir,
                           vendor_target_files_temp_dir,
                           output_target_files_temp_dir,
-                          framework_misc_info_keys):
+                          framework_misc_info_keys,
+                          framework_partition_set,
+                          vendor_partition_set):
   """Performs special-case processing for certain target files items.
 
   Certain files in the output target files package require special-case
@@ -576,6 +658,10 @@
     framework_misc_info_keys: A list of keys to obtain from the framework
       instance of META/misc_info.txt. The remaining keys from the vendor
       instance.
+    framework_partition_set: Partitions that are considered framework
+      partitions. Used to filter apexkeys.txt and apkcerts.txt.
+    vendor_partition_set: Partitions that are considered vendor partitions. Used
+      to filter apexkeys.txt and apkcerts.txt.
   """
 
   if 'ab_update' in framework_misc_info_keys:
@@ -604,12 +690,16 @@
       framework_target_files_dir=framework_target_files_temp_dir,
       vendor_target_files_dir=vendor_target_files_temp_dir,
       output_target_files_dir=output_target_files_temp_dir,
+      framework_partition_set=framework_partition_set,
+      vendor_partition_set=vendor_partition_set,
       file_name='apkcerts.txt')
 
   process_apex_keys_apk_certs_common(
       framework_target_files_dir=framework_target_files_temp_dir,
       vendor_target_files_dir=vendor_target_files_temp_dir,
       output_target_files_dir=output_target_files_temp_dir,
+      framework_partition_set=framework_partition_set,
+      vendor_partition_set=vendor_partition_set,
       file_name='apexkeys.txt')
 
 
@@ -716,7 +806,9 @@
       framework_target_files_temp_dir=framework_target_files_temp_dir,
       vendor_target_files_temp_dir=vendor_target_files_temp_dir,
       output_target_files_temp_dir=output_target_files_temp_dir,
-      framework_misc_info_keys=framework_misc_info_keys)
+      framework_misc_info_keys=framework_misc_info_keys,
+      framework_partition_set=item_list_to_partition_set(framework_item_list),
+      vendor_partition_set=item_list_to_partition_set(vendor_item_list))
 
   return output_target_files_temp_dir
 
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 2126d11..92a46a2 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -318,7 +318,7 @@
     common.RunAndCheckOutput(cmd)
     with open(out_signature_size_file) as f:
       signature_size = f.read().rstrip()
-    logger.info("% outputs the maximum signature size: %", cmd[0],
+    logger.info("%s outputs the maximum signature size: %s", cmd[0],
                 signature_size)
     return int(signature_size)
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 5b7c2ac..783d63c 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -1082,7 +1082,8 @@
         r'public_key="(?P<PAYLOAD_PUBLIC_KEY>.*)"\s+'
         r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
         r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
-        r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*)"$',
+        r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*)"\s+'
+        r'partition="(?P<PARTITION>.*)"$',
         line)
     if not matches:
       continue
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index da92163..9621de8 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1404,6 +1404,46 @@
     self.assertEqual('3', chained_partition_args[1])
     self.assertTrue(os.path.exists(chained_partition_args[2]))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_BuildVBMeta_appendAftl(self):
+    testdata_dir = test_utils.get_testdata_dir()
+    common.OPTIONS.info_dict = {
+        'ab_update': 'true',
+        'avb_avbtool': 'avbtool',
+        'build.prop': {
+            'ro.build.version.incremental': '6285659',
+            'ro.product.device': 'coral',
+            'ro.build.fingerprint': 'google/coral/coral:R/RP1A.200311.002/'
+                                    '6285659:userdebug/dev-keys'
+        }
+    }
+    common.OPTIONS.aftl_server = "log.endpoints.aftl-dev.cloud.goog:9000"
+    common.OPTIONS.aftl_key_path = os.path.join(testdata_dir,
+                                                'test_transparency_key.pub')
+    common.OPTIONS.aftl_manufacturer_key_path = os.path.join(
+        testdata_dir, 'test_aftl_rsa4096.pem')
+
+    input_dir = common.MakeTempDir()
+    system_image = common.MakeTempFile()
+    build_image_cmd = ['mkuserimg_mke2fs', input_dir, system_image, 'ext4',
+                       '/system', str(4096 * 100), '-j', '0', '-s']
+    common.RunAndCheckOutput(build_image_cmd)
+
+    add_footer_cmd = ['avbtool', 'add_hashtree_footer',
+                      '--partition_size', str(4096 * 150),
+                      '--partition_name', 'system',
+                      '--image', system_image]
+    common.RunAndCheckOutput(add_footer_cmd)
+
+    vbmeta_image = common.MakeTempFile()
+    common.BuildVBMeta(vbmeta_image, {'system': system_image}, 'vbmeta',
+                       ['system'])
+
+    verify_cmd = ['aftltool', 'verify_image_icp', '--vbmeta_image_path',
+                  vbmeta_image, '--transparency_log_pub_keys',
+                  common.OPTIONS.aftl_key_path]
+    common.RunAndCheckOutput(verify_cmd)
+
 
 class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
   """Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index 1abe83c..ff8593b 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -22,6 +22,7 @@
                                 DEFAULT_FRAMEWORK_ITEM_LIST,
                                 DEFAULT_VENDOR_ITEM_LIST,
                                 DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
+                                item_list_to_partition_set,
                                 process_apex_keys_apk_certs_common)
 
 
@@ -142,6 +143,8 @@
         os.path.join(vendor_dir, 'META', 'apexkeys.txt'))
 
     process_apex_keys_apk_certs_common(framework_dir, vendor_dir, output_dir,
+                                       set(['product', 'system', 'system_ext']),
+                                       set(['odm', 'vendor']),
                                        'apexkeys.txt')
 
     merged_entries = []
@@ -175,4 +178,54 @@
         os.path.join(conflict_dir, 'META', 'apexkeys.txt'))
 
     self.assertRaises(ValueError, process_apex_keys_apk_certs_common,
-                      framework_dir, conflict_dir, output_dir, 'apexkeys.txt')
+                      framework_dir, conflict_dir, output_dir,
+                      set(['product', 'system', 'system_ext']),
+                      set(['odm', 'vendor']),
+                      'apexkeys.txt')
+
+  def test_process_apex_keys_apk_certs_HandlesApkCertsSyntax(self):
+    output_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(output_dir, 'META'))
+
+    framework_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(framework_dir, 'META'))
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apkcerts_framework.txt'),
+        os.path.join(framework_dir, 'META', 'apkcerts.txt'))
+
+    vendor_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(vendor_dir, 'META'))
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apkcerts_vendor.txt'),
+        os.path.join(vendor_dir, 'META', 'apkcerts.txt'))
+
+    process_apex_keys_apk_certs_common(framework_dir, vendor_dir, output_dir,
+                                       set(['product', 'system', 'system_ext']),
+                                       set(['odm', 'vendor']),
+                                       'apkcerts.txt')
+
+    merged_entries = []
+    merged_path = os.path.join(self.testdata_dir, 'apkcerts_merge.txt')
+
+    with open(merged_path) as f:
+      merged_entries = f.read().split('\n')
+
+    output_entries = []
+    output_path = os.path.join(output_dir, 'META', 'apkcerts.txt')
+
+    with open(output_path) as f:
+      output_entries = f.read().split('\n')
+
+    return self.assertEqual(merged_entries, output_entries)
+
+  def test_item_list_to_partition_set(self):
+    item_list = [
+        'META/apexkeys.txt',
+        'META/apkcerts.txt',
+        'META/filesystem_config.txt',
+        'PRODUCT/*',
+        'SYSTEM/*',
+        'SYSTEM_EXT/*',
+    ]
+    partition_set = item_list_to_partition_set(item_list)
+    self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 2b84413..2dacd50 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -36,8 +36,8 @@
 </policy>"""
 
   # pylint: disable=line-too-long
-  APEX_KEYS_TXT = """name="apex.apexd_test.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem" container_certificate="build/make/target/product/security/testkey.x509.pem" container_private_key="build/make/target/product/security/testkey.pk8"
-name="apex.apexd_test_different_app.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" container_certificate="build/make/target/product/security/testkey.x509.pem" container_private_key="build/make/target/product/security/testkey.pk8"
+  APEX_KEYS_TXT = """name="apex.apexd_test.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem" container_certificate="build/make/target/product/security/testkey.x509.pem" container_private_key="build/make/target/product/security/testkey.pk8" partition="system"
+name="apex.apexd_test_different_app.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" container_certificate="build/make/target/product/security/testkey.x509.pem" container_private_key="build/make/target/product/security/testkey.pk8" partition="system"
 """
 
   def setUp(self):
@@ -484,7 +484,8 @@
         'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
         'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
         'container_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/make/target/product/security/testkey2.pk8"')
+        'container_private_key="build/make/target/product/security/testkey2.pk8" '
+        'partition="system"')
     target_files = common.MakeTempFile(suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
diff --git a/tools/releasetools/testdata/apexkeys_framework.txt b/tools/releasetools/testdata/apexkeys_framework.txt
index 2346668..b9caf9e 100644
--- a/tools/releasetools/testdata/apexkeys_framework.txt
+++ b/tools/releasetools/testdata/apexkeys_framework.txt
@@ -1,2 +1,7 @@
-name="com.android.runtime.debug.apex" public_key="art/build/apex/com.android.runtime.avbpubkey" private_key="art/build/apex/com.android.runtime.pem" container_certificate="art/build/apex/com.android.runtime.debug.x509.pem" container_private_key="art/build/apex/com.android.runtime.debug.pk8"
-name="com.android.conscrypt.apex" public_key="external/conscrypt/apex/com.android.conscrypt.avbpubkey" private_key="external/conscrypt/apex/com.android.conscrypt.pem" container_certificate="external/conscrypt/apex/com.android.conscrypt.x509.pem" container_private_key="external/conscrypt/apex/com.android.conscrypt.pk8"
+name="com.android.conscrypt.apex" public_key="external/conscrypt/apex/com.android.conscrypt.avbpubkey" private_key="external/conscrypt/apex/com.android.conscrypt.pem" container_certificate="external/conscrypt/apex/com.android.conscrypt.x509.pem" container_private_key="external/conscrypt/apex/com.android.conscrypt.pk8" partition="system"
+name="com.android.dummy_product.apex" public_key="selected" private_key="selected" container_certificate="selected" container_private_key="selected" partition="product"
+name="com.android.runtime.apex" public_key="bionic/apex/com.android.runtime.avbpubkey" private_key="bionic/apex/com.android.runtime.pem" container_certificate="bionic/apex/com.android.runtime.x509.pem" container_private_key="bionic/apex/com.android.runtime.pk8" partition="system"
+name="com.android.vndk.current.on_vendor.apex" public_key="not_selected" private_key="not_selected" container_certificate="not_selected" container_private_key="not_selected" partition="vendor"
+name="com.android.vndk.v27.apex" public_key="packages/modules/vndk/apex/com.android.vndk.v27.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.v27.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.v27.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.v27.pk8" partition="system_ext"
+name="com.android.vndk.v28.apex" public_key="packages/modules/vndk/apex/com.android.vndk.v28.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.v28.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.v28.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.v28.pk8" partition="system_ext"
+name="com.android.vndk.v29.apex" public_key="packages/modules/vndk/apex/com.android.vndk.v29.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.v29.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.v29.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.v29.pk8" partition="system_ext"
diff --git a/tools/releasetools/testdata/apexkeys_framework_conflict.txt b/tools/releasetools/testdata/apexkeys_framework_conflict.txt
index caa21c2..9a055f4 100644
--- a/tools/releasetools/testdata/apexkeys_framework_conflict.txt
+++ b/tools/releasetools/testdata/apexkeys_framework_conflict.txt
@@ -1 +1 @@
-name="com.android.runtime.debug.apex" public_key="art/build/apex/com.android.runtime.avbpubkey" private_key="art/build/apex/com.android.runtime.pem" container_certificate="art/build/apex/com.android.runtime.release.x509.pem" container_private_key="art/build/apex/com.android.runtime.debug.pk8"
+name="com.android.conscrypt.apex" public_key="external/conscrypt/apex/com.android.conscrypt.avbpubkey" private_key="external/conscrypt/apex/com.android.conscrypt.pem" container_certificate="external/conscrypt/apex/com.android.conscrypt.x509.pem" container_private_key="external/conscrypt/apex/com.android.conscrypt.pk8" partition="vendor"
diff --git a/tools/releasetools/testdata/apexkeys_merge.txt b/tools/releasetools/testdata/apexkeys_merge.txt
index 48e789f..a9355d7 100644
--- a/tools/releasetools/testdata/apexkeys_merge.txt
+++ b/tools/releasetools/testdata/apexkeys_merge.txt
@@ -1,4 +1,7 @@
-name="com.android.conscrypt.apex" public_key="external/conscrypt/apex/com.android.conscrypt.avbpubkey" private_key="external/conscrypt/apex/com.android.conscrypt.pem" container_certificate="external/conscrypt/apex/com.android.conscrypt.x509.pem" container_private_key="external/conscrypt/apex/com.android.conscrypt.pk8"
-name="com.android.runtime.debug.apex" public_key="art/build/apex/com.android.runtime.avbpubkey" private_key="art/build/apex/com.android.runtime.pem" container_certificate="art/build/apex/com.android.runtime.debug.x509.pem" container_private_key="art/build/apex/com.android.runtime.debug.pk8"
-name="com.android.runtime.release.apex" public_key="art/build/apex/com.android.runtime.avbpubkey" private_key="art/build/apex/com.android.runtime.pem" container_certificate="art/build/apex/com.android.runtime.release.x509.pem" container_private_key="art/build/apex/com.android.runtime.release.pk8"
-name="com.android.support.apexer.apex" public_key="system/apex/apexer/etc/com.android.support.apexer.avbpubkey" private_key="system/apex/apexer/etc/com.android.support.apexer.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
+name="com.android.conscrypt.apex" public_key="external/conscrypt/apex/com.android.conscrypt.avbpubkey" private_key="external/conscrypt/apex/com.android.conscrypt.pem" container_certificate="external/conscrypt/apex/com.android.conscrypt.x509.pem" container_private_key="external/conscrypt/apex/com.android.conscrypt.pk8" partition="system"
+name="com.android.dummy_product.apex" public_key="selected" private_key="selected" container_certificate="selected" container_private_key="selected" partition="product"
+name="com.android.runtime.apex" public_key="bionic/apex/com.android.runtime.avbpubkey" private_key="bionic/apex/com.android.runtime.pem" container_certificate="bionic/apex/com.android.runtime.x509.pem" container_private_key="bionic/apex/com.android.runtime.pk8" partition="system"
+name="com.android.vndk.current.on_vendor.apex" public_key="packages/modules/vndk/apex/com.android.vndk.current.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.current.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.current.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.current.pk8" partition="vendor"
+name="com.android.vndk.v27.apex" public_key="packages/modules/vndk/apex/com.android.vndk.v27.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.v27.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.v27.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.v27.pk8" partition="system_ext"
+name="com.android.vndk.v28.apex" public_key="packages/modules/vndk/apex/com.android.vndk.v28.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.v28.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.v28.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.v28.pk8" partition="system_ext"
+name="com.android.vndk.v29.apex" public_key="packages/modules/vndk/apex/com.android.vndk.v29.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.v29.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.v29.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.v29.pk8" partition="system_ext"
diff --git a/tools/releasetools/testdata/apexkeys_vendor.txt b/tools/releasetools/testdata/apexkeys_vendor.txt
index b751227..7dd3964 100644
--- a/tools/releasetools/testdata/apexkeys_vendor.txt
+++ b/tools/releasetools/testdata/apexkeys_vendor.txt
@@ -1,3 +1,7 @@
-name="com.android.runtime.release.apex" public_key="art/build/apex/com.android.runtime.avbpubkey" private_key="art/build/apex/com.android.runtime.pem" container_certificate="art/build/apex/com.android.runtime.release.x509.pem" container_private_key="art/build/apex/com.android.runtime.release.pk8"
-name="com.android.support.apexer.apex" public_key="system/apex/apexer/etc/com.android.support.apexer.avbpubkey" private_key="system/apex/apexer/etc/com.android.support.apexer.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
-name="com.android.runtime.debug.apex" public_key="art/build/apex/com.android.runtime.avbpubkey" private_key="art/build/apex/com.android.runtime.pem" container_certificate="art/build/apex/com.android.runtime.debug.x509.pem" container_private_key="art/build/apex/com.android.runtime.debug.pk8"
+name="com.android.conscrypt.apex" public_key="not_selected" private_key="not_selected" container_certificate="not_selected" container_private_key="not_selected" partition="system"
+name="com.android.dummy_product.apex" public_key="not_selected" private_key="not_selected" container_certificate="not_selected" container_private_key="not_selected" partition="product"
+name="com.android.runtime.apex" public_key="not_selected" private_key="not_selected" container_certificate="not_selected" container_private_key="not_selected" partition="system"
+name="com.android.vndk.current.on_vendor.apex" public_key="packages/modules/vndk/apex/com.android.vndk.current.pubkey" private_key="packages/modules/vndk/apex/com.android.vndk.current.pem" container_certificate="packages/modules/vndk/apex/com.android.vndk.current.x509.pem" container_private_key="packages/modules/vndk/apex/com.android.vndk.current.pk8" partition="vendor"
+name="com.android.vndk.v27.apex" public_key="not_selected" private_key="not_selected" container_certificate="not_selected" container_private_key="not_selected" partition="system_ext"
+name="com.android.vndk.v28.apex" public_key="not_selected" private_key="not_selected" container_certificate="not_selected" container_private_key="not_selected" partition="system_ext"
+name="com.android.vndk.v29.apex" public_key="not_selected" private_key="not_selected" container_certificate="not_selected" container_private_key="not_selected" partition="system_ext"
diff --git a/tools/releasetools/testdata/apkcerts_framework.txt b/tools/releasetools/testdata/apkcerts_framework.txt
new file mode 100644
index 0000000..a75f55c
--- /dev/null
+++ b/tools/releasetools/testdata/apkcerts_framework.txt
@@ -0,0 +1,6 @@
+name="TestSystem1.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="system"
+name="TestSystem2.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="system"
+name="TestVendor.apk" certificate="not_selected" private_key="not_selected" partition="vendor"
+name="TestOdm.apk" certificate="not_selected" private_key="not_selected" partition="odm"
+name="TestProduct.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="product"
+name="TestSystemExt.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="system_ext"
diff --git a/tools/releasetools/testdata/apkcerts_merge.txt b/tools/releasetools/testdata/apkcerts_merge.txt
new file mode 100644
index 0000000..0425e96
--- /dev/null
+++ b/tools/releasetools/testdata/apkcerts_merge.txt
@@ -0,0 +1,6 @@
+name="TestOdm.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="odm"
+name="TestProduct.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="product"
+name="TestSystem1.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="system"
+name="TestSystem2.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="system"
+name="TestSystemExt.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="system_ext"
+name="TestVendor.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="vendor"
diff --git a/tools/releasetools/testdata/apkcerts_vendor.txt b/tools/releasetools/testdata/apkcerts_vendor.txt
new file mode 100644
index 0000000..13d5255
--- /dev/null
+++ b/tools/releasetools/testdata/apkcerts_vendor.txt
@@ -0,0 +1,6 @@
+name="TestSystem1.apk" certificate="not_selected" private_key="not_selected" partition="system"
+name="TestSystem2.apk" certificate="not_selected" private_key="not_selected" partition="system"
+name="TestVendor.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="vendor"
+name="TestOdm.apk" certificate="build/make/target/product/security/testkey.x509.pem" private_key="build/make/target/product/security/testkey.pk8" partition="odm"
+name="TestProduct.apk" certificate="not_selected" private_key="not_selected" partition="product"
+name="TestSystemExt.apk" certificate="not_selected" private_key="not_selected" partition="system_ext"
diff --git a/tools/releasetools/testdata/test_aftl_rsa4096.pem b/tools/releasetools/testdata/test_aftl_rsa4096.pem
new file mode 100644
index 0000000..89f1ef3
--- /dev/null
+++ b/tools/releasetools/testdata/test_aftl_rsa4096.pem
@@ -0,0 +1,52 @@
+-----BEGIN PRIVATE KEY-----
+MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDDlhUPUgtWL6LB
+Wybp6wsEJeioV1aRLPGSA2/xIpTiJUK46cb/MD5eBTWjKENoIgX23eL/ePy2I68e
++WvcZ5ITGOTRQqNVZIdc5qvr03wkV0BsJQMHSMAHacePpB/4xM5MzN/6Ku1wA8Dw
+uK+v/Cw4hqq8H/gP0oPVQ1bwcIePzRPX4YkkyXusoyzTIm5DJ9reVtyFucKqANCN
+aFmGxcaEc2nADtARQWJpO95joFsMvr68+JBxpCt8aWbxuSz/rLJ9Y8Z46V/++XG+
+E4QEob/WVY5pUD/RyogLrfhIf+zO7R3wJklXElSFacIX9+RzR9dgkQVbqxLfBKIP
+XWLCsF4I4EnvqUtaVjIMl8UpZpoq8pDLRqZ71Os5xZYq06x9E02M6DnvFbZEdaOX
+MCz2mmNX3g5FahvJayBhCuNhyTkd79MFR71Wp48TvWxKz3S7q0T0cWHNhtPkHSCa
+KwD93AQnqtLKYDGkHIZBzJPcs+QxbzdHyGzhXZb+qh5KmQvNA9HRBQY1RkMmzIbI
+8pzYTwpOkbCEhVoCWcRaaF1Pgl+zcpgJOMbBBUabx/dConFIhMDW/I5fHgKgwGqm
+tWUibrMPdnfS6W5MXi8jC0eDuZl0VwmdE+4dLujiOofUYnb7D+GXojf3PrSLcTw1
+PmG0f7l5xDKN9a0N+IXqvD2oAANTsQIDAQABAoICAQCW5HXw8OogHvYg2HMIKrbA
+B4McRO1baWIhtRcq4PQeGIMGaA2HmS+0l65O5uRCNWWGlJ7pW+0TlCop6mHFk/4F
+T8JQk2mxmrI4ARqIAQwYeVwRUuioOP81eO1mK0gjQ6qpY7I0reOq9KpozQN18UYo
+gfS82Kkng9EDukUbkKV1UtFJTw3gXLVWdjlB1qFcnCXmPPs7DBpbz+8V+XiAWpsS
+WnwumP77IQeMiozDLdaw2YQMBHRjyDVocWTjfmpyAkleJZjcdagC7W1MKIBElomL
+EUyigTALaYZWBGy1ekQ3TIY5XUBdtZ2RpAsDNNOCAN3v+VI565zOhCOHWRO1gh24
+vyhBFR0HYqBRoLbLAqo8bM5iLPz1EWGyaTnfxt38J8Va0TD7KihcBnphiA+dkhEF
+oc0yIp/8S2o3CfkNok7Ju8Amb7M4JJuKhuP8wxn86fAHpjjd3Y4SlZp0NrTrd7T2
+msLIneb1OUZZxFxyJG1XQGEZplLPalnGadIF4p3q/3nd1rVb491qCNl/A5QwhI9r
+ZV62O90M9fu3+cAynBLbMT09IZecNwP1gXmunlY6YH+ymM+3NFqC8q2tnzomiz8/
+Fee0ftZ2C/jK62fET0Y8LPWGkVQGHtvZH0FPg4suA0GMmYAe0tQl93A+jFltfKKZ
+RgCDrYs6Wv76E9gnWVnEdQKCAQEA8L76LjZUTKOg83Bra+hP+cXnwGsgwOwJfGBp
+OM++5HzlpYjtbD38esBZVJtwb/8xJGdsHtP2n7ZgbSDuAnRj5S50QHIApvRkz1Y+
+1hL8tAdgVP2JkYjpyG3bPk4QVKyXkKvBcp2BCidXs75+HzfOxqkazumaYOYo2guh
+azHdka2xSqxcZqo4yyORc/oue25RU4skmuNDOlP0+OTxU/uXnl7QZmlaOfT5TqO4
+s7uER4BXt/87j44mnOBdXmtqrsL49+R9bzVskx76aeuaBbwf7jnpR058E71OZwSd
+F1P3fx6hl0yLOZF/5Jnq+14rEna6jH50XtzlhB6deSZFTOw2gwKCAQEAz/qXRzwH
+I0YWISgkUG2zBJseHmfHqV4CDzb5+tTJ3B2I8cXE0m2sQJXi2s7oMhWSc1cQOHCX
+txpgWaD59uBz2lcwnGRNp27TRXv8Wo+X0+O+lGWU2cO+j8AB2Vtb7F7rCySp0+Uu
+z+dBfoQ2zhKEQlkX0YldVILGzCL3QBHVvPC4iDlwkMRbcejDoh9NsBtHL8lG+MAw
+ZXbwJjhaJkhTXJFpJpejq70naS8VVlLt8Os80iuBXe5JK/ecAHtsNcJlXO02sMNZ
+Fbcy8WosGyvRKQ/tHtTjAlxZ7Ey8usWE8BvWBdUgiIBkIcjLtE2GrA8eOGNb3v1I
+HRt8NsV8yaLWuwKCAQAR7SaT6le8nTKO7gARuOq7npDzMwbtVqYeLM+o+08rlGFF
+QjzronH6cfg05J4quMXgABN8+CuVGO91MM6IQEJv/lWJtvN1ex1GkxV6u0812JbD
+vV1RCPDfi86XhRiSNYfTrfZponDJYMSXDcg2auFqyYzFe3+TV5ATLGqIoN3uyxA4
+jz0SJ/qypaNfD3IGnuBPaD0Bi4ql/TpwjhuqNUHE+SprdczSI/usb2SBfaUL7fKa
+MNcuiVc2tz48maMIAFypmMn+TewXyGa9HF4Lr0ZxZr6IIL/8eEwuP5my8v2q6Yz+
+xyRW1Q7A5vUoYoqyhUS+0Wu45JnyjJUNQFxIrg4hAoIBAF1uBIGSvN4iwRQ6FT4w
+WahrCre8BVzXh3NQTjJZXylL91YtcwLZE/Wbn+KN6o99U2IPLZE9O1qdNcVt5Hz8
+Te87FfJbuOrLhYuEbFQ+h4U/nUDK9XhyT+wB5JLBUOU5qrtByC0Rmtr411o/iONA
+PDwWC/YskEnDygywdIRKvsr3FN7VdvUB0Na2KxRsnZjMWElmUUS0Ccm7CZ0R2aWy
+/gfqpuMYYgVnnwnIhfxWmt+MvbDorGAHCMYAoQsyZuUrpB9/zP7RcvanavI6sP+v
+ynF43xvnpOdNl3Po8SuyScsXpijOmqPXkaP/sUsZPLOUww2vzPi6raetzjpIs4td
+ZLsCggEAe42Zj3FEbruJZeDgmd9lSc0j8UF90mNw8KH44IbuA6R9fGv3WkrNHEVd
+XZOwjWqAxhOj6pFoJk8n6h5d8iS/yXFZ0AfBMc21XMecu9mnfx9E9LFAIWmv7Wut
+vy3h2BqY+crglpg5RAw+3J97HAGMYCvp+hH2il+9zzjpmCtTD21LRMkw34szY7RR
+CDy9G5FTmKVlxw5eegvyj164olQRLurEdUIfSr5UnBjrWftJHy9JW8KWCeFDSmm9
+xCl3nGDyQuZmOTngxPtrOYAhb5LoKR9BeGcy6jlom7V4nYYqm3t1IDBgMqjYGT9c
+vqQgxO2OFsQOJQ/4PRYEKd1neTlZrw==
+-----END PRIVATE KEY-----
diff --git a/tools/releasetools/testdata/test_transparency_key.pub b/tools/releasetools/testdata/test_transparency_key.pub
new file mode 100644
index 0000000..8bfd816
--- /dev/null
+++ b/tools/releasetools/testdata/test_transparency_key.pub
@@ -0,0 +1,15 @@
+-----BEGIN PUBLIC KEY-----
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4ilqCNsenNA013iCdwgD
+YPxZ853nbHG9lMBp9boXiwRcqT/8bUKHIL7YX5z7s+QoRYVY3rkMKppRabclXzyx
+H59YnPMaU4uv7NqwWzjgaZo7E+vo7IF+KBjV3cJulId5Av0yIYUCsrwd7MpGtWdC
+Q3S+7Vd4zwzCKEhcvliNIhnNlp1U3wNkPCxOyCAsMEn6k8O5ar12ke5TvxDv15db
+rPDeHh8G2OYWoCkWL+lSN35L2kOJqKqVbLKWrrOd96RCYrrtbPCi580OADJRcUlG
+lgcjwmNwmypBWvQMZ6ITj0P0ksHnl1zZz1DE2rXe1goLI1doghb5KxLaezlR8c2C
+E3w/uo9KJgNmNgUVzzqZZ6FE0moyIDNOpP7KtZAL0DvEZj6jqLbB0ccPQElrg52m
+Dv2/A3nYSr0mYBKeskT4+Bg7PGgoC8p7WyLSxMyzJEDYdtrj9OFx6eZaA23oqTQx
+k3Qq5H8RfNBeeSUEeKF7pKH/7gyqZ2bNzBFMA2EBZgBozwRfaeN/HCv3qbaCnwvu
+6caacmAsK+RxiYxSL1QsJqyhCWWGxVyenmxdc1KG/u5ypi7OIioztyzR3t2tAzD3
+Nb+2t8lgHBRxbV24yiPlnvPmB1ZYEctXnlRR9Evpl1o9xA9NnybPHKr9rozN39CZ
+V/USB8K6ao1y5xPZxa8CZksCAwEAAQ==
+-----END PUBLIC KEY-----
+
diff --git a/tools/warn/chrome_project_list.py b/tools/warn/chrome_project_list.py
new file mode 100644
index 0000000..6096522
--- /dev/null
+++ b/tools/warn/chrome_project_list.py
@@ -0,0 +1,686 @@
+# python3
+"""Clang_Tidy_Warn Project List data for Chrome.
+
+This file stores the Chrome project_list used in warn.py and
+its dependencies. It has been put into this file for easier navigation and
+unification of the Chrome and Android warn.py.
+"""
+
+
+def create_pattern(pattern):
+  return [pattern, '(^|.*/)' + pattern + '/.*: warning:']
+
+
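# For reference (not part of the generated list): create_pattern('base/files')
# returns ['base/files', '(^|.*/)base/files/.*: warning:'], i.e. a display name
# plus a regex that is presumably matched against clang-tidy warning lines
# rooted in that directory.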
+# A list of [project_name, file_path_pattern].
+project_list = [
+    create_pattern('android_webview'),
+    create_pattern('apps'),
+    create_pattern('ash/app_list'),
+    create_pattern('ash/public'),
+    create_pattern('ash/assistant'),
+    create_pattern('ash/display'),
+    create_pattern('ash/resources'),
+    create_pattern('ash/login'),
+    create_pattern('ash/system'),
+    create_pattern('ash/wm'),
+    create_pattern('ash/shelf'),
+    create_pattern('ash'),
+    create_pattern('base/trace_event'),
+    create_pattern('base/debug'),
+    create_pattern('base/third_party'),
+    create_pattern('base/files'),
+    create_pattern('base/test'),
+    create_pattern('base/util'),
+    create_pattern('base/task'),
+    create_pattern('base/metrics'),
+    create_pattern('base/strings'),
+    create_pattern('base/memory'),
+    create_pattern('base'),
+    create_pattern('build'),
+    create_pattern('build_overrides'),
+    create_pattern('buildtools'),
+    create_pattern('cc'),
+    create_pattern('chrome/services'),
+    create_pattern('chrome/app'),
+    create_pattern('chrome/renderer'),
+    create_pattern('chrome/test'),
+    create_pattern('chrome/common/safe_browsing'),
+    create_pattern('chrome/common/importer'),
+    create_pattern('chrome/common/media_router'),
+    create_pattern('chrome/common/extensions'),
+    create_pattern('chrome/common'),
+    create_pattern('chrome/browser/sync_file_system'),
+    create_pattern('chrome/browser/safe_browsing'),
+    create_pattern('chrome/browser/download'),
+    create_pattern('chrome/browser/ui'),
+    create_pattern('chrome/browser/supervised_user'),
+    create_pattern('chrome/browser/search'),
+    create_pattern('chrome/browser/browsing_data'),
+    create_pattern('chrome/browser/predictors'),
+    create_pattern('chrome/browser/net'),
+    create_pattern('chrome/browser/devtools'),
+    create_pattern('chrome/browser/resource_coordinator'),
+    create_pattern('chrome/browser/page_load_metrics'),
+    create_pattern('chrome/browser/extensions'),
+    create_pattern('chrome/browser/ssl'),
+    create_pattern('chrome/browser/printing'),
+    create_pattern('chrome/browser/profiles'),
+    create_pattern('chrome/browser/chromeos'),
+    create_pattern('chrome/browser/performance_manager'),
+    create_pattern('chrome/browser/metrics'),
+    create_pattern('chrome/browser/component_updater'),
+    create_pattern('chrome/browser/media'),
+    create_pattern('chrome/browser/notifications'),
+    create_pattern('chrome/browser/web_applications'),
+    create_pattern('chrome/browser/media_galleries'),
+    create_pattern('chrome/browser'),
+    create_pattern('chrome'),
+    create_pattern('chromecast'),
+    create_pattern('chromeos/services'),
+    create_pattern('chromeos/dbus'),
+    create_pattern('chromeos/assistant'),
+    create_pattern('chromeos/components'),
+    create_pattern('chromeos/settings'),
+    create_pattern('chromeos/constants'),
+    create_pattern('chromeos/network'),
+    create_pattern('chromeos'),
+    create_pattern('cloud_print'),
+    create_pattern('components/crash'),
+    create_pattern('components/subresource_filter'),
+    create_pattern('components/invalidation'),
+    create_pattern('components/autofill'),
+    create_pattern('components/onc'),
+    create_pattern('components/arc'),
+    create_pattern('components/safe_browsing'),
+    create_pattern('components/services'),
+    create_pattern('components/cast_channel'),
+    create_pattern('components/download'),
+    create_pattern('components/feed'),
+    create_pattern('components/offline_pages'),
+    create_pattern('components/bookmarks'),
+    create_pattern('components/cloud_devices'),
+    create_pattern('components/mirroring'),
+    create_pattern('components/spellcheck'),
+    create_pattern('components/viz'),
+    create_pattern('components/gcm_driver'),
+    create_pattern('components/ntp_snippets'),
+    create_pattern('components/translate'),
+    create_pattern('components/search_engines'),
+    create_pattern('components/background_task_scheduler'),
+    create_pattern('components/signin'),
+    create_pattern('components/chromeos_camera'),
+    create_pattern('components/reading_list'),
+    create_pattern('components/assist_ranker'),
+    create_pattern('components/payments'),
+    create_pattern('components/feedback'),
+    create_pattern('components/ui_devtools'),
+    create_pattern('components/password_manager'),
+    create_pattern('components/omnibox'),
+    create_pattern('components/content_settings'),
+    create_pattern('components/dom_distiller'),
+    create_pattern('components/nacl'),
+    create_pattern('components/metrics'),
+    create_pattern('components/policy'),
+    create_pattern('components/optimization_guide'),
+    create_pattern('components/exo'),
+    create_pattern('components/update_client'),
+    create_pattern('components/data_reduction_proxy'),
+    create_pattern('components/sync'),
+    create_pattern('components/drive'),
+    create_pattern('components/variations'),
+    create_pattern('components/history'),
+    create_pattern('components/webcrypto'),
+    create_pattern('components'),
+    create_pattern('content/public'),
+    create_pattern('content/renderer'),
+    create_pattern('content/test'),
+    create_pattern('content/common'),
+    create_pattern('content/browser'),
+    create_pattern('content/zygote'),
+    create_pattern('content'),
+    create_pattern('courgette'),
+    create_pattern('crypto'),
+    create_pattern('dbus'),
+    create_pattern('device/base'),
+    create_pattern('device/vr'),
+    create_pattern('device/gamepad'),
+    create_pattern('device/test'),
+    create_pattern('device/fido'),
+    create_pattern('device/bluetooth'),
+    create_pattern('device'),
+    create_pattern('docs'),
+    create_pattern('extensions/docs'),
+    create_pattern('extensions/components'),
+    create_pattern('extensions/buildflags'),
+    create_pattern('extensions/renderer'),
+    create_pattern('extensions/test'),
+    create_pattern('extensions/common'),
+    create_pattern('extensions/shell'),
+    create_pattern('extensions/browser'),
+    create_pattern('extensions/strings'),
+    create_pattern('extensions'),
+    create_pattern('fuchsia'),
+    create_pattern('gin'),
+    create_pattern('google_apis'),
+    create_pattern('google_update'),
+    create_pattern('gpu/perftests'),
+    create_pattern('gpu/GLES2'),
+    create_pattern('gpu/command_buffer'),
+    create_pattern('gpu/tools'),
+    create_pattern('gpu/gles2_conform_support'),
+    create_pattern('gpu/ipc'),
+    create_pattern('gpu/khronos_glcts_support'),
+    create_pattern('gpu'),
+    create_pattern('headless'),
+    create_pattern('infra'),
+    create_pattern('ipc'),
+    create_pattern('jingle'),
+    create_pattern('media'),
+    create_pattern('mojo'),
+    create_pattern('native_client'),
+    create_pattern('native_client_sdk'),
+    create_pattern('net'),
+    create_pattern('out'),
+    create_pattern('pdf'),
+    create_pattern('ppapi'),
+    create_pattern('printing'),
+    create_pattern('remoting'),
+    create_pattern('rlz'),
+    create_pattern('sandbox'),
+    create_pattern('services/audio'),
+    create_pattern('services/content'),
+    create_pattern('services/data_decoder'),
+    create_pattern('services/device'),
+    create_pattern('services/file'),
+    create_pattern('services/identity'),
+    create_pattern('services/image_annotation'),
+    create_pattern('services/media_session'),
+    create_pattern('services/metrics'),
+    create_pattern('services/network'),
+    create_pattern('services/preferences'),
+    create_pattern('services/proxy_resolver'),
+    create_pattern('services/resource_coordinator'),
+    create_pattern('services/service_manager'),
+    create_pattern('services/shape_detection'),
+    create_pattern('services/strings'),
+    create_pattern('services/test'),
+    create_pattern('services/tracing'),
+    create_pattern('services/video_capture'),
+    create_pattern('services/viz'),
+    create_pattern('services/ws'),
+    create_pattern('services'),
+    create_pattern('skia/config'),
+    create_pattern('skia/ext'),
+    create_pattern('skia/public'),
+    create_pattern('skia/tools'),
+    create_pattern('skia'),
+    create_pattern('sql'),
+    create_pattern('storage'),
+    create_pattern('styleguide'),
+    create_pattern('testing'),
+    create_pattern('third_party/Python-Markdown'),
+    create_pattern('third_party/SPIRV-Tools'),
+    create_pattern('third_party/abseil-cpp'),
+    create_pattern('third_party/accessibility-audit'),
+    create_pattern('third_party/accessibility_test_framework'),
+    create_pattern('third_party/adobe'),
+    create_pattern('third_party/afl'),
+    create_pattern('third_party/android_build_tools'),
+    create_pattern('third_party/android_crazy_linker'),
+    create_pattern('third_party/android_data_chart'),
+    create_pattern('third_party/android_deps'),
+    create_pattern('third_party/android_media'),
+    create_pattern('third_party/android_ndk'),
+    create_pattern('third_party/android_opengl'),
+    create_pattern('third_party/android_platform'),
+    create_pattern('third_party/android_protobuf'),
+    create_pattern('third_party/android_sdk'),
+    create_pattern('third_party/android_support_test_runner'),
+    create_pattern('third_party/android_swipe_refresh'),
+    create_pattern('third_party/android_system_sdk'),
+    create_pattern('third_party/android_tools'),
+    create_pattern('third_party/angle'),
+    create_pattern('third_party/apache-mac'),
+    create_pattern('third_party/apache-portable-runtime'),
+    create_pattern('third_party/apache-win32'),
+    create_pattern('third_party/apk-patch-size-estimator'),
+    create_pattern('third_party/apple_apsl'),
+    create_pattern('third_party/arcore-android-sdk'),
+    create_pattern('third_party/ashmem'),
+    create_pattern('third_party/auto'),
+    create_pattern('third_party/axe-core'),
+    create_pattern('third_party/bazel'),
+    create_pattern('third_party/binutils'),
+    create_pattern('third_party/bison'),
+    create_pattern('third_party/blanketjs'),
+    create_pattern('third_party/blink/common'),
+    create_pattern('third_party/blink/manual_tests'),
+    create_pattern('third_party/blink/perf_tests'),
+    create_pattern('third_party/blink/public/common'),
+    create_pattern('third_party/blink/public/default_100_percent'),
+    create_pattern('third_party/blink/public/default_200_percent'),
+    create_pattern('third_party/blink/public/platform'),
+    create_pattern('third_party/blink/public/mojom/ad_tagging'),
+    create_pattern('third_party/blink/public/mojom/app_banner'),
+    create_pattern('third_party/blink/public/mojom/appcache'),
+    create_pattern('third_party/blink/public/mojom/array_buffer'),
+    create_pattern('third_party/blink/public/mojom/associated_interfaces'),
+    create_pattern('third_party/blink/public/mojom/autoplay'),
+    create_pattern('third_party/blink/public/mojom/background_fetch'),
+    create_pattern('third_party/blink/public/mojom/background_sync'),
+    create_pattern('third_party/blink/public/mojom/badging'),
+    create_pattern('third_party/blink/public/mojom/blob'),
+    create_pattern('third_party/blink/public/mojom/bluetooth'),
+    create_pattern('third_party/blink/public/mojom/broadcastchannel'),
+    create_pattern('third_party/blink/public/mojom/cache_storage'),
+    create_pattern('third_party/blink/public/mojom/choosers'),
+    create_pattern('third_party/blink/public/mojom/clipboard'),
+    create_pattern('third_party/blink/public/mojom/commit_result'),
+    create_pattern('third_party/blink/public/mojom/contacts'),
+    create_pattern('third_party/blink/public/mojom/cookie_store'),
+    create_pattern('third_party/blink/public/mojom/crash'),
+    create_pattern('third_party/blink/public/mojom/credentialmanager'),
+    create_pattern('third_party/blink/public/mojom/csp'),
+    create_pattern('third_party/blink/public/mojom/devtools'),
+    create_pattern('third_party/blink/public/mojom/document_metadata'),
+    create_pattern('third_party/blink/public/mojom/dom_storage'),
+    create_pattern('third_party/blink/public/mojom/dwrite_font_proxy'),
+    create_pattern('third_party/blink/public/mojom/feature_policy'),
+    create_pattern('third_party/blink/public/mojom/fetch'),
+    create_pattern('third_party/blink/public/mojom/file'),
+    create_pattern('third_party/blink/public/mojom/filesystem'),
+    create_pattern('third_party/blink/public/mojom/font_unique_name_lookup'),
+    create_pattern('third_party/blink/public/mojom/frame'),
+    create_pattern('third_party/blink/public/mojom/frame_sinks'),
+    create_pattern('third_party/blink/public/mojom/geolocation'),
+    create_pattern('third_party/blink/public/mojom/hyphenation'),
+    create_pattern('third_party/blink/public/mojom/idle'),
+    create_pattern('third_party/blink/public/mojom/indexeddb'),
+    create_pattern('third_party/blink/public/mojom/input'),
+    create_pattern('third_party/blink/public/mojom/insecure_input'),
+    create_pattern('third_party/blink/public/mojom/installation'),
+    create_pattern('third_party/blink/public/mojom/installedapp'),
+    create_pattern('third_party/blink/public/mojom/keyboard_lock'),
+    create_pattern('third_party/blink/public/mojom/leak_detector'),
+    create_pattern('third_party/blink/public/mojom/loader'),
+    create_pattern('third_party/blink/public/mojom/locks'),
+    create_pattern('third_party/blink/public/mojom/manifest'),
+    create_pattern('third_party/blink/public/mojom/media_controls'),
+    create_pattern('third_party/blink/public/mojom/mediasession'),
+    create_pattern('third_party/blink/public/mojom/mediastream'),
+    create_pattern('third_party/blink/public/mojom/messaging'),
+    create_pattern('third_party/blink/public/mojom/mime'),
+    create_pattern('third_party/blink/public/mojom/native_file_system'),
+    create_pattern('third_party/blink/public/mojom/net'),
+    create_pattern('third_party/blink/public/mojom/notifications'),
+    create_pattern('third_party/blink/public/mojom/oom_intervention'),
+    create_pattern('third_party/blink/public/mojom/page'),
+    create_pattern('third_party/blink/public/mojom/payments'),
+    create_pattern('third_party/blink/public/mojom/permissions'),
+    create_pattern('third_party/blink/public/mojom/picture_in_picture'),
+    create_pattern('third_party/blink/public/mojom/plugins'),
+    create_pattern('third_party/blink/public/mojom/portal'),
+    create_pattern('third_party/blink/public/mojom/presentation'),
+    create_pattern('third_party/blink/public/mojom/push_messaging'),
+    create_pattern('third_party/blink/public/mojom/quota'),
+    create_pattern('third_party/blink/public/mojom/remote_objects'),
+    create_pattern('third_party/blink/public/mojom/reporting'),
+    create_pattern('third_party/blink/public/mojom/script'),
+    create_pattern('third_party/blink/public/mojom/selection_menu'),
+    create_pattern('third_party/blink/public/mojom/serial'),
+    create_pattern('third_party/blink/public/mojom/service_worker'),
+    create_pattern('third_party/blink/public/mojom/site_engagement'),
+    create_pattern('third_party/blink/public/mojom/sms'),
+    create_pattern('third_party/blink/public/mojom/speech'),
+    create_pattern('third_party/blink/public/mojom/ukm'),
+    create_pattern('third_party/blink/public/mojom/unhandled_tap_notifier'),
+    create_pattern('third_party/blink/public/mojom/usb'),
+    create_pattern('third_party/blink/public/mojom/use_counter'),
+    create_pattern('third_party/blink/public/mojom/user_agent'),
+    create_pattern('third_party/blink/public/mojom/wake_lock'),
+    create_pattern('third_party/blink/public/mojom/web_client_hints'),
+    create_pattern('third_party/blink/public/mojom/web_feature'),
+    create_pattern('third_party/blink/public/mojom/webaudio'),
+    create_pattern('third_party/blink/public/mojom/webauthn'),
+    create_pattern('third_party/blink/public/mojom/webdatabase'),
+    create_pattern('third_party/blink/public/mojom/webshare'),
+    create_pattern('third_party/blink/public/mojom/window_features'),
+    create_pattern('third_party/blink/public/mojom/worker'),
+    create_pattern('third_party/blink/public/web'),
+    create_pattern('third_party/blink/renderer/bindings'),
+    create_pattern('third_party/blink/renderer/build'),
+    create_pattern('third_party/blink/renderer/controller'),
+    create_pattern('third_party/blink/renderer/core/accessibility'),
+    create_pattern('third_party/blink/renderer/core/animation'),
+    create_pattern('third_party/blink/renderer/core/aom'),
+    create_pattern('third_party/blink/renderer/core/clipboard'),
+    create_pattern('third_party/blink/renderer/core/content_capture'),
+    create_pattern('third_party/blink/renderer/core/context_features'),
+    create_pattern('third_party/blink/renderer/core/css'),
+    create_pattern('third_party/blink/renderer/core/display_lock'),
+    create_pattern('third_party/blink/renderer/core/dom'),
+    create_pattern('third_party/blink/renderer/core/editing'),
+    create_pattern('third_party/blink/renderer/core/events'),
+    create_pattern('third_party/blink/renderer/core/execution_context'),
+    create_pattern('third_party/blink/renderer/core/exported'),
+    create_pattern('third_party/blink/renderer/core/feature_policy'),
+    create_pattern('third_party/blink/renderer/core/fetch'),
+    create_pattern('third_party/blink/renderer/core/fileapi'),
+    create_pattern('third_party/blink/renderer/core/frame'),
+    create_pattern('third_party/blink/renderer/core/fullscreen'),
+    create_pattern('third_party/blink/renderer/core/geometry'),
+    create_pattern('third_party/blink/renderer/core/html'),
+    create_pattern('third_party/blink/renderer/core/imagebitmap'),
+    create_pattern('third_party/blink/renderer/core/input'),
+    create_pattern('third_party/blink/renderer/core/inspector'),
+    create_pattern('third_party/blink/renderer/core/intersection_observer'),
+    create_pattern('third_party/blink/renderer/core/invisible_dom'),
+    create_pattern('third_party/blink/renderer/core/layout'),
+    create_pattern('third_party/blink/renderer/core/loader'),
+    create_pattern('third_party/blink/renderer/core/messaging'),
+    create_pattern('third_party/blink/renderer/core/mojo'),
+    create_pattern('third_party/blink/renderer/core/offscreencanvas'),
+    create_pattern('third_party/blink/renderer/core/origin_trials'),
+    create_pattern('third_party/blink/renderer/core/page'),
+    create_pattern('third_party/blink/renderer/core/paint'),
+    create_pattern('third_party/blink/renderer/core/probe'),
+    create_pattern('third_party/blink/renderer/core/resize_observer'),
+    create_pattern('third_party/blink/renderer/core/scheduler'),
+    create_pattern('third_party/blink/renderer/core/script'),
+    create_pattern('third_party/blink/renderer/core/scroll'),
+    create_pattern('third_party/blink/renderer/core/streams'),
+    create_pattern('third_party/blink/renderer/core/style'),
+    create_pattern('third_party/blink/renderer/core/svg'),
+    create_pattern('third_party/blink/renderer/core/testing'),
+    create_pattern('third_party/blink/renderer/core/timezone'),
+    create_pattern('third_party/blink/renderer/core/timing'),
+    create_pattern('third_party/blink/renderer/core/trustedtypes'),
+    create_pattern('third_party/blink/renderer/core/typed_arrays'),
+    create_pattern('third_party/blink/renderer/core/url'),
+    create_pattern('third_party/blink/renderer/core/win'),
+    create_pattern('third_party/blink/renderer/core/workers'),
+    create_pattern('third_party/blink/renderer/core/xml'),
+    create_pattern('third_party/blink/renderer/core/xmlhttprequest'),
+    create_pattern('third_party/blink/renderer/devtools'),
+    create_pattern('third_party/blink/renderer/modules'),
+    create_pattern('third_party/blink/renderer/platform'),
+    create_pattern('third_party/blink/tools'),
+    create_pattern('third_party/blink/web_tests'),
+    create_pattern('third_party/boringssl'),
+    create_pattern('third_party/bouncycastle'),
+    create_pattern('third_party/breakpad'),
+    create_pattern('third_party/brotli'),
+    create_pattern('third_party/bspatch'),
+    create_pattern('third_party/byte_buddy'),
+    create_pattern('third_party/cacheinvalidation'),
+    create_pattern('third_party/catapult'),
+    create_pattern('third_party/cct_dynamic_module'),
+    create_pattern('third_party/ced'),
+    create_pattern('third_party/chaijs'),
+    create_pattern('third_party/checkstyle'),
+    create_pattern('third_party/chromevox'),
+    create_pattern('third_party/chromite'),
+    create_pattern('third_party/cld_3'),
+    create_pattern('third_party/closure_compiler'),
+    create_pattern('third_party/colorama'),
+    create_pattern('third_party/crashpad'),
+    create_pattern('third_party/crc32c'),
+    create_pattern('third_party/cros_system_api'),
+    create_pattern('third_party/custom_tabs_client'),
+    create_pattern('third_party/d3'),
+    create_pattern('third_party/dav1d'),
+    create_pattern('third_party/dawn'),
+    create_pattern('third_party/decklink'),
+    create_pattern('third_party/depot_tools'),
+    create_pattern('third_party/devscripts'),
+    create_pattern('third_party/devtools-node-modules'),
+    create_pattern('third_party/dom_distiller_js'),
+    create_pattern('third_party/elfutils'),
+    create_pattern('third_party/emoji-segmenter'),
+    create_pattern('third_party/errorprone'),
+    create_pattern('third_party/espresso'),
+    create_pattern('third_party/expat'),
+    create_pattern('third_party/feed'),
+    create_pattern('third_party/ffmpeg'),
+    create_pattern('third_party/flac'),
+    create_pattern('third_party/flatbuffers'),
+    create_pattern('third_party/flot'),
+    create_pattern('third_party/fontconfig'),
+    create_pattern('third_party/freetype'),
+    create_pattern('third_party/fuchsia-sdk'),
+    create_pattern('third_party/gestures'),
+    create_pattern('third_party/gif_player'),
+    create_pattern('third_party/glfw'),
+    create_pattern('third_party/glslang'),
+    create_pattern('third_party/gnu_binutils'),
+    create_pattern('third_party/google-truth'),
+    create_pattern('third_party/google_android_play_core'),
+    create_pattern('third_party/google_appengine_cloudstorage'),
+    create_pattern('third_party/google_input_tools'),
+    create_pattern('third_party/google_toolbox_for_mac'),
+    create_pattern('third_party/google_trust_services'),
+    create_pattern('third_party/googletest'),
+    create_pattern('third_party/gperf'),
+    create_pattern('third_party/gradle_wrapper'),
+    create_pattern('third_party/grpc'),
+    create_pattern('third_party/gson'),
+    create_pattern('third_party/guava'),
+    create_pattern('third_party/gvr-android-keyboard'),
+    create_pattern('third_party/gvr-android-sdk'),
+    create_pattern('third_party/hamcrest'),
+    create_pattern('third_party/harfbuzz-ng'),
+    create_pattern('third_party/hunspell'),
+    create_pattern('third_party/hunspell_dictionaries'),
+    create_pattern('third_party/iaccessible2'),
+    create_pattern('third_party/iccjpeg'),
+    create_pattern('third_party/icu/android'),
+    create_pattern('third_party/icu/android_small'),
+    create_pattern('third_party/icu/cast'),
+    create_pattern('third_party/icu/chromeos'),
+    create_pattern('third_party/icu/common'),
+    create_pattern('third_party/icu/filters'),
+    create_pattern('third_party/icu/flutter'),
+    create_pattern('third_party/icu/fuzzers'),
+    create_pattern('third_party/icu/ios'),
+    create_pattern('third_party/icu/patches'),
+    create_pattern('third_party/icu/scripts'),
+    create_pattern('third_party/icu/source'),
+    create_pattern('third_party/icu/tzres'),
+    create_pattern('third_party/icu4j'),
+    create_pattern('third_party/ijar'),
+    create_pattern('third_party/ink'),
+    create_pattern('third_party/inspector_protocol'),
+    create_pattern('third_party/instrumented_libraries'),
+    create_pattern('third_party/intellij'),
+    create_pattern('third_party/isimpledom'),
+    create_pattern('third_party/jacoco'),
+    create_pattern('third_party/jinja2'),
+    create_pattern('third_party/jsoncpp'),
+    create_pattern('third_party/jsr-305'),
+    create_pattern('third_party/jstemplate'),
+    create_pattern('third_party/junit'),
+    create_pattern('third_party/khronos'),
+    create_pattern('third_party/lcov'),
+    create_pattern('third_party/leveldatabase'),
+    create_pattern('third_party/libFuzzer'),
+    create_pattern('third_party/libXNVCtrl'),
+    create_pattern('third_party/libaddressinput'),
+    create_pattern('third_party/libaom'),
+    create_pattern('third_party/libcxx-pretty-printers'),
+    create_pattern('third_party/libdrm'),
+    create_pattern('third_party/libevdev'),
+    create_pattern('third_party/libjingle_xmpp'),
+    create_pattern('third_party/libjpeg'),
+    create_pattern('third_party/libjpeg_turbo'),
+    create_pattern('third_party/liblouis'),
+    create_pattern('third_party/libovr'),
+    create_pattern('third_party/libphonenumber'),
+    create_pattern('third_party/libpng'),
+    create_pattern('third_party/libprotobuf-mutator'),
+    create_pattern('third_party/libsecret'),
+    create_pattern('third_party/libsrtp'),
+    create_pattern('third_party/libsync'),
+    create_pattern('third_party/libudev'),
+    create_pattern('third_party/libusb'),
+    create_pattern('third_party/libvpx'),
+    create_pattern('third_party/libwebm'),
+    create_pattern('third_party/libwebp'),
+    create_pattern('third_party/libxml'),
+    create_pattern('third_party/libxslt'),
+    create_pattern('third_party/libyuv'),
+    create_pattern('third_party/lighttpd'),
+    create_pattern('third_party/logilab'),
+    create_pattern('third_party/lss'),
+    create_pattern('third_party/lzma_sdk'),
+    create_pattern('third_party/mach_override'),
+    create_pattern('third_party/markdown'),
+    create_pattern('third_party/markupsafe'),
+    create_pattern('third_party/material_design_icons'),
+    create_pattern('third_party/mesa_headers'),
+    create_pattern('third_party/metrics_proto'),
+    create_pattern('third_party/microsoft_webauthn'),
+    create_pattern('third_party/mingw-w64'),
+    create_pattern('third_party/minigbm'),
+    create_pattern('third_party/minizip'),
+    create_pattern('third_party/mocha'),
+    create_pattern('third_party/mockito'),
+    create_pattern('third_party/modp_b64'),
+    create_pattern('third_party/motemplate'),
+    create_pattern('third_party/mozilla'),
+    create_pattern('third_party/nacl_sdk_binaries'),
+    create_pattern('third_party/nasm'),
+    create_pattern('third_party/netty-tcnative'),
+    create_pattern('third_party/netty4'),
+    create_pattern('third_party/node'),
+    create_pattern('third_party/nvml'),
+    create_pattern('third_party/objenesis'),
+    create_pattern('third_party/ocmock'),
+    create_pattern('third_party/openh264'),
+    create_pattern('third_party/openscreen'),
+    create_pattern('third_party/openvr'),
+    create_pattern('third_party/opus'),
+    create_pattern('third_party/ots'),
+    create_pattern('third_party/ow2_asm'),
+    create_pattern('third_party/pdfium'),
+    create_pattern('third_party/pefile'),
+    create_pattern('third_party/perfetto'),
+    create_pattern('third_party/perl'),
+    create_pattern('third_party/pexpect'),
+    create_pattern('third_party/pffft'),
+    create_pattern('third_party/ply'),
+    create_pattern('third_party/polymer'),
+    create_pattern('third_party/proguard'),
+    create_pattern('third_party/protobuf'),
+    create_pattern('third_party/protoc_javalite'),
+    create_pattern('third_party/pycoverage'),
+    create_pattern('third_party/pyelftools'),
+    create_pattern('third_party/pyjson5'),
+    create_pattern('third_party/pylint'),
+    create_pattern('third_party/pymock'),
+    create_pattern('third_party/pystache'),
+    create_pattern('third_party/pywebsocket'),
+    create_pattern('third_party/qcms'),
+    create_pattern('third_party/quic_trace'),
+    create_pattern('third_party/qunit'),
+    create_pattern('third_party/r8'),
+    create_pattern('third_party/re2'),
+    create_pattern('third_party/requests'),
+    create_pattern('third_party/rnnoise'),
+    create_pattern('third_party/robolectric'),
+    create_pattern('third_party/s2cellid'),
+    create_pattern('third_party/sfntly'),
+    create_pattern('third_party/shaderc'),
+    create_pattern('third_party/simplejson'),
+    create_pattern('third_party/sinonjs'),
+    create_pattern('third_party/skia'),
+    create_pattern('third_party/smhasher'),
+    create_pattern('third_party/snappy'),
+    create_pattern('third_party/speech-dispatcher'),
+    create_pattern('third_party/spirv-cross'),
+    create_pattern('third_party/spirv-headers'),
+    create_pattern('third_party/sqlite'),
+    create_pattern('third_party/sqlite4java'),
+    create_pattern('third_party/sudden_motion_sensor'),
+    create_pattern('third_party/swiftshader'),
+    create_pattern('third_party/tcmalloc'),
+    create_pattern('third_party/test_fonts'),
+    create_pattern('third_party/tlslite'),
+    create_pattern('third_party/ub-uiautomator'),
+    create_pattern('third_party/unrar'),
+    create_pattern('third_party/usb_ids'),
+    create_pattern('third_party/usrsctp'),
+    create_pattern('third_party/v4l-utils'),
+    create_pattern('third_party/vulkan'),
+    create_pattern('third_party/wayland'),
+    create_pattern('third_party/wayland-protocols'),
+    create_pattern('third_party/wds'),
+    create_pattern('third_party/web-animations-js'),
+    create_pattern('third_party/webdriver'),
+    create_pattern('third_party/webgl'),
+    create_pattern('third_party/webrtc'),
+    create_pattern('third_party/webrtc_overrides'),
+    create_pattern('third_party/webxr_test_pages'),
+    create_pattern('third_party/widevine'),
+    create_pattern('third_party/win_build_output'),
+    create_pattern('third_party/woff2'),
+    create_pattern('third_party/wtl'),
+    create_pattern('third_party/xdg-utils'),
+    create_pattern('third_party/xstream'),
+    create_pattern('third_party/yasm'),
+    create_pattern('third_party/zlib'),
+    create_pattern('tools'),
+    create_pattern('ui/accelerated_widget_mac'),
+    create_pattern('ui/accessibility'),
+    create_pattern('ui/android'),
+    create_pattern('ui/aura'),
+    create_pattern('ui/aura_extra'),
+    create_pattern('ui/base'),
+    create_pattern('ui/chromeos'),
+    create_pattern('ui/compositor'),
+    create_pattern('ui/compositor_extra'),
+    create_pattern('ui/content_accelerators'),
+    create_pattern('ui/display'),
+    create_pattern('ui/events'),
+    create_pattern('ui/file_manager'),
+    create_pattern('ui/gfx'),
+    create_pattern('ui/gl'),
+    create_pattern('ui/latency'),
+    create_pattern('ui/login'),
+    create_pattern('ui/message_center'),
+    create_pattern('ui/native_theme'),
+    create_pattern('ui/ozone'),
+    create_pattern('ui/platform_window'),
+    create_pattern('ui/resources'),
+    create_pattern('ui/shell_dialogs'),
+    create_pattern('ui/snapshot'),
+    create_pattern('ui/strings'),
+    create_pattern('ui/surface'),
+    create_pattern('ui/touch_selection'),
+    create_pattern('ui/views'),
+    create_pattern('ui/views_bridge_mac'),
+    create_pattern('ui/views_content_client'),
+    create_pattern('ui/web_dialogs'),
+    create_pattern('ui/webui'),
+    create_pattern('ui/wm'),
+    create_pattern('url'),
+    create_pattern('v8/benchmarks'),
+    create_pattern('v8/build_overrides'),
+    create_pattern('v8/custom_deps'),
+    create_pattern('v8/docs'),
+    create_pattern('v8/gni'),
+    create_pattern('v8/include'),
+    create_pattern('v8/infra'),
+    create_pattern('v8/samples'),
+    create_pattern('v8/src'),
+    create_pattern('v8/test'),
+    create_pattern('v8/testing'),
+    create_pattern('v8/third_party'),
+    create_pattern('v8/tools'),
+
+    # keep out/obj and other patterns at the end.
+    [
+        'out/obj', '.*/(gen|obj[^/]*)/(include|EXECUTABLES|SHARED_LIBRARIES|'
+        'STATIC_LIBRARIES|NATIVE_TESTS)/.*: warning:'
+    ],
+    ['other', '.*']  # all other unrecognized patterns
+]
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
new file mode 100644
index 0000000..b8d3fe6
--- /dev/null
+++ b/tools/warn/html_writer.py
@@ -0,0 +1,673 @@
+# Lint as: python3
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Emit warning messages to html or csv files."""
+
+# To emit html page of warning messages:
+#   flags: --byproject, --url, --separator
+# Old stuff for static html components:
+#   html_head_scripts:  static html scripts and styles
+#   html_big:
+#   dump_stats, dump_html_prologue, dump_html_epilogue:
+#   emit_buttons:
+#   dump_fixed
+#   sort_warnings:
+#   emit_stats_by_project:
+#   all_patterns,
+#   findproject, classify_warning
+#   dump_html
+#
+# New dynamic HTML page's static JavaScript data:
+#   Some data are copied from Python to JavaScript, to generate HTML elements.
+#   FlagPlatform           flags.platform
+#   FlagURL                flags.url, used by 'android'
+#   FlagSeparator          flags.separator, used by 'android'
+#   SeverityColors:        list of colors for all severity levels
+#   SeverityHeaders:       list of headers for all severity levels
+#   SeverityColumnHeaders: list of column_headers for all severity levels
+#   ProjectNames:          project_names, or project_list[*][0]
+#   WarnPatternsSeverity:     warn_patterns[*]['severity']
+#   WarnPatternsDescription:  warn_patterns[*]['description']
+#   WarningMessages:          warning_messages
+#   Warnings:                 warning_records
+#   StatsHeader:           warning count table header row
+#   StatsRows:             array of warning count table rows
+#
+# New dynamic HTML page's dynamic JavaScript data:
+#
+# New dynamic HTML related function to emit data:
+#   escape_string, strip_escape_string, emit_warning_arrays
+#   emit_js_data():
+
+from __future__ import print_function
+import cgi
+import csv
+import sys
+
+# pylint:disable=relative-beyond-top-level
+# pylint:disable=g-importing-member
+from .severity import Severity
+
+
+html_head_scripts = """\
+  <script type="text/javascript">
+  function expand(id) {
+    var e = document.getElementById(id);
+    var f = document.getElementById(id + "_mark");
+    if (e.style.display == 'block') {
+       e.style.display = 'none';
+       f.innerHTML = '&#x2295';
+    }
+    else {
+       e.style.display = 'block';
+       f.innerHTML = '&#x2296';
+    }
+  };
+  function expandCollapse(show) {
+    for (var id = 1; ; id++) {
+      var e = document.getElementById(id + "");
+      var f = document.getElementById(id + "_mark");
+      if (!e || !f) break;
+      e.style.display = (show ? 'block' : 'none');
+      f.innerHTML = (show ? '&#x2296' : '&#x2295');
+    }
+  };
+  </script>
+  <style type="text/css">
+  th,td{border-collapse:collapse; border:1px solid black;}
+  .button{color:blue;font-size:110%;font-weight:bolder;}
+  .bt{color:black;background-color:transparent;border:none;outline:none;
+      font-size:140%;font-weight:bolder;}
+  .c0{background-color:#e0e0e0;}
+  .c1{background-color:#d0d0d0;}
+  .t1{border-collapse:collapse; width:100%; border:1px solid black;}
+  </style>
+  <script src="https://www.gstatic.com/charts/loader.js"></script>
+"""
+
+
+def make_writer(output_stream):
+
+  def writer(text):
+    return output_stream.write(text + '\n')
+
+  return writer
+
+
+def html_big(param):
+  return '<font size="+2">' + param + '</font>'
+
+
+def dump_html_prologue(title, writer, warn_patterns, project_names):
+  writer('<html>\n<head>')
+  writer('<title>' + title + '</title>')
+  writer(html_head_scripts)
+  emit_stats_by_project(writer, warn_patterns, project_names)
+  writer('</head>\n<body>')
+  writer(html_big(title))
+  writer('<p>')
+
+
+def dump_html_epilogue(writer):
+  writer('</body>\n</html>')
+
+
+def sort_warnings(warn_patterns):
+  for i in warn_patterns:
+    i['members'] = sorted(set(i['members']))
+
+
+def create_warnings(warn_patterns, project_names):
+  """Creates warnings s.t.
+
+  warnings[p][s] is as specified in above docs.
+
+  Args:
+    warn_patterns: list of warning patterns for specified platform
+    project_names: list of project names
+
+  Returns:
+    2D warnings array where warnings[p][s] is # of warnings in project name p of
+    severity level s
+  """
+  # pylint:disable=g-complex-comprehension
+  warnings = {p: {s.value: 0 for s in Severity.levels} for p in project_names}
+  for i in warn_patterns:
+    s = i['severity'].value
+    for p in i['projects']:
+      warnings[p][s] += i['projects'][p]
+  return warnings
+
+
+def get_total_by_project(warnings, project_names):
+  """Returns dict, project as key and # warnings for that project as value."""
+  # pylint:disable=g-complex-comprehension
+  return {
+      p: sum(warnings[p][s.value] for s in Severity.levels)
+      for p in project_names
+  }
+
+
+def get_total_by_severity(warnings, project_names):
+  """Returns dict, severity as key and # warnings of that severity as value."""
+  # pylint:disable=g-complex-comprehension
+  return {
+      s.value: sum(warnings[p][s.value] for p in project_names)
+      for s in Severity.levels
+  }
+
+
+def emit_table_header(total_by_severity):
+  """Returns list of HTML-formatted content for severity stats."""
+
+  stats_header = ['Project']
+  for s in Severity.levels:
+    if total_by_severity[s.value]:
+      stats_header.append(
+          '<span style=\'background-color:{}\'>{}</span>'.format(
+              s.color, s.column_header))
+  stats_header.append('TOTAL')
+  return stats_header
+
+
+def emit_row_counts_per_project(warnings, total_by_project, total_by_severity,
+                                project_names):
+  """Returns total project warnings and row of stats for each project.
+
+  Args:
+    warnings: output of create_warnings(warn_patterns, project_names)
+    total_by_project: output of get_total_by_project(warnings, project_names)
+    total_by_severity: output of get_total_by_severity(warnings, project_names)
+    project_names: list of project names
+
+  Returns:
+    total_all_projects, the total number of warnings over all projects
+    stats_rows, a 2d list where each row is [Project Name, <severity counts>,
+    total # warnings for this project]
+  """
+
+  total_all_projects = 0
+  stats_rows = []
+  for p in project_names:
+    if total_by_project[p]:
+      one_row = [p]
+      for s in Severity.levels:
+        if total_by_severity[s.value]:
+          one_row.append(warnings[p][s.value])
+      one_row.append(total_by_project[p])
+      stats_rows.append(one_row)
+      total_all_projects += total_by_project[p]
+  return total_all_projects, stats_rows
+
+
+def emit_row_counts_per_severity(total_by_severity, stats_header, stats_rows,
+                                 total_all_projects, writer):
+  """Emits stats_header and stats_rows as specified above.
+
+  Args:
+    total_by_severity: output of get_total_by_severity()
+    stats_header: output of emit_table_header()
+    stats_rows: output of emit_row_counts_per_project()
+    total_all_projects: output of emit_row_counts_per_project()
+    writer: writer returned by make_writer(output_stream)
+  """
+
+  total_all_severities = 0
+  one_row = ['<b>TOTAL</b>']
+  for s in Severity.levels:
+    if total_by_severity[s.value]:
+      one_row.append(total_by_severity[s.value])
+      total_all_severities += total_by_severity[s.value]
+  one_row.append(total_all_projects)
+  stats_rows.append(one_row)
+  writer('<script>')
+  emit_const_string_array('StatsHeader', stats_header, writer)
+  emit_const_object_array('StatsRows', stats_rows, writer)
+  writer(draw_table_javascript)
+  writer('</script>')
+
+
+def emit_stats_by_project(writer, warn_patterns, project_names):
+  """Dump a google chart table of warnings per project and severity."""
+
+  warnings = create_warnings(warn_patterns, project_names)
+  total_by_project = get_total_by_project(warnings, project_names)
+  total_by_severity = get_total_by_severity(warnings, project_names)
+  stats_header = emit_table_header(total_by_severity)
+  total_all_projects, stats_rows = \
+    emit_row_counts_per_project(warnings, total_by_project, total_by_severity, project_names)
+  emit_row_counts_per_severity(total_by_severity, stats_header, stats_rows,
+                               total_all_projects, writer)
+
+
+def dump_stats(writer, warn_patterns):
+  """Dump some stats about total number of warnings and such."""
+
+  known = 0
+  skipped = 0
+  unknown = 0
+  sort_warnings(warn_patterns)
+  for i in warn_patterns:
+    if i['severity'] == Severity.UNMATCHED:
+      unknown += len(i['members'])
+    elif i['severity'] == Severity.SKIP:
+      skipped += len(i['members'])
+    else:
+      known += len(i['members'])
+  writer('Number of classified warnings: <b>' + str(known) + '</b><br>')
+  writer('Number of skipped warnings: <b>' + str(skipped) + '</b><br>')
+  writer('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
+  total = unknown + known + skipped
+  extra_msg = ''
+  if total < 1000:
+    extra_msg = ' (low count may indicate incremental build)'
+  writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
+
+
+# New base table of warnings, [severity, warn_id, project, warning_message]
+# Need buttons to show warnings in different grouping options.
+# (1) Current, group by severity, id for each warning pattern
+#     sort by severity, warn_id, warning_message
+# (2) Current --byproject, group by severity,
+#     id for each warning pattern + project name
+#     sort by severity, warn_id, project, warning_message
+# (3) New, group by project + severity,
+#     id for each warning pattern
+#     sort by project, severity, warn_id, warning_message
+def emit_buttons(writer):
+  writer('<button class="button" onclick="expandCollapse(1);">'
+         'Expand all warnings</button>\n'
+         '<button class="button" onclick="expandCollapse(0);">'
+         'Collapse all warnings</button>\n'
+         '<button class="button" onclick="groupBySeverity();">'
+         'Group warnings by severity</button>\n'
+         '<button class="button" onclick="groupByProject();">'
+         'Group warnings by project</button><br>')
+
+
+def all_patterns(category):
+  patterns = ''
+  for i in category['patterns']:
+    patterns += i
+    patterns += ' / '
+  return patterns
+
+
+def dump_fixed(writer, warn_patterns):
+  """Show which warnings no longer occur."""
+  anchor = 'fixed_warnings'
+  mark = anchor + '_mark'
+  writer('\n<br><p style="background-color:lightblue"><b>'
+         '<button id="' + mark + '" '
+         'class="bt" onclick="expand(\'' + anchor + '\');">'
+         '&#x2295</button> Fixed warnings. '
+         'No more occurrences. Please consider turning these into '
+         'errors if possible, before they are reintroduced into the build'
+         ':</b></p>')
+  writer('<blockquote>')
+  fixed_patterns = []
+  for i in warn_patterns:
+    if not i['members']:
+      fixed_patterns.append(i['description'] + ' (' + all_patterns(i) + ')')
+  fixed_patterns = sorted(fixed_patterns)
+  writer('<div id="' + anchor + '" style="display:none;"><table>')
+  cur_row_class = 0
+  for text in fixed_patterns:
+    cur_row_class = 1 - cur_row_class
+    # remove last '\n'
+    t = text[:-1] if text[-1] == '\n' else text
+    writer('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
+  writer('</table></div>')
+  writer('</blockquote>')
+
+
+def write_severity(csvwriter, sev, kind, warn_patterns):
+  """Count warnings of given severity and write CSV entries to writer."""
+  total = 0
+  for pattern in warn_patterns:
+    if pattern['severity'] == sev and pattern['members']:
+      n = len(pattern['members'])
+      total += n
+      warning = kind + ': ' + (pattern['description'] or '?')
+      csvwriter.writerow([n, '', warning])
+      # print number of warnings for each project, ordered by project name
+      projects = sorted(pattern['projects'].keys())
+      for project in projects:
+        csvwriter.writerow([pattern['projects'][project], project, warning])
+  csvwriter.writerow([total, '', kind + ' warnings'])
+  return total
+
+
+def dump_csv(csvwriter, warn_patterns):
+  """Dump number of warnings in CSV format to writer."""
+  sort_warnings(warn_patterns)
+  total = 0
+  for s in Severity.levels:
+    total += write_severity(csvwriter, s, s.column_header, warn_patterns)
+  csvwriter.writerow([total, '', 'All warnings'])
+
+
+# Return s with escaped backslash and quotation characters.
+def escape_string(s):
+  return s.replace('\\', '\\\\').replace('"', '\\"')
+
+
+# Return s without trailing '\n' and escape the quotation characters.
+def strip_escape_string(s):
+  if not s:
+    return s
+  s = s[:-1] if s[-1] == '\n' else s
+  return escape_string(s)
+
+
+def emit_warning_array(name, writer, warn_patterns):
+  writer('var warning_{} = ['.format(name))
+  for w in warn_patterns:
+    if name == 'severity':
+      writer('{},'.format(w[name].value))
+    else:
+      writer('{},'.format(w[name]))
+  writer('];')
+
+
+def emit_warning_arrays(writer, warn_patterns):
+  emit_warning_array('severity', writer, warn_patterns)
+  writer('var warning_description = [')
+  for w in warn_patterns:
+    if w['members']:
+      writer('"{}",'.format(escape_string(w['description'])))
+    else:
+      writer('"",')  # no such warning
+  writer('];')
+
+
+scripts_for_warning_groups = """
+  function compareMessages(x1, x2) { // of the same warning type
+    return (WarningMessages[x1[2]] <= WarningMessages[x2[2]]) ? -1 : 1;
+  }
+  function byMessageCount(x1, x2) {
+    return x2[2] - x1[2];  // reversed order
+  }
+  function bySeverityMessageCount(x1, x2) {
+    // order by severity first
+    if (x1[1] != x2[1])
+      return  x1[1] - x2[1];
+    return byMessageCount(x1, x2);
+  }
+  const ParseLinePattern = /^([^ :]+):(\\d+):(.+)/;
+  function addURL(line) { // used by Android
+    if (FlagURL == "") return line;
+    if (FlagSeparator == "") {
+      return line.replace(ParseLinePattern,
+        "<a target='_blank' href='" + FlagURL + "/$1'>$1</a>:$2:$3");
+    }
+    return line.replace(ParseLinePattern,
+      "<a target='_blank' href='" + FlagURL + "/$1" + FlagSeparator +
+        "$2'>$1:$2</a>:$3");
+  }
+  function addURLToLine(line, link) { // used by Chrome
+      let line_split = line.split(":");
+      let path = line_split.slice(0,3).join(":");
+      let msg = line_split.slice(3).join(":");
+      let html_link = `<a target="_blank" href="${link}">${path}</a>${msg}`;
+      return html_link;
+  }
+  function createArrayOfDictionaries(n) {
+    var result = [];
+    for (var i=0; i<n; i++) result.push({});
+    return result;
+  }
+  function groupWarningsBySeverity() {
+    // groups is an array of dictionaries,
+    // each dictionary maps from warning type to array of warning messages.
+    var groups = createArrayOfDictionaries(SeverityColors.length);
+    for (var i=0; i<Warnings.length; i++) {
+      var w = Warnings[i][0];
+      var s = WarnPatternsSeverity[w];
+      var k = w.toString();
+      if (!(k in groups[s]))
+        groups[s][k] = [];
+      groups[s][k].push(Warnings[i]);
+    }
+    return groups;
+  }
+  function groupWarningsByProject() {
+    var groups = createArrayOfDictionaries(ProjectNames.length);
+    for (var i=0; i<Warnings.length; i++) {
+      var w = Warnings[i][0];
+      var p = Warnings[i][1];
+      var k = w.toString();
+      if (!(k in groups[p]))
+        groups[p][k] = [];
+      groups[p][k].push(Warnings[i]);
+    }
+    return groups;
+  }
+  var GlobalAnchor = 0;
+  function createWarningSection(header, color, group) {
+    var result = "";
+    var groupKeys = [];
+    var totalMessages = 0;
+    for (var k in group) {
+       totalMessages += group[k].length;
+       groupKeys.push([k, WarnPatternsSeverity[parseInt(k)], group[k].length]);
+    }
+    groupKeys.sort(bySeverityMessageCount);
+    for (var idx=0; idx<groupKeys.length; idx++) {
+      var k = groupKeys[idx][0];
+      var messages = group[k];
+      var w = parseInt(k);
+      var wcolor = SeverityColors[WarnPatternsSeverity[w]];
+      var description = WarnPatternsDescription[w];
+      if (description.length == 0)
+          description = "???";
+      GlobalAnchor += 1;
+      result += "<table class='t1'><tr bgcolor='" + wcolor + "'><td>" +
+                "<button class='bt' id='" + GlobalAnchor + "_mark" +
+                "' onclick='expand(\\"" + GlobalAnchor + "\\");'>" +
+                "&#x2295</button> " +
+                description + " (" + messages.length + ")</td></tr></table>";
+      result += "<div id='" + GlobalAnchor +
+                "' style='display:none;'><table class='t1'>";
+      var c = 0;
+      messages.sort(compareMessages);
+      if (FlagPlatform == "chrome") {
+        for (var i=0; i<messages.length; i++) {
+          result += "<tr><td class='c" + c + "'>" +
+                    addURLToLine(WarningMessages[messages[i][2]], WarningLinks[messages[i][3]]) + "</td></tr>";
+          c = 1 - c;
+        }
+      } else {
+        for (var i=0; i<messages.length; i++) {
+          result += "<tr><td class='c" + c + "'>" +
+                    addURL(WarningMessages[messages[i][2]]) + "</td></tr>";
+          c = 1 - c;
+        }
+      }
+      result += "</table></div>";
+    }
+    if (result.length > 0) {
+      return "<br><span style='background-color:" + color + "'><b>" +
+             header + ": " + totalMessages +
+             "</b></span><blockquote><table class='t1'>" +
+             result + "</table></blockquote>";
+
+    }
+    return "";  // empty section
+  }
+  function generateSectionsBySeverity() {
+    var result = "";
+    var groups = groupWarningsBySeverity();
+    for (s=0; s<SeverityColors.length; s++) {
+      result += createWarningSection(SeverityHeaders[s], SeverityColors[s],
+                                     groups[s]);
+    }
+    return result;
+  }
+  function generateSectionsByProject() {
+    var result = "";
+    var groups = groupWarningsByProject();
+    for (i=0; i<groups.length; i++) {
+      result += createWarningSection(ProjectNames[i], 'lightgrey', groups[i]);
+    }
+    return result;
+  }
+  function groupWarnings(generator) {
+    GlobalAnchor = 0;
+    var e = document.getElementById("warning_groups");
+    e.innerHTML = generator();
+  }
+  function groupBySeverity() {
+    groupWarnings(generateSectionsBySeverity);
+  }
+  function groupByProject() {
+    groupWarnings(generateSectionsByProject);
+  }
+"""
+
+
+# Emit a JavaScript const string
+def emit_const_string(name, value, writer):
+  writer('const ' + name + ' = "' + escape_string(value) + '";')
+
+
+# Emit a JavaScript const integer array.
+def emit_const_int_array(name, array, writer):
+  writer('const ' + name + ' = [')
+  for n in array:
+    writer(str(n) + ',')
+  writer('];')
+
+
+# Emit a JavaScript const string array.
+def emit_const_string_array(name, array, writer):
+  writer('const ' + name + ' = [')
+  for s in array:
+    writer('"' + strip_escape_string(s) + '",')
+  writer('];')
+
+
+# Emit a JavaScript const string array for HTML.
+def emit_const_html_string_array(name, array, writer):
+  writer('const ' + name + ' = [')
+  for s in array:
+    # Not using html.escape yet, to work for both python 2 and 3,
+    # until all users switch to python 3.
+    # pylint:disable=deprecated-method
+    writer('"' + cgi.escape(strip_escape_string(s)) + '",')
+  writer('];')
+
+
+# Emit a JavaScript const object array.
+def emit_const_object_array(name, array, writer):
+  writer('const ' + name + ' = [')
+  for x in array:
+    writer(str(x) + ',')
+  writer('];')
+
+
+def emit_js_data(writer, flags, warning_messages, warning_links,
+                 warning_records, warn_patterns, project_names):
+  """Dump dynamic HTML page's static JavaScript data."""
+  emit_const_string('FlagPlatform', flags.platform, writer)
+  emit_const_string('FlagURL', flags.url, writer)
+  emit_const_string('FlagSeparator', flags.separator, writer)
+  emit_const_string_array('SeverityColors', [s.color for s in Severity.levels],
+                          writer)
+  emit_const_string_array('SeverityHeaders',
+                          [s.header for s in Severity.levels], writer)
+  emit_const_string_array('SeverityColumnHeaders',
+                          [s.column_header for s in Severity.levels], writer)
+  emit_const_string_array('ProjectNames', project_names, writer)
+  # pytype: disable=attribute-error
+  emit_const_int_array('WarnPatternsSeverity',
+                       [w['severity'].value for w in warn_patterns], writer)
+  # pytype: enable=attribute-error
+  emit_const_html_string_array('WarnPatternsDescription',
+                               [w['description'] for w in warn_patterns],
+                               writer)
+  emit_const_html_string_array('WarningMessages', warning_messages, writer)
+  emit_const_object_array('Warnings', warning_records, writer)
+  if flags.platform == 'chrome':
+    emit_const_html_string_array('WarningLinks', warning_links, writer)
+
+
+draw_table_javascript = """
+google.charts.load('current', {'packages':['table']});
+google.charts.setOnLoadCallback(drawTable);
+function drawTable() {
+  var data = new google.visualization.DataTable();
+  data.addColumn('string', StatsHeader[0]);
+  for (var i=1; i<StatsHeader.length; i++) {
+    data.addColumn('number', StatsHeader[i]);
+  }
+  data.addRows(StatsRows);
+  for (var i=0; i<StatsRows.length; i++) {
+    for (var j=0; j<StatsHeader.length; j++) {
+      data.setProperty(i, j, 'style', 'border:1px solid black;');
+    }
+  }
+  var table = new google.visualization.Table(
+      document.getElementById('stats_table'));
+  table.draw(data, {allowHtml: true, alternatingRowStyle: true});
+}
+"""
+
+
+def dump_html(flags, output_stream, warning_messages, warning_links,
+              warning_records, header_str, warn_patterns, project_names):
+  """Dump the flags output to output_stream."""
+  writer = make_writer(output_stream)
+  dump_html_prologue('Warnings for ' + header_str, writer, warn_patterns,
+                     project_names)
+  dump_stats(writer, warn_patterns)
+  writer('<br><div id="stats_table"></div><br>')
+  writer('\n<script>')
+  emit_js_data(writer, flags, warning_messages, warning_links, warning_records,
+               warn_patterns, project_names)
+  writer(scripts_for_warning_groups)
+  writer('</script>')
+  emit_buttons(writer)
+  # Warning messages are grouped by severities or project names.
+  writer('<br><div id="warning_groups"></div>')
+  if flags.byproject:
+    writer('<script>groupByProject();</script>')
+  else:
+    writer('<script>groupBySeverity();</script>')
+  dump_fixed(writer, warn_patterns)
+  dump_html_epilogue(writer)
+
+
+def write_html(flags, project_names, warn_patterns, html_path, warning_messages,
+               warning_links, warning_records, header_str):
+  """Write warnings html file."""
+  if html_path:
+    with open(html_path, 'w') as f:
+      dump_html(flags, f, warning_messages, warning_links, warning_records,
+                header_str, warn_patterns, project_names)
+
+
+def write_out_csv(flags, warn_patterns, warning_messages, warning_links,
+                  warning_records, header_str, project_names):
+  """Write warnings csv file."""
+  if flags.csvpath:
+    with open(flags.csvpath, 'w') as f:
+      dump_csv(csv.writer(f, lineterminator='\n'), warn_patterns)
+
+  if flags.gencsv:
+    dump_csv(csv.writer(sys.stdout, lineterminator='\n'), warn_patterns)
+  else:
+    dump_html(flags, sys.stdout, warning_messages, warning_links,
+              warning_records, header_str, warn_patterns, project_names)
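
For a quick sense of what the emitter helpers above produce, here is a hedged usage sketch: it assumes the module is importable as tools.warn.html_writer (the path suggested by this patch) and only exercises make_writer and emit_const_string_array against an in-memory stream.

    # Usage sketch; the import path is an assumption based on this patch's layout.
    import io
    from tools.warn.html_writer import make_writer, emit_const_string_array

    buf = io.StringIO()
    writer = make_writer(buf)  # writer(text) appends text plus a newline
    emit_const_string_array('SeverityHeaders', ['Errors', 'Warnings\n'], writer)
    print(buf.getvalue())
    # Emits, with trailing newlines stripped and quotes escaped:
    #   const SeverityHeaders = [
    #   "Errors",
    #   "Warnings",
    #   ];
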
diff --git a/tools/warn/java_warn_patterns.py b/tools/warn/java_warn_patterns.py
index 80e2e1d..17e3864 100644
--- a/tools/warn/java_warn_patterns.py
+++ b/tools/warn/java_warn_patterns.py
@@ -16,8 +16,8 @@
 """Warning patterns for Java compiler tools."""
 
 # pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
 # pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
 
@@ -485,16 +485,24 @@
     java_medium('Static method should be qualified',
                 [r'.*\.java:.*: warning: \[static\] static method should be qualified']),
     medium('AbstractInner'),
+    medium('BothPackageInfoAndHtml'),
     medium('CallbackName'),
     medium('ExecutorRegistration'),
+    medium('HiddenTypeParameter'),
     medium('JavaApiUsedByMainlineModule'),
     medium('ListenerLast'),
+    medium('MinMaxConstant'),
     medium('MissingBuildMethod'),
     medium('NoByteOrShort'),
     medium('OverlappingConstants'),
     medium('SetterReturnsThis'),
+    medium('StreamFiles'),
     medium('Typo'),
     medium('UseIcu'),
+    medium('fallthrough'),
+    medium('overrides'),
+    medium('serial'),
+    medium('try'),
     high('AndroidInjectionBeforeSuper',
          'AndroidInjection.inject() should always be invoked before calling super.lifecycleMethod()'),
     high('AndroidJdkLibsChecker',
@@ -783,6 +791,8 @@
     # Other javac tool warnings
     java_medium('addNdkApiCoverage failed to getPackage',
                 [r".*: warning: addNdkApiCoverage failed to getPackage"]),
+    java_medium('bad path element',
+                [r".*: warning: \[path\] bad path element .*\.jar"]),
     java_medium('Supported version from annotation processor',
                 [r".*: warning: Supported source version .+ from annotation processor"]),
 ]
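
As a sanity check of the new [path] pattern, the snippet below matches it against an invented javac warning line using plain re; the warn tool itself compiles these patterns via compile_patterns.

    # Illustrative only; the sample compiler line is made up.
    import re

    pattern = r".*: warning: \[path\] bad path element .*\.jar"
    line = ('frameworks/base/foo/Bar.java:1: warning: [path] bad path element '
            '"out/missing/lib.jar": no such file or directory')
    print(bool(re.match(pattern, line)))  # True
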
diff --git a/tools/warn/make_warn_patterns.py b/tools/warn/make_warn_patterns.py
index dd6a1b0..4b20493 100644
--- a/tools/warn/make_warn_patterns.py
+++ b/tools/warn/make_warn_patterns.py
@@ -16,8 +16,8 @@
 """Warning patterns for build make tools."""
 
 # pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
 # pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
 warn_patterns = [
diff --git a/tools/warn/other_warn_patterns.py b/tools/warn/other_warn_patterns.py
index 1350936..318c3d4 100644
--- a/tools/warn/other_warn_patterns.py
+++ b/tools/warn/other_warn_patterns.py
@@ -16,8 +16,8 @@
 """Warning patterns from other tools."""
 
 # pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
 # pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
 
@@ -42,14 +42,20 @@
   return warn('asm', Severity.MEDIUM, description, pattern_list)
 
 
-def kotlin(description, pattern_list):
-  return warn('Kotlin', Severity.MEDIUM, description, pattern_list)
+def kotlin(description, pattern):
+  return warn('Kotlin', Severity.MEDIUM, description,
+              [r'.*\.kt:.*: warning: ' + pattern])
 
 
 def yacc(description, pattern_list):
   return warn('yacc', Severity.MEDIUM, description, pattern_list)
 
 
+def rust(severity, description, pattern):
+  return warn('Rust', severity, description,
+              [r'.*\.rs:.*: warning: ' + pattern])
+
+
 warn_patterns = [
     # pylint:disable=line-too-long,g-inconsistent-quotes
     # aapt warnings
@@ -109,26 +115,31 @@
      'description': 'Proto: Import not used',
      'patterns': [r".*: warning: Import .*/.*\.proto but not used.$"]},
     # Kotlin warnings
-    kotlin('never used parameter or variable',
-           [r".*\.kt:.*: warning: (parameter|variable) '.*' is never used$",
-            r".*\.kt:.*: warning: (parameter|variable) '.*' is never used, could be renamed to _$"]),
-    kotlin('initializer is redundant',
-           [r".*\.kt:.*: warning: .* initializer is redundant$"]),
+    kotlin('never used parameter or variable', '.+ \'.*\' is never used'),
+    kotlin('multiple labels', '.+ more than one label .+ in this scope'),
+    kotlin('type mismatch', 'type mismatch: '),
+    kotlin('is always true', '.+ is always \'true\''),
+    kotlin('no effect', '.+ annotation has no effect for '),
+    kotlin('no cast needed', 'no cast needed'),
+    kotlin('accessor not generated', 'an accessor will not be generated '),
+    kotlin('initializer is redundant', '.* initializer is redundant$'),
     kotlin('elvis operator always returns ...',
-           [r".*\.kt:.*: warning: elvis operator \(\?:\) always returns .+"]),
-    kotlin('shadowed name',
-           [r".*\.kt:.*: warning: name shadowed: .+"]),
-    kotlin('unchecked cast',
-           [r".*\.kt:.*: warning: unchecked cast: .* to .*$"]),
+           'elvis operator \\(\\?:\\) always returns .+'),
+    kotlin('shadowed name', 'name shadowed: .+'),
+    kotlin('unchecked cast', 'unchecked cast: .* to .*$'),
+    kotlin('unreachable code', 'unreachable code'),
+    kotlin('unnecessary assertion', 'unnecessary .+ assertion .+'),
     kotlin('unnecessary safe call on a non-null receiver',
-           [r".*\.kt:.*: warning: unnecessary safe call on a non-null receiver"]),
+           'unnecessary safe call on a non-null receiver'),
     kotlin('Deprecated in Java',
-           [r".*\.kt:.*: warning: '.*' is deprecated. Deprecated in Java"]),
+           '\'.*\' is deprecated. Deprecated in Java'),
     kotlin('Replacing Handler for Executor',
-           [r".*\.kt:.*: warning: .+ Replacing Handler for Executor in "]),
+           '.+ Replacing Handler for Executor in '),
     kotlin('library has Kotlin runtime',
-           [r".*: warning: library has Kotlin runtime bundled into it",
-            r".*: warning: some JAR files .* have the Kotlin Runtime library"]),
+           '.+ has Kotlin runtime (bundled|library)'),
+    warn('Kotlin', Severity.MEDIUM, 'bundled Kotlin runtime',
+         ['.*warning: .+ (has|have the) Kotlin (runtime|Runtime library) bundled']),
+    kotlin('other warnings', '.+'),  # catch all other Kotlin warnings
     # Yacc warnings
     yacc('deprecate directive',
          [r".*\.yy?:.*: warning: deprecated directive: "]),
@@ -138,15 +149,20 @@
      'description': 'yacc: fix-its can be applied',
      'patterns': [r".*\.yy?: warning: fix-its can be applied."]},
     # Rust warnings
-    {'category': 'Rust', 'severity': Severity.HIGH,
-     'description': 'Rust: Does not derive Copy',
-     'patterns': [r".*: warning: .+ does not derive Copy"]},
-    {'category': 'Rust', 'severity': Severity.MEDIUM,
-     'description': 'Rust: Deprecated range pattern',
-     'patterns': [r".*: warning: .+ range patterns are deprecated"]},
-    {'category': 'Rust', 'severity': Severity.MEDIUM,
-     'description': 'Rust: Deprecated missing explicit \'dyn\'',
-     'patterns': [r".*: warning: .+ without an explicit `dyn` are deprecated"]},
+    rust(Severity.HIGH, 'Does not derive Copy', '.+ does not derive Copy'),
+    rust(Severity.MEDIUM, '... are deprecated',
+         ('(.+ are deprecated$|' +
+          'use of deprecated item .* (use .* instead|is now preferred))')),
+    rust(Severity.MEDIUM, 'never used', '.* is never used:'),
+    rust(Severity.MEDIUM, 'unused import', 'unused import: '),
+    rust(Severity.MEDIUM, 'unnecessary attribute',
+         '.+ no longer requires an attribute'),
+    rust(Severity.MEDIUM, 'unnecessary parentheses',
+         'unnecessary parentheses around'),
+    # Catch all RenderScript warnings
+    {'category': 'RenderScript', 'severity': Severity.LOW,
+     'description': 'RenderScript warnings',
+     'patterns': [r'.*\.rscript:.*: warning: ']},
     # Broken/partial warning messages will be skipped.
     {'category': 'Misc', 'severity': Severity.SKIP,
      'description': 'skip, ,',
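The new kotlin() and rust() helpers above fold the common 'file:line: warning:' prefix into each entry, so individual patterns only describe the message body. A minimal standalone sketch of that expansion, assuming warn() builds the same dict shape as the literal entries in this file (Severity is replaced by a plain string here to keep the sketch self-contained):

import re

def warn(name, severity, description, pattern_list):
  # Assumed shape of the module's warn() helper, mirroring the literal
  # {'category', 'severity', 'description', 'patterns'} entries above.
  return {'category': name, 'severity': severity,
          'description': description, 'patterns': pattern_list}

def kotlin(description, pattern):
  # Same prefixing as the refactored helper above.
  return warn('Kotlin', 'MEDIUM', description,
              [r'.*\.kt:.*: warning: ' + pattern])

entry = kotlin('unchecked cast', 'unchecked cast: .* to .*$')
line = 'Foo.kt:12:3: warning: unchecked cast: Any to String'
print(any(re.match(p, line) for p in entry['patterns']))  # True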
diff --git a/tools/warn/severity.py b/tools/warn/severity.py
index b1c38e4..b4c03c9 100644
--- a/tools/warn/severity.py
+++ b/tools/warn/severity.py
@@ -20,24 +20,26 @@
 
 
 # pylint:disable=old-style-class
+class SeverityInfo:
+
+  def __init__(self, value, color, column_header, header):
+    self.value = value
+    self.color = color
+    self.column_header = column_header
+    self.header = header
+
+
+# pylint:disable=old-style-class
 class Severity:
   """Class of Severity levels where each level is a SeverityInfo."""
 
-  class SeverityInfo:
-
-    def __init__(self, value, color, column_header, header):
-      self.value = value
-      self.color = color
-      self.column_header = column_header
-      self.header = header
-
   # SEVERITY_UNKNOWN should never occur since every warn_pattern listed has
   # a specified severity. It exists for protobuf, the other values must
   # map to non-zero values (since 0 is reserved for a default UNKNOWN), but
   # logic in clang_tidy_warn.py assumes severity level values are consecutive
   # ints starting with 0.
-  SEVERITY_UNKNOWN = SeverityInfo(0, 'blueviolet', 'Errors of unknown severity',
-                                  'Unknown severity (should not occur)')
+  SEVERITY_UNKNOWN = SeverityInfo(0, 'blueviolet', 'Unknown',
+                                  'Unknown-severity warnings')
   FIXMENOW = SeverityInfo(1, 'fuschia', 'FixNow',
                           'Critical warnings, fix me now')
   HIGH = SeverityInfo(2, 'red', 'High', 'High severity warnings')
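The comment above is the key constraint behind this refactoring: severity values must stay consecutive integers starting at 0 so they can index arrays, while protobuf reserves 0 for an UNKNOWN default. A small self-contained sketch of the module-level SeverityInfo and of indexing by value (the levels list here is an illustrative subset, not the full set defined in severity.py):

class SeverityInfo:
  def __init__(self, value, color, column_header, header):
    self.value = value
    self.color = color
    self.column_header = column_header
    self.header = header

UNKNOWN = SeverityInfo(0, 'blueviolet', 'Unknown', 'Unknown-severity warnings')
HIGH = SeverityInfo(2, 'red', 'High', 'High severity warnings')
levels = [UNKNOWN, HIGH]  # illustrative subset of severity.py's levels

# Consecutive values allow direct array indexing, e.g. per-severity counters.
counts = [0] * (max(s.value for s in levels) + 1)
counts[HIGH.value] += 1
print(counts)  # [0, 0, 1]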
diff --git a/tools/warn/tidy_warn_patterns.py b/tools/warn/tidy_warn_patterns.py
index 2c5ab79..5416cb2 100644
--- a/tools/warn/tidy_warn_patterns.py
+++ b/tools/warn/tidy_warn_patterns.py
@@ -16,8 +16,8 @@
 """Warning patterns for clang-tidy."""
 
 # pylint:disable=relative-beyond-top-level
-from .cpp_warn_patterns import compile_patterns
 # pylint:disable=g-importing-member
+from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
 
@@ -78,6 +78,7 @@
     group_tidy_warn_pattern('android'),
     simple_tidy_warn_pattern('abseil-string-find-startswith'),
     simple_tidy_warn_pattern('bugprone-argument-comment'),
+    simple_tidy_warn_pattern('bugprone-branch-clone'),
     simple_tidy_warn_pattern('bugprone-copy-constructor-init'),
     simple_tidy_warn_pattern('bugprone-fold-init-type'),
     simple_tidy_warn_pattern('bugprone-forward-declaration-namespace'),
@@ -89,6 +90,9 @@
     simple_tidy_warn_pattern('bugprone-macro-parentheses'),
     simple_tidy_warn_pattern('bugprone-misplaced-widening-cast'),
     simple_tidy_warn_pattern('bugprone-move-forwarding-reference'),
+    simple_tidy_warn_pattern('bugprone-parent-virtual-call'),
+    simple_tidy_warn_pattern('bugprone-posix-return'),
+    simple_tidy_warn_pattern('bugprone-sizeof-container'),
     simple_tidy_warn_pattern('bugprone-sizeof-expression'),
     simple_tidy_warn_pattern('bugprone-string-constructor'),
     simple_tidy_warn_pattern('bugprone-string-integer-assignment'),
@@ -96,10 +100,25 @@
     simple_tidy_warn_pattern('bugprone-suspicious-missing-comma'),
     simple_tidy_warn_pattern('bugprone-suspicious-string-compare'),
     simple_tidy_warn_pattern('bugprone-suspicious-semicolon'),
+    simple_tidy_warn_pattern('bugprone-terminating-continue'),
+    simple_tidy_warn_pattern('bugprone-too-small-loop-variable'),
     simple_tidy_warn_pattern('bugprone-undefined-memory-manipulation'),
+    simple_tidy_warn_pattern('bugprone-unhandled-self-assignment'),
     simple_tidy_warn_pattern('bugprone-unused-raii'),
+    simple_tidy_warn_pattern('bugprone-unused-return-value'),
     simple_tidy_warn_pattern('bugprone-use-after-move'),
     group_tidy_warn_pattern('bugprone'),
+    simple_tidy_warn_pattern('cert-dcl16-c'),
+    simple_tidy_warn_pattern('cert-dcl21-cpp'),
+    simple_tidy_warn_pattern('cert-dcl50-cpp'),
+    simple_tidy_warn_pattern('cert-dcl54-cpp'),
+    simple_tidy_warn_pattern('cert-dcl59-cpp'),
+    simple_tidy_warn_pattern('cert-env33-c'),
+    simple_tidy_warn_pattern('cert-err34-c'),
+    simple_tidy_warn_pattern('cert-err52-cpp'),
+    simple_tidy_warn_pattern('cert-msc30-c'),
+    simple_tidy_warn_pattern('cert-msc50-cpp'),
+    simple_tidy_warn_pattern('cert-oop54-cpp'),
     group_tidy_warn_pattern('cert'),
     group_tidy_warn_pattern('clang-diagnostic'),
     group_tidy_warn_pattern('cppcoreguidelines'),
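clang-tidy diagnostics end with the check name in brackets, e.g. 'foo.cc:10:5: warning: ... [bugprone-branch-clone]', which is what the per-check entries added above key on. The builder below is a stand-in sketch, not the module's simple_tidy_warn_pattern, but it shows the matching idea:

import re

def tidy_pattern(check_name):
  # Stand-in for a per-check clang-tidy entry; the real helper lives in
  # tidy_warn_patterns.py and may differ in detail.
  return {'category': 'C/C++', 'severity': 'TIDY',
          'description': 'clang-tidy ' + check_name,
          'patterns': [r'.*: .+\[' + check_name + r'.*\]$']}

entry = tidy_pattern('bugprone-branch-clone')
line = ('foo.cc:10:5: warning: repeated branch in conditional chain '
        '[bugprone-branch-clone]')
print(any(re.match(p, line) for p in entry['patterns']))  # True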
diff --git a/tools/warn/warn.py b/tools/warn/warn.py
index bdfd489..56e8787 100755
--- a/tools/warn/warn.py
+++ b/tools/warn/warn.py
@@ -17,21 +17,51 @@
 """Simple wrapper to run warn_common with Python standard Pool."""
 
 import multiprocessing
+import signal
+import sys
 
 # pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
-from .warn_common import common_main
+from . import warn_common as common
 
 
-# This parallel_process could be changed depending on platform
-# and availability of multi-process library functions.
-def parallel_process(num_cpu, classify_warnings, groups):
+def classify_warnings(args):
+  """Classify a list of warning lines.
+
+  Args:
+    args: dictionary {
+        'group': list of (warning, link),
+        'project_patterns': re.compile(project_list[p][1]),
+        'warn_patterns': list of warn_pattern,
+        'num_processes': number of processes being used for multiprocessing }
+  Returns:
+    results: a list of the classified warnings.
+  """
+  results = []
+  for line, link in args['group']:
+    common.classify_one_warning(line, link, results, args['project_patterns'],
+                                args['warn_patterns'])
+
+  # After the main work, ignore all other signals to a child process,
+  # to avoid bad warning/error messages from the exit clean-up process.
+  if args['num_processes'] > 1:
+    signal.signal(signal.SIGTERM, lambda *args: sys.exit(-signal.SIGTERM))
+  return results
+
+
+def create_and_launch_subprocesses(num_cpu, classify_warnings_fn, arg_groups,
+                                   group_results):
   pool = multiprocessing.Pool(num_cpu)
-  return pool.map(classify_warnings, groups)
+  for cpu in range(num_cpu):
+    proc_result = pool.map(classify_warnings_fn, arg_groups[cpu])
+    if proc_result is not None:
+      group_results.append(proc_result)
+  return group_results
 
 
 def main():
-  common_main(parallel_process)
+  use_google3 = False
+  common.common_main(use_google3, create_and_launch_subprocesses,
+                     classify_warnings)
 
 
 if __name__ == '__main__':
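The docstring above spells out the args dictionary each pool worker receives; below is a toy, self-contained sketch of the same split-into-groups / Pool.map flow (the classifier here is a stand-in, not the real classify_one_warning):

import multiprocessing

def classify(args):
  # Stand-in classifier: the real one matches warn_patterns/project_patterns.
  return [w.upper() for w, _link in args['group']]

def run(num_cpu, pairs):
  warning_groups = [pairs[i::num_cpu] for i in range(num_cpu)]
  arg_groups = [[{'group': g, 'num_processes': num_cpu}]
                for g in warning_groups]
  pool = multiprocessing.Pool(num_cpu)
  group_results = []
  for cpu in range(num_cpu):
    group_results.append(pool.map(classify, arg_groups[cpu]))
  return group_results

if __name__ == '__main__':
  print(run(2, [('warning: a', 'link1'), ('warning: b', 'link2')]))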
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 0c9d9ef..68ed995 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -18,6 +18,8 @@
 Default is to output warnings in HTML tables grouped by warning severity.
 Use option --byproject to output tables grouped by source file projects.
 Use option --gencsv to output warning counts in CSV format.
+
+Default input file is build.log, which can be changed with the --log flag.
 """
 
 # List of important data structures and functions in this script.
@@ -36,431 +38,203 @@
 #   project_patterns[p]              re.compile(project_list[p][1])
 #   project_names[p]                 project_list[p][0]
 #   warning_messages     array of each warning message, without source url
+#   warning_links        array of each warning code search link; for 'chrome'
 #   warning_records      array of [idx to warn_patterns,
 #                                  idx to project_names,
-#                                  idx to warning_messages]
-#   android_root
-#   platform_version
-#   target_product
-#   target_variant
+#                                  idx to warning_messages,
+#                                  idx to warning_links]
 #   parse_input_file
 #
-# To emit html page of warning messages:
-#   flags: --byproject, --url, --separator
-# Old stuff for static html components:
-#   html_script_style:  static html scripts and styles
-#   htmlbig:
-#   dump_stats, dump_html_prologue, dump_html_epilogue:
-#   emit_buttons:
-#   dump_fixed
-#   sort_warnings:
-#   emit_stats_by_project:
-#   all_patterns,
-#   findproject, classify_warning
-#   dump_html
-#
-# New dynamic HTML page's static JavaScript data:
-#   Some data are copied from Python to JavaScript, to generate HTML elements.
-#   FlagURL                args.url
-#   FlagSeparator          args.separator
-#   SeverityColors:        list of colors for all severity levels
-#   SeverityHeaders:       list of headers for all severity levels
-#   SeverityColumnHeaders: list of column_headers for all severity levels
-#   ProjectNames:          project_names, or project_list[*][0]
-#   WarnPatternsSeverity:     warn_patterns[*]['severity']
-#   WarnPatternsDescription:  warn_patterns[*]['description']
-#   WarningMessages:          warning_messages
-#   Warnings:                 warning_records
-#   StatsHeader:           warning count table header row
-#   StatsRows:             array of warning count table rows
-#
-# New dynamic HTML page's dynamic JavaScript data:
-#
-# New dynamic HTML related function to emit data:
-#   escape_string, strip_escape_string, emit_warning_arrays
-#   emit_js_data():
-
-from __future__ import print_function
 import argparse
-import cgi
-import csv
 import io
 import multiprocessing
 import os
 import re
-import signal
 import sys
 
 # pylint:disable=relative-beyond-top-level
-from . import cpp_warn_patterns
-from . import java_warn_patterns
-from . import make_warn_patterns
-from . import other_warn_patterns
-from . import tidy_warn_patterns
 # pylint:disable=g-importing-member
-from .android_project_list import project_list
-from .severity import Severity
-
-parser = argparse.ArgumentParser(description='Convert a build log into HTML')
-parser.add_argument('--csvpath',
-                    help='Save CSV warning file to the passed absolute path',
-                    default=None)
-parser.add_argument('--gencsv',
-                    help='Generate a CSV file with number of various warnings',
-                    action='store_true',
-                    default=False)
-parser.add_argument('--byproject',
-                    help='Separate warnings in HTML output by project names',
-                    action='store_true',
-                    default=False)
-parser.add_argument('--url',
-                    help='Root URL of an Android source code tree prefixed '
-                    'before files in warnings')
-parser.add_argument('--separator',
-                    help='Separator between the end of a URL and the line '
-                    'number argument. e.g. #')
-parser.add_argument('--processes',
-                    type=int,
-                    default=multiprocessing.cpu_count(),
-                    help='Number of parallel processes to process warnings')
-parser.add_argument(dest='buildlog', metavar='build.log',
-                    help='Path to build.log file')
-args = parser.parse_args()
-
-warn_patterns = make_warn_patterns.warn_patterns
-warn_patterns.extend(cpp_warn_patterns.warn_patterns)
-warn_patterns.extend(java_warn_patterns.warn_patterns)
-warn_patterns.extend(tidy_warn_patterns.warn_patterns)
-warn_patterns.extend(other_warn_patterns.warn_patterns)
-
-project_patterns = []
-project_names = []
-warning_messages = []
-warning_records = []
+from . import android_project_list
+from . import chrome_project_list
+from . import cpp_warn_patterns as cpp_patterns
+from . import html_writer
+from . import java_warn_patterns as java_patterns
+from . import make_warn_patterns as make_patterns
+from . import other_warn_patterns as other_patterns
+from . import tidy_warn_patterns as tidy_patterns
 
 
-def initialize_arrays():
-  """Complete global arrays before they are used."""
-  global project_names, project_patterns
-  project_names = [p[0] for p in project_list]
-  project_patterns = [re.compile(p[1]) for p in project_list]
-  for w in warn_patterns:
-    w['members'] = []
-    # Each warning pattern has a 'projects' dictionary, that
-    # maps a project name to number of warnings in that project.
-    w['projects'] = {}
+def parse_args(use_google3):
+  """Define and parse the args. Return the parse_args() result."""
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument('--capacitor_path', default='',
+                      help='Save capacitor warning file to the passed absolute'
+                      ' path')
+  # csvpath is named differently from capacitor_path because the original
+  # Android script used csvpath, and other scripts still rely on that name.
+  parser.add_argument('--csvpath', default='',
+                      help='Save CSV warning file to the passed path')
+  parser.add_argument('--gencsv', action='store_true',
+                      help='Generate CSV file with number of various warnings')
+  parser.add_argument('--byproject', action='store_true',
+                      help='Separate warnings in HTML output by project names')
+  parser.add_argument('--url', default='',
+                      help='Root URL of an Android source code tree prefixed '
+                      'before files in warnings')
+  parser.add_argument('--separator', default='?l=',
+                      help='Separator between the end of a URL and the line '
+                      'number argument. e.g. #')
+  parser.add_argument('--processes', default=multiprocessing.cpu_count(),
+                      type=int,
+                      help='Number of parallel processes to process warnings')
+  # Old Android build scripts call warn.py without --platform,
+  # so the default platform is set to 'android'.
+  parser.add_argument('--platform', default='android',
+                      choices=['chrome', 'android'],
+                      help='Platform of the build log')
+  # Old Android build scripts call warn.py with only a build.log file path.
+  parser.add_argument('--log', help='Path to build log file')
+  parser.add_argument(dest='buildlog', metavar='build.log',
+                      default='build.log', nargs='?',
+                      help='Path to build.log file')
+  flags = parser.parse_args()
+  if not flags.log:
+    flags.log = flags.buildlog
+  if not use_google3 and not os.path.exists(flags.log):
+    sys.exit('Cannot find log file: ' + flags.log)
+  return flags
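A minimal sketch of the --log / positional build.log fallback implemented above: --log wins when given, otherwise the positional argument (itself defaulting to 'build.log') is used. The invocation below is hypothetical:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--log', help='Path to build log file')
parser.add_argument(dest='buildlog', metavar='build.log',
                    default='build.log', nargs='?')
flags = parser.parse_args(['out/verbose.log'])  # hypothetical invocation
if not flags.log:
  flags.log = flags.buildlog
print(flags.log)  # out/verbose.log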
 
 
-initialize_arrays()
+def get_project_names(project_list):
+  """Get project_names from project_list."""
+  return [p[0] for p in project_list]
 
 
-android_root = ''
-platform_version = 'unknown'
-target_product = 'unknown'
-target_variant = 'unknown'
-
-
-##### Data and functions to dump html file. ##################################
-
-html_head_scripts = """\
-  <script type="text/javascript">
-  function expand(id) {
-    var e = document.getElementById(id);
-    var f = document.getElementById(id + "_mark");
-    if (e.style.display == 'block') {
-       e.style.display = 'none';
-       f.innerHTML = '&#x2295';
-    }
-    else {
-       e.style.display = 'block';
-       f.innerHTML = '&#x2296';
-    }
-  };
-  function expandCollapse(show) {
-    for (var id = 1; ; id++) {
-      var e = document.getElementById(id + "");
-      var f = document.getElementById(id + "_mark");
-      if (!e || !f) break;
-      e.style.display = (show ? 'block' : 'none');
-      f.innerHTML = (show ? '&#x2296' : '&#x2295');
-    }
-  };
-  </script>
-  <style type="text/css">
-  th,td{border-collapse:collapse; border:1px solid black;}
-  .button{color:blue;font-size:110%;font-weight:bolder;}
-  .bt{color:black;background-color:transparent;border:none;outline:none;
-      font-size:140%;font-weight:bolder;}
-  .c0{background-color:#e0e0e0;}
-  .c1{background-color:#d0d0d0;}
-  .t1{border-collapse:collapse; width:100%; border:1px solid black;}
-  </style>
-  <script src="https://www.gstatic.com/charts/loader.js"></script>
-"""
-
-
-def make_writer(output_stream):
-
-  def writer(text):
-    return output_stream.write(text + '\n')
-
-  return writer
-
-
-def html_big(param):
-  return '<font size="+2">' + param + '</font>'
-
-
-def dump_html_prologue(title, writer):
-  writer('<html>\n<head>')
-  writer('<title>' + title + '</title>')
-  writer(html_head_scripts)
-  emit_stats_by_project(writer)
-  writer('</head>\n<body>')
-  writer(html_big(title))
-  writer('<p>')
-
-
-def dump_html_epilogue(writer):
-  writer('</body>\n</head>\n</html>')
-
-
-def sort_warnings():
-  for i in warn_patterns:
-    i['members'] = sorted(set(i['members']))
-
-
-def emit_stats_by_project(writer):
-  """Dump a google chart table of warnings per project and severity."""
-  # warnings[p][s] is number of warnings in project p of severity s.
-  # pylint:disable=g-complex-comprehension
-  warnings = {p: {s.value: 0 for s in Severity.levels} for p in project_names}
-  for i in warn_patterns:
-    # pytype: disable=attribute-error
-    s = i['severity'].value
-    # pytype: enable=attribute-error
-    for p in i['projects']:
-      warnings[p][s] += i['projects'][p]
-
-  # total_by_project[p] is number of warnings in project p.
-  total_by_project = {
-      p: sum(warnings[p][s.value] for s in Severity.levels)
-      for p in project_names
-  }
-
-  # total_by_severity[s] is number of warnings of severity s.
-  total_by_severity = {
-      s.value: sum(warnings[p][s.value] for p in project_names)
-      for s in Severity.levels
-  }
-
-  # emit table header
-  stats_header = ['Project']
-  for s in Severity.levels:
-    if total_by_severity[s.value]:
-      stats_header.append(
-          '<span style=\'background-color:{}\'>{}</span>'.format(
-              s.color, s.column_header))
-  stats_header.append('TOTAL')
-
-  # emit a row of warning counts per project, skip no-warning projects
-  total_all_projects = 0
-  stats_rows = []
-  for p in project_names:
-    if total_by_project[p]:
-      one_row = [p]
-      for s in Severity.levels:
-        if total_by_severity[s.value]:
-          one_row.append(warnings[p][s.value])
-      one_row.append(total_by_project[p])
-      stats_rows.append(one_row)
-      total_all_projects += total_by_project[p]
-
-  # emit a row of warning counts per severity
-  total_all_severities = 0
-  one_row = ['<b>TOTAL</b>']
-  for s in Severity.levels:
-    if total_by_severity[s.value]:
-      one_row.append(total_by_severity[s.value])
-      total_all_severities += total_by_severity[s.value]
-  one_row.append(total_all_projects)
-  stats_rows.append(one_row)
-  writer('<script>')
-  emit_const_string_array('StatsHeader', stats_header, writer)
-  emit_const_object_array('StatsRows', stats_rows, writer)
-  writer(draw_table_javascript)
-  writer('</script>')
-
-
-def dump_stats(writer):
-  """Dump some stats about total number of warnings and such."""
-  known = 0
-  skipped = 0
-  unknown = 0
-  sort_warnings()
-  for i in warn_patterns:
-    if i['severity'] == Severity.UNMATCHED:
-      unknown += len(i['members'])
-    elif i['severity'] == Severity.SKIP:
-      skipped += len(i['members'])
-    else:
-      known += len(i['members'])
-  writer('Number of classified warnings: <b>' + str(known) + '</b><br>')
-  writer('Number of skipped warnings: <b>' + str(skipped) + '</b><br>')
-  writer('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
-  total = unknown + known + skipped
-  extra_msg = ''
-  if total < 1000:
-    extra_msg = ' (low count may indicate incremental build)'
-  writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
-
-
-# New base table of warnings, [severity, warn_id, project, warning_message]
-# Need buttons to show warnings in different grouping options.
-# (1) Current, group by severity, id for each warning pattern
-#     sort by severity, warn_id, warning_message
-# (2) Current --byproject, group by severity,
-#     id for each warning pattern + project name
-#     sort by severity, warn_id, project, warning_message
-# (3) New, group by project + severity,
-#     id for each warning pattern
-#     sort by project, severity, warn_id, warning_message
-def emit_buttons(writer):
-  writer('<button class="button" onclick="expandCollapse(1);">'
-         'Expand all warnings</button>\n'
-         '<button class="button" onclick="expandCollapse(0);">'
-         'Collapse all warnings</button>\n'
-         '<button class="button" onclick="groupBySeverity();">'
-         'Group warnings by severity</button>\n'
-         '<button class="button" onclick="groupByProject();">'
-         'Group warnings by project</button><br>')
-
-
-def all_patterns(category):
-  patterns = ''
-  for i in category['patterns']:
-    patterns += i
-    patterns += ' / '
-  return patterns
-
-
-def dump_fixed(writer):
-  """Show which warnings no longer occur."""
-  anchor = 'fixed_warnings'
-  mark = anchor + '_mark'
-  writer('\n<br><p style="background-color:lightblue"><b>'
-         '<button id="' + mark + '" '
-         'class="bt" onclick="expand(\'' + anchor + '\');">'
-         '&#x2295</button> Fixed warnings. '
-         'No more occurrences. Please consider turning these into '
-         'errors if possible, before they are reintroduced in to the build'
-         ':</b></p>')
-  writer('<blockquote>')
-  fixed_patterns = []
-  for i in warn_patterns:
-    if not i['members']:
-      fixed_patterns.append(i['description'] + ' (' + all_patterns(i) + ')')
-  fixed_patterns = sorted(fixed_patterns)
-  writer('<div id="' + anchor + '" style="display:none;"><table>')
-  cur_row_class = 0
-  for text in fixed_patterns:
-    cur_row_class = 1 - cur_row_class
-    # remove last '\n'
-    t = text[:-1] if text[-1] == '\n' else text
-    writer('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
-  writer('</table></div>')
-  writer('</blockquote>')
-
-
-def find_project_index(line):
-  for p in range(len(project_patterns)):
-    if project_patterns[p].match(line):
-      return p
+def find_project_index(line, project_patterns):
+  for i, p in enumerate(project_patterns):
+    if p.match(line):
+      return i
   return -1
 
 
-def classify_one_warning(line, results):
+def classify_one_warning(warning, link, results, project_patterns,
+                         warn_patterns):
   """Classify one warning line."""
-  for i in range(len(warn_patterns)):
-    w = warn_patterns[i]
+  for i, w in enumerate(warn_patterns):
     for cpat in w['compiled_patterns']:
-      # pytype: disable=attribute-error
-      if cpat.match(line):
-        p = find_project_index(line)
-        results.append([line, i, p])
+      if cpat.match(warning):
+        p = find_project_index(warning, project_patterns)
+        results.append([warning, link, i, p])
         return
       else:
         # If we end up here, there was a problem parsing the log
         # probably caused by 'make -j' mixing the output from
         # 2 or more concurrent compiles
         pass
-      # pytype: enable=attribute-error
 
 
-def classify_warnings(lines):
-  results = []
-  for line in lines:
-    classify_one_warning(line, results)
-  # After the main work, ignore all other signals to a child process,
-  # to avoid bad warning/error messages from the exit clean-up process.
-  if args.processes > 1:
-    signal.signal(signal.SIGTERM, lambda *args: sys.exit(-signal.SIGTERM))
-  return results
+def remove_prefix(s, sub):
+  """Remove everything before last occurrence of substring sub in string s."""
+  if sub in s:
+    inc_sub = s.rfind(sub)
+    return s[inc_sub:]
+  return s
 
 
-def parallel_classify_warnings(warning_lines, parallel_process):
-  """Classify all warning lines with num_cpu parallel processes."""
-  num_cpu = args.processes
-  if num_cpu > 1:
-    groups = [[] for x in range(num_cpu)]
-    i = 0
-    for x in warning_lines:
-      groups[i].append(x)
-      i = (i + 1) % num_cpu
-    group_results = parallel_process(num_cpu, classify_warnings, groups)
-  else:
-    group_results = [classify_warnings(warning_lines)]
+# TODO(emmavukelj): Don't have any generate_*_cs_link functions call
+# normalize_path a second time (the first time being in parse_input_file)
+def generate_cs_link(warning_line, flags, android_root=None):
+  if flags.platform == 'chrome':
+    return generate_chrome_cs_link(warning_line, flags)
+  if flags.platform == 'android':
+    return generate_android_cs_link(warning_line, flags, android_root)
+  return 'https://cs.corp.google.com/'
 
-  for result in group_results:
-    for line, pattern_idx, project_idx in result:
-      pattern = warn_patterns[pattern_idx]
-      pattern['members'].append(line)
-      message_idx = len(warning_messages)
-      warning_messages.append(line)
-      warning_records.append([pattern_idx, project_idx, message_idx])
-      pname = '???' if project_idx < 0 else project_names[project_idx]
-      # Count warnings by project.
-      if pname in pattern['projects']:
-        pattern['projects'][pname] += 1
-      else:
-        pattern['projects'][pname] = 1
+
+def generate_android_cs_link(warning_line, flags, android_root):
+  """Generate the code search link for a warning line in Android."""
+  # max_splits=2 -> only 3 items
+  raw_path, line_number_str, _ = warning_line.split(':', 2)
+  normalized_path = normalize_path(raw_path, flags, android_root)
+  if not flags.url:
+    return normalized_path
+  link_path = flags.url + '/' + normalized_path
+  if line_number_str.isdigit():
+    link_path += flags.separator + line_number_str
+  return link_path
+
+
+def generate_chrome_cs_link(warning_line, flags):
+  """Generate the code search link for a warning line in Chrome."""
+  split_line = warning_line.split(':')
+  raw_path = split_line[0]
+  normalized_path = normalize_path(raw_path, flags)
+  link_base = 'https://cs.chromium.org/'
+  link_add = 'chromium'
+  link_path = None
+
+  # Basically just going through a few specific directory cases and specifying
+  # the proper behavior for that case. This list of cases was accumulated
+  # through trial and error manually going through the warnings.
+  #
+  # This code pattern of using case-specific "if"s instead of "elif"s looks
+  # possibly accidental and mistaken but it is intentional because some paths
+  # fall under several cases (e.g. third_party/lib/nghttp2_frame.c) and for
+  # those we want the most specific case to be applied. If there is reliable
+  # knowledge of exactly where these occur, this could be changed to "elif"s
+  # but there is no reliable set of paths falling under multiple cases at the
+  # moment.
+  if '/src/third_party' in raw_path:
+    link_path = remove_prefix(raw_path, '/src/third_party/')
+  if '/chrome_root/src_internal/' in raw_path:
+    link_path = remove_prefix(raw_path, '/chrome_root/src_internal/')
+    link_path = link_path[len('/chrome_root'):]  # remove chrome_root
+  if '/chrome_root/src/' in raw_path:
+    link_path = remove_prefix(raw_path, '/chrome_root/src/')
+    link_path = link_path[len('/chrome_root'):]  # remove chrome_root
+  if '/libassistant/' in raw_path:
+    link_add = 'eureka_internal/chromium/src'
+    link_base = 'https://cs.corp.google.com/'  # internal data
+    link_path = remove_prefix(normalized_path, '/libassistant/')
+  if raw_path.startswith('gen/'):
+    link_path = '/src/out/Debug/gen/' + normalized_path
+  if '/gen/' in raw_path:
+    return '%s?q=file:%s' % (link_base, remove_prefix(normalized_path, '/gen/'))
+
+  if not link_path and (raw_path.startswith('src/') or
+                        raw_path.startswith('src_internal/')):
+    link_path = '/%s' % raw_path
+
+  if not link_path:  # can't find specific link, send a query
+    return '%s?q=file:%s' % (link_base, normalized_path)
+
+  line_number = int(split_line[1])
+  link = '%s%s%s?l=%d' % (link_base, link_add, link_path, line_number)
+  return link
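The cascade of plain ifs above lets a later, more specific case overwrite link_path when a path falls under several prefixes, and remove_prefix keeps everything from the last occurrence of the substring onward. A standalone sketch with made-up paths:

def remove_prefix(s, sub):
  # Copy of the helper above: keep from the *last* occurrence of sub onward.
  if sub in s:
    return s[s.rfind(sub):]
  return s

path = '/b/w/chrome_root/src/third_party/foo/bar.cc'  # hypothetical path
print(remove_prefix(path, '/src/third_party/'))
# -> /src/third_party/foo/bar.cc
print(remove_prefix(path, '/chrome_root/src/'))
# -> /chrome_root/src/third_party/foo/bar.cc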
 
 
 def find_warn_py_and_android_root(path):
-  """Set and return android_root path if it is found."""
-  global android_root
+  """Return android source root path if warn.py is found."""
   parts = path.split('/')
   for idx in reversed(range(2, len(parts))):
     root_path = '/'.join(parts[:idx])
     # Android root directory should contain this script.
     if os.path.exists(root_path + '/build/make/tools/warn.py'):
-      android_root = root_path
-      return True
-  return False
+      return root_path
+  return ''
 
 
-def find_android_root():
-  """Guess android_root from common prefix of file paths."""
+def find_android_root(buildlog):
+  """Guess android source root from common prefix of file paths."""
   # Use the longest common prefix of the absolute file paths
   # of the first 10000 warning messages as the android_root.
-  global android_root
-  warning_lines = set()
+  warning_lines = []
   warning_pattern = re.compile('^/[^ ]*/[^ ]*: warning: .*')
   count = 0
-  infile = io.open(args.buildlog, mode='r', encoding='utf-8')
-  for line in infile:
+  for line in buildlog:
     if warning_pattern.match(line):
-      warning_lines.add(line)
+      warning_lines.append(line)
       count += 1
       if count > 9999:
         break
@@ -468,56 +242,110 @@
       # the source tree root.
       if count < 100:
         path = os.path.normpath(re.sub(':.*$', '', line))
-        if find_warn_py_and_android_root(path):
-          return
+        android_root = find_warn_py_and_android_root(path)
+        if android_root:
+          return android_root
   # Do not use common prefix of a small number of paths.
   if count > 10:
     # pytype: disable=wrong-arg-types
     root_path = os.path.commonprefix(warning_lines)
     # pytype: enable=wrong-arg-types
     if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
-      android_root = root_path[:-1]
+      return root_path[:-1]
+  return ''
 
 
-def remove_android_root_prefix(path):
+def remove_android_root_prefix(path, android_root):
   """Remove android_root prefix from path if it is found."""
   if path.startswith(android_root):
     return path[1 + len(android_root):]
+  return path
+
+
+def normalize_path(path, flags, android_root=None):
+  """Normalize file path relative to src/ or src-internal/ directory."""
+  path = os.path.normpath(path)
+
+  if flags.platform == 'android':
+    if android_root:
+      return remove_android_root_prefix(path, android_root)
+    return path
+
+  # Remove known prefix of root path and normalize the suffix.
+  idx = path.find('chrome_root/')
+  if idx >= 0:
+    # remove chrome_root/, we want path relative to that
+    return path[idx + len('chrome_root/'):]
   else:
     return path
 
 
-def normalize_path(path):
-  """Normalize file path relative to android_root."""
-  # If path is not an absolute path, just normalize it.
-  path = os.path.normpath(path)
-  # Remove known prefix of root path and normalize the suffix.
-  if path[0] == '/' and android_root:
-    return remove_android_root_prefix(path)
-  return path
-
-
-def normalize_warning_line(line):
-  """Normalize file path relative to android_root in a warning line."""
-  # replace fancy quotes with plain ol' quotes
+def normalize_warning_line(line, flags, android_root=None):
+  """Normalize file path relative to src directory in a warning line."""
   line = re.sub(u'[\u2018\u2019]', '\'', line)
   # replace non-ASCII chars to spaces
   line = re.sub(u'[^\x00-\x7f]', ' ', line)
   line = line.strip()
   first_column = line.find(':')
-  if first_column > 0:
-    return normalize_path(line[:first_column]) + line[first_column:]
-  else:
-    return line
+  return normalize_path(line[:first_column], flags,
+                        android_root) + line[first_column:]
 
 
-def parse_input_file(infile):
-  """Parse input file, collect parameters and warning lines."""
-  global android_root
-  global platform_version
-  global target_product
-  global target_variant
-  line_counter = 0
+def parse_input_file_chrome(infile, flags):
+  """Parse Chrome input file, collect parameters and warning lines."""
+  platform_version = 'unknown'
+  board_name = 'unknown'
+  architecture = 'unknown'
+
+  # only handle warning lines of format 'file_path:line_no:col_no: warning: ...'
+  chrome_warning_pattern = r'^[^ ]*/[^ ]*:[0-9]+:[0-9]+: warning: .*'
+
+  warning_pattern = re.compile(chrome_warning_pattern)
+
+  # Collect all unique warning lines
+  # Removing duplicated warnings saves about 8% of the time spent
+  # parsing a typical build log.
+  unique_warnings = dict()
+  for line in infile:
+    if warning_pattern.match(line):
+      normalized_line = normalize_warning_line(line, flags)
+      if normalized_line not in unique_warnings:
+        unique_warnings[normalized_line] = generate_cs_link(line, flags)
+    elif (platform_version == 'unknown' or board_name == 'unknown' or
+          architecture == 'unknown'):
+      m = re.match(r'.+Package:.+chromeos-base/chromeos-chrome-', line)
+      if m is not None:
+        platform_version = 'R' + line.split('chrome-')[1].split('_')[0]
+        continue
+      m = re.match(r'.+Source\sunpacked\sin\s(.+)', line)
+      if m is not None:
+        board_name = m.group(1).split('/')[2]
+        continue
+      m = re.match(r'.+USE:\s*([^\s]*).*', line)
+      if m is not None:
+        architecture = m.group(1)
+        continue
+
+  header_str = '%s - %s - %s' % (platform_version, board_name, architecture)
+  return unique_warnings, header_str
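A small sketch of the dict-based de-duplication used above: the normalized warning text is the key and the code-search link is the value, so a repeated warning is stored (and later classified) only once. The sample lines are made up:

unique_warnings = dict()
samples = [('foo.cc:1:1: warning: unused variable', 'link-a'),
           ('foo.cc:1:1: warning: unused variable', 'link-b'),
           ('bar.cc:2:2: warning: sign compare', 'link-c')]
for line, link in samples:
  if line not in unique_warnings:
    unique_warnings[line] = link
print(len(unique_warnings))  # 2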
+
+
+def add_normalized_line_to_warnings(line, flags, android_root, unique_warnings):
+  """Parse/normalize path, updating warning line and add to warnings dict."""
+  normalized_line = normalize_warning_line(line, flags, android_root)
+  if normalized_line not in unique_warnings:
+    unique_warnings[normalized_line] = generate_cs_link(line, flags,
+                                                        android_root)
+  return unique_warnings
+
+
+def parse_input_file_android(infile, flags):
+  """Parse Android input file, collect parameters and warning lines."""
+  platform_version = 'unknown'
+  target_product = 'unknown'
+  target_variant = 'unknown'
+  android_root = find_android_root(infile)
+  infile.seek(0)
 
   # rustc warning messages have two lines that should be combined:
   #     warning: description
@@ -532,20 +360,25 @@
   warning_without_file = re.compile('^warning: .*')
   rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
 
-  # Collect all warnings into the warning_lines set.
-  warning_lines = set()
+  # Collect all unique warning lines.
+  # Removing duplicated warnings saves about 8% of the time spent
+  # parsing a typical build log.
+  unique_warnings = dict()
+  line_counter = 0
   prev_warning = ''
   for line in infile:
     if prev_warning:
       if rustc_file_position.match(line):
         # must be a rustc warning, combine 2 lines into one warning
         line = line.strip().replace('--> ', '') + ': ' + prev_warning
-        warning_lines.add(normalize_warning_line(line))
+        unique_warnings = add_normalized_line_to_warnings(
+            line, flags, android_root, unique_warnings)
         prev_warning = ''
         continue
       # add prev_warning, and then process the current line
       prev_warning = 'unknown_source_file: ' + prev_warning
-      warning_lines.add(normalize_warning_line(prev_warning))
+      unique_warnings = add_normalized_line_to_warnings(
+          prev_warning, flags, android_root, unique_warnings)
       prev_warning = ''
 
     if warning_pattern.match(line):
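A sketch of the two-line rustc merge performed above: the indented '-->' location line is folded into the preceding 'warning: ...' line so it looks like an ordinary one-line warning. The sample text is made up:

prev_warning = 'warning: unused variable: `x`'
location_line = '  --> src/lib.rs:10:9'
combined = location_line.strip().replace('--> ', '') + ': ' + prev_warning
print(combined)  # src/lib.rs:10:9: warning: unused variable: `x`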
@@ -553,7 +386,8 @@
         # save this line and combine it with the next line
         prev_warning = line
       else:
-        warning_lines.add(normalize_warning_line(line))
+        unique_warnings = add_normalized_line_to_warnings(
+            line, flags, android_root, unique_warnings)
       continue
 
     if line_counter < 100:
@@ -568,336 +402,182 @@
       m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
       if m is not None:
         target_variant = m.group(0)
-      m = re.search('.* TOP=([^ ]*) .*', line)
+      m = re.search('(?<=^TOP=)(.*)', line)
       if m is not None:
         android_root = m.group(1)
-  return warning_lines
+
+  if android_root:
+    new_unique_warnings = dict()
+    for warning_line in unique_warnings:
+      normalized_line = normalize_warning_line(warning_line, flags,
+                                               android_root)
+      new_unique_warnings[normalized_line] = generate_android_cs_link(
+          warning_line, flags, android_root)
+    unique_warnings = new_unique_warnings
+
+  header_str = '%s - %s - %s' % (platform_version, target_product,
+                                 target_variant)
+  return unique_warnings, header_str
 
 
-# Return s with escaped backslash and quotation characters.
-def escape_string(s):
-  # pytype: disable=attribute-error
-  return s.replace('\\', '\\\\').replace('"', '\\"')
-  # pytype: enable=attribute-error
+def parse_input_file(infile, flags):
+  if flags.platform == 'chrome':
+    return parse_input_file_chrome(infile, flags)
+  if flags.platform == 'android':
+    return parse_input_file_android(infile, flags)
+  raise RuntimeError('parse_input_file not defined for platform %s' %
+                     flags.platform)
 
 
-# Return s without trailing '\n' and escape the quotation characters.
-def strip_escape_string(s):
-  if not s:
-    return s
-  s = s[:-1] if s[-1] == '\n' else s
-  return escape_string(s)
+def parse_compiler_output(compiler_output):
+  """Parse compiler output for relevant info."""
+  split_output = compiler_output.split(':', 3)  # 3 = max splits
+  file_path = split_output[0]
+  line_number = int(split_output[1])
+  col_number = int(split_output[2].split(' ')[0])
+  warning_message = split_output[3]
+  return file_path, line_number, col_number, warning_message
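For reference, a self-contained copy of parse_compiler_output with a made-up input, showing the 'path:line:col: message' split:

def parse_compiler_output(compiler_output):
  split_output = compiler_output.split(':', 3)  # 3 = max splits
  file_path = split_output[0]
  line_number = int(split_output[1])
  col_number = int(split_output[2].split(' ')[0])
  warning_message = split_output[3]
  return file_path, line_number, col_number, warning_message

print(parse_compiler_output('foo/bar.cc:42:7: warning: unused variable'))
# -> ('foo/bar.cc', 42, 7, ' warning: unused variable')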
 
 
-def emit_warning_array(name, writer):
-  writer('var warning_{} = ['.format(name))
-  for i in range(len(warn_patterns)):
-    writer('{},'.format(warn_patterns[i][name]))
-  writer('];')
-
-
-def emit_warning_arrays(writer):
-  emit_warning_array('severity', writer)
-  writer('var warning_description = [')
-  for i in range(len(warn_patterns)):
-    if warn_patterns[i]['members']:
-      writer('"{}",'.format(escape_string(warn_patterns[i]['description'])))
-    else:
-      writer('"",')  # no such warning
-  writer('];')
-
-
-scripts_for_warning_groups = """
-  function compareMessages(x1, x2) { // of the same warning type
-    return (WarningMessages[x1[2]] <= WarningMessages[x2[2]]) ? -1 : 1;
-  }
-  function byMessageCount(x1, x2) {
-    return x2[2] - x1[2];  // reversed order
-  }
-  function bySeverityMessageCount(x1, x2) {
-    // orer by severity first
-    if (x1[1] != x2[1])
-      return  x1[1] - x2[1];
-    return byMessageCount(x1, x2);
-  }
-  const ParseLinePattern = /^([^ :]+):(\\d+):(.+)/;
-  function addURL(line) {
-    if (FlagURL == "") return line;
-    if (FlagSeparator == "") {
-      return line.replace(ParseLinePattern,
-        "<a target='_blank' href='" + FlagURL + "/$1'>$1</a>:$2:$3");
-    }
-    return line.replace(ParseLinePattern,
-      "<a target='_blank' href='" + FlagURL + "/$1" + FlagSeparator +
-        "$2'>$1:$2</a>:$3");
-  }
-  function createArrayOfDictionaries(n) {
-    var result = [];
-    for (var i=0; i<n; i++) result.push({});
-    return result;
-  }
-  function groupWarningsBySeverity() {
-    // groups is an array of dictionaries,
-    // each dictionary maps from warning type to array of warning messages.
-    var groups = createArrayOfDictionaries(SeverityColors.length);
-    for (var i=0; i<Warnings.length; i++) {
-      var w = Warnings[i][0];
-      var s = WarnPatternsSeverity[w];
-      var k = w.toString();
-      if (!(k in groups[s]))
-        groups[s][k] = [];
-      groups[s][k].push(Warnings[i]);
-    }
-    return groups;
-  }
-  function groupWarningsByProject() {
-    var groups = createArrayOfDictionaries(ProjectNames.length);
-    for (var i=0; i<Warnings.length; i++) {
-      var w = Warnings[i][0];
-      var p = Warnings[i][1];
-      var k = w.toString();
-      if (!(k in groups[p]))
-        groups[p][k] = [];
-      groups[p][k].push(Warnings[i]);
-    }
-    return groups;
-  }
-  var GlobalAnchor = 0;
-  function createWarningSection(header, color, group) {
-    var result = "";
-    var groupKeys = [];
-    var totalMessages = 0;
-    for (var k in group) {
-       totalMessages += group[k].length;
-       groupKeys.push([k, WarnPatternsSeverity[parseInt(k)], group[k].length]);
-    }
-    groupKeys.sort(bySeverityMessageCount);
-    for (var idx=0; idx<groupKeys.length; idx++) {
-      var k = groupKeys[idx][0];
-      var messages = group[k];
-      var w = parseInt(k);
-      var wcolor = SeverityColors[WarnPatternsSeverity[w]];
-      var description = WarnPatternsDescription[w];
-      if (description.length == 0)
-          description = "???";
-      GlobalAnchor += 1;
-      result += "<table class='t1'><tr bgcolor='" + wcolor + "'><td>" +
-                "<button class='bt' id='" + GlobalAnchor + "_mark" +
-                "' onclick='expand(\\"" + GlobalAnchor + "\\");'>" +
-                "&#x2295</button> " +
-                description + " (" + messages.length + ")</td></tr></table>";
-      result += "<div id='" + GlobalAnchor +
-                "' style='display:none;'><table class='t1'>";
-      var c = 0;
-      messages.sort(compareMessages);
-      for (var i=0; i<messages.length; i++) {
-        result += "<tr><td class='c" + c + "'>" +
-                  addURL(WarningMessages[messages[i][2]]) + "</td></tr>";
-        c = 1 - c;
-      }
-      result += "</table></div>";
-    }
-    if (result.length > 0) {
-      return "<br><span style='background-color:" + color + "'><b>" +
-             header + ": " + totalMessages +
-             "</b></span><blockquote><table class='t1'>" +
-             result + "</table></blockquote>";
-
-    }
-    return "";  // empty section
-  }
-  function generateSectionsBySeverity() {
-    var result = "";
-    var groups = groupWarningsBySeverity();
-    for (s=0; s<SeverityColors.length; s++) {
-      result += createWarningSection(SeverityHeaders[s], SeverityColors[s],
-                                     groups[s]);
-    }
-    return result;
-  }
-  function generateSectionsByProject() {
-    var result = "";
-    var groups = groupWarningsByProject();
-    for (i=0; i<groups.length; i++) {
-      result += createWarningSection(ProjectNames[i], 'lightgrey', groups[i]);
-    }
-    return result;
-  }
-  function groupWarnings(generator) {
-    GlobalAnchor = 0;
-    var e = document.getElementById("warning_groups");
-    e.innerHTML = generator();
-  }
-  function groupBySeverity() {
-    groupWarnings(generateSectionsBySeverity);
-  }
-  function groupByProject() {
-    groupWarnings(generateSectionsByProject);
-  }
-"""
-
-
-# Emit a JavaScript const string
-def emit_const_string(name, value, writer):
-  writer('const ' + name + ' = "' + escape_string(value) + '";')
-
-
-# Emit a JavaScript const integer array.
-def emit_const_int_array(name, array, writer):
-  writer('const ' + name + ' = [')
-  for n in array:
-    writer(str(n) + ',')
-  writer('];')
-
-
-# Emit a JavaScript const string array.
-def emit_const_string_array(name, array, writer):
-  writer('const ' + name + ' = [')
-  for s in array:
-    writer('"' + strip_escape_string(s) + '",')
-  writer('];')
-
-
-# Emit a JavaScript const string array for HTML.
-def emit_const_html_string_array(name, array, writer):
-  writer('const ' + name + ' = [')
-  for s in array:
-    # Not using html.escape yet, to work for both python 2 and 3,
-    # until all users switch to python 3.
-    # pylint:disable=deprecated-method
-    writer('"' + cgi.escape(strip_escape_string(s)) + '",')
-  writer('];')
-
-
-# Emit a JavaScript const object array.
-def emit_const_object_array(name, array, writer):
-  writer('const ' + name + ' = [')
-  for x in array:
-    writer(str(x) + ',')
-  writer('];')
-
-
-def emit_js_data(writer):
-  """Dump dynamic HTML page's static JavaScript data."""
-  emit_const_string('FlagURL',
-                    args.url if args.url else '', writer)
-  emit_const_string('FlagSeparator',
-                    args.separator if args.separator else '', writer)
-  emit_const_string_array('SeverityColors',
-                          [s.color for s in Severity.levels], writer)
-  emit_const_string_array('SeverityHeaders',
-                          [s.header for s in Severity.levels], writer)
-  emit_const_string_array('SeverityColumnHeaders',
-                          [s.column_header for s in Severity.levels], writer)
-  emit_const_string_array('ProjectNames', project_names, writer)
-  # pytype: disable=attribute-error
-  emit_const_int_array('WarnPatternsSeverity',
-                       [w['severity'].value for w in warn_patterns], writer)
-  # pytype: enable=attribute-error
-  emit_const_html_string_array('WarnPatternsDescription',
-                               [w['description'] for w in warn_patterns],
-                               writer)
-  emit_const_html_string_array('WarningMessages', warning_messages, writer)
-  emit_const_object_array('Warnings', warning_records, writer)
-
-draw_table_javascript = """
-google.charts.load('current', {'packages':['table']});
-google.charts.setOnLoadCallback(drawTable);
-function drawTable() {
-  var data = new google.visualization.DataTable();
-  data.addColumn('string', StatsHeader[0]);
-  for (var i=1; i<StatsHeader.length; i++) {
-    data.addColumn('number', StatsHeader[i]);
-  }
-  data.addRows(StatsRows);
-  for (var i=0; i<StatsRows.length; i++) {
-    for (var j=0; j<StatsHeader.length; j++) {
-      data.setProperty(i, j, 'style', 'border:1px solid black;');
-    }
-  }
-  var table = new google.visualization.Table(
-      document.getElementById('stats_table'));
-  table.draw(data, {allowHtml: true, alternatingRowStyle: true});
-}
-"""
-
-
-def dump_html(output_stream):
-  """Dump the html output to output_stream."""
-  writer = make_writer(output_stream)
-  dump_html_prologue('Warnings for ' + platform_version + ' - ' +
-                     target_product + ' - ' + target_variant, writer)
-  dump_stats(writer)
-  writer('<br><div id="stats_table"></div><br>')
-  writer('\n<script>')
-  emit_js_data(writer)
-  writer(scripts_for_warning_groups)
-  writer('</script>')
-  emit_buttons(writer)
-  # Warning messages are grouped by severities or project names.
-  writer('<br><div id="warning_groups"></div>')
-  if args.byproject:
-    writer('<script>groupByProject();</script>')
+def get_warn_patterns(platform):
+  """Get and initialize warn_patterns."""
+  warn_patterns = []
+  if platform == 'chrome':
+    warn_patterns = cpp_patterns.warn_patterns
+  elif platform == 'android':
+    warn_patterns = make_patterns.warn_patterns + cpp_patterns.warn_patterns + java_patterns.warn_patterns + tidy_patterns.warn_patterns + other_patterns.warn_patterns
   else:
-    writer('<script>groupBySeverity();</script>')
-  dump_fixed(writer)
-  dump_html_epilogue(writer)
+    raise Exception('platform name %s is not valid' % platform)
+  for w in warn_patterns:
+    w['members'] = []
+    # Each warning pattern has a 'projects' dictionary, that
+    # maps a project name to number of warnings in that project.
+    w['projects'] = {}
+  return warn_patterns
 
 
-##### Functions to count warnings and dump csv file. #########################
+def get_project_list(platform):
+  """Return project list for appropriate platform."""
+  if platform == 'chrome':
+    return chrome_project_list.project_list
+  if platform == 'android':
+    return android_project_list.project_list
+  raise Exception('platform name %s is not valid' % platform)
 
 
-def description_for_csv(category):
-  if not category['description']:
-    return '?'
-  return category['description']
+def parallel_classify_warnings(warning_data, args, project_names,
+                               project_patterns, warn_patterns,
+                               use_google3, create_launch_subprocs_fn,
+                               classify_warnings_fn):
+  """Classify all warning lines with num_cpu parallel processes."""
+  num_cpu = args.processes
+  group_results = []
 
+  if num_cpu > 1:
+    # set up parallel processing for this...
+    warning_groups = [[] for _ in range(num_cpu)]
+    i = 0
+    for warning, link in warning_data.items():
+      warning_groups[i].append((warning, link))
+      i = (i + 1) % num_cpu
+    arg_groups = [[] for _ in range(num_cpu)]
+    for i, group in enumerate(warning_groups):
+      arg_groups[i] = [{
+          'group': group,
+          'project_patterns': project_patterns,
+          'warn_patterns': warn_patterns,
+          'num_processes': num_cpu
+      }]
 
-def count_severity(writer, sev, kind):
-  """Count warnings of given severity."""
-  total = 0
-  for i in warn_patterns:
-    if i['severity'] == sev and i['members']:
-      n = len(i['members'])
-      total += n
-      warning = kind + ': ' + description_for_csv(i)
-      writer.writerow([n, '', warning])
-      # print number of warnings for each project, ordered by project name.
-      # pytype: disable=attribute-error
-      projects = sorted(i['projects'].keys())
-      # pytype: enable=attribute-error
-      for p in projects:
-        writer.writerow([i['projects'][p], p, warning])
-  writer.writerow([total, '', kind + ' warnings'])
-
-  return total
-
-
-# dump number of warnings in csv format to stdout
-def dump_csv(writer):
-  """Dump number of warnings in csv format to stdout."""
-  sort_warnings()
-  total = 0
-  for s in Severity.levels:
-    if s != Severity.SEVERITY_UNKNOWN:
-      total += count_severity(writer, s, s.column_header)
-  writer.writerow([total, '', 'All warnings'])
-
-
-def common_main(parallel_process):
-  """Real main function to classify warnings and generate .html file."""
-  find_android_root()
-  # We must use 'utf-8' codec to parse some non-ASCII code in warnings.
-  warning_lines = parse_input_file(
-      io.open(args.buildlog, mode='r', encoding='utf-8'))
-  parallel_classify_warnings(warning_lines, parallel_process)
-  # If a user pases a csv path, save the fileoutput to the path
-  # If the user also passed gencsv write the output to stdout
-  # If the user did not pass gencsv flag dump the html report to stdout.
-  if args.csvpath:
-    with open(args.csvpath, 'w') as f:
-      dump_csv(csv.writer(f, lineterminator='\n'))
-  if args.gencsv:
-    dump_csv(csv.writer(sys.stdout, lineterminator='\n'))
+    group_results = create_launch_subprocs_fn(num_cpu,
+                                              classify_warnings_fn,
+                                              arg_groups,
+                                              group_results)
   else:
-    dump_html(sys.stdout)
+    group_results = []
+    for warning, link in warning_data.items():
+      classify_one_warning(warning, link, group_results,
+                           project_patterns, warn_patterns)
+    group_results = [group_results]
+
+  warning_messages = []
+  warning_links = []
+  warning_records = []
+  if use_google3:
+    group_results = [group_results]
+  for group_result in group_results:
+    for result in group_result:
+      for line, link, pattern_idx, project_idx in result:
+        pattern = warn_patterns[pattern_idx]
+        pattern['members'].append(line)
+        message_idx = len(warning_messages)
+        warning_messages.append(line)
+        link_idx = len(warning_links)
+        warning_links.append(link)
+        warning_records.append([pattern_idx, project_idx, message_idx,
+                                link_idx])
+        pname = '???' if project_idx < 0 else project_names[project_idx]
+        # Count warnings by project.
+        if pname in pattern['projects']:
+          pattern['projects'][pname] += 1
+        else:
+          pattern['projects'][pname] = 1
+  return warning_messages, warning_links, warning_records
+
+
+def process_log(logfile, flags, project_names, project_patterns, warn_patterns,
+                html_path, use_google3, create_launch_subprocs_fn,
+                classify_warnings_fn, logfile_object):
+  # pylint: disable=g-doc-args
+  # pylint: disable=g-doc-return-or-yield
+  """Function that handles processing of a log.
+
+  This is isolated into its own function (rather than just taking place in main)
+  so that it can be used by both warn.py and the borg job process_gs_logs.py, to
+  avoid duplication of code.
+  Note that if the arguments to this function change, process_gs_logs.py must
+  be updated accordingly.
+  """
+  if logfile_object is None:
+    with io.open(logfile, encoding='utf-8') as log:
+      warning_lines_and_links, header_str = parse_input_file(log, flags)
+  else:
+    warning_lines_and_links, header_str = parse_input_file(
+        logfile_object, flags)
+  warning_messages, warning_links, warning_records = parallel_classify_warnings(
+      warning_lines_and_links, flags, project_names, project_patterns,
+      warn_patterns, use_google3, create_launch_subprocs_fn,
+      classify_warnings_fn)
+
+  html_writer.write_html(flags, project_names, warn_patterns, html_path,
+                         warning_messages, warning_links, warning_records,
+                         header_str)
+
+  return warning_messages, warning_links, warning_records, header_str
+
+
+def common_main(use_google3, create_launch_subprocs_fn, classify_warnings_fn,
+                logfile_object=None):
+  """Shared main function for Google3 and non-Google3 versions of warn.py."""
+  flags = parse_args(use_google3)
+  warn_patterns = get_warn_patterns(flags.platform)
+  project_list = get_project_list(flags.platform)
+
+  project_names = get_project_names(project_list)
+  project_patterns = [re.compile(p[1]) for p in project_list]
+
+  # html_path=None because we output html below if not outputting CSV
+  warning_messages, warning_links, warning_records, header_str = process_log(
+      logfile=flags.log, flags=flags, project_names=project_names,
+      project_patterns=project_patterns, warn_patterns=warn_patterns,
+      html_path=None, use_google3=use_google3,
+      create_launch_subprocs_fn=create_launch_subprocs_fn,
+      classify_warnings_fn=classify_warnings_fn,
+      logfile_object=logfile_object)
+
+  html_writer.write_out_csv(flags, warn_patterns, warning_messages,
+                            warning_links, warning_records, header_str,
+                            project_names)
+
+  # Return these values, so that caller can use them, if desired.
+  return flags, warning_messages, warning_records, warn_patterns
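The dependency injection above (create_launch_subprocs_fn, classify_warnings_fn) is what lets warn.py and a google3 wrapper share one driver. Below is a stand-alone, serial sketch of that shape; all names and data are stand-ins, not the real warn_common API:

def classify_warnings(args):
  # Caller-provided classifier; keeps only lines that look like warnings.
  return [w for w, _link in args['group'] if 'warning:' in w]

def create_and_launch_subprocesses(num_cpu, classify_fn, arg_groups, results):
  # Serial stand-in for the Pool.map-based launcher in warn.py.
  for cpu in range(num_cpu):
    results.append([classify_fn(args) for args in arg_groups[cpu]])
  return results

def common_main(create_launch_subprocs_fn, classify_warnings_fn):
  warnings = {'a.cc:1:1: warning: x': 'link1', 'b.cc:2:2: note: y': 'link2'}
  arg_groups = [[{'group': list(warnings.items())}]]
  return create_launch_subprocs_fn(1, classify_warnings_fn, arg_groups, [])

print(common_main(create_and_launch_subprocesses, classify_warnings))
# -> [[['a.cc:1:1: warning: x']]]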