Merge "Rename DexPreoptImageLocations to DexPreoptImageLocationsOnHost"
diff --git a/core/Makefile b/core/Makefile
index 40866ec..b70948b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2330,64 +2330,54 @@
     $(ALL_GENERATED_SOURCES) \
     $(ALL_DEFAULT_INSTALLED_MODULES))
 
-# Note: TARGET_DEBUG_RAMDISK_OUT will be $(PRODUCT_OUT)/debug_ramdisk/first_stage_ramdisk,
-# if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/debug_ramdisk.
-# But the root dir of the ramdisk to build is always $(PRODUCT_OUT)/debug_ramdisk.
-my_debug_ramdisk_root_dir := $(PRODUCT_OUT)/debug_ramdisk
-
 INSTALLED_FILES_FILE_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-ramdisk-debug.txt
 INSTALLED_FILES_JSON_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_DEBUG_RAMDISK:.txt=.json)
 $(INSTALLED_FILES_FILE_DEBUG_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_DEBUG_RAMDISK)
-$(INSTALLED_FILES_FILE_DEBUG_RAMDISK): DEBUG_RAMDISK_ROOT_DIR := $(my_debug_ramdisk_root_dir)
 
-# Cannot just depend on INTERNAL_DEBUG_RAMDISK_FILES like other INSTALLED_FILES_FILE_* rules.
-# Because ramdisk-debug.img will rsync from either ramdisk.img or ramdisk-recovery.img.
-# Need to depend on the built ramdisk-debug.img, to get a complete list of the installed files.
-$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INSTALLED_DEBUG_RAMDISK_TARGET)
+# ramdisk-debug.img will merge the content from either ramdisk.img or
+# ramdisk-recovery.img, depending on whether BOARD_USES_RECOVERY_AS_BOOT
+# is set or not.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+  $(INSTALLED_FILES_FILE_DEBUG_RAMDISK): PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+  $(INSTALLED_FILES_FILE_DEBUG_RAMDISK): $(recovery_ramdisk)
+else
+  $(INSTALLED_FILES_FILE_DEBUG_RAMDISK): PRIVATE_ADDITIONAL_DIR := $(TARGET_RAMDISK_OUT)
+  $(INSTALLED_FILES_FILE_DEBUG_RAMDISK): $(INSTALLED_RAMDISK_TARGET)
+endif # BOARD_USES_RECOVERY_AS_BOOT
+
 $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INTERNAL_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	mkdir -p $(dir $@)
 	rm -f $@
-	$(FILESLIST) $(DEBUG_RAMDISK_ROOT_DIR) > $(@:.txt=.json)
+	$(FILESLIST) $(TARGET_DEBUG_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-# ramdisk-debug.img will rsync the content from either ramdisk.img or ramdisk-recovery.img,
-# depending on whether BOARD_USES_RECOVERY_AS_BOOT is set or not.
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-my_debug_ramdisk_sync_dir := $(TARGET_RECOVERY_ROOT_OUT)
+  $(INSTALLED_DEBUG_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+  $(INSTALLED_DEBUG_RAMDISK_TARGET): $(recovery_ramdisk)
 else
-my_debug_ramdisk_sync_dir := $(TARGET_RAMDISK_OUT)
+  $(INSTALLED_DEBUG_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RAMDISK_OUT)
+  $(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_RAMDISK_TARGET)
 endif # BOARD_USES_RECOVERY_AS_BOOT
 
-$(INSTALLED_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_SYNC_DIR := $(my_debug_ramdisk_sync_dir)
-$(INSTALLED_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_ROOT_DIR := $(my_debug_ramdisk_root_dir)
-
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-# ramdisk-recovery.img isn't a make target, need to depend on boot.img if it's for recovery.
-$(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_BOOTIMAGE_TARGET)
-else
-# Depends on ramdisk.img, note that some target has ramdisk.img but no boot.img, e.g., emulator.
-$(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_RAMDISK_TARGET)
-endif # BOARD_USES_RECOVERY_AS_BOOT
+$(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_FILES_FILE_DEBUG_RAMDISK)
 $(INSTALLED_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
 	$(call pretty,"Target debug ramdisk: $@")
 	mkdir -p $(TARGET_DEBUG_RAMDISK_OUT)
 	touch $(TARGET_DEBUG_RAMDISK_OUT)/force_debuggable
-	rsync -a $(DEBUG_RAMDISK_SYNC_DIR)/ $(DEBUG_RAMDISK_ROOT_DIR)
-	$(MKBOOTFS) -d $(TARGET_OUT) $(DEBUG_RAMDISK_ROOT_DIR) | $(COMPRESSION_COMMAND) > $@
+	$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_DEBUG_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
 
 .PHONY: ramdisk_debug-nodeps
-ramdisk_debug-nodeps: DEBUG_RAMDISK_SYNC_DIR := $(my_debug_ramdisk_sync_dir)
-ramdisk_debug-nodeps: DEBUG_RAMDISK_ROOT_DIR := $(my_debug_ramdisk_root_dir)
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+  ramdisk_debug-nodeps: PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+else
+  ramdisk_debug-nodeps: PRIVATE_ADDITIONAL_DIR := $(TARGET_RAMDISK_OUT)
+endif # BOARD_USES_RECOVERY_AS_BOOT
 ramdisk_debug-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
 	echo "make $@: ignoring dependencies"
 	mkdir -p $(TARGET_DEBUG_RAMDISK_OUT)
 	touch $(TARGET_DEBUG_RAMDISK_OUT)/force_debuggable
-	rsync -a $(DEBUG_RAMDISK_SYNC_DIR)/ $(DEBUG_RAMDISK_ROOT_DIR)
-	$(MKBOOTFS) -d $(TARGET_OUT) $(DEBUG_RAMDISK_ROOT_DIR) | $(COMPRESSION_COMMAND) > $(INSTALLED_DEBUG_RAMDISK_TARGET)
-
-my_debug_ramdisk_sync_dir :=
-my_debug_ramdisk_root_dir :=
+	$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_DEBUG_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $(INSTALLED_DEBUG_RAMDISK_TARGET)
 
 endif # BUILDING_RAMDISK_IMAGE
 
@@ -2407,9 +2397,9 @@
 
 # Replace ramdisk.img in $(MKBOOTIMG) ARGS with ramdisk-debug.img to build boot-debug.img
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-INTERNAL_DEBUG_BOOTIMAGE_ARGS := $(subst $(recovery_ramdisk),$(INSTALLED_DEBUG_RAMDISK_TARGET), $(INTERNAL_RECOVERYIMAGE_ARGS))
+  INTERNAL_DEBUG_BOOTIMAGE_ARGS := $(subst $(recovery_ramdisk),$(INSTALLED_DEBUG_RAMDISK_TARGET), $(INTERNAL_RECOVERYIMAGE_ARGS))
 else
-INTERNAL_DEBUG_BOOTIMAGE_ARGS := $(subst $(INSTALLED_RAMDISK_TARGET),$(INSTALLED_DEBUG_RAMDISK_TARGET), $(INTERNAL_BOOTIMAGE_ARGS))
+  INTERNAL_DEBUG_BOOTIMAGE_ARGS := $(subst $(INSTALLED_RAMDISK_TARGET),$(INSTALLED_DEBUG_RAMDISK_TARGET), $(INTERNAL_BOOTIMAGE_ARGS))
 endif
 
 # If boot.img is chained but boot-debug.img is not signed, libavb in bootloader
@@ -2460,9 +2450,24 @@
 # -----------------------------------------------------------------
 # vendor debug ramdisk
 # Combines vendor ramdisk files and debug ramdisk files to build the vendor debug ramdisk.
+#
+INTERNAL_VENDOR_DEBUG_RAMDISK_FILES := $(filter $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/%, \
+    $(ALL_GENERATED_SOURCES) \
+    $(ALL_DEFAULT_INSTALLED_MODULES))
+
+INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk-debug.txt
+INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK:.txt=.json)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
+	@echo Installed file list: $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	mkdir -p $(TARGET_VENDOR_DEBUG_RAMDISK_OUT) # The dir might not be created if no modules are installed here.
+	$(FILESLIST) $(TARGET_VENDOR_RAMDISK_OUT) $(TARGET_DEBUG_RAMDISK_OUT) $(TARGET_VENDOR_DEBUG_RAMDISK_OUT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+
 INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot-debug)/vendor_ramdisk-debug.cpio$(RAMDISK_EXT)
-$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_FILES := $(INTERNAL_DEBUG_RAMDISK_FILES)
-$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_RAMDISK_DIR := $(TARGET_VENDOR_RAMDISK_OUT)
 
 # Exclude recovery files in the default vendor ramdisk if including a standalone
 # recovery ramdisk in vendor_boot.
@@ -2473,23 +2478,12 @@
 endif
 endif
 
-INTERNAL_VENDOR_DEBUG_RAMDISK_FILES := $(filter $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/%, \
-    $(ALL_GENERATED_SOURCES) \
-    $(ALL_DEFAULT_INSTALLED_MODULES))
-
-# Note: TARGET_VENDOR_DEBUG_RAMDISK_OUT will be $(PRODUCT_OUT)/vendor_debug_ramdisk/first_stage_ramdisk,
-# if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/vendor_debug_ramdisk.
-# But the path of $(VENDOR_DEBUG_RAMDISK_DIR) to build the vendor debug ramdisk, is always
-# $(PRODUCT_OUT)/vendor_debug_ramdisk.
-$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/debug_ramdisk
-$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
+# The vendor debug ramdisk combines vendor ramdisk and debug ramdisk.
 $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK)
 $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
 	mkdir -p $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)
-	touch $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/force_debuggable
-	$(foreach debug_file,$(DEBUG_RAMDISK_FILES), \
-	  cp -f $(debug_file) $(patsubst $(DEBUG_RAMDISK_DIR)/%,$(VENDOR_DEBUG_RAMDISK_DIR)/%,$(debug_file)) &&) true
-	$(MKBOOTFS) -d $(TARGET_OUT) $(VENDOR_RAMDISK_DIR) $(VENDOR_DEBUG_RAMDISK_DIR) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
+	$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_VENDOR_RAMDISK_OUT) $(TARGET_DEBUG_RAMDISK_OUT) $(TARGET_VENDOR_DEBUG_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
 
 ifeq (true,$(BOARD_BUILD_VENDOR_RAMDISK_IMAGE))
 INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor_ramdisk-debug.img
@@ -2498,21 +2492,6 @@
 	$(copy-file-to-target)
 endif
 
-INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk-debug.txt
-INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK:.txt=.json)
-$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK)
-$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
-
-# The vendor debug ramdisk will rsync from $(TARGET_VENDOR_RAMDISK_OUT) and $(INTERNAL_DEBUG_RAMDISK_FILES),
-# so we have to wait for the vendor debug ramdisk to be built before generating the installed file list.
-$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
-$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
-	@echo Installed file list: $@
-	mkdir -p $(dir $@)
-	rm -f $@
-	$(FILESLIST) $(VENDOR_DEBUG_RAMDISK_DIR) > $(@:.txt=.json)
-	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
-
 # -----------------------------------------------------------------
 # vendor_boot-debug.img.
 INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot-debug.img
@@ -2554,13 +2533,7 @@
 BUILT_TEST_HARNESS_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk-test-harness.img
 INSTALLED_TEST_HARNESS_RAMDISK_TARGET := $(BUILT_TEST_HARNESS_RAMDISK_TARGET)
 
-# rsync the content from ramdisk-debug.img to ramdisk-test-harness.img, then
-# appends a few test harness specific properties into the adb_debug.prop.
-TEST_HARNESS_RAMDISK_SYNC_DIR := $(PRODUCT_OUT)/debug_ramdisk
-TEST_HARNESS_RAMDISK_ROOT_DIR := $(PRODUCT_OUT)/test_harness_ramdisk
-
-# The following TARGET_TEST_HARNESS_RAMDISK_OUT will be $(PRODUCT_OUT)/test_harness_ramdisk/first_stage_ramdisk,
-# if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/test_harness_ramdisk.
+# Appends a few test-harness-specific properties to adb_debug.prop.
 TEST_HARNESS_PROP_TARGET := $(TARGET_TEST_HARNESS_RAMDISK_OUT)/adb_debug.prop
 ADDITIONAL_TEST_HARNESS_PROPERTIES := ro.audio.silent=1
 ADDITIONAL_TEST_HARNESS_PROPERTIES += ro.test_harness=1
@@ -2574,19 +2547,40 @@
   $(foreach line,$(1), echo "$(line)" >> $(2);)
 endef
 
+INTERNAL_TEST_HARNESS_RAMDISK_FILES := $(filter $(TARGET_TEST_HARNESS_RAMDISK_OUT)/%, \
+    $(ALL_GENERATED_SOURCES) \
+    $(ALL_DEFAULT_INSTALLED_MODULES))
+
+# ramdisk-test-harness.img will merge the content from either ramdisk.img or
+# ramdisk-recovery.img, depending on whether BOARD_USES_RECOVERY_AS_BOOT is set
+# or not.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+  $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+  $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(recovery_ramdisk)
+else
+  $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RAMDISK_OUT)
+  $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(INSTALLED_RAMDISK_TARGET)
+endif # BOARD_USES_RECOVERY_AS_BOOT
+
+# The test harness ramdisk will rsync the files from the debug ramdisk, then append some props.
 $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(INSTALLED_DEBUG_RAMDISK_TARGET)
 $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_TEST_HARNESS_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
 	$(call pretty,"Target test harness ramdisk: $@")
-	rsync --chmod=u+w -a $(TEST_HARNESS_RAMDISK_SYNC_DIR)/ $(TEST_HARNESS_RAMDISK_ROOT_DIR)
+	rsync --chmod=u+w -a $(TARGET_DEBUG_RAMDISK_OUT)/ $(TARGET_TEST_HARNESS_RAMDISK_OUT)
 	$(call append-test-harness-props,$(ADDITIONAL_TEST_HARNESS_PROPERTIES),$(TEST_HARNESS_PROP_TARGET))
-	$(MKBOOTFS) -d $(TARGET_OUT) $(TEST_HARNESS_RAMDISK_ROOT_DIR) | $(COMPRESSION_COMMAND) > $@
+	$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_TEST_HARNESS_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
 
 .PHONY: ramdisk_test_harness-nodeps
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+  ramdisk_test_harness-nodeps: PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+else
+  ramdisk_test_harness-nodeps: PRIVATE_ADDITIONAL_DIR := $(TARGET_RAMDISK_OUT)
+endif # BOARD_USES_RECOVERY_AS_BOOT
 ramdisk_test_harness-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
 	echo "make $@: ignoring dependencies"
-	rsync -a $(TEST_HARNESS_RAMDISK_SYNC_DIR)/ $(TEST_HARNESS_RAMDISK_ROOT_DIR)
+	rsync --chmod=u+w -a $(TARGET_DEBUG_RAMDISK_OUT)/ $(TARGET_TEST_HARNESS_RAMDISK_OUT)
 	$(call append-test-harness-props,$(ADDITIONAL_TEST_HARNESS_PROPERTIES),$(TEST_HARNESS_PROP_TARGET))
-	$(MKBOOTFS) -d $(TARGET_OUT) $(TEST_HARNESS_RAMDISK_ROOT_DIR) | $(COMPRESSION_COMMAND) > $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+	$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_TEST_HARNESS_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
 
 endif # BUILDING_RAMDISK_IMAGE
 
@@ -5962,7 +5956,7 @@
 FUZZ_SHARED_DEPS := $(call copy-many-files,$(strip $(FUZZ_TARGET_SHARED_DEPS_INSTALL_PAIRS)))
 
 # -----------------------------------------------------------------
-# The rule to build all fuzz targets, and package them.
+# The rule to build all fuzz targets for C++ and Rust, and package them.
 # Note: The packages are created in Soong, and in a perfect world,
 # we'd be able to create the phony rule there. But, if we want to
 # have dist goals for the fuzz target, we need to have the PHONY
@@ -5974,3 +5968,7 @@
 .PHONY: haiku
 haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
 $(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
+
+.PHONY: haiku-rust
+haiku-rust: $(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_RUST_FUZZ_TARGETS)
+$(call dist-for-goals,haiku-rust,$(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES))
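
The core/Makefile hunks above drop the intermediate rsync staging step and instead pass every staged directory directly to $(MKBOOTFS) -d, with PRIVATE_ADDITIONAL_DIR supplying the ramdisk or recovery root. A minimal Python sketch of the layered merge this relies on, assuming entries from directories listed later override earlier ones when the resulting cpio is unpacked; merge_ramdisk_dirs and the example paths are hypothetical, for illustration only:

```python
import os
import shutil

def merge_ramdisk_dirs(output_dir, *staging_dirs):
    """Overlay staging_dirs left to right; later directories win."""
    os.makedirs(output_dir, exist_ok=True)
    for staging in staging_dirs:
        for root, _, files in os.walk(staging):
            rel = os.path.relpath(root, staging)
            dest = os.path.join(output_dir, rel)
            os.makedirs(dest, exist_ok=True)
            for name in files:
                # copy2 overwrites any file placed by an earlier directory,
                # mirroring how the device unpacks a multi-root cpio archive.
                shutil.copy2(os.path.join(root, name), os.path.join(dest, name))

# Hypothetical paths standing in for the make variables:
# merge_ramdisk_dirs("out/debug_ramdisk_merged",
#                    "out/target/product/x/ramdisk",        # $(TARGET_RAMDISK_OUT)
#                    "out/target/product/x/debug_ramdisk")  # $(TARGET_DEBUG_RAMDISK_OUT)
```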
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index c9fcf47..74558f5 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -27,6 +27,7 @@
 # Add variables to the namespace below:
 
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
+$(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
 
 # TODO(b/172480615): Remove when platform uses ART Module prebuilts by default.
 ifeq (,$(filter art_module,$(SOONG_CONFIG_NAMESPACES)))
diff --git a/core/board_config.mk b/core/board_config.mk
index be37292..9061342 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -331,16 +331,6 @@
 endif
 
 ###########################################
-# Now we can substitute with the real value of TARGET_COPY_OUT_DEBUG_RAMDISK
-ifneq (,$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT) \
-  $(BOARD_GKI_NONAB_COMPAT) $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT) \
-  $(BOARD_USES_GENERIC_KERNEL_IMAGE)))
-TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk/first_stage_ramdisk
-TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK := vendor_debug_ramdisk/first_stage_ramdisk
-TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk/first_stage_ramdisk
-endif
-
-###########################################
 # Configure whether we're building the system image
 BUILDING_SYSTEM_IMAGE := true
 ifeq ($(PRODUCT_BUILD_SYSTEM_IMAGE),)
diff --git a/core/definitions.mk b/core/definitions.mk
index 2951c05..7f2cc42 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2544,8 +2544,12 @@
 # $(1): source file
 # $(2): destination file
 define copy-init-script-file-checked
+ifdef TARGET_BUILD_UNBUNDLED
+# TODO (b/185624993): Remove the check on TARGET_BUILD_UNBUNDLED when host_init_verifier can run
+# without requiring the HIDL interface map.
+$(2): $(1)
+else ifneq ($(HOST_OS),darwin)
 # Host init verifier doesn't exist on darwin.
-ifneq ($(HOST_OS),darwin)
 $(2): \
 	$(1) \
 	$(HOST_INIT_VERIFIER) \
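
The copy-init-script-file-checked change above adds a TARGET_BUILD_UNBUNDLED branch in front of the existing darwin check. A plain-Python sketch of the resulting decision, with the make variables modeled as arguments (hypothetical helper, for illustration only):

```python
def init_script_rule(target_build_unbundled: bool, host_os: str) -> str:
    """Which rule copy-init-script-file-checked now emits (sketch)."""
    if target_build_unbundled:
        # b/185624993: host_init_verifier needs the HIDL interface map,
        # which unbundled builds lack, so the script is copied unverified.
        return "copy"
    if host_os != "darwin":
        return "verify-then-copy"  # run host_init_verifier before copying
    return "copy"                  # host_init_verifier doesn't exist on darwin
```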
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index dd31999..593ad66 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -62,7 +62,9 @@
 
 boot_zip := $(PRODUCT_OUT)/boot.zip
 bootclasspath_jars := $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
-system_server_jars := $(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),$(PRODUCT_OUT)/system/framework/$(m).jar)
+system_server_jars := \
+  $(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),\
+    $(PRODUCT_OUT)/system/framework/$(call word-colon,2,$(m)).jar)
 
 $(boot_zip): PRIVATE_BOOTCLASSPATH_JARS := $(bootclasspath_jars)
 $(boot_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
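
PRODUCT_SYSTEM_SERVER_JARS entries are now <apex>:<module> pairs (see the product_config.mk change below), so the jar path is derived from the second colon-separated field, which is what $(call word-colon,2,$(m)) extracts. A sketch of the same transformation in Python, with hypothetical values:

```python
def system_server_jar_paths(product_out, system_server_jars):
    # Mirrors $(PRODUCT_OUT)/system/framework/$(call word-colon,2,$(m)).jar
    return ["%s/system/framework/%s.jar" % (product_out, pair.split(":")[1])
            for pair in system_server_jars]

print(system_server_jar_paths(
    "out/target/product/generic",  # hypothetical $(PRODUCT_OUT)
    ["platform:services", "platform:ethernet-service"]))
# ['out/target/product/generic/system/framework/services.jar',
#  'out/target/product/generic/system/framework/ethernet-service.jar']
```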
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 8f0702b..393053d 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -68,7 +68,7 @@
   endif
   LOCAL_MODULE_CLASS := ETC
   include $(BUILD_PREBUILT)
-  $(LOCAL_BUILT_MODULE): $(my_unstripped_installed)
+  $(LOCAL_BUILT_MODULE): | $(my_unstripped_installed)
   # Installing boot.art causes all boot image bits to be installed.
   # Keep this old behavior in case anyone still needs it.
   $(LOCAL_INSTALLED_MODULE): $(my_installed)
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 8f1352f..d925899 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -70,9 +70,10 @@
 # /data. If we don't do this they will need to be extracted which is not favorable for RAM usage
 # or performance. If my_preopt_for_extracted_apk is true, we ignore the only preopt boot image
 # options.
+system_server_jars := $(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),$(call word-colon,2,$(m)))
 ifneq (true,$(my_preopt_for_extracted_apk))
   ifeq (true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY))
-    ifeq ($(filter $(PRODUCT_SYSTEM_SERVER_JARS) $(DEXPREOPT_BOOT_JARS_MODULES),$(LOCAL_MODULE)),)
+    ifeq ($(filter $(system_server_jars) $(DEXPREOPT_BOOT_JARS_MODULES),$(LOCAL_MODULE)),)
       LOCAL_DEX_PREOPT :=
     endif
   endif
@@ -266,7 +267,7 @@
 	  $(PRIVATE_DEXPREOPT_CONFIGS) \
 	  $(PRIVATE_RELAX_CHECK) \
 	  $<
-  $(built_module) : $(my_enforced_uses_libraries)
+  $(LOCAL_BUILT_MODULE) : $(my_enforced_uses_libraries)
 endif
 
 ################################################################################
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 48072b3..a9b3720 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -87,9 +87,14 @@
 ###########################################################
 ## Strip
 ###########################################################
-strip_input := $(symbolic_output)
+strip_input := $(inject_module)
 strip_output := $(LOCAL_BUILT_MODULE)
 
+# Use an order-only dependency to ensure the unstripped file in the symbols
+# directory is copied when the module is built, without forcing the
+# module to be rebuilt when the symbols directory is cleaned by installclean.
+$(strip_output): | $(symbolic_output)
+
 my_strip_module := $(firstword \
   $(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
   $(LOCAL_STRIP_MODULE))
@@ -127,12 +132,16 @@
 ifneq (,$(my_strip_module))
   $(strip_output): PRIVATE_STRIP_ARGS := $(my_strip_args)
   $(strip_output): PRIVATE_TOOLS_PREFIX := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)TOOLS_PREFIX)
-  $(strip_output): $(strip_input) $(SOONG_STRIP_PATH)
+  $(strip_output): $(strip_input) $(SOONG_STRIP_PATH) $(XZ)
 	@echo "$($(PRIVATE_PREFIX)DISPLAY) Strip: $(PRIVATE_MODULE) ($@)"
 	CLANG_BIN=$(LLVM_PREBUILTS_PATH) \
 	CROSS_COMPILE=$(PRIVATE_TOOLS_PREFIX) \
 	XZ=$(XZ) \
+	CREATE_MINIDEBUGINFO=${CREATE_MINIDEBUGINFO} \
 	$(SOONG_STRIP_PATH) -i $< -o $@ -d $@.strip.d $(PRIVATE_STRIP_ARGS)
+  ifneq ($(HOST_OS),darwin)
+    $(strip_output): $(CREATE_MINIDEBUGINFO)
+  endif
   $(call include-depfile,$(strip_output).strip.d,$(strip_output))
 else
   # Don't strip the binary, just copy it.  We can't skip this step
diff --git a/core/product_config.mk b/core/product_config.mk
index eb6f69f..5c85fb8 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -231,12 +231,23 @@
 PRODUCT_AAPT_CONFIG_SP := $(PRODUCT_AAPT_CONFIG)
 PRODUCT_AAPT_CONFIG := $(subst $(space),$(comma),$(PRODUCT_AAPT_CONFIG))
 
+###########################################################
+## Add 'platform:' prefix to jars not in <apex>:<module> format.
+##
+## This ensures that each jar conforms to the ConfiguredJarList format of <apex> and <module>
+## pairs where needed.
+##
+## $(1): a list of jars either in <module> or <apex>:<module> format
+###########################################################
+
+define qualify-platform-jars
+  $(foreach jar,$(1),$(if $(findstring :,$(jar)),,platform:)$(jar))
+endef
+
 # Extra boot jars must be appended at the end after common boot jars.
 PRODUCT_BOOT_JARS += $(PRODUCT_BOOT_JARS_EXTRA)
 
-# Add 'platform:' prefix to unqualified boot jars
-PRODUCT_BOOT_JARS := $(foreach pair,$(PRODUCT_BOOT_JARS), \
-  $(if $(findstring :,$(pair)),,platform:)$(pair))
+PRODUCT_BOOT_JARS := $(call qualify-platform-jars,$(PRODUCT_BOOT_JARS))
 
 # Replaces references to overridden boot jar modules in a boot jars variable.
 # $(1): Name of a boot jars variable with <apex>:<jar> pairs.
@@ -254,6 +265,8 @@
 # The extra system server jars must be appended at the end after common system server jars.
 PRODUCT_SYSTEM_SERVER_JARS += $(PRODUCT_SYSTEM_SERVER_JARS_EXTRA)
 
+PRODUCT_SYSTEM_SERVER_JARS := $(call qualify-platform-jars,$(PRODUCT_SYSTEM_SERVER_JARS))
+
 ifndef PRODUCT_SYSTEM_NAME
   PRODUCT_SYSTEM_NAME := $(PRODUCT_NAME)
 endif
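
The new qualify-platform-jars macro normalizes both boot jars and system server jars to <apex>:<module> pairs. Its behavior, restated as a short Python function for clarity (the jar names are taken from lists elsewhere in this change):

```python
def qualify_platform_jars(jars):
    # Prepend "platform:" to any jar that isn't already an <apex>:<module> pair.
    return [j if ":" in j else "platform:" + j for j in jars]

assert qualify_platform_jars(["ext", "com.android.i18n:core-icu4j"]) == \
    ["platform:ext", "com.android.i18n:core-icu4j"]
```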
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index b7c21b8..82fb413 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -147,7 +147,7 @@
 # install symbol files of JNI libraries
 my_jni_lib_symbols_copy_files := $(foreach f,$(LOCAL_SOONG_JNI_LIBS_SYMBOLS),\
   $(call word-colon,1,$(f)):$(patsubst $(PRODUCT_OUT)/%,$(TARGET_OUT_UNSTRIPPED)/%,$(call word-colon,2,$(f))))
-$(LOCAL_BUILT_MODULE): $(call copy-many-files, $(my_jni_lib_symbols_copy_files))
+$(LOCAL_BUILT_MODULE): | $(call copy-many-files, $(my_jni_lib_symbols_copy_files))
 
 # embedded JNI will already have been handled by soong
 my_embed_jni :=
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index a12ef66..4d7b614 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -170,7 +170,7 @@
       my_unstripped_path := $(patsubst $(TARGET_OUT_UNSTRIPPED)/root/%,$(TARGET_OUT_UNSTRIPPED)/%, $(my_unstripped_path))
       symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
       $(eval $(call copy-one-file,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
-      $(call add-dependency,$(LOCAL_BUILT_MODULE),$(symbolic_output))
+      $(LOCAL_BUILT_MODULE): | $(symbolic_output)
 
       ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
         my_breakpad_path := $(TARGET_OUT_BREAKPAD)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_symbol_path))
diff --git a/core/soong_rust_prebuilt.mk b/core/soong_rust_prebuilt.mk
index c382f6a..26c099b 100644
--- a/core/soong_rust_prebuilt.mk
+++ b/core/soong_rust_prebuilt.mk
@@ -119,7 +119,7 @@
     my_unstripped_path := $(patsubst $(TARGET_OUT_UNSTRIPPED)/root/%,$(TARGET_OUT_UNSTRIPPED)/%, $(my_unstripped_path))
     symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
     $(eval $(call copy-one-file,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
-    $(call add-dependency,$(LOCAL_BUILT_MODULE),$(symbolic_output))
+    $(LOCAL_BUILT_MODULE): | $(symbolic_output)
   endif
 endif
 
diff --git a/envsetup.sh b/envsetup.sh
index b5c729d..8a995c7 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1458,13 +1458,17 @@
 # Verifies that module-info.txt exists, creating it if it doesn't.
 function verifymodinfo() {
     if [ ! "$ANDROID_PRODUCT_OUT" ]; then
-        echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
+        if [ "$QUIET_VERIFYMODINFO" != "true" ] ; then
+            echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
+        fi
         return 1
     fi
 
     if [ ! -f "$ANDROID_PRODUCT_OUT/module-info.json" ]; then
-        echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
-        refreshmod || return 1
+        if [ "$QUIET_VERIFYMODINFO" != "true" ] ; then
+            echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
+        fi
+        return 1
     fi
 }
 
@@ -1473,7 +1477,7 @@
 function allmod() {
     verifymodinfo || return 1
 
-    python -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
+    python3 -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
 }
 
 # Get the path of a specific module in the android tree, as cached in module-info.json.
@@ -1487,7 +1491,7 @@
 
     verifymodinfo || return 1
 
-    local relpath=$(python -c "import json, os
+    local relpath=$(python3 -c "import json, os
 module = '$1'
 module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
 if module not in module_info:
@@ -1513,7 +1517,7 @@
 
     verifymodinfo || return 1
 
-    python -c "import json, os
+    python3 -c "import json, os
 dir = '$1'
 while dir.endswith('/'):
     dir = dir[:-1]
@@ -1558,7 +1562,7 @@
     verifymodinfo || return 1
 
     local relpath
-    relpath=$(python -c "import json, os
+    relpath=$(python3 -c "import json, os
 module = '$1'
 module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
 if module not in module_info:
@@ -1602,7 +1606,7 @@
 
 function _complete_android_module_names() {
     local word=${COMP_WORDS[COMP_CWORD]}
-    COMPREPLY=( $(allmod | grep -E "^$word") )
+    COMPREPLY=( $(QUIET_VERIFYMODINFO=true allmod | grep -E "^$word") )
 }
 
 # Print colored exit condition
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index a2150ad..c577870 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -41,6 +41,10 @@
 BOARD_AVB_SYSTEM_ALGORITHM := SHA256_RSA2048
 BOARD_AVB_SYSTEM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
 BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION := 1
+
+# Using sha256 for dm-verity partitions. b/156162446
+BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+
 ifdef BUILDING_GSI
 # super.img spec for GSI targets
 BOARD_SUPER_PARTITION_SIZE := 3229614080
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 15c311c..1229327 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -98,9 +98,9 @@
 BOARD_USES_RECOVERY_AS_BOOT :=
 TARGET_NO_KERNEL := false
 BOARD_USES_GENERIC_KERNEL_IMAGE := true
+# TODO(b/187432172): Add 5.10-android12-unstable
 BOARD_KERNEL_MODULE_INTERFACE_VERSIONS := \
     5.4-android12-0 \
-    5.10-android12-0 \
 
 # Copy boot image in $OUT to target files. This is defined for targets where
 # the installed GKI APEXes are built from source.
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index bb17dda..0e652c1 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -18,26 +18,47 @@
   $(error ART_APEX_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
 endif
 
-# The order matters for runtime class lookup performance.
+# Order of the jars on BOOTCLASSPATH follows:
+# 1. ART APEX jars
+# 2. System jars
+# 3. System_ext jars
+# 4. Non-updatable APEX jars
+# 5. Updatable APEX jars
+#
+# ART APEX jars (1) are defined in ART_APEX_JARS. System, system_ext, and non-updatable boot jars
+# are defined below in PRODUCT_BOOT_JARS. All updatable APEX boot jars are part of
+# PRODUCT_UPDATABLE_BOOT_JARS.
+#
+# The actual ordering at runtime, matching the above, is determined by the derive_classpath service.
+# See packages/modules/SdkExtensions/README.md for more details.
+
+# The order of PRODUCT_BOOT_JARS matters for runtime class lookup performance.
 PRODUCT_BOOT_JARS := \
-    $(ART_APEX_JARS) \
+    $(ART_APEX_JARS)
+
+# /system and /system_ext boot jars.
+PRODUCT_BOOT_JARS += \
     framework-minus-apex \
     ext \
-    com.android.i18n:core-icu4j \
     telephony-common \
     voip-common \
     ims-common
 
+# Non-updatable APEX jars. Keep the list sorted.
+PRODUCT_BOOT_JARS += \
+    com.android.i18n:core-icu4j
+
+# Updatable APEX jars. Keep the list sorted by module names and then library names.
 PRODUCT_UPDATABLE_BOOT_JARS := \
     com.android.conscrypt:conscrypt \
+    com.android.ipsec:android.net.ipsec.ike \
     com.android.media:updatable-media \
     com.android.mediaprovider:framework-mediaprovider \
     com.android.os.statsd:framework-statsd \
     com.android.permission:framework-permission \
     com.android.sdkext:framework-sdkextensions \
-    com.android.wifi:framework-wifi \
     com.android.tethering:framework-tethering \
-    com.android.ipsec:android.net.ipsec.ike
+    com.android.wifi:framework-wifi
 
 # Minimal configuration for running dex2oat (default argument values).
 # PRODUCT_USES_DEFAULT_ART_CONFIG must be true to enable boot image compilation.
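
Restating the documented grouping as data makes the ordering easier to see. The jar names below come from the lists in this change, except ART_APEX_JARS, which is shown as a representative subset; as the comment notes, the order actually used at runtime comes from the derive_classpath service:

```python
ART_APEX_JARS = ["com.android.art:core-oj"]  # representative subset only

PRODUCT_BOOT_JARS = (
    ART_APEX_JARS                                        # 1. ART APEX jars
    + ["framework-minus-apex", "ext", "telephony-common",
       "voip-common", "ims-common"]                      # 2-3. system/system_ext jars
    + ["com.android.i18n:core-icu4j"]                    # 4. non-updatable APEX jars
)

PRODUCT_UPDATABLE_BOOT_JARS = [                          # 5. updatable APEX jars,
    "com.android.conscrypt:conscrypt",                   #    sorted by module name
    "com.android.ipsec:android.net.ipsec.ike",           #    then library name
    "com.android.media:updatable-media",
    "com.android.mediaprovider:framework-mediaprovider",
    "com.android.os.statsd:framework-statsd",
    "com.android.permission:framework-permission",
    "com.android.sdkext:framework-sdkextensions",
    "com.android.tethering:framework-tethering",
    "com.android.wifi:framework-wifi",
]
```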
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 25fa68b..82af45f 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -42,6 +42,9 @@
 # Enable dynamic partition size
 PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
 
+# Enable various debugfs restrictions
+PRODUCT_SET_DEBUGFS_RESTRICTIONS := true
+
 # GSI targets should install "unflattened" APEXes in /system
 TARGET_FLATTEN_APEX := false
 
diff --git a/target/product/updatable_apex.mk b/target/product/updatable_apex.mk
index d606e00..c8dc8b0 100644
--- a/target/product/updatable_apex.mk
+++ b/target/product/updatable_apex.mk
@@ -22,9 +22,4 @@
   PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_prebuilt
   PRODUCT_VENDOR_PROPERTIES := ro.apex.updatable=true
   TARGET_FLATTEN_APEX := false
-  # Use compressed apexes in pre-installed partitions.
-  # Note: this doesn't mean that all pre-installed apexes will be compressed.
-  #  Whether an apex is compressed or not is controlled at apex Soong module
-  #  via compresible property.
-  PRODUCT_COMPRESSED_APEX := true
 endif
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 00bbb21..2aceb78 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -259,6 +259,7 @@
       block_list=block_list)
   return img.name
 
+
 def AddOdmDlkm(output_zip):
   """Turn the contents of OdmDlkm into an odm_dlkm image and store it in output_zip."""
 
@@ -310,6 +311,7 @@
   img.Write()
   return img.name
 
+
 def AddPvmfw(output_zip):
   """Adds the pvmfw image.
 
@@ -345,6 +347,7 @@
   img.Write()
   return img.name
 
+
 def AddCustomImages(output_zip, partition_name):
   """Adds and signs custom images in IMAGES/.
 
@@ -359,8 +362,6 @@
     AssertionError: If image can't be found.
   """
 
-  partition_size = OPTIONS.info_dict.get(
-      "avb_{}_partition_size".format(partition_name))
   key_path = OPTIONS.info_dict.get("avb_{}_key_path".format(partition_name))
   algorithm = OPTIONS.info_dict.get("avb_{}_algorithm".format(partition_name))
   extra_args = OPTIONS.info_dict.get(
@@ -955,6 +956,20 @@
     with open(pack_radioimages_txt) as f:
       AddPackRadioImages(output_zip, f.readlines())
 
+  # Calculate the vbmeta digest and put the result into META/
+  boot_images = OPTIONS.info_dict.get("boot_images")
+  # Disable the digest calculation if the target_file is used as a container
+  # for boot images.
+  boot_container = boot_images and len(boot_images.split()) >= 2
+  if (OPTIONS.info_dict.get("avb_enable") == "true" and not boot_container and
+      OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true"):
+    avbtool = OPTIONS.info_dict["avb_avbtool"]
+    digest = verity_utils.CalculateVbmetaDigest(OPTIONS.input_tmp, avbtool)
+    vbmeta_digest_txt = os.path.join(OPTIONS.input_tmp, "META",
+                                     "vbmeta_digest.txt")
+    with open(vbmeta_digest_txt, 'w') as f:
+      f.write(digest)
+
   if output_zip:
     common.ZipClose(output_zip)
     if OPTIONS.replace_updated_files_list:
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 301d0da..2492da9 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -386,13 +386,14 @@
             in_dir, du_str,
             int(prop_dict.get("partition_reserved_size", 0)),
             int(prop_dict.get("partition_reserved_size", 0)) // BYTES_IN_MB))
-    print(
-        "The max image size for filesystem files is {} bytes ({} MB), out of a "
-        "total partition size of {} bytes ({} MB).".format(
-            int(prop_dict["image_size"]),
-            int(prop_dict["image_size"]) // BYTES_IN_MB,
-            int(prop_dict["partition_size"]),
-            int(prop_dict["partition_size"]) // BYTES_IN_MB))
+    if ("image_size" in prop_dict and "partition_size" in prop_dict):
+      print(
+          "The max image size for filesystem files is {} bytes ({} MB), "
+          "out of a total partition size of {} bytes ({} MB).".format(
+              int(prop_dict["image_size"]),
+              int(prop_dict["image_size"]) // BYTES_IN_MB,
+              int(prop_dict["partition_size"]),
+              int(prop_dict["partition_size"]) // BYTES_IN_MB))
     raise
 
   if run_e2fsck and prop_dict.get("skip_fsck") != "true":
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 83425cc..b397fd0 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -138,6 +138,7 @@
 # existing search paths.
 RAMDISK_BUILD_PROP_REL_PATHS = ['system/etc/ramdisk/build.prop']
 
+
 class ErrorCode(object):
   """Define error_codes for failures that happen during the actual
   update package installation.
@@ -226,6 +227,7 @@
 def SetHostToolLocation(tool_name, location):
   OPTIONS.host_tools[tool_name] = location
 
+
 def FindHostToolPath(tool_name):
   """Finds the path to the host tool.
 
@@ -246,6 +248,7 @@
 
   return tool_name
 
+
 def Run(args, verbose=None, **kwargs):
   """Creates and returns a subprocess.Popen object.
 
@@ -434,6 +437,13 @@
     return self._fingerprint
 
   @property
+  def is_vabc(self):
+    vendor_prop = self.info_dict.get("vendor.build.prop")
+    vabc_enabled = vendor_prop and \
+        vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true"
+    return vabc_enabled
+
+  @property
   def oem_props(self):
     return self._oem_props
 
@@ -461,7 +471,7 @@
     """Returns the inquired build property for the provided partition."""
 
     # Boot image uses ro.[product.]bootimage instead of boot.
-    prop_partition =  "bootimage" if partition == "boot" else partition
+    prop_partition = "bootimage" if partition == "boot" else partition
 
     # If provided a partition for this property, only look within that
     # partition's build.prop.
@@ -652,10 +662,12 @@
       raise KeyError(fn)
     return file
 
+
 class RamdiskFormat(object):
   LZ4 = 1
   GZ = 2
 
+
 def _GetRamdiskFormat(info_dict):
   if info_dict.get('lz4_ramdisks') == 'true':
     ramdisk_format = RamdiskFormat.LZ4
@@ -663,6 +675,7 @@
     ramdisk_format = RamdiskFormat.GZ
   return ramdisk_format
 
+
 def LoadInfoDict(input_file, repacking=False):
   """Loads the key/value pairs from the given input target_files.
 
@@ -781,7 +794,8 @@
     for partition in PARTITIONS_WITH_BUILD_PROP:
       fingerprint = build_info.GetPartitionFingerprint(partition)
       if fingerprint:
-        d["avb_{}_salt".format(partition)] = sha256(fingerprint.encode()).hexdigest()
+        d["avb_{}_salt".format(partition)] = sha256(
+            fingerprint.encode()).hexdigest()
   try:
     d["ab_partitions"] = read_helper("META/ab_partitions.txt").split("\n")
   except KeyError:
@@ -789,7 +803,6 @@
   return d
 
 
-
 def LoadListFromFile(file_path):
   with open(file_path) as f:
     return f.read().splitlines()
@@ -859,7 +872,8 @@
     """Loads the build.prop file and builds the attributes."""
 
     if name == "boot":
-      data = PartitionBuildProps._ReadBootPropFile(input_file, ramdisk_format=ramdisk_format)
+      data = PartitionBuildProps._ReadBootPropFile(
+          input_file, ramdisk_format=ramdisk_format)
     else:
       data = PartitionBuildProps._ReadPartitionPropFile(input_file, name)
 
@@ -1106,7 +1120,7 @@
     return " ".join(sorted(combined))
 
   if (framework_dict.get("use_dynamic_partitions") !=
-      "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
+          "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
     raise ValueError("Both dictionaries must have use_dynamic_partitions=true")
 
   merged_dict = {"use_dynamic_partitions": "true"}
@@ -1371,7 +1385,8 @@
 
   # Checks key_path exists, before appending --gki_signing_* args.
   if not os.path.exists(key_path):
-    raise ExternalError('gki_signing_key_path: "{}" not found'.format(key_path))
+    raise ExternalError(
+        'gki_signing_key_path: "{}" not found'.format(key_path))
 
   algorithm = OPTIONS.info_dict.get("gki_signing_algorithm")
   if key_path and algorithm:
@@ -1588,7 +1603,7 @@
   RunAndCheckOutput(cmd)
 
   if (info_dict.get("boot_signer") == "true" and
-      info_dict.get("verity_key")):
+          info_dict.get("verity_key")):
     # Hard-code the path as "/boot" for two-step special recovery image (which
     # will be loaded into /boot during the two-step OTA).
     if two_step_image:
@@ -1753,14 +1768,17 @@
   if os.access(fn, os.F_OK):
     ramdisk_fragments = shlex.split(open(fn).read().rstrip("\n"))
     for ramdisk_fragment in ramdisk_fragments:
-      fn = os.path.join(sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment, "mkbootimg_args")
+      fn = os.path.join(sourcedir, "RAMDISK_FRAGMENTS",
+                        ramdisk_fragment, "mkbootimg_args")
       cmd.extend(shlex.split(open(fn).read().rstrip("\n")))
-      fn = os.path.join(sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment, "prebuilt_ramdisk")
+      fn = os.path.join(sourcedir, "RAMDISK_FRAGMENTS",
+                        ramdisk_fragment, "prebuilt_ramdisk")
       # Use prebuilt image if found, else create ramdisk from supplied files.
       if os.access(fn, os.F_OK):
         ramdisk_fragment_pathname = fn
       else:
-        ramdisk_fragment_root = os.path.join(sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment)
+        ramdisk_fragment_root = os.path.join(
+            sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment)
         ramdisk_fragment_img = _MakeRamdisk(ramdisk_fragment_root,
                                             ramdisk_format=ramdisk_format)
         ramdisk_fragment_imgs.append(ramdisk_fragment_img)
@@ -3533,7 +3551,7 @@
 
     for g in tgt_groups:
       for p in shlex.split(info_dict.get(
-          "super_%s_partition_list" % g, "").strip()):
+              "super_%s_partition_list" % g, "").strip()):
         assert p in self._partition_updates, \
             "{} is in target super_{}_partition_list but no BlockDifference " \
             "object is provided.".format(p, g)
@@ -3541,7 +3559,7 @@
 
     for g in src_groups:
       for p in shlex.split(source_info_dict.get(
-          "super_%s_partition_list" % g, "").strip()):
+              "super_%s_partition_list" % g, "").strip()):
         assert p in self._partition_updates, \
             "{} is in source super_{}_partition_list but no BlockDifference " \
             "object is provided.".format(p, g)
@@ -3650,7 +3668,7 @@
       if u.src_size is not None and u.tgt_size is None:
         append('remove_group %s' % g)
       if (u.src_size is not None and u.tgt_size is not None and
-          u.src_size > u.tgt_size):
+              u.src_size > u.tgt_size):
         comment('Shrink group %s from %d to %d' % (g, u.src_size, u.tgt_size))
         append('resize_group %s %d' % (g, u.tgt_size))
 
@@ -3659,7 +3677,7 @@
         comment('Add group %s with maximum size %d' % (g, u.tgt_size))
         append('add_group %s %d' % (g, u.tgt_size))
       if (u.src_size is not None and u.tgt_size is not None and
-          u.src_size < u.tgt_size):
+              u.src_size < u.tgt_size):
         comment('Grow group %s from %d to %d' % (g, u.src_size, u.tgt_size))
         append('resize_group %s %d' % (g, u.tgt_size))
 
@@ -3693,7 +3711,8 @@
   """
   tmp_dir = MakeTempDir('boot_', suffix='.img')
   try:
-    RunAndCheckOutput(['unpack_bootimg', '--boot_img', boot_img, '--out', tmp_dir])
+    RunAndCheckOutput(['unpack_bootimg', '--boot_img',
+                      boot_img, '--out', tmp_dir])
     ramdisk = os.path.join(tmp_dir, 'ramdisk')
     if not os.path.isfile(ramdisk):
       logger.warning('Unable to get boot image timestamp: no ramdisk in boot')
@@ -3704,7 +3723,8 @@
     elif ramdisk_format == RamdiskFormat.GZ:
       with open(ramdisk, 'rb') as input_stream:
         with open(uncompressed_ramdisk, 'wb') as output_stream:
-          p2 = Run(['minigzip', '-d'], stdin=input_stream.fileno(), stdout=output_stream.fileno())
+          p2 = Run(['minigzip', '-d'], stdin=input_stream.fileno(),
+                   stdout=output_stream.fileno())
           p2.wait()
     else:
       logger.error('Only support lz4 or minigzip ramdisk format.')
@@ -3715,13 +3735,14 @@
     # Use "toybox cpio" instead of "cpio" because the latter invokes cpio from
     # the host environment.
     RunAndCheckOutput(['toybox', 'cpio', '-F', abs_uncompressed_ramdisk, '-i'],
-               cwd=extracted_ramdisk)
+                      cwd=extracted_ramdisk)
 
     for search_path in RAMDISK_BUILD_PROP_REL_PATHS:
       prop_file = os.path.join(extracted_ramdisk, search_path)
       if os.path.isfile(prop_file):
         return prop_file
-      logger.warning('Unable to get boot image timestamp: no %s in ramdisk', search_path)
+      logger.warning(
+          'Unable to get boot image timestamp: no %s in ramdisk', search_path)
 
     return None
 
@@ -3754,7 +3775,8 @@
     timestamp = props.GetProp('ro.bootimage.build.date.utc')
     if timestamp:
       return int(timestamp)
-    logger.warning('Unable to get boot image timestamp: ro.bootimage.build.date.utc is undefined')
+    logger.warning(
+        'Unable to get boot image timestamp: ro.bootimage.build.date.utc is undefined')
     return None
 
   except ExternalError as e:
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 02b2b4d..61c8212 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -215,6 +215,12 @@
   --disable_vabc
       Disable Virtual A/B Compression, for builds that have compression enabled
       by default.
+
+  --vabc_downgrade
+      Don't disable Virtual A/B Compression for downgrading OTAs.
+      For VABC downgrades, we must finish merging before doing data wipe, and
+      since a data wipe is required for a downgrade OTA, this might cause a
+      long wait time in recovery.
 """
 
 from __future__ import print_function
@@ -278,6 +284,7 @@
 OPTIONS.custom_images = {}
 OPTIONS.disable_vabc = False
 OPTIONS.spl_downgrade = False
+OPTIONS.vabc_downgrade = False
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -1051,15 +1058,18 @@
         "META/ab_partitions.txt is required for ab_update."
     target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
     source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
-    vendor_prop = source_info.info_dict.get("vendor.build.prop")
-    vabc_used = vendor_prop and \
-        vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true" and \
-        not OPTIONS.disable_vabc
-    if vabc_used:
+    # If the source build supports VABC, delta_generator/update_engine will
+    # attempt to use VABC. This is dangerous, as the target build won't have
+    # snapuserd to serve I/O requests when the device boots. Therefore,
+    # disable VABC if the source build doesn't support it.
+    if not source_info.is_vabc or not target_info.is_vabc:
+      OPTIONS.disable_vabc = True
+    if not OPTIONS.disable_vabc:
       # TODO(zhangkelvin) Remove this once FEC on VABC is supported
       logger.info("Virtual AB Compression enabled, disabling FEC")
       OPTIONS.disable_fec_computation = True
       OPTIONS.disable_verity_computation = True
+
   else:
     assert "ab_partitions" in OPTIONS.info_dict, \
         "META/ab_partitions.txt is required for ab_update."
@@ -1281,6 +1291,8 @@
     elif o == "--spl_downgrade":
       OPTIONS.spl_downgrade = True
       OPTIONS.wipe_user_data = True
+    elif o == "--vabc_downgrade":
+      OPTIONS.vabc_downgrade = True
     else:
       return False
     return True
@@ -1323,7 +1335,8 @@
                                  "partial=",
                                  "custom_image=",
                                  "disable_vabc",
-                                 "spl_downgrade"
+                                 "spl_downgrade",
+                                 "vabc_downgrade",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
@@ -1344,7 +1357,14 @@
   else:
     OPTIONS.info_dict = ParseInfoDict(args[0])
 
-  if OPTIONS.downgrade:
+  if OPTIONS.wipe_user_data:
+    if not OPTIONS.vabc_downgrade:
+      logger.info("Detected downgrade/datawipe OTA."
+                  "When wiping userdata, VABC OTA makes the user "
+                  "wait in recovery mode for merge to finish. Disable VABC by "
+                  "default. If you really want to do VABC downgrade, pass "
+                  "--vabc_downgrade")
+      OPTIONS.disable_vabc = True
     # We should only allow downgrading incrementals (as opposed to full).
     # Otherwise the device may go back from arbitrary build with this full
     # OTA package.
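
The OTA script now has three ways to end up with VABC disabled: the explicit --disable_vabc flag, a downgrade/datawipe OTA without --vabc_downgrade, and a source or target build that doesn't support VABC. A condensed sketch of the combined decision (hypothetical helper; the real logic is spread across the two hunks above):

```python
def should_disable_vabc(source_is_vabc, target_is_vabc,
                        wipe_user_data, vabc_downgrade, disable_vabc):
    if disable_vabc:
        return True  # explicit --disable_vabc
    if wipe_user_data and not vabc_downgrade:
        return True  # downgrade/datawipe OTA without --vabc_downgrade
    # A source without VABC can't generate a VABC delta; a target without
    # VABC has no snapuserd to serve I/O requests after reboot.
    return not (source_is_vabc and target_is_vabc)
```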
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 313d1e6..f0110ea 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -629,6 +629,10 @@
     elif OPTIONS.replace_verity_keyid and filename == "BOOT/cmdline":
       pass
 
+    # Skip the vbmeta digest as we will recalculate it.
+    elif filename == "META/vbmeta_digest.txt":
+      pass
+
     # Skip the care_map as we will regenerate the system/vendor images.
     elif filename in ["META/care_map.pb", "META/care_map.txt"]:
       pass
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index a850390..e2a022a 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -27,7 +27,8 @@
 from test_utils import (
     get_testdata_dir, ReleaseToolsTestCase, SkipIfExternalToolsUnavailable)
 from verity_utils import (
-    CreateHashtreeInfoGenerator, CreateVerityImageBuilder, HashtreeInfo,
+    CalculateVbmetaDigest, CreateHashtreeInfoGenerator,
+    CreateVerityImageBuilder, HashtreeInfo,
     VerifiedBootVersion1HashtreeInfoGenerator)
 
 BLOCK_SIZE = common.BLOCK_SIZE
@@ -388,3 +389,31 @@
       self.assertLess(
           _SizeCalculator(min_partition_size - BLOCK_SIZE),
           image_size)
+
+  @SkipIfExternalToolsUnavailable()
+  def test_CalculateVbmetaDigest(self):
+    prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
+    verity_image_builder = CreateVerityImageBuilder(prop_dict)
+    self.assertEqual(2, verity_image_builder.version)
+
+    input_dir = common.MakeTempDir()
+    image_dir = common.MakeTempDir()
+    os.mkdir(os.path.join(image_dir, 'IMAGES'))
+    system_image = os.path.join(image_dir, 'IMAGES', 'system.img')
+    system_image_size = verity_image_builder.CalculateMaxImageSize()
+    cmd = ['mkuserimg_mke2fs', input_dir, system_image, 'ext4', '/system',
+           str(system_image_size), '-j', '0', '-s']
+    common.RunAndCheckOutput(cmd)
+    verity_image_builder.Build(system_image)
+
+    # Additionally make vbmeta image
+    vbmeta_image = os.path.join(image_dir, 'IMAGES', 'vbmeta.img')
+    cmd = ['avbtool', 'make_vbmeta_image', '--include_descriptors_from_image',
+           system_image, '--output', vbmeta_image]
+    common.RunAndCheckOutput(cmd)
+
+    # Verify the verity metadata.
+    cmd = ['avbtool', 'verify_image', '--image', vbmeta_image]
+    common.RunAndCheckOutput(cmd)
+    digest = CalculateVbmetaDigest(image_dir, 'avbtool')
+    self.assertIsNotNone(digest)
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 8faa2d1..a08ddbe 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -26,6 +26,7 @@
 import os.path
 import shlex
 import struct
+import sys
 
 import common
 import sparse_img
@@ -739,6 +740,30 @@
   return int(output.split()[0]) * 1024
 
 
+def CalculateVbmetaDigest(extracted_dir, avbtool):
+  """Calculates the vbmeta digest of the images in the extracted target_file"""
+
+  images_dir = common.MakeTempDir()
+  for name in ("PREBUILT_IMAGES", "RADIO", "IMAGES"):
+    path = os.path.join(extracted_dir, name)
+    if not os.path.exists(path):
+      continue
+
+    # Create symlinks for image files under PREBUILT_IMAGES, RADIO, and IMAGES,
+    # and put them into one directory.
+    for filename in os.listdir(path):
+      if not filename.endswith(".img"):
+        continue
+      symlink_path = os.path.join(images_dir, filename)
+      # Files in later directories overwrite the existing links.
+      common.RunAndCheckOutput(
+        ['ln', '-sf', os.path.join(path, filename), symlink_path])
+
+  cmd = [avbtool, "calculate_vbmeta_digest", "--image",
+         os.path.join(images_dir, 'vbmeta.img')]
+  return common.RunAndCheckOutput(cmd)
+
+
 def main(argv):
   if len(argv) != 2:
     print(__doc__)
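
Hypothetical usage of the new helper, assuming an already extracted target-files directory and an avbtool binary on PATH (or an absolute path to one); the return value is the raw avbtool output:

```python
import verity_utils

digest = verity_utils.CalculateVbmetaDigest(
    "/tmp/extracted_target_files",  # hypothetical extraction directory
    "avbtool")
print(digest.strip())  # digest covering vbmeta.img and its chained partitions
```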
diff --git a/tools/warn.py b/tools/warn.py
index 5f796f5..97f54f9 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -27,6 +27,7 @@
 
 
 def main():
+  """Old main() calls warn.warn."""
   os.environ['PYTHONPATH'] = os.path.dirname(os.path.abspath(__file__))
   subprocess.check_call(['/usr/bin/python3', '-m', 'warn.warn'] + sys.argv[1:])
 
diff --git a/tools/warn/.pylintrc b/tools/warn/.pylintrc
new file mode 100644
index 0000000..6aeaed6
--- /dev/null
+++ b/tools/warn/.pylintrc
@@ -0,0 +1,4 @@
+[FORMAT]
+
+# Two spaces for each indentation level.
+indent-string='  '
diff --git a/tools/warn/android_project_list.py b/tools/warn/android_project_list.py
index 82c0fbd..8383dc0 100644
--- a/tools/warn/android_project_list.py
+++ b/tools/warn/android_project_list.py
@@ -17,6 +17,7 @@
 
 
 def create_pattern(name, pattern=None):
+  """Return a tuple of name and warn patten."""
   if pattern is not None:
     return [name, '(^|.*/)' + pattern + '/.*: warning:']
   return [name, '(^|.*/)' + name + '/.*: warning:']
diff --git a/tools/warn/chrome_project_list.py b/tools/warn/chrome_project_list.py
index 6096522..d8b2179 100644
--- a/tools/warn/chrome_project_list.py
+++ b/tools/warn/chrome_project_list.py
@@ -8,6 +8,7 @@
 
 
 def create_pattern(pattern):
+  """Return a tuple of name and warn patten."""
   return [pattern, '(^|.*/)' + pattern + '/.*: warning:']
 
 
diff --git a/tools/warn/cpp_warn_patterns.py b/tools/warn/cpp_warn_patterns.py
index e8783bc..2fa9916 100644
--- a/tools/warn/cpp_warn_patterns.py
+++ b/tools/warn/cpp_warn_patterns.py
@@ -15,10 +15,12 @@
 
 """Warning patterns for C/C++ compiler, but not clang-tidy."""
 
+# No need for docstrings on trivial small functions.
+# pylint:disable=missing-function-docstring
+
 import re
 
 # pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
 from .severity import Severity
 
 
@@ -56,7 +58,8 @@
 
 
 warn_patterns = [
-    # pylint:disable=line-too-long,g-inconsistent-quotes
+    # pylint does not recognize g-inconsistent-quotes
+    # pylint:disable=line-too-long,bad-option-value,g-inconsistent-quotes
     medium('Implicit function declaration',
            [r".*: warning: implicit declaration of function .+",
             r".*: warning: implicitly declaring library function"]),
@@ -300,7 +303,7 @@
     medium('Missing noreturn',
            [r".*: warning: function '.*' could be declared with attribute 'noreturn'"]),
     medium('User warning',
-           [r".*: warning: #warning "".+"""]),
+           [r".*: warning: #warning \".+\""]),
     medium('Vexing parsing problem',
            [r".*: warning: empty parentheses interpreted as a function declaration"]),
     medium('Dereferencing void*',
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index be71b55..ac5d4b7 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -15,6 +15,9 @@
 
 """Emit warning messages to html or csv files."""
 
+# Many functions in this module have too many arguments to be refactored.
+# pylint:disable=too-many-arguments,missing-function-docstring
+
 # To emit html page of warning messages:
 #   flags: --byproject, --url, --separator
 # Old stuff for static html components:
@@ -57,11 +60,10 @@
 import sys
 
 # pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
 from .severity import Severity
 
 
-html_head_scripts = """\
+HTML_HEAD_SCRIPTS = """\
   <script type="text/javascript">
   function expand(id) {
     var e = document.getElementById(id);
@@ -113,7 +115,7 @@
 def dump_html_prologue(title, writer, warn_patterns, project_names):
   writer('<html>\n<head>')
   writer('<title>' + title + '</title>')
-  writer(html_head_scripts)
+  writer(HTML_HEAD_SCRIPTS)
   emit_stats_by_project(writer, warn_patterns, project_names)
   writer('</head>\n<body>')
   writer(html_big(title))
@@ -142,18 +144,16 @@
     2D warnings array where warnings[p][s] is # of warnings in project name p of
     severity level s
   """
-  # pylint:disable=g-complex-comprehension
   warnings = {p: {s.value: 0 for s in Severity.levels} for p in project_names}
-  for i in warn_patterns:
-    s = i['severity'].value
-    for p in i['projects']:
-      warnings[p][s] += i['projects'][p]
+  for pattern in warn_patterns:
+    value = pattern['severity'].value
+    for project in pattern['projects']:
+      warnings[project][value] += pattern['projects'][project]
   return warnings
 
 
 def get_total_by_project(warnings, project_names):
   """Returns dict, project as key and # warnings for that project as value."""
-  # pylint:disable=g-complex-comprehension
   return {
       p: sum(warnings[p][s.value] for s in Severity.levels)
       for p in project_names
@@ -162,7 +162,6 @@
 
 def get_total_by_severity(warnings, project_names):
   """Returns dict, severity as key and # warnings of that severity as value."""
-  # pylint:disable=g-complex-comprehension
   return {
       s.value: sum(warnings[p][s.value] for p in project_names)
       for s in Severity.levels
@@ -173,11 +172,11 @@
   """Returns list of HTML-formatted content for severity stats."""
 
   stats_header = ['Project']
-  for s in Severity.levels:
-    if total_by_severity[s.value]:
+  for severity in Severity.levels:
+    if total_by_severity[severity.value]:
       stats_header.append(
           '<span style=\'background-color:{}\'>{}</span>'.format(
-              s.color, s.column_header))
+              severity.color, severity.column_header))
   stats_header.append('TOTAL')
   return stats_header
 
@@ -200,15 +199,15 @@
 
   total_all_projects = 0
   stats_rows = []
-  for p in project_names:
-    if total_by_project[p]:
-      one_row = [p]
-      for s in Severity.levels:
-        if total_by_severity[s.value]:
-          one_row.append(warnings[p][s.value])
-      one_row.append(total_by_project[p])
+  for p_name in project_names:
+    if total_by_project[p_name]:
+      one_row = [p_name]
+      for severity in Severity.levels:
+        if total_by_severity[severity.value]:
+          one_row.append(warnings[p_name][severity.value])
+      one_row.append(total_by_project[p_name])
       stats_rows.append(one_row)
-      total_all_projects += total_by_project[p]
+      total_all_projects += total_by_project[p_name]
   return total_all_projects, stats_rows
 
 
@@ -226,16 +225,16 @@
 
   total_all_severities = 0
   one_row = ['<b>TOTAL</b>']
-  for s in Severity.levels:
-    if total_by_severity[s.value]:
-      one_row.append(total_by_severity[s.value])
-      total_all_severities += total_by_severity[s.value]
+  for severity in Severity.levels:
+    if total_by_severity[severity.value]:
+      one_row.append(total_by_severity[severity.value])
+      total_all_severities += total_by_severity[severity.value]
   one_row.append(total_all_projects)
   stats_rows.append(one_row)
   writer('<script>')
   emit_const_string_array('StatsHeader', stats_header, writer)
   emit_const_object_array('StatsRows', stats_rows, writer)
-  writer(draw_table_javascript)
+  writer(DRAW_TABLE_JAVASCRIPT)
   writer('</script>')
 
 
@@ -246,8 +245,8 @@
   total_by_project = get_total_by_project(warnings, project_names)
   total_by_severity = get_total_by_severity(warnings, project_names)
   stats_header = emit_table_header(total_by_severity)
-  total_all_projects, stats_rows = \
-    emit_row_counts_per_project(warnings, total_by_project, total_by_severity, project_names)
+  total_all_projects, stats_rows = emit_row_counts_per_project(
+      warnings, total_by_project, total_by_severity, project_names)
   emit_row_counts_per_severity(total_by_severity, stats_header, stats_rows,
                                total_all_projects, writer)
 
@@ -287,6 +286,7 @@
 #     id for each warning pattern
 #     sort by project, severity, warn_id, warning_message
 def emit_buttons(writer):
+  """Write the button elements in HTML."""
   writer('<button class="button" onclick="expandCollapse(1);">'
          'Expand all warnings</button>\n'
          '<button class="button" onclick="expandCollapse(0);">'
@@ -327,8 +327,8 @@
   for text in fixed_patterns:
     cur_row_class = 1 - cur_row_class
     # remove last '\n'
-    t = text[:-1] if text[-1] == '\n' else text
-    writer('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
+    out_text = text[:-1] if text[-1] == '\n' else text
+    writer('<tr><td class="c' + str(cur_row_class) + '">' + out_text + '</td></tr>')
   writer('</table></div>')
   writer('</blockquote>')
 
@@ -338,10 +338,10 @@
   total = 0
   for pattern in warn_patterns:
     if pattern['severity'] == sev and pattern['members']:
-      n = len(pattern['members'])
-      total += n
+      num_members = len(pattern['members'])
+      total += num_members
       warning = kind + ': ' + (pattern['description'] or '?')
-      csvwriter.writerow([n, '', warning])
+      csvwriter.writerow([num_members, '', warning])
       # print number of warnings for each project, ordered by project name
       projects = sorted(pattern['projects'].keys())
       for project in projects:
@@ -354,8 +354,8 @@
   """Dump number of warnings in CSV format to writer."""
   sort_warnings(warn_patterns)
   total = 0
-  for s in Severity.levels:
-    total += write_severity(csvwriter, s, s.column_header, warn_patterns)
+  for severity in Severity.levels:
+    total += write_severity(csvwriter, severity, severity.column_header,
+                            warn_patterns)
   csvwriter.writerow([total, '', 'All warnings'])
 
 
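
For reference, the dump_csv output format on made-up rows, with io.StringIO standing in for the output file:

    import csv
    import io

    buf = io.StringIO()
    csvwriter = csv.writer(buf, lineterminator='\n')
    csvwriter.writerow([3, '', 'Medium: Implicit function declaration'])
    csvwriter.writerow([3, '', 'All warnings'])
    assert buf.getvalue() == ('3,,Medium: Implicit function declaration\n'
                              '3,,All warnings\n')
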
@@ -378,41 +378,41 @@
     csvwriter.writerow(output)
 
 
-# Return s with escaped backslash and quotation characters.
-def escape_string(s):
-  return s.replace('\\', '\\\\').replace('"', '\\"')
+# Return line with escaped backslash and quotation characters.
+def escape_string(line):
+  return line.replace('\\', '\\\\').replace('"', '\\"')
 
 
-# Return s without trailing '\n' and escape the quotation characters.
-def strip_escape_string(s):
-  if not s:
-    return s
-  s = s[:-1] if s[-1] == '\n' else s
-  return escape_string(s)
+# Return line without the trailing '\n' and with quotation characters escaped.
+def strip_escape_string(line):
+  if not line:
+    return line
+  line = line[:-1] if line[-1] == '\n' else line
+  return escape_string(line)
 
 
 def emit_warning_array(name, writer, warn_patterns):
   writer('var warning_{} = ['.format(name))
-  for w in warn_patterns:
+  for pattern in warn_patterns:
     if name == 'severity':
-      writer('{},'.format(w[name].value))
+      writer('{},'.format(pattern[name].value))
     else:
-      writer('{},'.format(w[name]))
+      writer('{},'.format(pattern[name]))
   writer('];')
 
 
 def emit_warning_arrays(writer, warn_patterns):
   emit_warning_array('severity', writer, warn_patterns)
   writer('var warning_description = [')
-  for w in warn_patterns:
-    if w['members']:
-      writer('"{}",'.format(escape_string(w['description'])))
+  for pattern in warn_patterns:
+    if pattern['members']:
+      writer('"{}",'.format(escape_string(pattern['description'])))
     else:
       writer('"",')  # no such warning
   writer('];')
 
 
-scripts_for_warning_groups = """
+SCRIPTS_FOR_WARNING_GROUPS = """
   function compareMessages(x1, x2) { // of the same warning type
     return (WarningMessages[x1[2]] <= WarningMessages[x2[2]]) ? -1 : 1;
   }
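
Expected behavior of the renamed string helpers, with illustrative inputs (the definitions are copied from the hunk above so the checks run standalone):

    def escape_string(line):
      return line.replace('\\', '\\\\').replace('"', '\\"')

    def strip_escape_string(line):
      if not line:
        return line
      line = line[:-1] if line[-1] == '\n' else line
      return escape_string(line)

    assert escape_string('a\\b"c') == 'a\\\\b\\"c'
    assert strip_escape_string('say "hi"\n') == 'say \\"hi\\"'
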
@@ -565,32 +565,32 @@
 # Emit a JavaScript const integer array.
 def emit_const_int_array(name, array, writer):
   writer('const ' + name + ' = [')
-  for n in array:
-    writer(str(n) + ',')
+  for item in array:
+    writer(str(item) + ',')
   writer('];')
 
 
 # Emit a JavaScript const string array.
 def emit_const_string_array(name, array, writer):
   writer('const ' + name + ' = [')
-  for s in array:
-    writer('"' + strip_escape_string(s) + '",')
+  for item in array:
+    writer('"' + strip_escape_string(item) + '",')
   writer('];')
 
 
 # Emit a JavaScript const string array for HTML.
 def emit_const_html_string_array(name, array, writer):
   writer('const ' + name + ' = [')
-  for s in array:
-    writer('"' + html.escape(strip_escape_string(s)) + '",')
+  for item in array:
+    writer('"' + html.escape(strip_escape_string(item)) + '",')
   writer('];')
 
 
 # Emit a JavaScript const object array.
 def emit_const_object_array(name, array, writer):
   writer('const ' + name + ' = [')
-  for x in array:
-    writer(str(x) + ',')
+  for item in array:
+    writer(str(item) + ',')
   writer('];')
 
 
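
What these emitters produce, using print as the writer callback; the array name and contents are made up, and the definition is copied from above:

    def emit_const_int_array(name, array, writer):
      writer('const ' + name + ' = [')
      for item in array:
        writer(str(item) + ',')
      writer('];')

    emit_const_int_array('Demo', [80, 40], print)
    # Output:
    # const Demo = [
    # 80,
    # 40,
    # ];
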
@@ -620,7 +620,7 @@
     emit_const_html_string_array('WarningLinks', warning_links, writer)
 
 
-draw_table_javascript = """
+DRAW_TABLE_JAVASCRIPT = """
 google.charts.load('current', {'packages':['table']});
 google.charts.setOnLoadCallback(drawTable);
 function drawTable() {
@@ -653,7 +653,7 @@
   writer('\n<script>')
   emit_js_data(writer, flags, warning_messages, warning_links, warning_records,
                warn_patterns, project_names)
-  writer(scripts_for_warning_groups)
+  writer(SCRIPTS_FOR_WARNING_GROUPS)
   writer('</script>')
   emit_buttons(writer)
   # Warning messages are grouped by severities or project names.
@@ -670,8 +670,8 @@
                warning_links, warning_records, header_str):
   """Write warnings html file."""
   if html_path:
-    with open(html_path, 'w') as f:
-      dump_html(flags, f, warning_messages, warning_links, warning_records,
+    with open(html_path, 'w') as outf:
+      dump_html(flags, outf, warning_messages, warning_links, warning_records,
                 header_str, warn_patterns, project_names)
 
 
@@ -679,12 +679,12 @@
                   warning_records, header_str, project_names):
   """Write warnings csv file."""
   if flags.csvpath:
-    with open(flags.csvpath, 'w') as f:
-      dump_csv(csv.writer(f, lineterminator='\n'), warn_patterns)
+    with open(flags.csvpath, 'w') as outf:
+      dump_csv(csv.writer(outf, lineterminator='\n'), warn_patterns)
 
   if flags.csvwithdescription:
-    with open(flags.csvwithdescription, 'w') as f:
-      dump_csv_with_description(csv.writer(f, lineterminator='\n'),
+    with open(flags.csvwithdescription, 'w') as outf:
+      dump_csv_with_description(csv.writer(outf, lineterminator='\n'),
                                 warning_records, warning_messages,
                                 warn_patterns, project_names)
 
diff --git a/tools/warn/java_warn_patterns.py b/tools/warn/java_warn_patterns.py
index ac1ed5d..534f48d 100644
--- a/tools/warn/java_warn_patterns.py
+++ b/tools/warn/java_warn_patterns.py
@@ -15,8 +15,10 @@
 
 """Warning patterns for Java compiler tools."""
 
+# No need for docstrings on these trivial small functions.
+# pylint:disable=missing-function-docstring
+
 # pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
 from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
@@ -59,7 +61,8 @@
 
 
 warn_patterns = [
-    # pylint:disable=line-too-long,g-inconsistent-quotes
+    # pylint does not recognize g-inconsistent-quotes
+    # pylint:disable=line-too-long,bad-option-value,g-inconsistent-quotes
     # Warnings from Javac
     java_medium('Use of deprecated',
                 [r'.*: warning: \[deprecation\] .+',
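
An illustrative check of the Javac deprecation pattern above; the sample warning line is invented:

    import re

    pattern = re.compile(r'.*: warning: \[deprecation\] .+')
    sample = 'Foo.java:10: warning: [deprecation] bar() in Baz has been deprecated'
    assert pattern.match(sample)
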
diff --git a/tools/warn/make_warn_patterns.py b/tools/warn/make_warn_patterns.py
index 4b20493..a54c502 100644
--- a/tools/warn/make_warn_patterns.py
+++ b/tools/warn/make_warn_patterns.py
@@ -16,12 +16,12 @@
 """Warning patterns for build make tools."""
 
 # pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
 from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
 warn_patterns = [
-    # pylint:disable=line-too-long,g-inconsistent-quotes
+    # pylint does not recognize g-inconsistent-quotes
+    # pylint:disable=line-too-long,bad-option-value,g-inconsistent-quotes
     {'category': 'make', 'severity': Severity.MEDIUM,
      'description': 'make: overriding commands/ignoring old commands',
      'patterns': [r".*: warning: overriding commands for target .+",
diff --git a/tools/warn/other_warn_patterns.py b/tools/warn/other_warn_patterns.py
index 8df5b87..d05c8e9 100644
--- a/tools/warn/other_warn_patterns.py
+++ b/tools/warn/other_warn_patterns.py
@@ -15,8 +15,10 @@
 
 """Warning patterns from other tools."""
 
+# No need for docstrings on these trivial small functions.
+# pylint:disable=missing-function-docstring
+
 # pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
 from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
@@ -57,7 +59,8 @@
 
 
 warn_patterns = [
-    # pylint:disable=line-too-long,g-inconsistent-quotes
+    # pylint does not recognize g-inconsistent-quotes
+    # pylint:disable=line-too-long,bad-option-value,g-inconsistent-quotes
     # aapt warnings
     aapt('No comment for public symbol',
          [r".*: warning: No comment for public symbol .+"]),
diff --git a/tools/warn/severity.py b/tools/warn/severity.py
index b4c03c9..20064c3 100644
--- a/tools/warn/severity.py
+++ b/tools/warn/severity.py
@@ -19,8 +19,9 @@
 """
 
 
-# pylint:disable=old-style-class
+# pylint:disable=too-few-public-methods
 class SeverityInfo:
+  """Class of Severity Info, part of a Severity object."""
 
   def __init__(self, value, color, column_header, header):
     self.value = value
@@ -29,7 +30,7 @@
     self.header = header
 
 
-# pylint:disable=old-style-class
+# pylint:disable=too-few-public-methods
 class Severity:
   """Class of Severity levels where each level is a SeverityInfo."""
 
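
A reduced sketch of the two classes after this change, with hypothetical levels, showing why too-few-public-methods is the suppression that now applies to these plain value-holder classes:

    # pylint:disable=too-few-public-methods
    class SeverityInfo:
      """Information about one severity level."""

      def __init__(self, value, color, column_header, header):
        self.value = value
        self.color = color
        self.column_header = column_header
        self.header = header

    class Severity:
      """Namespace of severity levels, each a SeverityInfo."""
      HIGH = SeverityInfo(1, 'red', 'High', 'High severity warnings')
      MEDIUM = SeverityInfo(2, 'orange', 'Med', 'Medium severity warnings')
      levels = [HIGH, MEDIUM]
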
diff --git a/tools/warn/tidy_warn_patterns.py b/tools/warn/tidy_warn_patterns.py
index 5416cb2..7018d10 100644
--- a/tools/warn/tidy_warn_patterns.py
+++ b/tools/warn/tidy_warn_patterns.py
@@ -15,8 +15,10 @@
 
 """Warning patterns for clang-tidy."""
 
+# No need for docstrings on these trivial small functions.
+# pylint:disable=missing-function-docstring
+
 # pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
 from .cpp_warn_patterns import compile_patterns
 from .severity import Severity
 
@@ -39,7 +41,6 @@
 
 
 def analyzer_high(description, patterns):
-  # Important clang analyzer warnings to be fixed ASAP.
   return {
       'category': 'C/C++',
       'severity': Severity.HIGH,
@@ -74,7 +75,8 @@
 
 
 warn_patterns = [
-    # pylint:disable=line-too-long,g-inconsistent-quotes
+    # pylint does not recognize g-inconsistent-quotes
+    # pylint:disable=line-too-long,bad-option-value,g-inconsistent-quotes
     group_tidy_warn_pattern('android'),
     simple_tidy_warn_pattern('abseil-string-find-startswith'),
     simple_tidy_warn_pattern('bugprone-argument-comment'),
diff --git a/tools/warn/warn.py b/tools/warn/warn.py
index cb5daec..acfbb55 100755
--- a/tools/warn/warn.py
+++ b/tools/warn/warn.py
@@ -20,7 +20,8 @@
 import signal
 import sys
 
-# pylint:disable=relative-beyond-top-level
+# pylint:disable=relative-beyond-top-level,no-name-in-module
+# suppress false-positive no-name-in-module warnings on this relative import
 from . import warn_common as common
 
 
@@ -50,6 +51,7 @@
 
 def create_and_launch_subprocesses(num_cpu, classify_warnings_fn, arg_groups,
                                    group_results):
+  """Fork num_cpu processes to classify warnings."""
   pool = multiprocessing.Pool(num_cpu)
   for cpu in range(num_cpu):
     proc_result = pool.map(classify_warnings_fn, arg_groups[cpu])
@@ -59,6 +61,7 @@
 
 
 def main():
+  """Old main() calls new common_main."""
   use_google3 = False
   common.common_main(use_google3, create_and_launch_subprocesses,
                      classify_warnings)
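
A minimal runnable sketch of the fan-out in create_and_launch_subprocesses; classify_group and the input groups are placeholders:

    import multiprocessing

    def classify_group(args):
      # Placeholder classifier: report the length of each input line.
      return [len(line) for line in args]

    def run(num_cpu, arg_groups):
      group_results = []
      pool = multiprocessing.Pool(num_cpu)
      for cpu in range(num_cpu):
        group_results.append(pool.map(classify_group, arg_groups[cpu]))
      return group_results

    if __name__ == '__main__':
      print(run(2, [[['a', 'bb']], [['ccc']]]))  # [[[1, 2]], [[3]]]
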
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index b2dd8ab..844f629 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -52,8 +52,8 @@
 import re
 import sys
 
-# pylint:disable=relative-beyond-top-level
-# pylint:disable=g-importing-member
+# pylint:disable=relative-beyond-top-level,no-name-in-module
+# suppress false-positive no-name-in-module warnings on these relative imports
 from . import android_project_list
 from . import chrome_project_list
 from . import cpp_warn_patterns as cpp_patterns
@@ -115,39 +115,39 @@
 
 
 def find_project_index(line, project_patterns):
-  for i, p in enumerate(project_patterns):
-    if p.match(line):
-      return i
+  """Return the index to the project pattern array."""
+  for idx, pattern in enumerate(project_patterns):
+    if pattern.match(line):
+      return idx
   return -1
 
 
 def classify_one_warning(warning, link, results, project_patterns,
                          warn_patterns):
   """Classify one warning line."""
-  for i, w in enumerate(warn_patterns):
-    for cpat in w['compiled_patterns']:
+  for idx, pattern in enumerate(warn_patterns):
+    for cpat in pattern['compiled_patterns']:
       if cpat.match(warning):
-        p = find_project_index(warning, project_patterns)
-        results.append([warning, link, i, p])
+        project_idx = find_project_index(warning, project_patterns)
+        results.append([warning, link, idx, project_idx])
         return
-      else:
-        # If we end up here, there was a problem parsing the log
-        # probably caused by 'make -j' mixing the output from
-        # 2 or more concurrent compiles
-        pass
+  # If we end up here, there was a problem parsing the log,
+  # probably caused by 'make -j' interleaving the output of
+  # two or more concurrent compiles.
 
 
-def remove_prefix(s, sub):
-  """Remove everything before last occurrence of substring sub in string s."""
-  if sub in s:
-    inc_sub = s.rfind(sub)
-    return s[inc_sub:]
-  return s
+def remove_prefix(src, sub):
+  """Remove everything before last occurrence of substring sub in string src."""
+  if sub in src:
+    inc_sub = src.rfind(sub)
+    return src[inc_sub:]
+  return src
 
 
 # TODO(emmavukelj): Don't have any generate_*_cs_link functions call
 # normalize_path a second time (the first time being in parse_input_file)
 def generate_cs_link(warning_line, flags, android_root=None):
+  """Try to add code search HTTP URL prefix."""
   if flags.platform == 'chrome':
     return generate_chrome_cs_link(warning_line, flags)
   if flags.platform == 'android':
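
A self-contained illustration of the classification helpers above, on invented project patterns:

    import re

    project_patterns = [re.compile(r'^frameworks/'), re.compile(r'^external/')]

    def find_project_index(line, patterns):
      """Return the index of the matching pattern, or -1."""
      for idx, pattern in enumerate(patterns):
        if pattern.match(line):
          return idx
      return -1

    assert find_project_index('external/zlib/foo.c: ...', project_patterns) == 1
    assert find_project_index('vendor/foo.c: ...', project_patterns) == -1
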
@@ -279,8 +279,7 @@
   if idx >= 0:
     # remove chrome_root/, we want path relative to that
     return path[idx + len('chrome_root/'):]
-  else:
-    return path
+  return path
 
 
 def normalize_warning_line(line, flags, android_root=None):
@@ -316,17 +315,17 @@
         unique_warnings[normalized_line] = generate_cs_link(line, flags)
     elif (platform_version == 'unknown' or board_name == 'unknown' or
           architecture == 'unknown'):
-      m = re.match(r'.+Package:.+chromeos-base/chromeos-chrome-', line)
-      if m is not None:
+      result = re.match(r'.+Package:.+chromeos-base/chromeos-chrome-', line)
+      if result is not None:
         platform_version = 'R' + line.split('chrome-')[1].split('_')[0]
         continue
-      m = re.match(r'.+Source\sunpacked\sin\s(.+)', line)
-      if m is not None:
-        board_name = m.group(1).split('/')[2]
+      result = re.match(r'.+Source\sunpacked\sin\s(.+)', line)
+      if result is not None:
+        board_name = result.group(1).split('/')[2]
         continue
-      m = re.match(r'.+USE:\s*([^\s]*).*', line)
-      if m is not None:
-        architecture = m.group(1)
+      result = re.match(r'.+USE:\s*([^\s]*).*', line)
+      if result is not None:
+        architecture = result.group(1)
         continue
 
   header_str = '%s - %s - %s' % (platform_version, board_name, architecture)
@@ -344,6 +343,7 @@
 
 def parse_input_file_android(infile, flags):
   """Parse Android input file, collect parameters and warning lines."""
+  # pylint:disable=too-many-locals,too-many-branches
   platform_version = 'unknown'
   target_product = 'unknown'
   target_variant = 'unknown'
@@ -396,18 +396,18 @@
     if line_counter < 100:
       # save a little bit of time by only doing this for the first few lines
       line_counter += 1
-      m = re.search('(?<=^PLATFORM_VERSION=).*', line)
-      if m is not None:
-        platform_version = m.group(0)
-      m = re.search('(?<=^TARGET_PRODUCT=).*', line)
-      if m is not None:
-        target_product = m.group(0)
-      m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
-      if m is not None:
-        target_variant = m.group(0)
-      m = re.search('(?<=^TOP=).*', line)
-      if m is not None:
-        android_root = m.group(1)
+      result = re.search('(?<=^PLATFORM_VERSION=).*', line)
+      if result is not None:
+        platform_version = result.group(0)
+      result = re.search('(?<=^TARGET_PRODUCT=).*', line)
+      if result is not None:
+        target_product = result.group(0)
+      result = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
+      if result is not None:
+        target_variant = result.group(0)
+      result = re.search('(?<=^TOP=).*', line)
+      if result is not None:
+        android_root = result.group(0)  # the pattern has no group 1
 
   if android_root:
     new_unique_warnings = dict()
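
The lookbehind idiom used above, on a fabricated log line. These patterns contain no capturing groups, so group(0), the whole match, is the value to read in every case, including TOP=:

    import re

    line = 'PLATFORM_VERSION=12'
    result = re.search('(?<=^PLATFORM_VERSION=).*', line)
    if result is not None:
      platform_version = result.group(0)
      assert platform_version == '12'
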
@@ -424,6 +424,7 @@
 
 
 def parse_input_file(infile, flags):
+  """Parse one input file for chrome or android."""
   if flags.platform == 'chrome':
     return parse_input_file_chrome(infile, flags)
   if flags.platform == 'android':
@@ -448,14 +449,16 @@
   if platform == 'chrome':
     warn_patterns = cpp_patterns.warn_patterns
   elif platform == 'android':
-    warn_patterns = make_patterns.warn_patterns + cpp_patterns.warn_patterns + java_patterns.warn_patterns + tidy_patterns.warn_patterns + other_patterns.warn_patterns
+    warn_patterns = (make_patterns.warn_patterns + cpp_patterns.warn_patterns +
+                     java_patterns.warn_patterns + tidy_patterns.warn_patterns +
+                     other_patterns.warn_patterns)
   else:
     raise Exception('platform name %s is not valid' % platform)
-  for w in warn_patterns:
-    w['members'] = []
+  for pattern in warn_patterns:
+    pattern['members'] = []
     # Each warning pattern has a 'projects' dictionary, that
     # maps a project name to number of warnings in that project.
-    w['projects'] = {}
+    pattern['projects'] = {}
   return warn_patterns
 
 
@@ -473,6 +476,7 @@
                                use_google3, create_launch_subprocs_fn,
                                classify_warnings_fn):
   """Classify all warning lines with num_cpu parallel processes."""
+  # pylint:disable=too-many-arguments,too-many-locals
   num_cpu = args.processes
   group_results = []
 
@@ -531,8 +535,10 @@
 def process_log(logfile, flags, project_names, project_patterns, warn_patterns,
                 html_path, use_google3, create_launch_subprocs_fn,
                 classify_warnings_fn, logfile_object):
-  # pylint: disable=g-doc-args
-  # pylint: disable=g-doc-return-or-yield
+  # pylint does not recognize g-doc-*
+  # pylint: disable=bad-option-value,g-doc-args
+  # pylint: disable=bad-option-value,g-doc-return-or-yield
+  # pylint: disable=too-many-arguments,too-many-locals
   """Function that handles processing of a log.
 
   This is isolated into its own function (rather than just taking place in main)