Merge "Add lpunpack to otatools for rebuilding super.img"
diff --git a/core/Makefile b/core/Makefile
index 03941ff..5b8416b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1328,7 +1328,8 @@
 INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
 endif
 
-INTERNAL_USERIMAGES_BINARY_PATHS := $(sort $(dir $(INTERNAL_USERIMAGES_DEPS)))
+# Get a colon-separated list of search paths.
+INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
 
 ifeq (true,$(PRODUCT_SUPPORTS_VERITY))
 INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_METADATA) $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
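For reference, the new INTERNAL_USERIMAGES_BINARY_PATHS value is the sorted, deduplicated set of dependency directories joined with ':' so it can be prepended to $PATH in one go. A minimal Python sketch of that computation, using hypothetical tool paths:

    import os

    # Hypothetical dependency list, standing in for $(INTERNAL_USERIMAGES_DEPS).
    deps = [
        "out/host/linux-x86/bin/mke2fs",
        "out/host/linux-x86/bin/e2fsdroid",
        "out/host/linux-x86/bin/img2simg",
    ]

    # $(dir ...) keeps the directory part (with trailing slash), $(sort ...)
    # sorts and deduplicates, and $(subst $(space),:,...) joins with ':'.
    dirs = sorted({os.path.dirname(p) + "/" for p in deps})
    search_path = ":".join(dirs)
    print(search_path + ":" + os.environ.get("PATH", ""))
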
@@ -2251,7 +2252,7 @@
   @mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
   $(call generate-image-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt,system, \
       skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) $(TARGET_OUT) \
       || ( mkdir -p $${DIST_DIR}; cp $(INSTALLED_FILES_FILE) $${DIST_DIR}/installed-files-rescued.txt; \
@@ -2499,7 +2500,7 @@
   @mkdir -p $(TARGET_OUT_DATA)
   @mkdir -p $(userdataimage_intermediates) && rm -rf $(userdataimage_intermediates)/userdata_image_info.txt
   $(call generate-image-prop-dictionary, $(userdataimage_intermediates)/userdata_image_info.txt,userdata,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_DATA) $(userdataimage_intermediates)/userdata_image_info.txt $(INSTALLED_USERDATAIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE))
@@ -2605,7 +2606,7 @@
   @mkdir -p $(TARGET_OUT_CACHE)
   @mkdir -p $(cacheimage_intermediates) && rm -rf $(cacheimage_intermediates)/cache_image_info.txt
   $(call generate-image-prop-dictionary, $(cacheimage_intermediates)/cache_image_info.txt,cache,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_CACHE) $(cacheimage_intermediates)/cache_image_info.txt $(INSTALLED_CACHEIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE))
@@ -2676,7 +2677,7 @@
   @mkdir -p $(TARGET_OUT_SYSTEM_OTHER)
   @mkdir -p $(systemotherimage_intermediates) && rm -rf $(systemotherimage_intermediates)/system_other_image_info.txt
   $(call generate-image-prop-dictionary, $(systemotherimage_intermediates)/system_other_image_info.txt,system,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_SYSTEM_OTHER) $(systemotherimage_intermediates)/system_other_image_info.txt $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMOTHERIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
@@ -2814,7 +2815,7 @@
   $(call create-vendor-odm-symlink)
   @mkdir -p $(vendorimage_intermediates) && rm -rf $(vendorimage_intermediates)/vendor_image_info.txt
   $(call generate-image-prop-dictionary, $(vendorimage_intermediates)/vendor_image_info.txt,vendor,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_VENDOR) $(vendorimage_intermediates)/vendor_image_info.txt $(INSTALLED_VENDORIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_VENDORIMAGE_PARTITION_SIZE))
@@ -2869,7 +2870,7 @@
   @mkdir -p $(TARGET_OUT_PRODUCT)
   @mkdir -p $(productimage_intermediates) && rm -rf $(productimage_intermediates)/product_image_info.txt
   $(call generate-image-prop-dictionary, $(productimage_intermediates)/product_image_info.txt,product,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_PRODUCT) $(productimage_intermediates)/product_image_info.txt $(INSTALLED_PRODUCTIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCTIMAGE_TARGET),$(BOARD_PRODUCTIMAGE_PARTITION_SIZE))
@@ -2967,7 +2968,7 @@
   @mkdir -p $(TARGET_OUT_PRODUCT_SERVICES)
   @mkdir -p $(product_servicesimage_intermediates) && rm -rf $(product_servicesimage_intermediates)/product_services_image_info.txt
   $(call generate-image-prop-dictionary, $(product_servicesimage_intermediates)/product_services_image_info.txt,product_services, skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_PRODUCT_SERVICES) $(product_servicesimage_intermediates)/product_services_image_info.txt $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET),$(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE))
@@ -3018,7 +3019,7 @@
   @mkdir -p $(TARGET_OUT_ODM)
   @mkdir -p $(odmimage_intermediates) && rm -rf $(odmimage_intermediates)/odm_image_info.txt
   $(call generate-userimage-prop-dictionary, $(odmimage_intermediates)/odm_image_info.txt, skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_ODM) $(odmimage_intermediates)/odm_image_info.txt $(INSTALLED_ODMIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_ODMIMAGE_TARGET),$(BOARD_ODMIMAGE_PARTITION_SIZE))
@@ -4106,7 +4107,7 @@
 	$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 ifdef BUILDING_SYSTEM_IMAGE
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	    build/make/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
 endif # BUILDING_SYSTEM_IMAGE
 endif
@@ -4230,7 +4231,7 @@
 	        echo "$(group)_partition_list=$(_group_partition_list)" >> $(zip_root)/META/dynamic_partitions_info.txt;))
 endif # BOARD_SUPER_PARTITION_GROUPS
 	@# TODO(b/134525174): Remove `-r` after addressing the issue with recovery patch generation.
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	    build/make/tools/releasetools/add_img_to_target_files -a -r -v -p $(HOST_OUT) $(zip_root)
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
@@ -4261,7 +4262,7 @@
 # $(1): output file
 # $(2): additional args
 define build-ota-package-target
-PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
+PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
    build/make/tools/releasetools/ota_from_target_files -v \
    --block \
    --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
diff --git a/core/aapt2.mk b/core/aapt2.mk
index fbbf3dd..7b17df4 100644
--- a/core/aapt2.mk
+++ b/core/aapt2.mk
@@ -61,8 +61,8 @@
 
 # Always set --pseudo-localize, it will be stripped out later for release
 # builds that don't want it.
-$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_resources_flata): \
-  PRIVATE_AAPT2_CFLAGS := --pseudo-localize
+$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_resources_flata) $(my_generated_resources_flata) $(my_zippped_resources_flata): \
+  PRIVATE_AAPT2_CFLAGS := --pseudo-localize $(filter --legacy,$(LOCAL_AAPT_FLAGS))
 
 # TODO(b/78447299): Forbid LOCAL_STATIC_JAVA_AAR_LIBRARIES in aapt2 and remove
 # support for it.
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index ebce00b..7a9f23e 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -246,6 +246,9 @@
   my_sanitize := $(filter-out cfi,$(my_sanitize))
   my_cflags += -fno-lto
   my_ldflags += -fno-lto
+
+  # TODO(b/133876586): Disable experimental pass manager for fuzzer builds.
+  my_cflags += -fno-experimental-new-pass-manager
 endif
 
 ifneq ($(filter integer_overflow,$(my_sanitize)),)
diff --git a/core/package_internal.mk b/core/package_internal.mk
index d693c58..c414295 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -328,7 +328,7 @@
 built_apk_splits := $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk)
 endif
 
-$(R_file_stamp) $(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
+$(R_file_stamp) $(my_res_package): PRIVATE_AAPT_FLAGS := $(filter-out --legacy,$(LOCAL_AAPT_FLAGS))
 $(R_file_stamp) $(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS := $(TARGET_AAPT_CHARACTERISTICS)
 $(R_file_stamp) $(my_res_package): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
 $(R_file_stamp) $(my_res_package): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
@@ -346,7 +346,6 @@
 my_full_asset_paths := $(all_assets)
 
 # Add AAPT2 link specific flags.
-$(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
 ifndef LOCAL_AAPT_NAMESPACES
   $(my_res_package): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
 endif
@@ -416,14 +415,6 @@
 
 $(my_res_package) : $(all_library_res_package_export_deps)
 
-# These four are set above for $(R_stamp_file) and $(my_res_package), but
-# $(LOCAL_BUILT_MODULE) is not set before java.mk, so they have to be set again
-# here.
-$(LOCAL_BUILT_MODULE): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_AAPT_CHARACTERISTICS := $(TARGET_AAPT_CHARACTERISTICS)
-$(LOCAL_BUILT_MODULE): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
-$(LOCAL_BUILT_MODULE): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
-
 ifneq ($(full_classes_jar),)
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
 # Use the jarjar processed archive as the initial package file.
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 0028ce4..f071c7c 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -18,25 +18,33 @@
 device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
 # Create an artifact to include a list of test config files in device-tests.
 device-tests-list-zip := $(PRODUCT_OUT)/device-tests_list.zip
+# Create an artifact to include all test config files in device-tests.
+device-tests-configs-zip := $(PRODUCT_OUT)/device-tests_configs.zip
 my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES))
-$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip)
+$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip)
 $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
 $(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
 $(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
 	echo $(sort $(COMPATIBILITY.device-tests.FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $@-host.list; \
 	done
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
+	$(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
 	rm -f $(PRIVATE_device_tests_list)
-	$(hide) grep -e .*.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list)
-	$(hide) grep -e .*.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list)
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-list-zip) -C $(dir $@) -f $(PRIVATE_device_tests_list)
-	rm -f $@.list $@-host.list $@-target.list $(PRIVATE_device_tests_list)
+	rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
+	  $(PRIVATE_device_tests_list)
 
 device-tests: $(device-tests-zip)
-$(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip))
+$(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip))
 
 tests: device-tests
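The new device-tests_configs.zip keeps only the *.config entries from the host and target file lists, re-rooted under host/ and target/ like the main zip. A rough Python sketch of that selection; the paths are hypothetical, and the real recipe uses grep plus soong_zip rather than Python:

    # Hypothetical entries, standing in for $@-host.list and $@-target.list.
    HOST_OUT = "out/host/linux-x86"
    PRODUCT_OUT = "out/target/product/generic"
    host_files = [HOST_OUT + "/testcases/foo/foo.config",
                  HOST_OUT + "/testcases/foo/foo.jar"]
    target_files = [PRODUCT_OUT + "/testcases/bar/bar.config",
                    PRODUCT_OUT + "/testcases/bar/bar"]

    def config_entries(files, out_root, prefix):
        # Keep only *.config files and re-root them, mirroring the
        # "-P host -C $(HOST_OUT)" / "-P target -C $(PRODUCT_OUT)" arguments.
        return {f: prefix + f[len(out_root):] for f in files if f.endswith(".config")}

    entries = {}
    entries.update(config_entries(host_files, HOST_OUT, "host"))
    entries.update(config_entries(target_files, PRODUCT_OUT, "target"))
    for src, arcname in sorted(entries.items()):
        print(src, "->", arcname)
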
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index 36ab101..9ea4e62 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -34,11 +34,18 @@
 
 my_host_shared_lib_for_general_tests += $(call copy-many-files,$(my_general_tests_shared_lib_files))
 
+# Create an artifact to include all test config files in general-tests.
+general_tests_configs_zip := $(PRODUCT_OUT)/general-tests_configs.zip
+# Create an artifact to include all shared library files in general-tests.
+general_tests_host_shared_libs_zip := $(PRODUCT_OUT)/general-tests_host-shared-libs.zip
+
 $(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip)
-$(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip)
+$(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_host_shared_libs_zip)
 $(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
 $(general_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir)
 $(general_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_general_tests)
+$(general_tests_zip) : PRIVATE_general_tests_configs_zip := $(general_tests_configs_zip)
+$(general_tests_zip) : PRIVATE_general_host_shared_libs_zip := $(general_tests_host_shared_libs_zip)
 $(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(general_tests_tools) $(my_host_shared_lib_for_general_tests) $(SOONG_ZIP)
 	rm -rf $(PRIVATE_INTERMEDIATES_DIR)
 	rm -f $@ $(PRIVATE_general_tests_list_zip)
@@ -46,22 +53,33 @@
 	echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
 	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
 	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/host.list; \
+	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \
 	done
+	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true
 	cp -fp $(PRIVATE_TOOLS) $(PRIVATE_INTERMEDIATES_DIR)/tools/
 	$(SOONG_ZIP) -d -o $@ \
 	  -P host -C $(PRIVATE_INTERMEDIATES_DIR) -D $(PRIVATE_INTERMEDIATES_DIR)/tools \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
 	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list
-	grep -e .*.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
-	grep -e .*.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
+	$(SOONG_ZIP) -d -o $(PRIVATE_general_tests_configs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
+	$(SOONG_ZIP) -d -o $(PRIVATE_general_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
 	$(SOONG_ZIP) -d -o $(PRIVATE_general_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/general-tests_list
 
 general-tests: $(general_tests_zip)
-$(call dist-for-goals, general-tests, $(general_tests_zip) $(general_tests_list_zip))
+$(call dist-for-goals, general-tests, $(general_tests_zip) $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_host_shared_libs_zip))
 
 intermediates_dir :=
 general_tests_tools :=
 general_tests_zip :=
 general_tests_list_zip :=
+general_tests_configs_zip :=
+general_tests_host_shared_libs_zip :=
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
index e9c506a..489feeb 100644
--- a/core/tasks/oem_image.mk
+++ b/core/tasks/oem_image.mk
@@ -34,7 +34,7 @@
 	@mkdir -p $(TARGET_OUT_OEM)
 	@mkdir -p $(oemimage_intermediates) && rm -rf $(oemimage_intermediates)/oem_image_info.txt
 	$(call generate-image-prop-dictionary, $(oemimage_intermediates)/oem_image_info.txt,oem,skip_fsck=true)
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
 	  build/make/tools/releasetools/build_image.py \
 	  $(TARGET_OUT_OEM) $(oemimage_intermediates)/oem_image_info.txt $@ $(TARGET_OUT)
 	$(hide) $(call assert-max-image-size,$@,$(BOARD_OEMIMAGE_PARTITION_SIZE))
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index a58019e..b0d1a0c 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -151,7 +151,7 @@
 	# Generate the image.
 	$(if $(filter oem,$(PRIVATE_MOUNT_POINT)), \
 	  $(hide) echo "oem.buildnumber=$(BUILD_NUMBER_FROM_FILE)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
 	  build/make/tools/releasetools/build_image.py \
 	  $(PRIVATE_STAGING_DIR) $(PRIVATE_INTERMEDIATES)/image_info.txt $@ $(TARGET_OUT)
 
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index b23eef1..ecb1d31 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -126,7 +126,7 @@
 
     assert len(self.data) % self.blocksize == 0
 
-    self.total_blocks = len(self.data) / self.blocksize
+    self.total_blocks = len(self.data) // self.blocksize
     self.care_map = RangeSet(data=(0, self.total_blocks))
     # When the last block is padded, we always write the whole block even for
     # incremental OTAs. Because otherwise the last block may get skipped if
@@ -179,8 +179,7 @@
   def TotalSha1(self, include_clobbered_blocks=False):
     if not include_clobbered_blocks:
       return self.RangeSha1(self.care_map.subtract(self.clobbered_blocks))
-    else:
-      return sha1(self.data).hexdigest()
+    return sha1(self.data).hexdigest()
 
   def WriteRangeDataToFd(self, ranges, fd):
     for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
@@ -194,13 +193,13 @@
     self.path = path
     self.blocksize = 4096
     self._file_size = os.path.getsize(self.path)
-    self._file = open(self.path, 'r')
+    self._file = open(self.path, 'rb')
 
     if self._file_size % self.blocksize != 0:
       raise ValueError("Size of file %s must be multiple of %d bytes, but is %d"
                        % self.path, self.blocksize, self._file_size)
 
-    self.total_blocks = self._file_size / self.blocksize
+    self.total_blocks = self._file_size // self.blocksize
     self.care_map = RangeSet(data=(0, self.total_blocks))
     self.clobbered_blocks = RangeSet()
     self.extended = RangeSet()
@@ -391,7 +390,7 @@
 
     def print_header(header, separator):
       logger.info(header)
-      logger.info(separator * len(header) + '\n')
+      logger.info('%s\n', separator * len(header))
 
     print_header('  Imgdiff Stats Report  ', '=')
     for key in self.REASONS:
@@ -779,7 +778,7 @@
     out.insert(2, "0\n")
     out.insert(3, str(max_stashed_blocks) + "\n")
 
-    with open(prefix + ".transfer.list", "wb") as f:
+    with open(prefix + ".transfer.list", "w") as f:
       for i in out:
         f.write(i)
 
@@ -1009,7 +1008,7 @@
     # - we write every block we care about exactly once.
 
     # Start with no blocks having been touched yet.
-    touched = array.array("B", "\0" * self.tgt.total_blocks)
+    touched = array.array("B", b"\0" * self.tgt.total_blocks)
 
     # Imagine processing the transfers in order.
     for xf in self.transfers:
@@ -1671,8 +1670,8 @@
 
         split_tgt_size = int(info[1])
         assert split_tgt_size % 4096 == 0
-        assert split_tgt_size / 4096 <= tgt_remain.size()
-        split_tgt_ranges = tgt_remain.first(split_tgt_size / 4096)
+        assert split_tgt_size // 4096 <= tgt_remain.size()
+        split_tgt_ranges = tgt_remain.first(split_tgt_size // 4096)
         tgt_remain = tgt_remain.subtract(split_tgt_ranges)
 
         # Find the split_src_ranges within the image file from its relative
@@ -1744,7 +1743,7 @@
                                                     lines)
         for index, (patch_start, patch_length, split_tgt_ranges,
                     split_src_ranges) in enumerate(split_info_list):
-          with open(patch_file) as f:
+          with open(patch_file, 'rb') as f:
             f.seek(patch_start)
             patch_content = f.read(patch_length)
 
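Most of the blockimgdiff.py changes are Python 3 portability fixes: '/' is true division in Python 3 and returns a float, so block counts use '//', and raw image data is read and written in binary mode. A small self-contained sketch of both points:

    import os
    import tempfile

    BLOCKSIZE = 4096
    size = 3 * BLOCKSIZE

    # True division vs. floor division: only '//' yields an int block count
    # on Python 3.
    print(size / BLOCKSIZE)    # 3.0 (float) on Python 3
    print(size // BLOCKSIZE)   # 3 (int) on both Python 2 and 3

    # Raw image data must be opened in binary mode; 'r' on Python 3 would
    # decode to str and break byte-level length and hash computations.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b"\x00" * size)
        path = f.name
    with open(path, "rb") as f:
        data = f.read()
    os.unlink(path)
    assert isinstance(data, bytes) and len(data) // BLOCKSIZE == 3
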
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 7cff831..107b6f5 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -14,6 +14,7 @@
 
 from __future__ import print_function
 
+import base64
 import collections
 import copy
 import errno
@@ -30,7 +31,6 @@
 import re
 import shlex
 import shutil
-import string
 import subprocess
 import sys
 import tempfile
@@ -190,6 +190,8 @@
     kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
         stdin, etc. stdout and stderr will default to subprocess.PIPE and
         subprocess.STDOUT respectively unless caller specifies any of them.
+        universal_newlines will default to True, as most of the users in
+        releasetools expect string output.
 
   Returns:
     A subprocess.Popen object.
@@ -197,6 +199,8 @@
   if 'stdout' not in kwargs and 'stderr' not in kwargs:
     kwargs['stdout'] = subprocess.PIPE
     kwargs['stderr'] = subprocess.STDOUT
+  if 'universal_newlines' not in kwargs:
+    kwargs['universal_newlines'] = True
   # Don't log any if caller explicitly says so.
   if verbose != False:
     logger.info("  Running: \"%s\"", " ".join(args))
@@ -314,7 +318,7 @@
 
   def read_helper(fn):
     if isinstance(input_file, zipfile.ZipFile):
-      return input_file.read(fn)
+      return input_file.read(fn).decode()
     else:
       path = os.path.join(input_file, *fn.split("/"))
       try:
@@ -526,7 +530,7 @@
   # system. Other areas assume system is always at "/system" so point /system
   # at /.
   if system_root_image:
-    assert not d.has_key("/system") and d.has_key("/")
+    assert '/system' not in d and '/' in d
     d["/system"] = d["/"]
   return d
 
@@ -953,7 +957,7 @@
     # filename listed in system.map may contain an additional leading slash
     # (i.e. "//system/framework/am.jar"). Using lstrip to get consistent
     # results.
-    arcname = string.replace(entry, which, which.upper(), 1).lstrip('/')
+    arcname = entry.replace(which, which.upper(), 1).lstrip('/')
 
     # Special handling another case, where files not under /system
     # (e.g. "/sbin/charger") are packed under ROOT/ in a target_files.zip.
@@ -1223,7 +1227,7 @@
     if basename:
       installed_files.add(basename)
 
-  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
+  for line in tf_zip.read('META/apkcerts.txt').decode().split('\n'):
     line = line.strip()
     if not line:
       continue
@@ -1433,6 +1437,8 @@
 
       if not first:
         print("key file %s still missing some passwords." % (self.pwfile,))
+        if sys.version_info[0] >= 3:
+          raw_input = input  # pylint: disable=redefined-builtin
         answer = raw_input("try to edit again? [y]> ").strip()
         if answer and answer[0] not in 'yY':
           raise RuntimeError("key passwords unavailable")
@@ -2185,7 +2191,7 @@
   This gives the same result as `openssl x509 -in <filename> -outform DER`.
 
   Returns:
-    The decoded certificate string.
+    The decoded certificate bytes.
   """
   cert_buffer = []
   save = False
@@ -2196,7 +2202,7 @@
       cert_buffer.append(line)
     if "--BEGIN CERTIFICATE--" in line:
       save = True
-  cert = "".join(cert_buffer).decode('base64')
+  cert = base64.b64decode("".join(cert_buffer))
   return cert
 
 
@@ -2338,7 +2344,7 @@
 
   logger.info("putting script in %s", sh_location)
 
-  output_sink(sh_location, sh)
+  output_sink(sh_location, sh.encode())
 
 
 class DynamicPartitionUpdate(object):
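The docstring change above documents that common.Run() now defaults universal_newlines to True, so callers get str output unless they explicitly opt out (as tests do when they need raw DER bytes). A simplified sketch of the defaulting pattern, assuming a POSIX echo; this is not the full helper, which also handles logging and verbosity:

    import subprocess

    def run(args, **kwargs):
        # Sketch of the defaulting described in the docstring: PIPE/STDOUT for
        # output, and text (str) output via universal_newlines unless the
        # caller overrides it.
        if 'stdout' not in kwargs and 'stderr' not in kwargs:
            kwargs['stdout'] = subprocess.PIPE
            kwargs['stderr'] = subprocess.STDOUT
        if 'universal_newlines' not in kwargs:
            kwargs['universal_newlines'] = True
        return subprocess.Popen(args, **kwargs)

    text_proc = run(['echo', 'hello'])
    text_out, _ = text_proc.communicate()
    assert isinstance(text_out, str)

    # Callers that need raw bytes (e.g. DER certificates) opt out explicitly.
    raw_proc = run(['echo', 'hello'], universal_newlines=False)
    raw_out, _ = raw_proc.communicate()
    assert isinstance(raw_out, bytes)
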
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index 36becf4..9d6e9fb 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -52,9 +52,12 @@
   def __ne__(self, other):
     return self.data != other.data
 
-  def __nonzero__(self):
+  def __bool__(self):
     return bool(self.data)
 
+  # Python 2 uses __nonzero__, while Python 3 uses __bool__.
+  __nonzero__ = __bool__
+
   def __str__(self):
     if not self.data:
       return "empty"
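The rangelib.py change keeps RangeSet truthiness working on both interpreters: Python 3 consults __bool__ while Python 2 consults __nonzero__, so the class defines the former and aliases the latter. A minimal standalone illustration:

    class RangeLike(object):
        """Minimal stand-in showing the truthiness aliasing used above."""

        def __init__(self, data):
            self.data = data

        def __bool__(self):          # consulted by Python 3
            return bool(self.data)

        __nonzero__ = __bool__       # consulted by Python 2

    assert not RangeLike(())
    assert RangeLike((0, 5))
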
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index c174d2f..668ad9b 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -488,23 +488,33 @@
       continue
 
     # System properties.
-    elif filename in ("SYSTEM/build.prop",
-                      "VENDOR/build.prop",
-                      "SYSTEM/vendor/build.prop",
-                      "ODM/build.prop",  # legacy
-                      "ODM/etc/build.prop",
-                      "VENDOR/odm/build.prop",  # legacy
-                      "VENDOR/odm/etc/build.prop",
-                      "PRODUCT/build.prop",
-                      "SYSTEM/product/build.prop",
-                      "PRODUCT_SERVICES/build.prop",
-                      "SYSTEM/product_services/build.prop",
-                      "SYSTEM/etc/prop.default",
-                      "BOOT/RAMDISK/prop.default",
-                      "BOOT/RAMDISK/default.prop",  # legacy
-                      "ROOT/default.prop",  # legacy
-                      "RECOVERY/RAMDISK/prop.default",
-                      "RECOVERY/RAMDISK/default.prop"):  # legacy
+    elif filename in (
+        "SYSTEM/build.prop",
+
+        "VENDOR/build.prop",
+        "SYSTEM/vendor/build.prop",
+
+        "ODM/etc/build.prop",
+        "VENDOR/odm/etc/build.prop",
+
+        "PRODUCT/build.prop",
+        "SYSTEM/product/build.prop",
+
+        "PRODUCT_SERVICES/build.prop",
+        "SYSTEM/product_services/build.prop",
+
+        "SYSTEM/etc/prop.default",
+        "BOOT/RAMDISK/prop.default",
+        "RECOVERY/RAMDISK/prop.default",
+
+        # ROOT/default.prop is a legacy path, but may still exist for upgrading
+        # devices that don't support `property_overrides_split_enabled`.
+        "ROOT/default.prop",
+
+        # RECOVERY/RAMDISK/default.prop is a legacy path, but will always exist
+        # as a symlink in the current code. So it's a no-op here. Keeping the
+        # path here for clarity.
+        "RECOVERY/RAMDISK/default.prop"):
       print("Rewriting %s:" % (filename,))
       if stat.S_ISLNK(info.external_attr >> 16):
         new_data = data
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index b6d47d4..4c86933 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -268,6 +268,7 @@
 
 
 class DataImageTest(ReleaseToolsTestCase):
+
   def test_read_range_set(self):
     data = "file" + ('\0' * 4092)
     image = DataImage(data)
@@ -275,10 +276,11 @@
 
 
 class FileImageTest(ReleaseToolsTestCase):
+
   def setUp(self):
     self.file_path = common.MakeTempFile()
     self.data = os.urandom(4096 * 4)
-    with open(self.file_path, 'w') as f:
+    with open(self.file_path, 'wb') as f:
       f.write(self.data)
     self.file = FileImage(self.file_path)
 
@@ -292,18 +294,18 @@
         expected_data = self.data[s * blocksize : e * blocksize]
 
         rs = RangeSet([s, e])
-        data = "".join(self.file.ReadRangeSet(rs))
+        data = b''.join(self.file.ReadRangeSet(rs))
         self.assertEqual(expected_data, data)
 
         sha1sum = self.file.RangeSha1(rs)
         self.assertEqual(sha1(expected_data).hexdigest(), sha1sum)
 
         tmpfile = common.MakeTempFile()
-        with open(tmpfile, 'w') as f:
+        with open(tmpfile, 'wb') as f:
           self.file.WriteRangeDataToFd(rs, f)
-        with open(tmpfile, 'r') as f:
+        with open(tmpfile, 'rb') as f:
           self.assertEqual(expected_data, f.read())
 
   def test_read_all(self):
-    data = "".join(self.file.ReadRangeSet(self.file.care_map))
+    data = b''.join(self.file.ReadRangeSet(self.file.care_map))
     self.assertEqual(self.data, data)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 11ac9f5..914e58e 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -579,7 +579,7 @@
   def test_ExtractPublicKey(self):
     cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
     pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
-    with open(pubkey, 'rb') as pubkey_fp:
+    with open(pubkey) as pubkey_fp:
       self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))
 
   def test_ExtractPublicKey_invalidInput(self):
@@ -590,15 +590,16 @@
   def test_ExtractAvbPublicKey(self):
     privkey = os.path.join(self.testdata_dir, 'testkey.key')
     pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
-    with open(common.ExtractAvbPublicKey(privkey)) as privkey_fp, \
-        open(common.ExtractAvbPublicKey(pubkey)) as pubkey_fp:
+    with open(common.ExtractAvbPublicKey(privkey), 'rb') as privkey_fp, \
+        open(common.ExtractAvbPublicKey(pubkey), 'rb') as pubkey_fp:
       self.assertEqual(privkey_fp.read(), pubkey_fp.read())
 
   def test_ParseCertificate(self):
     cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
 
     cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
-    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                      universal_newlines=False)
     expected, _ = proc.communicate()
     self.assertEqual(0, proc.returncode)
 
@@ -914,7 +915,7 @@
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
       info_values = ''.join(
-          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.iteritems())])
+          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
       common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
 
       FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
@@ -1085,7 +1086,7 @@
     loc = os.path.join(self._tempdir, prefix, name)
     if not os.path.exists(os.path.dirname(loc)):
       os.makedirs(os.path.dirname(loc))
-    with open(loc, "w+") as f:
+    with open(loc, "wb") as f:
       f.write(data)
 
   def test_full_recovery(self):
@@ -1110,7 +1111,7 @@
     validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                         self._info)
     # Validate 'recovery-from-boot' with bonus argument.
-    self._out_tmp_sink("etc/recovery-resource.dat", "bonus", "SYSTEM")
+    self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
     common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                              recovery_image, boot_image, self._info)
     validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
@@ -1118,25 +1119,30 @@
 
 
 class MockScriptWriter(object):
-  """A class that mocks edify_generator.EdifyGenerator.
-  """
+  """A class that mocks edify_generator.EdifyGenerator."""
+
   def __init__(self, enable_comments=False):
     self.lines = []
     self.enable_comments = enable_comments
+
   def Comment(self, comment):
     if self.enable_comments:
-      self.lines.append("# {}".format(comment))
+      self.lines.append('# {}'.format(comment))
+
   def AppendExtra(self, extra):
     self.lines.append(extra)
+
   def __str__(self):
-    return "\n".join(self.lines)
+    return '\n'.join(self.lines)
 
 
 class MockBlockDifference(object):
+
   def __init__(self, partition, tgt, src=None):
     self.partition = partition
     self.tgt = tgt
     self.src = src
+
   def WriteScript(self, script, _, progress=None,
                   write_verify_script=False):
     if progress:
@@ -1144,11 +1150,13 @@
     script.AppendExtra("patch({});".format(self.partition))
     if write_verify_script:
       self.WritePostInstallVerifyScript(script)
+
   def WritePostInstallVerifyScript(self, script):
     script.AppendExtra("verify({});".format(self.partition))
 
 
 class FakeSparseImage(object):
+
   def __init__(self, size):
     self.blocksize = 4096
     self.total_blocks = size // 4096
@@ -1156,12 +1164,13 @@
 
 
 class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
+
   @staticmethod
   def get_op_list(output_path):
     with zipfile.ZipFile(output_path) as output_zip:
-      with output_zip.open("dynamic_partitions_op_list") as op_list:
-        return [line.strip() for line in op_list.readlines()
-                if not line.startswith("#")]
+      with output_zip.open('dynamic_partitions_op_list') as op_list:
+        return [line.decode().strip() for line in op_list.readlines()
+                if not line.startswith(b'#')]
 
   def setUp(self):
     self.script = MockScriptWriter()
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index 1cc539f..d02bc7f 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -47,25 +47,22 @@
     }
 
     self.hash_algorithm = "sha256"
-    self.fixed_salt = \
-        "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
-    self.expected_root_hash = \
-        "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d"
+    self.fixed_salt = (
+        "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7")
+    self.expected_root_hash = (
+        "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d")
 
-  def _create_simg(self, raw_data):
+  def _CreateSimg(self, raw_data):  # pylint: disable=no-self-use
     output_file = common.MakeTempFile()
     raw_image = common.MakeTempFile()
     with open(raw_image, 'wb') as f:
       f.write(raw_data)
 
     cmd = ["img2simg", raw_image, output_file, '4096']
-    p = common.Run(cmd)
-    p.communicate()
-    self.assertEqual(0, p.returncode)
-
+    common.RunAndCheckOutput(cmd)
     return output_file
 
-  def _generate_image(self):
+  def _GenerateImage(self):
     partition_size = 1024 * 1024
     prop_dict = {
         'partition_size': str(partition_size),
@@ -79,11 +76,11 @@
     self.assertIsNotNone(verity_image_builder)
     adjusted_size = verity_image_builder.CalculateMaxImageSize()
 
-    raw_image = ""
+    raw_image = bytearray(adjusted_size)
     for i in range(adjusted_size):
-      raw_image += str(i % 10)
+      raw_image[i] = ord('0') + i % 10
 
-    output_file = self._create_simg(raw_image)
+    output_file = self._CreateSimg(raw_image)
 
     # Append the verity metadata.
     verity_image_builder.Build(output_file)
@@ -92,7 +89,7 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_CreateHashtreeInfoGenerator(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
 
     generator = CreateHashtreeInfoGenerator(
         'system', image_file, self.prop_dict)
@@ -103,7 +100,7 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_DecomposeSparseImage(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
 
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
@@ -114,7 +111,7 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_ParseHashtreeMetadata(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
     generator.DecomposeSparseImage(image_file)
@@ -131,12 +128,12 @@
   def test_ValidateHashtree_smoke(self):
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._generate_image())
+    generator.image = sparse_img.SparseImage(self._GenerateImage())
 
     generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
     info.hashtree_range = RangeSet(
-        data=[991232 / 4096, (991232 + 12288) / 4096])
+        data=[991232 // 4096, (991232 + 12288) // 4096])
     info.hash_algorithm = self.hash_algorithm
     info.salt = self.fixed_salt
     info.root_hash = self.expected_root_hash
@@ -147,12 +144,12 @@
   def test_ValidateHashtree_failure(self):
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._generate_image())
+    generator.image = sparse_img.SparseImage(self._GenerateImage())
 
     generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
     info.hashtree_range = RangeSet(
-        data=[991232 / 4096, (991232 + 12288) / 4096])
+        data=[991232 // 4096, (991232 + 12288) // 4096])
     info.hash_algorithm = self.hash_algorithm
     info.salt = self.fixed_salt
     info.root_hash = "a" + self.expected_root_hash[1:]
@@ -161,12 +158,12 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_Generate(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
     generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
     info = generator.Generate(image_file)
 
-    self.assertEqual(RangeSet(data=[0, 991232 / 4096]), info.filesystem_range)
-    self.assertEqual(RangeSet(data=[991232 / 4096, (991232 + 12288) / 4096]),
+    self.assertEqual(RangeSet(data=[0, 991232 // 4096]), info.filesystem_range)
+    self.assertEqual(RangeSet(data=[991232 // 4096, (991232 + 12288) // 4096]),
                      info.hashtree_range)
     self.assertEqual(self.hash_algorithm, info.hash_algorithm)
     self.assertEqual(self.fixed_salt, info.salt)
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 37d5d27..5d99c99 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -44,7 +44,7 @@
   """Constructs and returns a File object. Rounds up its size if needed."""
 
   assert os.path.exists(unpacked_name)
-  with open(unpacked_name, 'r') as f:
+  with open(unpacked_name, 'rb') as f:
     file_data = f.read()
   file_size = len(file_data)
   if round_up:
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 3063800..e7f84f5 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -131,7 +131,8 @@
     BuildVerityImageError: On error.
   """
   try:
-    with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
+    with open(target, 'ab') as out_file, \
+        open(file_to_append, 'rb') as input_file:
       for line in input_file:
         out_file.write(line)
   except IOError:
@@ -178,6 +179,8 @@
     # key_path and algorithm are only available when chain partition is used.
     key_path = prop_dict.get("avb_key_path")
     algorithm = prop_dict.get("avb_algorithm")
+
+    # Image uses hash footer.
     if prop_dict.get("avb_hash_enable") == "true":
       return VerifiedBootVersion2VerityImageBuilder(
           prop_dict["partition_name"],
@@ -188,16 +191,17 @@
           algorithm,
           prop_dict.get("avb_salt"),
           prop_dict["avb_add_hash_footer_args"])
-    else:
-      return VerifiedBootVersion2VerityImageBuilder(
-          prop_dict["partition_name"],
-          partition_size,
-          VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
-          prop_dict["avb_avbtool"],
-          key_path,
-          algorithm,
-          prop_dict.get("avb_salt"),
-          prop_dict["avb_add_hashtree_footer_args"])
+
+    # Image uses hashtree footer.
+    return VerifiedBootVersion2VerityImageBuilder(
+        prop_dict["partition_name"],
+        partition_size,
+        VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
+        prop_dict["avb_avbtool"],
+        key_path,
+        algorithm,
+        prop_dict.get("avb_salt"),
+        prop_dict["avb_add_hashtree_footer_args"])
 
   return None
 
@@ -605,19 +609,19 @@
     self.metadata_size = metadata_size
 
     self.hashtree_info.filesystem_range = RangeSet(
-        data=[0, adjusted_size / self.block_size])
+        data=[0, adjusted_size // self.block_size])
     self.hashtree_info.hashtree_range = RangeSet(
-        data=[adjusted_size / self.block_size,
-              (adjusted_size + verity_tree_size) / self.block_size])
+        data=[adjusted_size // self.block_size,
+              (adjusted_size + verity_tree_size) // self.block_size])
 
   def _ParseHashtreeMetadata(self):
     """Parses the hash_algorithm, root_hash, salt from the metadata block."""
 
     metadata_start = self.filesystem_size + self.hashtree_size
     metadata_range = RangeSet(
-        data=[metadata_start / self.block_size,
-              (metadata_start + self.metadata_size) / self.block_size])
-    meta_data = ''.join(self.image.ReadRangeSet(metadata_range))
+        data=[metadata_start // self.block_size,
+              (metadata_start + self.metadata_size) // self.block_size])
+    meta_data = b''.join(self.image.ReadRangeSet(metadata_range))
 
     # More info about the metadata structure available in:
     # system/extras/verity/build_verity_metadata.py
@@ -640,9 +644,9 @@
     assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
             int(table_entries[6]) * self.block_size == self.filesystem_size)
 
-    self.hashtree_info.hash_algorithm = table_entries[7]
-    self.hashtree_info.root_hash = table_entries[8]
-    self.hashtree_info.salt = table_entries[9]
+    self.hashtree_info.hash_algorithm = table_entries[7].decode()
+    self.hashtree_info.root_hash = table_entries[8].decode()
+    self.hashtree_info.salt = table_entries[9].decode()
 
   def ValidateHashtree(self):
     """Checks that we can reconstruct the verity hash tree."""
@@ -669,8 +673,8 @@
 
     # Reads the generated hash tree and checks if it has the exact same bytes
     # as the one in the sparse image.
-    with open(generated_verity_tree, "rb") as fd:
-      return fd.read() == ''.join(self.image.ReadRangeSet(
+    with open(generated_verity_tree, 'rb') as fd:
+      return fd.read() == b''.join(self.image.ReadRangeSet(
           self.hashtree_info.hashtree_range))
 
   def Generate(self, image):