Merge "Add <ramdisk>/system/etc/ramdisk/build.prop."
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 6352e38..41defb2 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -505,9 +505,9 @@
 
 # Remove *_OUT_INTERMEDIATE_LIBRARIES
 $(call add-clean-step, rm -rf $(addsuffix /lib,\
-  $(HOST_OUT_INTERMEDIATES) $(2ND_HOST_OUT_INTERMEDIATES) \
-  $(HOST_CROSS_OUT_INTERMEDIATES) $(2ND_HOST_CROSS_OUT_INTERMEDIATES) \
-  $(TARGET_OUT_INTERMEDIATES) $(2ND_TARGET_OUT_INTERMEDIATES)))
+$(HOST_OUT_INTERMEDIATES) $(2ND_HOST_OUT_INTERMEDIATES) \
+$(HOST_CROSS_OUT_INTERMEDIATES) $(2ND_HOST_CROSS_OUT_INTERMEDIATES) \
+$(TARGET_OUT_INTERMEDIATES) $(2ND_TARGET_OUT_INTERMEDIATES)))
 
 # Remove strip.sh intermediates to save space
 $(call add-clean-step, find $(OUT_DIR) \( -name "*.so.debug" -o -name "*.so.dynsyms" -o -name "*.so.funcsyms" -o -name "*.so.keep_symbols" -o -name "*.so.mini_debuginfo.xz" \) -print0 | xargs -0 rm -f)
@@ -646,6 +646,8 @@
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/odm/build.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/odm/build.prop)
 
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/apex)
+
 # Remove libcameraservice and libcamera_client from base_system
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libcameraservice.so)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libcamera_client.so)
@@ -685,6 +687,9 @@
 # Migrate preopt files to system_other for some devices
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/*/*app/*/oat)
 
+# Migrate preopt files from system_other for some devices
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system_other)
+
 # Remove Android Core Library artifacts from the system partition, now
 # that they live in the ART APEX (b/142944799).
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/framework/*.jar)
@@ -699,9 +704,27 @@
 # again, as the original change removing them was reverted.
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/framework/*.jar)
 
+# Remove cas@1.1 from the vendor partition
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/bin/hw/android.hardware.cas@1.1*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/init/android.hardware.cas@1.1*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/vintf/manifest/android.hardware.cas@1.1*)
+
+# Remove com.android.cellbroadcast apex for Go devices
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/apex/com.android.cellbroadcast.apex)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/apex/com.android.cellbroadcast)
+
+# Remove CellBroadcastLegacyApp for Go devices
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/priv-app/CellBroadcastLegacyApp)
+
+# Remove MediaProvider after moving into APEX
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/priv-app/MediaProvider)
+
 # The core image variant has been renamed to ""
 $(call add-clean-step, find $(SOONG_OUT_DIR)/.intermediates -type d -name "android_*_core*" -print0 | xargs -0 rm -rf)
 
+# Remove 'media' command
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/media)
+
 # Remove CtsShim apks from the system partition, since they have been moved inside
 # the cts shim apex. Also remove the cts shim apex prebuilt since it has been
 # removed in flattened apex configurations.
@@ -713,6 +736,9 @@
 $(call add-clean-step, find $(SOONG_OUT_DIR)/.intermediates -type d -name "android_*_recovery*" -print0 | xargs -0 rm -rf)
 $(call add-clean-step, find $(SOONG_OUT_DIR)/.intermediates -type d -name "android_*_vendor*" -print0 | xargs -0 rm -rf)
 
+# Remove PermissionController after moving into APEX
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/priv-app/*PermissionController)
+
 # Clean up VTS-Core and VTS10 related artifacts.
 $(call add-clean-step, rm -rf $(HOST_OUT)/vts-core/*)
 $(call add-clean-step, rm -rf $(HOST_OUT)/framework/vts-core-tradefed.jar)
@@ -725,6 +751,10 @@
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/default.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/prop.default)
 
+# Workaround for Soong not being able to rebuild the host binary if its
+# JNI dependencies change: b/170389375
+$(call add-clean-step, rm -rf $(OUT_DIR)/soong/host/*/lib*/libconscrypt_openjdk_jni.so)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/OWNERS b/OWNERS
index 05f8b3d..4cac0f5 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,11 +1 @@
-# Core build team (MTV)
-ccross@android.com
-dwillemsen@google.com
-asmundak@google.com
-jungjw@google.com
-
-# To expedite LON reviews
-hansson@google.com
-
-# For version updates
-per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
+include platform/build/soong:/OWNERS
diff --git a/core/Makefile b/core/Makefile
index dd4190a..2bbf688 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -495,9 +495,20 @@
 		--title="Remaining Android.mk files for $(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)" \
 		--codesearch=$(PRIVATE_CODE_SEARCH_BASE_URL) \
 		--out_dir="$(OUT_DIR)" \
+		--mode=html \
 		> $@
 $(call dist-for-goals,droidcore,$(MK2BP_REMAINING_HTML))
 
+MK2BP_REMAINING_CSV := $(PRODUCT_OUT)/mk2bp_remaining.csv
+$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+	@rm -f $@
+	$(hide) $(MK2BP_CATALOG_SCRIPT) \
+		--device=$(TARGET_DEVICE) \
+		--out_dir="$(OUT_DIR)" \
+		--mode=csv \
+		> $@
+$(call dist-for-goals,droidcore,$(MK2BP_REMAINING_CSV))
+
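Note: the new CSV report is a sibling of the existing HTML report and is disted the same way. A minimal sketch of how both could be retrieved after a build, assuming a standard dist setup (goal combination and paths are illustrative):

    m droidcore dist DIST_DIR=out/dist
    ls "$ANDROID_PRODUCT_OUT"/mk2bp_remaining.html "$ANDROID_PRODUCT_OUT"/mk2bp_remaining.csv
    ls out/dist/mk2bp_remaining.csv    # copied there by dist-for-goals for the droidcore goal
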
 # -----------------------------------------------------------------
 # Modules use -Wno-error, or added default -Wall -Werror
 WALL_WERROR := $(PRODUCT_OUT)/wall_werror.txt
@@ -753,7 +764,7 @@
 endif
 
 INTERNAL_MKBOOTIMG_VERSION_ARGS := \
-    --os_version $(PLATFORM_VERSION) \
+    --os_version $(PLATFORM_VERSION_LAST_STABLE) \
     --os_patch_level $(PLATFORM_SECURITY_PATCH)
 
 # Define these only if we are building boot
@@ -762,8 +773,11 @@
 
 ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
 $(error TARGET_BOOTIMAGE_USE_EXT2 is not supported anymore)
+endif # TARGET_BOOTIMAGE_USE_EXT2
 
-else ifeq (true,$(BOARD_AVB_ENABLE)) # TARGET_BOOTIMAGE_USE_EXT2 != true
+$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET), $(eval $(call add-dependency,$(b),$(call bootimage-to-kernel,$(b)))))
+
+ifeq (true,$(BOARD_AVB_ENABLE))
 
 # $1: boot image target
 define build_boot_board_avb_enabled
@@ -776,7 +790,7 @@
           $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
 endef
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(call bootimage-to-kernel,$@)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH)
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_board_avb_enabled,$@)
 
@@ -794,7 +808,7 @@
   $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 endef
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER) $(call bootimage-to-kernel,$@)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_supports_boot_signer,$@)
 
@@ -812,7 +826,7 @@
   $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 endef
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY) $(call bootimage-to-kernel,$@)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY)
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_supports_vboot,$@)
 
@@ -829,7 +843,7 @@
   $(call assert-max-image-size,$1,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 endef
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(call bootimage-to-kernel,$@)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES)
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_novboot,$@)
 
@@ -838,7 +852,7 @@
 	@echo "make $@: ignoring dependencies"
 	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_novboot,$(b)))
 
-endif # TARGET_BOOTIMAGE_USE_EXT2
+endif # BOARD_AVB_ENABLE
 endif # BUILDING_BOOT_IMAGE
 
 else # TARGET_NO_KERNEL == "true"
@@ -1040,6 +1054,8 @@
 license_modules := $(filter-out $(TARGET_OUT_TESTCASES)/%,$(license_modules))
 # filesystem images: system, vendor, product, system_ext, odm, vendor_dlkm, and odm_dlkm
 license_modules_system := $(filter $(TARGET_OUT)/%,$(license_modules))
+# system_other is relevant to the system partition.
+license_modules_system += $(filter $(TARGET_OUT_SYSTEM_OTHER)/%,$(license_modules))
 license_modules_vendor := $(filter $(TARGET_OUT_VENDOR)/%,$(license_modules))
 license_modules_product := $(filter $(TARGET_OUT_PRODUCT)/%,$(license_modules))
 license_modules_system_ext := $(filter $(TARGET_OUT_SYSTEM_EXT)/%,$(license_modules))
@@ -1273,7 +1289,13 @@
 # Get a colon-separated list of search paths.
 INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
 
+# Collects file_contexts files from modules to be installed
+$(call merge-fc-files, \
+  $(sort $(foreach m,$(product_MODULES),$(ALL_MODULES.$(m).FILE_CONTEXTS))),\
+  $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.modules.tmp)
+
 SELINUX_FC := $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.bin
+
 INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
 
 ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
@@ -1965,6 +1987,7 @@
 endif
 
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET), $(eval $(call add-dependency,$(b),$(call bootimage-to-kernel,$(b)))))
 $(INSTALLED_BOOTIMAGE_TARGET): $(recoveryimage-deps)
 	$(call pretty,"Target boot image from recovery: $@")
 	$(call build-recoveryimage-target, $@, $(PRODUCT_OUT)/$(subst .img,,$(subst boot,kernel,$(notdir $@))))
@@ -2081,6 +2104,7 @@
 #
 # Note: it's intentional to skip signing for boot-debug.img, because it
 # can only be used if the device is unlocked with verification error.
+ifdef BUILDING_BOOT_IMAGE
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
 ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
   INSTALLED_DEBUG_BOOTIMAGE_TARGET := $(foreach k,$(subst kernel,boot-debug,$(BOARD_KERNEL_BINARIES)), \
@@ -2135,6 +2159,7 @@
 	$(foreach b,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(call build-debug-bootimage-target,$b))
 
 endif # TARGET_NO_KERNEL
+endif # BUILDING_BOOT_IMAGE
 
 ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
 ifeq ($(BUILDING_RAMDISK_IMAGE),true)
@@ -3136,22 +3161,22 @@
 
 BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += \
     --prop com.android.build.system.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
-    --prop com.android.build.system.os_version:$(PLATFORM_VERSION) \
+    --prop com.android.build.system.os_version:$(PLATFORM_VERSION_LAST_STABLE) \
     --prop com.android.build.system.security_patch:$(PLATFORM_SECURITY_PATCH)
 
 BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += \
     --prop com.android.build.product.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
-    --prop com.android.build.product.os_version:$(PLATFORM_VERSION) \
+    --prop com.android.build.product.os_version:$(PLATFORM_VERSION_LAST_STABLE) \
     --prop com.android.build.product.security_patch:$(PLATFORM_SECURITY_PATCH)
 
 BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS += \
     --prop com.android.build.system_ext.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
-    --prop com.android.build.system_ext.os_version:$(PLATFORM_VERSION) \
+    --prop com.android.build.system_ext.os_version:$(PLATFORM_VERSION_LAST_STABLE) \
     --prop com.android.build.system_ext.security_patch:$(PLATFORM_SECURITY_PATCH)
 
 BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
     --prop com.android.build.boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
-    --prop com.android.build.boot.os_version:$(PLATFORM_VERSION)
+    --prop com.android.build.boot.os_version:$(PLATFORM_VERSION_LAST_STABLE)
 
 BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS += \
     --prop com.android.build.vendor_boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
@@ -3161,11 +3186,11 @@
 
 BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS += \
     --prop com.android.build.vendor.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
-    --prop com.android.build.vendor.os_version:$(PLATFORM_VERSION)
+    --prop com.android.build.vendor.os_version:$(PLATFORM_VERSION_LAST_STABLE)
 
 BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS += \
     --prop com.android.build.odm.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
-    --prop com.android.build.odm.os_version:$(PLATFORM_VERSION)
+    --prop com.android.build.odm.os_version:$(PLATFORM_VERSION_LAST_STABLE)
 
 BOARD_AVB_VENDOR_DLKM_ADD_HASHTREE_FOOTER_ARGS += \
     --prop com.android.build.vendor_dlkm.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
@@ -4758,7 +4783,7 @@
 
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 $(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
-$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
 	@echo "Package OTA: $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
 
@@ -4776,7 +4801,8 @@
 $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): \
     $(BUILT_TARGET_FILES_PACKAGE) \
-    $(OTA_FROM_TARGET_FILES)
+    $(OTA_FROM_TARGET_FILES) \
+    $(INTERNAL_OTATOOLS_FILES)
 	@echo "Package OTA (retrofit dynamic partitions): $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --retrofit_dynamic_partitions)
 
@@ -4876,7 +4902,7 @@
   $(PROFDATA_ZIP): $(SOONG_ZIP)
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION) -f $(LLVM_PROFDATA) -f $(LIBCXX)
 
-  $(call dist-for-goals,droidcore,$(PROFDATA_ZIP))
+  $(call dist-for-goals,droidcore apps_only,$(PROFDATA_ZIP))
 endif
 
 # -----------------------------------------------------------------
@@ -5457,7 +5483,3 @@
 .PHONY: haiku
 haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
 $(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
-
-# -----------------------------------------------------------------
-# The makefile for haiku line coverage.
-include $(BUILD_SYSTEM)/line_coverage.mk
diff --git a/core/OWNERS b/core/OWNERS
index 459683e..5456d4f 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,2 +1,6 @@
 per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,dbrazdil@google.com
 per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,mathieuc@google.com
+
+# For version updates
+per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
+
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 1b41898..adf61f1 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -766,7 +766,7 @@
     $(foreach test_config_file, $(LOCAL_EXTRA_FULL_TEST_CONFIGS), \
       $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
         $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
-          $(test_config_file):$(dir)/$(notdir $(test_config_file))))))
+          $(test_config_file):$(dir)/$(basename $(notdir $(test_config_file))).config))))
   endif
 
   ifneq (,$(wildcard $(LOCAL_PATH)/DynamicConfig.xml))
@@ -987,6 +987,9 @@
 ALL_MODULES.$(my_register_name).TEST_CONFIG := $(test_config)
 ALL_MODULES.$(my_register_name).EXTRA_TEST_CONFIGS := $(LOCAL_EXTRA_FULL_TEST_CONFIGS)
 ALL_MODULES.$(my_register_name).TEST_MAINLINE_MODULES := $(LOCAL_TEST_MAINLINE_MODULES)
+ifndef LOCAL_IS_HOST_MODULE
+ALL_MODULES.$(my_register_name).FILE_CONTEXTS := $(LOCAL_FILE_CONTEXTS)
+endif
 test_config :=
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/board_config.mk b/core/board_config.mk
index 95d8af8..05b6b29 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -342,23 +342,29 @@
 
 # Are we building a boot image
 BUILDING_BOOT_IMAGE :=
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-  BUILDING_BOOT_IMAGE :=
-else ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),)
-  ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
+ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),)
+  ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+    BUILDING_BOOT_IMAGE :=
+  else ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
     BUILDING_BOOT_IMAGE := true
   endif
 else ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),true)
-  BUILDING_BOOT_IMAGE := true
+  ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+    $(warning *** PRODUCT_BUILD_BOOT_IMAGE is true, but so is BOARD_USES_RECOVERY_AS_BOOT.)
+    $(warning *** Skipping building boot image.)
+    BUILDING_BOOT_IMAGE :=
+  else
+    BUILDING_BOOT_IMAGE := true
+  endif
 endif
 .KATI_READONLY := BUILDING_BOOT_IMAGE
 
 # Are we building a recovery image
 BUILDING_RECOVERY_IMAGE :=
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-  BUILDING_RECOVERY_IMAGE := true
-else ifeq ($(PRODUCT_BUILD_RECOVERY_IMAGE),)
-  ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
+ifeq ($(PRODUCT_BUILD_RECOVERY_IMAGE),)
+  ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+    BUILDING_RECOVERY_IMAGE := true
+  else ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
     ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY)))
       BUILDING_RECOVERY_IMAGE := true
     endif
@@ -372,7 +378,14 @@
 BUILDING_VENDOR_BOOT_IMAGE :=
 ifdef BOARD_BOOT_HEADER_VERSION
   ifneq ($(call math_gt_or_eq,$(BOARD_BOOT_HEADER_VERSION),3),)
-    ifneq ($(TARGET_NO_VENDOR_BOOT),true)
+    ifneq ($(TARGET_NO_VENDOR_BOOT),)
+      $(warning TARGET_NO_VENDOR_BOOT has been deprecated. Please use PRODUCT_BUILD_VENDOR_BOOT_IMAGE.)
+      ifneq ($(TARGET_NO_VENDOR_BOOT),true)
+        BUILDING_VENDOR_BOOT_IMAGE := true
+      endif
+    else ifeq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE),)
+      BUILDING_VENDOR_BOOT_IMAGE := true
+    else ifeq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE),true)
       BUILDING_VENDOR_BOOT_IMAGE := true
     endif
   endif
diff --git a/core/check_elf_file.mk b/core/check_elf_file.mk
index d54a5b7..b5be81f 100644
--- a/core/check_elf_file.mk
+++ b/core/check_elf_file.mk
@@ -14,12 +14,14 @@
 # - my_installed_module_stem
 # - my_prebuilt_src_file
 # - my_check_elf_file_shared_lib_files
+# - my_system_shared_libraries
 
 ifndef LOCAL_IS_HOST_MODULE
 ifneq ($(filter $(LOCAL_MODULE_CLASS),SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS),)
 check_elf_files_stamp := $(intermediates)/check_elf_files.timestamp
 $(check_elf_files_stamp): PRIVATE_SONAME := $(if $(filter $(LOCAL_MODULE_CLASS),SHARED_LIBRARIES),$(my_installed_module_stem))
 $(check_elf_files_stamp): PRIVATE_ALLOW_UNDEFINED_SYMBOLS := $(LOCAL_ALLOW_UNDEFINED_SYMBOLS)
+$(check_elf_files_stamp): PRIVATE_SYSTEM_SHARED_LIBRARIES := $(my_system_shared_libraries)
 # PRIVATE_SHARED_LIBRARY_FILES are file paths to built shared libraries.
 # In addition to $(my_check_elf_file_shared_lib_files), some file paths are
 # added by `resolve-shared-libs-for-elf-file-check` from `core/main.mk`.
@@ -33,6 +35,7 @@
 	    --skip-unknown-elf-machine \
 	    $(if $(PRIVATE_SONAME),--soname $(PRIVATE_SONAME)) \
 	    $(foreach l,$(PRIVATE_SHARED_LIBRARY_FILES),--shared-lib $(l)) \
+	    $(foreach l,$(PRIVATE_SYSTEM_SHARED_LIBRARIES),--system-shared-lib $(l)) \
 	    $(if $(PRIVATE_ALLOW_UNDEFINED_SYMBOLS),--allow-undefined-symbols) \
 	    --llvm-readobj=$(LLVM_READOBJ) \
 	    $<
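Note: with PRIVATE_SYSTEM_SHARED_LIBRARIES wired through, the checker also learns which libraries come from LOCAL_SYSTEM_SHARED_LIBRARIES. Roughly the command line the stamp rule expands to, as a sketch only (the checker binary name and all paths are placeholders):

    check_elf_file \
        --skip-unknown-elf-machine \
        --soname libfoo.so \
        --shared-lib out/.../libbar.so \
        --system-shared-lib libc.so \
        --system-shared-lib libm.so \
        --llvm-readobj=out/.../llvm-readobj \
        out/.../libfoo.so
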
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index d515db3..7d79baf 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -102,6 +102,7 @@
 LOCAL_EXTRACT_APK:=
 LOCAL_EXTRACT_DPI_APK:=
 LOCAL_FDO_SUPPORT:=
+LOCAL_FILE_CONTEXTS:=
 LOCAL_FINDBUGS_FLAGS:=
 LOCAL_FORCE_STATIC_EXECUTABLE:=
 LOCAL_FULL_CLASSES_JACOCO_JAR:=
diff --git a/core/combo/HOST_CROSS_linux_bionic-arm64.mk b/core/combo/HOST_CROSS_linux_bionic-arm64.mk
new file mode 100644
index 0000000..df6865f
--- /dev/null
+++ b/core/combo/HOST_CROSS_linux_bionic-arm64.mk
@@ -0,0 +1,22 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Configuration for builds hosted on linux_bionic-arm64
+# Included by combo/select.mk
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
diff --git a/core/combo/arch/arm64/armv8-2a-dotprod.mk b/core/combo/arch/arm64/armv8-2a-dotprod.mk
new file mode 100644
index 0000000..c775cf7
--- /dev/null
+++ b/core/combo/arch/arm64/armv8-2a-dotprod.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This .mk file is required to support builds for the new armv8-2a-dotprod Arm64
+# arch variant. The file just needs to be present; it does not need to contain
+# anything.
diff --git a/core/config.mk b/core/config.mk
index 1bbb78c..16fa988 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -155,6 +155,8 @@
 $(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
 $(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported.)
 $(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.)
+$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead.)
+$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead.)
 
 # Used to force goals to build.  Only use for conditionally defined goals.
 .PHONY: FORCE
@@ -768,7 +770,7 @@
 # is made which breaks compatibility with the previous platform sepolicy version,
 # not just on every increase in PLATFORM_SDK_VERSION.  The minor version should
 # be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
-sepolicy_major_vers := 29
+sepolicy_major_vers := 30
 sepolicy_minor_vers := 0
 
 ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
diff --git a/core/definitions.mk b/core/definitions.mk
index ace3ff8..bfbeee3 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -3187,11 +3187,12 @@
 
 ###########################################################
 ## Find system_$(VER) in LOCAL_SDK_VERSION
+## note: system_server_* is excluded. It's a different API surface
 ##
 ## $(1): LOCAL_SDK_VERSION
 ###########################################################
 define has-system-sdk-version
-$(filter system_%,$(1))
+$(filter-out system_server_%,$(filter system_%,$(1)))
 endef
 
 ###########################################################
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 76e7dd3..a5571ae 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -148,15 +148,25 @@
 # BUILD_OS is the real host doing the build.
 BUILD_OS := $(HOST_OS)
 
-HOST_CROSS_OS :=
-# We can cross-build Windows binaries on Linux
+# We can do the cross-build only on Linux
 ifeq ($(HOST_OS),linux)
-ifeq ($(BUILD_HOST_static),)
-HOST_CROSS_OS := windows
-HOST_CROSS_ARCH := x86
-HOST_CROSS_2ND_ARCH := x86_64
-2ND_HOST_CROSS_IS_64_BIT := true
-endif
+  # Windows has been the default host_cross OS
+  ifeq (,$(filter-out windows,$(HOST_CROSS_OS)))
+    # We can only create static host binaries for Linux, so if static host
+    # binaries are requested, turn off Windows cross-builds.
+    ifeq ($(BUILD_HOST_static),)
+      HOST_CROSS_OS := windows
+      HOST_CROSS_ARCH := x86
+      HOST_CROSS_2ND_ARCH := x86_64
+      2ND_HOST_CROSS_IS_64_BIT := true
+    endif
+  else ifeq ($(HOST_CROSS_OS),linux_bionic)
+    ifeq (,$(HOST_CROSS_ARCH))
+      $(error HOST_CROSS_ARCH missing.)
+    endif
+  else
+    $(error Unsupported HOST_CROSS_OS $(HOST_CROSS_OS))
+  endif
 endif
 
 ifeq ($(HOST_OS),)
diff --git a/core/line_coverage.mk b/core/line_coverage.mk
deleted file mode 100644
index 6bfbb8d..0000000
--- a/core/line_coverage.mk
+++ /dev/null
@@ -1,94 +0,0 @@
-# -----------------------------------------------------------------
-# Make target for line coverage. This target generates a zip file
-# called `line_coverage_profiles.zip` that contains a large set of
-# zip files one for each fuzz target/critical component. Each zip
-# file contains a set of profile files (*.gcno) that we will use
-# to generate line coverage reports. Furthermore, target compiles
-# all fuzz targets with line coverage instrumentation enabled and
-# packs them into another zip file called `line_coverage_profiles.zip`.
-#
-# To run the make target set the coverage related envvars first:
-# 	NATIVE_COVERAGE=true NATIVE_COVERAGE_PATHS=* make haiku-line-coverage
-# -----------------------------------------------------------------
-
-# TODO(b/148306195): Due this issue some fuzz targets cannot be built with
-# line coverage instrumentation. For now we just block them.
-blocked_fuzz_targets := libneuralnetworks_fuzzer
-
-fuzz_targets := $(ALL_FUZZ_TARGETS)
-fuzz_targets := $(filter-out $(blocked_fuzz_targets),$(fuzz_targets))
-
-
-# Android components that considered critical.
-# Please note that adding/Removing critical components is very rare.
-critical_components_static := \
-	lib-bt-packets \
-	libbt-stack \
-	libffi \
-	libhevcdec \
-	libhevcenc \
-	libmpeg2dec \
-	libosi \
-	libpdx \
-	libselinux \
-	libvold \
-	libyuv
-
-# Format is <module_name> or <module_name>:<apex_name>
-critical_components_shared := \
-	libaudioprocessing \
-	libbinder \
-	libbluetooth_gd \
-	libbrillo \
-	libcameraservice \
-	libcurl \
-	libhardware \
-	libinputflinger \
-	libopus \
-	libstagefright \
-	libvixl:com.android.art.debug
-
-# Use the intermediates directory to avoid installing libraries to the device.
-intermediates := $(call intermediates-dir-for,PACKAGING,haiku-line-coverage)
-
-
-# We want the profile files for all fuzz targets + critical components.
-line_coverage_profiles := $(intermediates)/line_coverage_profiles.zip
-
-critical_components_static_inputs := $(foreach lib,$(critical_components_static), \
-	$(call intermediates-dir-for,STATIC_LIBRARIES,$(lib))/$(lib).a)
-
-critical_components_shared_inputs := $(foreach lib,$(critical_components_shared), \
-	$(eval filename := $(call word-colon,1,$(lib))) \
-	$(eval modulename := $(subst :,.,$(lib))) \
-	$(call intermediates-dir-for,SHARED_LIBRARIES,$(modulename))/$(filename).so)
-
-fuzz_target_inputs := $(foreach fuzz,$(fuzz_targets), \
-	$(call intermediates-dir-for,EXECUTABLES,$(fuzz))/$(fuzz))
-
-# When coverage is enabled (NATIVE_COVERAGE is set), make creates
-# a "coverage" directory and stores all profile (*.gcno) files in inside.
-# We need everything that is stored inside this directory.
-$(line_coverage_profiles): $(fuzz_target_inputs)
-$(line_coverage_profiles): $(critical_components_static_inputs)
-$(line_coverage_profiles): $(critical_components_shared_inputs)
-$(line_coverage_profiles): $(SOONG_ZIP)
-	$(SOONG_ZIP) -o $@ -D $(PRODUCT_OUT)/coverage
-
-
-# Zip all fuzz targets compiled with line coverage.
-line_coverage_fuzz_targets := $(intermediates)/line_coverage_fuzz_targets.zip
-
-$(line_coverage_fuzz_targets): $(fuzz_target_inputs)
-$(line_coverage_fuzz_targets): $(SOONG_ZIP)
-	$(SOONG_ZIP) -o $@ -j $(addprefix -f ,$(fuzz_target_inputs))
-
-
-.PHONY: haiku-line-coverage
-haiku-line-coverage: $(line_coverage_profiles) $(line_coverage_fuzz_targets)
-$(call dist-for-goals, haiku-line-coverage, \
-	$(line_coverage_profiles):line_coverage_profiles.zip \
-	$(line_coverage_fuzz_targets):line_coverage_fuzz_targets.zip)
-
-line_coverage_profiles :=
-line_coverage_fuzz_targets :=
diff --git a/core/main.mk b/core/main.mk
index 0e419a2..40cc70f 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -81,6 +81,8 @@
 -include test/mts/tools/build/config.mk
 # VTS-Core-specific config.
 -include test/vts/tools/vts-core-tradefed/build/config.mk
+# CSUITE-specific config.
+-include test/app_compat/csuite/tools/build/config.mk
 
 # Clean rules
 .PHONY: clean-dex-files
@@ -606,8 +608,8 @@
   $(eval modules_32 := $(patsubst %:32,%,$(filter %:32,$(2)))) \
   $(eval modules_64 := $(patsubst %:64,%,$(filter %:64,$(2)))) \
   $(eval modules_both := $(filter-out %:32 %:64,$(2))) \
-  $(eval ### For host cross modules, the primary arch is windows x86 and secondary is x86_64) \
-  $(if $(filter HOST_CROSS,$(1)), \
+  $(eval ### if 2ND_HOST_CROSS_IS_64_BIT, then primary/secondary are reversed for HOST_CROSS modules) \
+  $(if $(filter HOST_CROSS_true,$(1)_$(2ND_HOST_CROSS_IS_64_BIT)), \
     $(eval modules_1st_arch := $(modules_32)) \
     $(eval modules_2nd_arch := $(modules_64)), \
     $(eval modules_1st_arch := $(modules_64)) \
@@ -1637,6 +1639,10 @@
 # dist_files only for putting your library into the dist directory with a full build.
 .PHONY: dist_files
 
+ifeq ($(SOONG_COLLECT_JAVA_DEPS), true)
+  $(call dist-for-goals, dist_files, $(SOONG_OUT_DIR)/module_bp_java_deps.json)
+endif
+
 .PHONY: apps_only
 ifneq ($(TARGET_BUILD_APPS),)
   # If this build is just for apps, only build apps and not the full system by default.
@@ -1795,9 +1801,11 @@
   # Put XML formatted API files in the dist dir.
   $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml: $(call java-lib-files,android_stubs_current) $(APICHECK)
   $(TARGET_OUT_COMMON_INTERMEDIATES)/system-api.xml: $(call java-lib-files,android_system_stubs_current) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/module-lib-api.xml: $(call java-lib-files,android_module_lib_stubs_current) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-server-api.xml: $(call java-lib-files,android_system_server_stubs_current) $(APICHECK)
   $(TARGET_OUT_COMMON_INTERMEDIATES)/test-api.xml: $(call java-lib-files,android_test_stubs_current) $(APICHECK)
 
-  api_xmls := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/,api.xml system-api.xml test-api.xml)
+  api_xmls := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/,api.xml system-api.xml module-lib-api.xml system-server-api.xml test-api.xml)
   $(api_xmls):
 	$(hide) echo "Converting API file to XML: $@"
 	$(hide) mkdir -p $(dir $@)
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 6fccacd..2e1bd69 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -39,7 +39,6 @@
 	user \
 	userdataimage \
 	userdebug \
-	vts10 \
 	win_sdk \
 	winsdk-tools
 
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 721a034..0430007 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -79,8 +79,9 @@
       endif
       module_installed_filename := \
           $(patsubst $(PRODUCT_OUT)/%,%,$($(my_prefix)OUT_JAVA_LIBRARIES))/$(module_leaf)
-    else ifeq ($(LOCAL_MODULE_CLASS),ETC)
-      # ETC modules may be uninstallable, yet still have a NOTICE file. e.g. apex components
+    else ifneq ($(filter ETC DATA,$(LOCAL_MODULE_CLASS)),)
+      # ETC and DATA modules may be uninstallable, yet still have a NOTICE file.
+      # e.g. apex components
       module_installed_filename :=
     else ifneq (,$(and $(filter %.sdk,$(LOCAL_MODULE)),$(filter $(patsubst %.sdk,%,$(LOCAL_MODULE)),$(SOONG_SDK_VARIANT_MODULES))))
       # Soong produces uninstallable *.sdk shared libraries for embedding in APKs.
diff --git a/core/product.mk b/core/product.mk
index 624501e..666e390 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -380,6 +380,7 @@
 _product_single_value_vars += PRODUCT_BUILD_USERDATA_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_RECOVERY_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_BOOT_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_VENDOR_BOOT_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_VBMETA_IMAGE
 
 # List of boot jars delivered via apex
diff --git a/core/rbe.mk b/core/rbe.mk
index 5e55cfb..91606d4 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -57,7 +57,7 @@
   java_r8_d8_platform := $(platform),Pool=java16
 
   RBE_WRAPPER := $(rbe_dir)/rewrapper
-  RBE_CXX := --labels=type=compile,lang=cpp,compiler=clang --env_var_whitelist=PWD --exec_strategy=$(cxx_rbe_exec_strategy) --platform=$(cxx_platform) --compare=$(cxx_compare)
+  RBE_CXX := --labels=type=compile,lang=cpp,compiler=clang --env_var_allowlist=PWD --exec_strategy=$(cxx_rbe_exec_strategy) --platform=$(cxx_platform) --compare=$(cxx_compare)
 
   # Append rewrapper to existing *_WRAPPER variables so it's possible to
   # use both ccache and rewrapper.
diff --git a/core/sdk_font.mk b/core/sdk_font.mk
index 0259a9c..1742925 100644
--- a/core/sdk_font.mk
+++ b/core/sdk_font.mk
@@ -19,9 +19,9 @@
 
 # The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
 sdk_font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
-sdk_font_config :=  $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_font_config)))
+sdk_font_config :=  $(addprefix $(SDK_FONT_TEMP)/standard/, $(notdir $(sdk_font_config)))
 
-$(sdk_font_config): $(SDK_FONT_TEMP)/%.xml: \
+$(sdk_font_config): $(SDK_FONT_TEMP)/standard/%.xml: \
 			frameworks/base/data/fonts/%.xml
 	$(hide) mkdir -p $(dir $@)
 	$(hide) cp -vf $< $@
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index a0315a5..a12ef66 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -91,6 +91,7 @@
 ifdef LOCAL_INSTALLED_MODULE
   ifneq ($(LOCAL_CHECK_ELF_FILES),)
     my_prebuilt_src_file := $(LOCAL_PREBUILT_MODULE_FILE)
+    my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
     include $(BUILD_SYSTEM)/check_elf_file.mk
   endif
 endif
@@ -149,11 +150,14 @@
 $(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
 ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
 	$(copy-or-link-prebuilt-to-target)
+  ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+	[ -x $@ ] || ( $(call echo-error,$@,Target of symlink is not executable); false )
+  endif
 else
 	$(transform-prebuilt-to-target)
-endif
-ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+  ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
 	$(hide) chmod +x $@
+  endif
 endif
 
 ifndef LOCAL_IS_HOST_MODULE
diff --git a/core/soong_config.mk b/core/soong_config.mk
index ad2e816..d5e16f7 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -174,8 +174,9 @@
 $(call add_json_list, BoardOdmSepolicyDirs,              $(BOARD_ODM_SEPOLICY_DIRS))
 $(call add_json_list, BoardVendorDlkmSepolicyDirs,       $(BOARD_VENDOR_DLKM_SEPOLICY_DIRS))
 $(call add_json_list, BoardOdmDlkmSepolicyDirs,          $(BOARD_ODM_DLKM_SEPOLICY_DIRS))
-$(call add_json_list, BoardPlatPublicSepolicyDirs,       $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
-$(call add_json_list, BoardPlatPrivateSepolicyDirs,      $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
+# TODO: BOARD_PLAT_* dirs are only kept for compatibility reasons. They will become a hard error at API level 31.
+$(call add_json_list, SystemExtPublicSepolicyDirs,       $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS) $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
+$(call add_json_list, SystemExtPrivateSepolicyDirs,      $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS) $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
 $(call add_json_list, BoardSepolicyM4Defs,               $(BOARD_SEPOLICY_M4DEFS))
 
 $(call add_json_bool, Flatten_apex,                      $(filter true,$(TARGET_FLATTEN_APEX)))
diff --git a/core/soong_rust_prebuilt.mk b/core/soong_rust_prebuilt.mk
index de6bafd..dea7340 100644
--- a/core/soong_rust_prebuilt.mk
+++ b/core/soong_rust_prebuilt.mk
@@ -28,9 +28,9 @@
   $(call pretty-error,Unsupported LOCAL_MODULE_$(my_prefix)ARCH=$(LOCAL_MODULE_$(my_prefix)ARCH))
 endif
 
-# Don't install rlib/proc_macro libraries.
+# Don't install static/rlib/proc_macro libraries.
 ifndef LOCAL_UNINSTALLABLE_MODULE
-  ifneq ($(filter RLIB_LIBRARIES PROC_MACRO_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+  ifneq ($(filter STATIC_LIBRARIES RLIB_LIBRARIES PROC_MACRO_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
     LOCAL_UNINSTALLABLE_MODULE := true
   endif
 endif
@@ -59,11 +59,14 @@
 $(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
 ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
 	$(copy-or-link-prebuilt-to-target)
+  ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+	[ -x $@ ] || ( $(call echo-error,$@,Target of symlink is not executable); false )
+  endif
 else
 	$(transform-prebuilt-to-target)
-endif
-ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+  ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
 	$(hide) chmod +x $@
+  endif
 endif
 
 ifndef LOCAL_IS_HOST_MODULE
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
new file mode 100644
index 0000000..96e2c74
--- /dev/null
+++ b/core/tasks/art-host-tests.mk
@@ -0,0 +1,29 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.PHONY: art-host-tests
+
+intermediates_dir := $(call intermediates-dir-for,PACKAGING,art-host-tests)
+art_host_tests_zip := $(PRODUCT_OUT)/art-host-tests.zip
+$(art_host_tests_zip) : $(COMPATIBILITY.art-host-tests.FILES) $(SOONG_ZIP)
+	echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $@.list
+	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
+	rm -f $@.list $@-host.list $@-target.list
+
+art-host-tests: $(art_host_tests_zip)
+$(call dist-for-goals, art-host-tests, $(art_host_tests_zip))
+
+tests: art-host-tests
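Note: a minimal usage sketch for the new goal (the dist invocation and DIST_DIR value are illustrative):

    m art-host-tests dist DIST_DIR=out/dist
    unzip -l out/dist/art-host-tests.zip | head   # entries are prefixed with host/ and target/
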
diff --git a/core/tasks/check_boot_jars/package_allowed_list.txt b/core/tasks/check_boot_jars/package_allowed_list.txt
index 6240ffd..18ab427 100644
--- a/core/tasks/check_boot_jars/package_allowed_list.txt
+++ b/core/tasks/check_boot_jars/package_allowed_list.txt
@@ -122,8 +122,6 @@
 libcore\..*
 android\..*
 com\.android\..*
-
-
 ###################################################
 # android.test.base.jar
 junit\.extensions
@@ -241,6 +239,8 @@
 # Packages in the google namespace across all bootclasspath jars.
 com\.google\.android\..*
 com\.google\.vr\.platform.*
+com\.google\.i18n\.phonenumbers\..*
+com\.google\.i18n\.phonenumbers
 
 ###################################################
 # Packages used for Android in Chrome OS
diff --git a/core/tasks/csuite.mk b/core/tasks/csuite.mk
new file mode 100644
index 0000000..a8dba1d
--- /dev/null
+++ b/core/tasks/csuite.mk
@@ -0,0 +1,23 @@
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+test_suite_name := csuite
+test_suite_tradefed := csuite-tradefed
+test_suite_readme := test/app_compat/csuite/README.md
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+.PHONY: csuite
+csuite: $(compatibility_zip)
+$(call dist-for-goals, csuite, $(compatibility_zip))
diff --git a/core/tasks/find-shareduid-violation.mk b/core/tasks/find-shareduid-violation.mk
index 86052f2..972b1ec 100644
--- a/core/tasks/find-shareduid-violation.mk
+++ b/core/tasks/find-shareduid-violation.mk
@@ -28,5 +28,13 @@
 
 $(shareduid_violation_modules_filename): $(find_shareduid_script)
 $(shareduid_violation_modules_filename): $(AAPT2)
-	$(find_shareduid_script) $(PRODUCT_OUT) $(AAPT2) > $@
+	$(find_shareduid_script) \
+		--product_out $(PRODUCT_OUT) \
+		--aapt $(AAPT2) \
+		--copy_out_system $(TARGET_COPY_OUT_SYSTEM) \
+		--copy_out_vendor $(TARGET_COPY_OUT_VENDOR) \
+		--copy_out_product $(TARGET_COPY_OUT_PRODUCT) \
+		--copy_out_system_ext $(TARGET_COPY_OUT_SYSTEM_EXT) \
+		> $@
+
 $(call dist-for-goals,droidcore,$(shareduid_violation_modules_filename))
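Note: the rule now expands to roughly the command below instead of the old positional form; every value shown is illustrative (the real ones come from the product and board configuration):

    build/make/core/tasks/find-shareduid-violation.py \
        --product_out out/target/product/generic_arm64 \
        --aapt out/host/linux-x86/bin/aapt2 \
        --copy_out_system system \
        --copy_out_vendor vendor \
        --copy_out_product product \
        --copy_out_system_ext system_ext \
        > out/target/product/generic_arm64/shareduid_violation_modules.json
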
diff --git a/core/tasks/find-shareduid-violation.py b/core/tasks/find-shareduid-violation.py
index 1f8e4df..8dba5a1 100755
--- a/core/tasks/find-shareduid-violation.py
+++ b/core/tasks/find-shareduid-violation.py
@@ -14,19 +14,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import argparse
+import json
 import os
 import subprocess
-from glob import glob
-from collections import defaultdict
 import sys
-import json
 
-if len(sys.argv) < 3:
-    product_out = os.environ["PRODUCT_OUT"]
-    aapt = "aapt2"
-else:
-    product_out = sys.argv[1]
-    aapt = sys.argv[2]
+from collections import defaultdict
+from glob import glob
+
+def parse_args():
+    """Parse commandline arguments."""
+    parser = argparse.ArgumentParser(description='Find sharedUserId violators')
+    parser.add_argument('--product_out', help='PRODUCT_OUT directory',
+                        default=os.environ.get("PRODUCT_OUT"))
+    parser.add_argument('--aapt', help='Path to aapt or aapt2',
+                        default="aapt2")
+    parser.add_argument('--copy_out_system', help='TARGET_COPY_OUT_SYSTEM',
+                        default="system")
+    parser.add_argument('--copy_out_vendor', help='TARGET_COPY_OUT_VENDOR',
+                        default="vendor")
+    parser.add_argument('--copy_out_product', help='TARGET_COPY_OUT_PRODUCT',
+                        default="product")
+    parser.add_argument('--copy_out_system_ext', help='TARGET_COPY_OUT_SYSTEM_EXT',
+                        default="system_ext")
+    return parser.parse_args()
 
 def execute(cmd):
     p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -45,7 +57,6 @@
     else:
         print(error_msg, file=sys.stderr)
         sys.exit()
-        return None
 
     for l in manifest.split('\n'):
         if "sharedUserId" in l:
@@ -53,18 +64,28 @@
     return None
 
 
-partitions = ["system", "vendor", "product"]
+args = parse_args()
+
+product_out = args.product_out
+aapt = args.aapt
+
+partitions = (
+        ("system", args.copy_out_system),
+        ("vendor", args.copy_out_vendor),
+        ("product", args.copy_out_product),
+        ("system_ext", args.copy_out_system_ext),
+)
 
 shareduid_app_dict = defaultdict(list)
 
-for p in partitions:
-    for f in glob(os.path.join(product_out, p, "*", "*", "*.apk")):
+for part, location in partitions:
+    for f in glob(os.path.join(product_out, location, "*", "*", "*.apk")):
         apk_file = os.path.basename(f)
         shared_uid = extract_shared_uid(f)
 
         if shared_uid is None:
             continue
-        shareduid_app_dict[shared_uid].append((p, apk_file))
+        shareduid_app_dict[shared_uid].append((part, apk_file))
 
 
 output = defaultdict(lambda: defaultdict(list))
diff --git a/core/tasks/platform_availability_check.mk b/core/tasks/platform_availability_check.mk
index f252ff5..7ce6b40 100644
--- a/core/tasks/platform_availability_check.mk
+++ b/core/tasks/platform_availability_check.mk
@@ -51,6 +51,6 @@
   endef
 
   $(foreach m,$(_modules_not_available_for_platform),\
-    $(foreach i,$(ALL_MODULES.$(m).INSTALLED),\
+    $(foreach i,$(filter-out $(HOST_OUT)/%,$(ALL_MODULES.$(m).INSTALLED)),\
       $(eval $(call not_available_for_platform_rule,$(i),$(m)))))
 endif
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 6cafa4a..2b43f0f 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -50,7 +50,7 @@
 ifeq ($(ALLOW_MISSING_DEPENDENCIES),true)
   # Ignore unknown installed files on partial builds
   my_missing_files =
-else ifeq ($(my_modules_strict),true)
+else ifneq ($(my_modules_strict),false)
   my_missing_files = $(shell $(call echo-error,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(eval my_missing_error := true)
 endif
 
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
new file mode 100644
index 0000000..47bf29c
--- /dev/null
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -0,0 +1,33 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# $(1): List of target native files to copy.
+# $(2): Copy destination directory.
+# Evaluates to a list of ":"-separated pairs src:dst.
+define target-native-copy-pairs
+$(foreach m,$(1),\
+  $(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
+  $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
+  $(foreach i, $(_built_files),\
+    $(eval bui_ins := $(subst :,$(space),$(i)))\
+    $(eval ins := $(word 2,$(bui_ins)))\
+    $(if $(filter $(TARGET_OUT_ROOT)/%,$(ins)),\
+      $(eval bui := $(word 1,$(bui_ins)))\
+      $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
+                               $(patsubst system/%,DATA/%,\
+                                   $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
+      $(bui):$(2)/$(my_copy_dest))))
+endef
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index a3247da..95c4d24 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -15,6 +15,8 @@
 -include external/linux-kselftest/android/kselftest_test_list.mk
 -include external/ltp/android/ltp_package_list.mk
 
+include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
+
 test_suite_name := vts
 test_suite_tradefed := vts-tradefed
 test_suite_readme := test/vts/tools/vts-core-tradefed/README
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index c877e8b..41696e8 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -84,40 +84,17 @@
 # generate the range of allowed SDK versions, so it must have an entry for every
 # unreleased API level targetable by this branch, not just those that are valid
 # lunch targets for this branch.
-PLATFORM_VERSION.RP1A := R
-PLATFORM_VERSION.SP1A := S
+
+# The last stable version name of the platform that was released.  During
+# development, this stays at that previous version, while the codename indicates
+# further work based on the previous version.
+PLATFORM_VERSION_LAST_STABLE := 11
+.KATI_READONLY := PLATFORM_VERSION_LAST_STABLE
 
 # These are the current development codenames, if the build is not a final
 # release build.  If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.RP1A := R
 PLATFORM_VERSION_CODENAME.SP1A := S
 
-ifndef PLATFORM_VERSION
-  PLATFORM_VERSION := $(PLATFORM_VERSION.$(TARGET_PLATFORM_VERSION))
-  ifndef PLATFORM_VERSION
-    # PLATFORM_VERSION falls back to TARGET_PLATFORM_VERSION
-    PLATFORM_VERSION := $(TARGET_PLATFORM_VERSION)
-  endif
-endif
-.KATI_READONLY := PLATFORM_VERSION
-
-ifndef PLATFORM_SDK_VERSION
-  # This is the canonical definition of the SDK version, which defines
-  # the set of APIs and functionality available in the platform.  It
-  # is a single integer that increases monotonically as updates to
-  # the SDK are released.  It should only be incremented when the APIs for
-  # the new release are frozen (so that developers don't write apps against
-  # intermediate builds).  During development, this number remains at the
-  # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
-  # the code-name of the new development work.
-
-  # When you increment the PLATFORM_SDK_VERSION please ensure you also
-  # clear out the following text file of all older PLATFORM_VERSION's:
-  # cts/tests/tests/os/assets/platform_versions.txt
-  PLATFORM_SDK_VERSION := 29
-endif
-.KATI_READONLY := PLATFORM_SDK_VERSION
-
 ifndef PLATFORM_VERSION_CODENAME
   PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
   ifndef PLATFORM_VERSION_CODENAME
@@ -152,6 +129,32 @@
   PLATFORM_VERSION_CODENAME \
   PLATFORM_VERSION_ALL_CODENAMES
 
+ifndef PLATFORM_VERSION
+  ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+      PLATFORM_VERSION := $(PLATFORM_VERSION_LAST_STABLE)
+  else
+      PLATFORM_VERSION := $(PLATFORM_VERSION_CODENAME)
+  endif
+endif
+.KATI_READONLY := PLATFORM_VERSION
+
+ifndef PLATFORM_SDK_VERSION
+  # This is the canonical definition of the SDK version, which defines
+  # the set of APIs and functionality available in the platform.  It
+  # is a single integer that increases monotonically as updates to
+  # the SDK are released.  It should only be incremented when the APIs for
+  # the new release are frozen (so that developers don't write apps against
+  # intermediate builds).  During development, this number remains at the
+  # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
+  # the code-name of the new development work.
+
+  # When you increment the PLATFORM_SDK_VERSION please ensure you also
+  # clear out the following text file of all older PLATFORM_VERSION's:
+  # cts/tests/tests/os/assets/platform_versions.txt
+  PLATFORM_SDK_VERSION := 30
+endif
+.KATI_READONLY := PLATFORM_SDK_VERSION
+
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
   PLATFORM_PREVIEW_SDK_VERSION := 0
 else
@@ -237,7 +240,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2020-08-05
+      PLATFORM_SECURITY_PATCH := 2020-10-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
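Note: the net effect of the version reshuffle is that PLATFORM_VERSION now falls back to the codename during development and to PLATFORM_VERSION_LAST_STABLE on REL builds, rather than being looked up per release. A quick way to inspect the resulting values, assuming get_build_var from envsetup.sh and an arbitrary lunch target:

    source build/envsetup.sh && lunch aosp_arm64-userdebug
    get_build_var PLATFORM_VERSION          # "S" during development, "11" once the codename is REL
    get_build_var PLATFORM_SDK_VERSION      # 30
    get_build_var PLATFORM_SECURITY_PATCH   # 2020-10-05
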
 
diff --git a/envsetup.sh b/envsetup.sh
index e981034..a3b07a7 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -34,6 +34,7 @@
 - gomod:      Go to the directory containing a module.
 - pathmod:    Get the directory containing a module.
 - refreshmod: Refresh list of modules for allmod/gomod/pathmod.
+- syswrite:   Remount partitions (e.g. system.img) as writable, rebooting if necessary.
 
 Environment options:
 - SANITIZE_HOST: Set to 'address' to use ASAN for all host modules.
@@ -355,7 +356,7 @@
 
 function addcompletions()
 {
-    local T dir f
+    local f=
 
     # Keep us from trying to run in something that's neither bash nor zsh.
     if [ -z "$BASH_VERSION" -a -z "$ZSH_VERSION" ]; then
@@ -857,6 +858,18 @@
     fi
 }
 
+# syswrite - disable verity, reboot if needed, and remount image
+#
+# Easy way to make system.img/etc writable
+function syswrite() {
+  adb wait-for-device && adb root || return 1
+  if [[ $(adb disable-verity | grep "reboot") ]]; then
+      echo "rebooting"
+      adb reboot && adb wait-for-device && adb root || return 1
+  fi
+  adb wait-for-device && adb remount || return 1
+}
+
 # coredump_setup - enable core dumps globally for any process
 #                  that has the core-file-size limit set correctly
 #
@@ -1613,6 +1626,41 @@
     done
 }
 
+function showcommands() {
+    local T=$(gettop)
+    if [[ -z "$TARGET_PRODUCT" ]]; then
+        >&2 echo "TARGET_PRODUCT not set. Run lunch."
+        return
+    fi
+    case $(uname -s) in
+        Darwin)
+            PREBUILT_NAME=darwin-x86
+            ;;
+        Linux)
+            PREBUILT_NAME=linux-x86
+            ;;
+        *)
+            >&2 echo Unknown host $(uname -s)
+            return
+            ;;
+    esac
+    if [[ -z "$OUT_DIR" ]]; then
+      if [[ -z "$OUT_DIR_COMMON_BASE" ]]; then
+        OUT_DIR=out
+      else
+        OUT_DIR=${OUT_DIR_COMMON_BASE}/${PWD##*/}
+      fi
+    fi
+    if [[ "$1" == "--regenerate" ]]; then
+      shift 1
+      NINJA_ARGS="-t commands $@" m
+    else
+      (cd $T && prebuilts/build-tools/$PREBUILT_NAME/bin/ninja \
+          -f $OUT_DIR/combined-${TARGET_PRODUCT}.ninja \
+          -t commands "$@")
+    fi
+}
+
 validate_current_shell
 source_vendorsetup
 addcompletions
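Note: a short interactive sketch of the two helpers added to envsetup.sh above (the lunch target and ninja target path are illustrative):

    source build/envsetup.sh && lunch aosp_arm64-userdebug
    syswrite      # adb root + disable-verity (rebooting if needed), then adb remount
    showcommands out/target/product/generic_arm64/system.img   # dump the ninja commands that produce that file
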
diff --git a/rbesetup.sh b/rbesetup.sh
index 724ad7d..0182bfd 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -1,4 +1,31 @@
-source build/envsetup.sh
+function _source_env_setup_script() {
+  local -r ENV_SETUP_SCRIPT="build/make/envsetup.sh"
+  local -r TOP_DIR=$(
+    while [[ ! -f "${ENV_SETUP_SCRIPT}" ]] && [[ "${PWD}" != "/" ]]; do
+      \cd ..
+    done
+    if [[ -f "${ENV_SETUP_SCRIPT}" ]]; then
+      echo "$(PWD= /bin/pwd -P)"
+    fi
+  )
+
+  local -r FULL_PATH_ENV_SETUP_SCRIPT="${TOP_DIR}/${ENV_SETUP_SCRIPT}"
+  if [[ ! -f "${FULL_PATH_ENV_SETUP_SCRIPT}" ]]; then
+    echo "ERROR: Unable to source ${ENV_SETUP_SCRIPT}"
+    return 1
+  fi
+
+  # Need to change directory to the repo root so vendor scripts can be sourced
+  # as well.
+  local -r CUR_DIR=$PWD
+  \cd "${TOP_DIR}"
+  source "${FULL_PATH_ENV_SETUP_SCRIPT}"
+  \cd "${CUR_DIR}"
+}
+
+# This function needs to run first, as the function definitions below may rely
+# on functions defined by envsetup.sh.
+_source_env_setup_script || return
 
 # This function prefixes the given command with appropriate variables needed
 # for the build to be executed with RBE.
@@ -28,7 +55,7 @@
 # ANDROID_ENABLE_METRICS_UPLOAD.
 function _export_metrics_uploader() {
   local uploader_path="$(gettop)/vendor/google/misc/metrics_uploader_prebuilt/metrics_uploader.sh"
-  if [ -x "${uploader_path}" ]; then
+  if [[ -x "${uploader_path}" ]]; then
     export ANDROID_ENABLE_METRICS_UPLOAD="${uploader_path}"
   fi
 }
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 9d55f42..e34dc23 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -11,7 +11,9 @@
 # This flag is set by mainline but isn't desired for GSI.
 BOARD_USES_SYSTEM_OTHER_ODEX :=
 
-# system.img is always ext4 with sparse option
+# system.img is always ext4 and non-sparse.
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+
 # GSI also includes make_f2fs to support userdata partition in f2fs
 # for some devices
 TARGET_USERIMAGES_USE_F2FS := true
diff --git a/target/board/emulator_arm64/BoardConfig.mk b/target/board/emulator_arm64/BoardConfig.mk
index b34ccb4..a17cb75 100644
--- a/target/board/emulator_arm64/BoardConfig.mk
+++ b/target/board/emulator_arm64/BoardConfig.mk
@@ -23,7 +23,7 @@
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
 
-ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk vts10,$(MAKECMDGOALS)),)
+ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
 # DO NOT USE
 # DO NOT USE
 #
diff --git a/target/board/emulator_arm64/device.mk b/target/board/emulator_arm64/device.mk
index 73dc2f4..dc84192 100644
--- a/target/board/emulator_arm64/device.mk
+++ b/target/board/emulator_arm64/device.mk
@@ -19,7 +19,7 @@
 
 # Cuttlefish has GKI kernel prebuilts, so use those for the GKI boot.img.
 ifeq ($(TARGET_PREBUILT_KERNEL),)
-    LOCAL_KERNEL := device/google/cuttlefish_kernel/5.4-arm64/kernel
+    LOCAL_KERNEL := kernel/prebuilts/5.4/arm64/kernel-5.4-lz4
 else
     LOCAL_KERNEL := $(TARGET_PREBUILT_KERNEL)
 endif
diff --git a/target/board/generic_64bitonly_x86_64/BoardConfig.mk b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
new file mode 100644
index 0000000..71c4357
--- /dev/null
+++ b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
@@ -0,0 +1,45 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# x86_64 emulator specific definitions
+TARGET_CPU_ABI := x86_64
+TARGET_ARCH := x86_64
+TARGET_ARCH_VARIANT := x86_64
+
+# Keep the following for 32-bit native code support.
+# A few native services (e.g. media & audio) still run in 32-bit mode.
+# Remove them in S.
+TARGET_2ND_CPU_ABI := x86
+TARGET_2ND_ARCH := x86
+TARGET_2ND_ARCH_VARIANT := x86_64
+
+TARGET_PRELINK_MODULE := false
+include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
+
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
+
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
diff --git a/target/board/generic_64bitonly_x86_64/README.txt b/target/board/generic_64bitonly_x86_64/README.txt
new file mode 100644
index 0000000..dc7efd3
--- /dev/null
+++ b/target/board/generic_64bitonly_x86_64/README.txt
@@ -0,0 +1,7 @@
+The "generic_64bitonly_x86_64" product defines a non-hardware-specific IA target
+without a kernel or bootloader.
+
+It can be used to build the entire user-level system, and
+will work with the IA version of the emulator.
+
+This supports 64-bit apps only.
diff --git a/target/board/generic_64bitonly_x86_64/device.mk b/target/board/generic_64bitonly_x86_64/device.mk
new file mode 100644
index 0000000..bb49057
--- /dev/null
+++ b/target/board/generic_64bitonly_x86_64/device.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ifdef NET_ETH0_STARTONBOOT
+  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
+endif
+
+# Ensure we package the BIOS files too.
+PRODUCT_HOST_PACKAGES += \
+    bios.bin \
+    vgabios-cirrus.bin \
diff --git a/target/board/generic_64bitonly_x86_64/system.prop b/target/board/generic_64bitonly_x86_64/system.prop
new file mode 100644
index 0000000..ed9d173
--- /dev/null
+++ b/target/board/generic_64bitonly_x86_64/system.prop
@@ -0,0 +1,5 @@
+#
+# system.prop for generic sdk
+#
+
+rild.libpath=/vendor/lib64/libreference-ril.so
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index c20c782..d5331ad 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -23,7 +23,7 @@
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
 
-ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk vts10,$(MAKECMDGOALS)),)
+ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
 # DO NOT USE
 # DO NOT USE
 #
@@ -58,6 +58,7 @@
 TARGET_NO_VENDOR_BOOT := true
 BOARD_USES_RECOVERY_AS_BOOT := true
 
+BOARD_KERNEL-4.19-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
 BOARD_KERNEL-5.4_BOOTIMAGE_PARTITION_SIZE := 67108864
 BOARD_KERNEL-5.4-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 67108864
 BOARD_KERNEL-5.4-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
@@ -74,7 +75,7 @@
 BOARD_BOOT_HEADER_VERSION := 3
 BOARD_MKBOOTIMG_ARGS += --header_version $(BOARD_BOOT_HEADER_VERSION)
 
-BOARD_KERNEL_BINARIES := kernel-5.4 kernel-5.4-gz kernel-5.4-lz4 \
+BOARD_KERNEL_BINARIES := kernel-4.19-gz kernel-5.4 kernel-5.4-gz kernel-5.4-lz4 \
     kernel-mainline kernel-mainline-gz kernel-mainline-lz4
 ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
 BOARD_KERNEL_BINARIES += kernel-5.4-allsyms kernel-5.4-gz-allsyms kernel-5.4-lz4-allsyms
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index dfa146a..6f1bba0 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -15,16 +15,17 @@
 #
 
 PRODUCT_COPY_FILES += \
-    device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4:kernel-5.4 \
-    device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4-gz:kernel-5.4-gz \
-    device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4-lz4:kernel-5.4-lz4 \
-    kernel/prebuilts/mainline/arm64/kernel-mainline:kernel-mainline \
-    kernel/prebuilts/mainline/arm64/kernel-mainline-gz:kernel-mainline-gz \
-    kernel/prebuilts/mainline/arm64/kernel-mainline-lz4:kernel-mainline-lz4
+    kernel/prebuilts/4.19/arm64/Image.gz:kernel-4.19-gz \
+    kernel/prebuilts/5.4/arm64/kernel-5.4:kernel-5.4 \
+    kernel/prebuilts/5.4/arm64/kernel-5.4-gz:kernel-5.4-gz \
+    kernel/prebuilts/5.4/arm64/kernel-5.4-lz4:kernel-5.4-lz4 \
+    kernel/prebuilts/mainline/arm64/kernel-mainline-allsyms:kernel-mainline \
+    kernel/prebuilts/mainline/arm64/kernel-mainline-gz-allsyms:kernel-mainline-gz \
+    kernel/prebuilts/mainline/arm64/kernel-mainline-lz4-allsyms:kernel-mainline-lz4
 
 ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
 PRODUCT_COPY_FILES += \
-    device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4:kernel-5.4-allsyms \
-    device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4-gz:kernel-5.4-gz-allsyms \
-    device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4-lz4:kernel-5.4-lz4-allsyms
+    kernel/prebuilts/5.4/arm64/kernel-5.4:kernel-5.4-allsyms \
+    kernel/prebuilts/5.4/arm64/kernel-5.4-gz:kernel-5.4-gz-allsyms \
+    kernel/prebuilts/5.4/arm64/kernel-5.4-lz4:kernel-5.4-lz4-allsyms
 endif
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 8c069ba..61a7583 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -43,6 +43,7 @@
 
 else
 PRODUCT_MAKEFILES := \
+    $(LOCAL_DIR)/aosp_64bitonly_x86_64.mk \
     $(LOCAL_DIR)/aosp_arm64_ab.mk \
     $(LOCAL_DIR)/aosp_arm64.mk \
     $(LOCAL_DIR)/aosp_arm_ab.mk \
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
new file mode 100644
index 0000000..4de4e0c
--- /dev/null
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -0,0 +1,72 @@
+#
+# Copyright 2020 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
+# The system image of aosp_64bitonly_x86_64-userdebug is a GSI for devices with:
+# - 64-bit x86 user space
+# - 64-bit binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+# GSI for system/product; supports 64-bit apps only
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+
+# Enable mainline checking only for this exact product name
+ifeq (aosp_64bitonly_x86_64,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
+
+#
+# Special settings for GSI releasing
+#
+ifeq (aosp_64bitonly_x86_64,$(TARGET_PRODUCT))
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
+endif
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
+    root/init.zygote64.rc
+
+# This build configuration supports 64-bit apps only
+PRODUCT_NAME := aosp_64bitonly_x86_64
+PRODUCT_DEVICE := generic_64bitonly_x86_64
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on x86_64 App
diff --git a/target/product/aosp_product.mk b/target/product/aosp_product.mk
index a3da1c9..e396ad1 100644
--- a/target/product/aosp_product.mk
+++ b/target/product/aosp_product.mk
@@ -31,6 +31,7 @@
 PRODUCT_PACKAGES += \
     messaging \
     PhotoTable \
+    preinstalled-packages-platform-aosp-product.xml \
     WallpaperPicker \
 
 # Telephony:
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 2ed550c..0166ecc 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -21,3 +21,4 @@
     passwd_product \
     product_compatibility_matrix.xml \
     product_manifest.xml \
+    selinux_policy_product \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index cf32977..586c058 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -38,6 +38,7 @@
     bcc \
     blank_screen \
     blkid \
+    service-blobstore \
     bmgr \
     bootanimation \
     bootstat \
@@ -50,21 +51,27 @@
     charger \
     cmd \
     com.android.adbd \
-    com.android.apex.cts.shim.v1 \
     com.android.conscrypt \
     com.android.cronet \
+    com.android.extservices \
     com.android.i18n \
     com.android.ipsec \
     com.android.location.provider \
     com.android.media \
     com.android.media.swcodec \
+    com.android.mediaprovider \
+    com.android.os.statsd \
+    com.android.permission \
     com.android.resolv \
     com.android.neuralnetworks \
     com.android.sdkext \
     com.android.tethering \
     com.android.tzdata \
+    com.android.wifi \
     ContactsProvider \
     content \
+    CtsShimPrebuilt \
+    CtsShimPrivPrebuilt \
     debuggerd\
     device_config \
     dmctl \
@@ -75,7 +82,6 @@
     dumpsys \
     DynamicSystemInstallationService \
     e2fsck \
-    ExtServices \
     ExtShared \
     flags_health_check \
     framework-minus-apex \
@@ -94,7 +100,6 @@
     gpuservice \
     hid \
     hwservicemanager \
-    idmap \
     idmap2 \
     idmap2d \
     ime \
@@ -102,6 +107,7 @@
     incident \
     incidentd \
     incident_helper \
+    incident-helper-cmd \
     init.environ.rc \
     init_system \
     input \
@@ -111,7 +117,9 @@
     iptables \
     ip-up-vpn \
     javax.obex \
+    service-jobscheduler \
     keystore \
+    credstore \
     ld.mc \
     libaaudio \
     libamidi \
@@ -161,7 +169,6 @@
     libOpenMAXAL \
     libOpenSLES \
     libpdfium \
-    libpixelflinger \
     libpower \
     libpowermanager \
     libradio_metadata \
@@ -184,7 +191,6 @@
     libusbhost \
     libutils \
     libvulkan \
-    libwifi-service \
     libwilhelm \
     linker \
     linkerconfig \
@@ -197,13 +203,11 @@
     lpdump \
     lshal \
     mdnsd \
-    media \
     mediacodec.policy \
-    mediadrmserver \
     mediaextractor \
     mediametrics \
     media_profiles_V1_0.dtd \
-    MediaProvider \
+    MediaProviderLegacy \
     mediaserver \
     mke2fs \
     monkey \
@@ -216,12 +220,12 @@
     PackageInstaller \
     passwd_system \
     perfetto \
-    PermissionController \
     ping \
     ping6 \
     platform.xml \
     pm \
     pppd \
+    preinstalled-packages-platform.xml \
     privapp-permissions-platform.xml \
     racoon \
     recovery-persist \
@@ -246,7 +250,7 @@
     shell_and_utilities_system \
     sm \
     snapshotctl \
-    statsd \
+    SoundPicker \
     storaged \
     surfaceflinger \
     svc \
@@ -269,7 +273,7 @@
     WallpaperBackup \
     watchdogd \
     wificond \
-    wifi-service \
+    wifi.rc \
     wm \
 
 # VINTF data for system image
@@ -277,6 +281,12 @@
     system_manifest.xml \
     system_compatibility_matrix.xml \
 
+# HWASAN runtime for SANITIZE_TARGET=hwaddress builds
+ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
+  PRODUCT_PACKAGES += \
+   libclang_rt.hwasan-aarch64-android.bootstrap
+endif
+
 # Host tools to install
 PRODUCT_HOST_PACKAGES += \
     BugReport \
@@ -325,12 +335,16 @@
     com.android.i18n:core-icu4j \
     telephony-common \
     voip-common \
-    ims-common \
+    ims-common
 
 PRODUCT_UPDATABLE_BOOT_JARS := \
     com.android.conscrypt:conscrypt \
     com.android.media:updatable-media \
+    com.android.mediaprovider:framework-mediaprovider \
+    com.android.os.statsd:framework-statsd \
+    com.android.permission:framework-permission \
     com.android.sdkext:framework-sdkextensions \
+    com.android.wifi:framework-wifi \
     com.android.tethering:framework-tethering
 
 PRODUCT_COPY_FILES += \
@@ -352,6 +366,7 @@
 PRODUCT_SYSTEM_PROPERTIES += ro.zygote?=zygote32
 
 PRODUCT_SYSTEM_PROPERTIES += debug.atrace.tags.enableflags=0
+PRODUCT_SYSTEM_PROPERTIES += persist.traced.enable=1
 
 PRODUCT_PROPERTY_OVERRIDES += ro.gfx.angle.supported=true
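
The new HWASAN conditional added above pulls the bootstrap HWASan runtime into the system image
only when the whole target is built with hwaddress sanitization. A short sketch of how such a
build is commonly invoked; the lunch target is illustrative only:

    # Illustrative; aosp_arm64-userdebug is just an example of a 64-bit arm target.
    source build/make/envsetup.sh
    lunch aosp_arm64-userdebug
    SANITIZE_TARGET=hwaddress m
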
 
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index b67549a..58921d8 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -19,3 +19,4 @@
     group_system_ext \
     system_ext_manifest.xml \
     passwd_system_ext \
+    selinux_policy_system_ext \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 47c4e23..b955841 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -40,7 +40,7 @@
 
 # Base modules and settings for the vendor partition.
 PRODUCT_PACKAGES += \
-    android.hardware.cas@1.1-service \
+    android.hardware.cas@1.2-service \
     android.hardware.media.omx@1.0-service \
     boringssl_self_test_vendor \
     dumpsys_vendor \
diff --git a/target/product/emulated_storage.mk b/target/product/emulated_storage.mk
index 4c6c644..7d380d9 100644
--- a/target/product/emulated_storage.mk
+++ b/target/product/emulated_storage.mk
@@ -19,3 +19,5 @@
 
 PRODUCT_FS_CASEFOLD := 1
 PRODUCT_VENDOR_PROPERTIES += external_storage.casefold.enabled=1
+
+PRODUCT_VENDOR_PROPERTIES += external_storage.sdcardfs.enabled=0
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index 9dffc1a..36da1f7 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -50,12 +50,6 @@
 #PRODUCT_VENDOR_PROPERTIES += \
 #config.disable_location=true
 
-# Enable Perfetto traced
-# There is a stable property API for this prop so we can move it to /product.
-# https://android-review.googlesource.com/c/platform/system/libsysprop/+/952375
-PRODUCT_PRODUCT_PROPERTIES += \
-    persist.traced.enable=1
-
 # enable Google-specific location features,
 # like NetworkLocationProvider and LocationCollector
 PRODUCT_SYSTEM_EXT_PROPERTIES += \
diff --git a/target/product/emulator_vendor.mk b/target/product/emulator_vendor.mk
index bb679ec..89c3f3a 100644
--- a/target/product/emulator_vendor.mk
+++ b/target/product/emulator_vendor.mk
@@ -42,12 +42,6 @@
 #PRODUCT_VENDOR_PROPERTIES += \
 #config.disable_location=true
 
-# Enable Perfetto traced
-# There is a stable property API for this prop so we can move it to /product.
-# https://android-review.googlesource.com/c/platform/system/libsysprop/+/952375
-PRODUCT_PRODUCT_PROPERTIES += \
-    persist.traced.enable=1
-
 # enable Google-specific location features,
 # like NetworkLocationProvider and LocationCollector
 PRODUCT_SYSTEM_EXT_PROPERTIES += \
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index 64f61ff..a8e1e91 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -25,7 +25,8 @@
 
 PRODUCT_PACKAGES += \
     LiveWallpapersPicker \
-    PhotoTable
+    PhotoTable \
+    preinstalled-packages-platform-full-base.xml
 
 # Bluetooth:
 #   audio.a2dp.default is a system module. Generic system image includes
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 81ac600..731a450 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -34,7 +34,6 @@
     PartnerBookmarksProvider \
     PresencePolling \
     RcsService \
-    SafetyRegulatoryInfo \
     Stk \
     Tag \
     TimeZoneUpdater \
@@ -93,11 +92,6 @@
     libhidltransport \
     libhwbinder \
 
-# Camera service uses 'libdepthphoto' for adding dynamic depth
-# metadata inside depth jpegs.
-PRODUCT_PACKAGES += \
-    libdepthphoto \
-
 PRODUCT_PACKAGES_DEBUG += \
     avbctl \
     bootctl \
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index d324aa9..7f19615 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -20,7 +20,6 @@
 # Set lowram options and enable traced by default
 PRODUCT_VENDOR_PROPERTIES += \
      ro.config.low_ram=true \
-     persist.traced.enable=1 \
 
 # Speed profile services and wifi-service to reduce RAM and storage.
 PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile
@@ -40,6 +39,7 @@
 # Do not spin up a separate process for the network stack on go devices, use an in-process APK.
 PRODUCT_PACKAGES += InProcessNetworkStack
 PRODUCT_PACKAGES += CellBroadcastAppPlatform
+PRODUCT_PACKAGES += CellBroadcastServiceModulePlatform
 PRODUCT_PACKAGES += com.android.tethering.inprocess
 
 # Strip the local variable table and the local variable type table to reduce
diff --git a/target/product/gsi/30.txt b/target/product/gsi/30.txt
new file mode 100644
index 0000000..0589517
--- /dev/null
+++ b/target/product/gsi/30.txt
@@ -0,0 +1,309 @@
+LLNDK: libEGL.so
+LLNDK: libGLESv1_CM.so
+LLNDK: libGLESv2.so
+LLNDK: libGLESv3.so
+LLNDK: libRS.so
+LLNDK: libandroid_net.so
+LLNDK: libbinder_ndk.so
+LLNDK: libc.so
+LLNDK: libcgrouprc.so
+LLNDK: libdl.so
+LLNDK: libft2.so
+LLNDK: liblog.so
+LLNDK: libm.so
+LLNDK: libmediandk.so
+LLNDK: libnativewindow.so
+LLNDK: libneuralnetworks.so
+LLNDK: libselinux.so
+LLNDK: libsync.so
+LLNDK: libvndksupport.so
+LLNDK: libvulkan.so
+VNDK-SP: android.hardware.common-V1-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common-V1-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common@1.0.so
+VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.mapper@2.0.so
+VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.renderscript@1.0.so
+VNDK-SP: android.hidl.memory.token@1.0.so
+VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
+VNDK-SP: libRSCpuRef.so
+VNDK-SP: libRSDriver.so
+VNDK-SP: libRS_internal.so
+VNDK-SP: libbacktrace.so
+VNDK-SP: libbase.so
+VNDK-SP: libbcinfo.so
+VNDK-SP: libblas.so
+VNDK-SP: libc++.so
+VNDK-SP: libcompiler_rt.so
+VNDK-SP: libcutils.so
+VNDK-SP: libgralloctypes.so
+VNDK-SP: libhardware.so
+VNDK-SP: libhidlbase.so
+VNDK-SP: libhidlmemory.so
+VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
+VNDK-SP: liblzma.so
+VNDK-SP: libprocessgroup.so
+VNDK-SP: libunwindstack.so
+VNDK-SP: libutils.so
+VNDK-SP: libutilscallstack.so
+VNDK-SP: libz.so
+VNDK-core: android.frameworks.automotive.display@1.0.so
+VNDK-core: android.frameworks.cameraservice.common@2.0.so
+VNDK-core: android.frameworks.cameraservice.device@2.0.so
+VNDK-core: android.frameworks.cameraservice.service@2.0.so
+VNDK-core: android.frameworks.cameraservice.service@2.1.so
+VNDK-core: android.frameworks.displayservice@1.0.so
+VNDK-core: android.frameworks.schedulerservice@1.0.so
+VNDK-core: android.frameworks.sensorservice@1.0.so
+VNDK-core: android.frameworks.stats@1.0.so
+VNDK-core: android.hardware.atrace@1.0.so
+VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.audio.common@4.0.so
+VNDK-core: android.hardware.audio.common@5.0.so
+VNDK-core: android.hardware.audio.common@6.0.so
+VNDK-core: android.hardware.audio.effect@2.0.so
+VNDK-core: android.hardware.audio.effect@4.0.so
+VNDK-core: android.hardware.audio.effect@5.0.so
+VNDK-core: android.hardware.audio.effect@6.0.so
+VNDK-core: android.hardware.audio@2.0.so
+VNDK-core: android.hardware.audio@4.0.so
+VNDK-core: android.hardware.audio@5.0.so
+VNDK-core: android.hardware.audio@6.0.so
+VNDK-core: android.hardware.authsecret@1.0.so
+VNDK-core: android.hardware.automotive.audiocontrol@1.0.so
+VNDK-core: android.hardware.automotive.audiocontrol@2.0.so
+VNDK-core: android.hardware.automotive.can@1.0.so
+VNDK-core: android.hardware.automotive.evs@1.0.so
+VNDK-core: android.hardware.automotive.evs@1.1.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
+VNDK-core: android.hardware.automotive.sv@1.0.so
+VNDK-core: android.hardware.automotive.vehicle@2.0.so
+VNDK-core: android.hardware.biometrics.face@1.0.so
+VNDK-core: android.hardware.biometrics.fingerprint@2.1.so
+VNDK-core: android.hardware.biometrics.fingerprint@2.2.so
+VNDK-core: android.hardware.bluetooth.a2dp@1.0.so
+VNDK-core: android.hardware.bluetooth.audio@2.0.so
+VNDK-core: android.hardware.bluetooth@1.0.so
+VNDK-core: android.hardware.bluetooth@1.1.so
+VNDK-core: android.hardware.boot@1.0.so
+VNDK-core: android.hardware.boot@1.1.so
+VNDK-core: android.hardware.broadcastradio@1.0.so
+VNDK-core: android.hardware.broadcastradio@1.1.so
+VNDK-core: android.hardware.broadcastradio@2.0.so
+VNDK-core: android.hardware.camera.common@1.0.so
+VNDK-core: android.hardware.camera.device@1.0.so
+VNDK-core: android.hardware.camera.device@3.2.so
+VNDK-core: android.hardware.camera.device@3.3.so
+VNDK-core: android.hardware.camera.device@3.4.so
+VNDK-core: android.hardware.camera.device@3.5.so
+VNDK-core: android.hardware.camera.device@3.6.so
+VNDK-core: android.hardware.camera.metadata@3.2.so
+VNDK-core: android.hardware.camera.metadata@3.3.so
+VNDK-core: android.hardware.camera.metadata@3.4.so
+VNDK-core: android.hardware.camera.metadata@3.5.so
+VNDK-core: android.hardware.camera.provider@2.4.so
+VNDK-core: android.hardware.camera.provider@2.5.so
+VNDK-core: android.hardware.camera.provider@2.6.so
+VNDK-core: android.hardware.cas.native@1.0.so
+VNDK-core: android.hardware.cas@1.0.so
+VNDK-core: android.hardware.cas@1.1.so
+VNDK-core: android.hardware.cas@1.2.so
+VNDK-core: android.hardware.configstore-utils.so
+VNDK-core: android.hardware.configstore@1.0.so
+VNDK-core: android.hardware.configstore@1.1.so
+VNDK-core: android.hardware.confirmationui-support-lib.so
+VNDK-core: android.hardware.confirmationui@1.0.so
+VNDK-core: android.hardware.contexthub@1.0.so
+VNDK-core: android.hardware.contexthub@1.1.so
+VNDK-core: android.hardware.drm@1.0.so
+VNDK-core: android.hardware.drm@1.1.so
+VNDK-core: android.hardware.drm@1.2.so
+VNDK-core: android.hardware.drm@1.3.so
+VNDK-core: android.hardware.dumpstate@1.0.so
+VNDK-core: android.hardware.dumpstate@1.1.so
+VNDK-core: android.hardware.fastboot@1.0.so
+VNDK-core: android.hardware.gatekeeper@1.0.so
+VNDK-core: android.hardware.gnss.measurement_corrections@1.0.so
+VNDK-core: android.hardware.gnss.measurement_corrections@1.1.so
+VNDK-core: android.hardware.gnss.visibility_control@1.0.so
+VNDK-core: android.hardware.gnss@1.0.so
+VNDK-core: android.hardware.gnss@1.1.so
+VNDK-core: android.hardware.gnss@2.0.so
+VNDK-core: android.hardware.gnss@2.1.so
+VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.graphics.composer@2.1.so
+VNDK-core: android.hardware.graphics.composer@2.2.so
+VNDK-core: android.hardware.graphics.composer@2.3.so
+VNDK-core: android.hardware.graphics.composer@2.4.so
+VNDK-core: android.hardware.health.storage@1.0.so
+VNDK-core: android.hardware.health@1.0.so
+VNDK-core: android.hardware.health@2.0.so
+VNDK-core: android.hardware.health@2.1.so
+VNDK-core: android.hardware.identity-V2-ndk_platform.so
+VNDK-core: android.hardware.input.classifier@1.0.so
+VNDK-core: android.hardware.input.common@1.0.so
+VNDK-core: android.hardware.ir@1.0.so
+VNDK-core: android.hardware.keymaster-V2-ndk_platform.so
+VNDK-core: android.hardware.keymaster@3.0.so
+VNDK-core: android.hardware.keymaster@4.0.so
+VNDK-core: android.hardware.keymaster@4.1.so
+VNDK-core: android.hardware.light-V1-ndk_platform.so
+VNDK-core: android.hardware.light@2.0.so
+VNDK-core: android.hardware.media.bufferpool@1.0.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.c2@1.0.so
+VNDK-core: android.hardware.media.c2@1.1.so
+VNDK-core: android.hardware.media.omx@1.0.so
+VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.neuralnetworks@1.0.so
+VNDK-core: android.hardware.neuralnetworks@1.1.so
+VNDK-core: android.hardware.neuralnetworks@1.2.so
+VNDK-core: android.hardware.neuralnetworks@1.3.so
+VNDK-core: android.hardware.nfc@1.0.so
+VNDK-core: android.hardware.nfc@1.1.so
+VNDK-core: android.hardware.nfc@1.2.so
+VNDK-core: android.hardware.oemlock@1.0.so
+VNDK-core: android.hardware.power-V1-ndk_platform.so
+VNDK-core: android.hardware.power.stats@1.0.so
+VNDK-core: android.hardware.power@1.0.so
+VNDK-core: android.hardware.power@1.1.so
+VNDK-core: android.hardware.power@1.2.so
+VNDK-core: android.hardware.power@1.3.so
+VNDK-core: android.hardware.radio.config@1.0.so
+VNDK-core: android.hardware.radio.config@1.1.so
+VNDK-core: android.hardware.radio.config@1.2.so
+VNDK-core: android.hardware.radio.deprecated@1.0.so
+VNDK-core: android.hardware.radio@1.0.so
+VNDK-core: android.hardware.radio@1.1.so
+VNDK-core: android.hardware.radio@1.2.so
+VNDK-core: android.hardware.radio@1.3.so
+VNDK-core: android.hardware.radio@1.4.so
+VNDK-core: android.hardware.radio@1.5.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
+VNDK-core: android.hardware.secure_element@1.0.so
+VNDK-core: android.hardware.secure_element@1.1.so
+VNDK-core: android.hardware.secure_element@1.2.so
+VNDK-core: android.hardware.sensors@1.0.so
+VNDK-core: android.hardware.sensors@2.0.so
+VNDK-core: android.hardware.sensors@2.1.so
+VNDK-core: android.hardware.soundtrigger@2.0-core.so
+VNDK-core: android.hardware.soundtrigger@2.0.so
+VNDK-core: android.hardware.soundtrigger@2.1.so
+VNDK-core: android.hardware.soundtrigger@2.2.so
+VNDK-core: android.hardware.soundtrigger@2.3.so
+VNDK-core: android.hardware.tetheroffload.config@1.0.so
+VNDK-core: android.hardware.tetheroffload.control@1.0.so
+VNDK-core: android.hardware.thermal@1.0.so
+VNDK-core: android.hardware.thermal@1.1.so
+VNDK-core: android.hardware.thermal@2.0.so
+VNDK-core: android.hardware.tv.cec@1.0.so
+VNDK-core: android.hardware.tv.cec@2.0.so
+VNDK-core: android.hardware.tv.input@1.0.so
+VNDK-core: android.hardware.tv.tuner@1.0.so
+VNDK-core: android.hardware.usb.gadget@1.0.so
+VNDK-core: android.hardware.usb.gadget@1.1.so
+VNDK-core: android.hardware.usb@1.0.so
+VNDK-core: android.hardware.usb@1.1.so
+VNDK-core: android.hardware.usb@1.2.so
+VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
+VNDK-core: android.hardware.vibrator@1.0.so
+VNDK-core: android.hardware.vibrator@1.1.so
+VNDK-core: android.hardware.vibrator@1.2.so
+VNDK-core: android.hardware.vibrator@1.3.so
+VNDK-core: android.hardware.vr@1.0.so
+VNDK-core: android.hardware.weaver@1.0.so
+VNDK-core: android.hardware.wifi.hostapd@1.0.so
+VNDK-core: android.hardware.wifi.hostapd@1.1.so
+VNDK-core: android.hardware.wifi.hostapd@1.2.so
+VNDK-core: android.hardware.wifi.offload@1.0.so
+VNDK-core: android.hardware.wifi.supplicant@1.0.so
+VNDK-core: android.hardware.wifi.supplicant@1.1.so
+VNDK-core: android.hardware.wifi.supplicant@1.2.so
+VNDK-core: android.hardware.wifi.supplicant@1.3.so
+VNDK-core: android.hardware.wifi@1.0.so
+VNDK-core: android.hardware.wifi@1.1.so
+VNDK-core: android.hardware.wifi@1.2.so
+VNDK-core: android.hardware.wifi@1.3.so
+VNDK-core: android.hardware.wifi@1.4.so
+VNDK-core: android.hidl.allocator@1.0.so
+VNDK-core: android.hidl.memory.block@1.0.so
+VNDK-core: android.hidl.token@1.0-utils.so
+VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.system.net.netd@1.0.so
+VNDK-core: android.system.net.netd@1.1.so
+VNDK-core: android.system.suspend@1.0.so
+VNDK-core: android.system.wifi.keystore@1.0.so
+VNDK-core: libadf.so
+VNDK-core: libaudioroute.so
+VNDK-core: libaudioutils.so
+VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
+VNDK-core: libcamera_metadata.so
+VNDK-core: libcap.so
+VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
+VNDK-core: libcrypto.so
+VNDK-core: libcrypto_utils.so
+VNDK-core: libcurl.so
+VNDK-core: libdiskconfig.so
+VNDK-core: libdumpstateutil.so
+VNDK-core: libevent.so
+VNDK-core: libexif.so
+VNDK-core: libexpat.so
+VNDK-core: libfmq.so
+VNDK-core: libgatekeeper.so
+VNDK-core: libgui.so
+VNDK-core: libhardware_legacy.so
+VNDK-core: libhidlallocatorutils.so
+VNDK-core: libjpeg.so
+VNDK-core: libldacBT_abr.so
+VNDK-core: libldacBT_enc.so
+VNDK-core: liblz4.so
+VNDK-core: libmedia_helper.so
+VNDK-core: libmedia_omx.so
+VNDK-core: libmemtrack.so
+VNDK-core: libminijail.so
+VNDK-core: libmkbootimg_abi_check.so
+VNDK-core: libnetutils.so
+VNDK-core: libnl.so
+VNDK-core: libpcre2.so
+VNDK-core: libpiex.so
+VNDK-core: libpng.so
+VNDK-core: libpower.so
+VNDK-core: libprocinfo.so
+VNDK-core: libradio_metadata.so
+VNDK-core: libspeexresampler.so
+VNDK-core: libsqlite.so
+VNDK-core: libssl.so
+VNDK-core: libstagefright_bufferpool@2.0.so
+VNDK-core: libstagefright_bufferqueue_helper.so
+VNDK-core: libstagefright_foundation.so
+VNDK-core: libstagefright_omx.so
+VNDK-core: libstagefright_omx_utils.so
+VNDK-core: libstagefright_xmlparser.so
+VNDK-core: libsysutils.so
+VNDK-core: libtinyalsa.so
+VNDK-core: libtinyxml2.so
+VNDK-core: libui.so
+VNDK-core: libusbhost.so
+VNDK-core: libwifi-system-iface.so
+VNDK-core: libxml2.so
+VNDK-core: libyuv.so
+VNDK-core: libziparchive.so
+VNDK-private: libbacktrace.so
+VNDK-private: libblas.so
+VNDK-private: libcompiler_rt.so
+VNDK-private: libft2.so
+VNDK-private: libgui.so
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 345faa4..de6644c 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -26,6 +26,7 @@
 VNDK-SP: android.hardware.graphics.mapper@2.0.so
 VNDK-SP: android.hardware.graphics.mapper@2.1.so
 VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
 VNDK-SP: android.hardware.renderscript@1.0.so
 VNDK-SP: android.hidl.memory.token@1.0.so
 VNDK-SP: android.hidl.memory@1.0-impl.so
@@ -41,6 +42,7 @@
 VNDK-SP: libc++.so
 VNDK-SP: libcompiler_rt.so
 VNDK-SP: libcutils.so
+VNDK-SP: libgralloctypes.so
 VNDK-SP: libhardware.so
 VNDK-SP: libhidlbase.so
 VNDK-SP: libhidlmemory.so
@@ -53,12 +55,14 @@
 VNDK-SP: libutilscallstack.so
 VNDK-SP: libz.so
 VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
 VNDK-core: android.hardware.configstore-utils.so
 VNDK-core: android.hardware.configstore@1.0.so
 VNDK-core: android.hardware.configstore@1.1.so
 VNDK-core: android.hardware.confirmationui-support-lib.so
 VNDK-core: android.hardware.graphics.allocator@2.0.so
 VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
 VNDK-core: android.hardware.identity-V2-ndk_platform.so
@@ -69,16 +73,19 @@
 VNDK-core: android.hardware.media@1.0.so
 VNDK-core: android.hardware.memtrack@1.0.so
 VNDK-core: android.hardware.power-V1-ndk_platform.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
 VNDK-core: android.hardware.soundtrigger@2.0-core.so
 VNDK-core: android.hardware.soundtrigger@2.0.so
 VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
 VNDK-core: android.hidl.token@1.0-utils.so
 VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.system.keystore2-V1-ndk_platform.so
 VNDK-core: android.system.suspend@1.0.so
 VNDK-core: libadf.so
 VNDK-core: libaudioroute.so
 VNDK-core: libaudioutils.so
 VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
 VNDK-core: libcamera_metadata.so
 VNDK-core: libcap.so
 VNDK-core: libcn-cbor.so
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 46c956d..241b6ba 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -46,11 +46,16 @@
 # GSI targets should install "flattened" APEXes in /system_ext as well
 PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES := true
 
+# The flattened version of com.android.apex.cts.shim.v1 should be explicitly installed
+# because the shim apex is a prebuilt one and PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES is not
+# supported for prebuilt_apex modules yet.
+PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_with_prebuilts.flattened
+
 # GSI specific tasks on boot
 PRODUCT_PACKAGES += \
     gsi_skip_mount.cfg \
     init.gsi.rc \
     init.vndk-nodef.rc \
 
-# Support additional P and Q VNDK packages
-PRODUCT_EXTRA_VNDK_VERSIONS := 28 29
+# Support additional P, Q and R VNDK packages
+PRODUCT_EXTRA_VNDK_VERSIONS := 28 29 30
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
index e03c212..2199c57 100644
--- a/target/product/handheld_product.mk
+++ b/target/product/handheld_product.mk
@@ -31,6 +31,7 @@
     LatinIME \
     Music \
     OneTimeInitializer \
+    preinstalled-packages-platform-handheld-product.xml \
     QuickSearchBox \
     SettingsIntelligence \
     frameworks-base-overlays
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index e2c91b6..c2608c4 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -53,10 +53,9 @@
     librs_jni \
     ManagedProvisioning \
     MmsService \
-    MtpDocumentsProvider \
+    MtpService \
     MusicFX \
     NfcNci \
-    OsuLogin \
     PacProcessor \
     PrintRecommendationService \
     PrintSpooler \
diff --git a/target/product/iorap_large_memory_config.mk b/target/product/iorap_large_memory_config.mk
new file mode 100644
index 0000000..9aa6642
--- /dev/null
+++ b/target/product/iorap_large_memory_config.mk
@@ -0,0 +1,18 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Disable Camera pinner by default
+PRODUCT_PRODUCT_PROPERTIES += \
+    pinner.pin_camera=false
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 7a2dd73..1004dc5 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -52,12 +52,13 @@
 PRODUCT_SYSTEM_SERVER_JARS := \
     com.android.location.provider \
     services \
-    ethernet-service \
-    wifi-service \
+    ethernet-service
 
 # system server jars which are updated via apex modules.
 # The values should be of the format <apex name>:<jar name>
 PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS := \
+    com.android.permission:service-permission \
+    com.android.wifi:service-wifi \
     com.android.ipsec:android.net.ipsec.ike \
 
 PRODUCT_COPY_FILES += \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index b96601d..7633abe 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -92,8 +92,4 @@
     dalvik.vm.minidebuginfo=true \
     dalvik.vm.dex2oat-minidebuginfo=true
 
-# Disable iorapd by default
-PRODUCT_SYSTEM_PROPERTIES += \
-    ro.iorapd.enable=false
-
 PRODUCT_USES_DEFAULT_ART_CONFIG := true
diff --git a/target/product/sysconfig/Android.bp b/target/product/sysconfig/Android.bp
new file mode 100644
index 0000000..5632d17
--- /dev/null
+++ b/target/product/sysconfig/Android.bp
@@ -0,0 +1,33 @@
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+    name: "preinstalled-packages-platform-aosp-product.xml",
+    product_specific: true,
+    sub_dir: "sysconfig",
+    src: "preinstalled-packages-platform-aosp-product.xml",
+}
+
+prebuilt_etc {
+    name: "preinstalled-packages-platform-full-base.xml",
+    sub_dir: "sysconfig",
+    src: "preinstalled-packages-platform-full-base.xml",
+}
+
+prebuilt_etc {
+    name: "preinstalled-packages-platform-handheld-product.xml",
+    product_specific: true,
+    sub_dir: "sysconfig",
+    src: "preinstalled-packages-platform-handheld-product.xml",
+}
\ No newline at end of file
diff --git a/target/product/sysconfig/preinstalled-packages-platform-aosp-product.xml b/target/product/sysconfig/preinstalled-packages-platform-aosp-product.xml
new file mode 100644
index 0000000..eec1326
--- /dev/null
+++ b/target/product/sysconfig/preinstalled-packages-platform-aosp-product.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- System packages to preinstall on all devices with aosp_product, per user type.
+     Documentation at frameworks/base/data/etc/preinstalled-packages-platform.xml
+-->
+<config>
+    <install-in-user-type package="com.android.wallpaperpicker">
+        <install-in user-type="FULL" />
+    </install-in-user-type>
+</config>
diff --git a/target/product/sysconfig/preinstalled-packages-platform-full-base.xml b/target/product/sysconfig/preinstalled-packages-platform-full-base.xml
new file mode 100644
index 0000000..f601355
--- /dev/null
+++ b/target/product/sysconfig/preinstalled-packages-platform-full-base.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- System packages to preinstall on all devices with full_base, per user type.
+     Documentation at frameworks/base/data/etc/preinstalled-packages-platform.xml
+-->
+<config>
+    <install-in-user-type package="com.android.wallpaper.livepicker">
+        <install-in user-type="FULL" />
+    </install-in-user-type>
+</config>
diff --git a/target/product/sysconfig/preinstalled-packages-platform-handheld-product.xml b/target/product/sysconfig/preinstalled-packages-platform-handheld-product.xml
new file mode 100644
index 0000000..a5d9ba2
--- /dev/null
+++ b/target/product/sysconfig/preinstalled-packages-platform-handheld-product.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- System packages to preinstall on all devices with handheld_product, per user type.
+     Documentation at frameworks/base/data/etc/preinstalled-packages-platform.xml
+-->
+<config>
+    <install-in-user-type package="com.android.wallpapercropper">
+        <install-in user-type="FULL" />
+    </install-in-user-type>
+</config>
diff --git a/target/product/telephony_system.mk b/target/product/telephony_system.mk
index c306a04..ef48719 100644
--- a/target/product/telephony_system.mk
+++ b/target/product/telephony_system.mk
@@ -21,7 +21,7 @@
     ONS \
     CarrierDefaultApp \
     CallLogBackup \
-    CellBroadcastApp \
-    CellBroadcastServiceModule \
+    com.android.cellbroadcast \
+    CellBroadcastLegacyApp \
 
 PRODUCT_COPY_FILES := \
diff --git a/target/product/updatable_apex.mk b/target/product/updatable_apex.mk
index 2730f0e..c8dc8b0 100644
--- a/target/product/updatable_apex.mk
+++ b/target/product/updatable_apex.mk
@@ -17,6 +17,8 @@
 # Inherit this when the target needs to support updating APEXes
 
 ifneq ($(OVERRIDE_TARGET_FLATTEN_APEX),true)
+  # com.android.apex.cts.shim.v1_prebuilt overrides CtsShimPrebuilt
+  # and CtsShimPrivPrebuilt since they are packaged inside the APEX.
   PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_prebuilt
   PRODUCT_VENDOR_PROPERTIES := ro.apex.updatable=true
   TARGET_FLATTEN_APEX := false
diff --git a/target/product/virtual_ab_ota_compression.mk b/target/product/virtual_ab_ota_compression.mk
new file mode 100644
index 0000000..c4849be
--- /dev/null
+++ b/target/product/virtual_ab_ota_compression.mk
@@ -0,0 +1,21 @@
+#
+# Copyright (C) 2020 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
+
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
+
+PRODUCT_PACKAGES += snapuserd_ramdisk
diff --git a/target/product/virtual_ab_ota_retrofit_compression.mk b/target/product/virtual_ab_ota_retrofit_compression.mk
new file mode 100644
index 0000000..8059f75
--- /dev/null
+++ b/target/product/virtual_ab_ota_retrofit_compression.mk
@@ -0,0 +1,22 @@
+#
+# Copyright (C) 2020 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota_retrofit.mk)
+
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
+
+PRODUCT_PACKAGES += snapuserd_ramdisk
+
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 09d8f70..9bee115 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -11,7 +11,8 @@
 echo "ro.build.version.preview_sdk_fingerprint=$PLATFORM_PREVIEW_SDK_FINGERPRINT"
 echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
 echo "ro.build.version.all_codenames=$PLATFORM_VERSION_ALL_CODENAMES"
-echo "ro.build.version.release=$PLATFORM_VERSION"
+echo "ro.build.version.release=$PLATFORM_VERSION_LAST_STABLE"
+echo "ro.build.version.release_or_codename=$PLATFORM_VERSION"
 echo "ro.build.version.security_patch=$PLATFORM_SECURITY_PATCH"
 echo "ro.build.version.base_os=$PLATFORM_BASE_OS"
 echo "ro.build.version.min_supported_target_sdk=$PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION"
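
After this change `ro.build.version.release` reports the last stable platform version, while the
release-or-codename string moves to the new `ro.build.version.release_or_codename` property. A
hypothetical result on a development (codename) build, assuming PLATFORM_VERSION_LAST_STABLE is 11
and PLATFORM_VERSION is the codename S:

    # Hypothetical values; the exact strings depend on the branch being built.
    $ grep '^ro.build.version.release' $OUT/system/build.prop
    ro.build.version.release=11
    ro.build.version.release_or_codename=S
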
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 372404b..1ff8e65 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -207,8 +207,8 @@
   def _parse_llvm_readobj(cls, elf_file_path, header, lines):
     """Parse the output of llvm-readobj."""
     lines_it = iter(lines)
-    imported, exported = cls._parse_dynamic_symbols(lines_it)
     dt_soname, dt_needed = cls._parse_dynamic_table(elf_file_path, lines_it)
+    imported, exported = cls._parse_dynamic_symbols(lines_it)
     return ELF(dt_soname, dt_needed, imported, exported, header)
 
 
@@ -397,7 +397,7 @@
       sys.exit(2)
 
 
-  def check_dt_needed(self):
+  def check_dt_needed(self, system_shared_lib_names):
     """Check whether all DT_NEEDED entries are specified in the build
     system."""
 
@@ -417,6 +417,11 @@
       dt_needed = sorted(set(self._file_under_test.dt_needed))
       modules = [re.sub('\\.so$', '', lib) for lib in dt_needed]
 
+      # Remove system shared libraries from the suggestion since they are added
+      # by default.
+      modules = [name for name in modules
+                 if name not in system_shared_lib_names]
+
       self._note()
       self._note('Fix suggestions:')
       self._note(
@@ -502,6 +507,11 @@
   parser.add_argument('--shared-lib', action='append', default=[],
                       help='Path to shared library dependencies')
 
+  # System Shared library names
+  parser.add_argument('--system-shared-lib', action='append', default=[],
+                      help='System shared libraries to be hidden from fix '
+                      'suggestions')
+
   # Check options
   parser.add_argument('--skip-bad-elf-magic', action='store_true',
                       help='Ignore the input file without the ELF magic word')
@@ -535,7 +545,7 @@
   if args.soname:
     checker.check_dt_soname(args.soname)
 
-  checker.check_dt_needed()
+  checker.check_dt_needed(args.system_shared_lib)
 
   if not args.allow_undefined_symbols:
     checker.check_symbols()
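
A sketch of how the new flag could be passed by hand; the module paths are placeholders, the
trailing positional ELF path is assumed from the checker's file-under-test handling outside this
hunk, and in practice the build system generates this invocation. The names are given without the
`.so` suffix to match the comparison in check_dt_needed(), which strips the suffix from DT_NEEDED
entries before filtering:

    # Hypothetical invocation; paths are placeholders.
    ./tools/check_elf_file.py \
        --soname libexample.so \
        --shared-lib out/path/to/libdep.so \
        --system-shared-lib libc \
        --system-shared-lib libm \
        out/path/to/libexample.so
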
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 41e8ca5..5fb68b8 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -24,7 +24,7 @@
 $(error Using $(TARGET_DEVICE_DIR)/android_filesystem_config.h is deprecated, please use TARGET_FS_CONFIG_GEN instead)
 endif
 
-system_android_filesystem_config := system/core/include/private/android_filesystem_config.h
+system_android_filesystem_config := system/core/libcutils/include/private/android_filesystem_config.h
 system_capability_header := bionic/libc/kernel/uapi/linux/capability.h
 
 # List of supported vendor, oem, odm, vendor_dlkm, odm_dlkm, product and system_ext Partitions
diff --git a/tools/fs_config/README b/tools/fs_config/README
deleted file mode 100644
index 21bdeb8..0000000
--- a/tools/fs_config/README
+++ /dev/null
@@ -1,137 +0,0 @@
- _____  _____  _____  _____  __  __  _____
-/  _  \/   __\/  _  \|  _  \/  \/  \/   __\
-|  _  <|   __||  _  ||  |  ||  \/  ||   __|
-\__|\_/\_____/\__|__/|_____/\__ \__/\_____/
-
-The fs_config_generator.py tool uses the platform android_filesystem_config.h and the
-TARGET_FS_CONFIG_GEN files to generate the fs_config_dirs and fs_config_files files for each
-partition, as well as passwd and group files, and the generated_oem_aid.h header.
-
-The fs_config_dirs and fs_config_files binary files are interpreted by the libcutils fs_config()
-function, along with the built-in defaults, to serve as overrides to complete the results. The
-Target files are used by filesystem and adb tools to ensure that the file and directory properties
-are preserved during runtime operations. The host files in the ${OUT} directory are used in the
-final stages when building the filesystem images to set the file and directory properties.
-
-See ./fs_config_generator.py fsconfig --help for how these files are generated.
-
-The passwd and group files are formatted as documented in man pages passwd(5) and group(5) and used
-by bionic for implementing getpwnam() and related functions.
-
-See ./fs_config_generator.py passwd --help and ./fs_config_generator.py group --help for how these
-files are generated.
-
-The generated_oem_aid.h creates identifiers for non-platform AIDs for developers wishing to use them
-in their native code.  To do so, include the oemaids_headers header library in the corresponding
-makefile and #include "generated_oem_aid.h" in the code wishing to use these identifiers.
-
-See ./fs_config_generator.py oemaid --help for how this file is generated.
-
-The parsing of the TARGET_FS_CONFIG_GEN files follows the Python ConfigParser specification, with
-the sections and fields as defined below. There are two types of sections, both sections require all
-options to be specified. The first section type is the "caps" section.
-
-The "caps" section follows the following syntax:
-
-[path]
-mode: Octal file mode
-user: AID_<user>
-group: AID_<group>
-caps: cap*
-
-Where:
-
-[path]
-  The filesystem path to configure. A path ending in / is considered a dir,
-  else its a file.
-
-mode:
-  A valid octal file mode of at least 3 digits. If 3 is specified, it is
-  prefixed with a 0, else mode is used as is.
-
-user:
-  Either the C define for a valid AID or the friendly name. For instance both
-  AID_RADIO and radio are acceptable. Note custom AIDs can be defined in the
-  AID section documented below.
-
-group:
-  Same as user.
-
-caps:
-  The name as declared in
-  system/core/include/private/android_filesystem_capability.h without the
-  leading CAP_. Mixed case is allowed. Caps can also be the raw:
-   * binary (0b0101)
-   * octal (0455)
-   * int (42)
-   * hex (0xFF)
-  For multiple caps, just separate by whitespace.
-
-It is an error to specify multiple sections with the same [path] in different
-files. Note that the same file may contain sections that override the previous
-section in Python versions <= 3.2. In Python 3.2 it's set to strict mode.
-
-
-The next section type is the "AID" section, for specifying OEM specific AIDS.
-
-The AID section follows the following syntax:
-
-[AID_<name>]
-value: <number>
-
-Where:
-
-[AID_<name>]
-  The <name> can contain characters in the set uppercase, numbers
-  and underscores.
-
-value:
-  A valid C style number string. Hex, octal, binary and decimal are supported.
-  See "caps" above for more details on number formatting.
-
-It is an error to specify multiple sections with the same [AID_<name>]. With
-the same constraints as [path] described above. It is also an error to specify
-multiple sections with the same value option. It is also an error to specify a
-value that is outside of the inclusive OEM ranges:
- * AID_OEM_RESERVED_START(2900) - AID_OEM_RESERVED_END(2999)
- * AID_OEM_RESERVED_2_START(5000) - AID_OEM_RESERVED_2_END(5999)
-
-as defined by system/core/include/private/android_filesystem_config.h.
-
-Ordering within the TARGET_FS_CONFIG_GEN files is not relevant. The paths for files are sorted
-like so within their respective array definition:
- * specified path before prefix match
- ** ie foo before f*
- * lexicographical less than before other
- ** ie boo before foo
-
-Given these paths:
-
-paths=['ac', 'a', 'acd', 'an', 'a*', 'aa', 'ac*']
-
-The sort order would be:
-paths=['a', 'aa', 'ac', 'acd', 'an', 'ac*', 'a*']
-
-Thus the fs_config tools will match on specified paths before attempting prefix, and match on the
-longest matching prefix.
-
-The declared AIDS are sorted in ascending numerical order based on the option "value". The string
-representation of value is preserved. Both choices were made for maximum readability of the generated
-file and to line up files. Sync lines are placed with the source file as comments in the generated
-header file.
-
-Unit Tests:
-
-From within the fs_config directory, unit tests can be executed like so:
-$ python -m unittest test_fs_config_generator.Tests
-.............
-----------------------------------------------------------------------
-Ran 13 tests in 0.004s
-
-OK
-
-One could also use nose if they would like:
-$ nose2
-
-To add new tests, simply add a test_<xxx> method to the test class. It will automatically
-get picked up and added to the test suite.
diff --git a/tools/fs_config/README.md b/tools/fs_config/README.md
new file mode 100644
index 0000000..bad5e10
--- /dev/null
+++ b/tools/fs_config/README.md
@@ -0,0 +1,84 @@
+# FS Config Generator
+
+The `fs_config_generator.py` tool uses the platform `android_filesystem_config.h` and the
+`TARGET_FS_CONFIG_GEN` files to generate the following:
+* `fs_config_dirs` and `fs_config_files` files for each partition
+* `passwd` and `group` files for each partition
+* The `generated_oem_aid.h` header
+
+## Outputs
+
+### `fs_config_dirs` and `fs_config_files`
+
+The `fs_config_dirs` and `fs_config_files` binary files are interpreted by the libcutils
+`fs_config()` function, along with the built-in defaults, as overrides that complete the results.
+The target files are used by filesystem and adb tools to ensure that file and directory
+properties are preserved during runtime operations. The host files in the `$OUT` directory are used
+in the final stages of building the filesystem images to set file and directory properties.
+
+See `./fs_config_generator.py fsconfig --help` for how these files are generated.
+
+### `passwd` and `group` files
+
+The `passwd` and `group` files are formatted as documented in man pages passwd(5) and group(5) and
+used by bionic for implementing `getpwnam()` and related functions.
+
+See `./fs_config_generator.py passwd --help` and `./fs_config_generator.py group --help` for how
+these files are generated.
+
+### The `generated_oem_aid.h` header
+
+The `generated_oem_aid.h` header defines identifiers for non-platform AIDs so that developers can
+use them in their native code. To do so, add the `oemaids_headers` header library to the
+corresponding makefile and `#include "generated_oem_aid.h"` in the code that uses these
+identifiers.
+
+See `./fs_config_generator.py oemaid --help` for how this file is generated.
+
+## Parsing
+
+See the documentation on [source.android.com](https://source.android.com/devices/tech/config/filesystem#configuring-aids) for details and examples.
+
+
+## Ordering
+
+Ordering within the `TARGET_FS_CONFIG_GEN` files is not relevant. The paths for files are sorted
+like so within their respective array definition:
+ * specified path before prefix match
+   * for example: foo before f*
+ * lexicographical less than before other
+   * for example: boo before foo
+
+Given these paths:
+
+    paths=['ac', 'a', 'acd', 'an', 'a*', 'aa', 'ac*']
+
+The sort order would be:
+
+    paths=['a', 'aa', 'ac', 'acd', 'an', 'ac*', 'a*']
+
+Thus the `fs_config` tools will match on specified paths before attempting a prefix match, and
+will match on the longest matching prefix.
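+
+As a purely illustrative sketch (not the generator's actual code), the ordering rule above can be
+expressed like this:
+
+    def fs_config_sort(paths):
+        # Exact paths first, lexicographically; then prefix ('*') entries,
+        # longest prefix first so a scan stops at the longest match.
+        exact = sorted(p for p in paths if not p.endswith('*'))
+        prefixes = sorted((p for p in paths if p.endswith('*')),
+                          key=lambda p: (-len(p), p))
+        return exact + prefixes
+
+    fs_config_sort(['ac', 'a', 'acd', 'an', 'a*', 'aa', 'ac*'])
+    # ['a', 'aa', 'ac', 'acd', 'an', 'ac*', 'a*']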
+
+The declared AIDs are sorted in ascending numerical order based on the option "value". The string
+representation of value is preserved. Both choices were made for maximum readability of the
+generated file and to line up files. Sync lines are placed with the source file as comments in the
+generated header file.
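+
+For example, with hypothetical OEM AIDs, sorting numerically while preserving each value's
+original spelling looks like:
+
+    aids = [('AID_VENDOR_FOO', '0x1388'), ('AID_VENDOR_BAR', '2901'), ('AID_VENDOR_BAZ', '2900')]
+    for name, value in sorted(aids, key=lambda nv: int(nv[1], 0)):
+        print(name, value)
+    # AID_VENDOR_BAZ 2900
+    # AID_VENDOR_BAR 2901
+    # AID_VENDOR_FOO 0x1388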
+
+## Unit Tests
+
+From within the `fs_config` directory, unit tests can be executed like so:
+
+    $ python -m unittest test_fs_config_generator.Tests
+    .............
+    ----------------------------------------------------------------------
+    Ran 13 tests in 0.004s
+
+    OK
+
+You can also use nose2 if you prefer:
+
+    $ nose2
+
+To add new tests, simply add a `test_<xxx>` method to the test class. It will automatically
+get picked up and added to the test suite.
diff --git a/tools/fs_config/end_to_end_test/run_test.sh b/tools/fs_config/end_to_end_test/run_test.sh
index 7402276..b5a7e83 100755
--- a/tools/fs_config/end_to_end_test/run_test.sh
+++ b/tools/fs_config/end_to_end_test/run_test.sh
@@ -1,7 +1,7 @@
 cd $ANDROID_BUILD_TOP/build/make/tools/fs_config/end_to_end_test
 
 $ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
-  --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+  --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
   --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
   --partition system \
   --all-partitions vendor,product \
@@ -13,7 +13,7 @@
   echo 'Fail: Mismatch between system_fs_config_files and result_system_fs_config_files'
 
 $ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
-  --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+  --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
   --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
   --partition system \
   --all-partitions vendor,product \
@@ -25,7 +25,7 @@
   echo 'Fail: Mismatch between system_fs_config_dirs and result_system_fs_config_dirs'
 
 $ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
-  --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+  --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
   --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
   --partition vendor \
   --files \
@@ -36,7 +36,7 @@
   echo 'Fail: Mismatch between vendor_fs_config_files and result_vendor_fs_config_files'
 
 $ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
-  --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+  --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
   --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
   --partition vendor \
   --dirs \
@@ -47,7 +47,7 @@
   echo 'Fail: Mismatch between vendor_fs_config_dirs and result_vendor_fs_config_dirs'
 
 $ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
-  --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+  --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
   --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
   --partition product \
   --files \
@@ -58,7 +58,7 @@
   echo 'Fail: Mismatch between product_fs_config_files and result_product_fs_config_files'
 
 $ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
-  --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+  --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
   --capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
   --partition product \
   --dirs \
diff --git a/tools/mk2bp_catalog.py b/tools/mk2bp_catalog.py
index 83abd62..c2afb9b 100755
--- a/tools/mk2bp_catalog.py
+++ b/tools/mk2bp_catalog.py
@@ -168,22 +168,24 @@
       return True
   return False
 
-def make_annotation_link(annotations, analysis, modules):
-  if analysis:
-    return "<a href='javascript:update_details(%d)'>%s</a>" % (
-      annotations.Add(analysis, modules),
-      len(analysis)
-    )
-  else:
-    return "";
-
-
 def is_clean(makefile):
   for analysis in makefile.analyses.values():
     if analysis:
       return False
   return True
 
+def clean_and_only_blocked_by_clean(soong, all_makefiles, makefile):
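+  """Returns true if |makefile| is clean and all makefiles that its modules
+  transitively depend on are clean too ("Easy" in the reports)."""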
+  if not is_clean(makefile):
+    return False
+  modules = soong.reverse_makefiles[makefile.filename]
+  for module in modules:
+    for dep in soong.transitive_deps(module):
+      for filename in soong.makefiles.get(dep, []):
+        m = all_makefiles.get(filename)
+        if m and not is_clean(m):
+          return False
+  return True
+
 class Annotations(object):
   def __init__(self):
     self.entries = []
@@ -205,6 +207,7 @@
     self.makefiles = dict()
     self.reverse_makefiles = dict()
     self.installed = dict()
+    self.reverse_installed = dict()
     self.modules = set()
 
     for (module, module_type, problem, dependencies, makefiles, installed) in reader:
@@ -222,6 +225,29 @@
         self.reverse_makefiles.setdefault(f, []).append(module)
       for f in installed.strip().split(' '):
         self.installed[f] = module
+        self.reverse_installed.setdefault(module, []).append(f)
+
+  def transitive_deps(self, module):
+    """Returns the set of all modules that |module| transitively depends on."""
+    results = set()
+    def traverse(module):
+      for dep in self.deps.get(module, []):
+        if dep not in results:
+          results.add(dep)
+          # Descend into the dependency so indirect dependencies are included too.
+          traverse(dep)
+    traverse(module)
+    return results
+
+  def contains_unblocked_modules(self, filename):
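+    """Returns true if any module defined in |filename| has no blocking dependencies left."""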
+    for m in self.reverse_makefiles[filename]:
+      if len(self.deps[m]) == 0:
+        return True
+    return False
+
+  def contains_blocked_modules(self, filename):
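+    """Returns true if any module defined in |filename| still has blocking dependencies."""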
+    for m in self.reverse_makefiles[filename]:
+      if len(self.deps[m]) > 0:
+        return True
+    return False
 
 def count_deps(depsdb, module, seen):
   """Based on the depsdb, count the number of transitive dependencies.
@@ -237,18 +263,6 @@
       count += 1 + count_deps(depsdb, dep, seen)
   return count
 
-def contains_unblocked_modules(soong, modules):
-  for m in modules:
-    if len(soong.deps[m]) == 0:
-      return True
-  return False
-
-def contains_blocked_modules(soong, modules):
-  for m in modules:
-    if len(soong.deps[m]) > 0:
-      return True
-  return False
-
 OTHER_PARTITON = "_other"
 HOST_PARTITON = "_host"
 
@@ -273,6 +287,27 @@
 def format_module_list(modules):
   return "".join(["<div>%s</div>" % format_module_link(m) for m in modules])
 
+def print_analysis_header(link, title):
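+  """Prints an anchor, a title and the opening of an analysis table, through its
+  header row; callers print the data rows and close the table."""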
+  print("""
+    <a name="%(link)s"></a>
+    <h2>%(title)s</h2>
+    <table>
+      <tr>
+        <th class="RowTitle">Directory</th>
+        <th class="Count">Total</th>
+        <th class="Count Clean">Easy</th>
+        <th class="Count Clean">Unblocked Clean</th>
+        <th class="Count Unblocked">Unblocked</th>
+        <th class="Count Blocked">Blocked</th>
+        <th class="Count Clean">Clean</th>
+  """ % {
+    "link": link,
+    "title": title
+  })
+  for analyzer in ANALYZERS:
+    print("""<th class="Count Warning">%s</th>""" % analyzer.title)
+  print("      </tr>")
+
 def main():
   parser = argparse.ArgumentParser(description="Info about remaining Android.mk files.")
   parser.add_argument("--device", type=str, required=True,
@@ -287,6 +322,9 @@
                       help="Equivalent of $OUT_DIR, which will also be checked if"
                         + " --out_dir is unset. If neither is set, default is"
                         + " 'out'.")
+  parser.add_argument("--mode", type=str,
+                      default="html",
+                      help="output format: csv or html")
 
   args = parser.parse_args()
 
@@ -297,14 +335,11 @@
     args.out_dir = args.out_dir[:-1]
 
   TARGET_DEVICE = args.device
-  HOST_OUT_ROOT = args.out_dir + "host"
+  global HOST_OUT_ROOT
+  HOST_OUT_ROOT = args.out_dir + "/host"
+  global PRODUCT_OUT
   PRODUCT_OUT = args.out_dir + "/target/product/%s" % TARGET_DEVICE
 
-  if args.title:
-    page_title = args.title
-  else:
-    page_title = "Remaining Android.mk files"
-
   # Read target information
   # TODO: Pull from configurable location. This is also slightly different because it's
   # only a single build, where as the tree scanning we do below is all Android.mk files.
@@ -312,580 +347,688 @@
       % PRODUCT_OUT, "r", errors="ignore") as csvfile:
     soong = SoongData(csv.reader(csvfile))
 
-  # Which modules are installed where
-  modules_by_partition = dict()
-  partitions = set()
-  for installed, module in soong.installed.items():
-    partition = get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT, installed)
-    modules_by_partition.setdefault(partition, []).append(module)
-    partitions.add(partition)
+  # Read the makefiles
+  all_makefiles = dict()
+  for filename, modules in soong.reverse_makefiles.items():
+    if filename.startswith(args.out_dir + "/"):
+      continue
+    all_makefiles[filename] = Makefile(filename)
 
-  print("""
-  <html>
-    <head>
-      <title>%(page_title)s</title>
-      <style type="text/css">
-        body, table {
-          font-family: Roboto, sans-serif;
-          font-size: 9pt;
-        }
-        body {
-          margin: 0;
-          padding: 0;
-          display: flex;
-          flex-direction: column;
-          height: 100vh;
-        }
-        #container {
-          flex: 1;
-          display: flex;
-          flex-direction: row;
-          overflow: hidden;
-        }
-        #tables {
-          padding: 0 20px 0 20px;
-          overflow: scroll;
-          flex: 2 2 600px;
-        }
-        #details {
-          display: none;
-          overflow: scroll;
-          flex: 1 1 650px;
-          padding: 0 20px 0 20px;
-        }
-        h1 {
-          margin: 16px 0 16px 20px;
-        }
-        h2 {
-          margin: 12px 0 4px 0;
-        }
-        .DirName {
-          text-align: left;
-          width: 200px;
-          min-width: 200px;
-        }
-        .Count {
-          text-align: center;
-          width: 60px;
-          min-width: 60px;
-          max-width: 60px;
-        }
-        th.Clean,
-        th.Unblocked {
-          background-color: #1e8e3e;
-        }
-        th.Blocked {
-          background-color: #d93025;
-        }
-        th.Warning {
-          background-color: #e8710a;
-        }
-        th {
-          background-color: #1a73e8;
-          color: white;
-          font-weight: bold;
-        }
-        td.Unblocked {
-          background-color: #81c995;
-        }
-        td.Blocked {
-          background-color: #f28b82;
-        }
-        td, th {
-          padding: 2px 4px;
-          border-right: 2px solid white;
-        }
-        tr.AospDir td {
-          background-color: #e6f4ea;
-          border-right-color: #e6f4ea;
-        }
-        tr.GoogleDir td {
-          background-color: #e8f0fe;
-          border-right-color: #e8f0fe;
-        }
-        tr.PartnerDir td {
-          background-color: #fce8e6;
-          border-right-color: #fce8e6;
-        }
-        table {
-          border-spacing: 0;
-          border-collapse: collapse;
-        }
-        div.Makefile {
-          margin: 12px 0 0 0;
-        }
-        div.Makefile:first {
-          margin-top: 0;
-        }
-        div.FileModules {
-          padding: 4px 0 0 20px;
-        }
-        td.LineNo {
-          vertical-align: baseline;
-          padding: 6px 0 0 20px;
-          width: 50px;
-          vertical-align: baseline;
-        }
-        td.LineText {
-          vertical-align: baseline;
-          font-family: monospace;
-          padding: 6px 0 0 0;
-        }
-        a.CsLink {
-          font-family: monospace;
-        }
-        div.Help {
-          width: 550px;
-        }
-        table.HelpColumns tr {
-          border-bottom: 2px solid white;
-        }
-        .ModuleName {
-          vertical-align: baseline;
-          padding: 6px 0 0 20px;
-          width: 275px;
-        }
-        .ModuleDeps {
-          vertical-align: baseline;
-          padding: 6px 0 0 0;
-        }
-        table#Modules td {
-          vertical-align: baseline;
-        }
-        tr.Alt {
-          background-color: #ececec;
-        }
-        tr.Alt td {
-          border-right-color: #ececec;
-        }
-        .AnalysisCol {
-          width: 300px;
-          padding: 2px;
-          line-height: 21px;
-        }
-        .Analysis {
-          color: white;
-          font-weight: bold;
-          background-color: #e8710a;
-          border-radius: 6px;
-          margin: 4px;
-          padding: 2px 6px;
-          white-space: nowrap;
-        }
-        .Nav {
-          margin: 4px 0 16px 20px;
-        }
-        .NavSpacer {
-          display: inline-block;
-          width: 6px;
-        }
-        .ModuleDetails {
-          margin-top: 20px;
-        }
-        .ModuleDetails td {
-          vertical-align: baseline;
-        }
-      </style>
-    </head>
-    <body>
-      <h1>%(page_title)s</h1>
-      <div class="Nav">
-        <a href='#help'>Help</a>
-        <span class='NavSpacer'></span><span class='NavSpacer'> </span>
-        Partitions:
-  """ % {
-    "page_title": page_title,
-  })
-  for partition in sorted(partitions):
-    print("<a href='#partition_%s'>%s</a><span class='NavSpacer'></span>" % (partition, partition))
+  if args.mode == "html":
+    HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+  elif args.mode == "csv":
+    CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
 
-  print("""
-        <span class='NavSpacer'></span><span class='NavSpacer'> </span>
-      </div>
-      <div id="container">
-        <div id="tables">
-        <a name="help"></a>
-        <div class="Help">
-          <p>
-          This page analyzes the remaining Android.mk files in the Android Source tree.
-          <p>
-          The modules are first broken down by which of the device filesystem partitions
-          they are installed to. This also includes host tools and testcases which don't
-          actually reside in their own partition but convenitely group together.
-          <p>
-          The makefiles for each partition are further are grouped into a set of directories
-          aritrarily picked to break down the problem size by owners.
-          <ul style="width: 300px">
-            <li style="background-color: #e6f4ea">AOSP directories are colored green.</li>
-            <li style="background-color: #e8f0fe">Google directories are colored blue.</li>
-            <li style="background-color: #fce8e6">Other partner directories are colored red.</li>
-          </ul>
-          Each of the makefiles are scanned for issues that are likely to come up during
-          conversion to soong.  Clicking the number in each cell shows additional information,
-          including the line that triggered the warning.
-          <p>
-          <table class="HelpColumns">
-            <tr>
-              <th>Total</th>
-              <td>The total number of makefiles in this each directory.</td>
-            </tr>
-            <tr>
-              <th class="Unblocked">Unblocked</th>
-              <td>Makefiles containing one or more modules that don't have any
-                  additional dependencies pending before conversion.</td>
-            </tr>
-            <tr>
-              <th class="Blocked">Blocked</th>
-              <td>Makefiles containiong one or more modules which <i>do</i> have
-                  additional prerequesite depenedencies that are not yet converted.</td>
-            </tr>
-            <tr>
-              <th class="Clean">Clean</th>
-              <td>The number of makefiles that have none of the following warnings.</td>
-            </tr>
-            <tr>
-              <th class="Warning">ifeq / ifneq</th>
-              <td>Makefiles that use <code>ifeq</code> or <code>ifneq</code>. i.e.
-              conditionals.</td>
-            </tr>
-            <tr>
-              <th class="Warning">Wacky Includes</th>
-              <td>Makefiles that <code>include</code> files other than the standard build-system
-                  defined template and macros.</td>
-            </tr>
-            <tr>
-              <th class="Warning">Calls base_rules</th>
-              <td>Makefiles that include base_rules.mk directly.</td>
-            </tr>
-            <tr>
-              <th class="Warning">Calls define</th>
-              <td>Makefiles that define their own macros. Some of these are easy to convert
-                  to soong <code>defaults</code>, but others are complex.</td>
-            </tr>
-            <tr>
-              <th class="Warning">Has ../</th>
-              <td>Makefiles containing the string "../" outside of a comment. These likely
-                  access files outside their directories.</td>
-            </tr>
-            <tr>
-              <th class="Warning">dist-for-goals</th>
-              <td>Makefiles that call <code>dist-for-goals</code> directly.</td>
-            </tr>
-            <tr>
-              <th class="Warning">.PHONY</th>
-              <td>Makefiles that declare .PHONY targets.</td>
-            </tr>
-            <tr>
-              <th class="Warning">renderscript</th>
-              <td>Makefiles defining targets that depend on <code>.rscript</code> source files.</td>
-            </tr>
-            <tr>
-              <th class="Warning">vts src</th>
-              <td>Makefiles defining targets that depend on <code>.vts</code> source files.</td>
-            </tr>
-            <tr>
-              <th class="Warning">COPY_HEADERS</th>
-              <td>Makefiles using LOCAL_COPY_HEADERS.</td>
-            </tr>
-          </table>
-          <p>
-          Following the list of directories is a list of the modules that are installed on
-          each partition. Potential issues from their makefiles are listed, as well as the
-          total number of dependencies (both blocking that module and blocked by that module)
-          and the list of direct dependencies.  Note: The number is the number of all transitive
-          dependencies and the list of modules is only the direct dependencies.
-        </div>
-  """)
+class HtmlProcessor(object):
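+  """Renders the interactive HTML report: per-partition tables, module details and
+  an overall summary."""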
+  def __init__(self, args, soong, all_makefiles):
+    self.args = args
+    self.soong = soong
+    self.all_makefiles = all_makefiles
+    self.annotations = Annotations()
 
-  annotations = Annotations()
+  def execute(self):
+    if self.args.title:
+      page_title = self.args.title
+    else:
+      page_title = "Remaining Android.mk files"
 
-  # For each partition
-  makefiles_for_partitions = dict()
-  for partition in sorted(partitions):
-    modules = modules_by_partition[partition]
-
-    makefiles = set(itertools.chain.from_iterable(
-        [soong.makefiles[module] for module in modules]))
-
-    # Read makefiles
-    summary = Summary()
-    for filename in makefiles:
-      if not filename.startswith(args.out_dir + "/"):
-        summary.Add(Makefile(filename))
-
-    # Categorize directories by who is responsible
-    aosp_dirs = []
-    google_dirs = []
-    partner_dirs = []
-    for dirname in sorted(summary.directories.keys()):
-      if is_aosp(dirname):
-        aosp_dirs.append(dirname)
-      elif is_google(dirname):
-        google_dirs.append(dirname)
-      else:
-        partner_dirs.append(dirname)
+    # Which modules are installed where
+    modules_by_partition = dict()
+    partitions = set()
+    for installed, module in self.soong.installed.items():
+      partition = get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT, installed)
+      modules_by_partition.setdefault(partition, []).append(module)
+      partitions.add(partition)
 
     print("""
-      <a name="partition_%(partition)s"></a>
-      <h2>%(partition)s</h2>
-      <table>
-        <tr>
-          <th class="DirName">Directory</th>
-          <th class="Count">Total</th>
-          <th class="Count Unblocked">Unblocked</th>
-          <th class="Count Blocked">Blocked</th>
-          <th class="Count Clean">Clean</th>
+    <html>
+      <head>
+        <title>%(page_title)s</title>
+        <style type="text/css">
+          body, table {
+            font-family: Roboto, sans-serif;
+            font-size: 9pt;
+          }
+          body {
+            margin: 0;
+            padding: 0;
+            display: flex;
+            flex-direction: column;
+            height: 100vh;
+          }
+          #container {
+            flex: 1;
+            display: flex;
+            flex-direction: row;
+            overflow: hidden;
+          }
+          #tables {
+            padding: 0 20px 40px 20px;
+            overflow: scroll;
+            flex: 2 2 600px;
+          }
+          #details {
+            display: none;
+            overflow: scroll;
+            flex: 1 1 650px;
+            padding: 0 20px 0 20px;
+          }
+          h1 {
+            margin: 16px 0 16px 20px;
+          }
+          h2 {
+            margin: 12px 0 4px 0;
+          }
+          .RowTitle {
+            text-align: left;
+            width: 200px;
+            min-width: 200px;
+          }
+          .Count {
+            text-align: center;
+            width: 60px;
+            min-width: 60px;
+            max-width: 60px;
+          }
+          th.Clean,
+          th.Unblocked {
+            background-color: #1e8e3e;
+          }
+          th.Blocked {
+            background-color: #d93025;
+          }
+          th.Warning {
+            background-color: #e8710a;
+          }
+          th {
+            background-color: #1a73e8;
+            color: white;
+            font-weight: bold;
+          }
+          td.Unblocked {
+            background-color: #81c995;
+          }
+          td.Blocked {
+            background-color: #f28b82;
+          }
+          td, th {
+            padding: 2px 4px;
+            border-right: 2px solid white;
+          }
+          tr.TotalRow td {
+            background-color: white;
+            border-right-color: white;
+          }
+          tr.AospDir td {
+            background-color: #e6f4ea;
+            border-right-color: #e6f4ea;
+          }
+          tr.GoogleDir td {
+            background-color: #e8f0fe;
+            border-right-color: #e8f0fe;
+          }
+          tr.PartnerDir td {
+            background-color: #fce8e6;
+            border-right-color: #fce8e6;
+          }
+          table {
+            border-spacing: 0;
+            border-collapse: collapse;
+          }
+          div.Makefile {
+            margin: 12px 0 0 0;
+          }
+          div.Makefile:first {
+            margin-top: 0;
+          }
+          div.FileModules {
+            padding: 4px 0 0 20px;
+          }
+          td.LineNo {
+            vertical-align: baseline;
+            padding: 6px 0 0 20px;
+            width: 50px;
+            vertical-align: baseline;
+          }
+          td.LineText {
+            vertical-align: baseline;
+            font-family: monospace;
+            padding: 6px 0 0 0;
+          }
+          a.CsLink {
+            font-family: monospace;
+          }
+          div.Help {
+            width: 550px;
+          }
+          table.HelpColumns tr {
+            border-bottom: 2px solid white;
+          }
+          .ModuleName {
+            vertical-align: baseline;
+            padding: 6px 0 0 20px;
+            width: 275px;
+          }
+          .ModuleDeps {
+            vertical-align: baseline;
+            padding: 6px 0 0 0;
+          }
+          table#Modules td {
+            vertical-align: baseline;
+          }
+          tr.Alt {
+            background-color: #ececec;
+          }
+          tr.Alt td {
+            border-right-color: #ececec;
+          }
+          .AnalysisCol {
+            width: 300px;
+            padding: 2px;
+            line-height: 21px;
+          }
+          .Analysis {
+            color: white;
+            font-weight: bold;
+            background-color: #e8710a;
+            border-radius: 6px;
+            margin: 4px;
+            padding: 2px 6px;
+            white-space: nowrap;
+          }
+          .Nav {
+            margin: 4px 0 16px 20px;
+          }
+          .NavSpacer {
+            display: inline-block;
+            width: 6px;
+          }
+          .ModuleDetails {
+            margin-top: 20px;
+          }
+          .ModuleDetails td {
+            vertical-align: baseline;
+          }
+        </style>
+      </head>
+      <body>
+        <h1>%(page_title)s</h1>
+        <div class="Nav">
+          <a href='#help'>Help</a>
+          <span class='NavSpacer'></span><span class='NavSpacer'> </span>
+          Partitions:
     """ % {
-      "partition": partition
+      "page_title": page_title,
     })
+    for partition in sorted(partitions):
+      print("<a href='#partition_%s'>%s</a><span class='NavSpacer'></span>" % (partition, partition))
 
-    for analyzer in ANALYZERS:
-      print("""<th class="Count Warning">%s</th>""" % analyzer.title)
-
-    print("      </tr>")
-    for dirgroup, rowclass in [(aosp_dirs, "AospDir"),
-                               (google_dirs, "GoogleDir"),
-                               (partner_dirs, "PartnerDir"),]:
-      for dirname in dirgroup:
-        makefiles = summary.directories[dirname]
-
-        all_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles]
-        clean_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
-            if is_clean(makefile)]
-        unblocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
-            if contains_unblocked_modules(soong,
-              soong.reverse_makefiles[makefile.filename])]
-        blocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
-            if contains_blocked_modules(soong,
-              soong.reverse_makefiles[makefile.filename])]
-
-        print("""
-          <tr class="%(rowclass)s">
-            <td class="DirName">%(dirname)s</td>
-            <td class="Count">%(makefiles)s</td>
-            <td class="Count">%(unblocked)s</td>
-            <td class="Count">%(blocked)s</td>
-            <td class="Count">%(clean)s</td>
-        """ % {
-          "rowclass": rowclass,
-          "dirname": dirname,
-          "makefiles": make_annotation_link(annotations, all_makefiles, modules),
-          "unblocked": make_annotation_link(annotations, unblocked_makefiles, modules),
-          "blocked": make_annotation_link(annotations, blocked_makefiles, modules),
-          "clean": make_annotation_link(annotations, clean_makefiles, modules),
-        })
-        for analyzer in ANALYZERS:
-          analyses = [m.analyses.get(analyzer) for m in makefiles if m.analyses.get(analyzer)]
-          print("""<td class="Count">%s</td>"""
-              % make_annotation_link(annotations, analyses, modules))
-
-        print("      </tr>")
     print("""
-      </table>
+          <span class='NavSpacer'></span><span class='NavSpacer'> </span>
+          <a href='#summary'>Overall Summary</a>
+        </div>
+        <div id="container">
+          <div id="tables">
+          <a name="help"></a>
+          <div class="Help">
+            <p>
+            This page analyzes the remaining Android.mk files in the Android Source tree.
+            <p>
+            The modules are first broken down by which of the device filesystem partitions
+            they are installed to. This also includes host tools and testcases, which don't
+            actually reside in their own partition but are conveniently grouped together.
+            <p>
+            The makefiles for each partition are further grouped into a set of directories
+            arbitrarily picked to break down the problem size by owners.
+            <ul style="width: 300px">
+              <li style="background-color: #e6f4ea">AOSP directories are colored green.</li>
+              <li style="background-color: #e8f0fe">Google directories are colored blue.</li>
+              <li style="background-color: #fce8e6">Other partner directories are colored red.</li>
+            </ul>
+            Each of the makefiles is scanned for issues that are likely to come up during
+            conversion to soong.  Clicking the number in each cell shows additional information,
+            including the line that triggered the warning.
+            <p>
+            <table class="HelpColumns">
+              <tr>
+                <th>Total</th>
+                <td>The total number of makefiles in each directory.</td>
+              </tr>
+              <tr>
+                <th class="Clean">Easy</th>
+                <td>The number of makefiles that have no warnings themselves and whose
+                    dependencies (including transitive ones) have no warnings either.</td>
+              </tr>
+              <tr>
+                <th class="Clean">Unblocked Clean</th>
+                <td>The number of makefiles that are both Unblocked and Clean.</td>
+              </tr>
+
+              <tr>
+                <th class="Unblocked">Unblocked</th>
+                <td>Makefiles containing one or more modules that don't have any
+                    additional dependencies pending before conversion.</td>
+              </tr>
+              <tr>
+                <th class="Blocked">Blocked</th>
+                <td>Makefiles containing one or more modules which <i>do</i> have
+                    additional prerequisite dependencies that are not yet converted.</td>
+              </tr>
+              <tr>
+                <th class="Clean">Clean</th>
+                <td>The number of makefiles that have none of the following warnings.</td>
+              </tr>
+              <tr>
+                <th class="Warning">ifeq / ifneq</th>
+                <td>Makefiles that use <code>ifeq</code> or <code>ifneq</code>. i.e.
+                conditionals.</td>
+              </tr>
+              <tr>
+                <th class="Warning">Wacky Includes</th>
+                <td>Makefiles that <code>include</code> files other than the standard build-system
+                    defined template and macros.</td>
+              </tr>
+              <tr>
+                <th class="Warning">Calls base_rules</th>
+                <td>Makefiles that include base_rules.mk directly.</td>
+              </tr>
+              <tr>
+                <th class="Warning">Calls define</th>
+                <td>Makefiles that define their own macros. Some of these are easy to convert
+                    to soong <code>defaults</code>, but others are complex.</td>
+              </tr>
+              <tr>
+                <th class="Warning">Has ../</th>
+                <td>Makefiles containing the string "../" outside of a comment. These likely
+                    access files outside their directories.</td>
+              </tr>
+              <tr>
+                <th class="Warning">dist-for-goals</th>
+                <td>Makefiles that call <code>dist-for-goals</code> directly.</td>
+              </tr>
+              <tr>
+                <th class="Warning">.PHONY</th>
+                <td>Makefiles that declare .PHONY targets.</td>
+              </tr>
+              <tr>
+                <th class="Warning">renderscript</th>
+                <td>Makefiles defining targets that depend on <code>.rscript</code> source files.</td>
+              </tr>
+              <tr>
+                <th class="Warning">vts src</th>
+                <td>Makefiles defining targets that depend on <code>.vts</code> source files.</td>
+              </tr>
+              <tr>
+                <th class="Warning">COPY_HEADERS</th>
+                <td>Makefiles using LOCAL_COPY_HEADERS.</td>
+              </tr>
+            </table>
+            <p>
+            Following the list of directories is a list of the modules that are installed on
+            each partition. Potential issues from their makefiles are listed, as well as the
+            total number of dependencies (both blocking that module and blocked by that module)
+            and the list of direct dependencies.  Note: the count includes all transitive
+            dependencies, while the module list shows only the direct dependencies.
+          </div>
     """)
 
-    module_details = [(count_deps(soong.deps, m, []), -count_deps(soong.reverse_deps, m, []), m)
-               for m in modules]
-    module_details.sort()
-    module_details = [m[2] for m in module_details]
-    print("""
-      <table class="ModuleDetails">""")
-    print("<tr>")
-    print("  <th>Module Name</th>")
-    print("  <th>Issues</th>")
-    print("  <th colspan='2'>Blocked By</th>")
-    print("  <th colspan='2'>Blocking</th>")
-    print("</tr>")
-    altRow = True
-    for module in module_details:
-      analyses = set()
-      for filename in soong.makefiles[module]:
-        makefile = summary.makefiles.get(filename)
+    overall_summary = Summary()
+
+    # For each partition
+    for partition in sorted(partitions):
+      modules = modules_by_partition[partition]
+
+      makefiles = set(itertools.chain.from_iterable(
+          [self.soong.makefiles[module] for module in modules]))
+
+      # Read makefiles
+      summary = Summary()
+      for filename in makefiles:
+        makefile = self.all_makefiles.get(filename)
         if makefile:
-          for analyzer, analysis in makefile.analyses.items():
-            if analysis:
-              analyses.add(analyzer.title)
+          summary.Add(makefile)
+          overall_summary.Add(makefile)
 
-      altRow = not altRow
-      print("<tr class='%s'>" % ("Alt" if altRow else "",))
-      print("  <td><a name='module_%s'></a>%s</td>" % (module, module))
-      print("  <td class='AnalysisCol'>%s</td>" % " ".join(["<span class='Analysis'>%s</span>" % title
-          for title in analyses]))
-      print("  <td>%s</td>" % count_deps(soong.deps, module, []))
-      print("  <td>%s</td>" % format_module_list(soong.deps.get(module, [])))
-      print("  <td>%s</td>" % count_deps(soong.reverse_deps, module, []))
-      print("  <td>%s</td>" % format_module_list(soong.reverse_deps.get(module, [])))
+      # Categorize directories by who is responsible
+      aosp_dirs = []
+      google_dirs = []
+      partner_dirs = []
+      for dirname in sorted(summary.directories.keys()):
+        if is_aosp(dirname):
+          aosp_dirs.append(dirname)
+        elif is_google(dirname):
+          google_dirs.append(dirname)
+        else:
+          partner_dirs.append(dirname)
+
+      print_analysis_header("partition_" + partition, partition)
+
+      for dirgroup, rowclass in [(aosp_dirs, "AospDir"),
+                                 (google_dirs, "GoogleDir"),
+                                 (partner_dirs, "PartnerDir"),]:
+        for dirname in dirgroup:
+          self.print_analysis_row(summary, modules,
+                               dirname, rowclass, summary.directories[dirname])
+
+      self.print_analysis_row(summary, modules,
+                           "Total", "TotalRow",
+                           set(itertools.chain.from_iterable(summary.directories.values())))
+      print("""
+        </table>
+      """)
+
+      module_details = [(count_deps(self.soong.deps, m, []),
+                         -count_deps(self.soong.reverse_deps, m, []), m)
+                 for m in modules]
+      module_details.sort()
+      module_details = [m[2] for m in module_details]
+      print("""
+        <table class="ModuleDetails">""")
+      print("<tr>")
+      print("  <th>Module Name</th>")
+      print("  <th>Issues</th>")
+      print("  <th colspan='2'>Blocked By</th>")
+      print("  <th colspan='2'>Blocking</th>")
       print("</tr>")
-    print("""</table>""")
+      altRow = True
+      for module in module_details:
+        analyses = set()
+        for filename in self.soong.makefiles[module]:
+          makefile = summary.makefiles.get(filename)
+          if makefile:
+            for analyzer, analysis in makefile.analyses.items():
+              if analysis:
+                analyses.add(analyzer.title)
 
-  print("""
-    <script type="text/javascript">
-    function close_details() {
-      document.getElementById('details').style.display = 'none';
-    }
+        altRow = not altRow
+        print("<tr class='%s'>" % ("Alt" if altRow else "",))
+        print("  <td><a name='module_%s'></a>%s</td>" % (module, module))
+        print("  <td class='AnalysisCol'>%s</td>" % " ".join(["<span class='Analysis'>%s</span>" % title
+            for title in analyses]))
+        print("  <td>%s</td>" % count_deps(self.soong.deps, module, []))
+        print("  <td>%s</td>" % format_module_list(self.soong.deps.get(module, [])))
+        print("  <td>%s</td>" % count_deps(self.soong.reverse_deps, module, []))
+        print("  <td>%s</td>" % format_module_list(self.soong.reverse_deps.get(module, [])))
+        print("</tr>")
+      print("""</table>""")
 
-    class LineMatch {
-      constructor(lineno, text) {
-        this.lineno = lineno;
-        this.text = text;
-      }
-    }
+    print_analysis_header("summary", "Overall Summary")
 
-    class Analysis {
-      constructor(filename, modules, line_matches) {
-        this.filename = filename;
-        this.modules = modules;
-        this.line_matches = line_matches;
-      }
-    }
+    modules = [module for installed, module in self.soong.installed.items()]
+    self.print_analysis_row(overall_summary, modules,
+                         "All Makefiles", "TotalRow",
+                         set(itertools.chain.from_iterable(overall_summary.directories.values())))
+    print("""
+        </table>
+    """)
 
-    class Module {
-      constructor(deps) {
-        this.deps = deps;
-      }
-    }
-
-    function make_module_link(module) {
-      var a = document.createElement('a');
-      a.className = 'ModuleLink';
-      a.innerText = module;
-      a.href = '#module_' + module;
-      return a;
-    }
-
-    function update_details(id) {
-      document.getElementById('details').style.display = 'block';
-
-      var analyses = ANALYSIS[id];
-
-      var details = document.getElementById("details_data");
-      while (details.firstChild) {
-          details.removeChild(details.firstChild);
+    print("""
+      <script type="text/javascript">
+      function close_details() {
+        document.getElementById('details').style.display = 'none';
       }
 
-      for (var i=0; i<analyses.length; i++) {
-        var analysis = analyses[i];
+      class LineMatch {
+        constructor(lineno, text) {
+          this.lineno = lineno;
+          this.text = text;
+        }
+      }
 
-        var makefileDiv = document.createElement('div');
-        makefileDiv.className = 'Makefile';
-        details.appendChild(makefileDiv);
+      class Analysis {
+        constructor(filename, modules, line_matches) {
+          this.filename = filename;
+          this.modules = modules;
+          this.line_matches = line_matches;
+        }
+      }
 
-        var fileA = document.createElement('a');
-        makefileDiv.appendChild(fileA);
-        fileA.className = 'CsLink';
-        fileA.href = '%(codesearch)s' + analysis.filename;
-        fileA.innerText = analysis.filename;
-        fileA.target = "_blank";
+      class Module {
+        constructor(deps) {
+          this.deps = deps;
+        }
+      }
 
-        if (analysis.modules.length > 0) {
-          var moduleTable = document.createElement('table');
-          details.appendChild(moduleTable);
+      function make_module_link(module) {
+        var a = document.createElement('a');
+        a.className = 'ModuleLink';
+        a.innerText = module;
+        a.href = '#module_' + module;
+        return a;
+      }
 
-          for (var j=0; j<analysis.modules.length; j++) {
-            var moduleRow = document.createElement('tr');
-            moduleTable.appendChild(moduleRow);
+      function update_details(id) {
+        document.getElementById('details').style.display = 'block';
 
-            var moduleNameCell = document.createElement('td');
-            moduleRow.appendChild(moduleNameCell);
-            moduleNameCell.className = 'ModuleName';
-            moduleNameCell.appendChild(make_module_link(analysis.modules[j]));
+        var analyses = ANALYSIS[id];
 
-            var moduleData = MODULE_DATA[analysis.modules[j]];
-            console.log(moduleData);
+        var details = document.getElementById("details_data");
+        while (details.firstChild) {
+            details.removeChild(details.firstChild);
+        }
 
-            var depCell = document.createElement('td');
-            moduleRow.appendChild(depCell);
+        for (var i=0; i<analyses.length; i++) {
+          var analysis = analyses[i];
 
-            if (moduleData.deps.length == 0) {
-              depCell.className = 'ModuleDeps Unblocked';
-              depCell.innerText = 'UNBLOCKED';
-            } else {
-              depCell.className = 'ModuleDeps Blocked';
+          var makefileDiv = document.createElement('div');
+          makefileDiv.className = 'Makefile';
+          details.appendChild(makefileDiv);
 
-              for (var k=0; k<moduleData.deps.length; k++) {
-                depCell.appendChild(make_module_link(moduleData.deps[k]));
-                depCell.appendChild(document.createElement('br'));
+          var fileA = document.createElement('a');
+          makefileDiv.appendChild(fileA);
+          fileA.className = 'CsLink';
+          fileA.href = '%(codesearch)s' + analysis.filename;
+          fileA.innerText = analysis.filename;
+          fileA.target = "_blank";
+
+          if (analysis.modules.length > 0) {
+            var moduleTable = document.createElement('table');
+            details.appendChild(moduleTable);
+
+            for (var j=0; j<analysis.modules.length; j++) {
+              var moduleRow = document.createElement('tr');
+              moduleTable.appendChild(moduleRow);
+
+              var moduleNameCell = document.createElement('td');
+              moduleRow.appendChild(moduleNameCell);
+              moduleNameCell.className = 'ModuleName';
+              moduleNameCell.appendChild(make_module_link(analysis.modules[j]));
+
+              var moduleData = MODULE_DATA[analysis.modules[j]];
+              console.log(moduleData);
+
+              var depCell = document.createElement('td');
+              moduleRow.appendChild(depCell);
+
+              if (moduleData.deps.length == 0) {
+                depCell.className = 'ModuleDeps Unblocked';
+                depCell.innerText = 'UNBLOCKED';
+              } else {
+                depCell.className = 'ModuleDeps Blocked';
+
+                for (var k=0; k<moduleData.deps.length; k++) {
+                  depCell.appendChild(make_module_link(moduleData.deps[k]));
+                  depCell.appendChild(document.createElement('br'));
+                }
               }
             }
           }
-        }
 
-        if (analysis.line_matches.length > 0) {
-          var lineTable = document.createElement('table');
-          details.appendChild(lineTable);
+          if (analysis.line_matches.length > 0) {
+            var lineTable = document.createElement('table');
+            details.appendChild(lineTable);
 
-          for (var j=0; j<analysis.line_matches.length; j++) {
-            var line_match = analysis.line_matches[j];
+            for (var j=0; j<analysis.line_matches.length; j++) {
+              var line_match = analysis.line_matches[j];
 
-            var lineRow = document.createElement('tr');
-            lineTable.appendChild(lineRow);
+              var lineRow = document.createElement('tr');
+              lineTable.appendChild(lineRow);
 
-            var linenoCell = document.createElement('td');
-            lineRow.appendChild(linenoCell);
-            linenoCell.className = 'LineNo';
+              var linenoCell = document.createElement('td');
+              lineRow.appendChild(linenoCell);
+              linenoCell.className = 'LineNo';
 
-            var linenoA = document.createElement('a');
-            linenoCell.appendChild(linenoA);
-            linenoA.className = 'CsLink';
-            linenoA.href = '%(codesearch)s' + analysis.filename
-                + ';l=' + line_match.lineno;
-            linenoA.innerText = line_match.lineno;
-            linenoA.target = "_blank";
+              var linenoA = document.createElement('a');
+              linenoCell.appendChild(linenoA);
+              linenoA.className = 'CsLink';
+              linenoA.href = '%(codesearch)s' + analysis.filename
+                  + ';l=' + line_match.lineno;
+              linenoA.innerText = line_match.lineno;
+              linenoA.target = "_blank";
 
-            var textCell = document.createElement('td');
-            lineRow.appendChild(textCell);
-            textCell.className = 'LineText';
-            textCell.innerText = line_match.text;
+              var textCell = document.createElement('td');
+              lineRow.appendChild(textCell);
+              textCell.className = 'LineText';
+              textCell.innerText = line_match.text;
+            }
           }
         }
       }
-    }
 
-    var ANALYSIS = [
-    """ % {
-        "codesearch": args.codesearch,
-    })
-  for entry, mods in annotations.entries:
-    print("  [")
-    for analysis in entry:
-      print("    new Analysis('%(filename)s', %(modules)s, [%(line_matches)s])," % {
-        "filename": analysis.filename,
-        #"modules": json.dumps([m for m in mods if m in filename in soong.makefiles[m]]),
-        "modules": json.dumps(
-            [m for m in soong.reverse_makefiles[analysis.filename] if m in mods]),
-        "line_matches": ", ".join([
-            "new LineMatch(%d, %s)" % (lineno, json.dumps(text))
-            for lineno, text in analysis.line_matches]),
+      var ANALYSIS = [
+      """ % {
+          "codesearch": self.args.codesearch,
       })
-    print("  ],")
-  print("""
-    ];
-    var MODULE_DATA = {
-  """)
-  for module in soong.modules:
-    print("      '%(name)s': new Module(%(deps)s)," % {
-      "name": module,
-      "deps": json.dumps(soong.deps[module]),
-    })
-  print("""
-    };
-    </script>
+    for entry, mods in self.annotations.entries:
+      print("  [")
+      for analysis in entry:
+        print("    new Analysis('%(filename)s', %(modules)s, [%(line_matches)s])," % {
+          "filename": analysis.filename,
+          #"modules": json.dumps([m for m in mods if m in filename in self.soong.makefiles[m]]),
+          "modules": json.dumps(
+              [m for m in self.soong.reverse_makefiles[analysis.filename] if m in mods]),
+          "line_matches": ", ".join([
+              "new LineMatch(%d, %s)" % (lineno, json.dumps(text))
+              for lineno, text in analysis.line_matches]),
+        })
+      print("  ],")
+    print("""
+      ];
+      var MODULE_DATA = {
+    """)
+    for module in self.soong.modules:
+      print("      '%(name)s': new Module(%(deps)s)," % {
+        "name": module,
+        "deps": json.dumps(self.soong.deps[module]),
+      })
+    print("""
+      };
+      </script>
 
-  """)
+    """)
 
-  print("""
-      </div> <!-- id=tables -->
-      <div id="details">
-        <div style="text-align: right;">
-          <a href="javascript:close_details();">
-            <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"/></svg>
-          </a>
+    print("""
+        </div> <!-- id=tables -->
+        <div id="details">
+          <div style="text-align: right;">
+            <a href="javascript:close_details();">
+              <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"/></svg>
+            </a>
+          </div>
+          <div id="details_data"></div>
         </div>
-        <div id="details_data"></div>
-      </div>
-    </body>
-  </html>
-  """)
+      </body>
+    </html>
+    """)
+
+  def traverse_ready_makefiles(self, summary, makefiles):
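+    """Analyses for the makefiles that are clean and blocked only by clean makefiles."""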
+    return [Analysis(makefile.filename, []) for makefile in makefiles
+        if clean_and_only_blocked_by_clean(self.soong, self.all_makefiles, makefile)]
+
+  def print_analysis_row(self, summary, modules, rowtitle, rowclass, makefiles):
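+    """Prints one table row of counts (each linked to details) for |makefiles|."""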
+    all_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles]
+    clean_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+        if is_clean(makefile)]
+    easy_makefiles = self.traverse_ready_makefiles(summary, makefiles)
+    unblocked_clean_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+        if (self.soong.contains_unblocked_modules(makefile.filename)
+            and is_clean(makefile))]
+    unblocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+        if self.soong.contains_unblocked_modules(makefile.filename)]
+    blocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+        if self.soong.contains_blocked_modules(makefile.filename)]
+
+    print("""
+      <tr class="%(rowclass)s">
+        <td class="RowTitle">%(rowtitle)s</td>
+        <td class="Count">%(makefiles)s</td>
+        <td class="Count">%(easy)s</td>
+        <td class="Count">%(unblocked_clean)s</td>
+        <td class="Count">%(unblocked)s</td>
+        <td class="Count">%(blocked)s</td>
+        <td class="Count">%(clean)s</td>
+    """ % {
+      "rowclass": rowclass,
+      "rowtitle": rowtitle,
+      "makefiles": self.make_annotation_link(all_makefiles, modules),
+      "unblocked": self.make_annotation_link(unblocked_makefiles, modules),
+      "blocked": self.make_annotation_link(blocked_makefiles, modules),
+      "clean": self.make_annotation_link(clean_makefiles, modules),
+      "unblocked_clean": self.make_annotation_link(unblocked_clean_makefiles, modules),
+      "easy": self.make_annotation_link(easy_makefiles, modules),
+    })
+
+    for analyzer in ANALYZERS:
+      analyses = [m.analyses.get(analyzer) for m in makefiles if m.analyses.get(analyzer)]
+      print("""<td class="Count">%s</td>"""
+          % self.make_annotation_link(analyses, modules))
+
+    print("      </tr>")
+
+  def make_annotation_link(self, analysis, modules):
+    if analysis:
+      return "<a href='javascript:update_details(%d)'>%s</a>" % (
+        self.annotations.Add(analysis, modules),
+        len(analysis)
+      )
+    else:
+      return ""
+
+class CsvProcessor(object):
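+  """Writes the analysis as CSV to stdout, one row per (makefile, module) pair."""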
+  def __init__(self, args, soong, all_makefiles):
+    self.args = args
+    self.soong = soong
+    self.all_makefiles = all_makefiles
+
+  def execute(self):
+    csvout = csv.writer(sys.stdout)
+
+    # Title row
+    row = ["Filename", "Module", "Partitions", "Easy", "Unblocked Clean", "Unblocked",
+           "Blocked", "Clean"]
+    for analyzer in ANALYZERS:
+      row.append(analyzer.title)
+    csvout.writerow(row)
+
+    # Makefile & module data
+    for filename in sorted(self.all_makefiles.keys()):
+      makefile = self.all_makefiles[filename]
+      for module in self.soong.reverse_makefiles[filename]:
+        row = [filename, module]
+        # Partitions
+        row.append(";".join(sorted(set([get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT,
+                                         installed)
+                                        for installed
+                                        in self.soong.reverse_installed.get(module, [])]))))
+        # Easy
+        row.append(1
+            if clean_and_only_blocked_by_clean(self.soong, self.all_makefiles, makefile)
+            else "")
+        # Unblocked Clean
+        row.append(1
+            if (self.soong.contains_unblocked_modules(makefile.filename) and is_clean(makefile))
+            else "")
+        # Unblocked
+        row.append(1 if self.soong.contains_unblocked_modules(makefile.filename) else "")
+        # Blocked
+        row.append(1 if self.soong.contains_blocked_modules(makefile.filename) else "")
+        # Clean
+        row.append(1 if is_clean(makefile) else "")
+        # Analysis
+        for analyzer in ANALYZERS:
+          row.append(1 if makefile.analyses.get(analyzer) else "")
+        # Write results
+        csvout.writerow(row)
 
 if __name__ == "__main__":
   main()
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 7f727fb..45e0514 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -272,7 +272,9 @@
         "bsdiff",
         "imgdiff",
         "minigzip",
+        "lz4",
         "mkbootfs",
+        "signapk",
     ],
 }
 
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index a1f8e31..eb041ec 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -739,6 +739,18 @@
   common.ZipClose(output_zip)
 
 
+def HasPartition(partition_name):
+  """Determines if the target files archive should build a given partition."""
+
+  return ((os.path.isdir(
+      os.path.join(OPTIONS.input_tmp, partition_name.upper())) and
+           OPTIONS.info_dict.get(
+               "building_{}_image".format(partition_name)) == "true") or
+          os.path.exists(
+              os.path.join(OPTIONS.input_tmp, "IMAGES",
+                           "{}.img".format(partition_name))))
+
+
 def AddImagesToTargetFiles(filename):
   """Creates and adds images (boot/recovery/system/...) to a target_files.zip.
 
@@ -767,49 +779,16 @@
   has_boot = OPTIONS.info_dict.get("no_boot") != "true"
   has_vendor_boot = OPTIONS.info_dict.get("vendor_boot") == "true"
 
-  # {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm}.img
-  # are unlike system.img or
-  # system_other.img, because it could be built from source, or  dropped into
-  # target_files.zip as a prebuilt blob. We consider either of them as
-  # {vendor,product,system_ext}.img being available, which could be
-  # used when generating vbmeta.img for AVB.
-  has_vendor = ((os.path.isdir(os.path.join(OPTIONS.input_tmp, "VENDOR")) and
-                 OPTIONS.info_dict.get("building_vendor_image") == "true") or
-                os.path.exists(
-                    os.path.join(OPTIONS.input_tmp, "IMAGES", "vendor.img")))
-  has_odm = ((os.path.isdir(os.path.join(OPTIONS.input_tmp, "ODM")) and
-              OPTIONS.info_dict.get("building_odm_image") == "true") or
-             os.path.exists(
-                 os.path.join(OPTIONS.input_tmp, "IMAGES", "odm.img")))
-  has_vendor_dlkm = ((os.path.isdir(os.path.join(OPTIONS.input_tmp,
-                                                 "VENDOR_DLKM")) and
-                      OPTIONS.info_dict.get("building_vendor_dlkm_image")
-                      == "true") or
-                     os.path.exists(
-                         os.path.join(OPTIONS.input_tmp, "IMAGES",
-                                      "vendor_dlkm.img")))
-  has_odm_dlkm = ((os.path.isdir(os.path.join(OPTIONS.input_tmp,
-                                              "ODM_DLKM")) and
-                   OPTIONS.info_dict.get("building_odm_dlkm_image")
-                   == "true") or
-                  os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
-                                              "odm_dlkm.img")))
-  has_product = ((os.path.isdir(os.path.join(OPTIONS.input_tmp, "PRODUCT")) and
-                  OPTIONS.info_dict.get("building_product_image") == "true") or
-                 os.path.exists(
-                     os.path.join(OPTIONS.input_tmp, "IMAGES", "product.img")))
-  has_system_ext = (
-      (os.path.isdir(os.path.join(OPTIONS.input_tmp, "SYSTEM_EXT")) and
-       OPTIONS.info_dict.get("building_system_ext_image") == "true") or
-      os.path.exists(
-          os.path.join(OPTIONS.input_tmp, "IMAGES", "system_ext.img")))
-  has_system = (
-      os.path.isdir(os.path.join(OPTIONS.input_tmp, "SYSTEM")) and
-      OPTIONS.info_dict.get("building_system_image") == "true")
-
-  has_system_other = (
-      os.path.isdir(os.path.join(OPTIONS.input_tmp, "SYSTEM_OTHER")) and
-      OPTIONS.info_dict.get("building_system_other_image") == "true")
+  # {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm,system,system_other}.img
+  # can be built from source, or dropped into target_files.zip as a prebuilt blob.
+  has_vendor = HasPartition("vendor")
+  has_odm = HasPartition("odm")
+  has_vendor_dlkm = HasPartition("vendor_dlkm")
+  has_odm_dlkm = HasPartition("odm_dlkm")
+  has_product = HasPartition("product")
+  has_system_ext = HasPartition("system_ext")
+  has_system = HasPartition("system")
+  has_system_other = HasPartition("system_other")
   has_userdata = OPTIONS.info_dict.get("building_userdata_image") == "true"
   has_cache = OPTIONS.info_dict.get("building_cache_image") == "true"
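
Note: HasPartition() above consolidates the per-partition checks it replaces: an image is treated as present when its extracted source directory exists and the matching "building_<partition>_image" flag is "true", or when a prebuilt IMAGES/<partition>.img was dropped into the archive. A minimal standalone sketch of that predicate, with hypothetical paths and without the real script's OPTIONS plumbing:

    import os

    def has_partition(input_tmp, info_dict, name):
      # Built from source in this target_files archive...
      built_from_source = (
          os.path.isdir(os.path.join(input_tmp, name.upper())) and
          info_dict.get("building_{}_image".format(name)) == "true")
      # ...or shipped as a prebuilt image blob.
      prebuilt = os.path.exists(
          os.path.join(input_tmp, "IMAGES", "{}.img".format(name)))
      return built_from_source or prebuilt

    # e.g. has_partition("/tmp/targetfiles", {"building_vendor_image": "true"}, "vendor")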
 
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 8783f25..3eb5196 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -177,7 +177,7 @@
 
     # Add the payload image back to the apex file.
     common.ZipDelete(self.apex_path, APEX_PAYLOAD_IMAGE)
-    with zipfile.ZipFile(self.apex_path, 'a') as output_apex:
+    with zipfile.ZipFile(self.apex_path, 'a', allowZip64=True) as output_apex:
       common.ZipWrite(output_apex, payload_img, APEX_PAYLOAD_IMAGE,
                       compress_type=zipfile.ZIP_STORED)
     return self.apex_path
@@ -351,7 +351,7 @@
   common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
   if APEX_PUBKEY in zip_items:
     common.ZipDelete(apex_file, APEX_PUBKEY)
-  apex_zip = zipfile.ZipFile(apex_file, 'a')
+  apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
   common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
   common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
   common.ZipClose(apex_zip)
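
Note: the recurring change in apex_utils.py and the files that follow is passing allowZip64=True to every zipfile.ZipFile() call, so that archives and entries larger than 4 GiB are accepted on read and written with ZIP64 extensions, rather than relying on the interpreter's default. A minimal sketch of the pattern (hypothetical helper and paths):

    import zipfile

    def append_large_entry(zip_path, src_path, arcname):
      # Append a possibly >4 GiB entry; ZIP64 extensions are explicitly allowed.
      with zipfile.ZipFile(zip_path, 'a', compression=zipfile.ZIP_STORED,
                           allowZip64=True) as zf:
        zf.write(src_path, arcname=arcname)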
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 0d990f1..58510a5 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -140,7 +140,7 @@
 
 def VerifyAbOtaPayload(cert, package):
   """Verifies the payload and metadata signatures in an A/B OTA payload."""
-  package_zip = zipfile.ZipFile(package, 'r')
+  package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
   if 'payload.bin' not in package_zip.namelist():
     common.ZipClose(package_zip)
     return
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 0edefac..3c2406c 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -252,7 +252,7 @@
     if os.path.isdir(target_files):
       return os.path.isdir(os.path.join(target_files, "VENDOR"))
     if zipfile.is_zipfile(target_files):
-      return HasPartition(zipfile.ZipFile(target_files), "vendor")
+      return HasPartition(zipfile.ZipFile(target_files, allowZip64=True), "vendor")
     raise ValueError("Unknown target_files argument")
 
   return (HasVendorPartition(target_files) and
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index c77d8c6..f5dfbec 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1568,7 +1568,7 @@
   cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
   if patterns is not None:
     # Filter out non-matching patterns. unzip will complain otherwise.
-    with zipfile.ZipFile(filename) as input_zip:
+    with zipfile.ZipFile(filename, allowZip64=True) as input_zip:
       names = input_zip.namelist()
     filtered = [
         pattern for pattern in patterns if fnmatch.filter(names, pattern)]
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 45532f5..bfd2f90 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -197,8 +197,6 @@
     'PREBUILT_IMAGES/*',
     'RADIO/*',
     'VENDOR/*',
-    'VENDOR_DLKM/*',
-    'ODM_DLKM/*',
 )
 
 # VENDOR_EXTRACT_SPECIAL_ITEM_LIST is a list of items to extract from the
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 2833397..18b2b76 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -656,7 +656,7 @@
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
 
-  with zipfile.ZipFile(input_file, 'r') as input_zip:
+  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
     infolist = input_zip.infolist()
 
   input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
@@ -719,7 +719,7 @@
     The filename of target-files.zip that doesn't contain postinstall config.
   """
   # We should only make a copy if postinstall_config entry exists.
-  with zipfile.ZipFile(input_file, 'r') as input_zip:
+  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
     if POSTINSTALL_CONFIG not in input_zip.namelist():
       return input_file
 
@@ -754,7 +754,7 @@
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   shutil.copyfile(input_file, target_file)
 
-  with zipfile.ZipFile(input_file) as input_zip:
+  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
     namelist = input_zip.namelist()
 
   input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
@@ -822,7 +822,7 @@
   else:
     staging_file = output_file
   output_zip = zipfile.ZipFile(staging_file, "w",
-                               compression=zipfile.ZIP_DEFLATED)
+                               compression=zipfile.ZIP_DEFLATED, allowZip64=True)
 
   if source_file is not None:
     assert "ab_partitions" in OPTIONS.source_info_dict, \
@@ -893,7 +893,7 @@
 
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
-  target_zip = zipfile.ZipFile(target_file, "r")
+  target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
   if (target_info.get("verity") == "true" or
           target_info.get("avb_enable") == "true"):
     care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
@@ -1069,7 +1069,7 @@
   if OPTIONS.extracted_input is not None:
     OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
   else:
-    with zipfile.ZipFile(args[0], 'r') as input_zip:
+    with zipfile.ZipFile(args[0], 'r', allowZip64=True) as input_zip:
       OPTIONS.info_dict = common.LoadInfoDict(input_zip)
 
   logger.info("--- target info ---")
@@ -1078,7 +1078,7 @@
   # Load the source build dict if applicable.
   if OPTIONS.incremental_source is not None:
     OPTIONS.target_info_dict = OPTIONS.info_dict
-    with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+    with zipfile.ZipFile(OPTIONS.incremental_source, 'r', allowZip64=True) as source_zip:
       OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
 
     logger.info("--- source info ---")
diff --git a/tools/releasetools/ota_package_parser.py b/tools/releasetools/ota_package_parser.py
index 331122b..1e733b9 100755
--- a/tools/releasetools/ota_package_parser.py
+++ b/tools/releasetools/ota_package_parser.py
@@ -215,7 +215,7 @@
   logging.basicConfig(level=logging.INFO, format=logging_format)
 
   try:
-    with zipfile.ZipFile(args.ota_package, 'r') as package:
+    with zipfile.ZipFile(args.ota_package, 'r', allowZip64=True) as package:
       package_parser = OtaPackageParser(package)
       package_parser.Analyze()
   except:
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index d444d41..f0e4fcf 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -62,7 +62,7 @@
 
   def ComputeAllPropertyFiles(input_file, needed_property_files):
     # Write the current metadata entry with placeholders.
-    with zipfile.ZipFile(input_file) as input_zip:
+    with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
       for property_files in needed_property_files:
         metadata.property_files[property_files.name] = property_files.Compute(
             input_zip)
@@ -70,7 +70,7 @@
 
     if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
       ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
-    output_zip = zipfile.ZipFile(input_file, 'a')
+    output_zip = zipfile.ZipFile(input_file, 'a', allowZip64=True)
     WriteMetadata(metadata, output_zip)
     ZipClose(output_zip)
 
@@ -82,7 +82,7 @@
     return prelim_signing
 
   def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
-    with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
+    with zipfile.ZipFile(prelim_signing, allowZip64=True) as prelim_signing_zip:
       for property_files in needed_property_files:
         metadata.property_files[property_files.name] = property_files.Finalize(
             prelim_signing_zip,
@@ -108,7 +108,7 @@
 
   # Replace the METADATA entry.
   ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
-  output_zip = zipfile.ZipFile(prelim_signing, 'a')
+  output_zip = zipfile.ZipFile(prelim_signing, 'a', allowZip64=True)
   WriteMetadata(metadata, output_zip)
   ZipClose(output_zip)
 
@@ -119,7 +119,7 @@
     SignOutput(prelim_signing, output_file)
 
   # Reopen the final signed zip to double check the streaming metadata.
-  with zipfile.ZipFile(output_file) as output_zip:
+  with zipfile.ZipFile(output_file, allowZip64=True) as output_zip:
     for property_files in needed_property_files:
       property_files.Verify(
           output_zip, metadata.property_files[property_files.name].strip())
@@ -363,7 +363,7 @@
       partition_prop_key = "{}.build.prop".format(partition)
       input_file = info_dict[partition_prop_key].input_file
       if isinstance(input_file, zipfile.ZipFile):
-        with zipfile.ZipFile(input_file.filename) as input_zip:
+        with zipfile.ZipFile(input_file.filename, allowZip64=True) as input_zip:
           info_dict[partition_prop_key] = \
               PartitionBuildProps.FromInputFile(input_zip, partition,
                                                 placeholder_values)
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index b4646b7..220f519 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -813,7 +813,7 @@
     keys: A list of public keys to use during OTA package verification.
   """
   temp_file = io.BytesIO()
-  certs_zip = zipfile.ZipFile(temp_file, "w")
+  certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
   for k in keys:
     common.ZipWrite(certs_zip, k)
   common.ZipClose(certs_zip)
@@ -1294,7 +1294,7 @@
 
   common.InitLogging()
 
-  input_zip = zipfile.ZipFile(args[0], "r")
+  input_zip = zipfile.ZipFile(args[0], "r", allowZip64=True)
   output_zip = zipfile.ZipFile(args[1], "w",
                                compression=zipfile.ZIP_DEFLATED,
                                allowZip64=True)
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index efa60b6..6b7a7db 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -93,10 +93,10 @@
 
     # Set up the output zip.
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       AddPackRadioImages(output_zip, images)
 
-    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+    with zipfile.ZipFile(output_file, 'r', allowZip64=True) as verify_zip:
       for image in images:
         self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
 
@@ -344,12 +344,12 @@
     image_paths = self._test_AddCareMapForAbOta()
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       AddCareMapForAbOta(output_zip, ['system', 'vendor'], image_paths)
 
     care_map_name = "META/care_map.pb"
     temp_dir = common.MakeTempDir()
-    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+    with zipfile.ZipFile(output_file, 'r', allowZip64=True) as verify_zip:
       self.assertTrue(care_map_name in verify_zip.namelist())
       verify_zip.extract(care_map_name, path=temp_dir)
 
@@ -367,7 +367,7 @@
     image_paths = self._test_AddCareMapForAbOta()
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       # Create an existing META/care_map.pb entry.
       common.ZipWriteStr(output_zip, 'META/care_map.pb',
                          'fake care_map.pb')
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
index 339ddc7..71f6433 100644
--- a/tools/releasetools/test_apex_utils.py
+++ b/tools/releasetools/test_apex_utils.py
@@ -174,8 +174,8 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_ApexApkSigner_noAssetDir(self):
     no_asset = common.MakeTempFile(suffix='.apex')
-    with zipfile.ZipFile(no_asset, 'w') as output_zip:
-      with zipfile.ZipFile(self.apex_with_apk, 'r') as input_zip:
+    with zipfile.ZipFile(no_asset, 'w', allowZip64=True) as output_zip:
+      with zipfile.ZipFile(self.apex_with_apk, 'r', allowZip64=True) as input_zip:
         name_list = input_zip.namelist()
         for name in name_list:
           if not name.startswith('assets'):
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 81ee53d..22fc85a 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -363,7 +363,7 @@
       self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))
 
     # Reopen the zip file to verify.
-    zip_file = zipfile.ZipFile(zip_file_name, "r")
+    zip_file = zipfile.ZipFile(zip_file_name, "r", allowZip64=True)
 
     # Verify the timestamp.
     info = zip_file.getinfo(arcname)
@@ -399,7 +399,7 @@
       arcname = arcname[1:]
 
     zip_file.close()
-    zip_file = zipfile.ZipFile(zip_file_name, "w")
+    zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
 
     try:
       sha1_hash = sha1()
@@ -431,7 +431,7 @@
     zip_file_name = zip_file.name
     zip_file.close()
 
-    zip_file = zipfile.ZipFile(zip_file_name, "w")
+    zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
 
     try:
       expected_compress_type = extra_args.get("compress_type",
@@ -475,7 +475,7 @@
       arcname_large = arcname_large[1:]
 
     zip_file.close()
-    zip_file = zipfile.ZipFile(zip_file_name, "w")
+    zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
 
     try:
       sha1_hash = sha1()
@@ -599,7 +599,7 @@
 
     try:
       random_string = os.urandom(1024)
-      zip_file = zipfile.ZipFile(zip_file_name, "w")
+      zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
       # Default perms should be 0o644 when passing the filename.
       common.ZipWriteStr(zip_file, "foo", random_string)
       # Honor the specified perms.
@@ -644,7 +644,7 @@
 
     try:
       common.ZipDelete(zip_file.name, 'Test2')
-      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
         entries = check_zip.namelist()
         self.assertTrue('Test1' in entries)
         self.assertFalse('Test2' in entries)
@@ -652,21 +652,21 @@
 
       self.assertRaises(
           common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
-      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
         entries = check_zip.namelist()
         self.assertTrue('Test1' in entries)
         self.assertFalse('Test2' in entries)
         self.assertTrue('Test3' in entries)
 
       common.ZipDelete(zip_file.name, ['Test3'])
-      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
         entries = check_zip.namelist()
         self.assertTrue('Test1' in entries)
         self.assertFalse('Test2' in entries)
         self.assertFalse('Test3' in entries)
 
       common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
-      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
         entries = check_zip.namelist()
         self.assertFalse('Test1' in entries)
         self.assertFalse('Test2' in entries)
@@ -834,7 +834,7 @@
     if additional is None:
       additional = []
     target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
       for entry in additional:
         target_files_zip.writestr(entry, '')
@@ -842,7 +842,7 @@
 
   def test_ReadApkCerts_NoncompressedApks(self):
     target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       certmap, ext = common.ReadApkCerts(input_zip)
 
     self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
@@ -855,7 +855,7 @@
         self.APKCERTS_TXT2,
         ['Compressed1.apk.gz', 'Compressed3.apk'])
 
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       certmap, ext = common.ReadApkCerts(input_zip)
 
     self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
@@ -865,7 +865,7 @@
     target_files = self._write_apkcerts_txt(
         self.APKCERTS_TXT3, ['Compressed4.apk.xz'])
 
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       certmap, ext = common.ReadApkCerts(input_zip)
 
     self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
@@ -876,7 +876,7 @@
         self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
         ['Compressed1.apk.gz', 'Compressed3.apk'])
 
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       certmap, ext = common.ReadApkCerts(input_zip)
 
     certmap_merged = self.APKCERTS_CERTMAP1.copy()
@@ -889,7 +889,7 @@
         self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
         ['Compressed1.apk.gz', 'Compressed4.apk.xz'])
 
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
 
   def test_ReadApkCerts_MismatchingKeys(self):
@@ -899,12 +899,12 @@
     )
     target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)
 
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
 
   def test_ReadApkCerts_WithWithoutOptionalFields(self):
     target_files = self._write_apkcerts_txt(self.APKCERTS_TXT4)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       certmap, ext = common.ReadApkCerts(input_zip)
 
     self.assertDictEqual(self.APKCERTS_CERTMAP4, certmap)
@@ -973,7 +973,7 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_emptyBlockMapFile(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.write(
           test_utils.construct_sparse_image([
               (0xCAC1, 6),
@@ -985,7 +985,7 @@
       target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
 
     self.assertDictEqual(
@@ -1006,7 +1006,7 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_missingBlockMapFile(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.write(
           test_utils.construct_sparse_image([
               (0xCAC1, 6),
@@ -1017,7 +1017,7 @@
       target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       self.assertRaises(
           AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
           False)
@@ -1026,7 +1026,7 @@
   def test_GetSparseImage_sharedBlocks_notAllowed(self):
     """Tests the case of having overlapping blocks but disallowed."""
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.write(
           test_utils.construct_sparse_image([(0xCAC2, 16)]),
           arcname='IMAGES/system.img')
@@ -1040,7 +1040,7 @@
       target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       self.assertRaises(
           AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
           False)
@@ -1049,7 +1049,7 @@
   def test_GetSparseImage_sharedBlocks_allowed(self):
     """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       # Construct an image with a care_map of "0-5 9-12".
       target_files_zip.write(
           test_utils.construct_sparse_image([(0xCAC2, 16)]),
@@ -1064,7 +1064,7 @@
       target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)
 
     self.assertDictEqual(
@@ -1094,7 +1094,7 @@
   def test_GetSparseImage_incompleteRanges(self):
     """Tests the case of ext4 images with holes."""
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.write(
           test_utils.construct_sparse_image([(0xCAC2, 16)]),
           arcname='IMAGES/system.img')
@@ -1108,7 +1108,7 @@
       target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
 
     self.assertEqual(
@@ -1119,7 +1119,7 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.write(
           test_utils.construct_sparse_image([(0xCAC2, 16)]),
           arcname='IMAGES/system.img')
@@ -1136,7 +1136,7 @@
       target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
 
     self.assertEqual(
@@ -1149,7 +1149,7 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.write(
           test_utils.construct_sparse_image([(0xCAC2, 16)]),
           arcname='IMAGES/system.img')
@@ -1163,7 +1163,7 @@
       target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
 
     self.assertEqual(
@@ -1174,7 +1174,7 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_fileNotFound(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.write(
           test_utils.construct_sparse_image([(0xCAC2, 16)]),
           arcname='IMAGES/system.img')
@@ -1186,7 +1186,7 @@
       target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
 
     tempdir = common.UnzipTemp(target_files)
-    with zipfile.ZipFile(target_files, 'r') as input_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
       self.assertRaises(
           AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
           False)
@@ -1274,7 +1274,7 @@
   @staticmethod
   def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       info_values = ''.join(
           ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
       common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
@@ -1294,7 +1294,7 @@
     target_files = self._test_LoadInfoDict_createTargetFiles(
         self.INFO_DICT_DEFAULT,
         'BOOT/RAMDISK/system/etc/recovery.fstab')
-    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
       loaded_dict = common.LoadInfoDict(target_files_zip)
       self.assertEqual(3, loaded_dict['recovery_api_version'])
       self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1305,7 +1305,7 @@
     target_files = self._test_LoadInfoDict_createTargetFiles(
         self.INFO_DICT_DEFAULT,
         'BOOT/RAMDISK/etc/recovery.fstab')
-    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
       loaded_dict = common.LoadInfoDict(target_files_zip)
       self.assertEqual(3, loaded_dict['recovery_api_version'])
       self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1346,7 +1346,7 @@
     target_files = self._test_LoadInfoDict_createTargetFiles(
         info_dict,
         'RECOVERY/RAMDISK/system/etc/recovery.fstab')
-    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
       loaded_dict = common.LoadInfoDict(target_files_zip)
       self.assertEqual(3, loaded_dict['recovery_api_version'])
       self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1362,7 +1362,7 @@
     target_files = self._test_LoadInfoDict_createTargetFiles(
         info_dict,
         'RECOVERY/RAMDISK/system/etc/recovery.fstab')
-    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
       loaded_dict = common.LoadInfoDict(target_files_zip)
       self.assertEqual(3, loaded_dict['recovery_api_version'])
       self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1376,7 +1376,7 @@
     target_files = self._test_LoadInfoDict_createTargetFiles(
         info_dict,
         'RECOVERY/RAMDISK/system/etc/recovery.fstab')
-    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
       loaded_dict = common.LoadInfoDict(target_files_zip)
       self.assertEqual(3, loaded_dict['recovery_api_version'])
       self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1388,7 +1388,7 @@
         self.INFO_DICT_DEFAULT,
         'BOOT/RAMDISK/system/etc/recovery.fstab')
     common.ZipDelete(target_files, 'META/misc_info.txt')
-    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
       self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -1412,7 +1412,7 @@
     target_files = self._test_LoadInfoDict_createTargetFiles(
         self.INFO_DICT_DEFAULT,
         'BOOT/RAMDISK/system/etc/recovery.fstab')
-    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
       self.assertRaises(
           AssertionError, common.LoadInfoDict, target_files_zip, True)
 
@@ -1704,7 +1704,7 @@
 
   @staticmethod
   def get_op_list(output_path):
-    with zipfile.ZipFile(output_path) as output_zip:
+    with zipfile.ZipFile(output_path, allowZip64=True) as output_zip:
       with output_zip.open('dynamic_partitions_op_list') as op_list:
         return [line.decode().strip() for line in op_list.readlines()
                 if not line.startswith(b'#')]
@@ -1724,7 +1724,7 @@
                    MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]
 
     dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
-    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
       dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
 
     self.assertEqual(str(self.script).strip(), """
@@ -1772,7 +1772,7 @@
     dp_diff = common.DynamicPartitionsDifference(target_info,
                                                  block_diffs=[],
                                                  source_info_dict=source_info)
-    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
       dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
 
     lines = self.get_op_list(self.output_path)
@@ -1816,7 +1816,7 @@
 
     dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                  source_info_dict=source_info)
-    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
       dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
 
     metadata_idx = self.script.lines.index(
@@ -1887,7 +1887,7 @@
 
     dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                  source_info_dict=source_info)
-    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
       dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
 
     self.assertNotIn("block_image_update", str(self.script),
@@ -1910,7 +1910,7 @@
   @staticmethod
   def _BuildZipFile(entries):
     input_file = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-    with zipfile.ZipFile(input_file, 'w') as input_zip:
+    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
       for name, content in entries.items():
         input_zip.writestr(name, content)
 
@@ -1927,7 +1927,7 @@
         'ODM/etc/build.prop': '\n'.join(build_prop),
     })
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       placeholder_values = {
           'ro.boot.product.device_name': ['std', 'pro']
       }
@@ -1959,7 +1959,7 @@
         'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
     })
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       placeholder_values = {
           'ro.boot.product.device_name': 'std'
       }
@@ -1974,7 +1974,7 @@
       'ro.product.odm.name': 'product1',
     }, partition_props.build_props)
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       placeholder_values = {
           'ro.boot.product.device_name': 'pro'
       }
@@ -1995,7 +1995,7 @@
         'ODM/etc/build.prop': '\n'.join(build_prop),
     })
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       partition_props = common.PartitionBuildProps.FromInputFile(
           input_zip, 'odm')
 
@@ -2038,7 +2038,7 @@
         'ODM/etc/build_product2.prop': '\n'.join(product2_prop),
     })
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       placeholder_values = {
           'ro.boot.product.device_name': 'std',
           'ro.boot.product.product_name': 'product1',
@@ -2055,7 +2055,7 @@
         'ro.product.odm.name': 'product1'
     }, partition_props.build_props)
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       placeholder_values = {
           'ro.boot.product.device_name': 'pro',
           'ro.boot.product.product_name': 'product2',
@@ -2089,7 +2089,7 @@
         'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
     })
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       placeholder_values = {
           'ro.boot.product.device_name': 'std',
       }
@@ -2126,7 +2126,7 @@
         'ODM/etc/build_product2.prop': '\n'.join(product2_prop),
     })
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       placeholder_values = {
           'ro.boot.product.device_name': 'std',
           'ro.boot.product.product_name': 'product1',
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 84cd4c8..f96bc7b 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -37,7 +37,7 @@
 def construct_target_files(secondary=False):
   """Returns a target-files.zip file for generating OTA packages."""
   target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
-  with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+  with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
     # META/update_engine_config.txt
     target_files_zip.writestr(
         'META/update_engine_config.txt',
@@ -417,7 +417,7 @@
         'super_google_dynamic_partitions_partition_list=system vendor product',
     ])
 
-    with zipfile.ZipFile(input_file, 'a') as append_zip:
+    with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
       common.ZipWriteStr(append_zip, 'META/misc_info.txt', misc_info)
       common.ZipWriteStr(append_zip, 'META/dynamic_partitions_info.txt',
                          dynamic_partitions_info)
@@ -472,7 +472,7 @@
     zip_file = PropertyFilesTest.construct_zip_package(entries)
     # Add a large entry of 1 GiB if requested.
     if large_entry:
-      with zipfile.ZipFile(zip_file, 'a') as zip_fp:
+      with zipfile.ZipFile(zip_file, 'a', allowZip64=True) as zip_fp:
         zip_fp.writestr(
             # Using 'zoo' so that the entry stays behind others after signing.
             'zoo',
@@ -514,7 +514,7 @@
         'optional-entry2',
     ]
     zip_file = PropertyFilesTest.construct_zip_package(entries)
-    with zipfile.ZipFile(zip_file, 'a') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'a', allowZip64=True) as zip_fp:
       zip_fp.writestr(
           # 'foo-entry1' will appear ahead of all other entries (in alphabetical
           # order) after the signing, which will in turn trigger the
@@ -558,7 +558,7 @@
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
@@ -574,7 +574,7 @@
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
@@ -587,7 +587,7 @@
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       self.assertRaises(KeyError, property_files.Compute, zip_fp)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -600,7 +600,7 @@
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
       streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
@@ -625,7 +625,7 @@
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       # First get the raw metadata string (i.e. without padding space).
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
@@ -660,7 +660,7 @@
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       # First get the raw metadata string (i.e. without padding space).
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
@@ -702,7 +702,7 @@
     )
     zip_file = self.construct_zip_package(entries)
     property_files = StreamingPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
@@ -720,7 +720,7 @@
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = StreamingPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
       streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
@@ -744,7 +744,7 @@
     )
     zip_file = self.construct_zip_package(entries)
     property_files = StreamingPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       # First get the raw metadata string (i.e. without padding space).
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
@@ -802,7 +802,7 @@
     payload.Sign(payload_signer)
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       payload.WriteToZip(output_zip)
 
     # Find out the payload metadata offset and size.
@@ -867,7 +867,7 @@
     payload.Sign(payload_signer)
 
     zip_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(zip_file, 'w') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'w', allowZip64=True) as zip_fp:
       # 'payload.bin',
       payload.WriteToZip(zip_fp)
 
@@ -889,7 +889,7 @@
   def test_Compute(self):
     zip_file = self.construct_zip_package_withValidPayload()
     property_files = AbOtaPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
@@ -903,7 +903,7 @@
   def test_Finalize(self):
     zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
     property_files = AbOtaPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
       property_files_string = property_files.Finalize(
@@ -920,7 +920,7 @@
   def test_Verify(self):
     zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
     property_files = AbOtaPropertyFiles()
-    with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
 
@@ -1087,7 +1087,7 @@
     payload.Sign(PayloadSigner())
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       payload.WriteToZip(output_zip)
 
     import check_ota_package_signature
@@ -1101,7 +1101,7 @@
     payload.Sign(PayloadSigner())
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       payload.WriteToZip(output_zip)
 
     import check_ota_package_signature
@@ -1140,7 +1140,7 @@
     payload.Sign(PayloadSigner())
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       payload.WriteToZip(output_zip)
 
     with zipfile.ZipFile(output_file) as verify_zip:
@@ -1162,14 +1162,14 @@
     payload = self._create_payload_full()
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
 
     # Also test with incremental payload.
     payload = self._create_payload_incremental()
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -1178,7 +1178,7 @@
     payload.Sign(PayloadSigner())
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       payload.WriteToZip(output_zip)
 
     with zipfile.ZipFile(output_file) as verify_zip:
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 308172f..18e4858 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -164,15 +164,15 @@
         "veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
 
     input_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'w') as input_zip:
+    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
       input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
 
     # Test with the first certificate.
     cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'r') as input_zip, \
-         zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
+         zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       ReplaceVerityKeyId(input_zip, output_zip, cert_file)
 
     with zipfile.ZipFile(output_file) as output_zip:
@@ -181,8 +181,8 @@
     # Test with the second certificate.
     cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
 
-    with zipfile.ZipFile(input_file, 'r') as input_zip, \
-         zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
+         zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       ReplaceVerityKeyId(input_zip, output_zip, cert_file)
 
     with zipfile.ZipFile(output_file) as output_zip:
@@ -195,12 +195,12 @@
         "loop.max_part=7\n")
 
     input_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'w') as input_zip:
+    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
       input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
 
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'r') as input_zip, \
-         zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
+         zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       ReplaceVerityKeyId(input_zip, output_zip, None)
 
     with zipfile.ZipFile(output_file) as output_zip:
@@ -284,7 +284,7 @@
     ]
     entry_name = 'SYSTEM/etc/security/otacerts.zip'
     output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w') as output_zip:
+    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
       WriteOtacerts(output_zip, entry_name, certs)
     with zipfile.ZipFile(output_file) as input_zip:
       self.assertIn(entry_name, input_zip.namelist())
@@ -294,7 +294,7 @@
 
   def test_CheckApkAndApexKeysAvailable(self):
     input_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'w') as input_zip:
+    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
       input_zip.writestr('SYSTEM/app/App1.apk', "App1-content")
       input_zip.writestr('SYSTEM/app/App2.apk.gz', "App2-content")
 
@@ -318,7 +318,7 @@
 
   def test_CheckApkAndApexKeysAvailable_invalidApexKeys(self):
     input_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'w') as input_zip:
+    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
       input_zip.writestr('SYSTEM/apex/Apex1.apex', "Apex1-content")
       input_zip.writestr('SYSTEM/apex/Apex2.apex', "Apex2-content")
 
@@ -466,10 +466,10 @@
 
   def test_ReadApexKeysInfo(self):
     target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
 
-    with zipfile.ZipFile(target_files) as target_files_zip:
+    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
       keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
@@ -491,10 +491,10 @@
         'container_private_key="build/make/target/product/security/testkey2.pk8" '
         'partition="system"')
     target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
-    with zipfile.ZipFile(target_files) as target_files_zip:
+    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
       self.assertRaises(ValueError, ReadApexKeysInfo, target_files_zip)
 
   def test_ReadApexKeysInfo_missingPayloadPrivateKey(self):
@@ -505,10 +505,10 @@
         'container_certificate="build/make/target/product/security/testkey.x509.pem" '
         'container_private_key="build/make/target/product/security/testkey.pk8"')
     target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
-    with zipfile.ZipFile(target_files) as target_files_zip:
+    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
       keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
@@ -528,10 +528,10 @@
         'container_certificate="build/make/target/product/security/testkey.x509.pem" '
         'container_private_key="build/make/target/product/security/testkey.pk8"')
     target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
-    with zipfile.ZipFile(target_files) as target_files_zip:
+    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
       keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
@@ -551,10 +551,10 @@
         'container_certificate="PRESIGNED" '
         'container_private_key="PRESIGNED"')
     target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
-    with zipfile.ZipFile(target_files) as target_files_zip:
+    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
       keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
@@ -574,10 +574,10 @@
         'container_certificate="PRESIGNED" '
         'container_private_key="PRESIGNED"')
     target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
-    with zipfile.ZipFile(target_files) as target_files_zip:
+    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
       keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index 7b7f22a..ccd97a9 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -199,7 +199,7 @@
   @staticmethod
   def construct_zip_package(entries):
     zip_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(zip_file, 'w') as zip_fp:
+    with zipfile.ZipFile(zip_file, 'w', allowZip64=True) as zip_fp:
       for entry in entries:
         zip_fp.writestr(
             entry,
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index ca70ca8..e9896ae 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -272,7 +272,7 @@
     input_file = common.MakeTempFile()
     all_entries = ['SYSTEM/', 'SYSTEM/b', 'SYSTEM/a', 'IMAGES/',
                    'IMAGES/system.map', 'IMAGES/system.img']
-    with zipfile.ZipFile(input_file, 'w') as input_zip:
+    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
       for name in all_entries:
         input_zip.write(os.path.join(input_tmp, name), arcname=name)
 
@@ -321,7 +321,7 @@
     input_file = common.MakeTempFile()
     all_entries = ['SYSTEM/', 'SYSTEM/abc', 'IMAGES/',
                    'IMAGES/system.map', 'IMAGES/system.img']
-    with zipfile.ZipFile(input_file, 'w') as input_zip:
+    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
       for name in all_entries:
         input_zip.write(os.path.join(input_tmp, name), arcname=name)
 
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 2e3aa74..d2178b2 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -487,7 +487,7 @@
   input_tmp = common.UnzipTemp(args.target_files)
 
   info_dict = common.LoadInfoDict(input_tmp)
-  with zipfile.ZipFile(args.target_files, 'r') as input_zip:
+  with zipfile.ZipFile(args.target_files, 'r', allowZip64=True) as input_zip:
     ValidateFileConsistency(input_zip, input_tmp, info_dict)
 
   CheckBuildPropDuplicity(input_tmp)
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index 95ef05f..7e5c8fc 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -41,6 +41,7 @@
 import com.android.apksig.apk.ApkUtils;
 import com.android.apksig.apk.MinSdkVersionException;
 import com.android.apksig.util.DataSink;
+import com.android.apksig.util.DataSource;
 import com.android.apksig.util.DataSources;
 import com.android.apksig.zip.ZipFormatException;
 
@@ -57,6 +58,7 @@
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.RandomAccessFile;
 import java.lang.reflect.Constructor;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
@@ -1021,9 +1023,10 @@
                            "[-providerClass <className>] " +
                            "[--min-sdk-version <n>] " +
                            "[--disable-v2] " +
+                           "[--enable-v4] " +
                            "publickey.x509[.pem] privatekey.pk8 " +
                            "[publickey2.x509[.pem] privatekey2.pk8 ...] " +
-                           "input.jar output.jar");
+                           "input.jar output.jar [output-v4-file]");
         System.exit(2);
     }
 
@@ -1043,6 +1046,7 @@
         int alignment = 4;
         Integer minSdkVersionOverride = null;
         boolean signUsingApkSignatureSchemeV2 = true;
+        boolean signUsingApkSignatureSchemeV4 = false;
         SigningCertificateLineage certLineage = null;
 
         int argstart = 0;
@@ -1071,6 +1075,9 @@
             } else if ("--disable-v2".equals(args[argstart])) {
                 signUsingApkSignatureSchemeV2 = false;
                 ++argstart;
+            } else if ("--enable-v4".equals(args[argstart])) {
+                signUsingApkSignatureSchemeV4 = true;
+                ++argstart;
             } else if ("--lineage".equals(args[argstart])) {
                 File lineageFile = new File(args[++argstart]);
                 try {
@@ -1085,8 +1092,14 @@
             }
         }
 
-        if ((args.length - argstart) % 2 == 1) usage();
-        int numKeys = ((args.length - argstart) / 2) - 1;
+        int numArgsExcludeV4FilePath;
+        if (signUsingApkSignatureSchemeV4) {
+            numArgsExcludeV4FilePath = args.length - 1;
+        } else {
+            numArgsExcludeV4FilePath = args.length;
+        }
+        if ((numArgsExcludeV4FilePath - argstart) % 2 == 1) usage();
+        int numKeys = ((numArgsExcludeV4FilePath - argstart) / 2) - 1;
         if (signWholeFile && numKeys > 1) {
             System.err.println("Only one key may be used with -w.");
             System.exit(2);
@@ -1094,8 +1107,12 @@
 
         loadProviderIfNecessary(providerClass);
 
-        String inputFilename = args[args.length-2];
-        String outputFilename = args[args.length-1];
+        String inputFilename = args[numArgsExcludeV4FilePath - 2];
+        String outputFilename = args[numArgsExcludeV4FilePath - 1];
+        String outputV4Filename = "";
+        if (signUsingApkSignatureSchemeV4) {
+            outputV4Filename = args[args.length - 1];
+        }
 
         JarFile inputJar = null;
         FileOutputStream outputFile = null;
@@ -1233,6 +1250,13 @@
                     outputFile.close();
                     outputFile = null;
                     apkSigner.outputDone();
+
+                    if (signUsingApkSignatureSchemeV4) {
+                        final DataSource outputApkIn = DataSources.asDataSource(
+                                new RandomAccessFile(new File(outputFilename), "r"));
+                        final File outputV4File = new File(outputV4Filename);
+                        apkSigner.signV4(outputApkIn, outputV4File, false /* ignore failures */);
+                    }
                 }
 
                 return;
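
With the changes above, SignApk accepts an optional trailing output-v4-file argument only when --enable-v4 is given, so the key-pair count and the input/output positions are computed against the argument count minus that optional entry. A rough Python sketch of the same positional accounting, assuming the option flags have already been consumed (the helper name is illustrative, not part of SignApk):

    def split_signapk_positionals(args, enable_v4):
        # When --enable-v4 is passed, the last argument is the v4 signature output file.
        n = len(args) - 1 if enable_v4 else len(args)
        if n % 2 == 1:
            raise SystemExit('usage: ... input.jar output.jar [output-v4-file]')
        num_keys = n // 2 - 1          # each key is a cert/key pair before input.jar
        input_jar, output_jar = args[n - 2], args[n - 1]
        output_v4 = args[-1] if enable_v4 else None
        return num_keys, input_jar, output_jar, output_v4

For example, ['pub.x509.pem', 'priv.pk8', 'in.jar', 'out.jar', 'out.jar.idsig'] with enable_v4=True yields one key pair, 'in.jar'/'out.jar', and 'out.jar.idsig' as the v4 output.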
diff --git a/tools/warn/android_project_list.py b/tools/warn/android_project_list.py
index 4726fa2..1010b24 100644
--- a/tools/warn/android_project_list.py
+++ b/tools/warn/android_project_list.py
@@ -102,6 +102,7 @@
     create_pattern('ndk'),
     # match vendor/unbundled_google/packages before other packages
     create_pattern('unbundled_google'),
+    create_pattern('packages/providers/MediaProvider'),
     create_pattern('packages'),
     create_pattern('pdk'),
     create_pattern('prebuilts'),
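
packages/providers/MediaProvider is inserted ahead of the generic 'packages' entry because the warning categorizer appears to assign each path to the first project pattern that matches, so the more specific prefix has to come first (the same reason unbundled_google precedes 'packages'). A small first-match sketch, with an illustrative matcher standing in for the real create_pattern helper:

    import re

    # Ordered (name, prefix regex) pairs; order is significant.
    project_patterns = [
        ('MediaProvider', re.compile(r'packages/providers/MediaProvider')),
        ('packages', re.compile(r'packages')),
    ]

    def classify(path):
        # The first pattern that matches wins, so specific entries must precede broad ones.
        for name, pattern in project_patterns:
            if pattern.match(path):
                return name
        return 'unknown'

    print(classify('packages/providers/MediaProvider/src/Foo.java'))  # MediaProvider
    print(classify('packages/apps/Settings/res/values/strings.xml'))  # packages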
diff --git a/tools/warn/cpp_warn_patterns.py b/tools/warn/cpp_warn_patterns.py
index 65ce73a..e8783bc 100644
--- a/tools/warn/cpp_warn_patterns.py
+++ b/tools/warn/cpp_warn_patterns.py
@@ -155,6 +155,7 @@
            [r".*: warning: unknown attribute '.+'"]),
     medium('Attribute ignored',
            [r".*: warning: '_*packed_*' attribute ignored",
+            r".*: warning: .* not supported .*Wignored-attributes",
             r".*: warning: attribute declaration must precede definition .+ignored-attributes"]),
     medium('Visibility problem',
            [r".*: warning: declaration of '.+' will not be visible outside of this function"]),
@@ -251,6 +252,8 @@
            [r".*: warning: taking address of temporary"]),
     medium('Taking address of packed member',
            [r".*: warning: taking address of packed member"]),
+    medium('Pack alignment value is modified',
+           [r".*: warning: .*#pragma pack alignment value is modified.*Wpragma-pack.*"]),
     medium('Possible broken line continuation',
            [r".*: warning: backslash and newline separated by space"]),
     medium('Undefined variable template',
@@ -332,7 +335,7 @@
              [r".*: warning: extra tokens at end of #endif directive"]),
     medium('Comparison between different enums',
            [r".*: warning: comparison between '.+' and '.+'.+Wenum-compare",
-            r".*: warning: comparison of .* enumeration types .*-Wenum-compare-switch"]),
+            r".*: warning: comparison of .* enumeration types .*-Wenum-compare.*"]),
     medium('Conversion may change value',
            [r".*: warning: converting negative value '.+' to '.+'",
             r".*: warning: conversion to '.+' .+ may (alter|change)"]),
@@ -396,6 +399,8 @@
          r".*: warning: absolute value function '.+' given .+ which may cause truncation .+Wabsolute-value"]),
     low('Using C++11 extensions',
         [r".*: warning: 'auto' type specifier is a C\+\+11 extension"]),
+    low('Using C++17 extensions',
+        [r".*: warning: .* a C\+\+17 extension .+Wc\+\+17-extensions"]),
     low('Refers to implicitly defined namespace',
         [r".*: warning: using directive refers to implicitly-defined namespace .+"]),
     low('Invalid pp token',
@@ -437,8 +442,10 @@
            [r".*: warning: unannotated fall-through between switch labels.+Wimplicit-fallthrough"]),
     medium('Invalid partial specialization',
            [r".*: warning: class template partial specialization.+Winvalid-partial-specialization"]),
-    medium('Overlapping compatisons',
+    medium('Overlapping comparisons',
            [r".*: warning: overlapping comparisons.+Wtautological-overlap-compare"]),
+    medium('bitwise comparison',
+           [r".*: warning: bitwise comparison.+Wtautological-bitwise-compare"]),
     medium('int in bool context',
            [r".*: warning: converting.+to a boolean.+Wint-in-bool-context"]),
     medium('bitwise conditional parentheses',
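
The new C/C++ entries are plain Python regexes tested against individual compiler-warning lines; the added -Wpragma-pack pattern, for instance, should classify clang output shaped like the sample below (the warning text here is illustrative, not taken from a real build log):

    import re

    pattern = re.compile(
        r".*: warning: .*#pragma pack alignment value is modified.*Wpragma-pack.*")

    sample = ("external/foo/src/foo.c:42:9: warning: the current #pragma pack alignment "
              "value is modified in the included file [-Wpragma-pack]")

    assert pattern.match(sample) is not None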
diff --git a/tools/warn/java_warn_patterns.py b/tools/warn/java_warn_patterns.py
index 17e3864..ac1ed5d 100644
--- a/tools/warn/java_warn_patterns.py
+++ b/tools/warn/java_warn_patterns.py
@@ -486,6 +486,7 @@
                 [r'.*\.java:.*: warning: \[static\] static method should be qualified']),
     medium('AbstractInner'),
     medium('BothPackageInfoAndHtml'),
+    medium('BuilderSetStyle'),
     medium('CallbackName'),
     medium('ExecutorRegistration'),
     medium('HiddenTypeParameter'),
@@ -493,9 +494,11 @@
     medium('ListenerLast'),
     medium('MinMaxConstant'),
     medium('MissingBuildMethod'),
+    medium('MissingGetterMatchingBuilder'),
     medium('NoByteOrShort'),
     medium('OverlappingConstants'),
     medium('SetterReturnsThis'),
+    medium('StaticFinalBuilder'),
     medium('StreamFiles'),
     medium('Typo'),
     medium('UseIcu'),
diff --git a/tools/warn/other_warn_patterns.py b/tools/warn/other_warn_patterns.py
index 318c3d4..8df5b87 100644
--- a/tools/warn/other_warn_patterns.py
+++ b/tools/warn/other_warn_patterns.py
@@ -143,6 +143,8 @@
     # Yacc warnings
     yacc('deprecate directive',
          [r".*\.yy?:.*: warning: deprecated directive: "]),
+    yacc('reduce/reduce conflicts',
+         [r".*\.yy?: warning: .+ reduce/reduce conflicts "]),
     yacc('shift/reduce conflicts',
          [r".*\.yy?: warning: .+ shift/reduce conflicts "]),
     {'category': 'yacc', 'severity': Severity.SKIP,