Merge "Add the cts-platform-version check"
diff --git a/core/Makefile b/core/Makefile
index f6411ec..4b2a331 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -487,6 +487,12 @@
   endif
 endif
 
+ifneq ($(BOARD_DO_NOT_STRIP_RECOVERY_MODULES),true)
+	RECOVERY_STRIPPED_MODULE_STAGING_DIR := $(call intermediates-dir-for,PACKAGING,depmod_recovery_stripped)
+else
+	RECOVERY_STRIPPED_MODULE_STAGING_DIR :=
+endif
+
 ifneq ($(BOARD_DO_NOT_STRIP_VENDOR_MODULES),true)
 	VENDOR_STRIPPED_MODULE_STAGING_DIR := $(call intermediates-dir-for,PACKAGING,depmod_vendor_stripped)
 else
@@ -501,7 +507,7 @@
 
 BOARD_KERNEL_MODULE_DIRS += top
 $(foreach kmd,$(BOARD_KERNEL_MODULE_DIRS), \
-  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,RECOVERY,$(TARGET_RECOVERY_ROOT_OUT),,modules.load.recovery,,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,RECOVERY,$(TARGET_RECOVERY_ROOT_OUT),,modules.load.recovery,$(RECOVERY_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
   $(eval vendor_ramdisk_fragment := $(KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(kmd))) \
   $(if $(vendor_ramdisk_fragment), \
     $(eval output_dir := $(VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR)) \
@@ -1595,6 +1601,18 @@
 
 endif # PRODUCT_USE_DYNAMIC_PARTITIONS
 
+# $(1) the partition variable component (e.g. SYSTEM)
+# $(2) the partition prefix in properties (e.g. system)
+# $(3) the image prop file
+# $(4) optional "other" size
+define add-sparse-flags-to-image-props
+$(eval _size := $(BOARD_$(1)IMAGE_PARTITION_SIZE))
+$(eval _reserved := $(BOARD_$(1)IMAGE_PARTITION_RESERVED_SIZE))
+$(eval _headroom := $(PRODUCT_$(1)_HEADROOM))
+$(if $(or $(_size), $(_reserved), $(_headroom), $(4)),,
+    $(hide) echo "$(2)_disable_sparse=true" >> $(3))
+endef
+
 # $(1): the path of the output dictionary file
 # $(2): a subset of "system vendor cache userdata product system_ext oem odm vendor_dlkm odm_dlkm"
 # $(3): additional "key=value" pairs to append to the dictionary file.
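
For illustration only (not part of this change): a minimal Python sketch of the decision the new add-sparse-flags-to-image-props macro encodes, assuming an unset Make variable maps to an empty string here.

    # Sketch: disable sparse output only when no size constraint is configured.
    def add_sparse_flags(board_vars, partition, prefix, props, other_size=""):
        size = board_vars.get("BOARD_%sIMAGE_PARTITION_SIZE" % partition, "")
        reserved = board_vars.get("BOARD_%sIMAGE_PARTITION_RESERVED_SIZE" % partition, "")
        headroom = board_vars.get("PRODUCT_%s_HEADROOM" % partition, "")
        if not (size or reserved or headroom or other_size):
            props["%s_disable_sparse" % prefix] = "true"

    props = {}
    add_sparse_flags({}, "SYSTEM", "system", props)
    # props == {"system_disable_sparse": "true"}
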
@@ -1615,11 +1633,14 @@
     $(if $(PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
     $(if $(PRODUCT_SYSTEM_HEADROOM),$(hide) echo "system_headroom=$(PRODUCT_SYSTEM_HEADROOM)" >> $(1))
     $(if $(BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "system_reserved_size=$(BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+    $(call add-sparse-flags-to-image-props,SYSTEM,system,$(1))
     $(hide) echo "system_selinux_fc=$(SELINUX_FC)" >> $(1)
     $(hide) echo "building_system_image=$(BUILDING_SYSTEM_IMAGE)" >> $(1)
 )
 $(if $(filter $(2),system_other),\
     $(hide) echo "building_system_other_image=$(BUILDING_SYSTEM_OTHER_IMAGE)" >> $(1)
+    $(if $(INTERNAL_SYSTEM_OTHER_PARTITION_SIZE),,
+        $(hide) echo "system_other_disable_sparse=true" >> $(1))
 )
 $(if $(filter $(2),userdata),\
     $(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
@@ -1650,6 +1671,7 @@
     $(if $(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_squashfs_disable_4k_align=$(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
     $(if $(PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1))
     $(if $(BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "vendor_reserved_size=$(BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+    $(call add-sparse-flags-to-image-props,VENDOR,vendor,$(1))
     $(hide) echo "vendor_selinux_fc=$(SELINUX_FC)" >> $(1)
     $(hide) echo "building_vendor_image=$(BUILDING_VENDOR_IMAGE)" >> $(1)
 )
@@ -1667,6 +1689,7 @@
     $(if $(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "product_squashfs_disable_4k_align=$(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
     $(if $(PRODUCT_PRODUCT_BASE_FS_PATH),$(hide) echo "product_base_fs_file=$(PRODUCT_PRODUCT_BASE_FS_PATH)" >> $(1))
     $(if $(BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "product_reserved_size=$(BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+    $(call add-sparse-flags-to-image-props,PRODUCT,product,$(1))
     $(hide) echo "product_selinux_fc=$(SELINUX_FC)" >> $(1)
     $(hide) echo "building_product_image=$(BUILDING_PRODUCT_IMAGE)" >> $(1)
 )
@@ -1683,6 +1706,7 @@
     $(if $(BOARD_SYSTEM_EXTIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_ext_squashfs_block_size=$(BOARD_SYSTEM_EXTIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
     $(if $(BOARD_SYSTEM_EXTIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_ext_squashfs_disable_4k_align=$(BOARD_SYSTEM_EXTIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
     $(if $(BOARD_SYSTEM_EXTIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "system_ext_reserved_size=$(BOARD_SYSTEM_EXTIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+    $(call add-sparse-flags-to-image-props,SYSTEM_EXT,system_ext,$(1))
     $(hide) echo "system_ext_selinux_fc=$(SELINUX_FC)" >> $(1)
     $(hide) echo "building_system_ext_image=$(BUILDING_SYSTEM_EXT_IMAGE)" >> $(1)
 )
@@ -1698,6 +1722,7 @@
     $(if $(BOARD_ODMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "odm_squashfs_disable_4k_align=$(BOARD_ODMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
     $(if $(PRODUCT_ODM_BASE_FS_PATH),$(hide) echo "odm_base_fs_file=$(PRODUCT_ODM_BASE_FS_PATH)" >> $(1))
     $(if $(BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "odm_reserved_size=$(BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+    $(call add-sparse-flags-to-image-props,ODM,odm,$(1))
     $(hide) echo "odm_selinux_fc=$(SELINUX_FC)" >> $(1)
     $(hide) echo "building_odm_image=$(BUILDING_ODM_IMAGE)" >> $(1)
 )
@@ -1714,6 +1739,7 @@
     $(if $(BOARD_VENDOR_DLKMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "vendor_dlkm_squashfs_block_size=$(BOARD_VENDOR_DLKMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
     $(if $(BOARD_VENDOR_DLKMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_dlkm_squashfs_disable_4k_align=$(BOARD_VENDOR_DLKMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
     $(if $(BOARD_VENDOR_DLKMIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "vendor_dlkm_reserved_size=$(BOARD_VENDOR_DLKMIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+    $(call add-sparse-flags-to-image-props,VENDOR_DLKM,vendor_dlkm,$(1))
     $(hide) echo "vendor_dlkm_selinux_fc=$(SELINUX_FC)" >> $(1)
     $(hide) echo "building_vendor_dlkm_image=$(BUILDING_VENDOR_DLKM_IMAGE)" >> $(1)
 )
@@ -1728,6 +1754,7 @@
     $(if $(BOARD_ODM_DLKMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "odm_dlkm_squashfs_block_size=$(BOARD_ODM_DLKMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
     $(if $(BOARD_ODM_DLKMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "odm_dlkm_squashfs_disable_4k_align=$(BOARD_ODM_DLKMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
     $(if $(BOARD_ODM_DLKMIMAGE_PARTITION_RESERVED_SIZE),$(hide) echo "odm_dlkm_reserved_size=$(BOARD_ODM_DLKMIMAGE_PARTITION_RESERVED_SIZE)" >> $(1))
+    $(call add-sparse-flags-to-image-props,ODM_DLKM,odm_dlkm,$(1))
     $(hide) echo "odm_dlkm_selinux_fc=$(SELINUX_FC)" >> $(1)
     $(hide) echo "building_odm_dlkm_image=$(BUILDING_ODM_DLKM_IMAGE)" >> $(1)
 )
@@ -2403,6 +2430,7 @@
 #
 # Note: it's intentional to skip signing for boot-debug.img, because it
 # can only be used if the device is unlocked with verification error.
+ifneq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
 ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
 ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
@@ -2461,6 +2489,7 @@
 
 endif # TARGET_NO_KERNEL
 endif # INSTALLED_BOOTIMAGE_TARGET
+endif # BUILDING_VENDOR_BOOT_IMAGE is not true
 
 ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
 ifeq ($(BUILDING_RAMDISK_IMAGE),true)
@@ -2606,6 +2635,7 @@
 #
 # Note: it's intentional to skip signing for boot-test-harness.img, because it
 # can only be used if the device is unlocked with verification error.
+ifneq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
 ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
 
@@ -2648,6 +2678,7 @@
 
 endif # TARGET_NO_KERNEL
 endif # INSTALLED_BOOTIMAGE_TARGET
+endif # BUILDING_VENDOR_BOOT_IMAGE is not true
 endif # BOARD_BUILD_SYSTEM_ROOT_IMAGE is not true
 
 ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
@@ -3283,7 +3314,8 @@
   $(call pretty,"Target odm fs image: $(INSTALLED_ODMIMAGE_TARGET)")
   @mkdir -p $(TARGET_OUT_ODM)
   @mkdir -p $(odmimage_intermediates) && rm -rf $(odmimage_intermediates)/odm_image_info.txt
-  $(call generate-userimage-prop-dictionary, $(odmimage_intermediates)/odm_image_info.txt, skip_fsck=true)
+  $(call generate-image-prop-dictionary, $(odmimage_intermediates)/odm_image_info.txt, odm, \
+	  skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
           $(TARGET_OUT_ODM) $(odmimage_intermediates)/odm_image_info.txt \
@@ -3334,7 +3366,8 @@
   $(call pretty,"Target vendor_dlkm fs image: $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)")
   @mkdir -p $(TARGET_OUT_VENDOR_DLKM)
   @mkdir -p $(vendor_dlkmimage_intermediates) && rm -rf $(vendor_dlkmimage_intermediates)/vendor_dlkm_image_info.txt
-  $(call generate-userimage-prop-dictionary, $(vendor_dlkmimage_intermediates)/vendor_dlkm_image_info.txt, skip_fsck=true)
+  $(call generate-image-prop-dictionary, $(vendor_dlkmimage_intermediates)/vendor_dlkm_image_info.txt, \
+	  vendor_dlkm, skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
           $(TARGET_OUT_VENDOR_DLKM) $(vendor_dlkmimage_intermediates)/vendor_dlkm_image_info.txt \
@@ -3385,7 +3418,8 @@
   $(call pretty,"Target odm_dlkm fs image: $(INSTALLED_ODM_DLKMIMAGE_TARGET)")
   @mkdir -p $(TARGET_OUT_ODM_DLKM)
   @mkdir -p $(odm_dlkmimage_intermediates) && rm -rf $(odm_dlkmimage_intermediates)/odm_dlkm_image_info.txt
-  $(call generate-userimage-prop-dictionary, $(odm_dlkmimage_intermediates)/odm_dlkm_image_info.txt, skip_fsck=true)
+  $(call generate-image-prop-dictionary, $(odm_dlkmimage_intermediates)/odm_dlkm_image_info.txt, \
+	  odm_dlkm, skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
           $(TARGET_OUT_ODM_DLKM) $(odm_dlkmimage_intermediates)/odm_dlkm_image_info.txt \
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index fe97047..2197eb9 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -36,7 +36,9 @@
   $(call add_soong_config_namespace,art_module)
   SOONG_CONFIG_art_module += source_build
 endif
-ifneq (,$(findstring .android.art,$(TARGET_BUILD_APPS)))
+ifneq (,$(SOONG_CONFIG_art_module_source_build))
+  # Keep an explicit setting.
+else ifneq (,$(findstring .android.art,$(TARGET_BUILD_APPS)))
   # Build ART modules from source if they are listed in TARGET_BUILD_APPS.
   SOONG_CONFIG_art_module_source_build := true
 else ifeq (,$(filter-out modules_% mainline_modules_%,$(TARGET_PRODUCT)))
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index 137b118..798dd5f 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -22,8 +22,7 @@
 #   autogen_test_config_file: Path to the test config file generated.
 
 autogen_test_config_file := $(dir $(LOCAL_BUILT_MODULE))$(LOCAL_MODULE).config
-# TODO: (b/167308193) Switch to /data/local/tests/unrestricted as the default install base.
-autogen_test_install_base := /data/local/tmp
+autogen_test_install_base := /data/local/tests/unrestricted
 # Automatically setup test root for native test.
 ifeq (true,$(is_native))
   ifeq (true,$(LOCAL_VENDOR_MODULE))
diff --git a/core/config.mk b/core/config.mk
index acdf15e..0b317fb 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -259,7 +259,7 @@
 
 define add_soong_config_namespace
 $(eval SOONG_CONFIG_NAMESPACES += $1) \
-$(eval SOONG_CONFIG_$1 :=)
+$(eval SOONG_CONFIG_$(strip $1) :=)
 endef
 
 # The add_soong_config_var function adds a list of soong config variables to
@@ -268,8 +268,8 @@
 # $1 is the namespace. $2 is the list of variables.
 # Ex: $(call add_soong_config_var,acme,COOL_FEATURE_A COOL_FEATURE_B)
 define add_soong_config_var
-$(eval SOONG_CONFIG_$1 += $2) \
-$(foreach v,$2,$(eval SOONG_CONFIG_$1_$v := $($v)))
+$(eval SOONG_CONFIG_$(strip $1) += $2) \
+$(foreach v,$(strip $2),$(eval SOONG_CONFIG_$(strip $1)_$v := $($v)))
 endef
 
 # The add_soong_config_var_value function defines a make variable and also adds
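
For illustration only (not part of this change): a small Python sketch of the SOONG_CONFIG_* bookkeeping these macros perform, and of why the $(strip) added above matters. The dict-based value lookup is an assumption of the sketch; the Make version reads the current value of each listed variable.

    # Sketch: namespace -> {variable: value}, mirroring SOONG_CONFIG_<ns>_<var>.
    soong_config = {}

    def add_soong_config_var(namespace, variables, values):
        # Without stripping, stray whitespace in the $(call ...) arguments
        # would end up embedded in the SOONG_CONFIG_* variable names.
        ns = namespace.strip()
        for var in variables.split():
            soong_config.setdefault(ns, {})[var] = values.get(var, "")

    add_soong_config_var(" acme ", "COOL_FEATURE_A COOL_FEATURE_B",
                         {"COOL_FEATURE_A": "true"})
    # soong_config == {"acme": {"COOL_FEATURE_A": "true", "COOL_FEATURE_B": ""}}
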
@@ -1004,6 +1004,14 @@
 BOARD_PREBUILT_HIDDENAPI_DIR ?=
 .KATI_READONLY := BOARD_PREBUILT_HIDDENAPI_DIR
 
+ifdef USE_HOST_MUSL
+  ifneq (,$(or $(BUILD_BROKEN_USES_BUILD_HOST_EXECUTABLE),\
+               $(BUILD_BROKEN_USES_BUILD_HOST_SHARED_LIBRARY),\
+               $(BUILD_BROKEN_USES_BUILD_HOST_STATIC_LIBRARY)))
+    $(error USE_HOST_MUSL can't be set when native host builds are enabled in Make with BUILD_BROKEN_USES_BUILD_HOST_*)
+  endif
+endif
+
 # ###############################################################
 # Set up final options.
 # ###############################################################
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 0f95202..07797c8 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -131,3 +131,8 @@
 ifeq ($(TURBINE_ENABLED),false)
 $(eval $(call copy-one-file,$(LOCAL_FULL_CLASSES_JACOCO_JAR),$(full_classes_header_jar)))
 endif
+
+#######################################
+# Capture deps added after base_rules.mk
+include $(BUILD_NOTICE_FILE)
+#######################################
diff --git a/core/main.mk b/core/main.mk
index 7b41ba2..6b8080c 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1670,8 +1670,7 @@
     $(INSTALLED_FILES_JSON_ROOT) \
     $(INSTALLED_FILES_FILE_RECOVERY) \
     $(INSTALLED_FILES_JSON_RECOVERY) \
-    $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
-    soong_docs
+    $(INSTALLED_ANDROID_INFO_TXT_TARGET)
 
 # The droidcore target depends on the droidcore-unbundled subset and any other
 # targets for a non-unbundled (full source) full system build.
@@ -1886,6 +1885,8 @@
   ifdef CLANG_COVERAGE
     $(foreach f,$(SOONG_NDK_API_XML), \
         $(call dist-for-goals,droidcore,$(f):ndk_apis/$(notdir $(f))))
+    $(foreach f,$(SOONG_CC_API_XML), \
+        $(call dist-for-goals,droidcore,$(f):cc_apis/$(notdir $(f))))
   endif
 
   # For full system build (whether unbundled or not), we configure
diff --git a/core/native_benchmark_test_config_template.xml b/core/native_benchmark_test_config_template.xml
index d1f0199..8a89241 100644
--- a/core/native_benchmark_test_config_template.xml
+++ b/core/native_benchmark_test_config_template.xml
@@ -22,10 +22,10 @@
 
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
-        <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+        <option name="push" value="{MODULE}->/data/local/tests/unrestricted/{MODULE}" />
     </target_preparer>
     <test class="com.android.tradefed.testtype.GoogleBenchmarkTest" >
-        <option name="native-benchmark-device-path" value="/data/local/tmp" />
+        <option name="native-benchmark-device-path" value="/data/local/tests/unrestricted" />
         <option name="benchmark-module-name" value="{MODULE}" />
     </test>
 </configuration>
diff --git a/core/product_config.mk b/core/product_config.mk
index 7bed376..54fbb7d 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -87,6 +87,19 @@
 $(foreach f,$(1),$(f):$(2)/$(notdir $(f)))
 endef
 
+#
+# Convert the list of file names to the list of PRODUCT_COPY_FILES items
+# $(1): from pattern
+# $(2): to pattern
+# $(3): file names
+# E.g., calling product-copy-files-by-pattern with
+#   (from/%, to/%, a b)
+# returns
+#   from/a:to/a from/b:to/b
+define product-copy-files-by-pattern
+$(join $(patsubst %,$(1),$(3)),$(patsubst %,:$(2),$(3)))
+endef
+
 # ---------------------------------------------------------------
 # Check for obsolete PRODUCT- and APP- goals
 ifeq ($(CALLED_FROM_SETUP),true)
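
For illustration only (not part of this change): a Python sketch of what product-copy-files-by-pattern computes, using the example from the comment above. Make's patsubst substitutes each word into the "%" pattern, and join pairs the two resulting lists element-wise.

    def product_copy_files_by_pattern(from_pattern, to_pattern, names):
        words = names.split()
        sources = [from_pattern.replace("%", w) for w in words]
        dests = [to_pattern.replace("%", w) for w in words]
        return ["%s:%s" % (s, d) for s, d in zip(sources, dests)]

    assert product_copy_files_by_pattern("from/%", "to/%", "a b") == [
        "from/a:to/a", "from/b:to/b"]
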
diff --git a/core/product_config.rbc b/core/product_config.rbc
index ef0d0c9..28b37a3 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -82,7 +82,7 @@
                     # Define SOONG_CONFIG_<ns> for Make, otherwise
                     # it cannot be added to .KATI_READONLY list
                     if _options.format == "make":
-                        print("SOONG_CONFIG_" + nsname, ":=")
+                        print("SOONG_CONFIG_" + nsname, ":=", " ".join(nsvars.keys()))
                     for var, val in sorted(nsvars.items()):
                         __print_attr("SOONG_CONFIG_%s_%s" % (nsname, var), val)
             elif attr not in _globals_base:
@@ -439,7 +439,7 @@
     """Returns regular expression equivalent to Make pattern."""
 
     # TODO(asmundak): this will mishandle '\%'
-    return "^(" + "|".join([w.replace("%", ".*", 1) for w in words]) + ")"
+    return "^(" + "|".join([w.replace("%", ".*", 1) for w in words if w]) + ")$"
 
 def _regex_match(regex, w):
     return rblf_regex(regex, w)
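
For illustration only (not part of this change): a Python sketch of the pattern-to-regex conversion this hunk fixes. Empty words must be dropped because an empty alternative matches every string, and the expression must be anchored with "$" so that a literal word only matches exactly.

    import re

    def mk2regex(words):
        # "%" is the Make wildcard; only its first occurrence is expanded.
        return "^(" + "|".join(w.replace("%", ".*", 1) for w in words if w) + ")$"

    assert re.match(mk2regex(["foo", "bar/%"]), "bar/baz")
    assert not re.match(mk2regex(["foo"]), "foobar")  # without "$" this would match
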
diff --git a/core/rbe.mk b/core/rbe.mk
index 19c0e42..370d4bd 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -22,6 +22,18 @@
     rbe_dir := prebuilts/remoteexecution-client/live/
   endif
 
+  ifdef RBE_CXX_POOL
+    cxx_pool := $(RBE_CXX_POOL)
+  else
+    cxx_pool := default
+  endif
+
+  ifdef RBE_JAVA_POOL
+    java_pool := $(RBE_JAVA_POOL)
+  else
+    java_pool := java16
+  endif
+
   ifdef RBE_CXX_EXEC_STRATEGY
     cxx_rbe_exec_strategy := $(RBE_CXX_EXEC_STRATEGY)
   else
@@ -59,8 +71,8 @@
   endif
 
   platform := container-image=docker://gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62
-  cxx_platform := $(platform),Pool=default
-  java_r8_d8_platform := $(platform),Pool=java16
+  cxx_platform := $(platform),Pool=$(cxx_pool)
+  java_r8_d8_platform := $(platform),Pool=$(java_pool)
 
   RBE_WRAPPER := $(rbe_dir)/rewrapper
   RBE_CXX := --labels=type=compile,lang=cpp,compiler=clang --env_var_allowlist=PWD --exec_strategy=$(cxx_rbe_exec_strategy) --platform=$(cxx_platform) --compare=$(cxx_compare)
diff --git a/core/rust_device_benchmark_config_template.xml b/core/rust_device_benchmark_config_template.xml
index 2055df2..a117fc4 100644
--- a/core/rust_device_benchmark_config_template.xml
+++ b/core/rust_device_benchmark_config_template.xml
@@ -17,11 +17,11 @@
 <configuration description="Config to run {MODULE} rust benchmark tests.">
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="false" />
-        <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+        <option name="push" value="{MODULE}->/data/local/tests/unrestricted/{MODULE}" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.rust.RustBinaryTest" >
-        <option name="test-device-path" value="/data/local/tmp" />
+        <option name="test-device-path" value="/data/local/tests/unrestricted" />
         <option name="module-name" value="{MODULE}" />
         <option name="is-benchmark" value="true" />
     </test>
diff --git a/core/rust_device_test_config_template.xml b/core/rust_device_test_config_template.xml
index 9429d38..536f57e 100644
--- a/core/rust_device_test_config_template.xml
+++ b/core/rust_device_test_config_template.xml
@@ -17,11 +17,11 @@
 <configuration description="Config to run {MODULE} device tests.">
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
-        <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+        <option name="push" value="{MODULE}->/data/local/tests/unrestricted/{MODULE}" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.rust.RustBinaryTest" >
-        <option name="test-device-path" value="/data/local/tmp" />
+        <option name="test-device-path" value="/data/local/tests/unrestricted" />
         <option name="module-name" value="{MODULE}" />
     </test>
 </configuration>
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 82fb413..eeac9aa 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -254,3 +254,8 @@
 endif
 
 SOONG_ALREADY_CONV += $(LOCAL_MODULE)
+
+#######################################
+# Capture deps added after base_rules.mk
+include $(BUILD_NOTICE_FILE)
+#######################################
diff --git a/core/soong_config.mk b/core/soong_config.mk
index d3f983c..9eb02b2 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -27,6 +27,7 @@
 $(call add_json_val,  Platform_sdk_version,              $(PLATFORM_SDK_VERSION))
 $(call add_json_str,  Platform_sdk_codename,             $(PLATFORM_VERSION_CODENAME))
 $(call add_json_bool, Platform_sdk_final,                $(filter REL,$(PLATFORM_VERSION_CODENAME)))
+$(call add_json_val,  Platform_sdk_extension_version,    $(PLATFORM_SDK_EXTENSION_VERSION))
 $(call add_json_csv,  Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
 $(call add_json_str,  Platform_security_patch,           $(PLATFORM_SECURITY_PATCH))
 $(call add_json_str,  Platform_preview_sdk_version,      $(PLATFORM_PREVIEW_SDK_VERSION))
@@ -73,6 +74,7 @@
 $(call add_json_str,  HostArch,                          $(HOST_ARCH))
 $(call add_json_str,  HostSecondaryArch,                 $(HOST_2ND_ARCH))
 $(call add_json_bool, HostStaticBinaries,                $(BUILD_HOST_static))
+$(call add_json_bool, HostMusl,                          $(USE_HOST_MUSL))
 
 $(call add_json_str,  CrossHost,                         $(HOST_CROSS_OS))
 $(call add_json_str,  CrossHostArch,                     $(HOST_CROSS_ARCH))
@@ -163,6 +165,7 @@
 $(call add_json_list, VendorSnapshotDirsExcluded,        $(VENDOR_SNAPSHOT_DIRS_EXCLUDED))
 $(call add_json_list, RecoverySnapshotDirsIncluded,      $(RECOVERY_SNAPSHOT_DIRS_INCLUDED))
 $(call add_json_list, RecoverySnapshotDirsExcluded,      $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
+$(call add_json_bool, HostFakeSnapshotEnabled,           $(HOST_FAKE_SNAPSHOT_ENABLE))
 
 $(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
 $(call add_json_bool, Enforce_vintf_manifest,            $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 1ebbf14..1b93be2 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -206,3 +206,8 @@
 		$(hide) touch $@)
 
 SOONG_ALREADY_CONV += $(LOCAL_MODULE)
+
+#######################################
+# Capture deps added after base_rules.mk
+include $(BUILD_NOTICE_FILE)
+#######################################
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index 8b766ae..f9ae2c1 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -57,13 +57,15 @@
 my_kernel_module_copy_files :=
 my_custom_image_modules_var := BOARD_$(strip $(call to-upper,$(my_custom_image_name)))_KERNEL_MODULES
 ifdef $(my_custom_image_modules_var)
-  my_kernel_module_copy_files += $(call build-image-kernel-modules,$($(my_custom_image_modules_var)),$(my_staging_dir),$(CUSTOM_IMAGE_MOUNT_POINT),$(call intermediates-dir-for,PACKAGING,depmod_$(my_custom_image_name)),$($(my_custom_image_modules_var)),modules.load,,$(call intermediates-dir-for,PACKAGING,depmod_$(my_custom_image_name)_stripped))
-  my_copy_pairs += $(my_kernel_module_copy_files)
+$(foreach kmod,\
+  $(call build-image-kernel-modules,$($(my_custom_image_modules_var)),$(my_staging_dir),$(CUSTOM_IMAGE_MOUNT_POINT),$(call intermediates-dir-for,PACKAGING,depmod_$(my_custom_image_name)),$($(my_custom_image_modules_var)),modules.load,,$(call intermediates-dir-for,PACKAGING,depmod_$(my_custom_image_name)_stripped)),\
+  $(eval pair := $(subst :,$(space),$(kmod)))\
+  $(eval my_kernel_module_copy_files += $(word 1,$(pair)):$(subst $(my_staging_dir)/,,$(word 2,$(pair)))))
 endif
 
 # Collect CUSTOM_IMAGE_COPY_FILES.
 my_image_copy_files :=
-$(foreach f,$(CUSTOM_IMAGE_COPY_FILES),\
+$(foreach f,$(CUSTOM_IMAGE_COPY_FILES) $(my_kernel_module_copy_files),\
   $(eval pair := $(subst :,$(space),$(f)))\
   $(eval src := $(word 1,$(pair)))\
   $(eval my_image_copy_files += $(src))\
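
For illustration only (not part of this change): a Python sketch of the pair rewriting the new foreach performs. Each "src:dst" entry returned by build-image-kernel-modules has its destination made relative to the staging directory so it can flow through the same copy loop as CUSTOM_IMAGE_COPY_FILES. The staging path below is hypothetical.

    def relativize_copy_pairs(pairs, staging_dir):
        out = []
        for pair in pairs:
            src, dst = pair.split(":", 1)
            # Equivalent in spirit to $(subst $(my_staging_dir)/,,$(word 2,$(pair)))
            out.append("%s:%s" % (src, dst.replace(staging_dir + "/", "")))
        return out

    staging_dir = "out/obj/PACKAGING/custom_image_intermediates"
    pairs = ["kernel/foo.ko:" + staging_dir + "/lib/modules/foo.ko"]
    assert relativize_copy_pairs(pairs, staging_dir) == [
        "kernel/foo.ko:lib/modules/foo.ko"]
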
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 530bbff..c8fa49f 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -156,6 +156,12 @@
 endif
 .KATI_READONLY := PLATFORM_SDK_VERSION
 
+# This is the sdk extension version of this tree.
+PLATFORM_SDK_EXTENSION_VERSION :=$= 0
+
+# This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
+PLATFORM_BASE_SDK_EXTENSION_VERSION :=$= 0
+
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
   PLATFORM_PREVIEW_SDK_VERSION := 0
 else
@@ -241,7 +247,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2021-08-05
+      PLATFORM_SECURITY_PATCH := 2021-09-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/envsetup.sh b/envsetup.sh
index 6085f7a..d70e815 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1707,7 +1707,7 @@
 function b()
 (
     # Generate BUILD, bzl files into the synthetic Bazel workspace (out/soong/workspace).
-    _trigger_build "all-modules" nothing GENERATE_BAZEL_FILES=true USE_BAZEL_ANALYSIS= || return 1
+    _trigger_build "all-modules" bp2build USE_BAZEL_ANALYSIS= || return 1
     # Then, run Bazel using the synthetic workspace as the --package_path.
     if [[ -z "$@" ]]; then
         # If there are no args, show help.
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 6f4b7f1..755d85e 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -93,6 +93,8 @@
 VNDK-core: android.hardware.power-V1-ndk_platform.so
 VNDK-core: android.hardware.power.stats-V1-ndk.so
 VNDK-core: android.hardware.power.stats-V1-ndk_platform.so
+VNDK-core: android.hardware.radio-V1-ndk.so
+VNDK-core: android.hardware.radio-V1-ndk_platform.so
 VNDK-core: android.hardware.rebootescrow-V1-ndk.so
 VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
 VNDK-core: android.hardware.security.keymint-V1-ndk.so
@@ -107,6 +109,8 @@
 VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
 VNDK-core: android.hardware.weaver-V1-ndk.so
 VNDK-core: android.hardware.weaver-V1-ndk_platform.so
+VNDK-core: android.hardware.wifi.hostapd-V1-ndk.so
+VNDK-core: android.hardware.wifi.hostapd-V1-ndk_platform.so
 VNDK-core: android.hidl.token@1.0-utils.so
 VNDK-core: android.hidl.token@1.0.so
 VNDK-core: android.system.keystore2-V1-ndk.so
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index d924d0b..bd7c4ab 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -74,3 +74,7 @@
 
 # Always build modules from source
 MODULE_BUILD_FROM_SOURCE := true
+
+# Additional settings used in all GSI builds
+PRODUCT_PRODUCT_PROPERTIES += \
+    ro.crypto.metadata_init_delete_all_keys.enabled=false \
diff --git a/tests/run.rbc b/tests/run.rbc
index 4d7166a..2c15b81 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -47,6 +47,9 @@
 assert_eq(["foo/%"], rblf.mkpatsubst("%", "%/%", ["foo"]))
 assert_eq(["from/a:to/a", "from/b:to/b"], rblf.product_copy_files_by_pattern("from/%", "to/%", "a b"))
 
+assert_eq([], rblf.filter(["a", "", "b"], "f"))
+assert_eq(["", "b"], rblf.filter_out(["a", "" ], ["a", "", "b"] ))
+
 globals, config = rblf.product_configuration("test/device", init)
 assert_eq(
     {
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 1ff8e65..045cb1d 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -195,10 +195,12 @@
   @classmethod
   def _read_llvm_readobj(cls, elf_file_path, header, llvm_readobj):
     """Run llvm-readobj and parse the output."""
-    proc = subprocess.Popen(
-      [llvm_readobj, '-dynamic-table', '-dyn-symbols', elf_file_path],
-      stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    cmd = [llvm_readobj, '--dynamic-table', '--dyn-symbols', elf_file_path]
+    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out, _ = proc.communicate()
+    rc = proc.returncode
+    if rc != 0:
+      raise subprocess.CalledProcessError(rc, cmd, out)
     lines = out.splitlines()
     return cls._parse_llvm_readobj(elf_file_path, header, lines)
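
For illustration only (not part of this change): the same failure handling can also be expressed with subprocess.run (Python 3.5+), which raises CalledProcessError on its own when check=True. A minimal sketch under that assumption:

    import subprocess

    def read_dynamic_sections(llvm_readobj, elf_file_path):
        cmd = [llvm_readobj, '--dynamic-table', '--dyn-symbols', elf_file_path]
        # check=True raises subprocess.CalledProcessError on a non-zero exit.
        proc = subprocess.run(cmd, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE, check=True)
        return proc.stdout.splitlines()
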
 
diff --git a/tools/generate-notice-files.py b/tools/generate-notice-files.py
index bf958fb..5e3010f 100755
--- a/tools/generate-notice-files.py
+++ b/tools/generate-notice-files.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2012 The Android Open Source Project
 #
@@ -30,20 +30,18 @@
 import os
 import os.path
 import re
+import struct
 import sys
 
 MD5_BLOCKSIZE = 1024 * 1024
 HTML_ESCAPE_TABLE = {
-    "&": "&amp;",
-    '"': "&quot;",
-    "'": "&apos;",
-    ">": "&gt;",
-    "<": "&lt;",
+    b"&": b"&amp;",
+    b'"': b"&quot;",
+    b"'": b"&apos;",
+    b">": b"&gt;",
+    b"<": b"&lt;",
     }
 
-def hexify(s):
-    return ("%02x"*len(s)) % tuple(map(ord, s))
-
 def md5sum(filename):
     """Calculate an MD5 of the file given by FILENAME,
     and return hex digest as a string.
@@ -57,20 +55,26 @@
             break
         sum.update(block)
     f.close()
-    return hexify(sum.digest())
+    return sum.hexdigest()
 
 
 def html_escape(text):
     """Produce entities within text."""
-    return "".join(HTML_ESCAPE_TABLE.get(c,c) for c in text)
+    # Iterating with "for i in text" doesn't work since each i is an int,
+    # not a byte. There are multiple ways to solve this, but the most
+    # performant way to iterate over a byte string is to use unpack. Using
+    # for i in range(len(text)) and slicing out each byte is twice as slow
+    # as this method.
+    return b"".join(HTML_ESCAPE_TABLE.get(i,i) for i in struct.unpack(str(len(text)) + 'c', text))
 
-HTML_OUTPUT_CSS="""
+HTML_OUTPUT_CSS=b"""
 <style type="text/css">
 body { padding: 0; font-family: sans-serif; }
 .same-license { background-color: #eeeeee; border-top: 20px solid white; padding: 10px; }
 .label { font-weight: bold; }
 .file-list { margin-left: 1em; color: blue; }
 </style>
+
 """
 
 def combine_notice_files_html(file_hash, input_dirs, output_filename):
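
For illustration only (not part of this change): the comment in html_escape above points at a Python 3 pitfall: iterating over a bytes object yields ints, so a table keyed by one-byte strings never matches. A minimal sketch of the pitfall and of the struct.unpack workaround used here:

    import struct

    ESCAPES = {b"&": b"&amp;", b"<": b"&lt;", b">": b"&gt;"}
    text = b"a < b & c"

    # Iterating bytes yields ints (97, 32, ...), so lookups on bytes keys miss.
    assert all(isinstance(i, int) for i in text)

    # unpack with one 'c' per byte yields length-1 bytes objects instead.
    chunks = struct.unpack(str(len(text)) + "c", text)
    assert b"".join(ESCAPES.get(c, c) for c in chunks) == b"a &lt; b &amp; c"
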
@@ -90,13 +94,13 @@
     # Open the output file, and output the header pieces
     output_file = open(output_filename, "wb")
 
-    print >> output_file, "<html><head>"
-    print >> output_file, HTML_OUTPUT_CSS
-    print >> output_file, '</head><body topmargin="0" leftmargin="0" rightmargin="0" bottommargin="0">'
+    output_file.write(b"<html><head>\n")
+    output_file.write(HTML_OUTPUT_CSS)
+    output_file.write(b'</head><body topmargin="0" leftmargin="0" rightmargin="0" bottommargin="0">\n')
 
     # Output our table of contents
-    print >> output_file, '<div class="toc">'
-    print >> output_file, "<ul>"
+    output_file.write(b'<div class="toc">\n')
+    output_file.write(b"<ul>\n")
 
     # Flatten the list of lists into a single list of filenames
     sorted_filenames = sorted(itertools.chain.from_iterable(file_hash))
@@ -104,31 +108,29 @@
     # Print out a nice table of contents
     for filename in sorted_filenames:
         stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
-        print >> output_file, '<li><a href="#id%d">%s</a></li>' % (id_table.get(filename), stripped_filename)
+        output_file.write(('<li><a href="#id%d">%s</a></li>\n' % (id_table.get(filename), stripped_filename)).encode())
 
-    print >> output_file, "</ul>"
-    print >> output_file, "</div><!-- table of contents -->"
+    output_file.write(b"</ul>\n")
+    output_file.write(b"</div><!-- table of contents -->\n")
     # Output the individual notice file lists
-    print >>output_file, '<table cellpadding="0" cellspacing="0" border="0">'
+    output_file.write(b'<table cellpadding="0" cellspacing="0" border="0">\n')
     for value in file_hash:
-        print >> output_file, '<tr id="id%d"><td class="same-license">' % id_table.get(value[0])
-        print >> output_file, '<div class="label">Notices for file(s):</div>'
-        print >> output_file, '<div class="file-list">'
+        output_file.write(b'<tr id="id%d"><td class="same-license">\n' % id_table.get(value[0]))
+        output_file.write(b'<div class="label">Notices for file(s):</div>\n')
+        output_file.write(b'<div class="file-list">\n')
         for filename in value:
-            print >> output_file, "%s <br/>" % (SRC_DIR_STRIP_RE.sub(r"\1", filename))
-        print >> output_file, "</div><!-- file-list -->"
-        print >> output_file
-        print >> output_file, '<pre class="license-text">'
-        print >> output_file, html_escape(open(value[0]).read())
-        print >> output_file, "</pre><!-- license-text -->"
-        print >> output_file, "</td></tr><!-- same-license -->"
-        print >> output_file
-        print >> output_file
-        print >> output_file
+            output_file.write(("%s <br/>\n" % SRC_DIR_STRIP_RE.sub(r"\1", filename)).encode())
+        output_file.write(b"</div><!-- file-list -->\n")
+        output_file.write(b"\n")
+        output_file.write(b'<pre class="license-text">\n')
+        with open(value[0], "rb") as notice_file:
+            output_file.write(html_escape(notice_file.read()))
+        output_file.write(b"\n</pre><!-- license-text -->\n")
+        output_file.write(b"</td></tr><!-- same-license -->\n\n\n\n")
 
     # Finish off the file output
-    print >> output_file, "</table>"
-    print >> output_file, "</body></html>"
+    output_file.write(b"</table>\n")
+    output_file.write(b"</body></html>\n")
     output_file.close()
 
 def combine_notice_files_text(file_hash, input_dirs, output_filename, file_title):
@@ -136,14 +138,18 @@
 
     SRC_DIR_STRIP_RE = re.compile("(?:" + "|".join(input_dirs) + ")(/.*).txt")
     output_file = open(output_filename, "wb")
-    print >> output_file, file_title
+    output_file.write(file_title.encode())
+    output_file.write(b"\n")
     for value in file_hash:
-      print >> output_file, "============================================================"
-      print >> output_file, "Notices for file(s):"
-      for filename in value:
-        print >> output_file, SRC_DIR_STRIP_RE.sub(r"\1", filename)
-      print >> output_file, "------------------------------------------------------------"
-      print >> output_file, open(value[0]).read()
+        output_file.write(b"============================================================\n")
+        output_file.write(b"Notices for file(s):\n")
+        for filename in value:
+            output_file.write(SRC_DIR_STRIP_RE.sub(r"\1", filename).encode())
+            output_file.write(b"\n")
+        output_file.write(b"------------------------------------------------------------\n")
+        with open(value[0], "rb") as notice_file:
+            output_file.write(notice_file.read())
+            output_file.write(b"\n")
     output_file.close()
 
 def combine_notice_files_xml(files_with_same_hash, input_dirs, output_filename):
@@ -154,15 +160,15 @@
     # Set up a filename to row id table (anchors inside tables don't work in
     # most browsers, but href's to table row ids do)
     id_table = {}
-    for file_key in files_with_same_hash.keys():
-        for filename in files_with_same_hash[file_key]:
+    for file_key, files in files_with_same_hash.items():
+        for filename in files:
              id_table[filename] = file_key
 
     # Open the output file, and output the header pieces
     output_file = open(output_filename, "wb")
 
-    print >> output_file, '<?xml version="1.0" encoding="utf-8"?>'
-    print >> output_file, "<licenses>"
+    output_file.write(b'<?xml version="1.0" encoding="utf-8"?>\n')
+    output_file.write(b"<licenses>\n")
 
     # Flatten the list of lists into a single list of filenames
     sorted_filenames = sorted(id_table.keys())
@@ -170,10 +176,8 @@
     # Print out a nice table of contents
     for filename in sorted_filenames:
         stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
-        print >> output_file, '<file-name contentId="%s">%s</file-name>' % (id_table.get(filename), stripped_filename)
-
-    print >> output_file
-    print >> output_file
+        output_file.write(('<file-name contentId="%s">%s</file-name>\n' % (id_table.get(filename), stripped_filename)).encode())
+    output_file.write(b"\n\n")
 
     processed_file_keys = []
     # Output the individual notice file lists
@@ -183,11 +187,13 @@
             continue
         processed_file_keys.append(file_key)
 
-        print >> output_file, '<file-content contentId="%s"><![CDATA[%s]]></file-content>' % (file_key, html_escape(open(filename).read()))
-        print >> output_file
+        output_file.write(('<file-content contentId="%s"><![CDATA[' % file_key).encode())
+        with open(filename, "rb") as notice_file:
+            output_file.write(html_escape(notice_file.read()))
+        output_file.write(b"]]></file-content>\n\n")
 
     # Finish off the file output
-    print >> output_file, "</licenses>"
+    output_file.write(b"</licenses>\n")
     output_file.close()
 
 def get_args():
@@ -254,7 +260,7 @@
                     file_md5sum = md5sum(filename)
                     files_with_same_hash[file_md5sum].append(filename)
 
-    filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(files_with_same_hash.keys())]
+    filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(list(files_with_same_hash))]
     combine_notice_files_text(filesets, input_dirs, txt_output_file, file_title)
 
     if html_output_file is not None:
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index f2ba321..1fe468e 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -256,9 +256,11 @@
   needs_casefold = prop_dict.get("needs_casefold", 0)
   needs_compress = prop_dict.get("needs_compress", 0)
 
+  disable_sparse = "disable_sparse" in prop_dict
+
   if fs_type.startswith("ext"):
     build_command = [prop_dict["ext_mkuserimg"]]
-    if "extfs_sparse_flag" in prop_dict:
+    if "extfs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.append(prop_dict["extfs_sparse_flag"])
       run_e2fsck = True
     build_command.extend([in_dir, out_file, fs_type,
@@ -303,7 +305,7 @@
   elif fs_type.startswith("erofs"):
     build_command = ["mkerofsimage.sh"]
     build_command.extend([in_dir, out_file])
-    if "erofs_sparse_flag" in prop_dict:
+    if "erofs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.extend([prop_dict["erofs_sparse_flag"]])
     build_command.extend(["-m", prop_dict["mount_point"]])
     if target_out:
@@ -319,7 +321,7 @@
   elif fs_type.startswith("squash"):
     build_command = ["mksquashfsimage.sh"]
     build_command.extend([in_dir, out_file])
-    if "squashfs_sparse_flag" in prop_dict:
+    if "squashfs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.extend([prop_dict["squashfs_sparse_flag"]])
     build_command.extend(["-m", prop_dict["mount_point"]])
     if target_out:
@@ -341,7 +343,7 @@
   elif fs_type.startswith("f2fs"):
     build_command = ["mkf2fsuserimg.sh"]
     build_command.extend([out_file, prop_dict["image_size"]])
-    if "f2fs_sparse_flag" in prop_dict:
+    if "f2fs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.extend([prop_dict["f2fs_sparse_flag"]])
     if fs_config:
       build_command.extend(["-C", fs_config])
@@ -448,17 +450,20 @@
   # or None if not applicable.
   verity_image_builder = verity_utils.CreateVerityImageBuilder(prop_dict)
 
+  disable_sparse = "disable_sparse" in prop_dict
+  mkfs_output = None
   if (prop_dict.get("use_dynamic_partition_size") == "true" and
       "partition_size" not in prop_dict):
     # If partition_size is not defined, use output of `du' + reserved_size.
     # For compressed file system, it's better to use the compressed size to avoid wasting space.
     if fs_type.startswith("erofs"):
-      tmp_dict = prop_dict.copy()
-      if "erofs_sparse_flag" in tmp_dict:
-        tmp_dict.pop("erofs_sparse_flag")
-      BuildImageMkfs(in_dir, tmp_dict, out_file, target_out, fs_config)
-      size = GetDiskUsage(out_file)
-      os.remove(out_file)
+      mkfs_output = BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
+      if "erofs_sparse_flag" in prop_dict and not disable_sparse:
+        image_path = UnsparseImage(out_file, replace=False)
+        size = GetDiskUsage(image_path)
+        os.remove(image_path)
+      else:
+        size = GetDiskUsage(out_file)
     else:
       size = GetDiskUsage(in_dir)
     logger.info(
@@ -481,7 +486,7 @@
           size // BYTES_IN_MB, prop_dict["extfs_inode_count"])
       BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
       sparse_image = False
-      if "extfs_sparse_flag" in prop_dict:
+      if "extfs_sparse_flag" in prop_dict and not disable_sparse:
         sparse_image = True
       fs_dict = GetFilesystemCharacteristics(fs_type, out_file, sparse_image)
       os.remove(out_file)
@@ -525,7 +530,7 @@
       prop_dict["image_size"] = str(size)
       BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
       sparse_image = False
-      if "f2fs_sparse_flag" in prop_dict:
+      if "f2fs_sparse_flag" in prop_dict and not disable_sparse:
         sparse_image = True
       fs_dict = GetFilesystemCharacteristics(fs_type, out_file, sparse_image)
       os.remove(out_file)
@@ -546,7 +551,8 @@
     max_image_size = verity_image_builder.CalculateMaxImageSize()
     prop_dict["image_size"] = str(max_image_size)
 
-  mkfs_output = BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
+  if not mkfs_output:
+    mkfs_output = BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
 
   # Check if there's enough headroom space available for ext4 image.
   if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
@@ -642,6 +648,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("system_reserved_size", "partition_reserved_size")
     copy_prop("system_selinux_fc", "selinux_fc")
+    copy_prop("system_disable_sparse", "disable_sparse")
   elif mount_point == "system_other":
     # We inherit the selinux policies of /system since we contain some of its
     # files.
@@ -668,6 +675,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("system_reserved_size", "partition_reserved_size")
     copy_prop("system_selinux_fc", "selinux_fc")
+    copy_prop("system_disable_sparse", "disable_sparse")
   elif mount_point == "data":
     # Copy the generic fs type first, override with specific one if available.
     copy_prop("fs_type", "fs_type")
@@ -708,6 +716,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("vendor_reserved_size", "partition_reserved_size")
     copy_prop("vendor_selinux_fc", "selinux_fc")
+    copy_prop("vendor_disable_sparse", "disable_sparse")
   elif mount_point == "product":
     copy_prop("avb_product_hashtree_enable", "avb_hashtree_enable")
     copy_prop("avb_product_add_hashtree_footer_args",
@@ -733,6 +742,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("product_reserved_size", "partition_reserved_size")
     copy_prop("product_selinux_fc", "selinux_fc")
+    copy_prop("product_disable_sparse", "disable_sparse")
   elif mount_point == "system_ext":
     copy_prop("avb_system_ext_hashtree_enable", "avb_hashtree_enable")
     copy_prop("avb_system_ext_add_hashtree_footer_args",
@@ -760,6 +770,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("system_ext_reserved_size", "partition_reserved_size")
     copy_prop("system_ext_selinux_fc", "selinux_fc")
+    copy_prop("system_ext_disable_sparse", "disable_sparse")
   elif mount_point == "odm":
     copy_prop("avb_odm_hashtree_enable", "avb_hashtree_enable")
     copy_prop("avb_odm_add_hashtree_footer_args",
@@ -783,6 +794,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("odm_reserved_size", "partition_reserved_size")
     copy_prop("odm_selinux_fc", "selinux_fc")
+    copy_prop("odm_disable_sparse", "disable_sparse")
   elif mount_point == "vendor_dlkm":
     copy_prop("avb_vendor_dlkm_hashtree_enable", "avb_hashtree_enable")
     copy_prop("avb_vendor_dlkm_add_hashtree_footer_args",
@@ -808,6 +820,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("vendor_dlkm_reserved_size", "partition_reserved_size")
     copy_prop("vendor_dlkm_selinux_fc", "selinux_fc")
+    copy_prop("vendor_dlkm_disable_sparse", "disable_sparse")
   elif mount_point == "odm_dlkm":
     copy_prop("avb_odm_dlkm_hashtree_enable", "avb_hashtree_enable")
     copy_prop("avb_odm_dlkm_add_hashtree_footer_args",
@@ -831,6 +844,7 @@
       d["extfs_rsv_pct"] = "0"
     copy_prop("odm_dlkm_reserved_size", "partition_reserved_size")
     copy_prop("odm_dlkm_selinux_fc", "selinux_fc")
+    copy_prop("odm_dlkm_disable_sparse", "disable_sparse")
   elif mount_point == "oem":
     copy_prop("fs_type", "fs_type")
     copy_prop("oem_size", "partition_size")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index f30e382..c6800e8 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1983,6 +1983,8 @@
     info_dict = LoadInfoDict(input_zip)
 
   is_sparse = info_dict.get("extfs_sparse_flag")
+  if info_dict.get(which + "_disable_sparse"):
+    is_sparse = False
 
   # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain
   # shared blocks (i.e. some blocks will show up in multiple files' block
@@ -3851,12 +3853,14 @@
   if not image_size:
     return None
 
+  disable_sparse = OPTIONS.info_dict.get(which + "_disable_sparse")
+
   image_blocks = int(image_size) // 4096 - 1
   assert image_blocks > 0, "blocks for {} must be positive".format(which)
 
   # For sparse images, we will only check the blocks that are listed in the care
   # map, i.e. the ones with meaningful data.
-  if "extfs_sparse_flag" in OPTIONS.info_dict:
+  if "extfs_sparse_flag" in OPTIONS.info_dict and not disable_sparse:
     simg = sparse_img.SparseImage(imgname)
     care_map_ranges = simg.care_map.intersect(
         rangelib.RangeSet("0-{}".format(image_blocks)))
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 366b51a..4eb2f0c 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -221,6 +221,12 @@
       For VABC downgrades, we must finish merging before doing data wipe, and
       since data wipe is required for downgrading OTA, this might cause long
       wait time in recovery.
+
+  --enable_vabc_xor
+      Enable the VABC xor feature. This reduces the space requirements of the OTA.
+
+  --force_minor_version
+      Override the update_engine minor version for delta generation.
 """
 
 from __future__ import print_function
@@ -286,7 +292,8 @@
 OPTIONS.disable_vabc = False
 OPTIONS.spl_downgrade = False
 OPTIONS.vabc_downgrade = False
-OPTIONS.enable_vabc_xor = False
+OPTIONS.enable_vabc_xor = True
+OPTIONS.force_minor_version = None
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -1116,6 +1123,8 @@
     target_info.info_dict['ab_partitions'] = zfp.read(
         AB_PARTITIONS).decode().strip().split("\n")
 
+  CheckVintfIfTrebleEnabled(target_file, target_info)
+
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
@@ -1138,6 +1147,8 @@
     additional_args += ["--disable_vabc", "true"]
   if OPTIONS.enable_vabc_xor:
     additional_args += ["--enable_vabc_xor", "true"]
+  if OPTIONS.force_minor_version:
+    additional_args += ["--force_minor_version", OPTIONS.force_minor_version]
   additional_args += ["--max_timestamp", max_timestamp]
 
   if SupportsMainlineGkiUpdates(source_file):
@@ -1200,8 +1211,6 @@
 
   common.ZipClose(target_zip)
 
-  CheckVintfIfTrebleEnabled(target_file, target_info)
-
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
   common.ZipClose(output_zip)
@@ -1312,7 +1321,9 @@
     elif o == "--vabc_downgrade":
       OPTIONS.vabc_downgrade = True
     elif o == "--enable_vabc_xor":
-      OPTIONS.enable_vabc_xor = True
+      OPTIONS.enable_vabc_xor = a.lower() != "false"
+    elif o == "--force_minor_version":
+      OPTIONS.force_minor_version = a
     else:
       return False
     return True
@@ -1357,7 +1368,8 @@
                                  "disable_vabc",
                                  "spl_downgrade",
                                  "vabc_downgrade",
-                                 "enable_vabc_xor",
+                                 "enable_vabc_xor=",
+                                 "force_minor_version=",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
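
For illustration only (not part of this change): with this change --enable_vabc_xor takes a value and defaults to enabled; any value other than "false" (case-insensitive) keeps it on. A minimal Python sketch of that parsing rule:

    def parse_enable_vabc_xor(value, default=True):
        if value is None:  # flag not passed at all
            return default
        return value.lower() != "false"

    assert parse_enable_vabc_xor(None) is True
    assert parse_enable_vabc_xor("true") is True
    assert parse_enable_vabc_xor("False") is False
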
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 844f629..61c8676 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -236,7 +236,11 @@
   warning_pattern = re.compile('^/[^ ]*/[^ ]*: warning: .*')
   count = 0
   for line in buildlog:
-    if warning_pattern.match(line):
+    # We want to find the android_root of a local build machine.
+    # Do not use RBE warning lines, which have the '/b/f/w/' path prefix.
+    # Do not use warnings from /tmp/ files.
+    if warning_pattern.match(line) and (
+        '/b/f/w' not in line and not line.startswith('/tmp/')):
       warning_lines.append(line)
       count += 1
       if count > 9999:
@@ -300,6 +304,7 @@
   architecture = 'unknown'
 
   # only handle warning lines of format 'file_path:line_no:col_no: warning: ...'
+  # Bug: http://198657613, this might need changes to handle RBE output.
   chrome_warning_pattern = r'^[^ ]*/[^ ]*:[0-9]+:[0-9]+: warning: .*'
 
   warning_pattern = re.compile(chrome_warning_pattern)
@@ -347,6 +352,8 @@
   platform_version = 'unknown'
   target_product = 'unknown'
   target_variant = 'unknown'
+  build_id = 'unknown'
+  use_rbe = False
   android_root = find_android_root(infile)
   infile.seek(0)
 
@@ -363,6 +370,14 @@
   warning_without_file = re.compile('^warning: .*')
   rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
 
+  # If RBE was used, try to reclaim warning lines that are mixed with
+  # leading characters from other concurrent jobs' stderr output.
+  # The leading characters can be anything, including digits and spaces.
+  # It's impossible to correctly identify the starting point of the source
+  # file path without knowing the file's directory name.
+  # Here we can only be sure to recover lines containing "/b/f/w/".
+  rbe_warning_pattern = re.compile('.*/b/f/w/[^ ]*: warning: .*')
+
    # Collect all unique warning lines
   # Remove the duplicated warnings save ~8% of time when parsing
   # one typical build log than before
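
For illustration only (not part of this change): a Python sketch of the reclaim step added above. Warning lines produced through RBE keep a recognizable "/b/f/w/" marker even when another job's stderr output is prepended, so everything up to and including that marker is stripped.

    import re

    rbe_warning_pattern = re.compile('.*/b/f/w/[^ ]*: warning: .*')

    def reclaim_rbe_warning(line):
        if rbe_warning_pattern.match(line):
            return re.sub('.*/b/f/w/', '', line)
        return None

    line = "12 junk/b/f/w/frameworks/base/foo.cpp:1:2: warning: unused variable"
    assert reclaim_rbe_warning(line) == (
        "frameworks/base/foo.cpp:1:2: warning: unused variable")
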
@@ -384,6 +399,12 @@
           prev_warning, flags, android_root, unique_warnings)
       prev_warning = ''
 
+    if use_rbe and rbe_warning_pattern.match(line):
+      cleaned_up_line = re.sub('.*/b/f/w/', '', line)
+      unique_warnings = add_normalized_line_to_warnings(
+          cleaned_up_line, flags, android_root, unique_warnings)
+      continue
+
     if warning_pattern.match(line):
       if warning_without_file.match(line):
         # save this line and combine it with the next line
@@ -399,15 +420,26 @@
       result = re.search('(?<=^PLATFORM_VERSION=).*', line)
       if result is not None:
         platform_version = result.group(0)
+        continue
       result = re.search('(?<=^TARGET_PRODUCT=).*', line)
       if result is not None:
         target_product = result.group(0)
+        continue
       result = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
       if result is not None:
         target_variant = result.group(0)
+        continue
+      result = re.search('(?<=^BUILD_ID=).*', line)
+      if result is not None:
+        build_id = result.group(0)
+        continue
       result = re.search('(?<=^TOP=).*', line)
       if result is not None:
         android_root = result.group(1)
+        continue
+      if re.search('USE_RBE=', line) is not None:
+        use_rbe = True
+        continue
 
   if android_root:
     new_unique_warnings = dict()
@@ -418,8 +450,8 @@
           warning_line, flags, android_root)
     unique_warnings = new_unique_warnings
 
-  header_str = '%s - %s - %s' % (platform_version, target_product,
-                                 target_variant)
+  header_str = '%s - %s - %s (%s)' % (
+      platform_version, target_product, target_variant, build_id)
   return unique_warnings, header_str
 
 
diff --git a/tools/zipalign/ZipFile.cpp b/tools/zipalign/ZipFile.cpp
index 1e3c413..f2f65a6 100644
--- a/tools/zipalign/ZipFile.cpp
+++ b/tools/zipalign/ZipFile.cpp
@@ -530,7 +530,7 @@
     // If the alignment is not what was requested, add some padding in the extra
     // so the payload ends up where it is requested.
     uint64_t alignDiff = alignTo - (expectedPayloadOffset % alignTo);
-    if (alignDiff == 0)
+    if (alignDiff == alignTo)
         return OK;
 
     return pEntry->addPadding(alignDiff);
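
For illustration only (not part of this change): a Python sketch of why the comparison had to change. alignDiff is computed as alignTo - (expectedPayloadOffset % alignTo), so for an already-aligned offset it equals alignTo, never 0; the old check therefore never returned early, and a full extra alignment unit of padding was added.

    def padding_needed(offset, align_to):
        align_diff = align_to - (offset % align_to)
        if align_diff == align_to:  # already aligned, add nothing
            return 0
        return align_diff

    assert padding_needed(4096, 4) == 0  # aligned: the old "== 0" check missed this
    assert padding_needed(4097, 4) == 3
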
@@ -654,7 +654,7 @@
 {
     ZipEntry* pEntry = NULL;
     status_t result;
-    long lfhPosn, startPosn, endPosn, uncompressedLen;
+    long lfhPosn, uncompressedLen;
 
     if (mReadOnly)
         return INVALID_OPERATION;
@@ -690,7 +690,6 @@
      */
     lfhPosn = ftell(mZipFp);
     pEntry->mLFH.write(mZipFp);
-    startPosn = ftell(mZipFp);
 
     /*
      * Copy the data over.
@@ -741,18 +740,13 @@
     }
 
     /*
-     * Update file offsets.
-     */
-    endPosn = ftell(mZipFp);
-
-    /*
      * Success!  Fill out new values.
      */
     pEntry->setLFHOffset(lfhPosn);
     mEOCD.mNumEntries++;
     mEOCD.mTotalNumEntries++;
     mEOCD.mCentralDirSize = 0;      // mark invalid; set by flush()
-    mEOCD.mCentralDirOffset = endPosn;
+    mEOCD.mCentralDirOffset = ftell(mZipFp);
 
     /*
      * Go back and write the LFH.
diff --git a/tools/zipalign/tests/src/align_test.cpp b/tools/zipalign/tests/src/align_test.cpp
index c79e791..96d4f73 100644
--- a/tools/zipalign/tests/src/align_test.cpp
+++ b/tools/zipalign/tests/src/align_test.cpp
@@ -9,6 +9,7 @@
 #include <android-base/file.h>
 
 using namespace android;
+using namespace base;
 
 static std::string GetTestPath(const std::string& filename) {
   static std::string test_data_dir = android::base::GetExecutableDirectory() + "/tests/data/";
@@ -26,6 +27,34 @@
   ASSERT_EQ(0, verified);
 }
 
+TEST(Align, DoubleAligment) {
+  const std::string src = GetTestPath("unaligned.zip");
+  const std::string tmp = GetTestPath("da_aligned.zip");
+  const std::string dst = GetTestPath("da_d_aligner.zip");
+
+  int processed = process(src.c_str(), tmp.c_str(), 4, true, false, 4096);
+  ASSERT_EQ(0, processed);
+
+  int verified = verify(tmp.c_str(), 4, true, false);
+  ASSERT_EQ(0, verified);
+
+  // Align the result of the previous run. Essentially double aligning.
+  processed = process(tmp.c_str(), dst.c_str(), 4, true, false, 4096);
+  ASSERT_EQ(0, processed);
+
+  verified = verify(dst.c_str(), 4, true, false);
+  ASSERT_EQ(0, verified);
+
+  // Nothing should have changed between tmp and dst.
+  std::string tmp_content;
+  ASSERT_EQ(true, ReadFileToString(tmp, &tmp_content));
+
+  std::string dst_content;
+  ASSERT_EQ(true, ReadFileToString(dst, &dst_content));
+
+  ASSERT_EQ(tmp_content, dst_content);
+}
+
 // Align a zip featuring a hole at the beginning. The
 // hole in the archive is a delete entry in the Central
 // Directory.