Merge "Remove architecture name from names of HOST_*.mk files." into main
diff --git a/Changes.md b/Changes.md
index 6836528..6c0cf70 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,4 +1,19 @@
-# Build System Changes for Android.mk Writers
+# Build System Changes for Android.mk/Android.bp Writers
+
+## Partitions are no longer affected by previous builds
+
+Partition builds used to include everything in their staging directories, and building an
+individual module installs it into the staging directory. As a result, `m mymodule` followed
+by `m` would previously cause `mymodule` to be preinstalled on the device, even if it wasn't
+listed in `PRODUCT_PACKAGES`.
+
+This has changed: partition images now only include what they would contain after a clean
+build. The old behavior can be restored by setting the `BUILD_BROKEN_INCORRECT_PARTITION_IMAGES`
+environment variable or board config variable.
+
+Manually added make rules that write into the staging directories without going through the
+make module system are not compatible with this change. This includes many usages of
+`LOCAL_POST_INSTALL_CMD`.
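+
+For example, a minimal sketch of opting back into the old behavior (assuming the setting lives
+in your board's `BoardConfig.mk`; the exact file is up to your board configuration):
+
+```
+# Temporarily restore the previous, non-hermetic partition image behavior while
+# migrating rules that write directly into the staging directories.
+BUILD_BROKEN_INCORRECT_PARTITION_IMAGES := true
+```
+
+The same variable can also be set in the environment, e.g.
+`BUILD_BROKEN_INCORRECT_PARTITION_IMAGES=true m`.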
 
 ## Perform validation of Soong plugins
 
@@ -29,6 +44,7 @@
 variable to `true`.
 
 Python 2 is slated for complete removal in V.
+
 ## Stop referencing sysprop_library directly from cc modules
 
 For the migration to Bazel, we are no longer mapping sysprop_library targets
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 957da92..4a7e957 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -773,6 +773,9 @@
 # Don't use SOONG_HOST_OUT, it is now an alias for HOST_OUT.
 $(call add-clean-step, rm -rf $(OUT_DIR)/soong/host)
 
+# Clear out tools/metalava Bazel output dir
+$(call add-clean-step, rm -rf $(OUT_DIR)/bazel/output/execroot/__main__/bazel-out/mixed_builds_product-*/bin/tools/metalava)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/cogsetup.sh b/cogsetup.sh
new file mode 100644
index 0000000..02fcc8f
--- /dev/null
+++ b/cogsetup.sh
@@ -0,0 +1,89 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file is executed by build/envsetup.sh, and can use anything
+# defined in envsetup.sh.
+function _create_out_symlink_for_cog() {
+  if [[ "${OUT_DIR}" == "" ]]; then
+    OUT_DIR="out"
+  fi
+
+  if [[ -L "${OUT_DIR}" ]]; then
+    return
+  fi
+  if [ -d "${OUT_DIR}" ]; then
+    echo -e "\tOutput directory ${OUT_DIR} cannot be present in a Cog workspace."
+    echo -e "\tDelete \"${OUT_DIR}\" or create a symlink from \"${OUT_DIR}\" to a directory outside your workspace."
+    return 1
+  fi
+
+  DEFAULT_OUTPUT_DIR="${HOME}/.cog/android-build-out"
+  mkdir -p ${DEFAULT_OUTPUT_DIR}
+  ln -s ${DEFAULT_OUTPUT_DIR} `pwd`/out
+}
+
+# This function moves the reclient binaries into a directory that exists in a
+# non-cog part of the overall filesystem. This is to work around the problem
+# described in b/289391270.
+function _copy_reclient_binaries_from_cog() {
+  local NONCOG_RECLIENT_BIN_DIR="${HOME}/.cog/reclient/bin"
+  if [ ! -d "$NONCOG_RECLIENT_BIN_DIR" ]; then
+    # Create the non-Cog directory if it doesn't exist.
+    mkdir -p ${NONCOG_RECLIENT_BIN_DIR}
+  else
+    # Clear out the non-Cog directory if it does exist.
+    rm -f ${NONCOG_RECLIENT_BIN_DIR}/*
+  fi
+
+  local TOP=$(gettop)
+
+  # Copy the binaries out of live.
+  cp $TOP/prebuilts/remoteexecution-client/live/* $NONCOG_RECLIENT_BIN_DIR
+
+  # Finally set the RBE_DIR env var to point to the out-of-cog directory.
+  export RBE_DIR=$NONCOG_RECLIENT_BIN_DIR
+}
+
+# This function sets up the build environment to be appropriate for Cog.
+function _setup_cog_env() {
+  _create_out_symlink_for_cog
+  if [ "$?" -eq "1" ]; then
+    echo -e "\e[0;33mWARNING:\e[00m Cog environment setup failed!"
+    return 1
+  fi
+  _copy_reclient_binaries_from_cog
+
+  export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog"
+
+  # Running the repo command within Cog workspaces is not supported, so override
+  # it with this function. If the user runs repo within a Cog workspace, we fail
+  # with an error; otherwise, we run the original repo command with the given args.
+  ORIG_REPO_PATH=`which repo`
+  function repo {
+    if [[ "${PWD}" == /google/cog/* ]]; then
+      echo -e "\e[01;31mERROR:\e[0m repo command is disallowed within Cog workspaces."
+      return 1
+    fi
+    ${ORIG_REPO_PATH} "$@"
+  }
+}
+
+if [[ "${PWD}" != /google/cog/* ]]; then
+  echo -e "\e[01;31mERROR:\e[0m This script must be run from a Cog workspace."
+fi
+
+_setup_cog_env
\ No newline at end of file
diff --git a/core/BUILD.bazel b/core/BUILD.bazel
index 3e69e62..f4869d4 100644
--- a/core/BUILD.bazel
+++ b/core/BUILD.bazel
@@ -1,4 +1,28 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 # Export tradefed templates for tests.
 exports_files(
     glob(["*.xml"]),
 )
+
+# Export proguard flag files for r8.
+filegroup(
+    name = "global_proguard_flags",
+    srcs = [
+        "proguard.flags",
+        "proguard_basic_keeps.flags",
+    ],
+    visibility = ["//visibility:public"],
+)
diff --git a/core/Makefile b/core/Makefile
index dcec2e5..099df47 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -363,6 +363,10 @@
 )
 INTERNAL_VENDOR_RAMDISK_FRAGMENTS += $(BOARD_VENDOR_RAMDISK_FRAGMENTS)
 
+ifneq ($(BOARD_KERNEL_MODULES_16K),)
+INTERNAL_VENDOR_RAMDISK_FRAGMENTS += 16K
+endif
+
 # Strip the list in case of any whitespace.
 INTERNAL_VENDOR_RAMDISK_FRAGMENTS := \
   $(strip $(INTERNAL_VENDOR_RAMDISK_FRAGMENTS))
@@ -1050,16 +1054,32 @@
 BUILT_RAMDISK_16K_TARGET := $(PRODUCT_OUT)/ramdisk_16k.img
 RAMDISK_16K_STAGING_DIR := $(call intermediates-dir-for,PACKAGING,depmod_ramdisk_16k)
 
-$(BUILT_RAMDISK_16K_TARGET): $(DEPMOD) $(MKBOOTFS)
-$(BUILT_RAMDISK_16K_TARGET): $(call copy-many-files,$(foreach file,$(BOARD_KERNEL_MODULES_16K),$(file):$(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/$(notdir $(file))))
+$(foreach \
+  file,\
+  $(BOARD_KERNEL_MODULES_16K),\
+  $(eval \
+    $(call copy-and-strip-kernel-module,\
+      $(file),\
+      $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/$(notdir $(file)) \
+    ) \
+  ) \
+)
+
+BOARD_VENDOR_RAMDISK_FRAGMENT.16K.PREBUILT := $(BUILT_RAMDISK_16K_TARGET)
+
+$(BUILT_RAMDISK_16K_TARGET): $(DEPMOD) $(MKBOOTFS) $(EXTRACT_KERNEL) $(COMPRESSION_COMMAND_DEPS)
+$(BUILT_RAMDISK_16K_TARGET): $(foreach file,$(BOARD_KERNEL_MODULES_16K),$(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/$(notdir $(file)))
 	$(DEPMOD) -b $(RAMDISK_16K_STAGING_DIR) 0.0
 	for MODULE in $(BOARD_KERNEL_MODULES_16K); do \
 		basename $$MODULE >> $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/modules.load ; \
 	done;
-	mkdir -p $(TARGET_OUT_RAMDISK_16K)/lib
 	rm -rf $(TARGET_OUT_RAMDISK_16K)/lib/modules
-	cp -r $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0 $(TARGET_OUT_RAMDISK_16K)/lib/modules
-	$(MKBOOTFS) $(TARGET_OUT_RAMDISK_16K) > $@
+	mkdir -p $(TARGET_OUT_RAMDISK_16K)/lib/modules
+	KERNEL_RELEASE=`$(EXTRACT_KERNEL) --tools lz4:$(LZ4) --input $(BOARD_KERNEL_PATH_16K) --output-release` ;\
+	IS_16K_KERNEL=`$(EXTRACT_KERNEL) --tools lz4:$(LZ4) --input $(BOARD_KERNEL_PATH_16K) --output-config` ;\
+	if [[ "$$IS_16K_KERNEL" == *"CONFIG_ARM64_16K_PAGES=y"* ]]; then SUFFIX=_16k; fi ;\
+	cp -r $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0 $(TARGET_OUT_RAMDISK_16K)/lib/modules/$$KERNEL_RELEASE$$SUFFIX
+	$(MKBOOTFS) $(TARGET_OUT_RAMDISK_16K) | $(COMPRESSION_COMMAND) > $@
 
 # Builds a ramdisk using modules defined in BOARD_KERNEL_MODULES_16K
 ramdisk_16k: $(BUILT_RAMDISK_16K_TARGET)
@@ -1076,6 +1096,30 @@
 kernel_16k: $(BUILT_KERNEL_16K_TARGET)
 .PHONY: kernel_16k
 
+BUILT_BOOTIMAGE_16K_TARGET := $(PRODUCT_OUT)/boot_16k.img
+
+BOARD_KERNEL_16K_BOOTIMAGE_PARTITION_SIZE := $(BOARD_BOOTIMAGE_PARTITION_SIZE)
+
+$(BUILT_BOOTIMAGE_16K_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS) $(BUILT_KERNEL_16K_TARGET)
+	$(call pretty,"Target boot 16k image: $@")
+	$(call build_boot_from_kernel_avb_enabled,$@,$(BUILT_KERNEL_16K_TARGET))
+
+
+bootimage_16k: $(BUILT_BOOTIMAGE_16K_TARGET)
+.PHONY: bootimage_16k
+
+BUILT_BOOT_OTA_PACKAGE_16K := $(PRODUCT_OUT)/boot_ota_16k.zip
+$(BUILT_BOOT_OTA_PACKAGE_16K): $(OTA_FROM_RAW_IMG) $(BUILT_BOOTIMAGE_16K_TARGET) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8
+	$(OTA_FROM_RAW_IMG) --package_key $(DEFAULT_SYSTEM_DEV_CERTIFICATE) \
+                      --max_timestamp `cat $(BUILD_DATETIME_FILE)` \
+                      --path $(HOST_OUT) \
+                      --partition_name boot \
+                      --output $@ \
+                      $(BUILT_BOOTIMAGE_16K_TARGET)
+
+boototapackage_16k: $(BUILT_BOOT_OTA_PACKAGE_16K)
+.PHONY: boototapackage_16k
+
 endif
 
 
@@ -1171,6 +1215,29 @@
   $(if $(1),--partition_size $(1),--dynamic_partition_size)
 endef
 
+# $1: output boot image target
+# $2: input path to kernel binary
+define build_boot_from_kernel_avb_enabled
+  $(eval kernel := $(2))
+  $(MKBOOTIMG) --kernel $(kernel) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+  $(if $(BOARD_GKI_SIGNING_KEY_PATH), \
+    $(eval boot_signature := $(call intermediates-dir-for,PACKAGING,generic_boot)/$(notdir $(1)).boot_signature) \
+    $(eval kernel_signature := $(call intermediates-dir-for,PACKAGING,generic_kernel)/$(notdir $(kernel)).boot_signature) \
+    $(call generate_generic_boot_image_certificate,$(1),$(boot_signature),boot,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
+    $(call generate_generic_boot_image_certificate,$(kernel),$(kernel_signature),generic_kernel,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
+    cat $(kernel_signature) >> $(boot_signature) $(newline) \
+    $(call assert-max-image-size,$(boot_signature),16 << 10) $(newline) \
+    truncate -s $$(( 16 << 10 )) $(boot_signature) $(newline) \
+    cat "$(boot_signature)" >> $(1))
+  $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot)))
+  $(AVBTOOL) add_hash_footer \
+          --image $(1) \
+          $(call get-partition-size-argument,$(call get-bootimage-partition-size,$(1),boot)) \
+          --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
+          $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+endef
+
+
 ifndef BOARD_PREBUILT_BOOTIMAGE
 
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
@@ -1275,22 +1342,7 @@
 # $1: boot image target
 define build_boot_board_avb_enabled
   $(eval kernel := $(call bootimage-to-kernel,$(1)))
-  $(MKBOOTIMG) --kernel $(kernel) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
-  $(if $(BOARD_GKI_SIGNING_KEY_PATH), \
-    $(eval boot_signature := $(call intermediates-dir-for,PACKAGING,generic_boot)/$(notdir $(1)).boot_signature) \
-    $(eval kernel_signature := $(call intermediates-dir-for,PACKAGING,generic_kernel)/$(notdir $(kernel)).boot_signature) \
-    $(call generate_generic_boot_image_certificate,$(1),$(boot_signature),boot,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
-    $(call generate_generic_boot_image_certificate,$(kernel),$(kernel_signature),generic_kernel,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
-    cat $(kernel_signature) >> $(boot_signature) $(newline) \
-    $(call assert-max-image-size,$(boot_signature),16 << 10) $(newline) \
-    truncate -s $$(( 16 << 10 )) $(boot_signature) $(newline) \
-    cat "$(boot_signature)" >> $(1))
-  $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot)))
-  $(AVBTOOL) add_hash_footer \
-          --image $(1) \
-          $(call get-partition-size-argument,$(call get-bootimage-partition-size,$(1),boot)) \
-          --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
-          $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+  $(call build_boot_from_kernel_avb_enabled,$(1),$(kernel))
 endef
 
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS)
@@ -1386,6 +1438,28 @@
 
 endif # my_installed_prebuilt_gki_apex not defined
 
+ifneq ($(BOARD_KERNEL_PATH_16K),)
+BUILT_BOOT_OTA_PACKAGE_4K := $(PRODUCT_OUT)/boot_ota_4k.zip
+$(BUILT_BOOT_OTA_PACKAGE_4K): $(OTA_FROM_RAW_IMG) $(INSTALLED_BOOTIMAGE_TARGET) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8
+	$(OTA_FROM_RAW_IMG) --package_key $(DEFAULT_SYSTEM_DEV_CERTIFICATE) \
+                      --max_timestamp `cat $(BUILD_DATETIME_FILE)` \
+                      --path $(HOST_OUT) \
+                      --partition_name boot \
+                      --output $@ \
+                      $(INSTALLED_BOOTIMAGE_TARGET)
+
+boototapackage_4k: $(BUILT_BOOT_OTA_PACKAGE_4K)
+.PHONY: boototapackage_4k
+
+$(eval $(call copy-one-file,$(BUILT_BOOT_OTA_PACKAGE_4K),$(TARGET_OUT)/boot_otas/boot_ota_4k.zip))
+$(eval $(call copy-one-file,$(BUILT_BOOT_OTA_PACKAGE_16K),$(TARGET_OUT)/boot_otas/boot_ota_16k.zip))
+
+ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT)/boot_otas/boot_ota_4k.zip
+ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT)/boot_otas/boot_ota_16k.zip
+
+
+endif
+
 my_apex_extracted_boot_image :=
 my_installed_prebuilt_gki_apex :=
 
@@ -3351,9 +3425,21 @@
 .PHONY: installed-file-list
 installed-file-list: $(INSTALLED_FILES_FILE)
 
-systemimage_intermediates := \
-    $(call intermediates-dir-for,PACKAGING,systemimage)
-BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
+systemimage_intermediates :=$= $(call intermediates-dir-for,PACKAGING,systemimage)
+BUILT_SYSTEMIMAGE :=$= $(systemimage_intermediates)/system.img
+
+
+# Used by the bazel sandwich to request the staging dir be built
+$(systemimage_intermediates)/staging_dir.stamp: $(FULL_SYSTEMIMAGE_DEPS)
+	touch $@
+
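+# Write each word of $(2) to the file $(1), one word per line. Used below to generate the
+# per-partition file_list.txt files passed to $(BUILD_IMAGE) via --input-directory-filter-file.
+# $(1): output file
+# $(2): list of lines to write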
+define write-file-lines
+$(1):
+	@echo Writing $$@
+	rm -f $$@
+	echo -n > $$@
+	$$(foreach f,$(2),echo "$$(f)" >> $$@$$(newline))
+endef
 
 # $(1): output file
 define build-systemimage-target
@@ -3363,16 +3449,19 @@
       skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(systemimage_intermediates)/file_list.txt) \
           $(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) $(TARGET_OUT) \
           || ( mkdir -p $${DIST_DIR}; \
                cp $(INSTALLED_FILES_FILE) $${DIST_DIR}/installed-files-rescued.txt; \
                exit 1 )
 endef
 
+$(eval $(call write-file-lines,$(systemimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT)/,,$(filter $(TARGET_OUT)/%,$(FULL_SYSTEMIMAGE_DEPS)))))
+
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(BUILT_SYSTEMIMAGE): $(BOARD_AVB_SYSTEM_KEY_PATH)
 endif
-$(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE)
+$(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt
 	$(call build-systemimage-target,$@)
 
 $(call declare-1p-container,$(BUILT_SYSTEMIMAGE),system/extras)
@@ -3428,7 +3517,7 @@
 
 .PHONY: systemimage-nodeps snod
 systemimage-nodeps snod: $(filter-out systemimage-nodeps snod,$(MAKECMDGOALS)) \
-	            | $(INTERNAL_USERIMAGES_DEPS)
+	            | $(INTERNAL_USERIMAGES_DEPS) $(systemimage_intermediates)/file_list.txt
 	@echo "make $@: ignoring dependencies"
 	$(call build-systemimage-target,$(INSTALLED_SYSTEMIMAGE_TARGET))
 	$(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
@@ -3469,6 +3558,7 @@
   $(call generate-image-prop-dictionary, $(userdataimage_intermediates)/userdata_image_info.txt,userdata,skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(userdataimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_DATA) $(userdataimage_intermediates)/userdata_image_info.txt \
           $(INSTALLED_USERDATAIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE))
@@ -3479,7 +3569,10 @@
 INSTALLED_USERDATAIMAGE_TARGET_DEPS := \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_USERDATAIMAGE_FILES)
-$(INSTALLED_USERDATAIMAGE_TARGET): $(INSTALLED_USERDATAIMAGE_TARGET_DEPS)
+
+$(eval $(call write-file-lines,$(userdataimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_DATA)/,,$(filter $(TARGET_OUT_DATA)/%,$(INSTALLED_USERDATAIMAGE_TARGET_DEPS)))))
+
+$(INSTALLED_USERDATAIMAGE_TARGET): $(INSTALLED_USERDATAIMAGE_TARGET_DEPS) $(userdataimage_intermediates)/file_list.txt
 	$(build-userdataimage-target)
 
 $(call declare-1p-container,$(INSTALLED_USERDATAIMAGE_TARGET),)
@@ -3488,7 +3581,7 @@
 UNMOUNTED_NOTICE_VENDOR_DEPS+= $(INSTALLED_USERDATAIMAGE_TARGET)
 
 .PHONY: userdataimage-nodeps
-userdataimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
+userdataimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS) $(userdataimage_intermediates)/file_list.txt
 	$(build-userdataimage-target)
 
 endif # BUILDING_USERDATA_IMAGE
@@ -3524,14 +3617,17 @@
   $(call generate-image-prop-dictionary, $(cacheimage_intermediates)/cache_image_info.txt,cache,skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(cacheimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_CACHE) $(cacheimage_intermediates)/cache_image_info.txt \
           $(INSTALLED_CACHEIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(cacheimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_CACHE)/,,$(filter $(TARGET_OUT_CACHE)/%,$(INTERNAL_CACHEIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_CACHEIMAGE_TARGET := $(BUILT_CACHEIMAGE_TARGET)
-$(INSTALLED_CACHEIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_CACHEIMAGE_FILES)
+$(INSTALLED_CACHEIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_CACHEIMAGE_FILES) $(cacheimage_intermediates)/file_list.txt
 	$(build-cacheimage-target)
 
 $(call declare-1p-container,$(INSTALLED_CACHEIMAGE_TARGET),)
@@ -3540,7 +3636,7 @@
 UNMOUNTED_NOTICE_VENDOR_DEPS+= $(INSTALLED_CACHEIMAGE_TARGET)
 
 .PHONY: cacheimage-nodeps
-cacheimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
+cacheimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS) $(cacheimage_intermediates)/file_list.txt
 	$(build-cacheimage-target)
 
 else # BUILDING_CACHE_IMAGE
@@ -3605,16 +3701,19 @@
   $(call generate-image-prop-dictionary, $(systemotherimage_intermediates)/system_other_image_info.txt,system,skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(systemotherimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_SYSTEM_OTHER) $(systemotherimage_intermediates)/system_other_image_info.txt \
           $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_SYSTEMOTHERIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(systemotherimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_SYSTEM_OTHER)/,,$(filter $(TARGET_OUT_SYSTEM_OTHER)/%,$(INTERNAL_SYSTEMOTHERIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_SYSTEMOTHERIMAGE_TARGET := $(BUILT_SYSTEMOTHERIMAGE_TARGET)
 ifneq (true,$(SANITIZE_LITE))
 # Only create system_other when not building the second stage of a SANITIZE_LITE build.
-$(INSTALLED_SYSTEMOTHERIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(INSTALLED_FILES_FILE_SYSTEMOTHER)
+$(INSTALLED_SYSTEMOTHERIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(INSTALLED_FILES_FILE_SYSTEMOTHER) $(systemotherimage_intermediates)/file_list.txt
 	$(build-systemotherimage-target)
 
 $(call declare-1p-container,$(INSTALLED_SYSTEMOTHERIMAGE_TARGET),)
@@ -3624,7 +3723,7 @@
 endif
 
 .PHONY: systemotherimage-nodeps
-systemotherimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
+systemotherimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS) $(systemotherimage_intermediates)/file_list.txt
 	$(build-systemotherimage-target)
 
 endif # BUILDING_SYSTEM_OTHER_IMAGE
@@ -3705,18 +3804,22 @@
   $(call generate-image-prop-dictionary, $(vendorimage_intermediates)/vendor_image_info.txt,vendor,skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(vendorimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_VENDOR) $(vendorimage_intermediates)/vendor_image_info.txt \
           $(INSTALLED_VENDORIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET) $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_VENDORIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(vendorimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_VENDOR)/,,$(filter $(TARGET_OUT_VENDOR)/%,$(INTERNAL_VENDORIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_VENDORIMAGE_TARGET := $(BUILT_VENDORIMAGE_TARGET)
 $(INSTALLED_VENDORIMAGE_TARGET): \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_VENDORIMAGE_FILES) \
     $(INSTALLED_FILES_FILE_VENDOR) \
-    $(RECOVERY_FROM_BOOT_PATCH)
+    $(RECOVERY_FROM_BOOT_PATCH) \
+    $(vendorimage_intermediates)/file_list.txt
 	$(build-vendorimage-target)
 
 VENDOR_NOTICE_DEPS += $(INSTALLED_VENDORIMAGE_TARGET)
@@ -3725,7 +3828,7 @@
 $(call declare-container-license-deps,$(INSTALLED_VENDORIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES) $(RECOVERY_FROM_BOOT_PATH),$(PRODUCT_OUT)/:/)
 
 .PHONY: vendorimage-nodeps vnod
-vendorimage-nodeps vnod: | $(INTERNAL_USERIMAGES_DEPS)
+vendorimage-nodeps vnod: | $(INTERNAL_USERIMAGES_DEPS) $(vendorimage_intermediates)/file_list.txt
 	$(build-vendorimage-target)
 
 .PHONY: sync_vendor
@@ -3771,17 +3874,21 @@
   $(call generate-image-prop-dictionary, $(productimage_intermediates)/product_image_info.txt,product,skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(productimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_PRODUCT) $(productimage_intermediates)/product_image_info.txt \
           $(INSTALLED_PRODUCTIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_PRODUCTIMAGE_TARGET),$(BOARD_PRODUCTIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(productimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_PRODUCT)/,,$(filter $(TARGET_OUT_PRODUCT)/%,$(INTERNAL_PRODUCTIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_PRODUCTIMAGE_TARGET := $(BUILT_PRODUCTIMAGE_TARGET)
 $(INSTALLED_PRODUCTIMAGE_TARGET): \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_PRODUCTIMAGE_FILES) \
-    $(INSTALLED_FILES_FILE_PRODUCT)
+    $(INSTALLED_FILES_FILE_PRODUCT) \
+    $(productimage_intermediates)/file_list.txt
 	$(build-productimage-target)
 
 PRODUCT_NOTICE_DEPS += $(INSTALLED_PRODUCTIMAGE_TARGET)
@@ -3790,7 +3897,7 @@
 $(call declare-container-license-deps,$(INSTALLED_PRODUCTIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_PRODUCTIMAGE_FILES) $(INSTALLED_FILES_FILE_PRODUCT),$(PRODUCT_OUT)/:/)
 
 .PHONY: productimage-nodeps pnod
-productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS)
+productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS) $(productimage_intermediates)/file_list.txt
 	$(build-productimage-target)
 
 .PHONY: sync_product
@@ -3832,6 +3939,7 @@
   $(call generate-image-prop-dictionary, $(system_extimage_intermediates)/system_ext_image_info.txt,system_ext, skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(system_extimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_SYSTEM_EXT) \
           $(system_extimage_intermediates)/system_ext_image_info.txt \
           $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
@@ -3839,12 +3947,15 @@
   $(call assert-max-image-size,$(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET),$(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(system_extimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_SYSTEM_EXT)/,,$(filter $(TARGET_OUT_SYSTEM_EXT)/%,$(INTERNAL_SYSTEM_EXTIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_SYSTEM_EXTIMAGE_TARGET := $(BUILT_SYSTEM_EXTIMAGE_TARGET)
 $(INSTALLED_SYSTEM_EXTIMAGE_TARGET): \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
-    $(INSTALLED_FILES_FILE_SYSTEM_EXT)
+    $(INSTALLED_FILES_FILE_SYSTEM_EXT) \
+    $(system_extimage_intermediates)/file_list.txt
 	$(build-system_extimage-target)
 
 SYSTEM_EXT_NOTICE_DEPS += $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
@@ -3853,7 +3964,7 @@
 $(call declare-container-license-deps,$(INSTALLED_SYSTEM_EXTIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_SYSTEM_EXTIMAGE_FILES) $(INSTALLED_FILES_FILE_SYSTEM_EXT),$(PRODUCT_OUT)/:/)
 
 .PHONY: systemextimage-nodeps senod
-systemextimage-nodeps senod: | $(INTERNAL_USERIMAGES_DEPS)
+systemextimage-nodeps senod: | $(INTERNAL_USERIMAGES_DEPS) $(system_extimage_intermediates)/file_list.txt
 	$(build-system_extimage-target)
 
 .PHONY: sync_system_ext
@@ -3916,17 +4027,21 @@
 	  skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(odmimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_ODM) $(odmimage_intermediates)/odm_image_info.txt \
           $(INSTALLED_ODMIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_ODMIMAGE_TARGET),$(BOARD_ODMIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(odmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_ODM)/,,$(filter $(TARGET_OUT_ODM)/%,$(INTERNAL_ODMIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_ODMIMAGE_TARGET := $(BUILT_ODMIMAGE_TARGET)
 $(INSTALLED_ODMIMAGE_TARGET): \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_ODMIMAGE_FILES) \
-    $(INSTALLED_FILES_FILE_ODM)
+    $(INSTALLED_FILES_FILE_ODM) \
+    $(odmimage_intermediates)/file_list.txt
 	$(build-odmimage-target)
 
 ODM_NOTICE_DEPS += $(INSTALLED_ODMIMAGE_TARGET)
@@ -3935,7 +4050,7 @@
 $(call declare-container-license-deps,$(INSTALLED_ODMIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_ODMIMAGE_FILES) $(INSTALLED_FILES_FILE_ODM),$(PRODUCT_OUT)/:/)
 
 .PHONY: odmimage-nodeps onod
-odmimage-nodeps onod: | $(INTERNAL_USERIMAGES_DEPS)
+odmimage-nodeps onod: | $(INTERNAL_USERIMAGES_DEPS) $(odmimage_intermediates)/file_list.txt
 	$(build-odmimage-target)
 
 .PHONY: sync_odm
@@ -3978,17 +4093,21 @@
 	  vendor_dlkm, skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(vendor_dlkmimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_VENDOR_DLKM) $(vendor_dlkmimage_intermediates)/vendor_dlkm_image_info.txt \
           $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_VENDOR_DLKMIMAGE_TARGET),$(BOARD_VENDOR_DLKMIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(vendor_dlkmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_VENDOR_DLKM)/,,$(filter $(TARGET_OUT_VENDOR_DLKM)/%,$(INTERNAL_VENDOR_DLKMIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_VENDOR_DLKMIMAGE_TARGET := $(BUILT_VENDOR_DLKMIMAGE_TARGET)
 $(INSTALLED_VENDOR_DLKMIMAGE_TARGET): \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
-    $(INSTALLED_FILES_FILE_VENDOR_DLKM)
+    $(INSTALLED_FILES_FILE_VENDOR_DLKM) \
+    $(vendor_dlkmimage_intermediates)/file_list.txt
 	$(build-vendor_dlkmimage-target)
 
 VENDOR_DLKM_NOTICE_DEPS += $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
@@ -3997,7 +4116,7 @@
 $(call declare-container-license-deps,$(INSTALLED_VENDOR_DLKMIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDOR_DLKMIMAGE_FILES) $(INSTALLED_FILES_FILE_VENDOR_DLKM),$(PRODUCT_OUT)/:/)
 
 .PHONY: vendor_dlkmimage-nodeps vdnod
-vendor_dlkmimage-nodeps vdnod: | $(INTERNAL_USERIMAGES_DEPS)
+vendor_dlkmimage-nodeps vdnod: | $(INTERNAL_USERIMAGES_DEPS) $(vendor_dlkmimage_intermediates)/file_list.txt
 	$(build-vendor_dlkmimage-target)
 
 .PHONY: sync_vendor_dlkm
@@ -4040,17 +4159,21 @@
 	  odm_dlkm, skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(odm_dlkmimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_ODM_DLKM) $(odm_dlkmimage_intermediates)/odm_dlkm_image_info.txt \
           $(INSTALLED_ODM_DLKMIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_ODM_DLKMIMAGE_TARGET),$(BOARD_ODM_DLKMIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(odm_dlkmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_ODM_DLKM)/,,$(filter $(TARGET_OUT_ODM_DLKM)/%,$(INTERNAL_ODM_DLKMIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_ODM_DLKMIMAGE_TARGET := $(BUILT_ODM_DLKMIMAGE_TARGET)
 $(INSTALLED_ODM_DLKMIMAGE_TARGET): \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_ODM_DLKMIMAGE_FILES) \
-    $(INSTALLED_FILES_FILE_ODM_DLKM)
+    $(INSTALLED_FILES_FILE_ODM_DLKM) \
+    $(odm_dlkmimage_intermediates)/file_list.txt
 	$(build-odm_dlkmimage-target)
 
 ODM_DLKM_NOTICE_DEPS += $(INSTALLED_ODM_DLKMIMAGE_TARGET)
@@ -4059,7 +4182,7 @@
 $(call declare-container-license-deps,$(INSTALLED_ODM_DLKMIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_ODM_DLKMIMAGE_FILES) $(INSTALLED_FILES_FILE_ODM_DLKM),$(PRODUCT_OUT)/:/)
 
 .PHONY: odm_dlkmimage-nodeps odnod
-odm_dlkmimage-nodeps odnod: | $(INTERNAL_USERIMAGES_DEPS)
+odm_dlkmimage-nodeps odnod: | $(INTERNAL_USERIMAGES_DEPS) $(odm_dlkmimage_intermediates)/file_list.txt
 	$(build-odm_dlkmimage-target)
 
 .PHONY: sync_odm_dlkm
@@ -4104,17 +4227,21 @@
 	  system_dlkm, skip_fsck=true)
   PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       $(BUILD_IMAGE) \
+          $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),,--input-directory-filter-file $(system_dlkmimage_intermediates)/file_list.txt) \
           $(TARGET_OUT_SYSTEM_DLKM) $(system_dlkmimage_intermediates)/system_dlkm_image_info.txt \
           $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) $(TARGET_OUT)
   $(call assert-max-image-size,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(BOARD_SYSTEM_DLKMIMAGE_PARTITION_SIZE))
 endef
 
+$(eval $(call write-file-lines,$(system_dlkmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_SYSTEM_DLKM)/,,$(filter $(TARGET_OUT_SYSTEM_DLKM)/%,$(INTERNAL_SYSTEM_DLKMIMAGE_FILES)))))
+
 # We just build this directly to the install location.
 INSTALLED_SYSTEM_DLKMIMAGE_TARGET := $(BUILT_SYSTEM_DLKMIMAGE_TARGET)
 $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET): \
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) \
-    $(INSTALLED_FILES_FILE_SYSTEM_DLKM)
+    $(INSTALLED_FILES_FILE_SYSTEM_DLKM) \
+    $(system_dlkmimage_intermediates)/file_list.txt
 	$(build-system_dlkmimage-target)
 
 SYSTEM_DLKM_NOTICE_DEPS += $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
@@ -4123,7 +4250,7 @@
 $(call declare-container-license-deps,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) $(INSTALLED_FILES_FILE_SYSTEM_DLKM),$(PRODUCT_OUT)/:/)
 
 .PHONY: system_dlkmimage-nodeps sdnod
-system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
+system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS) $(system_dlkmimage_intermediates)/file_list.txt
 	$(build-system_dlkmimage-target)
 
 .PHONY: sync_system_dlkm
@@ -4169,7 +4296,7 @@
 
 INSTALLED_PVMFWIMAGE_TARGET := $(PRODUCT_OUT)/pvmfw.img
 INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET := $(PRODUCT_OUT)/pvmfw_embedded.avbpubkey
-INSTALLED_PVMFW_BINARY_TARGET := $(call module-installed-files,pvmfw_bin)
+INSTALLED_PVMFW_BINARY_TARGET := $(call module-target-built-files,pvmfw_bin)
 INTERNAL_PVMFWIMAGE_FILES := $(call module-target-built-files,pvmfw_img)
 INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_embedded_key)
 INTERNAL_PVMFW_SYMBOL := $(TARGET_OUT_EXECUTABLES_UNSTRIPPED)/pvmfw
@@ -4207,6 +4334,13 @@
 
 ifneq ($(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)),)
 INTERNAL_AVB_CUSTOMIMAGES_SIGNING_ARGS :=
+BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST :=
+# If BOARD_AVB_$(call to-upper,$(partition))_KEY_PATH is set, the partition is added to
+# BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST; otherwise its images won't be AVB signed.
+$(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
+	$(if $(BOARD_AVB_$(call to-upper,$(partition))_KEY_PATH), \
+	$(eval BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST += $(partition)) \
+	$(eval BOARD_$(call to-upper,$(partition))_IMAGE_LIST := $(BOARD_AVB_$(call to-upper,$(partition))_IMAGE_LIST))))
 
 # Sign custom image.
 # $(1): the prebuilt custom image.
@@ -4231,9 +4365,26 @@
 INSTALLED_CUSTOMIMAGES_TARGET += $(3)
 endef
 
-$(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
+# Copy unsigned custom image.
+# $(1): the prebuilt custom image.
+# $(2): the copied custom image target.
+define copy_custom_image
+$(2): $(1) $(INTERNAL_USERIMAGES_DEPS)
+	@echo Target custom image: $(2)
+	mkdir -p $(dir $(2))
+	cp $(1) $(2)
+INSTALLED_CUSTOMIMAGES_TARGET += $(2)
+endef
+
+# Add AVB custom image to droid target
+$(foreach partition,$(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), \
   $(foreach image,$(BOARD_AVB_$(call to-upper,$(partition))_IMAGE_LIST), \
      $(eval $(call sign_custom_image,$(image),$(partition),$(PRODUCT_OUT)/$(notdir $(image))))))
+
+# Add unsigned custom image to droid target
+$(foreach partition,$(filter-out $(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), $(BOARD_CUSTOMIMAGES_PARTITION_LIST)), \
+  $(foreach image,$(BOARD_$(call to-upper,$(partition))_IMAGE_LIST), \
+     $(eval $(call copy_custom_image,$(image),$(PRODUCT_OUT)/$(notdir $(image))))))
 endif
 
 # -----------------------------------------------------------------
@@ -4510,7 +4661,9 @@
 $(eval part := $(1))
 $(eval PART=$(call to-upper,$(part)))
 $(eval _rollback_index_location := BOARD_AVB_$(PART)_ROLLBACK_INDEX_LOCATION)
+$(eval _key_path := BOARD_AVB_$(PART)_KEY_PATH)
 $(if $($(_rollback_index_location)),,$(error $(_rollback_index_location) is not defined))
+$(if $($(_key_path)),,$(error $(_key_path) is not defined))
 
 INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
     --chain_partition $(part):$($(_rollback_index_location)):$(AVB_CHAIN_KEY_DIR)/$(part).avbpubkey
@@ -4590,8 +4743,8 @@
 $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),$(eval BOARD_AVB_MAKE_VBMETA_$(call to-upper,$(partition))_IMAGE_ARGS += --padding_size 4096))
 endif
 
-ifneq ($(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)),)
-$(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
+ifneq ($(strip $(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST)),)
+$(foreach partition,$(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), \
     $(eval $(call check-and-set-custom-avb-chain-args,$(partition))))
 endif
 
@@ -4678,8 +4831,8 @@
   $(if $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH) \
         --output $(1)/vbmeta_vendor.avbpubkey)
-  $(if $(BOARD_CUSTOMIMAGES_PARTITION_LIST),\
-    $(hide) $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
+  $(if $(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST),\
+    $(hide) $(foreach partition,$(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), \
         $(AVBTOOL) extract_public_key --key $(BOARD_AVB_$(call to-upper,$(partition))_KEY_PATH) \
             --output $(1)/$(partition).avbpubkey;)) \
   $(if $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),\
@@ -4826,6 +4979,50 @@
     $(INTERNAL_PVMFWIMAGE_FILES) \
 
 # -----------------------------------------------------------------
+# Run apex_sepolicy_tests for all installed APEXes
+
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+intermediate := $(call intermediates-dir-for,PACKAGING,apex_sepolicy_tests)
+apex_dirs := \
+  $(TARGET_OUT)/apex/% \
+  $(TARGET_OUT_SYSTEM_EXT)/apex/% \
+  $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_PRODUCT)/apex/% \
+
+apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
+apex_dirs :=
+
+# $1: apex file
+# $2: output file
+define _run_apex_sepolicy_tests
+$2: $1 \
+    $(HOST_OUT_EXECUTABLES)/apex_sepolicy_tests \
+    $(HOST_OUT_EXECUTABLES)/deapexer \
+    $(HOST_OUT_EXECUTABLES)/debugfs_static
+	@rm -rf $$@
+	@mkdir -p $(dir $$@)
+	$(HOST_OUT_EXECUTABLES)/apex_sepolicy_tests --all -f <($(HOST_OUT_EXECUTABLES)/deapexer --debugfs_path $(HOST_OUT_EXECUTABLES)/debugfs_static list -Z $$<)
+	@touch $$@
+endef
+
+# $1: apex file list
+define run_apex_sepolicy_tests
+$(foreach apex_file,$1, \
+  $(eval passfile := $(patsubst $(PRODUCT_OUT)/%,$(intermediate)/%.pass,$(apex_file))) \
+  $(eval $(call _run_apex_sepolicy_tests,$(apex_file),$(passfile))) \
+  $(passfile))
+endef
+
+.PHONY: run_apex_sepolicy_tests
+run_apex_sepolicy_tests: $(call run_apex_sepolicy_tests,$(apex_files))
+
+droid_targets: run_apex_sepolicy_tests
+
+apex_files :=
+intermediate :=
+endif # TARGET_BUILD_UNBUNDLED
+
+# -----------------------------------------------------------------
 # Check VINTF of build
 
 # Note: vendor_dlkm, odm_dlkm, and system_dlkm does not have VINTF files.
@@ -4940,40 +5137,48 @@
 
 my_board_extracted_kernel :=
 
-# BOARD_KERNEL_CONFIG_FILE and BOARD_KERNEL_VERSION can be used to override the values extracted
-# from INSTALLED_KERNEL_TARGET.
-ifdef BOARD_KERNEL_CONFIG_FILE
-ifdef BOARD_KERNEL_VERSION
-$(BUILT_KERNEL_CONFIGS_FILE): $(BOARD_KERNEL_CONFIG_FILE)
-	cp $< $@
-$(BUILT_KERNEL_VERSION_FILE):
-	echo $(BOARD_KERNEL_VERSION) > $@
-
-$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
-$(call declare-license-metadata,$(BUILT_KERNEL_VERSION_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
-
-my_board_extracted_kernel := true
-endif # BOARD_KERNEL_VERSION
-endif # BOARD_KERNEL_CONFIG_FILE
-
-ifneq ($(my_board_extracted_kernel),true)
 # Tools for decompression that is not in PATH.
 # Check $(EXTRACT_KERNEL) for decompression algorithms supported by the script.
 # Algorithms that are in the script but not in this list will be found in PATH.
 my_decompress_tools := \
     lz4:$(HOST_OUT_EXECUTABLES)/lz4 \
 
-endif # my_board_extracted_kernel
+
+# BOARD_KERNEL_CONFIG_FILE and BOARD_KERNEL_VERSION can be used to override the values extracted
+# from INSTALLED_KERNEL_TARGET.
+ifdef BOARD_KERNEL_VERSION
+$(BUILT_KERNEL_VERSION_FILE): PRIVATE_DECOMPRESS_TOOLS := $(my_decompress_tools)
+$(BUILT_KERNEL_VERSION_FILE): $(foreach pair,$(my_decompress_tools),$(call word-colon,2,$(pair)))
+$(BUILT_KERNEL_VERSION_FILE): $(EXTRACT_KERNEL) $(firstword $(INSTALLED_KERNEL_TARGET))
+	KERNEL_RELEASE=`$(EXTRACT_KERNEL) --tools $(PRIVATE_DECOMPRESS_TOOLS) --input $(firstword $(INSTALLED_KERNEL_TARGET)) \
+	  --output-release` ;\
+  if [ "$$KERNEL_RELEASE" != '$(BOARD_KERNEL_VERSION)' ]; then \
+    echo "Specified kernel version '$(BOARD_KERNEL_VERSION)' does not match actual kernel version '$$KERNEL_RELEASE' " ; exit 1; fi;
+	echo '$(BOARD_KERNEL_VERSION)' > $@
+
+ifdef BOARD_KERNEL_CONFIG_FILE
+$(BUILT_KERNEL_CONFIGS_FILE): $(BOARD_KERNEL_CONFIG_FILE)
+	cp $< $@
+
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
+$(call declare-license-metadata,$(BUILT_KERNEL_VERSION_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
+
+my_board_extracted_kernel := true
+endif # BOARD_KERNEL_CONFIG_FILE
+endif # BOARD_KERNEL_VERSION
+
 
 ifneq ($(my_board_extracted_kernel),true)
 ifdef INSTALLED_KERNEL_TARGET
+ifndef BOARD_KERNEL_VERSION
 $(BUILT_KERNEL_CONFIGS_FILE): .KATI_IMPLICIT_OUTPUTS := $(BUILT_KERNEL_VERSION_FILE)
+endif
 $(BUILT_KERNEL_CONFIGS_FILE): PRIVATE_DECOMPRESS_TOOLS := $(my_decompress_tools)
 $(BUILT_KERNEL_CONFIGS_FILE): $(foreach pair,$(my_decompress_tools),$(call word-colon,2,$(pair)))
 $(BUILT_KERNEL_CONFIGS_FILE): $(EXTRACT_KERNEL) $(firstword $(INSTALLED_KERNEL_TARGET))
 	$< --tools $(PRIVATE_DECOMPRESS_TOOLS) --input $(firstword $(INSTALLED_KERNEL_TARGET)) \
 	  --output-configs $@ \
-	  --output-release $(BUILT_KERNEL_VERSION_FILE)
+	  $(if $(BOARD_KERNEL_VERSION),,--output-release $(BUILT_KERNEL_VERSION_FILE))
 
 $(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
@@ -5293,6 +5498,7 @@
   toybox \
   tune2fs \
   unpack_bootimg \
+  update_device \
   update_host_simulator \
   validate_target_files \
   verity_signer \
@@ -5434,7 +5640,7 @@
 	$(hide) echo "flash vbmeta_system" >> $@
 endif
 ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
-	$(hide) echo "flash --apply-vbmeta vbmeta_vendor" >> $@
+	$(hide) echo "flash vbmeta_vendor" >> $@
 endif
 ifneq (,$(strip $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)))
 	$(hide) $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS), \
@@ -5599,15 +5805,20 @@
 	$(hide) echo "avb_recovery_rollback_index_location=$(BOARD_AVB_RECOVERY_ROLLBACK_INDEX_LOCATION)" >> $@
 endif # BOARD_AVB_RECOVERY_KEY_PATH
 ifneq (,$(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)))
-	$(hide) echo "avb_custom_images_partition_list=$(BOARD_CUSTOMIMAGES_PARTITION_LIST)" >> $@
-	$(hide) $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
+	$(hide) echo "custom_images_partition_list=$(filter-out $(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), $(BOARD_CUSTOMIMAGES_PARTITION_LIST))" >> $@
+	$(hide) $(foreach partition,$(filter-out $(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), $(BOARD_CUSTOMIMAGES_PARTITION_LIST)), \
+	    echo "$(partition)_image_list=$(foreach image,$(BOARD_$(call to-upper,$(partition))_IMAGE_LIST),$(notdir $(image)))" >> $@;)
+endif # BOARD_CUSTOMIMAGES_PARTITION_LIST
+ifneq (,$(strip $(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST)))
+	$(hide) echo "avb_custom_images_partition_list=$(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST)" >> $@
+	$(hide) $(foreach partition,$(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), \
 	    echo "avb_$(partition)_key_path=$(BOARD_AVB_$(call to-upper,$(partition))_KEY_PATH)"  >> $@; \
 	    echo "avb_$(partition)_algorithm=$(BOARD_AVB_$(call to-upper,$(partition))_ALGORITHM)"  >> $@; \
 	    echo "avb_$(partition)_add_hashtree_footer_args=$(BOARD_AVB_$(call to-upper,$(partition))_ADD_HASHTREE_FOOTER_ARGS)"  >> $@; \
 	    echo "avb_$(partition)_rollback_index_location=$(BOARD_AVB_$(call to-upper,$(partition))_ROLLBACK_INDEX_LOCATION)"  >> $@; \
 	    echo "avb_$(partition)_partition_size=$(BOARD_AVB_$(call to-upper,$(partition))_PARTITION_SIZE)"  >> $@; \
 	    echo "avb_$(partition)_image_list=$(foreach image,$(BOARD_AVB_$(call to-upper,$(partition))_IMAGE_LIST),$(notdir $(image)))" >> $@;)
-endif # BOARD_CUSTOMIMAGES_PARTITION_LIST
+endif # BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST
 ifneq (,$(strip $(BOARD_AVB_VBMETA_SYSTEM)))
 	$(hide) echo "avb_vbmeta_system=$(BOARD_AVB_VBMETA_SYSTEM)" >> $@
 	$(hide) echo "avb_vbmeta_system_args=$(BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS)" >> $@
@@ -5694,9 +5905,6 @@
 ifeq ($(BUILDING_WITH_VSDK),true)
 	$(hide) echo "building_with_vsdk=true" >> $@
 endif
-ifeq ($(TARGET_FLATTEN_APEX),false)
-	$(hide) echo "target_flatten_apex=false" >> $@
-endif
 
 $(call declare-0p-target,$(INSTALLED_FASTBOOT_INFO_TARGET))
 
@@ -5741,6 +5949,20 @@
   fi
 endef
 
+# This is the same as the non-hermetic version, but also accepts a list of files in the directory
+# to copy. It will only copy those files. This is so that we don't copy extra files that could've
+# been built in the staging directories by prior builds.
+# $(1): Directory to copy
+# $(2): Location to copy it to
+# $(3): A list of files in the $(1) directory, only these files will be copied
+define package_files-copy-root-hermetic
+  $(if $(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES),
+    $(call package_files-copy-root,$(1),$(2)),
+    $(foreach f,$(filter $(strip $(1))/%,$(3)), \
+      mkdir -p $(strip $(2))/$(dir $(patsubst $(strip $(1))/%,%,$(f)))$(newline) \
+      $(ACP) -d $(f) $(strip $(2))/$(patsubst $(strip $(1))/%,%,$(f))$(newline)))
+endef
+
 built_ota_tools :=
 
 # We can't build static executables when SANITIZE_TARGET=address
@@ -6090,7 +6312,7 @@
 	    $(BUILT_KERNEL_VERSION_FILE) \
 	    | $(ACP)
 	@echo "Building target files: $@"
-	$(hide) rm -rf $@ $@.list $(zip_root)
+	$(hide) rm -rf $@ $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
 ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))$(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
 	@# Components of the recovery image
@@ -6217,8 +6439,8 @@
 endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET
 ifdef BUILDING_SYSTEM_IMAGE
 	@# Contents of the system image
-	$(hide) $(call package_files-copy-root, \
-	    $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM,$(FULL_SYSTEMIMAGE_DEPS))
 else ifdef INSTALLED_BUILD_PROP_TARGET
 	@# Copy the system build.prop even if not building a system image
 	@# because add_img_to_target_files may need it to build other partition
@@ -6228,48 +6450,48 @@
 endif
 ifdef BUILDING_USERDATA_IMAGE
 	@# Contents of the data image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_DATA),$(zip_root)/DATA)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_DATA),$(zip_root)/DATA,$(INSTALLED_USERDATAIMAGE_TARGET_DEPS))
 endif
 ifdef BUILDING_VENDOR_IMAGE
 	@# Contents of the vendor image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_VENDOR),$(zip_root)/VENDOR)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_VENDOR),$(zip_root)/VENDOR,$(INTERNAL_VENDORIMAGE_FILES))
 endif
 ifdef BUILDING_PRODUCT_IMAGE
 	@# Contents of the product image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_PRODUCT),$(zip_root)/PRODUCT)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_PRODUCT),$(zip_root)/PRODUCT,$(INTERNAL_PRODUCTIMAGE_FILES))
 endif
 ifdef BUILDING_SYSTEM_EXT_IMAGE
 	@# Contents of the system_ext image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_SYSTEM_EXT),$(zip_root)/SYSTEM_EXT)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_SYSTEM_EXT),$(zip_root)/SYSTEM_EXT,$(INTERNAL_SYSTEM_EXTIMAGE_FILES))
 endif
 ifdef BUILDING_ODM_IMAGE
 	@# Contents of the odm image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_ODM),$(zip_root)/ODM)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_ODM),$(zip_root)/ODM,$(INTERNAL_ODMIMAGE_FILES))
 endif
 ifdef BUILDING_VENDOR_DLKM_IMAGE
 	@# Contents of the vendor_dlkm image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_VENDOR_DLKM),$(zip_root)/VENDOR_DLKM)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_VENDOR_DLKM),$(zip_root)/VENDOR_DLKM,$(INTERNAL_VENDOR_DLKMIMAGE_FILES))
 endif
 ifdef BUILDING_ODM_DLKM_IMAGE
 	@# Contents of the odm_dlkm image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_ODM_DLKM),$(zip_root)/ODM_DLKM)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_ODM_DLKM),$(zip_root)/ODM_DLKM,$(INTERNAL_ODM_DLKMIMAGE_FILES))
 endif
 ifdef BUILDING_SYSTEM_DLKM_IMAGE
 	@# Contents of the system_dlkm image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_SYSTEM_DLKM),$(zip_root)/SYSTEM_DLKM)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_SYSTEM_DLKM),$(zip_root)/SYSTEM_DLKM,$(INTERNAL_SYSTEM_DLKMIMAGE_FILES))
 endif
 ifdef BUILDING_SYSTEM_OTHER_IMAGE
 	@# Contents of the system_other image
-	$(hide) $(call package_files-copy-root, \
-	    $(TARGET_OUT_SYSTEM_OTHER),$(zip_root)/SYSTEM_OTHER)
+	$(hide) $(call package_files-copy-root-hermetic, \
+	    $(TARGET_OUT_SYSTEM_OTHER),$(zip_root)/SYSTEM_OTHER,$(INTERNAL_SYSTEMOTHERIMAGE_FILES))
 endif
 	@# Extra contents of the OTA package
 	$(hide) mkdir -p $(zip_root)/OTA
@@ -6358,7 +6580,8 @@
 endif
 ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
 	@# If breakpad symbols have been generated, add them to the zip.
-	$(hide) cp -R $(TARGET_OUT_BREAKPAD) $(zip_root)/BREAKPAD
+	$(call package_files-copy-root, \
+	    $(TARGET_OUT_BREAKPAD),$(zip_root)/BREAKPAD)
 endif
 ifdef BOARD_PREBUILT_VENDORIMAGE
 	$(hide) mkdir -p $(zip_root)/IMAGES
@@ -6430,7 +6653,7 @@
 ifneq ($(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)),)
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
-	    $(foreach image,$(BOARD_AVB_$(call to-upper,$(partition))_IMAGE_LIST),cp $(image) $(zip_root)/PREBUILT_IMAGES/;))
+	    $(foreach image,$(BOARD_$(call to-upper,$(partition))_IMAGE_LIST),cp $(image) $(zip_root)/PREBUILT_IMAGES/;))
 endif # BOARD_CUSTOMIMAGES_PARTITION_LIST
 	@# The radio images in BOARD_PACK_RADIOIMAGES will be additionally copied from RADIO/ into
 	@# IMAGES/, which then will be added into <product>-img.zip. Such images must be listed in
@@ -6854,6 +7077,14 @@
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
   $(JACOCO_REPORT_CLASSES_ALL): $(INTERNAL_ALLIMAGES_FILES)
 endif
+
+# This is not ideal, but it is difficult to correctly figure out the actual jacoco report
+# jars we need to add here as dependencies, so we add the device-tests as a dependency when
+# the env variable is set. This should guarantee that all the jacoco report jars are ready
+# when we package the final report jar here.
+ifeq ($(JACOCO_PACKAGING_INCLUDE_DEVICE_TESTS),true)
+  $(JACOCO_REPORT_CLASSES_ALL): $(COMPATIBILITY.device-tests.FILES)
+endif
 endif # EMMA_INSTRUMENT=true
 
 
@@ -6865,29 +7096,34 @@
 # finding the appropriate dictionary to deobfuscate a stack trace frame.
 #
 
-# The path to the zip file containing proguard dictionaries.
-PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict.zip
-# The path to the zip file containing mappings from dictionary hashes to filenames.
-PROGUARD_DICT_MAPPING := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-mapping.textproto
-.KATI_READONLY := PROGUARD_DICT_ZIP PROGUARD_DICT_MAPPING
-# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(PROGUARD_DICT_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
+  _proguard_dict_zip_modules := $(call product-installed-modules,$(INTERNAL_PRODUCT))
+else
+  _proguard_dict_zip_modules := $(unbundled_build_modules)
 endif
-$(PROGUARD_DICT_ZIP): PRIVATE_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary)
-$(PROGUARD_DICT_ZIP): PRIVATE_MAPPING_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)
-$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_filelist)/filelist
-$(PROGUARD_DICT_ZIP): $(SOONG_ZIP) $(SYMBOLS_MAP)
+
+# The path to the zip file containing proguard dictionaries.
+PROGUARD_DICT_ZIP :=$= $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict.zip
+$(PROGUARD_DICT_ZIP): PRIVATE_SOONG_ZIP_ARGUMENTS := $(foreach m,$(_proguard_dict_zip_modules),$(ALL_MODULES.$(m).PROGUARD_DICTIONARY_SOONG_ZIP_ARGUMENTS))
+$(PROGUARD_DICT_ZIP): $(SOONG_ZIP) $(foreach m,$(_proguard_dict_zip_modules),$(ALL_MODULES.$(m).PROGUARD_DICTIONARY_FILES))
 	@echo "Packaging Proguard obfuscation dictionary files."
-	rm -rf $@ $(PRIVATE_LIST_FILE)
-	mkdir -p $(PRIVATE_PACKAGING_DIR) $(PRIVATE_MAPPING_PACKAGING_DIR) $(dir $(PRIVATE_LIST_FILE))
-	# Zip all of the files in the proguard dictionary directory.
-	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -D $(PRIVATE_PACKAGING_DIR)
-	# Find all of the files in the proguard dictionary mapping directory and merge them into the mapping textproto.
-	# Strip the PRIVATE_PACKAGING_DIR off the filenames to match soong_zip's -C argument.
-	$(hide) find -L $(PRIVATE_MAPPING_PACKAGING_DIR) -type f | sort >$(PRIVATE_LIST_FILE)
-	$(SYMBOLS_MAP) -merge $(PROGUARD_DICT_MAPPING) -strip_prefix $(PRIVATE_PACKAGING_DIR)/ -ignore_missing_files @$(PRIVATE_LIST_FILE)
-$(PROGUARD_DICT_ZIP): .KATI_IMPLICIT_OUTPUTS := $(PROGUARD_DICT_MAPPING)
+	# Zip all of the files in PROGUARD_DICTIONARY_FILES.
+	echo -n > $@.tmparglist
+	$(foreach arg,$(PRIVATE_SOONG_ZIP_ARGUMENTS),printf "%s\n" "$(arg)" >> $@.tmparglist$(newline))
+	$(SOONG_ZIP) -d -o $@ @$@.tmparglist
+	rm -f $@.tmparglist
+
+# The path to the zip file containing mappings from dictionary hashes to filenames.
+PROGUARD_DICT_MAPPING :=$= $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-mapping.textproto
+_proguard_dict_mapping_files := $(foreach m,$(_proguard_dict_zip_modules),$(ALL_MODULES.$(m).PROGUARD_DICTIONARY_MAPPING))
+$(PROGUARD_DICT_MAPPING): PRIVATE_MAPPING_FILES := $(_proguard_dict_mapping_files)
+$(PROGUARD_DICT_MAPPING): $(SYMBOLS_MAP) $(_proguard_dict_mapping_files)
+	@echo "Packaging Proguard obfuscation dictionary mapping files."
+	# Merge all the mapping files together
+	echo -n > $@.tmparglist
+	$(foreach mf,$(PRIVATE_MAPPING_FILES),echo "$(mf)" >> $@.tmparglist$(newline))
+	$(SYMBOLS_MAP) -merge $(PROGUARD_DICT_MAPPING) @$@.tmparglist
+	rm -f $@.tmparglist
 
 $(call declare-1p-container,$(PROGUARD_DICT_ZIP),)
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
@@ -6897,31 +7133,19 @@
 #------------------------------------------------------------------
 # A zip of Proguard usage files.
 #
-PROGUARD_USAGE_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-usage.zip
-# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
-ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(PROGUARD_USAGE_ZIP): \
-    $(INSTALLED_SYSTEMIMAGE_TARGET) \
-    $(INSTALLED_RAMDISK_TARGET) \
-    $(INSTALLED_BOOTIMAGE_TARGET) \
-    $(INSTALLED_INIT_BOOT_IMAGE_TARGET) \
-    $(INSTALLED_USERDATAIMAGE_TARGET) \
-    $(INSTALLED_VENDORIMAGE_TARGET) \
-    $(INSTALLED_PRODUCTIMAGE_TARGET) \
-    $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
-    $(INSTALLED_ODMIMAGE_TARGET) \
-    $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
-    $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
-    $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) \
-    $(updater_dep)
-endif
-$(PROGUARD_USAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_usage.zip)/filelist
-$(PROGUARD_USAGE_ZIP): PRIVATE_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_usage)
-$(PROGUARD_USAGE_ZIP): $(MERGE_ZIPS)
+PROGUARD_USAGE_ZIP :=$= $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-usage.zip
+_proguard_usage_zips := $(foreach m,$(_proguard_dict_zip_modules),$(ALL_MODULES.$(m).PROGUARD_USAGE_ZIP))
+$(PROGUARD_USAGE_ZIP): PRIVATE_ZIPS := $(_proguard_usage_zips)
+$(PROGUARD_USAGE_ZIP): $(MERGE_ZIPS) $(_proguard_usage_zips)
 	@echo "Packaging Proguard usage files."
-	mkdir -p $(dir $@) $(PRIVATE_PACKAGING_DIR) $(dir $(PRIVATE_LIST_FILE))
-	find $(PRIVATE_PACKAGING_DIR) -name proguard_usage.zip > $(PRIVATE_LIST_FILE)
-	$(MERGE_ZIPS) $@ @$(PRIVATE_LIST_FILE)
+	echo -n > $@.tmparglist
+	$(foreach z,$(PRIVATE_ZIPS),echo "$(z)" >> $@.tmparglist$(newline))
+	$(MERGE_ZIPS) $@ @$@.tmparglist
+	rm -rf $@.tmparglist
+
+_proguard_dict_mapping_files :=
+_proguard_usage_zips :=
+_proguard_dict_zip_modules :=
 
 $(call declare-1p-container,$(PROGUARD_USAGE_ZIP),)
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
@@ -7083,6 +7307,7 @@
 	PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$(dir $(ZIP2ZIP)):$$PATH \
 	    $(IMG_FROM_TARGET_FILES) \
 	        --additional IMAGES/VerifiedBootParams.textproto:VerifiedBootParams.textproto \
+	        --build_super_image $(BUILD_SUPER_IMAGE) \
 	        $(BUILT_TARGET_FILES_PACKAGE) $@
 
 $(call declare-1p-container,$(INTERNAL_UPDATE_PACKAGE_TARGET),)
diff --git a/core/OWNERS b/core/OWNERS
index eb1d5c3..88f6d06 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -5,3 +5,5 @@
 # For version updates
 per-file version_defaults.mk = aseaton@google.com,lubomir@google.com,pscovanner@google.com,bkhalife@google.com,jainne@google.com
 
+# For sdk extensions version updates
+per-file version_defaults.mk = amhk@google.com,gurpreetgs@google.com,mkhokhlova@google.com,robertogil@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index f132d13..23e1e2e 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -29,10 +29,6 @@
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_DRMSERVER)
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
-$(call add_soong_config_var,ANDROID,IS_TARGET_MIXED_SEPOLICY)
-ifeq ($(IS_TARGET_MIXED_SEPOLICY),true)
-$(call add_soong_config_var_value,ANDROID,MIXED_SEPOLICY_VERSION,$(BOARD_SEPOLICY_VERS))
-endif
 $(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
 $(call add_soong_config_var,ANDROID,BOARD_USES_RECOVERY_AS_BOOT)
 $(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
@@ -158,6 +154,16 @@
 $(call add_soong_config_var_value,ANDROID,avf_kernel_modules_enabled,$(PRODUCT_AVF_KERNEL_MODULES_ENABLED))
 endif
 
+$(call add_soong_config_var_value,ANDROID,release_avf_allow_preinstalled_apps,$(RELEASE_AVF_ALLOW_PREINSTALLED_APPS))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_device_assignment,$(RELEASE_AVF_ENABLE_DEVICE_ASSIGNMENT))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_dice_changes,$(RELEASE_AVF_ENABLE_DICE_CHANGES))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_llpvm_changes,$(RELEASE_AVF_ENABLE_LLPVM_CHANGES))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_multi_tenant_microdroid_vm,$(RELEASE_AVF_ENABLE_MULTI_TENANT_MICRODROID_VM))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_remote_attestation,$(RELEASE_AVF_ENABLE_REMOTE_ATTESTATION))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_vendor_modules,$(RELEASE_AVF_ENABLE_VENDOR_MODULES))
+
+$(call add_soong_config_var_value,ANDROID,release_binder_death_recipient_weak_from_jni,$(RELEASE_BINDER_DEATH_RECIPIENT_WEAK_FROM_JNI))
+
 # Enable system_server optimizations by default unless explicitly set or if
 # there may be dependent runtime jars.
 # TODO(b/240588226): Remove the off-by-default exceptions after handling
diff --git a/core/base_rules.mk b/core/base_rules.mk
index e7c28ec..3313b5f 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -521,10 +521,6 @@
   # copy of the intermediates for now, as some rules that collect intermediates may expect
   # them to exist.
   $(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
-
-  $(foreach symlink, $(LOCAL_SOONG_INSTALL_SYMLINKS), \
-    $(call declare-0p-target,$(symlink)))
-  $(my_all_targets) : | $(LOCAL_SOONG_INSTALL_SYMLINKS)
 else ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
   $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
   $(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
@@ -546,6 +542,15 @@
 
 endif # !LOCAL_UNINSTALLABLE_MODULE
 
+# Add dependencies on LOCAL_SOONG_INSTALL_SYMLINKS if we're installing any kind of module, not just
+# ones that set LOCAL_SOONG_INSTALLED_MODULE. This is so we can have a soong module that only
+# installs symlinks (e.g. install_symlink). We can't set LOCAL_SOONG_INSTALLED_MODULE to a symlink
+# because cp commands will fail on symlinks.
+ifneq (,$(or $(LOCAL_SOONG_INSTALLED_MODULE),$(call boolean-not,$(LOCAL_UNINSTALLABLE_MODULE))))
+  $(foreach symlink, $(LOCAL_SOONG_INSTALL_SYMLINKS), $(call declare-0p-target,$(symlink)))
+  $(my_all_targets) : | $(LOCAL_SOONG_INSTALL_SYMLINKS)
+endif
+
 ###########################################################
 ## VINTF manifest fragment and init.rc goals
 ###########################################################
@@ -965,6 +970,9 @@
       $(my_init_rc_installed) \
       $(my_installed_test_data) \
       $(my_vintf_installed))
+
+  ALL_MODULES.$(my_register_name).INSTALLED_SYMLINKS := $(LOCAL_SOONG_INSTALL_SYMLINKS)
+
   # Store the list of colon-separated pairs of the built and installed locations
   # of files provided by this module.  Used by custom packaging rules like
   # package-modules.mk that need to copy the built files to a custom install
@@ -998,6 +1006,16 @@
       $(my_init_rc_installed) \
       $(my_vintf_installed))
 endif
+
+# Mark LOCAL_SOONG_INSTALL_SYMLINKS as installed if we're installing any kind of module, not just
+# ones that set LOCAL_SOONG_INSTALLED_MODULE. This is so we can have a soong module that only
+# installs symlinks (e.g. install_symlink). We can't set LOCAL_SOONG_INSTALLED_MODULE to a symlink
+# because cp commands will fail on symlinks.
+ifneq (,$(or $(LOCAL_SOONG_INSTALLED_MODULE),$(call boolean-not,$(LOCAL_UNINSTALLABLE_MODULE))))
+  ALL_MODULES.$(my_register_name).INSTALLED += $(LOCAL_SOONG_INSTALL_SYMLINKS)
+  ALL_MODULES.$(my_register_name).INSTALLED_SYMLINKS := $(LOCAL_SOONG_INSTALL_SYMLINKS)
+endif
+
 ifdef LOCAL_PICKUP_FILES
 # Files or directories ready to pick up by the build system
 # when $(LOCAL_BUILT_MODULE) is done.
diff --git a/core/board_config.mk b/core/board_config.mk
index c3a6864..eb4c5ec 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -187,6 +187,7 @@
   BUILD_BROKEN_USES_NETWORK \
   BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE \
   BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
+  BUILD_BROKEN_INCORRECT_PARTITION_IMAGES \
 
 _build_broken_var_list += \
   $(foreach m,$(AVAILABLE_BUILD_MODULE_TYPES) \
@@ -971,24 +972,6 @@
 endif
 TARGET_VENDOR_TEST_SUFFIX := /vendor
 
-###########################################
-# APEXes are by default not flattened, i.e. updatable.
-#
-# APEX flattening can also be forcibly enabled (resp. disabled) by
-# setting OVERRIDE_TARGET_FLATTEN_APEX to true (resp. false), e.g. by
-# setting the OVERRIDE_TARGET_FLATTEN_APEX environment variable.
-ifdef OVERRIDE_TARGET_FLATTEN_APEX
-  TARGET_FLATTEN_APEX := $(OVERRIDE_TARGET_FLATTEN_APEX)
-endif
-
-# TODO(b/278826656) Remove the following message
-ifeq (true,$(TARGET_FLATTEN_APEX))
-  $(warning ********************************************************************************)
-  $(warning Flattened APEX will be deprecated soon. Please stop using flattened APEX and use)
-  $(warning "image" APEX instead.)
-  $(warning ********************************************************************************)
-endif
-
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 ifdef PRODUCT_EXTRA_VNDK_VERSIONS
   $(foreach v,$(PRODUCT_EXTRA_VNDK_VERSIONS),$(call check_vndk_version,$(v)))
diff --git a/core/config.mk b/core/config.mk
index e919be3..c339590 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -408,22 +408,6 @@
 $(if $(findstring ro.config.low_ram=true,$(PRODUCT_ODM_PROPERTIES)),true,false)))))))))
 endef
 
-# Get the board API level.
-board_api_level := $(PLATFORM_SDK_VERSION)
-ifdef BOARD_API_LEVEL
-  board_api_level := $(BOARD_API_LEVEL)
-else ifdef BOARD_SHIPPING_API_LEVEL
-  # Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
-  board_api_level := $(BOARD_SHIPPING_API_LEVEL)
-endif
-
-# Calculate the VSR vendor API level.
-vsr_vendor_api_level := $(board_api_level)
-
-ifdef PRODUCT_SHIPPING_API_LEVEL
-  vsr_vendor_api_level := $(call math_min,$(PRODUCT_SHIPPING_API_LEVEL),$(board_api_level))
-endif
-
 # Set TARGET_MAX_PAGE_SIZE_SUPPORTED.
 # TARGET_MAX_PAGE_SIZE_SUPPORTED indicates the alignment of the ELF segments.
 ifdef PRODUCT_MAX_PAGE_SIZE_SUPPORTED
@@ -435,23 +419,18 @@
   # The default binary alignment for userspace is 4096.
   TARGET_MAX_PAGE_SIZE_SUPPORTED := 4096
   # When VSR vendor API level >= 34, binary alignment will be 65536.
-  ifeq ($(call math_gt_or_eq,$(vsr_vendor_api_level),34),true)
+  ifeq ($(call math_gt_or_eq,$(VSR_VENDOR_API_LEVEL),34),true)
     ifeq ($(TARGET_ARCH),arm64)
       TARGET_MAX_PAGE_SIZE_SUPPORTED := 65536
     endif
-    ifeq ($(TARGET_ARCH),arm)
-      TARGET_MAX_PAGE_SIZE_SUPPORTED := 65536
-    endif
   endif
 endif
 .KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED
 
-# Check that TARGET_MAX_PAGE_SIZE_SUPPORTED is greater than 4096 only for ARM arch.
+# Only arm64 arch supports TARGET_MAX_PAGE_SIZE_SUPPORTED greater than 4096.
 ifneq ($(TARGET_MAX_PAGE_SIZE_SUPPORTED),4096)
   ifneq ($(TARGET_ARCH),arm64)
-    ifneq ($(TARGET_ARCH),arm)
-      $(error TARGET_MAX_PAGE_SIZE_SUPPORTED=$(TARGET_MAX_PAGE_SIZE_SUPPORTED) is greater than 4096. Only supported in ARM arch)
-    endif
+    $(error TARGET_MAX_PAGE_SIZE_SUPPORTED=$(TARGET_MAX_PAGE_SIZE_SUPPORTED) is greater than 4096. Only supported in arm64 arch)
   endif
 endif
 
@@ -745,6 +724,7 @@
 IMG_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/img_from_target_files$(HOST_EXECUTABLE_SUFFIX)
 MAKE_RECOVERY_PATCH := $(HOST_OUT_EXECUTABLES)/make_recovery_patch$(HOST_EXECUTABLE_SUFFIX)
 OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
+OTA_FROM_RAW_IMG := $(HOST_OUT_EXECUTABLES)/ota_from_raw_img$(HOST_EXECUTABLE_SUFFIX)
 SPARSE_IMG := $(HOST_OUT_EXECUTABLES)/sparse_img$(HOST_EXECUTABLE_SUFFIX)
 CHECK_PARTITION_SIZES := $(HOST_OUT_EXECUTABLES)/check_partition_sizes$(HOST_EXECUTABLE_SUFFIX)
 SYMBOLS_MAP := $(HOST_OUT_EXECUTABLES)/symbols_map
@@ -941,22 +921,15 @@
 BOARD_SEPOLICY_VERS := $(PLATFORM_SEPOLICY_VERSION)
 endif
 
-ifeq ($(BOARD_SEPOLICY_VERS),$(PLATFORM_SEPOLICY_VERSION))
-IS_TARGET_MIXED_SEPOLICY :=
-else
-IS_TARGET_MIXED_SEPOLICY := true
-endif
-
-.KATI_READONLY := IS_TARGET_MIXED_SEPOLICY
-
 # A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports.
-PLATFORM_SEPOLICY_COMPAT_VERSIONS := \
+PLATFORM_SEPOLICY_COMPAT_VERSIONS := $(filter-out $(PLATFORM_SEPOLICY_VERSION), \
     29.0 \
     30.0 \
     31.0 \
     32.0 \
     33.0 \
     34.0 \
+    )
 
 .KATI_READONLY := \
     PLATFORM_SEPOLICY_COMPAT_VERSIONS \
@@ -1251,8 +1224,12 @@
 TARGET_AVAILABLE_SDK_VERSIONS := $(filter-out %/module-lib %/system-server,$(TARGET_AVAILABLE_SDK_VERSIONS))
 TARGET_AVAIALBLE_SDK_VERSIONS := $(call numerically_sort,$(TARGET_AVAILABLE_SDK_VERSIONS))
 
-TARGET_SDK_VERSIONS_WITHOUT_JAVA_18_SUPPORT := $(call numbers_less_than,24,$(TARGET_AVAILABLE_SDK_VERSIONS))
-TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT := $(call numbers_less_than,30,$(TARGET_AVAILABLE_SDK_VERSIONS))
+TARGET_SDK_VERSIONS_WITHOUT_JAVA_1_8_SUPPORT := $(call numbers_less_than,24,$(TARGET_AVAILABLE_SDK_VERSIONS))
+TARGET_SDK_VERSIONS_WITHOUT_JAVA_1_9_SUPPORT := $(call numbers_less_than,30,$(TARGET_AVAILABLE_SDK_VERSIONS))
+TARGET_SDK_VERSIONS_WITHOUT_JAVA_11_SUPPORT := $(call numbers_less_than,32,$(TARGET_AVAILABLE_SDK_VERSIONS))
+TARGET_SDK_VERSIONS_WITHOUT_JAVA_17_SUPPORT := $(call numbers_less_than,34,$(TARGET_AVAILABLE_SDK_VERSIONS))
+
+JAVA_LANGUAGE_VERSIONS_WITHOUT_SYSTEM_MODULES := 1.7 1.8
 
 # This is the standard way to name a directory containing prebuilt target
 # objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 82b17be..049e7ff 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -65,6 +65,18 @@
   endif
 endif
 
+# Disable global HWASan in excluded paths
+ifneq ($(filter hwaddress, $(my_global_sanitize)),)
+  combined_exclude_paths := $(HWASAN_EXCLUDE_PATHS) \
+                            $(PRODUCT_HWASAN_EXCLUDE_PATHS)
+
+  ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_exclude_paths)),\
+         $(filter $(dir)%,$(LOCAL_PATH)))),)
+    my_global_sanitize := $(filter-out hwaddress,$(my_global_sanitize))
+    my_global_sanitize_diag := $(filter-out hwaddress,$(my_global_sanitize_diag))
+  endif
+endif
+
 ifneq ($(my_global_sanitize),)
   my_sanitize := $(my_global_sanitize) $(my_sanitize)
 endif
diff --git a/core/definitions.mk b/core/definitions.mk
index 462c968..b6b0d69 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -147,6 +147,10 @@
 $(filter true, $(1))
 endef
 
+define boolean-not
+$(if $(filter true,$(1)),,true)
+endef
+
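
A minimal sketch of how boolean-not is meant to compose with $(or ...) in the base_rules.mk guard; the local variable values here are hypothetical:

# Sketch, standalone: boolean-not returns "true" only when its argument is not
# "true", so the guard is non-empty for installable modules and for modules
# that set LOCAL_SOONG_INSTALLED_MODULE.
define boolean-not
$(if $(filter true,$(1)),,true)
endef

LOCAL_SOONG_INSTALLED_MODULE :=
LOCAL_UNINSTALLABLE_MODULE :=
ifneq (,$(or $(LOCAL_SOONG_INSTALLED_MODULE),$(call boolean-not,$(LOCAL_UNINSTALLABLE_MODULE))))
  $(info LOCAL_SOONG_INSTALL_SYMLINKS would be installed for this module)
endif
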
 ###########################################################
 ## Rule for touching GCNO files.
 ###########################################################
@@ -1547,10 +1551,10 @@
 #
 # You must call this with $(eval).
 define define-aidl-java-rule
-define-aidl-java-rule-src := $(patsubst %.aidl,%.java,$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
-$$(define-aidl-java-rule-src) : $(call clean-path,$(LOCAL_PATH)/$(1)) $(AIDL)
+define_aidl_java_rule_src := $(patsubst %.aidl,%.java,$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
+$$(define_aidl_java_rule_src) : $(call clean-path,$(LOCAL_PATH)/$(1)) $(AIDL)
 	$$(transform-aidl-to-java)
-$(3) += $$(define-aidl-java-rule-src)
+$(3) += $$(define_aidl_java_rule_src)
 endef
 
 ## Given a .aidl file path generate the rule to compile it a .cpp file.
@@ -1560,10 +1564,10 @@
 #
 # You must call this with $(eval).
 define define-aidl-cpp-rule
-define-aidl-cpp-rule-src := $(patsubst %.aidl,%$(LOCAL_CPP_EXTENSION),$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
-$$(define-aidl-cpp-rule-src) : $(call clean-path,$(LOCAL_PATH)/$(1)) $(AIDL_CPP)
+define_aidl_cpp_rule_src := $(patsubst %.aidl,%$(LOCAL_CPP_EXTENSION),$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
+$$(define_aidl_cpp_rule_src) : $(call clean-path,$(LOCAL_PATH)/$(1)) $(AIDL_CPP)
 	$$(transform-aidl-to-cpp)
-$(3) += $$(define-aidl-cpp-rule-src)
+$(3) += $$(define_aidl_cpp_rule_src)
 endef
 
 ###########################################################
@@ -1585,10 +1589,10 @@
 #
 # You must call this with $(eval).
 define define-vts-cpp-rule
-define-vts-cpp-rule-src := $(patsubst %.vts,%$(LOCAL_CPP_EXTENSION),$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
-$$(define-vts-cpp-rule-src) : $(LOCAL_PATH)/$(1) $(VTSC)
+define_vts_cpp_rule_src := $(patsubst %.vts,%$(LOCAL_CPP_EXTENSION),$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
+$$(define_vts_cpp_rule_src) : $(LOCAL_PATH)/$(1) $(VTSC)
 	$$(transform-vts-to-cpp)
-$(3) += $$(define-vts-cpp-rule-src)
+$(3) += $$(define_vts_cpp_rule_src)
 endef
 
 ###########################################################
@@ -3203,8 +3207,9 @@
 define copy-vintf-manifest-checked
 $(2): $(1) $(HOST_OUT_EXECUTABLES)/assemble_vintf
 	@echo "Copy xml: $$@"
-	$(hide) $(HOST_OUT_EXECUTABLES)/assemble_vintf -i $$< >/dev/null  # Don't print the xml file to stdout.
-	$$(copy-file-to-target)
+	$(hide) mkdir -p "$$(dir $$@)"
+	$(hide) VINTF_IGNORE_TARGET_FCM_VERSION=true\
+		$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $$< -o $$@
 endef
 
 # Copies many vintf manifest files checked.
@@ -3417,6 +3422,10 @@
 # $(2): path in symbols directory
 # $(3): file type (elf or r8)
 # $(4): path in the mappings directory
+#
+# Regarding the KATI_RESTAT declarations at the end: only $(2) should need KATI_RESTAT, but there
+# appears to be a bug in kati where it would not add restat=true to the ninja file unless $(4) was
+# also added to KATI_RESTAT.
 define _copy-symbols-file-with-mapping
 $(2): .KATI_IMPLICIT_OUTPUTS := $(4)
 $(2): $(SYMBOLS_MAP)
@@ -3425,16 +3434,7 @@
 	$$(copy-file-to-target)
 	$(SYMBOLS_MAP) -$(strip $(3)) $(2) -write_if_changed $(4)
 .KATI_RESTAT: $(2)
-endef
-
-# Returns the directory to copy proguard dictionaries into
-define local-proguard-dictionary-directory
-$(call intermediates-dir-for,PACKAGING,proguard_dictionary)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
-endef
-
-# Returns the directory to copy proguard dictionary mappings into
-define local-proguard-dictionary-mapping-directory
-$(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
+.KATI_RESTAT: $(4)
 endef
 
 
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 6ac169b..6791125 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -90,8 +90,8 @@
 # The input variables are written by build/soong/java/dexpreopt_bootjars.go. Examples can be found
 # at the bottom of build/soong/java/dexpreopt_config_testing.go.
 dexpreopt_root_dir := $(dir $(patsubst %/,%,$(dir $(firstword $(bootclasspath_jars)))))
-booclasspath_arg := $(subst $(space),:,$(patsubst $(dexpreopt_root_dir)%,%,$(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)))
-booclasspath_locations_arg := $(subst $(space),:,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS))
+bootclasspath_arg := $(subst $(space),:,$(patsubst $(dexpreopt_root_dir)%,%,$(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)))
+bootclasspath_locations_arg := $(subst $(space),:,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS))
 boot_images := $(subst :,$(space),$(DEXPREOPT_IMAGE_LOCATIONS_ON_DEVICE$(DEXPREOPT_INFIX)))
 boot_image_arg := $(subst $(space),:,$(patsubst /%,%,$(boot_images)))
 dex2oat_extra_args := $(if $(filter true,$(ENABLE_UFFD_GC)),--runtime-arg -Xgc:CMC)
@@ -99,8 +99,8 @@
 boot_zip_metadata_txt := $(dir $(boot_zip))boot_zip/METADATA.txt
 $(boot_zip_metadata_txt):
 	rm -f $@
-	echo "booclasspath = $(booclasspath_arg)" >> $@
-	echo "booclasspath-locations = $(booclasspath_locations_arg)" >> $@
+	echo "bootclasspath = $(bootclasspath_arg)" >> $@
+	echo "bootclasspath-locations = $(bootclasspath_locations_arg)" >> $@
 	echo "boot-image = $(boot_image_arg)" >> $@
 	echo "extra-args = $(dex2oat_extra_args)" >> $@
 
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 288f81f..54a57d1 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -468,8 +468,6 @@
 	rsync --checksum $(PRIVATE_STAGING) $@
 
   my_dexpreopt_script := $(intermediates)/dexpreopt.sh
-  my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
-  DEXPREOPT.$(LOCAL_MODULE).POST_INSTALLED_DEXPREOPT_ZIP := $(my_dexpreopt_zip)
   .KATI_RESTAT: $(my_dexpreopt_script)
   $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_script): PRIVATE_GLOBAL_SOONG_CONFIG := $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
@@ -499,38 +497,71 @@
     my_dexpreopt_deps += $(intermediates)/enforce_uses_libraries.status
   endif
 
+  # We need to add all the installed files to ALL_MODULES.$(my_register_name).INSTALLED in order
+  # for the build system to properly track installed files (for sbom, installclean, etc.).
+  # We install all the files from a zip file generated at execution time, which means we have to guess
+  # what's going to be in that zip file before it's created. We then check at execution time that
+  # our guess is correct.
+  # _system_other corresponds to OdexOnSystemOtherByName() in soong.
+  # The other paths correspond to dexpreoptCommand()
+  _dexlocation := $(patsubst $(PRODUCT_OUT)/%,%,$(LOCAL_INSTALLED_MODULE))
+  _dexname := $(basename $(notdir $(_dexlocation)))
+  _system_other := $(strip $(if $(strip $(BOARD_USES_SYSTEM_OTHER_ODEX)), \
+    $(if $(strip $(SANITIZE_LITE)),, \
+      $(if $(filter $(_dexname),$(PRODUCT_DEXPREOPT_SPEED_APPS))$(filter $(_dexname),$(PRODUCT_SYSTEM_SERVER_APPS)),, \
+        $(if $(strip $(foreach myfilter,$(SYSTEM_OTHER_ODEX_FILTER),$(filter system/$(myfilter),$(_dexlocation)))), \
+          system_other/)))))
+  # _dexdir has a trailing /
+  _dexdir := $(_system_other)$(dir $(_dexlocation))
+  my_dexpreopt_zip_contents := $(sort \
+    $(foreach arch,$(my_dexpreopt_archs), \
+      $(_dexdir)oat/$(arch)/$(_dexname).odex \
+      $(_dexdir)oat/$(arch)/$(_dexname).vdex \
+      $(if $(filter false,$(LOCAL_DEX_PREOPT_APP_IMAGE)),, \
+        $(if $(my_process_profile)$(filter true,$(LOCAL_DEX_PREOPT_APP_IMAGE)), \
+          $(_dexdir)oat/$(arch)/$(_dexname).art))) \
+    $(if $(my_process_profile),$(_dexlocation).prof))
+  _dexlocation :=
+  _dexdir :=
+  _dexname :=
+  _system_other :=
+
+  my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
   $(my_dexpreopt_zip): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_zip): $(my_dexpreopt_deps)
   $(my_dexpreopt_zip): | $(DEXPREOPT_GEN_DEPS)
   $(my_dexpreopt_zip): .KATI_DEPFILE := $(my_dexpreopt_zip).d
   $(my_dexpreopt_zip): PRIVATE_DEX := $(my_dex_jar)
   $(my_dexpreopt_zip): PRIVATE_SCRIPT := $(my_dexpreopt_script)
+  $(my_dexpreopt_zip): PRIVATE_ZIP_CONTENTS := $(my_dexpreopt_zip_contents)
   $(my_dexpreopt_zip): $(my_dexpreopt_script)
 	@echo "$(PRIVATE_MODULE) dexpreopt"
+	rm -f $@
+	echo -n > $@.contents
+	$(foreach f,$(PRIVATE_ZIP_CONTENTS),echo "$(f)" >> $@.contents$(newline))
 	bash $(PRIVATE_SCRIPT) $(PRIVATE_DEX) $@
+	if ! diff <(zipinfo -1 $@ | sort) $@.contents >&2; then \
+	  echo "Contents of $@ did not match what make was expecting." >&2 && exit 1; \
+	fi
 
-  ifdef LOCAL_POST_INSTALL_CMD
-    # Add a shell command separator
-    LOCAL_POST_INSTALL_CMD += &&
-  endif
+  $(foreach installed_dex_file,$(my_dexpreopt_zip_contents),\
+    $(eval $(PRODUCT_OUT)/$(installed_dex_file): $(my_dexpreopt_zip) \
+$(newline)	unzip -qoDD -d $(PRODUCT_OUT) $(my_dexpreopt_zip) $(installed_dex_file)))
 
-  LOCAL_POST_INSTALL_CMD += \
-    for i in $$(zipinfo -1 $(my_dexpreopt_zip)); \
-      do mkdir -p $(PRODUCT_OUT)/$$(dirname $$i); \
-    done && \
-    ( unzip -qoDD -d $(PRODUCT_OUT) $(my_dexpreopt_zip) 2>&1 | grep -v "zipfile is empty"; exit $${PIPESTATUS[0]} ) || \
-      ( code=$$?; if [ $$code -ne 0 -a $$code -ne 1 ]; then exit $$code; fi )
+  ALL_MODULES.$(my_register_name).INSTALLED += $(addprefix $(PRODUCT_OUT)/,$(my_dexpreopt_zip_contents))
 
-  $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
-  $(LOCAL_INSTALLED_MODULE): $(my_dexpreopt_zip)
-
-  $(my_all_targets): $(my_dexpreopt_zip)
+  # Normally this happens in sbom.mk, which is included from base_rules.mk. But since
+  # dex_preopt_odex_install.mk is included after base_rules.mk, it misses these odex files.
+  $(foreach installed_file,$(addprefix $(PRODUCT_OUT)/,$(my_dexpreopt_zip_contents)), \
+    $(eval ALL_INSTALLED_FILES.$(installed_file) := $(my_register_name)))
 
   my_dexpreopt_config :=
+  my_dexpreopt_config_for_postprocessing :=
+  my_dexpreopt_jar_copy :=
   my_dexpreopt_product_packages :=
   my_dexpreopt_script :=
   my_dexpreopt_zip :=
-  my_dexpreopt_config_for_postprocessing :=
+  my_dexpreopt_zip_contents :=
 endif # LOCAL_DEX_PREOPT
 endif # my_create_dexpreopt_config
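
The guess-then-verify pattern used for dexpreopt.zip above reduces to a small standalone rule; the target name and entry list below are placeholders, and the recipe assumes bash (for process substitution), as the real rule does:

# Sketch: record the predicted entries, build the zip, then fail if the
# archive's actual contents differ from the prediction.
SHELL := /bin/bash
EXPECTED_ENTRIES := oat/arm64/example.odex oat/arm64/example.vdex

out/example-dexpreopt.zip:
	rm -f $@ $@.contents
	printf '%s\n' $(EXPECTED_ENTRIES) | sort > $@.contents
	: # ... the command that actually produces $@ would run here ...
	if ! diff <(zipinfo -1 $@ | sort) $@.contents >&2; then \
	  echo "Contents of $@ did not match what make was expecting." >&2; exit 1; \
	fi
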
 
diff --git a/core/java_common.mk b/core/java_common.mk
index 0e03d0b..a5ed057 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -25,15 +25,19 @@
     # Host modules always default to 1.9
     LOCAL_JAVA_LANGUAGE_VERSION := 1.9
   else
-    ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_18_SUPPORT)))
+    ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_1_8_SUPPORT)))
       LOCAL_JAVA_LANGUAGE_VERSION := 1.7
-    else ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT)))
+    else ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_1_9_SUPPORT)))
       LOCAL_JAVA_LANGUAGE_VERSION := 1.8
+    else ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_11_SUPPORT)))
+      LOCAL_JAVA_LANGUAGE_VERSION := 1.9
+    else ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_17_SUPPORT)))
+      LOCAL_JAVA_LANGUAGE_VERSION := 11
     else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS))
       # TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules
       LOCAL_JAVA_LANGUAGE_VERSION := 1.8
     else
-      LOCAL_JAVA_LANGUAGE_VERSION := 1.9
+      LOCAL_JAVA_LANGUAGE_VERSION := 17
     endif
   endif
 endif
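
A quick worked example of the new ladder, using assumed LOCAL_SDK_VERSION values:

#   LOCAL_SDK_VERSION := 28  -> below 30 (..._WITHOUT_JAVA_1_9_SUPPORT) -> Java 1.8
#   LOCAL_SDK_VERSION := 33  -> below 34 (..._WITHOUT_JAVA_17_SUPPORT)  -> Java 11
#   no LOCAL_SDK_VERSION and no prebuilt SDKs (platform code)           -> Java 17
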
@@ -410,7 +414,7 @@
 full_java_system_modules_deps :=
 my_system_modules_dir :=
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_USE_SYSTEM_MODULES :=
-ifeq ($(LOCAL_JAVA_LANGUAGE_VERSION),1.9)
+ifeq (,$(filter $(LOCAL_JAVA_LANGUAGE_VERSION),$(JAVA_LANGUAGE_VERSIONS_WITHOUT_SYSTEM_MODULES)))
   $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_USE_SYSTEM_MODULES := true
   ifdef my_system_modules
     ifneq ($(my_system_modules),none)
diff --git a/core/main.mk b/core/main.mk
index 48b4b5e..7c25862 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -191,6 +191,13 @@
 ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
 endif
 
+# Add the 16K developer option if it is defined for the product.
+ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true)
+ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true
+else
+ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false
+endif
+
 # Enable core platform API violation warnings on userdebug and eng builds.
 ifneq ($(TARGET_BUILD_VARIANT),user)
 ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
@@ -213,11 +220,13 @@
 # property_overrides_split_enabled is true. Otherwise it will be installed in
 # /system/build.prop
 ifdef BOARD_VNDK_VERSION
+  ifeq ($(KEEP_VNDK),true)
   ifeq ($(BOARD_VNDK_VERSION),current)
     ADDITIONAL_VENDOR_PROPERTIES := ro.vndk.version=$(PLATFORM_VNDK_VERSION)
   else
     ADDITIONAL_VENDOR_PROPERTIES := ro.vndk.version=$(BOARD_VNDK_VERSION)
   endif
+  endif
 
   # TODO(b/290159430): ro.vndk.deprecate is a temporal variable for deprecating VNDK.
   # This variable will be removed once ro.vndk.version can be removed.
@@ -330,12 +339,14 @@
 # modules. It uses the version in PRODUCT_PRODUCT_VNDK_VERSION. If the value
 # is "current", use PLATFORM_VNDK_VERSION.
 ifdef PRODUCT_PRODUCT_VNDK_VERSION
+ifeq ($(KEEP_VNDK),true)
 ifeq ($(PRODUCT_PRODUCT_VNDK_VERSION),current)
 ADDITIONAL_PRODUCT_PROPERTIES += ro.product.vndk.version=$(PLATFORM_VNDK_VERSION)
 else
 ADDITIONAL_PRODUCT_PROPERTIES += ro.product.vndk.version=$(PRODUCT_PRODUCT_VNDK_VERSION)
 endif
 endif
+endif
 
 ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)
 
@@ -1224,7 +1235,8 @@
 # Returns modules included automatically as a result of certain BoardConfig
 # variables being set.
 define auto-included-modules
-  $(if $(BOARD_VNDK_VERSION),vndk_package) \
+  $(if $(and $(BOARD_VNDK_VERSION),$(filter true,$(KEEP_VNDK))),vndk_package) \
+  $(if $(filter true,$(KEEP_VNDK)),,llndk_in_system) \
   $(if $(DEVICE_MANIFEST_FILE),vendor_manifest.xml) \
   $(if $(DEVICE_MANIFEST_SKUS),$(foreach sku, $(DEVICE_MANIFEST_SKUS),vendor_manifest_$(sku).xml)) \
   $(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \
@@ -1232,9 +1244,7 @@
 
 endef
 
-# Lists most of the files a particular product installs, including:
-# - PRODUCT_PACKAGES, and their LOCAL_REQUIRED_MODULES
-# - PRODUCT_COPY_FILES
+# Lists the modules a particular product installs.
 # The base list of modules to build for this product is specified
 # by the appropriate product definition file, which was included
 # by product_config.mk.
@@ -1246,8 +1256,7 @@
 # Name resolution for LOCAL_REQUIRED_MODULES:
 #   See the select-bitness-of-required-modules definition.
 # $(1): product makefile
-
-define product-installed-files
+define product-installed-modules
   $(eval _pif_modules := \
     $(call get-product-var,$(1),PRODUCT_PACKAGES) \
     $(if $(filter eng,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ENG)) \
@@ -1264,7 +1273,14 @@
   $(eval ### Resolve the :32 :64 module name) \
   $(eval _pif_modules := $(sort $(call resolve-bitness-for-modules,TARGET,$(_pif_modules)))) \
   $(call expand-required-modules,_pif_modules,$(_pif_modules),$(_pif_overrides)) \
-  $(filter-out $(HOST_OUT_ROOT)/%,$(call module-installed-files, $(_pif_modules))) \
+  $(_pif_modules)
+endef
+
+# Lists most of the files a particular product installs.
+# It gives all the installed files for all modules returned by product-installed-modules,
+# and also includes PRODUCT_COPY_FILES.
+define product-installed-files
+  $(filter-out $(HOST_OUT_ROOT)/%,$(call module-installed-files, $(call product-installed-modules,$(1)))) \
   $(call resolve-product-relative-paths,\
     $(foreach cf,$(call get-product-var,$(1),PRODUCT_COPY_FILES),$(call word-colon,2,$(cf))))
 endef
@@ -1437,6 +1453,16 @@
       $(warning $(ALL_MODULES.$(m).MAKEFILE): Module '$(m)' in PRODUCT_PACKAGES_TESTS has nothing to install!)))
 endif
 
+ifneq ($(TARGET_BUILD_APPS),)
+  # If this build is just for apps, only build apps and not the full system by default.
+  ifneq ($(filter all,$(TARGET_BUILD_APPS)),)
+    # If they used the magic goal "all" then build all apps in the source tree.
+    unbundled_build_modules := $(foreach m,$(sort $(ALL_MODULES)),$(if $(filter APPS,$(ALL_MODULES.$(m).CLASS)),$(m)))
+  else
+    unbundled_build_modules := $(sort $(TARGET_BUILD_APPS))
+  endif
+endif
+
 # build/make/core/Makefile contains extra stuff that we don't want to pollute this
 # top-level makefile with.  It expects that ALL_DEFAULT_INSTALLED_MODULES
 # contains everything that's built during the current make, but it also further
@@ -1712,16 +1738,10 @@
 else ifneq ($(TARGET_BUILD_APPS),)
   # If this build is just for apps, only build apps and not the full system by default.
 
-  unbundled_build_modules :=
-  ifneq ($(filter all,$(TARGET_BUILD_APPS)),)
-    # If they used the magic goal "all" then build all apps in the source tree.
-    unbundled_build_modules := $(foreach m,$(sort $(ALL_MODULES)),$(if $(filter APPS,$(ALL_MODULES.$(m).CLASS)),$(m)))
-  else
-    unbundled_build_modules := $(TARGET_BUILD_APPS)
-  endif
-
-  # Dist the installed files if they exist.
-  apps_only_installed_files := $(foreach m,$(unbundled_build_modules),$(ALL_MODULES.$(m).INSTALLED))
+  # Dist the installed files if they exist, except the installed symlinks. dist-for-goals emits
+  # `cp src dest` commands, which will fail to copy dangling symlinks.
+  apps_only_installed_files := $(foreach m,$(unbundled_build_modules),\
+    $(filter-out $(ALL_MODULES.$(m).INSTALLED_SYMLINKS),$(ALL_MODULES.$(m).INSTALLED)))
   $(call dist-for-goals,apps_only, $(apps_only_installed_files))
 
   # Dist the bundle files if they exist.
@@ -2168,10 +2188,7 @@
 
 metadata_list := $(OUT_DIR)/.module_paths/METADATA.list
 metadata_files := $(subst $(newline),$(space),$(file <$(metadata_list)))
-# (TODO: b/272358583 find another way of always rebuilding this target)
-# Remove the sbom-metadata.csv whenever makefile is evaluated
-$(shell rm $(PRODUCT_OUT)/sbom-metadata.csv >/dev/null 2>&1)
-$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files) $(metadata_list) $(metadata_files)
+$(PRODUCT_OUT)/sbom-metadata.csv:
 	rm -f $@
 	echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib >> $@
 	$(eval _all_static_libs :=)
@@ -2182,7 +2199,6 @@
 	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
 	  $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
 	  $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
-	  $(eval _post_installed_dexpreopt_zip := $(DEXPREOPT.$(_module_name).POST_INSTALLED_DEXPREOPT_ZIP)) \
 	  $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \
 	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
 	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
@@ -2195,15 +2211,14 @@
 	  $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \
 	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
 	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
-	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)) \
+	  $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \
+	  $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
 	  $(eval _static_libs := $(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES)) \
 	  $(eval _whole_static_libs := $(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES)) \
 	  $(foreach l,$(_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
 	  $(foreach l,$(_whole_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
 	  echo /$(_path_on_device),$(_module_path),$(_soong_module_type),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_build_output_path),$(_static_libs),$(_whole_static_libs), >> $@; \
-	  $(if $(_post_installed_dexpreopt_zip), \
-	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(PRODUCT_OUT)/$$i$(comma)$(_static_libs)$(comma)$(_whole_static_libs)$(comma) >> $@ ; done ; \
-	  ) \
 	)
 	$(foreach l,$(sort $(_all_static_libs)), \
 	  $(eval _lib_stem := $(call word-colon,1,$l)) \
@@ -2216,11 +2231,17 @@
 	  echo $(_lib_stem).a,$(_module_path),$(_soong_module_type),,,,,$(_built_file),$(_static_libs),$(_whole_static_libs),$(_is_static_lib) >> $@; \
 	)
 
+# (TODO: b/272358583 find another way of always rebuilding sbom.spdx)
+# Remove the always_dirty_file.txt whenever the makefile is evaluated
+$(shell rm -f $(PRODUCT_OUT)/always_dirty_file.txt)
+$(PRODUCT_OUT)/always_dirty_file.txt:
+	touch $@
+
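
The always_dirty_file.txt change is an instance of a general kati/make trick for forcing a target to re-run on every build; a minimal standalone sketch with placeholder paths:

# Sketch: the $(shell rm -f ...) runs at every parse, so the dependency is
# always missing, its rule always runs, and anything that depends on it is
# rebuilt as well.
$(shell rm -f out/always_dirty_file.txt)
out/always_dirty_file.txt:
	touch $@

out/report.txt: out/always_dirty_file.txt
	date > $@
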
 .PHONY: sbom
 ifeq ($(TARGET_BUILD_APPS),)
 sbom: $(PRODUCT_OUT)/sbom.spdx.json
 $(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx
-$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
+$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM) $(installed_files) $(metadata_list) $(metadata_files) $(PRODUCT_OUT)/always_dirty_file.txt
 	rm -rf $@
 	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json
 
@@ -2239,7 +2260,7 @@
 $(eval _dep_modules := $(filter %.$(_module_name),$(ALL_MODULES)) $(filter %.$(_module_name)$(TARGET_2ND_ARCH_MODULE_SUFFIX),$(ALL_MODULES)))
 $(eval _is_apex := $(filter %.apex,$(3)))
 
-$(4): $(3) $(metadata_list) $(metadata_files)
+$(4):
 	rm -rf $$@
 	echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib >> $$@
 	echo /$(_path_on_device),$(_module_path),$(_soong_module_type),,,,,$(3),,, >> $$@
@@ -2248,7 +2269,7 @@
 	    echo $(patsubst $(PRODUCT_OUT)/apex/$(_module_name)/%,%,$(ALL_MODULES.$m.INSTALLED)),$(sort $(ALL_MODULES.$m.PATH)),$(sort $(ALL_MODULES.$m.SOONG_MODULE_TYPE)),,,,,$(strip $(ALL_MODULES.$m.BUILT)),,, >> $$@;))
 
 $(2): $(1)
-$(1): $(4) $(GEN_SBOM)
+$(1): $(4) $(3) $(GEN_SBOM) $(installed_files) $(metadata_list) $(metadata_files)
 	rm -rf $$@
 	$(GEN_SBOM) --output_file $$@ --metadata $(4) --build_version $$(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json $(if $(filter %.apk,$(3)),--unbundled_apk,--unbundled_apex)
 endef
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index 4b692be..ca319ce 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -69,31 +69,45 @@
 # Aconfig Flags
 
 # Create a summary file of build flags for each partition
-# $(1): built aconfig flags textprot file (out)
-# $(2): installed aconfig flags textprot file (out)
+# $(1): built aconfig flags file (out)
+# $(2): installed aconfig flags file (out)
 # $(3): input aconfig files for the partition (in)
+# $(4): file format, passed to `aconfig dump` (in)
+# $(5): text placed in the aconfig file when no flags are present (in)
 define generate-partition-aconfig-flag-file
 $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
 $(eval $(strip $(1)): PRIVATE_IN := $(strip $(3)))
-$(strip $(1)): $(ACONFIG)
+$(strip $(1)): $(ACONFIG) $(strip $(3))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
-		$$(ACONFIG) dump --format textproto --out $$(PRIVATE_OUT) \
+		$$(ACONFIG) dump --format $(4) --out $$(PRIVATE_OUT) \
 			$$(addprefix --cache ,$$(PRIVATE_IN)), \
-		echo "# No aconfig flags" > $$(PRIVATE_OUT) \
+		echo $(5) > $$(PRIVATE_OUT) \
 	)
 $(call copy-one-file, $(1), $(2))
 endef
 
 
 $(foreach partition, $(_FLAG_PARTITIONS), \
-	$(eval aconfig_flag_summaries.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.textproto) \
+	$(eval aconfig_flag_summaries_textproto.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.textproto) \
+	$(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \
 	$(eval $(call generate-partition-aconfig-flag-file, \
 				$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.textproto, \
-				$(aconfig_flag_summaries.$(partition)), \
+				$(aconfig_flag_summaries_textproto.$(partition)), \
 				$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
 					$(ALL_MODULES.$(m).ACONFIG_FILES) \
-				)) \
+				)), \
+				textproto, \
+				"# No aconfig flags" \
+	)) \
+	$(eval $(call generate-partition-aconfig-flag-file, \
+				$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
+				$(aconfig_flag_summaries_protobuf.$(partition)), \
+				$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
+					$(ALL_MODULES.$(m).ACONFIG_FILES) \
+				)), \
+				protobuf, \
+				"" \
 	)) \
 )
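
For readability, this is roughly what one expansion of the extended macro looks like for a single hypothetical partition; $(example_aconfig_caches) is a placeholder for that partition's aconfig cache files:

# Sketch: dump the cached flags as a binary protobuf, or write an empty file
# when the partition declares no flags.
$(eval $(call generate-partition-aconfig-flag-file, \
    $(TARGET_OUT_FLAGS)/system_ext/aconfig_flags.pb, \
    $(PRODUCT_OUT)/system_ext/etc/aconfig_flags.pb, \
    $(example_aconfig_caches), \
    protobuf, \
    ""))
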
 
@@ -103,10 +117,12 @@
 required_flags_files := \
 		$(sort $(foreach partition, $(filter $(IMAGES_TO_BUILD), $(_FLAG_PARTITIONS)), \
 			$(build_flag_summaries.$(partition)) \
-			$(aconfig_flag_summaries.$(partition)) \
+			$(aconfig_flag_summaries_textproto.$(partition)) \
+			$(aconfig_flag_summaries_protobuf.$(partition)) \
 		))
 
 ALL_DEFAULT_INSTALLED_MODULES += $(required_flags_files)
+ALL_FLAGS_FILES := $(required_flags_files)
 
 # TODO: Remove
 .PHONY: flag-files
@@ -117,6 +133,7 @@
 required_flags_files:=
 $(foreach partition, $(_FLAG_PARTITIONS), \
 	$(eval build_flag_summaries.$(partition):=) \
-	$(eval aconfig_flag_summaries.$(partition):=) \
+	$(eval aconfig_flag_summaries_textproto.$(partition):=) \
+	$(eval aconfig_flag_summaries_protobuf.$(partition):=) \
 )
 
diff --git a/core/product.mk b/core/product.mk
index b66f1e2..ca65948 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -47,6 +47,13 @@
 _product_list_vars += PRODUCT_PACKAGES_DEBUG
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_ASAN
 _product_list_vars += PRODUCT_PACKAGES_ARM64
+
+# packages that are added to PRODUCT_PACKAGES based on the PRODUCT_SHIPPING_API_LEVEL
+# These are only added if the shipping API level is that level or lower
+_product_list_vars += PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29
+_product_list_vars += PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33
+_product_list_vars += PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34
+
 # Packages included only for eng/userdebug builds, when building with EMMA_INSTRUMENT=true
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
 _product_list_vars += PRODUCT_PACKAGES_ENG
@@ -254,6 +261,9 @@
 # Whether any paths should have HWASan enabled for components
 _product_list_vars += PRODUCT_HWASAN_INCLUDE_PATHS
 
+# Whether any paths are excluded from sanitization when SANITIZE_TARGET=hwaddress
+_product_list_vars += PRODUCT_HWASAN_EXCLUDE_PATHS
+
 # Whether any paths should have Memtag_heap enabled for components
 _product_list_vars += PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS
 _product_list_vars += PRODUCT_MEMTAG_HEAP_ASYNC_DEFAULT_INCLUDE_PATHS
@@ -430,8 +440,13 @@
 # specified we default to COW version 2 in update_engine for backwards compatibility
 _product_single_value_vars += PRODUCT_VIRTUAL_AB_COW_VERSION
 
+# If set, determines whether the build system checks for vendor seapp context violations.
+_product_single_value_vars += PRODUCT_CHECK_VENDOR_SEAPP_VIOLATIONS
+
 _product_list_vars += PRODUCT_AFDO_PROFILES
 
+_product_single_value_vars += PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
diff --git a/core/product_config.mk b/core/product_config.mk
index 3f9eb24..b475d75 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -282,7 +282,11 @@
 
 #############################################################################
 # Check product include tag allowlist
-BLUEPRINT_INCLUDE_TAGS_ALLOWLIST := com.android.mainline_go com.android.mainline
+BLUEPRINT_INCLUDE_TAGS_ALLOWLIST := \
+  com.android.mainline_go \
+  com.android.mainline \
+  mainline_module_prebuilt_nightly \
+  mainline_module_prebuilt_monthly_release
 .KATI_READONLY := BLUEPRINT_INCLUDE_TAGS_ALLOWLIST
 $(foreach include_tag,$(PRODUCT_INCLUDE_TAGS), \
 	$(if $(filter $(include_tag),$(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST)),,\
@@ -293,7 +297,8 @@
 # we still analyse it.
 # This means that in setups where we have two prebuilts of module_sdk, we need a "default" to use in analysis
 # This should be a no-op in aosp and internal since no Android.bp file contains blueprint_package_includes
-PRODUCT_INCLUDE_TAGS += com.android.mainline # Use the big android one by default
+# Use the big android one and main-based prebuilts by default
+PRODUCT_INCLUDE_TAGS += com.android.mainline mainline_module_prebuilt_nightly
 endif
 
 #############################################################################
@@ -500,6 +505,9 @@
   ifneq (,$(call math_gt_or_eq,33,$(PRODUCT_SHIPPING_API_LEVEL)))
     PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33)
   endif
+  ifneq (,$(call math_gt_or_eq,34,$(PRODUCT_SHIPPING_API_LEVEL)))
+    PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34)
+  endif
 endif
 
 # If build command defines OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS,
@@ -543,35 +551,43 @@
 
 $(KATI_obsolete_var OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE,Use PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE instead)
 
-# If build command defines PRODUCT_USE_PRODUCT_VNDK_OVERRIDE as `false`,
-# PRODUCT_PRODUCT_VNDK_VERSION will not be defined automatically.
-# PRODUCT_USE_PRODUCT_VNDK_OVERRIDE can be used for testing only.
-PRODUCT_USE_PRODUCT_VNDK := false
-ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
-  PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
-else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
-  # No shipping level defined. Enforce the product interface by default.
-  PRODUCT_USE_PRODUCT_VNDK := true
-else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
-  # Enforce product interface for VNDK if PRODUCT_SHIPPING_API_LEVEL is greater
-  # than 29.
-  PRODUCT_USE_PRODUCT_VNDK := true
+# From Android V, define PRODUCT_PRODUCT_VNDK_VERSION as current by default.
+# This is required to make all devices have product variants.
+ifndef PRODUCT_PRODUCT_VNDK_VERSION
+  PRODUCT_PRODUCT_VNDK_VERSION := current
 endif
 
-ifeq ($(PRODUCT_USE_PRODUCT_VNDK),true)
-  ifndef PRODUCT_PRODUCT_VNDK_VERSION
-    PRODUCT_PRODUCT_VNDK_VERSION := current
-  endif
-endif
-
-$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
-$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
-
 ifdef PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
     $(error PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS is deprecated, consider using RRO for \
       $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
 endif
 
+# Get the board API level.
+board_api_level := $(PLATFORM_SDK_VERSION)
+ifdef BOARD_API_LEVEL
+  board_api_level := $(BOARD_API_LEVEL)
+else ifdef BOARD_SHIPPING_API_LEVEL
+  # Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
+  board_api_level := $(BOARD_SHIPPING_API_LEVEL)
+endif
+
+# Calculate the VSR vendor API level.
+VSR_VENDOR_API_LEVEL := $(board_api_level)
+
+ifdef PRODUCT_SHIPPING_API_LEVEL
+  VSR_VENDOR_API_LEVEL := $(call math_min,$(PRODUCT_SHIPPING_API_LEVEL),$(board_api_level))
+endif
+.KATI_READONLY := VSR_VENDOR_API_LEVEL
+
+# Boolean variable determining whether vendor seapp context violations are checked
+CHECK_VENDOR_SEAPP_VIOLATIONS := false
+ifneq ($(call math_gt,$(VSR_VENDOR_API_LEVEL),34),)
+  CHECK_VENDOR_SEAPP_VIOLATIONS := true
+else ifneq ($(PRODUCT_CHECK_VENDOR_SEAPP_VIOLATIONS),)
+  CHECK_VENDOR_SEAPP_VIOLATIONS := $(PRODUCT_CHECK_VENDOR_SEAPP_VIOLATIONS)
+endif
+.KATI_READONLY := CHECK_VENDOR_SEAPP_VIOLATIONS
+
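
A worked example of the two calculations above, with assumed board and shipping API levels:

# Assume BOARD_SHIPPING_API_LEVEL := 34 (no BOARD_API_LEVEL) and
# PRODUCT_SHIPPING_API_LEVEL := 35:
#   board_api_level      = 34
#   VSR_VENDOR_API_LEVEL = min(35, 34) = 34
#   math_gt(34, 34) is false, so CHECK_VENDOR_SEAPP_VIOLATIONS stays "false"
#   unless PRODUCT_CHECK_VENDOR_SEAPP_VIOLATIONS sets it explicitly.
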
 define product-overrides-config
 $$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
     $$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
diff --git a/core/proguard.flags b/core/proguard.flags
index d790061..6dbee84 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -51,4 +51,13 @@
     @**android**.annotation*.Keep <init>(...);
 }
 
+# Keep Dalvik optimization annotations. These annotations are special in that
+# 1) we want them preserved for visibility with ART, but 2) they don't have
+# RUNTIME retention. These minimal keep rules ensure they're not stripped by R8.
+# TODO(b/215417388): Export this rule from the owning library, core-libart,
+# via export_proguard_flags_files.
+-keepclassmembers,allowshrinking,allowoptimization,allowobfuscation,allowaccessmodification class * {
+    @dalvik.annotation.optimization.** *;
+}
+
 -include proguard_basic_keeps.flags
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index b59527a..f6b34b8 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -10,6 +10,12 @@
 # and RuntimeVisibleTypeAnnotations, as well as associated defaults.
 -keepattributes RuntimeVisible*Annotation*,AnnotationDefault
 
+# With R8 full mode, certain attributes are only kept when matched with an
+# explicit keep rule for that target, even with a global -keepattributes rule.
+# As such, we can add the global keep rule here with minimal cost while
+# simplifying incremental development.
+-keepattributes Exceptions
+
 # For enumeration classes, see http://proguard.sourceforge.net/manual/examples.html#enumerations
 -keepclassmembers enum * {
     public static **[] values();
diff --git a/core/release_config.bzl b/core/release_config.bzl
index 805106f..a2f59e6 100644
--- a/core/release_config.bzl
+++ b/core/release_config.bzl
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+load("//build/bazel/utils:schema_validation.bzl", "validate")
+
 # Partitions that get build system flag summaries
 _flag_partitions = [
     "product",
@@ -28,6 +30,48 @@
 
 _valid_types = ["NoneType", "bool", "list", "string", "int"]
 
+_all_flags_schema = {
+    "type": "list",
+    "of": {
+        "type": "dict",
+        "required_keys": {
+            "name": {"type": "string"},
+            "partitions": {
+                "type": "list",
+                "of": {
+                    "type": "string",
+                    "choices": _flag_partitions + ["all"],
+                },
+                "unique": True,
+            },
+            "default": {
+                "or": [
+                    {"type": t}
+                    for t in _valid_types
+                ],
+            },
+            "declared_in": {"type": "string"},
+        },
+    },
+}
+
+_all_values_schema = {
+    "type": "list",
+    "of": {
+        "type": "dict",
+        "required_keys": {
+            "name": {"type": "string"},
+            "value": {
+                "or": [
+                    {"type": t}
+                    for t in _valid_types
+                ],
+            },
+            "set_in": {"type": "string"},
+        },
+    },
+}
+
 def flag(name, partitions, default):
     "Declare a flag."
     if not partitions:
@@ -69,6 +113,8 @@
 
 def release_config(all_flags, all_values):
     "Return the make variables that should be set for this release config."
+    validate(all_flags, _all_flags_schema)
+    validate(all_values, _all_values_schema)
 
     # Validate flags
     flag_names = []
@@ -82,6 +128,8 @@
     for flag in all_flags:
         for partition in flag["partitions"]:
             if partition == "all":
+                if len(flag["partitions"]) > 1:
+                    fail("\"all\" can't be combined with other partitions: " + str(flag["partitions"]))
                 for partition in _flag_partitions:
                     partitions.setdefault(partition, []).append(flag["name"])
             else:
@@ -105,8 +153,6 @@
         if flag["name"] in values:
             val = values[flag["name"]]["value"]
             set_in = values[flag["name"]]["set_in"]
-            if type(val) not in _valid_types:
-                fail("Invalid type of value for flag \"" + flag["name"] + "\" (" + type(val) + ")")
         else:
             val = flag["default"]
             set_in = flag["declared_in"]
diff --git a/core/rust_device_benchmark_config_template.xml b/core/rust_device_benchmark_config_template.xml
index 2055df2..541630c 100644
--- a/core/rust_device_benchmark_config_template.xml
+++ b/core/rust_device_benchmark_config_template.xml
@@ -16,7 +16,7 @@
 <!-- This test config file is auto-generated. -->
 <configuration description="Config to run {MODULE} rust benchmark tests.">
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
-        <option name="cleanup" value="false" />
+        <option name="cleanup" value="true" />
         <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
     </target_preparer>
 
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index ccc5449..3aa244c 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -73,12 +73,14 @@
 # We skip it for unbundled app builds where we cannot build veridex.
 module_run_appcompat :=
 ifeq (true,$(non_system_module))
-ifeq (,$(TARGET_BUILD_APPS))  # ! unbundled app build
+ifeq (,$(TARGET_BUILD_APPS))  # not unbundled app build
+ifeq (,$(filter sdk,$(MAKECMDGOALS))) # not sdk build (which is another form of unbundled build)
 ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
   module_run_appcompat := true
 endif
 endif
 endif
+endif
 
 ifeq ($(module_run_appcompat),true)
   $(LOCAL_BUILT_MODULE): $(appcompat-files)
@@ -100,31 +102,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
-  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
-  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(intermediates.COMMON)/proguard_dictionary))
   $(eval $(call copy-r8-dictionary-file-with-mapping,\
     $(LOCAL_SOONG_PROGUARD_DICT),\
-    $(my_proguard_dictionary_directory)/proguard_dictionary,\
-    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(my_proguard_dictionary_directory)/classes.jar))
-  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(intermediates.COMMON)/proguard_dictionary)
-  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(my_proguard_dictionary_directory)/proguard_dictionary)
-  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
-  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(my_proguard_dictionary_directory)/classes.jar)
+    $(intermediates.COMMON)/proguard_dictionary,\
+    $(intermediates.COMMON)/proguard_dictionary.textproto))
+
+  ALL_MODULES.$(my_register_name).PROGUARD_DICTIONARY_FILES := \
+    $(intermediates.COMMON)/proguard_dictionary \
+    $(LOCAL_SOONG_CLASSES_JAR)
+  ALL_MODULES.$(my_register_name).PROGUARD_DICTIONARY_SOONG_ZIP_ARGUMENTS := \
+    -e out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates/proguard_dictionary \
+    -f $(intermediates.COMMON)/proguard_dictionary \
+    -e out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates/classes.jar \
+    -f $(LOCAL_SOONG_CLASSES_JAR)
+  ALL_MODULES.$(my_register_name).PROGUARD_DICTIONARY_MAPPING := $(intermediates.COMMON)/proguard_dictionary.textproto
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
-    $(call local-packaging-dir,proguard_usage)/proguard_usage.zip))
-  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(call local-packaging-dir,proguard_usage)/proguard_usage.zip)
+  ALL_MODULES.$(my_register_name).PROGUARD_USAGE_ZIP := $(LOCAL_SOONG_PROGUARD_USAGE_ZIP)
 endif
 
 ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
diff --git a/core/soong_config.mk b/core/soong_config.mk
index bd6cfbb..e3eb780 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -115,6 +115,7 @@
 $(call add_json_list, CFIIncludePaths,                   $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
 $(call add_json_list, IntegerOverflowExcludePaths,       $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
 $(call add_json_list, HWASanIncludePaths,                $(HWASAN_INCLUDE_PATHS) $(PRODUCT_HWASAN_INCLUDE_PATHS))
+$(call add_json_list, HWASanExcludePaths,                $(HWASAN_EXCLUDE_PATHS) $(PRODUCT_HWASAN_EXCLUDE_PATHS))
 
 $(call add_json_list, MemtagHeapExcludePaths,            $(MEMTAG_HEAP_EXCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS))
 $(call add_json_list, MemtagHeapAsyncIncludePaths,       $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(if $(filter true,$(PRODUCT_MEMTAG_HEAP_SKIP_DEFAULT_PATHS)),,$(PRODUCT_MEMTAG_HEAP_ASYNC_DEFAULT_INCLUDE_PATHS)))
@@ -151,7 +152,7 @@
 $(call add_json_bool, Malloc_pattern_fill_contents,      $(MALLOC_PATTERN_FILL_CONTENTS))
 $(call add_json_str,  Override_rs_driver,                $(OVERRIDE_RS_DRIVER))
 $(call add_json_str,  DeviceMaxPageSizeSupported,        $(TARGET_MAX_PAGE_SIZE_SUPPORTED))
-$(call add_json_bool, Device_page_size_agnostic,         $(filter true,$(TARGET_PAGE_SIZE_AGNOSTIC)))
+$(call add_json_bool, DevicePageSizeAgnostic,            $(filter true,$(TARGET_PAGE_SIZE_AGNOSTIC)))
 
 $(call add_json_bool, UncompressPrivAppDex,              $(call invert_bool,$(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))))
 $(call add_json_list, ModulesLoadedByPrivilegedModules,  $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
@@ -206,17 +207,8 @@
 
 $(call add_json_list, PgoAdditionalProfileDirs,          $(PGO_ADDITIONAL_PROFILE_DIRS))
 
-$(call add_json_list, BoardPlatVendorPolicy,             $(BOARD_PLAT_VENDOR_POLICY))
-$(call add_json_list, BoardReqdMaskPolicy,               $(BOARD_REQD_MASK_POLICY))
-$(call add_json_list, BoardSystemExtPublicPrebuiltDirs,  $(BOARD_SYSTEM_EXT_PUBLIC_PREBUILT_DIRS))
-$(call add_json_list, BoardSystemExtPrivatePrebuiltDirs, $(BOARD_SYSTEM_EXT_PRIVATE_PREBUILT_DIRS))
-$(call add_json_list, BoardProductPublicPrebuiltDirs,    $(BOARD_PRODUCT_PUBLIC_PREBUILT_DIRS))
-$(call add_json_list, BoardProductPrivatePrebuiltDirs,   $(BOARD_PRODUCT_PRIVATE_PREBUILT_DIRS))
 $(call add_json_list, BoardVendorSepolicyDirs,           $(BOARD_VENDOR_SEPOLICY_DIRS) $(BOARD_SEPOLICY_DIRS))
 $(call add_json_list, BoardOdmSepolicyDirs,              $(BOARD_ODM_SEPOLICY_DIRS))
-$(call add_json_list, BoardVendorDlkmSepolicyDirs,       $(BOARD_VENDOR_DLKM_SEPOLICY_DIRS))
-$(call add_json_list, BoardOdmDlkmSepolicyDirs,          $(BOARD_ODM_DLKM_SEPOLICY_DIRS))
-$(call add_json_list, BoardSystemDlkmSepolicyDirs,       $(BOARD_SYSTEM_DLKM_SEPOLICY_DIRS))
 $(call add_json_list, SystemExtPublicSepolicyDirs,       $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS))
 $(call add_json_list, SystemExtPrivateSepolicyDirs,      $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS))
 $(call add_json_list, BoardSepolicyM4Defs,               $(BOARD_SEPOLICY_M4DEFS))
@@ -283,16 +275,17 @@
 
 $(call add_json_str,  ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
 
-$(call add_json_list, BuildBrokenPluginValidation,        $(BUILD_BROKEN_PLUGIN_VALIDATION))
-$(call add_json_bool, BuildBrokenClangProperty,           $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
-$(call add_json_bool, BuildBrokenClangAsFlags,            $(filter true,$(BUILD_BROKEN_CLANG_ASFLAGS)))
-$(call add_json_bool, BuildBrokenClangCFlags,             $(filter true,$(BUILD_BROKEN_CLANG_CFLAGS)))
-$(call add_json_bool, GenruleSandboxing,                  $(filter true,$(GENRULE_SANDBOXING)))
-$(call add_json_bool, BuildBrokenEnforceSyspropOwner,     $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
-$(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
-$(call add_json_bool, BuildBrokenUsesSoongPython2Modules, $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)))
-$(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
-$(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES))
+$(call add_json_list, BuildBrokenPluginValidation,         $(BUILD_BROKEN_PLUGIN_VALIDATION))
+$(call add_json_bool, BuildBrokenClangProperty,            $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
+$(call add_json_bool, BuildBrokenClangAsFlags,             $(filter true,$(BUILD_BROKEN_CLANG_ASFLAGS)))
+$(call add_json_bool, BuildBrokenClangCFlags,              $(filter true,$(BUILD_BROKEN_CLANG_CFLAGS)))
+$(call add_json_bool, GenruleSandboxing,                   $(filter true,$(GENRULE_SANDBOXING)))
+$(call add_json_bool, BuildBrokenEnforceSyspropOwner,      $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
+$(call add_json_bool, BuildBrokenTrebleSyspropNeverallow,  $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
+$(call add_json_bool, BuildBrokenUsesSoongPython2Modules,  $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)))
+$(call add_json_bool, BuildBrokenVendorPropertyNamespace,  $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
+$(call add_json_bool, BuildBrokenIncorrectPartitionImages, $(filter true,$(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES)))
+$(call add_json_list, BuildBrokenInputDirModules,          $(BUILD_BROKEN_INPUT_DIR_MODULES))
 
 $(call add_json_list, BuildWarningBadOptionalUsesLibsAllowlist,    $(BUILD_WARNING_BAD_OPTIONAL_USES_LIBS_ALLOWLIST))
 
@@ -319,10 +312,68 @@
 $(call add_json_list, BuildVersionTags,    $(BUILD_VERSION_TAGS))
 
 $(call add_json_str, ReleaseVersion,    $(_RELEASE_VERSION))
-$(call add_json_list, ReleaseAconfigValueSets,    $(RELEASE_ACONFIG_VALUE_SETS))
+$(call add_json_str, ReleaseAconfigValueSets,    $(RELEASE_ACONFIG_VALUE_SETS))
+$(call add_json_str, ReleaseAconfigFlagDefaultPermission,    $(RELEASE_ACONFIG_FLAG_DEFAULT_PERMISSION))
 
 $(call add_json_bool, KeepVndk, $(filter true,$(KEEP_VNDK)))
 
+$(call add_json_bool, CheckVendorSeappViolations, $(filter true,$(CHECK_VENDOR_SEAPP_VIOLATIONS)))
+
+$(call add_json_map, PartitionVarsForBazelMigrationOnlyDoNotUse)
+  $(call add_json_str,  ProductDirectory,    $(dir $(INTERNAL_PRODUCT)))
+
+  $(call add_json_map,PartitionQualifiedVariables)
+  $(foreach image_type,SYSTEM VENDOR CACHE USERDATA PRODUCT SYSTEM_EXT OEM ODM VENDOR_DLKM ODM_DLKM SYSTEM_DLKM, \
+    $(call add_json_map,$(call to-lower,$(image_type))) \
+    $(call add_json_bool, BuildingImage, $(filter true,$(BUILDING_$(image_type)_IMAGE))) \
+    $(call add_json_str, BoardErofsCompressor, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESSOR)) \
+    $(call add_json_str, BoardErofsCompressHints, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESS_HINTS)) \
+    $(call add_json_str, BoardErofsPclusterSize, $(BOARD_$(image_type)IMAGE_EROFS_PCLUSTER_SIZE)) \
+    $(call add_json_str, BoardExtfsInodeCount, $(BOARD_$(image_type)IMAGE_EXTFS_INODE_COUNT)) \
+    $(call add_json_str, BoardExtfsRsvPct, $(BOARD_$(image_type)IMAGE_EXTFS_RSV_PCT)) \
+    $(call add_json_str, BoardF2fsSloadCompressFlags, $(BOARD_$(image_type)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)) \
+    $(call add_json_str, BoardFileSystemCompress, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_COMPRESS)) \
+    $(call add_json_str, BoardFileSystemType, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_TYPE)) \
+    $(call add_json_str, BoardJournalSize, $(BOARD_$(image_type)IMAGE_JOURNAL_SIZE)) \
+    $(call add_json_str, BoardPartitionReservedSize, $(BOARD_$(image_type)IMAGE_PARTITION_RESERVED_SIZE)) \
+    $(call add_json_str, BoardPartitionSize, $(BOARD_$(image_type)IMAGE_PARTITION_SIZE)) \
+    $(call add_json_str, BoardSquashfsBlockSize, $(BOARD_$(image_type)IMAGE_SQUASHFS_BLOCK_SIZE)) \
+    $(call add_json_str, BoardSquashfsCompressor, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR)) \
+    $(call add_json_str, BoardSquashfsCompressorOpt, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR_OPT)) \
+    $(call add_json_str, BoardSquashfsDisable4kAlign, $(BOARD_$(image_type)IMAGE_SQUASHFS_DISABLE_4K_ALIGN)) \
+    $(call add_json_str, ProductBaseFsPath, $(PRODUCT_$(image_type)_BASE_FS_PATH)) \
+    $(call add_json_str, ProductHeadroom, $(PRODUCT_$(image_type)_HEADROOM)) \
+    $(call add_json_str, ProductVerityPartition, $(PRODUCT_$(image_type)_VERITY_PARTITION)) \
+    $(call end_json_map) \
+  )
+  $(call end_json_map)
+
+  $(call add_json_bool, TargetUserimagesUseExt2, $(filter true,$(TARGET_USERIMAGES_USE_EXT2)))
+  $(call add_json_bool, TargetUserimagesUseExt3, $(filter true,$(TARGET_USERIMAGES_USE_EXT3)))
+  $(call add_json_bool, TargetUserimagesUseExt4, $(filter true,$(TARGET_USERIMAGES_USE_EXT4)))
+
+  $(call add_json_bool, TargetUserimagesSparseExtDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseErofsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseSquashfsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseF2fsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)))
+
+  $(call add_json_str, BoardErofsCompressor, $(BOARD_EROFS_COMPRESSOR))
+  $(call add_json_str, BoardErofsCompressorHints, $(BOARD_EROFS_COMPRESS_HINTS))
+  $(call add_json_str, BoardErofsPclusterSize, $(BOARD_EROFS_PCLUSTER_SIZE))
+  $(call add_json_str, BoardErofsShareDupBlocks, $(BOARD_EROFS_SHARE_DUP_BLOCKS))
+  $(call add_json_str, BoardErofsUseLegacyCompression, $(BOARD_EROFS_USE_LEGACY_COMPRESSION))
+  $(call add_json_str, BoardExt4ShareDupBlocks, $(BOARD_EXT4_SHARE_DUP_BLOCKS))
+  $(call add_json_str, BoardFlashLogicalBlockSize, $(BOARD_FLASH_LOGICAL_BLOCK_SIZE))
+  $(call add_json_str, BoardFlashEraseBlockSize, $(BOARD_FLASH_ERASE_BLOCK_SIZE))
+
+  $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
+  $(call add_json_bool, BoardBuildGkiBootImageWithoutRamdisk, $(filter true,$(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK)))
+  $(call add_json_bool, ProductUseDynamicPartitionSize, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)))
+  $(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP)))
+$(call end_json_map)
+
+$(call add_json_bool, NextReleaseHideFlaggedApi, $(filter true,$(PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API)))
+
 $(call json_end)
 
 $(file >$(SOONG_VARIABLES).tmp,$(json_contents))
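The add_json_* helpers above emit these board and product variables as a nested object in the Soong configuration JSON. A quick way to sanity-check the result after a build, assuming the default OUT_DIR of out, is to inspect the generated soong.variables file:

    # Locate the new partition-variable block in the generated Soong config.
    grep -n "PartitionVarsForBazelMigrationOnlyDoNotUse" out/soong/soong.variables
    # Or pretty-print the whole config and page through it.
    python3 -m json.tool out/soong/soong.variables | less
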
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index c7c6a11..9744abf 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -62,31 +62,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
-  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
-  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(intermediates.COMMON)/proguard_dictionary))
   $(eval $(call copy-r8-dictionary-file-with-mapping,\
     $(LOCAL_SOONG_PROGUARD_DICT),\
-    $(my_proguard_dictionary_directory)/proguard_dictionary,\
-    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(my_proguard_dictionary_directory)/classes.jar))
-  $(call add-dependency,$(common_javalib.jar),\
-    $(intermediates.COMMON)/proguard_dictionary)
-  $(call add-dependency,$(common_javalib.jar),\
-    $(my_proguard_dictionary_directory)/proguard_dictionary)
-  $(call add-dependency,$(common_javalib.jar),\
-    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
-  $(call add-dependency,$(common_javalib.jar),\
-    $(my_proguard_dictionary_directory)/classes.jar)
+    $(intermediates.COMMON)/proguard_dictionary,\
+    $(intermediates.COMMON)/proguard_dictionary.textproto))
+
+  ALL_MODULES.$(my_register_name).PROGUARD_DICTIONARY_FILES := \
+    $(intermediates.COMMON)/proguard_dictionary \
+    $(LOCAL_SOONG_CLASSES_JAR)
+  ALL_MODULES.$(my_register_name).PROGUARD_DICTIONARY_SOONG_ZIP_ARGUMENTS := \
+    -e out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates/proguard_dictionary \
+    -f $(intermediates.COMMON)/proguard_dictionary \
+    -e out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates/classes.jar \
+    -f $(LOCAL_SOONG_CLASSES_JAR)
+  ALL_MODULES.$(my_register_name).PROGUARD_DICTIONARY_MAPPING := $(intermediates.COMMON)/proguard_dictionary.textproto
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
-    $(call local-packaging-dir,proguard_usage)/proguard_usage.zip))
-  $(call add-dependency,$(common_javalib.jar),\
-    $(call local-packaging-dir,proguard_usage)/proguard_usage.zip)
+  ALL_MODULES.$(my_register_name).PROGUARD_USAGE_ZIP := $(LOCAL_SOONG_PROGUARD_USAGE_ZIP)
 endif
 
 
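The PROGUARD_DICTIONARY_SOONG_ZIP_ARGUMENTS list recorded above is later handed to soong_zip; as the -e/-f pairs suggest, each -e names the zip entry for the file given by the following -f. A minimal sketch of an equivalent invocation, with purely illustrative module and path names:

    # Package a proguard dictionary and its classes.jar under their legacy entry names (sketch).
    soong_zip -o proguard_dict.zip \
        -e out/target/common/obj/APPS/MyApp_intermediates/proguard_dictionary \
        -f out/soong/.intermediates/example/MyApp/android_common/proguard_dictionary \
        -e out/target/common/obj/APPS/MyApp_intermediates/classes.jar \
        -f out/soong/.intermediates/example/MyApp/android_common/javac/MyApp.jar
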
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 451e88a..4536e5f 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -47,9 +47,21 @@
         echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
         echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
         # Attestation specific properties for AOSP/GSI build running on device.
-        echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
-        echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
-        echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
+        if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \
+            echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
+        fi; \
+        if [ -n "$(strip $(PRODUCT_BRAND_FOR_ATTESTATION))" ]; then \
+            echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
+        fi; \
+        if [ -n "$(strip $(PRODUCT_NAME_FOR_ATTESTATION))" ]; then \
+            echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
+        fi; \
+        if [ -n "$(strip $(PRODUCT_DEVICE_FOR_ATTESTATION))" ]; then \
+            echo "ro.product.device_for_attestation=$(PRODUCT_DEVICE_FOR_ATTESTATION)" >> $(2);\
+        fi; \
+        if [ -n "$(strip $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))" ]; then \
+            echo "ro.product.manufacturer_for_attestation=$(PRODUCT_MANUFACTURER_FOR_ATTESTATION)" >> $(2);\
+        fi; \
     )\
     $(if $(filter true,$(ZYGOTE_FORCE_64)),\
         $(if $(filter vendor,$(1)),\
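The change above makes each *_for_attestation property conditional: the line is only written to the generated prop file when the corresponding PRODUCT_* variable is non-empty. Stripped of the make wrapping, the shell pattern is simply (variable value and output file are illustrative):

    # Only emit the property when a value was configured.
    PRODUCT_MODEL_FOR_ATTESTATION="ExampleModel"
    if [ -n "${PRODUCT_MODEL_FOR_ATTESTATION}" ]; then
        echo "ro.product.model_for_attestation=${PRODUCT_MODEL_FOR_ATTESTATION}" >> build.prop
    fi
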
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index c8b1183..593b7b6 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -37,16 +37,18 @@
   cts_platform_release_path := cts/tests/tests/os/assets/platform_releases.txt
   cts_platform_release_string := $(shell cat $(cts_platform_release_path))
 
-  ifeq (,$(findstring $(PLATFORM_VERSION),$(cts_platform_version_string)))
-    define error_msg
-      ============================================================
-      Could not find version "$(PLATFORM_VERSION)" in CTS platform version file:
-      $(cts_platform_version_path)
-      Most likely PLATFORM_VERSION in build/core/version_defaults.mk
-      has changed and a new version must be added to this CTS file.
-      ============================================================
-    endef
-    $(error $(error_msg))
+  ifeq ($(RELEASE_PLATFORM_VERSION_CODENAME_REL),)
+    ifeq (,$(findstring $(PLATFORM_VERSION),$(cts_platform_version_string)))
+      define error_msg
+        ============================================================
+        Could not find version "$(PLATFORM_VERSION)" in CTS platform version file:
+        $(cts_platform_version_path)
+        Most likely PLATFORM_VERSION in build/core/version_defaults.mk
+        has changed and a new version must be added to this CTS file.
+        ============================================================
+      endef
+      $(error $(error_msg))
+    endif
   endif
   ifeq (,$(findstring $(PLATFORM_VERSION_LAST_STABLE),$(cts_platform_release_string)))
     define error_msg
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index e288779..99d69d1 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -52,6 +52,8 @@
 
 # These are the current development codenames, if the build is not a final
 # release build.  If this is a final release build, it is simply "REL".
+# Note that this may be overridden by RELEASE_PLATFORM_VERSION_CODENAME_REL in
+# version_util.mk.
 PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
 PLATFORM_VERSION_CODENAME.VP1A := VanillaIceCream
 
diff --git a/core/version_util.mk b/core/version_util.mk
index d3fcdc2..dca7482 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -56,6 +56,12 @@
 # unreleased API level targetable by this branch, not just those that are valid
 # lunch targets for this branch.
 
+# Release config flag to override the current version to REL.  Note that the
+# codename can also be locked to REL by setting it in version_defaults.mk.
+ifneq ($(RELEASE_PLATFORM_VERSION_CODENAME_REL),)
+  PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION) := REL
+endif
+
 PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
 ifndef PLATFORM_VERSION_CODENAME
   # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
@@ -163,17 +169,14 @@
 
 ifndef PLATFORM_VNDK_VERSION
   # This is the definition of the VNDK version for the current VNDK libraries.
-  # The version is only available when PLATFORM_VERSION_CODENAME == REL.
-  # Otherwise, it will be set to a CODENAME version. The ABI is allowed to be
-  # changed only before the Android version is released. Once
-  # PLATFORM_VNDK_VERSION is set to actual version, the ABI for this version
-  # will be frozon and emit build errors if any ABI for the VNDK libs are
-  # changed.
-  # After that the snapshot of the VNDK with this version will be generated.
-  #
-  # The VNDK version follows PLATFORM_SDK_VERSION.
+  # With trunk stable, the VNDK is no longer frozen; it is deprecated instead.
+  # This variable will be removed once the VNDK deprecation is complete.
   ifeq (REL,$(PLATFORM_VERSION_CODENAME))
-    PLATFORM_VNDK_VERSION := $(PLATFORM_SDK_VERSION)
+    ifdef RELEASE_PLATFORM_VNDK_VERSION
+      PLATFORM_VNDK_VERSION := $(RELEASE_PLATFORM_VNDK_VERSION)
+    else
+      PLATFORM_VNDK_VERSION := $(PLATFORM_SDK_VERSION)
+    endif
   else
     PLATFORM_VNDK_VERSION := $(PLATFORM_VERSION_CODENAME)
   endif
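Reduced to plain shell, the selection above reads: on a REL build, prefer the release-config value and fall back to the SDK version; otherwise keep using the codename. A sketch with illustrative values:

    # Sketch of the PLATFORM_VNDK_VERSION selection logic.
    PLATFORM_VERSION_CODENAME="REL"
    PLATFORM_SDK_VERSION="34"
    RELEASE_PLATFORM_VNDK_VERSION=""
    if [ "${PLATFORM_VERSION_CODENAME}" = "REL" ]; then
        PLATFORM_VNDK_VERSION="${RELEASE_PLATFORM_VNDK_VERSION:-${PLATFORM_SDK_VERSION}}"
    else
        PLATFORM_VNDK_VERSION="${PLATFORM_VERSION_CODENAME}"
    fi
    echo "PLATFORM_VNDK_VERSION=${PLATFORM_VNDK_VERSION}"
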
diff --git a/envsetup.sh b/envsetup.sh
index 0a90460..63837ec 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -2006,6 +2006,11 @@
             fi
         done
     done
+
+    if [[ "${PWD}" == /google/cog/* ]]; then
+        f="build/make/cogsetup.sh"
+        echo "including $f"; . "$T/$f"
+    fi
 }
 
 function showcommands() {
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 21c0c10..decc345 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -10,15 +10,29 @@
 # device we're building for.  This file is typically packaged up
 # with everything else.
 #
-# If TARGET_BOARD_INFO_FILE (which can be set in BoardConfig.mk) is
-# defined, it is used, otherwise board-info.txt is looked for in
-# $(TARGET_DEVICE_DIR).
+# The following logic is used to find the contents of the info file:
+#   1. TARGET_BOARD_INFO_FILES (can be set in BoardConfig.mk) will be combined.
+#   2. TARGET_BOARD_INFO_FILE (can be set in BoardConfig.mk) will be used.
+#   3. $(TARGET_DEVICE_DIR)/board-info.txt will be used if present.
+#
+# Specifying both TARGET_BOARD_INFO_FILES and TARGET_BOARD_INFO_FILE is an
+# error.
 #
 INSTALLED_ANDROID_INFO_TXT_TARGET := $(PRODUCT_OUT)/android-info.txt
-board_info_txt := $(TARGET_BOARD_INFO_FILE)
-ifndef board_info_txt
-board_info_txt := $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)
+ifdef TARGET_BOARD_INFO_FILES
+  ifdef TARGET_BOARD_INFO_FILE
+    $(warning Both TARGET_BOARD_INFO_FILES and TARGET_BOARD_INFO_FILE are defined.)
+    $(warning Using $(TARGET_BOARD_INFO_FILES) rather than $(TARGET_BOARD_INFO_FILE) for android-info.txt)
+  endif
+  board_info_txt := $(call intermediates-dir-for,PACKAGING,board-info)/board-info.txt
+$(board_info_txt): $(TARGET_BOARD_INFO_FILES)
+	$(hide) cat $(TARGET_BOARD_INFO_FILES) > $@
+else ifdef TARGET_BOARD_INFO_FILE
+  board_info_txt := $(TARGET_BOARD_INFO_FILE)
+else
+  board_info_txt := $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)
 endif
+
 CHECK_RADIO_VERSIONS := $(HOST_OUT_EXECUTABLES)/check_radio_versions$(HOST_EXECUTABLE_SUFFIX)
 $(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) $(CHECK_RADIO_VERSIONS)
 	$(hide) $(CHECK_RADIO_VERSIONS) \
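When TARGET_BOARD_INFO_FILES is set, the new rule above simply concatenates the listed fragments into one board-info.txt in a packaging intermediates directory before the usual android-info.txt processing runs. In effect (paths are illustrative only):

    # Concatenate board-info fragments into the packaging intermediate (illustrative paths).
    cat device/acme/common/board-info.txt device/acme/roadrunner/board-info.txt \
        > out/target/product/roadrunner/obj/PACKAGING/board-info_intermediates/board-info.txt
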
diff --git a/target/board/emulator_arm64/BoardConfig.mk b/target/board/emulator_arm64/BoardConfig.mk
index 963e558..c16e61b 100644
--- a/target/board/emulator_arm64/BoardConfig.mk
+++ b/target/board/emulator_arm64/BoardConfig.mk
@@ -51,9 +51,6 @@
 include build/make/target/board/BoardConfigGsiCommon.mk
 include build/make/target/board/BoardConfigEmuCommon.mk
 
-TARGET_NO_KERNEL := false
-BOARD_USES_RECOVERY_AS_BOOT := true
-
 BOARD_BOOTIMAGE_PARTITION_SIZE := 0x02000000
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 6dd85f0..f8dbafd 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -19,11 +19,9 @@
     abx \
     adbd_system_api \
     am \
-    android.hidl.allocator@1.0-service \
     android.hidl.base-V1.0-java \
     android.hidl.manager-V1.0-java \
     android.hidl.memory@1.0-impl \
-    android.hidl.memory@1.0-impl.vendor \
     android.system.suspend-service \
     android.test.base \
     android.test.mock \
@@ -70,7 +68,6 @@
     com.android.scheduling \
     com.android.sdkext \
     com.android.tethering \
-    com.android.threadnetwork \
     com.android.tzdata \
     com.android.uwb \
     com.android.virt \
@@ -109,7 +106,6 @@
     gatekeeperd \
     gpuservice \
     hid \
-    hwservicemanager \
     idmap2 \
     idmap2d \
     ime \
@@ -309,6 +305,14 @@
     system_manifest.xml \
     system_compatibility_matrix.xml \
 
+HIDL_SUPPORT_SERVICES := \
+    hwservicemanager \
+    android.hidl.allocator@1.0-service \
+
+# Base modules when the shipping API level is less than or equal to 34
+PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
+    $(HIDL_SUPPORT_SERVICES) \
+
 PRODUCT_PACKAGES_ARM64 := libclang_rt.hwasan \
  libclang_rt.hwasan.bootstrap \
  libc_hwasan \
@@ -339,6 +343,7 @@
 PRODUCT_HOST_PACKAGES += \
     BugReport \
     adb \
+    adevice \
     art-tools \
     atest \
     bcc \
@@ -386,6 +391,7 @@
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
     adb_keys \
+    adevice_fingerprint \
     arping \
     dmuserd \
     idlcli \
@@ -432,3 +438,6 @@
     frameworks/base/config/dirty-image-objects:system/etc/dirty-image-objects)
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
+
+# Use "image" APEXes always.
+$(call inherit-product,$(SRC_TARGET_DIR)/product/updatable_apex.mk)
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 3c4d62e..b6e5370 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -46,7 +46,8 @@
 
 # Base modules and settings for the vendor partition.
 PRODUCT_PACKAGES += \
-    android.hardware.cas-service.example \
+    android.hidl.memory@1.0-impl.vendor \
+    com.android.hardware.cas \
     boringssl_self_test_vendor \
     dumpsys_vendor \
     fs_config_files_nonsystem \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index f82d177..3ca4187 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -111,7 +111,6 @@
     com.android.os.statsd:service-statsd \
     com.android.scheduling:service-scheduling \
     com.android.tethering:service-connectivity \
-    com.android.threadnetwork:service-threadnetwork \
     com.android.uwb:service-uwb \
     com.android.wifi:service-wifi \
 
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 1a639ef..dc9324c 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -102,6 +102,11 @@
     libaudiopolicyengineconfigurable \
     libpolicy-subsystem
 
+
+ifneq ($(KEEP_VNDK),true)
+PRODUCT_PACKAGES += llndk.libraries.txt
+endif
+
 # Include all zygote init scripts. "ro.zygote" will select one of them.
 PRODUCT_COPY_FILES += \
     system/core/rootdir/init.zygote32.rc:system/etc/init/hw/init.zygote32.rc \
@@ -126,6 +131,10 @@
 
 _base_mk_allowed_list :=
 
+# TODO(b/299166571) Remove this after the artifact path requirements checker picks up
+# hwservicemanager correctly.
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += $(TARGET_COPY_OUT_SYSTEM)/bin/hwservicemanager
+
 _my_allowed_list := $(_base_mk_allowed_list)
 
 # For mainline, system.img should be mounted at /, so we include ROOT here.
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 86d4622..563712a 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -1,5 +1,14 @@
 LOCAL_PATH:= $(call my-dir)
 
+# VNDK will not be frozen if the PLATFORM_VNDK_VERSION is a codename or greater than 34
+ifeq ($(call math_is_number,$(PLATFORM_VNDK_VERSION)),)
+UNFROZEN_VNDK := true
+else
+ifeq ($(call math_gt,$(PLATFORM_VNDK_VERSION),34),true)
+UNFROZEN_VNDK := true
+endif
+endif
+
 #####################################################################
 # list of vndk libraries from the source code.
 INTERNAL_VNDK_LIB_LIST := $(SOONG_VNDK_LIBRARIES_FILE)
@@ -9,10 +18,14 @@
 # TODO(b/62012285): the lib list should be stored somewhere under
 # /prebuilts/vndk
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ifndef UNFROZEN_VNDK
 LATEST_VNDK_LIB_LIST := $(LOCAL_PATH)/$(PLATFORM_VNDK_VERSION).txt
 ifeq ($(wildcard $(LATEST_VNDK_LIB_LIST)),)
 $(error $(LATEST_VNDK_LIB_LIST) file not found. Please copy "$(LOCAL_PATH)/current.txt" to "$(LATEST_VNDK_LIB_LIST)" and commit a CL for release branch)
 endif
+else # UNFROZEN_VNDK
+LATEST_VNDK_LIB_LIST := $(LOCAL_PATH)/current.txt
+endif # UNFROZEN_VNDK
 else
 LATEST_VNDK_LIB_LIST := $(LOCAL_PATH)/current.txt
 endif
@@ -175,8 +188,7 @@
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 # Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
-LOCAL_REQUIRED_MODULES := \
-    $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES))
+LOCAL_REQUIRED_MODULES := llndk_in_system
 
 ifneq ($(TARGET_SKIP_CURRENT_VNDK),true)
 LOCAL_REQUIRED_MODULES += \
@@ -195,7 +207,10 @@
 include $(CLEAR_VARS)
 _vndk_versions :=
 ifeq ($(filter com.android.vndk.current.on_vendor, $(PRODUCT_PACKAGES)),)
-	_vndk_versions += $(PRODUCT_EXTRA_VNDK_VERSIONS)
+	_vndk_versions += $(if $(call math_is_number,$(PLATFORM_VNDK_VERSION)),\
+		$(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),\
+			$(if $(call math_lt,$(vndk_ver),$(PLATFORM_VNDK_VERSION)),$(vndk_ver))),\
+		$(PRODUCT_EXTRA_VNDK_VERSIONS))
 endif
 ifneq ($(BOARD_VNDK_VERSION),current)
 	_vndk_versions += $(BOARD_VNDK_VERSION)
@@ -210,6 +225,21 @@
 _vndk_versions :=
 
 #####################################################################
+# Define a phony module that installs the LLNDK modules which belong in
+# the system image
+include $(CLEAR_VARS)
+LOCAL_MODULE := llndk_in_system
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
+
+# Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
+LOCAL_REQUIRED_MODULES := \
+    $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES))
+
+include $(BUILD_PHONY_PACKAGE)
+
+#####################################################################
 # skip_mount.cfg, read by init to skip mounting some partitions when GSI is used.
 
 include $(CLEAR_VARS)
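The math_is_number/math_gt combination above marks the VNDK as unfrozen whenever PLATFORM_VNDK_VERSION is a codename (non-numeric) or is newer than 34, in which case current.txt is used instead of a frozen list. An equivalent standalone check in shell, with an illustrative default value:

    # Treat a non-numeric or greater-than-34 VNDK version as unfrozen (sketch).
    ver="${PLATFORM_VNDK_VERSION:-VanillaIceCream}"
    if ! [[ "${ver}" =~ ^[0-9]+$ ]] || [ "${ver}" -gt 34 ]; then
        UNFROZEN_VNDK=true
    fi
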
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index e39af92..bd85b9f 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -28,10 +28,15 @@
 
 BUILDING_GSI := true
 
-# Exclude all files under system/product and system/system_ext
+# Exclude all files under system/product and system/system_ext,
+# and the vndk apex's compat symlinks
 PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
     system/product/% \
-    system/system_ext/%
+    system/system_ext/% \
+    system/lib/vndk-29 \
+    system/lib/vndk-sp-29 \
+    system/lib64/vndk-29 \
+    system/lib64/vndk-sp-29
 
 # GSI should always support up-to-date platform features.
 # Keep this value at the latest API level to ensure latest build system
diff --git a/target/product/mainline_sdk.mk b/target/product/mainline_sdk.mk
index 343aed6..0ea72cc 100644
--- a/target/product/mainline_sdk.mk
+++ b/target/product/mainline_sdk.mk
@@ -16,3 +16,5 @@
 PRODUCT_NAME := mainline_sdk
 PRODUCT_BRAND := Android
 PRODUCT_DEVICE := mainline_sdk
+
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index fa7e1ad..e4cb7ff 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -14,11 +14,8 @@
 # limitations under the License.
 #
 
-# This is a simple product that uses configures the minimum amount
-# needed to build the SDK (without the emulator).
+# Don't modify this file - It's just an alias!
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_x86.mk)
 
 PRODUCT_NAME := sdk
-PRODUCT_BRAND := Android
-PRODUCT_DEVICE := mainline_x86
diff --git a/target/product/sdk_arm64.mk b/target/product/sdk_arm64.mk
index 8bb38f4..3eb9304 100644
--- a/target/product/sdk_arm64.mk
+++ b/target/product/sdk_arm64.mk
@@ -14,8 +14,13 @@
 # limitations under the License.
 #
 
-# Don't modify this file - It's just an alias!
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_arm64.mk)
+TARGET_SUPPORTS_32_BIT_APPS := true
+TARGET_SUPPORTS_64_BIT_APPS := true
 
 PRODUCT_NAME := sdk_arm64
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_arm64
+
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
index 3f81615..c16c403 100644
--- a/target/product/sdk_phone_arm64.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -63,4 +63,4 @@
 # library name, so the check fails.
 PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
 
-
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index 48a0e3b..293b1ea 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -62,3 +62,5 @@
 # RadioConfigLib), which makes it impossible to translate their module names to
 # library name, so the check fails.
 PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
+
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 0f8b508..90cd8d5 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -57,3 +57,5 @@
 # RadioConfigLib), which makes it impossible to translate their module names to
 # library name, so the check fails.
 PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
+
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index f5d9028..b2e14a5 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -58,3 +58,5 @@
 # RadioConfigLib), which makes it impossible to translate their module names to
 # library name, so the check fails.
 PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
+
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk_x86.mk b/target/product/sdk_x86.mk
index 13ee57d..a6e3bcd 100644
--- a/target/product/sdk_x86.mk
+++ b/target/product/sdk_x86.mk
@@ -14,8 +14,13 @@
 # limitations under the License.
 #
 
-# Don't modify this file - It's just an alias!
+# This is a simple product that configures the minimum amount
+# needed to build the SDK (without the emulator).
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_x86.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
 
-PRODUCT_NAME := sdk_x86
+PRODUCT_NAME := sdk_x86
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_x86
+
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk_x86_64.mk b/target/product/sdk_x86_64.mk
index 5f6553e..af73007 100644
--- a/target/product/sdk_x86_64.mk
+++ b/target/product/sdk_x86_64.mk
@@ -14,8 +14,16 @@
 # limitations under the License.
 #
 
-# Don't modify this file - It's just an alias!
+# This is a simple product that configures the minimum amount
+# needed to build the SDK (without the emulator).
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_x86_64.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
+
+TARGET_SUPPORTS_32_BIT_APPS := true
+TARGET_SUPPORTS_64_BIT_APPS := true
 
 PRODUCT_NAME := sdk_x86_64
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_x86_64
+
+PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/updatable_apex.mk b/target/product/updatable_apex.mk
index c19982b..8357fdf 100644
--- a/target/product/updatable_apex.mk
+++ b/target/product/updatable_apex.mk
@@ -14,17 +14,13 @@
 # limitations under the License.
 #
 
-# Inherit this when the target needs to support updating APEXes
+# com.android.apex.cts.shim.v1_prebuilt overrides CtsShimPrebuilt
+# and CtsShimPrivPrebuilt since they are packaged inside the APEX.
+PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_prebuilt
+PRODUCT_SYSTEM_PROPERTIES := ro.apex.updatable=true
 
-ifneq ($(OVERRIDE_TARGET_FLATTEN_APEX),true)
-  # com.android.apex.cts.shim.v1_prebuilt overrides CtsShimPrebuilt
-  # and CtsShimPrivPrebuilt since they are packaged inside the APEX.
-  PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_prebuilt
-  PRODUCT_SYSTEM_PROPERTIES := ro.apex.updatable=true
-  TARGET_FLATTEN_APEX := false
-  # Use compressed apexes in pre-installed partitions.
-  # Note: this doesn't mean that all pre-installed apexes will be compressed.
-  #  Whether an apex is compressed or not is controlled at apex Soong module
-  #  via compresible property.
-  PRODUCT_COMPRESSED_APEX := true
-endif
+# Use compressed apexes in pre-installed partitions.
+# Note: this doesn't mean that all pre-installed apexes will be compressed.
+#  Whether an apex is compressed or not is controlled at the apex Soong module
+#  via the compressible property.
+PRODUCT_COMPRESSED_APEX := true
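With the flattened-APEX branch removed, every product inheriting this file now gets ro.apex.updatable=true and compressed pre-installed APEX support unconditionally. A quick, hypothetical device-side check:

    # Verify that the updatable-APEX property made it into the build.
    adb shell getprop ro.apex.updatable   # expected: true
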
diff --git a/tools/Android.bp b/tools/Android.bp
index bea0602..b8ab162 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -82,3 +82,17 @@
       }
     }
 }
+
+python_test_host {
+    name: "auto_gen_test_config_test",
+    main: "auto_gen_test_config_test.py",
+    srcs: [
+        "auto_gen_test_config.py",
+        "auto_gen_test_config_test.py",
+    ],
+    auto_gen_config: true,
+    test_suites: ["general-tests"],
+    test_options: {
+        unit_test: true,
+    },
+}
diff --git a/tools/BUILD.bazel b/tools/BUILD.bazel
index 0de178b..9ec0dce 100644
--- a/tools/BUILD.bazel
+++ b/tools/BUILD.bazel
@@ -1,6 +1,7 @@
 py_library(
     name = "event_log_tags",
     srcs = ["event_log_tags.py"],
+    imports = ["."],
 )
 
 py_binary(
@@ -25,3 +26,10 @@
     python_version = "PY3",
     visibility = ["//visibility:public"],
 )
+
+py_binary(
+    name = "auto_gen_test_config",
+    srcs = ["auto_gen_test_config.py"],
+    python_version = "PY3",
+    visibility = ["//visibility:public"],
+)
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
index a4ea7f4..02fc57c 100644
--- a/tools/aconfig/Android.bp
+++ b/tools/aconfig/Android.bp
@@ -24,14 +24,23 @@
     },
 }
 
+python_library_host {
+    name: "libaconfig_python_proto",
+    srcs: ["protos/aconfig.proto"],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
+
 // host binary: aconfig
 
-rust_protobuf_host {
+rust_protobuf {
     name: "libaconfig_protos",
     protos: ["protos/aconfig.proto"],
     crate_name: "aconfig_protos",
     source_stem: "aconfig_protos",
     use_protobuf3: true,
+    host_supported: true,
 }
 
 rust_defaults {
@@ -101,7 +110,7 @@
 android_test {
     name: "aconfig.test.java",
     srcs: [
-        "tests/**/*.java",
+        "tests/AconfigTest.java",
     ],
     manifest: "tests/AndroidManifest.xml",
     certificate: "platform",
@@ -113,6 +122,25 @@
     test_suites: ["device-tests"],
 }
 
+java_aconfig_library {
+    name: "aconfig_host_test_java_library",
+    aconfig_declarations: "aconfig.test.flags",
+    host_supported: true,
+    test: true,
+}
+
+java_test_host {
+    name: "AconfigJavaHostTest",
+    srcs: [
+        "tests/AconfigHostTest.java",
+    ],
+    static_libs: [
+        "aconfig_host_test_java_library",
+        "junit",
+    ],
+    test_suites: ["general-tests"],
+}
+
 // integration tests: C++
 
 cc_aconfig_library {
@@ -133,3 +161,36 @@
         "server_configurable_flags",
     ],
 }
+
+rust_aconfig_library {
+    name: "libaconfig_test_rust_library",
+    crate_name: "aconfig_test_rust_library",
+    aconfig_declarations: "aconfig.test.flags",
+}
+
+rust_test {
+    name: "aconfig.prod_mode.test.rust",
+    srcs: [
+        "tests/aconfig_prod_mode_test.rs"
+    ],
+    rustlibs: [
+        "libaconfig_test_rust_library",
+    ],
+}
+
+rust_aconfig_library {
+    name: "libaconfig_test_rust_library_with_test_mode",
+    crate_name: "aconfig_test_rust_library",
+    aconfig_declarations: "aconfig.test.flags",
+    test: true,
+}
+
+rust_test {
+    name: "aconfig.test_mode.test.rust",
+    srcs: [
+        "tests/aconfig_test_mode_test.rs"
+    ],
+    rustlibs: [
+        "libaconfig_test_rust_library_with_test_mode",
+    ],
+}
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
index 86124dd..74ac5ec 100644
--- a/tools/aconfig/TEST_MAPPING
+++ b/tools/aconfig/TEST_MAPPING
@@ -10,6 +10,12 @@
           "include-filter": "android.cts.flags.tests.FlagAnnotationTest"
         }
       ]
+    },
+    {
+      // Ensure that changes to the aconfig auto-generated library remain compatible
+      // with the test filtering logic. Breakage in this test means all tests
+      // that use the flag macros for filtering will be affected.
+      "name": "FlagMacrosTests"
     }
   ]
 }
diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp
new file mode 100644
index 0000000..5f62ae9
--- /dev/null
+++ b/tools/aconfig/fake_device_config/Android.bp
@@ -0,0 +1,21 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+java_library {
+    name: "fake_device_config",
+    srcs: ["src/**/*.java"],
+    sdk_version: "core_platform",
+    host_supported: true,
+}
+
diff --git a/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java b/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java
new file mode 100644
index 0000000..50b6289
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.provider;
+
+/*
+ * This class allows generated aconfig code to compile independently of the framework.
+ */
+public class DeviceConfig {
+    private DeviceConfig() {
+    }
+
+    public static boolean getBoolean(String ns, String name, boolean def) {
+        return false;
+    }
+}
diff --git a/tools/aconfig/printflags/Android.bp b/tools/aconfig/printflags/Android.bp
new file mode 100644
index 0000000..5d73d96
--- /dev/null
+++ b/tools/aconfig/printflags/Android.bp
@@ -0,0 +1,16 @@
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+rust_binary {
+    name: "printflags",
+    edition: "2021",
+    clippy_lints: "android",
+    lints: "android",
+    srcs: ["src/main.rs"],
+    rustlibs: [
+        "libaconfig_protos",
+        "libanyhow",
+        "libprotobuf",
+    ],
+}
diff --git a/tools/aconfig/printflags/src/main.rs b/tools/aconfig/printflags/src/main.rs
new file mode 100644
index 0000000..a9f7c03
--- /dev/null
+++ b/tools/aconfig/printflags/src/main.rs
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! `printflags` is a device binary to print feature flags.
+
+use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
+use anyhow::Result;
+use std::collections::HashMap;
+use std::fs;
+
+fn main() -> Result<()> {
+    let mut flags: HashMap<String, Vec<String>> = HashMap::new();
+    for partition in ["system", "system_ext", "product", "vendor"] {
+        let path = format!("/{}/etc/aconfig_flags.pb", partition);
+        let Ok(bytes) = fs::read(&path) else {
+            eprintln!("warning: failed to read {}", path);
+            continue;
+        };
+        let parsed_flags: ProtoParsedFlags = protobuf::Message::parse_from_bytes(&bytes)?;
+        for flag in parsed_flags.parsed_flag {
+            let key = format!("{}.{}", flag.package(), flag.name());
+            let value = format!("{:?} + {:?} ({})", flag.permission(), flag.state(), partition);
+            flags.entry(key).or_default().push(value);
+        }
+    }
+    for (key, value) in flags {
+        // TODO: if the flag is READ_WRITE (for any partition), call "device_config get" to obtain
+        // the flag's current state, and append value to the output
+        println!("{}: {}", key, value.join(", "));
+    }
+    Ok(())
+}
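Once the binary is included in a build and installed on the device, printflags prints one line per flag in the form package.flag_name: PERMISSION + STATE (partition), merged across the partitions it could read. A hypothetical invocation, assuming the binary is on the device PATH:

    # Dump all aconfig flags and filter for the test package used elsewhere in this change.
    adb shell printflags | grep '^com\.android\.aconfig\.test\.'
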
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/protos/aconfig.proto
index 4cad69a..d5e2868 100644
--- a/tools/aconfig/protos/aconfig.proto
+++ b/tools/aconfig/protos/aconfig.proto
@@ -39,6 +39,7 @@
   optional string namespace = 2;
   optional string description = 3;
   repeated string bug = 4;
+  optional bool is_fixed_read_only = 5;
 };
 
 message flag_declarations {
@@ -75,6 +76,7 @@
   optional flag_state state = 6;
   optional flag_permission permission = 7;
   repeated tracepoint trace = 8;
+  optional bool is_fixed_read_only = 9;
 }
 
 message parsed_flags {
diff --git a/tools/aconfig/src/codegen_cpp.rs b/tools/aconfig/src/codegen_cpp.rs
index 530af49..5eadf2a 100644
--- a/tools/aconfig/src/codegen_cpp.rs
+++ b/tools/aconfig/src/codegen_cpp.rs
@@ -38,9 +38,9 @@
     let cpp_namespace = package.replace('.', "::");
     ensure!(codegen::is_valid_name_ident(&header));
     let context = Context {
-        header: header.clone(),
-        cpp_namespace,
-        package: package.to_string(),
+        header: &header,
+        cpp_namespace: &cpp_namespace,
+        package,
         readwrite,
         for_test: codegen_mode == CodegenMode::Test,
         class_elements,
@@ -77,10 +77,10 @@
 }
 
 #[derive(Serialize)]
-pub struct Context {
-    pub header: String,
-    pub cpp_namespace: String,
-    pub package: String,
+pub struct Context<'a> {
+    pub header: &'a str,
+    pub cpp_namespace: &'a str,
+    pub package: &'a str,
     pub readwrite: bool,
     pub for_test: bool,
     pub class_elements: Vec<ClassElement>,
@@ -131,6 +131,8 @@
 
     virtual bool disabled_rw() = 0;
 
+    virtual bool enabled_fixed_ro() = 0;
+
     virtual bool enabled_ro() = 0;
 
     virtual bool enabled_rw() = 0;
@@ -146,6 +148,10 @@
     return provider_->disabled_rw();
 }
 
+inline bool enabled_fixed_ro() {
+    return true;
+}
+
 inline bool enabled_ro() {
     return true;
 }
@@ -163,6 +169,8 @@
 
 bool com_android_aconfig_test_disabled_rw();
 
+bool com_android_aconfig_test_enabled_fixed_ro();
+
 bool com_android_aconfig_test_enabled_ro();
 
 bool com_android_aconfig_test_enabled_rw();
@@ -194,6 +202,10 @@
 
     virtual void disabled_rw(bool val) = 0;
 
+    virtual bool enabled_fixed_ro() = 0;
+
+    virtual void enabled_fixed_ro(bool val) = 0;
+
     virtual bool enabled_ro() = 0;
 
     virtual void enabled_ro(bool val) = 0;
@@ -223,6 +235,14 @@
     provider_->disabled_rw(val);
 }
 
+inline bool enabled_fixed_ro() {
+    return provider_->enabled_fixed_ro();
+}
+
+inline void enabled_fixed_ro(bool val) {
+    provider_->enabled_fixed_ro(val);
+}
+
 inline bool enabled_ro() {
     return provider_->enabled_ro();
 }
@@ -256,6 +276,10 @@
 
 void set_com_android_aconfig_test_disabled_rw(bool val);
 
+bool com_android_aconfig_test_enabled_fixed_ro();
+
+void set_com_android_aconfig_test_enabled_fixed_ro(bool val);
+
 bool com_android_aconfig_test_enabled_ro();
 
 void set_com_android_aconfig_test_enabled_ro(bool val);
@@ -289,18 +313,22 @@
 
             virtual bool disabled_rw() override {
                 return server_configurable_flags::GetServerConfigurableFlag(
-                    "aconfig_test",
+                    "aconfig_flags.aconfig_test",
                     "com.android.aconfig.test.disabled_rw",
                     "false") == "true";
             }
 
+            virtual bool enabled_fixed_ro() override {
+                return true;
+            }
+
             virtual bool enabled_ro() override {
                 return true;
             }
 
             virtual bool enabled_rw() override {
                 return server_configurable_flags::GetServerConfigurableFlag(
-                    "aconfig_test",
+                    "aconfig_flags.aconfig_test",
                     "com.android.aconfig.test.enabled_rw",
                     "true") == "true";
             }
@@ -319,6 +347,10 @@
     return com::android::aconfig::test::disabled_rw();
 }
 
+bool com_android_aconfig_test_enabled_fixed_ro() {
+    return true;
+}
+
 bool com_android_aconfig_test_enabled_ro() {
     return true;
 }
@@ -332,6 +364,7 @@
     const TEST_SOURCE_FILE_EXPECTED: &str = r#"
 #include "com_android_aconfig_test.h"
 #include <server_configurable_flags/get_flags.h>
+#include <unordered_map>
 
 namespace com::android::aconfig::test {
 
@@ -363,7 +396,7 @@
                       return it->second;
                 } else {
                   return server_configurable_flags::GetServerConfigurableFlag(
-                      "aconfig_test",
+                      "aconfig_flags.aconfig_test",
                       "com.android.aconfig.test.disabled_rw",
                       "false") == "true";
                 }
@@ -373,6 +406,19 @@
                 overrides_["disabled_rw"] = val;
             }
 
+            virtual bool enabled_fixed_ro() override {
+                auto it = overrides_.find("enabled_fixed_ro");
+                  if (it != overrides_.end()) {
+                      return it->second;
+                } else {
+                  return true;
+                }
+            }
+
+            virtual void enabled_fixed_ro(bool val) override {
+                overrides_["enabled_fixed_ro"] = val;
+            }
+
             virtual bool enabled_ro() override {
                 auto it = overrides_.find("enabled_ro");
                   if (it != overrides_.end()) {
@@ -392,7 +438,7 @@
                       return it->second;
                 } else {
                   return server_configurable_flags::GetServerConfigurableFlag(
-                      "aconfig_test",
+                      "aconfig_flags.aconfig_test",
                       "com.android.aconfig.test.enabled_rw",
                       "true") == "true";
                 }
@@ -402,7 +448,6 @@
                 overrides_["enabled_rw"] = val;
             }
 
-
             virtual void reset_flags() override {
                 overrides_.clear();
             }
@@ -430,6 +475,16 @@
     com::android::aconfig::test::disabled_rw(val);
 }
 
+
+bool com_android_aconfig_test_enabled_fixed_ro() {
+    return com::android::aconfig::test::enabled_fixed_ro();
+}
+
+void set_com_android_aconfig_test_enabled_fixed_ro(bool val) {
+    com::android::aconfig::test::enabled_fixed_ro(val);
+}
+
+
 bool com_android_aconfig_test_enabled_ro() {
     return com::android::aconfig::test::enabled_ro();
 }
@@ -463,7 +518,7 @@
         for file in generated {
             generated_files_map.insert(
                 String::from(file.path.to_str().unwrap()),
-                String::from_utf8(file.contents.clone()).unwrap(),
+                String::from_utf8(file.contents).unwrap(),
             );
         }
 
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen_java.rs
index 8ab6ffa..702ef22 100644
--- a/tools/aconfig/src/codegen_java.rs
+++ b/tools/aconfig/src/codegen_java.rs
@@ -47,9 +47,13 @@
         "FeatureFlags.java",
         include_str!("../templates/FeatureFlags.java.template"),
     )?;
+    template.add_template(
+        "FakeFeatureFlagsImpl.java",
+        include_str!("../templates/FakeFeatureFlagsImpl.java.template"),
+    )?;
 
     let path: PathBuf = package.split('.').collect();
-    ["Flags.java", "FeatureFlagsImpl.java", "FeatureFlags.java"]
+    ["Flags.java", "FeatureFlags.java", "FeatureFlagsImpl.java", "FakeFeatureFlagsImpl.java"]
         .iter()
         .map(|file| {
             Ok(OutputFile {
@@ -112,29 +116,48 @@
     use super::*;
     use std::collections::HashMap;
 
-    const EXPECTED_FEATUREFLAGS_CONTENT: &str = r#"
+    const EXPECTED_FEATUREFLAGS_COMMON_CONTENT: &str = r#"
     package com.android.aconfig.test;
+    /** @hide */
     public interface FeatureFlags {
+        @com.android.aconfig.annotations.AssumeFalseForR8
         boolean disabledRo();
         boolean disabledRw();
+        @com.android.aconfig.annotations.AssumeTrueForR8
+        boolean enabledFixedRo();
+        @com.android.aconfig.annotations.AssumeTrueForR8
         boolean enabledRo();
         boolean enabledRw();
-    }"#;
+    }
+    "#;
 
     const EXPECTED_FLAG_COMMON_CONTENT: &str = r#"
     package com.android.aconfig.test;
+    /** @hide */
     public final class Flags {
+        /** @hide */
         public static final String FLAG_DISABLED_RO = "com.android.aconfig.test.disabled_ro";
+        /** @hide */
         public static final String FLAG_DISABLED_RW = "com.android.aconfig.test.disabled_rw";
+        /** @hide */
+        public static final String FLAG_ENABLED_FIXED_RO = "com.android.aconfig.test.enabled_fixed_ro";
+        /** @hide */
         public static final String FLAG_ENABLED_RO = "com.android.aconfig.test.enabled_ro";
+        /** @hide */
         public static final String FLAG_ENABLED_RW = "com.android.aconfig.test.enabled_rw";
 
+        @com.android.aconfig.annotations.AssumeFalseForR8
         public static boolean disabledRo() {
             return FEATURE_FLAGS.disabledRo();
         }
         public static boolean disabledRw() {
             return FEATURE_FLAGS.disabledRw();
         }
+        @com.android.aconfig.annotations.AssumeTrueForR8
+        public static boolean enabledFixedRo() {
+            return FEATURE_FLAGS.enabledFixedRo();
+        }
+        @com.android.aconfig.annotations.AssumeTrueForR8
         public static boolean enabledRo() {
             return FEATURE_FLAGS.enabledRo();
         }
@@ -143,6 +166,65 @@
         }
     "#;
 
+    const EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT: &str = r#"
+    package com.android.aconfig.test;
+    import java.util.HashMap;
+    import java.util.Map;
+    /** @hide */
+    public class FakeFeatureFlagsImpl implements FeatureFlags {
+        public FakeFeatureFlagsImpl() {
+            resetAll();
+        }
+        @Override
+        public boolean disabledRo() {
+            return getValue(Flags.FLAG_DISABLED_RO);
+        }
+        @Override
+        public boolean disabledRw() {
+            return getValue(Flags.FLAG_DISABLED_RW);
+        }
+        @Override
+        public boolean enabledFixedRo() {
+            return getValue(Flags.FLAG_ENABLED_FIXED_RO);
+        }
+        @Override
+        public boolean enabledRo() {
+            return getValue(Flags.FLAG_ENABLED_RO);
+        }
+        @Override
+        public boolean enabledRw() {
+            return getValue(Flags.FLAG_ENABLED_RW);
+        }
+        public void setFlag(String flagName, boolean value) {
+            if (!this.mFlagMap.containsKey(flagName)) {
+                throw new IllegalArgumentException("no such flag " + flagName);
+            }
+            this.mFlagMap.put(flagName, value);
+        }
+        public void resetAll() {
+            for (Map.Entry entry : mFlagMap.entrySet()) {
+                entry.setValue(null);
+            }
+        }
+        private boolean getValue(String flagName) {
+            Boolean value = this.mFlagMap.get(flagName);
+            if (value == null) {
+                throw new IllegalArgumentException(flagName + " is not set");
+            }
+            return value;
+        }
+        private Map<String, Boolean> mFlagMap = new HashMap<>(
+            Map.of(
+                Flags.FLAG_DISABLED_RO, false,
+                Flags.FLAG_DISABLED_RW, false,
+                Flags.FLAG_ENABLED_FIXED_RO, false,
+                Flags.FLAG_ENABLED_RO, false,
+                Flags.FLAG_ENABLED_RW, false
+            )
+        );
+    }
+    "#;
+
     #[test]
     fn test_generate_java_code_production() {
         let parsed_flags = crate::test::parse_test_flags();
@@ -156,9 +238,11 @@
             + r#"
             private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
         }"#;
-        let expected_featureflagsimpl_content = r#"
+
+        let expect_featureflagsimpl_content = r#"
         package com.android.aconfig.test;
         import android.provider.DeviceConfig;
+        /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
             @Override
             public boolean disabledRo() {
@@ -166,30 +250,59 @@
             }
             @Override
             public boolean disabledRw() {
-                return DeviceConfig.getBoolean(
+                return getValue(
                     "aconfig_test",
                     "com.android.aconfig.test.disabled_rw",
                     false
                 );
             }
             @Override
+            public boolean enabledFixedRo() {
+                return true;
+            }
+            @Override
             public boolean enabledRo() {
                 return true;
             }
             @Override
             public boolean enabledRw() {
-                return DeviceConfig.getBoolean(
+                return getValue(
                     "aconfig_test",
                     "com.android.aconfig.test.enabled_rw",
                     true
                 );
             }
+            private boolean getValue(String nameSpace,
+                String flagName, boolean defaultValue) {
+                boolean value = defaultValue;
+                try {
+                    value = DeviceConfig.getBoolean(
+                        nameSpace,
+                        flagName,
+                        defaultValue
+                    );
+                } catch (NullPointerException e) {
+                    throw new RuntimeException(
+                        "Cannot read value of flag " + flagName + " from DeviceConfig. " +
+                        "It could be that the code using flag executed " +
+                        "before SettingsProvider initialization. " +
+                        "Please use fixed read-only flag by adding " +
+                        "is_fixed_read_only: true in flag declaration.",
+                        e
+                    );
+                }
+                return value;
+            }
         }
         "#;
         let mut file_set = HashMap::from([
             ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
-            ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsimpl_content),
-            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_CONTENT),
+            ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
+            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
+            (
+                "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
+                EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
+            ),
         ]);
 
         for file in generated_files {
@@ -199,7 +312,7 @@
                 None,
                 crate::test::first_significant_code_diff(
                     file_set.get(file_path).unwrap(),
-                    &String::from_utf8(file.contents.clone()).unwrap()
+                    &String::from_utf8(file.contents).unwrap()
                 ),
                 "File {} content is not correct",
                 file_path
@@ -219,75 +332,58 @@
             CodegenMode::Test,
         )
         .unwrap();
+
         let expect_flags_content = EXPECTED_FLAG_COMMON_CONTENT.to_string()
             + r#"
-            public static void setFeatureFlagsImpl(FeatureFlags featureFlags) {
+            public static void setFeatureFlags(FeatureFlags featureFlags) {
                 Flags.FEATURE_FLAGS = featureFlags;
             }
-            public static void unsetFeatureFlagsImpl() {
+            public static void unsetFeatureFlags() {
                 Flags.FEATURE_FLAGS = null;
             }
             private static FeatureFlags FEATURE_FLAGS;
         }
         "#;
-        let expected_featureflagsimpl_content = r#"
+        let expect_featureflagsimpl_content = r#"
         package com.android.aconfig.test;
-        import static java.util.stream.Collectors.toMap;
-        import java.util.HashMap;
-        import java.util.Map;
-        import java.util.stream.Stream;
+        /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
             @Override
             public boolean disabledRo() {
-                return getFlag(Flags.FLAG_DISABLED_RO);
+                throw new UnsupportedOperationException(
+                    "Method is not implemented.");
             }
             @Override
             public boolean disabledRw() {
-                return getFlag(Flags.FLAG_DISABLED_RW);
+                throw new UnsupportedOperationException(
+                    "Method is not implemented.");
+            }
+            @Override
+            public boolean enabledFixedRo() {
+                throw new UnsupportedOperationException(
+                    "Method is not implemented.");
             }
             @Override
             public boolean enabledRo() {
-                return getFlag(Flags.FLAG_ENABLED_RO);
+                throw new UnsupportedOperationException(
+                    "Method is not implemented.");
             }
             @Override
             public boolean enabledRw() {
-                return getFlag(Flags.FLAG_ENABLED_RW);
+                throw new UnsupportedOperationException(
+                    "Method is not implemented.");
             }
-            public void setFlag(String flagName, boolean value) {
-                if (!this.mFlagMap.containsKey(flagName)) {
-                    throw new IllegalArgumentException("no such flag" + flagName);
-                }
-                this.mFlagMap.put(flagName, value);
-            }
-            public void resetAll() {
-                for (Map.Entry entry : mFlagMap.entrySet()) {
-                    entry.setValue(null);
-                }
-            }
-            private boolean getFlag(String flagName) {
-                Boolean value = this.mFlagMap.get(flagName);
-                if (value == null) {
-                    throw new IllegalArgumentException(flagName + " is not set");
-                }
-                return value;
-            }
-            private HashMap<String, Boolean> mFlagMap = Stream.of(
-                    Flags.FLAG_DISABLED_RO,
-                    Flags.FLAG_DISABLED_RW,
-                    Flags.FLAG_ENABLED_RO,
-                    Flags.FLAG_ENABLED_RW
-                )
-                .collect(
-                    HashMap::new,
-                    (map, elem) -> map.put(elem, null),
-                    HashMap::putAll
-                );
         }
         "#;
+
         let mut file_set = HashMap::from([
             ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
-            ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsimpl_content),
-            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_CONTENT),
+            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
+            ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
+            (
+                "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
+                EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
+            ),
         ]);
 
         for file in generated_files {
@@ -297,7 +393,7 @@
                 None,
                 crate::test::first_significant_code_diff(
                     file_set.get(file_path).unwrap(),
-                    &String::from_utf8(file.contents.clone()).unwrap()
+                    &String::from_utf8(file.contents).unwrap()
                 ),
                 "File {} content is not correct",
                 file_path
diff --git a/tools/aconfig/src/codegen_rust.rs b/tools/aconfig/src/codegen_rust.rs
index 053cebc..4e4c7dd 100644
--- a/tools/aconfig/src/codegen_rust.rs
+++ b/tools/aconfig/src/codegen_rust.rs
@@ -103,11 +103,16 @@
     /// query flag disabled_rw
     pub fn disabled_rw(&self) -> bool {
         flags_rust::GetServerConfigurableFlag(
-            "aconfig_test",
+            "aconfig_flags.aconfig_test",
             "com.android.aconfig.test.disabled_rw",
             "false") == "true"
     }
 
+    /// query flag enabled_fixed_ro
+    pub fn enabled_fixed_ro(&self) -> bool {
+        true
+    }
+
     /// query flag enabled_ro
     pub fn enabled_ro(&self) -> bool {
         true
@@ -116,7 +121,7 @@
     /// query flag enabled_rw
     pub fn enabled_rw(&self) -> bool {
         flags_rust::GetServerConfigurableFlag(
-            "aconfig_test",
+            "aconfig_flags.aconfig_test",
             "com.android.aconfig.test.enabled_rw",
             "true") == "true"
     }
@@ -137,6 +142,12 @@
     PROVIDER.disabled_rw()
 }
 
+/// query flag enabled_fixed_ro
+#[inline(always)]
+pub fn enabled_fixed_ro() -> bool {
+    true
+}
+
 /// query flag enabled_ro
 #[inline(always)]
 pub fn enabled_ro() -> bool {
@@ -178,7 +189,7 @@
     pub fn disabled_rw(&self) -> bool {
         self.overrides.get("disabled_rw").copied().unwrap_or(
             flags_rust::GetServerConfigurableFlag(
-                "aconfig_test",
+                "aconfig_flags.aconfig_test",
                 "com.android.aconfig.test.disabled_rw",
                 "false") == "true"
         )
@@ -189,6 +200,18 @@
         self.overrides.insert("disabled_rw", val);
     }
 
+    /// query flag enabled_fixed_ro
+    pub fn enabled_fixed_ro(&self) -> bool {
+        self.overrides.get("enabled_fixed_ro").copied().unwrap_or(
+            true
+        )
+    }
+
+    /// set flag enabled_fixed_ro
+    pub fn set_enabled_fixed_ro(&mut self, val: bool) {
+        self.overrides.insert("enabled_fixed_ro", val);
+    }
+
     /// query flag enabled_ro
     pub fn enabled_ro(&self) -> bool {
         self.overrides.get("enabled_ro").copied().unwrap_or(
@@ -205,7 +228,7 @@
     pub fn enabled_rw(&self) -> bool {
         self.overrides.get("enabled_rw").copied().unwrap_or(
             flags_rust::GetServerConfigurableFlag(
-                "aconfig_test",
+                "aconfig_flags.aconfig_test",
                 "com.android.aconfig.test.enabled_rw",
                 "true") == "true"
         )
@@ -251,6 +274,18 @@
     PROVIDER.lock().unwrap().set_disabled_rw(val);
 }
 
+/// query flag enabled_fixed_ro
+#[inline(always)]
+pub fn enabled_fixed_ro() -> bool {
+    PROVIDER.lock().unwrap().enabled_fixed_ro()
+}
+
+/// set flag enabled_fixed_ro
+#[inline(always)]
+pub fn set_enabled_fixed_ro(val: bool) {
+    PROVIDER.lock().unwrap().set_enabled_fixed_ro(val);
+}
+
 /// query flag enabled_ro
 #[inline(always)]
 pub fn enabled_ro() -> bool {
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
index bd09e24..7b05147 100644
--- a/tools/aconfig/src/commands.rs
+++ b/tools/aconfig/src/commands.rs
@@ -52,10 +52,15 @@
     pub contents: Vec<u8>,
 }
 
-const DEFAULT_FLAG_STATE: ProtoFlagState = ProtoFlagState::DISABLED;
-const DEFAULT_FLAG_PERMISSION: ProtoFlagPermission = ProtoFlagPermission::READ_WRITE;
+pub const DEFAULT_FLAG_STATE: ProtoFlagState = ProtoFlagState::DISABLED;
+pub const DEFAULT_FLAG_PERMISSION: ProtoFlagPermission = ProtoFlagPermission::READ_WRITE;
 
-pub fn parse_flags(package: &str, declarations: Vec<Input>, values: Vec<Input>) -> Result<Vec<u8>> {
+pub fn parse_flags(
+    package: &str,
+    declarations: Vec<Input>,
+    values: Vec<Input>,
+    default_permission: ProtoFlagPermission,
+) -> Result<Vec<u8>> {
     let mut parsed_flags = ProtoParsedFlags::new();
 
     for mut input in declarations {
@@ -86,11 +91,17 @@
             parsed_flag.set_description(flag_declaration.take_description());
             parsed_flag.bug.append(&mut flag_declaration.bug);
             parsed_flag.set_state(DEFAULT_FLAG_STATE);
-            parsed_flag.set_permission(DEFAULT_FLAG_PERMISSION);
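+            // Flags declared is_fixed_read_only are always READ_ONLY; all other flags
+            // use the default permission passed in by the caller.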
+            let flag_permission = if flag_declaration.is_fixed_read_only() {
+                ProtoFlagPermission::READ_ONLY
+            } else {
+                default_permission
+            };
+            parsed_flag.set_permission(flag_permission);
+            parsed_flag.set_is_fixed_read_only(flag_declaration.is_fixed_read_only());
             let mut tracepoint = ProtoTracepoint::new();
             tracepoint.set_source(input.source.clone());
             tracepoint.set_state(DEFAULT_FLAG_STATE);
-            tracepoint.set_permission(DEFAULT_FLAG_PERMISSION);
+            tracepoint.set_permission(flag_permission);
             parsed_flag.trace.push(tracepoint);
 
             // verify ParsedFlag looks reasonable
@@ -130,6 +141,13 @@
                 continue;
             };
 
+            ensure!(
+                !parsed_flag.is_fixed_read_only()
+                    || flag_value.permission() == ProtoFlagPermission::READ_ONLY,
+                "failed to set permission of flag {}, since this flag is a fixed read-only flag",
+                flag_value.name()
+            );
+
             parsed_flag.set_state(flag_value.state());
             parsed_flag.set_permission(flag_value.permission());
             let mut tracepoint = ProtoTracepoint::new();
@@ -242,7 +260,7 @@
         DumpFormat::Text => {
             for parsed_flag in parsed_flags.parsed_flag.into_iter() {
                 let line = format!(
-                    "{}/{}: {:?} + {:?}\n",
+                    "{}.{}: {:?} + {:?}\n",
                     parsed_flag.package(),
                     parsed_flag.name(),
                     parsed_flag.permission(),
@@ -256,7 +274,7 @@
                 let sources: Vec<_> =
                     parsed_flag.trace.iter().map(|tracepoint| tracepoint.source()).collect();
                 let line = format!(
-                    "{}/{}: {:?} + {:?} ({})\n",
+                    "{}.{}: {:?} + {:?} ({})\n",
                     parsed_flag.package(),
                     parsed_flag.name(),
                     parsed_flag.permission(),
@@ -305,6 +323,7 @@
         assert_eq!(ProtoFlagState::ENABLED, enabled_ro.state());
         assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.permission());
         assert_eq!(3, enabled_ro.trace.len());
+        assert!(!enabled_ro.is_fixed_read_only());
         assert_eq!("tests/test.aconfig", enabled_ro.trace[0].source());
         assert_eq!(ProtoFlagState::DISABLED, enabled_ro.trace[0].state());
         assert_eq!(ProtoFlagPermission::READ_WRITE, enabled_ro.trace[0].permission());
@@ -315,8 +334,11 @@
         assert_eq!(ProtoFlagState::ENABLED, enabled_ro.trace[2].state());
         assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.trace[2].permission());
 
-        assert_eq!(4, parsed_flags.parsed_flag.len());
+        assert_eq!(5, parsed_flags.parsed_flag.len());
         for pf in parsed_flags.parsed_flag.iter() {
+            if pf.name() == "enabled_fixed_ro" {
+                continue;
+            }
             let first = pf.trace.first().unwrap();
             assert_eq!(DEFAULT_FLAG_STATE, first.state());
             assert_eq!(DEFAULT_FLAG_PERMISSION, first.permission());
@@ -325,6 +347,85 @@
             assert_eq!(pf.state(), last.state());
             assert_eq!(pf.permission(), last.permission());
         }
+
+        let enabled_fixed_ro =
+            parsed_flags.parsed_flag.iter().find(|pf| pf.name() == "enabled_fixed_ro").unwrap();
+        assert!(enabled_fixed_ro.is_fixed_read_only());
+        assert_eq!(ProtoFlagState::ENABLED, enabled_fixed_ro.state());
+        assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_fixed_ro.permission());
+        assert_eq!(2, enabled_fixed_ro.trace.len());
+        assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_fixed_ro.trace[0].permission());
+        assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_fixed_ro.trace[1].permission());
+    }
+
+    #[test]
+    fn test_parse_flags_setting_default() {
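+        // No value file is provided, so the flag keeps the default DISABLED state but
+        // inherits the READ_ONLY default permission passed to parse_flags.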
+        let first_flag = r#"
+        package: "com.first"
+        flag {
+            name: "first"
+            namespace: "first_ns"
+            description: "This is the description of the first flag."
+            bug: "123"
+        }
+        "#;
+        let declaration =
+            vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+        let value: Vec<Input> = vec![];
+
+        let flags_bytes = crate::commands::parse_flags(
+            "com.first",
+            declaration,
+            value,
+            ProtoFlagPermission::READ_ONLY,
+        )
+        .unwrap();
+        let parsed_flags =
+            crate::protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
+        assert_eq!(1, parsed_flags.parsed_flag.len());
+        let parsed_flag = parsed_flags.parsed_flag.first().unwrap();
+        assert_eq!(ProtoFlagState::DISABLED, parsed_flag.state());
+        assert_eq!(ProtoFlagPermission::READ_ONLY, parsed_flag.permission());
+    }
+
+    #[test]
+    fn test_parse_flags_override_fixed_read_only() {
+        let first_flag = r#"
+        package: "com.first"
+        flag {
+            name: "first"
+            namespace: "first_ns"
+            description: "This is the description of the first flag."
+            bug: "123"
+            is_fixed_read_only: true
+        }
+        "#;
+        let declaration =
+            vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+        let first_flag_value = r#"
+        flag_value {
+            package: "com.first"
+            name: "first"
+            state: DISABLED
+            permission: READ_WRITE
+        }
+        "#;
+        let value = vec![Input {
+            source: "memory".to_string(),
+            reader: Box::new(first_flag_value.as_bytes()),
+        }];
+        let error = crate::commands::parse_flags(
+            "com.first",
+            declaration,
+            value,
+            ProtoFlagPermission::READ_WRITE,
+        )
+        .unwrap_err();
+        assert_eq!(
+            format!("{:?}", error),
+            "failed to set permission of flag first, since this flag is a fixed read-only flag"
+        );
     }
 
     #[test]
@@ -348,7 +449,7 @@
         let input = parse_test_flags_as_input();
         let bytes = dump_parsed_flags(vec![input], DumpFormat::Text).unwrap();
         let text = std::str::from_utf8(&bytes).unwrap();
-        assert!(text.contains("com.android.aconfig.test/disabled_ro: READ_ONLY + DISABLED"));
+        assert!(text.contains("com.android.aconfig.test.disabled_ro: READ_ONLY + DISABLED"));
     }
 
     #[test]
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
index 920b761..7e44baf 100644
--- a/tools/aconfig/src/main.rs
+++ b/tools/aconfig/src/main.rs
@@ -44,6 +44,14 @@
                 .arg(Arg::new("package").long("package").required(true))
                 .arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
                 .arg(Arg::new("values").long("values").action(ArgAction::Append))
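+                // Default permission for flags that are not declared is_fixed_read_only;
+                // accepts "read_only" or "read_write".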
+                .arg(
+                    Arg::new("default-permission")
+                        .long("default-permission")
+                        .value_parser(protos::flag_permission::parse_from_str)
+                        .default_value(protos::flag_permission::to_string(
+                            &commands::DEFAULT_FLAG_PERMISSION,
+                        )),
+                )
                 .arg(Arg::new("cache").long("cache").required(true)),
         )
         .subcommand(
@@ -129,14 +137,14 @@
 }
 
 fn write_output_file_realtive_to_dir(root: &Path, output_file: &OutputFile) -> Result<()> {
-    let path = root.join(output_file.path.clone());
+    let path = root.join(&output_file.path);
     let parent = path
         .parent()
         .ok_or(anyhow!("unable to locate parent of output file {}", path.display()))?;
     fs::create_dir_all(parent)
         .with_context(|| format!("failed to create directory {}", parent.display()))?;
-    let mut file = fs::File::create(path.clone())
-        .with_context(|| format!("failed to open {}", path.display()))?;
+    let mut file =
+        fs::File::create(&path).with_context(|| format!("failed to open {}", path.display()))?;
     file.write_all(&output_file.contents)
         .with_context(|| format!("failed to write to {}", path.display()))?;
     Ok(())
@@ -161,7 +169,9 @@
             let package = get_required_arg::<String>(sub_matches, "package")?;
             let declarations = open_zero_or_more_files(sub_matches, "declarations")?;
             let values = open_zero_or_more_files(sub_matches, "values")?;
-            let output = commands::parse_flags(package, declarations, values)
+            let default_permission =
+                get_required_arg::<protos::ProtoFlagPermission>(sub_matches, "default-permission")?;
+            let output = commands::parse_flags(package, declarations, values, *default_permission)
                 .context("failed to create cache")?;
             let path = get_required_arg::<String>(sub_matches, "cache")?;
             write_output_to_file_or_stdout(path, &output)?;
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/src/protos.rs
index 2ab6e05..d3b5b37 100644
--- a/tools/aconfig/src/protos.rs
+++ b/tools/aconfig/src/protos.rs
@@ -156,6 +156,26 @@
     }
 }
 
+pub mod flag_permission {
+    use super::*;
+    use anyhow::bail;
+
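+    /// Parses a permission string ("read_only" or "read_write", case-insensitive),
+    /// e.g. the value of the --default-permission command line argument.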
+    pub fn parse_from_str(permission: &str) -> Result<ProtoFlagPermission> {
+        match permission.to_ascii_lowercase().as_str() {
+            "read_write" => Ok(ProtoFlagPermission::READ_WRITE),
+            "read_only" => Ok(ProtoFlagPermission::READ_ONLY),
+            _ => bail!("Permission needs to be read_only or read_write."),
+        }
+    }
+
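+    /// Returns the lowercase command line spelling of a permission.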
+    pub fn to_string(permission: &ProtoFlagPermission) -> &str {
+        match permission {
+            ProtoFlagPermission::READ_WRITE => "read_write",
+            ProtoFlagPermission::READ_ONLY => "read_only",
+        }
+    }
+}
+
 pub mod tracepoint {
     use super::*;
     use anyhow::ensure;
@@ -195,6 +215,17 @@
             super::tracepoint::verify_fields(tp)?;
         }
         ensure!(pf.bug.len() == 1, "bad flag declaration: exactly one bug required");
+        if pf.is_fixed_read_only() {
+            ensure!(
+                pf.permission() == ProtoFlagPermission::READ_ONLY,
+                "bad parsed flag: flag is is_fixed_read_only but permission is not READ_ONLY"
+            );
+            for tp in pf.trace.iter() {
+                ensure!(tp.permission() == ProtoFlagPermission::READ_ONLY,
+                "bad parsed flag: flag is is_fixed_read_only but a tracepoint's permission is not READ_ONLY"
+                );
+            }
+        }
 
         Ok(())
     }
@@ -283,6 +314,7 @@
     namespace: "second_ns"
     description: "This is the description of the second flag."
     bug: "abc"
+    is_fixed_read_only: true
 }
 "#,
         )
@@ -293,11 +325,13 @@
         assert_eq!(first.namespace(), "first_ns");
         assert_eq!(first.description(), "This is the description of the first flag.");
         assert_eq!(first.bug, vec!["123"]);
+        assert!(!first.is_fixed_read_only());
         let second = flag_declarations.flag.iter().find(|pf| pf.name() == "second").unwrap();
         assert_eq!(second.name(), "second");
         assert_eq!(second.namespace(), "second_ns");
         assert_eq!(second.description(), "This is the description of the second flag.");
         assert_eq!(second.bug, vec!["abc"]);
+        assert!(second.is_fixed_read_only());
 
         // bad input: missing package in flag declarations
         let error = flag_declarations::try_from_text_proto(
@@ -524,7 +558,7 @@
     description: "This is the description of the second flag."
     bug: "SOME_BUG"
     state: ENABLED
-    permission: READ_WRITE
+    permission: READ_ONLY
     trace {
         source: "flags.declarations"
         state: DISABLED
@@ -533,8 +567,9 @@
     trace {
         source: "flags.values"
         state: ENABLED
-        permission: READ_WRITE
+        permission: READ_ONLY
     }
+    is_fixed_read_only: true
 }
 "#;
         let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -546,14 +581,15 @@
         assert_eq!(second.description(), "This is the description of the second flag.");
         assert_eq!(second.bug, vec!["SOME_BUG"]);
         assert_eq!(second.state(), ProtoFlagState::ENABLED);
-        assert_eq!(second.permission(), ProtoFlagPermission::READ_WRITE);
+        assert_eq!(second.permission(), ProtoFlagPermission::READ_ONLY);
         assert_eq!(2, second.trace.len());
         assert_eq!(second.trace[0].source(), "flags.declarations");
         assert_eq!(second.trace[0].state(), ProtoFlagState::DISABLED);
         assert_eq!(second.trace[0].permission(), ProtoFlagPermission::READ_ONLY);
         assert_eq!(second.trace[1].source(), "flags.values");
         assert_eq!(second.trace[1].state(), ProtoFlagState::ENABLED);
-        assert_eq!(second.trace[1].permission(), ProtoFlagPermission::READ_WRITE);
+        assert_eq!(second.trace[1].permission(), ProtoFlagPermission::READ_ONLY);
+        assert!(second.is_fixed_read_only());
 
         // valid input: empty
         let parsed_flags = try_from_binary_proto_from_text_proto("").unwrap();
diff --git a/tools/aconfig/src/test.rs b/tools/aconfig/src/test.rs
index 14beb93..9034704 100644
--- a/tools/aconfig/src/test.rs
+++ b/tools/aconfig/src/test.rs
@@ -41,6 +41,7 @@
     state: DISABLED
     permission: READ_ONLY
   }
+  is_fixed_read_only: false
 }
 parsed_flag {
   package: "com.android.aconfig.test"
@@ -55,6 +56,27 @@
     state: DISABLED
     permission: READ_WRITE
   }
+  is_fixed_read_only: false
+}
+parsed_flag {
+  package: "com.android.aconfig.test"
+  name: "enabled_fixed_ro"
+  namespace: "aconfig_test"
+  description: "This flag is fixed READ_ONLY + ENABLED"
+  bug: ""
+  state: ENABLED
+  permission: READ_ONLY
+  trace {
+    source: "tests/test.aconfig"
+    state: DISABLED
+    permission: READ_ONLY
+  }
+  trace {
+    source: "tests/first.values"
+    state: ENABLED
+    permission: READ_ONLY
+  }
+  is_fixed_read_only: true
 }
 parsed_flag {
   package: "com.android.aconfig.test"
@@ -79,6 +101,7 @@
     state: ENABLED
     permission: READ_ONLY
   }
+  is_fixed_read_only: false
 }
 parsed_flag {
   package: "com.android.aconfig.test"
@@ -98,6 +121,7 @@
     state: ENABLED
     permission: READ_WRITE
   }
+  is_fixed_read_only: false
 }
 "#;
 
@@ -118,6 +142,7 @@
                     reader: Box::new(include_bytes!("../tests/second.values").as_slice()),
                 },
             ],
+            crate::commands::DEFAULT_FLAG_PERMISSION,
         )
         .unwrap();
         crate::protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
diff --git a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
new file mode 100644
index 0000000..e4a1fb7
--- /dev/null
+++ b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
@@ -0,0 +1,46 @@
+package {package_name};
+
+import java.util.HashMap;
+import java.util.Map;
+
+/** @hide */
+public class FakeFeatureFlagsImpl implements FeatureFlags \{
+    public FakeFeatureFlagsImpl() \{
+        resetAll();
+    }
+
+{{ for item in class_elements}}
+    @Override
+    public boolean {item.method_name}() \{
+        return getValue(Flags.FLAG_{item.flag_name_constant_suffix});
+    }
+{{ endfor}}
+    public void setFlag(String flagName, boolean value) \{
+        if (!this.mFlagMap.containsKey(flagName)) \{
+            throw new IllegalArgumentException("no such flag " + flagName);
+        }
+        this.mFlagMap.put(flagName, value);
+    }
+
+    public void resetAll() \{
+        for (Map.Entry entry : mFlagMap.entrySet()) \{
+            entry.setValue(null);
+        }
+    }
+
+    private boolean getValue(String flagName) \{
+        Boolean value = this.mFlagMap.get(flagName);
+        if (value == null) \{
+            throw new IllegalArgumentException(flagName + " is not set");
+        }
+        return value;
+    }
+
+    private Map<String, Boolean> mFlagMap = new HashMap<>(
+        Map.of(
+            {{-for item in class_elements}}
+            Flags.FLAG_{item.flag_name_constant_suffix}, false{{ if not @last }},{{ endif }}
+            {{ -endfor }}
+        )
+    );
+}
diff --git a/tools/aconfig/templates/FeatureFlags.java.template b/tools/aconfig/templates/FeatureFlags.java.template
index e0f201f..9350d60 100644
--- a/tools/aconfig/templates/FeatureFlags.java.template
+++ b/tools/aconfig/templates/FeatureFlags.java.template
@@ -1,7 +1,15 @@
 package {package_name};
 
+/** @hide */
 public interface FeatureFlags \{
 {{ for item in class_elements}}
+{{ -if not item.is_read_write }}
+{{ -if item.default_value }}
+    @com.android.aconfig.annotations.AssumeTrueForR8
+{{ -else }}
+    @com.android.aconfig.annotations.AssumeFalseForR8
+{{ -endif- }}
+{{ endif }}
     boolean {item.method_name}();
 {{ endfor }}
 }
diff --git a/tools/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/templates/FeatureFlagsImpl.java.template
index 082d476..3913fa4 100644
--- a/tools/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/templates/FeatureFlagsImpl.java.template
@@ -1,65 +1,58 @@
 package {package_name};
-{{ -if is_test_mode }}
-import static java.util.stream.Collectors.toMap;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.stream.Stream;
-{{ else}}
+{{ if not is_test_mode }}
 {{ if is_read_write- }}
 import android.provider.DeviceConfig;
-{{ -endif- }}
 {{ endif }}
+/** @hide */
 public final class FeatureFlagsImpl implements FeatureFlags \{
 {{ for item in class_elements}}
     @Override
     public boolean {item.method_name}() \{
-        {{ -if not is_test_mode- }}
-        {{ if item.is_read_write }}
-        return DeviceConfig.getBoolean(
+    {{ -if item.is_read_write }}
+        return getValue(
             "{item.device_config_namespace}",
             "{item.device_config_flag}",
             {item.default_value}
         );
-        {{ -else }}
+    {{ else }}
         return {item.default_value};
-        {{ -endif- }}
-        {{ else }}
-        return getFlag(Flags.FLAG_{item.flag_name_constant_suffix});
-        {{ -endif }}
+    {{ endif- }}
     }
-{{ endfor- }}
-{{ if is_test_mode }}
-    public void setFlag(String flagName, boolean value) \{
-        if (!this.mFlagMap.containsKey(flagName)) \{
-            throw new IllegalArgumentException("no such flag" + flagName);
-        }
-        this.mFlagMap.put(flagName, value);
-    }
-
-    public void resetAll() \{
-        for (Map.Entry entry : mFlagMap.entrySet()) \{
-            entry.setValue(null);
-        }
-    }
-
-    private boolean getFlag(String flagName) \{
-        Boolean value = this.mFlagMap.get(flagName);
-        if (value == null) \{
-            throw new IllegalArgumentException(flagName + " is not set");
+{{ endfor }}
+{{ if is_read_write- }}
+    private boolean getValue(String nameSpace,
+        String flagName, boolean defaultValue) \{
+        boolean value = defaultValue;
+        try \{
+            value = DeviceConfig.getBoolean(
+                nameSpace,
+                flagName,
+                defaultValue
+            );
+        } catch (NullPointerException e) \{
+            throw new RuntimeException(
+                "Cannot read value of flag " + flagName + " from DeviceConfig. " +
+                "It could be that the code using the flag executed " +
+                "before SettingsProvider initialization. " +
+                "Please use a fixed read-only flag by adding " +
+                "is_fixed_read_only: true to the flag declaration.",
+                e
+            );
         }
         return value;
     }
-
-    private HashMap<String, Boolean> mFlagMap = Stream.of(
-            {{-for item in class_elements}}
-            Flags.FLAG_{item.flag_name_constant_suffix}{{ if not @last }},{{ endif }}
-            {{ -endfor }}
-        )
-        .collect(
-            HashMap::new,
-            (map, elem) -> map.put(elem, null),
-            HashMap::putAll
-        );
-{{ -endif }}
+{{ endif- }}
 }
+{{ else }}
+{#- Generate only a stub implementation in test mode #}
+/** @hide */
+public final class FeatureFlagsImpl implements FeatureFlags \{
+{{ for item in class_elements}}
+    @Override
+    public boolean {item.method_name}() \{
+        throw new UnsupportedOperationException(
+            "Method is not implemented.");
+    }
+{{ endfor }}
+}
+{{ endif }}
diff --git a/tools/aconfig/templates/Flags.java.template b/tools/aconfig/templates/Flags.java.template
index c244b15..39024a8 100644
--- a/tools/aconfig/templates/Flags.java.template
+++ b/tools/aconfig/templates/Flags.java.template
@@ -1,23 +1,32 @@
 package {package_name};
 
+/** @hide */
 public final class Flags \{
 {{- for item in class_elements}}
+    /** @hide */
     public static final String FLAG_{item.flag_name_constant_suffix} = "{item.device_config_flag}";
 {{- endfor }}
 {{ for item in class_elements}}
+{{ -if not item.is_read_write }}
+{{ -if item.default_value }}
+    @com.android.aconfig.annotations.AssumeTrueForR8
+{{ -else }}
+    @com.android.aconfig.annotations.AssumeFalseForR8
+{{ -endif- }}
+{{ endif }}
     public static boolean {item.method_name}() \{
         return FEATURE_FLAGS.{item.method_name}();
     }
 {{ endfor }}
 {{ -if is_test_mode }}
-    public static void setFeatureFlagsImpl(FeatureFlags featureFlags) \{
+    public static void setFeatureFlags(FeatureFlags featureFlags) \{
         Flags.FEATURE_FLAGS = featureFlags;
     }
 
-    public static void unsetFeatureFlagsImpl() \{
+    public static void unsetFeatureFlags() \{
         Flags.FEATURE_FLAGS = null;
     }
-{{ endif}}
+{{ endif }}
     private static FeatureFlags FEATURE_FLAGS{{ -if not is_test_mode }} = new FeatureFlagsImpl(){{ -endif- }};
 
 }
diff --git a/tools/aconfig/templates/cpp_source_file.template b/tools/aconfig/templates/cpp_source_file.template
index 289e299..c0e7343 100644
--- a/tools/aconfig/templates/cpp_source_file.template
+++ b/tools/aconfig/templates/cpp_source_file.template
@@ -2,6 +2,9 @@
 {{ if readwrite }}
 #include <server_configurable_flags/get_flags.h>
 {{ endif }}
+{{ if for_test }}
+#include <unordered_map>
+{{ endif }}
 
 namespace {cpp_namespace} \{
 
@@ -23,7 +26,7 @@
             } else \{
               {{ if item.readwrite- }}
               return server_configurable_flags::GetServerConfigurableFlag(
-                  "{item.device_config_namespace}",
+                  "aconfig_flags.{item.device_config_namespace}",
                   "{item.device_config_flag}",
                   "{item.default_value}") == "true";
               {{ -else- }}
@@ -50,7 +53,7 @@
         virtual bool {item.flag_name}() override \{
             {{ if item.readwrite- }}
             return server_configurable_flags::GetServerConfigurableFlag(
-                "{item.device_config_namespace}",
+                "aconfig_flags.{item.device_config_namespace}",
                 "{item.device_config_flag}",
                 "{item.default_value}") == "true";
             {{ -else- }}
diff --git a/tools/aconfig/templates/rust_prod.template b/tools/aconfig/templates/rust_prod.template
index d518694..e22ad6f 100644
--- a/tools/aconfig/templates/rust_prod.template
+++ b/tools/aconfig/templates/rust_prod.template
@@ -10,7 +10,7 @@
     pub fn {flag.name}(&self) -> bool \{
     {{ if flag.readwrite -}}
         flags_rust::GetServerConfigurableFlag(
-          "{flag.device_config_namespace}",
+          "aconfig_flags.{flag.device_config_namespace}",
           "{flag.device_config_flag}",
           "{flag.default_value}") == "true"
     {{ -else- }}
diff --git a/tools/aconfig/templates/rust_test.template b/tools/aconfig/templates/rust_test.template
index 1e2c28a..fd1229b 100644
--- a/tools/aconfig/templates/rust_test.template
+++ b/tools/aconfig/templates/rust_test.template
@@ -15,7 +15,7 @@
         self.overrides.get("{flag.name}").copied().unwrap_or(
         {{ if flag.readwrite -}}
           flags_rust::GetServerConfigurableFlag(
-            "{flag.device_config_namespace}",
+            "aconfig_flags.{flag.device_config_namespace}",
             "{flag.device_config_flag}",
             "{flag.default_value}") == "true"
         {{ -else- }}
diff --git a/tools/aconfig/tests/AconfigHostTest.java b/tools/aconfig/tests/AconfigHostTest.java
new file mode 100644
index 0000000..ea71b7e
--- /dev/null
+++ b/tools/aconfig/tests/AconfigHostTest.java
@@ -0,0 +1,88 @@
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+
+import com.android.aconfig.test.FakeFeatureFlagsImpl;
+import com.android.aconfig.test.FeatureFlags;
+import com.android.aconfig.test.FeatureFlagsImpl;
+import com.android.aconfig.test.Flags;
+
+@RunWith(JUnit4.class)
+public final class AconfigHostTest {
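+    // In test mode the generated FeatureFlagsImpl only throws, so these tests exercise
+    // FakeFeatureFlagsImpl and the Flags.setFeatureFlags/unsetFeatureFlags test hooks.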
+    @Test
+    public void testThrowsExceptionIfFlagNotSet() {
+        assertThrows(NullPointerException.class, () -> Flags.disabledRo());
+        FakeFeatureFlagsImpl featureFlags = new FakeFeatureFlagsImpl();
+        assertThrows(IllegalArgumentException.class, () -> featureFlags.disabledRo());
+    }
+
+    @Test
+    public void testSetFlagInFakeFeatureFlagsImpl() {
+        FakeFeatureFlagsImpl featureFlags = new FakeFeatureFlagsImpl();
+        featureFlags.setFlag(Flags.FLAG_ENABLED_RW, true);
+        assertTrue(featureFlags.enabledRw());
+        featureFlags.setFlag(Flags.FLAG_ENABLED_RW, false);
+        assertFalse(featureFlags.enabledRw());
+
+        // Set Flags
+        assertThrows(NullPointerException.class, () -> Flags.enabledRw());
+        Flags.setFeatureFlags(featureFlags);
+        featureFlags.setFlag(Flags.FLAG_ENABLED_RW, true);
+        assertTrue(Flags.enabledRw());
+        Flags.unsetFeatureFlags();
+    }
+
+    @Test
+    public void testSetFlagWithRandomName() {
+        FakeFeatureFlagsImpl featureFlags = new FakeFeatureFlagsImpl();
+        assertThrows(IllegalArgumentException.class,
+            () -> featureFlags.setFlag("Random_name", true));
+    }
+
+    @Test
+    public void testResetFlagsInFakeFeatureFlagsImpl() {
+        FakeFeatureFlagsImpl featureFlags = new FakeFeatureFlagsImpl();
+        featureFlags.setFlag(Flags.FLAG_ENABLED_RO, true);
+        assertTrue(featureFlags.enabledRo());
+        featureFlags.resetAll();
+        assertThrows(IllegalArgumentException.class, () -> featureFlags.enabledRo());
+
+        // Set value after reset
+        featureFlags.setFlag(Flags.FLAG_ENABLED_RO, false);
+        assertFalse(featureFlags.enabledRo());
+    }
+
+    @Test
+    public void testFlagsSetFeatureFlags() {
+        FakeFeatureFlagsImpl featureFlags = new FakeFeatureFlagsImpl();
+        featureFlags.setFlag(Flags.FLAG_ENABLED_RW, true);
+        assertThrows(NullPointerException.class, () -> Flags.enabledRw());
+        Flags.setFeatureFlags(featureFlags);
+        assertTrue(Flags.enabledRw());
+        Flags.unsetFeatureFlags();
+    }
+
+    @Test
+    public void testFlagsUnsetFeatureFlags() {
+        FakeFeatureFlagsImpl featureFlags = new FakeFeatureFlagsImpl();
+        featureFlags.setFlag(Flags.FLAG_ENABLED_RW, true);
+        assertThrows(NullPointerException.class, () -> Flags.enabledRw());
+        Flags.setFeatureFlags(featureFlags);
+        assertTrue(Flags.enabledRw());
+
+        Flags.unsetFeatureFlags();
+        assertThrows(NullPointerException.class, () -> Flags.enabledRw());
+    }
+
+    @Test
+    public void testFeatureFlagsImplNotImpl() {
+        FeatureFlags featureFlags = new FeatureFlagsImpl();
+        assertThrows(UnsupportedOperationException.class,
+            () -> featureFlags.enabledRw());
+    }
+}
diff --git a/tools/aconfig/tests/AconfigTest.java b/tools/aconfig/tests/AconfigTest.java
index 6681f32..958b02e 100644
--- a/tools/aconfig/tests/AconfigTest.java
+++ b/tools/aconfig/tests/AconfigTest.java
@@ -1,19 +1,25 @@
 import static com.android.aconfig.test.Flags.FLAG_DISABLED_RO;
 import static com.android.aconfig.test.Flags.FLAG_DISABLED_RW;
+import static com.android.aconfig.test.Flags.FLAG_ENABLED_FIXED_RO;
 import static com.android.aconfig.test.Flags.FLAG_ENABLED_RO;
 import static com.android.aconfig.test.Flags.FLAG_ENABLED_RW;
 import static com.android.aconfig.test.Flags.disabledRo;
 import static com.android.aconfig.test.Flags.disabledRw;
+import static com.android.aconfig.test.Flags.enabledFixedRo;
 import static com.android.aconfig.test.Flags.enabledRo;
 import static com.android.aconfig.test.Flags.enabledRw;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
+import com.android.aconfig.test.FakeFeatureFlagsImpl;
+import com.android.aconfig.test.FeatureFlags;
+
 @RunWith(JUnit4.class)
 public final class AconfigTest {
     @Test
@@ -31,6 +37,14 @@
     }
 
     @Test
+    public void testEnabledFixedReadOnlyFlag() {
+        assertEquals("com.android.aconfig.test.enabled_fixed_ro", FLAG_ENABLED_FIXED_RO);
+        // TODO: change to assertTrue(enabledFixedRo()) when the build supports reading tests/*.values
+        // (currently all flags are assigned the default READ_ONLY + DISABLED)
+        assertFalse(enabledFixedRo());
+    }
+
+    @Test
     public void testDisabledReadWriteFlag() {
         assertEquals("com.android.aconfig.test.enabled_ro", FLAG_ENABLED_RO);
         assertFalse(disabledRw());
@@ -43,4 +57,11 @@
         // (currently all flags are assigned the default READ_ONLY + DISABLED)
         assertFalse(enabledRw());
     }
+
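+    // Unlike FeatureFlagsImpl, the fake reads from an in-memory map rather than
+    // DeviceConfig, so the value set here is returned directly.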
+    @Test
+    public void testFakeFeatureFlagsImplImpled() {
+        FakeFeatureFlagsImpl fakeFeatureFlags = new FakeFeatureFlagsImpl();
+        fakeFeatureFlags.setFlag(FLAG_ENABLED_RW, false);
+        assertFalse(fakeFeatureFlags.enabledRw());
+    }
 }
diff --git a/tools/aconfig/tests/aconfig_prod_mode_test.rs b/tools/aconfig/tests/aconfig_prod_mode_test.rs
new file mode 100644
index 0000000..950c441
--- /dev/null
+++ b/tools/aconfig/tests/aconfig_prod_mode_test.rs
@@ -0,0 +1,9 @@
+#[cfg(not(feature = "cargo"))]
+#[test]
+fn test_flags() {
+    assert!(!aconfig_test_rust_library::disabled_ro());
+    assert!(!aconfig_test_rust_library::disabled_rw());
+    // TODO: Fix template to not default both disabled and enabled to false
+    assert!(!aconfig_test_rust_library::enabled_ro());
+    assert!(!aconfig_test_rust_library::enabled_rw());
+}
diff --git a/tools/aconfig/tests/aconfig_test_mode_test.rs b/tools/aconfig/tests/aconfig_test_mode_test.rs
new file mode 100644
index 0000000..3f56d2c
--- /dev/null
+++ b/tools/aconfig/tests/aconfig_test_mode_test.rs
@@ -0,0 +1,24 @@
+#[cfg(not(feature = "cargo"))]
+#[test]
+fn test_flags() {
+    assert!(!aconfig_test_rust_library::disabled_ro());
+    assert!(!aconfig_test_rust_library::disabled_rw());
+    // TODO: Fix template to not default both disabled and enabled to false
+    assert!(!aconfig_test_rust_library::enabled_ro());
+    assert!(!aconfig_test_rust_library::enabled_rw());
+
+    aconfig_test_rust_library::set_disabled_ro(true);
+    assert!(aconfig_test_rust_library::disabled_ro());
+    aconfig_test_rust_library::set_disabled_rw(true);
+    assert!(aconfig_test_rust_library::disabled_rw());
+    aconfig_test_rust_library::set_enabled_ro(true);
+    assert!(aconfig_test_rust_library::enabled_ro());
+    aconfig_test_rust_library::set_enabled_rw(true);
+    assert!(aconfig_test_rust_library::enabled_rw());
+
+    aconfig_test_rust_library::reset_flags();
+    assert!(!aconfig_test_rust_library::disabled_ro());
+    assert!(!aconfig_test_rust_library::disabled_rw());
+    assert!(!aconfig_test_rust_library::enabled_ro());
+    assert!(!aconfig_test_rust_library::enabled_rw());
+}
diff --git a/tools/aconfig/tests/first.values b/tools/aconfig/tests/first.values
index e524404..a450f78 100644
--- a/tools/aconfig/tests/first.values
+++ b/tools/aconfig/tests/first.values
@@ -16,3 +16,9 @@
     state: ENABLED
     permission: READ_WRITE
 }
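+# enabled_fixed_ro is declared is_fixed_read_only, so this value must keep permission READ_ONLY.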
+flag_value {
+    package: "com.android.aconfig.test"
+    name: "enabled_fixed_ro"
+    state: ENABLED
+    permission: READ_ONLY
+}
diff --git a/tools/aconfig/tests/test.aconfig b/tools/aconfig/tests/test.aconfig
index 46cf1e9..aaa6df5 100644
--- a/tools/aconfig/tests/test.aconfig
+++ b/tools/aconfig/tests/test.aconfig
@@ -40,3 +40,14 @@
     description: "This flag is DISABLED + READ_WRITE"
     bug: "456"
 }
+
+# This flag's final value is calculated from:
+# - test.aconfig: DISABLED + READ_ONLY
+# - first.values: ENABLED + READ_ONLY
+flag {
+    name: "enabled_fixed_ro"
+    namespace: "aconfig_test"
+    description: "This flag is fixed READ_ONLY + ENABLED"
+    bug: ""
+    is_fixed_read_only: true
+}
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
index ce64160..8ee599a 100755
--- a/tools/auto_gen_test_config.py
+++ b/tools/auto_gen_test_config.py
@@ -17,6 +17,8 @@
 """A tool to generate TradeFed test config file.
 """
 
+import argparse
+import re
 import os
 import shutil
 import sys
@@ -42,42 +44,85 @@
   Returns:
     0 if no error, otherwise 1.
   """
-  if len(argv) != 4 and len(argv) != 6:
-    sys.stderr.write(
-        'Invalid arguments. The script requires 4 arguments for file paths: '
-        'target_config android_manifest empty_config '
-        'instrumentation_test_config_template '
-        'and 2 optional arguments for extra configs: '
-        '--extra-configs \'EXTRA_CONFIGS\'.\n')
-    return 1
 
-  target_config = argv[0]
-  android_manifest = argv[1]
-  empty_config = argv[2]
-  instrumentation_test_config_template = argv[3]
-  extra_configs = '\n'.join(argv[5].split('\\n')) if len(argv) == 6 else ''
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      "target_config",
+      help="Path to the generated output config.")
+  parser.add_argument(
+      "android_manifest",
+      help="Path to AndroidManifest.xml or output of 'aapt2 dump xmltree' with .xmltree extension.")
+  parser.add_argument(
+      "empty_config",
+      help="Path to the empty config template.")
+  parser.add_argument(
+      "instrumentation_test_config_template",
+      help="Path to the instrumentation test config template.")
+  parser.add_argument("--extra-configs", default="")
+  args = parser.parse_args(argv)
 
-  manifest = parse(android_manifest)
-  instrumentation_elements = manifest.getElementsByTagName('instrumentation')
-  manifest_elements = manifest.getElementsByTagName('manifest')
-  if len(instrumentation_elements) != 1 or len(manifest_elements) != 1:
-    # Failed to locate instrumentation or manifest element in AndroidManifest.
-    # file. Empty test config file will be created.
-    shutil.copyfile(empty_config, target_config)
-    return 0
+  target_config = args.target_config
+  android_manifest = args.android_manifest
+  empty_config = args.empty_config
+  instrumentation_test_config_template = args.instrumentation_test_config_template
+  extra_configs = '\n'.join(args.extra_configs.split('\\n'))
 
   module = os.path.splitext(os.path.basename(target_config))[0]
-  instrumentation = instrumentation_elements[0]
-  manifest = manifest_elements[0]
-  if ATTRIBUTE_LABEL in instrumentation.attributes:
-    label = instrumentation.attributes[ATTRIBUTE_LABEL].value
-  else:
+
+  # If the AndroidManifest.xml is not available, but the APK is, this tool also
+  # accepts the output of `aapt2 dump xmltree <apk> AndroidManifest.xml` written
+  # into a file. This is a custom structured aapt2 output - not raw XML!
+  if android_manifest.endswith(".xmltree"):
     label = module
+    # A malformed dump may omit these attributes; initialize them so the fallback
+    # check below copies the empty config instead of raising a NameError.
+    runner = None
+    package = None
-  runner = instrumentation.attributes[ATTRIBUTE_RUNNER].value
-  package = manifest.attributes[ATTRIBUTE_PACKAGE].value
+    with open(android_manifest, encoding="utf-8") as manifest:
+      # e.g. A: package="android.test.example.helloworld" (Raw: "android.test.example.helloworld")
+      #                                                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+      pattern = re.compile(r"\(Raw:\s\"(.*)\"\)$")
+      curr_element = None
+      for line in manifest:
+        curr_line = line.strip()
+        if curr_line.startswith("E:"):
+          # e.g. "E: instrumentation (line=9)"
+          #          ^^^^^^^^^^^^^^^
+          curr_element = curr_line.split(" ")[1]
+        if curr_element == "instrumentation":
+          if ATTRIBUTE_RUNNER in curr_line:
+            runner = re.findall(pattern, curr_line)[0]
+          if ATTRIBUTE_LABEL in curr_line:
+            label = re.findall(pattern, curr_line)[0]
+        if curr_element == "manifest":
+          if ATTRIBUTE_PACKAGE in curr_line:
+            package = re.findall(pattern, curr_line)[0]
+
+    if not (runner and label and package):
+      # Failed to locate instrumentation or manifest element in AndroidManifest.
+      # file. Empty test config file will be created.
+      shutil.copyfile(empty_config, target_config)
+      return 0
+
+  else:
+    # If the AndroidManifest.xml file is directly available, read it as an XML file.
+    manifest = parse(android_manifest)
+    instrumentation_elements = manifest.getElementsByTagName('instrumentation')
+    manifest_elements = manifest.getElementsByTagName('manifest')
+    if len(instrumentation_elements) != 1 or len(manifest_elements) != 1:
+      # Failed to locate instrumentation or manifest element in AndroidManifest.
+      # file. Empty test config file will be created.
+      shutil.copyfile(empty_config, target_config)
+      return 0
+
+    instrumentation = instrumentation_elements[0]
+    manifest = manifest_elements[0]
+    if ATTRIBUTE_LABEL in instrumentation.attributes:
+      label = instrumentation.attributes[ATTRIBUTE_LABEL].value
+    else:
+      label = module
+    runner = instrumentation.attributes[ATTRIBUTE_RUNNER].value
+    package = manifest.attributes[ATTRIBUTE_PACKAGE].value
+
   test_type = ('InstrumentationTest'
-               if runner.endswith('.InstrumentationTestRunner')
-               else 'AndroidJUnitTest')
+              if runner.endswith('.InstrumentationTestRunner')
+              else 'AndroidJUnitTest')
 
   with open(instrumentation_test_config_template) as template:
     config = template.read()
diff --git a/tools/auto_gen_test_config_test.py b/tools/auto_gen_test_config_test.py
index 51a8583..ce97723 100644
--- a/tools/auto_gen_test_config_test.py
+++ b/tools/auto_gen_test_config_test.py
@@ -30,6 +30,24 @@
 </manifest>
 """
 
+XMLTREE_JUNIT_TEST = """N: android=http://schemas.android.com/apk/res/android (line=2)
+  E: manifest (line=2)
+    A: package="com.android.my.tests.x" (Raw: "com.android.my.tests.x")
+      E: instrumentation (line=9)
+        A: http://schemas.android.com/apk/res/android:label(0x01010001)="TestModule" (Raw: "TestModule")
+        A: http://schemas.android.com/apk/res/android:name(0x01010003)="androidx.test.runner.AndroidJUnitRunner" (Raw: "androidx.test.runner.AndroidJUnitRunner")
+        A: http://schemas.android.com/apk/res/android:targetPackage(0x01010021)="com.android.my.tests" (Raw: "com.android.my.tests")
+"""
+
+XMLTREE_INSTRUMENTATION_TEST = """N: android=http://schemas.android.com/apk/res/android (line=2)
+  E: manifest (line=2)
+    A: package="com.android.my.tests.x" (Raw: "com.android.my.tests.x")
+      E: instrumentation (line=9)
+        A: http://schemas.android.com/apk/res/android:label(0x01010001)="TestModule" (Raw: "TestModule")
+        A: http://schemas.android.com/apk/res/android:name(0x01010003)="android.test.InstrumentationTestRunner" (Raw: "android.test.InstrumentationTestRunner")
+        A: http://schemas.android.com/apk/res/android:targetPackage(0x01010021)="com.android.my.tests" (Raw: "com.android.my.tests")
+"""
+
 MANIFEST_JUNIT_TEST = """<?xml version="1.0" encoding="utf-8"?>
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
   package="com.android.my.tests.x">
@@ -45,12 +63,12 @@
     <instrumentation
         android:name="android.test.InstrumentationTestRunner"
         android:targetPackage="com.android.my.tests"
-        android:label="My Tests" />
+        android:label="TestModule" />
 </manifest>
 """
 
 EXPECTED_JUNIT_TEST_CONFIG = """<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2017 The Android Open Source Project
+<!-- Copyright (C) 2023 The Android Open Source Project
 
      Licensed under the Apache License, Version 2.0 (the "License");
      you may not use this file except in compliance with the License.
@@ -66,19 +84,23 @@
 -->
 <!-- This test config file is auto-generated. -->
 <configuration description="Runs TestModule.">
+    <option name="test-suite-tag" value="apct" />
+    <option name="test-suite-tag" value="apct-instrumentation" />
+
     <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
         <option name="test-file-name" value="TestModule.apk" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
-        <option name="package" value="com.android.my.tests.x" />
+        {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="com.android.my.tests.x" />
         <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
     </test>
 </configuration>
 """
 
 EXPECTED_INSTRUMENTATION_TEST_CONFIG = """<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2017 The Android Open Source Project
+<!-- Copyright (C) 2023 The Android Open Source Project
 
      Licensed under the Apache License, Version 2.0 (the "License");
      you may not use this file except in compliance with the License.
@@ -93,23 +115,74 @@
      limitations under the License.
 -->
 <!-- This test config file is auto-generated. -->
-<configuration description="Runs My Tests.">
+<configuration description="Runs TestModule.">
+    <option name="test-suite-tag" value="apct" />
+    <option name="test-suite-tag" value="apct-instrumentation" />
+
     <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
         <option name="test-file-name" value="TestModule.apk" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.InstrumentationTest" >
-        <option name="package" value="com.android.my.tests.x" />
+        {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="com.android.my.tests.x" />
         <option name="runner" value="android.test.InstrumentationTestRunner" />
     </test>
 </configuration>
 """
 
-TOOLS_DIR = os.path.dirname(os.path.dirname(__file__))
-EMPTY_TEST_CONFIG = os.path.join(
-    TOOLS_DIR, '..', 'core', 'empty_test_config.xml')
-INSTRUMENTATION_TEST_CONFIG_TEMPLATE = os.path.join(
-    TOOLS_DIR, '..', 'core', 'instrumentation_test_config_template.xml')
+EMPTY_TEST_CONFIG_CONTENT = """<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- No AndroidTest.xml was provided and the manifest does not include
+     instrumentation, hence this apk is not instrumentable.
+-->
+<configuration description="Empty Configuration" />
+"""
+
+INSTRUMENTATION_TEST_CONFIG_TEMPLATE_CONTENT = """<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {LABEL}.">
+    <option name="test-suite-tag" value="apct" />
+    <option name="test-suite-tag" value="apct-instrumentation" />
+{EXTRA_CONFIGS}
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
+        <option name="test-file-name" value="{MODULE}.apk" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.{TEST_TYPE}" >
+        {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="{PACKAGE}" />
+        <option name="runner" value="{RUNNER}" />
+    </test>
+</configuration>
+"""
 
 
 class AutoGenTestConfigUnittests(unittest.TestCase):
@@ -120,6 +193,16 @@
     self.test_dir = tempfile.mkdtemp()
     self.config_file = os.path.join(self.test_dir, TEST_MODULE + '.config')
     self.manifest_file = os.path.join(self.test_dir, 'AndroidManifest.xml')
+    self.xmltree_file = os.path.join(self.test_dir, TEST_MODULE + '.xmltree')
+    self.empty_test_config_file = os.path.join(self.test_dir, 'empty.config')
+    self.instrumentation_test_config_template_file = os.path.join(
+        self.test_dir, 'instrumentation.config')
+
+    with open(self.empty_test_config_file, 'w') as f:
+      f.write(EMPTY_TEST_CONFIG_CONTENT)
+
+    with open(self.instrumentation_test_config_template_file, 'w') as f:
+      f.write(INSTRUMENTATION_TEST_CONFIG_TEMPLATE_CONTENT)
 
   def tearDown(self):
     """Cleanup the test directory."""
@@ -133,11 +216,11 @@
 
     argv = [self.config_file,
             self.manifest_file,
-            EMPTY_TEST_CONFIG,
-            INSTRUMENTATION_TEST_CONFIG_TEMPLATE]
+            self.empty_test_config_file,
+            self.instrumentation_test_config_template_file]
     auto_gen_test_config.main(argv)
     with open(self.config_file) as config_file:
-      with open(EMPTY_TEST_CONFIG) as empty_config:
+      with open(self.empty_test_config_file) as empty_config:
         self.assertEqual(config_file.read(), empty_config.read())
 
   def testCreateJUnitTestConfig(self):
@@ -148,8 +231,8 @@
 
     argv = [self.config_file,
             self.manifest_file,
-            EMPTY_TEST_CONFIG,
-            INSTRUMENTATION_TEST_CONFIG_TEMPLATE]
+            self.empty_test_config_file,
+            self.instrumentation_test_config_template_file]
     auto_gen_test_config.main(argv)
     with open(self.config_file) as config_file:
       self.assertEqual(config_file.read(), EXPECTED_JUNIT_TEST_CONFIG)
@@ -162,8 +245,37 @@
 
     argv = [self.config_file,
             self.manifest_file,
-            EMPTY_TEST_CONFIG,
-            INSTRUMENTATION_TEST_CONFIG_TEMPLATE]
+            self.empty_test_config_file,
+            self.instrumentation_test_config_template_file]
+    auto_gen_test_config.main(argv)
+    with open(self.config_file) as config_file:
+      self.assertEqual(
+          config_file.read(), EXPECTED_INSTRUMENTATION_TEST_CONFIG)
+
+  def testCreateJUnitTestConfigWithXMLTree(self):
+    """Test creating test config for AndroidJUnitTest.
+    """
+    with open(self.xmltree_file, 'w') as f:
+      f.write(XMLTREE_JUNIT_TEST)
+
+    argv = [self.config_file,
+            self.xmltree_file,
+            self.empty_test_config_file,
+            self.instrumentation_test_config_template_file]
+    auto_gen_test_config.main(argv)
+    with open(self.config_file) as config_file:
+      self.assertEqual(config_file.read(), EXPECTED_JUNIT_TEST_CONFIG)
+
+  def testCreateInstrumentationTestConfigWithXMLTree(self):
+    """Test creating test config for InstrumentationTest.
+    """
+    with open(self.xmltree_file, 'w') as f:
+      f.write(XMLTREE_INSTRUMENTATION_TEST)
+
+    argv = [self.config_file,
+            self.xmltree_file,
+            self.empty_test_config_file,
+            self.instrumentation_test_config_template_file]
     auto_gen_test_config.main(argv)
     with open(self.config_file) as config_file:
       self.assertEqual(
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 5a7cc76..bd347a1 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -165,6 +165,7 @@
         "ota_utils_lib",
     ],
     required: [
+        "apexd_host",
         "brillo_update_payload",
         "checkvintf",
         "generate_gki_certificate",
@@ -344,6 +345,7 @@
     },
     srcs: [
         "merge_ota.py",
+        "ota_signing_utils.py",
     ],
     libs: [
         "ota_metadata_proto",
@@ -493,6 +495,26 @@
 }
 
 python_binary_host {
+    name: "ota_from_raw_img",
+    srcs: [
+        "ota_from_raw_img.py",
+        "ota_signing_utils.py",
+    ],
+    main: "ota_from_raw_img.py",
+    defaults: [
+        "releasetools_binary_defaults",
+    ],
+    required: [
+        "delta_generator",
+    ],
+    libs: [
+        "ota_metadata_proto",
+        "releasetools_common",
+        "ota_utils_lib",
+    ],
+}
+
+python_binary_host {
     name: "ota_package_parser",
     defaults: ["releasetools_binary_defaults"],
     srcs: [
@@ -590,6 +612,7 @@
         "sign_target_files_apks.py",
         "validate_target_files.py",
         "merge_ota.py",
+        "ota_signing_utils.py",
         ":releasetools_merge_sources",
         ":releasetools_merge_tests",
 
@@ -621,6 +644,7 @@
         },
     },
     required: [
+        "apexd_host",
         "deapexer",
     ],
 }
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index f29d801..31f8736 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -517,12 +517,14 @@
   return img.name
 
 
-def AddCustomImages(output_zip, partition_name):
-  """Adds and signs custom images in IMAGES/.
+def AddCustomImages(output_zip, partition_name, image_list):
+  """Adds and signs avb custom images as needed in IMAGES/.
 
   Args:
     output_zip: The output zip file (needs to be already open), or None to
         write images to OPTIONS.input_tmp/.
+    partition_name: The custom image partition name.
+    image_list: The image list of the custom image partition.
 
   Uses the image under IMAGES/ if it already exists. Otherwise looks for the
   image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
@@ -531,19 +533,20 @@
     AssertionError: If image can't be found.
   """
 
+  builder = None
   key_path = OPTIONS.info_dict.get("avb_{}_key_path".format(partition_name))
-  algorithm = OPTIONS.info_dict.get("avb_{}_algorithm".format(partition_name))
-  extra_args = OPTIONS.info_dict.get(
-      "avb_{}_add_hashtree_footer_args".format(partition_name))
-  partition_size = OPTIONS.info_dict.get(
-      "avb_{}_partition_size".format(partition_name))
+  if key_path is not None:
+    algorithm = OPTIONS.info_dict.get("avb_{}_algorithm".format(partition_name))
+    extra_args = OPTIONS.info_dict.get(
+        "avb_{}_add_hashtree_footer_args".format(partition_name))
+    partition_size = OPTIONS.info_dict.get(
+        "avb_{}_partition_size".format(partition_name))
 
-  builder = verity_utils.CreateCustomImageBuilder(
-      OPTIONS.info_dict, partition_name, partition_size,
-      key_path, algorithm, extra_args)
+    builder = verity_utils.CreateCustomImageBuilder(
+        OPTIONS.info_dict, partition_name, partition_size,
+        key_path, algorithm, extra_args)
 
-  for img_name in OPTIONS.info_dict.get(
-          "avb_{}_image_list".format(partition_name)).split():
+  for img_name in image_list:
     custom_image = OutputFile(
         output_zip, OPTIONS.input_tmp, "IMAGES", img_name)
     if os.path.exists(custom_image.name):
@@ -1066,18 +1069,29 @@
 
   # Custom images.
   custom_partitions = OPTIONS.info_dict.get(
-      "avb_custom_images_partition_list", "").strip().split()
+      "custom_images_partition_list", "").strip().split()
   for partition_name in custom_partitions:
     partition_name = partition_name.strip()
     banner("custom images for " + partition_name)
-    partitions[partition_name] = AddCustomImages(output_zip, partition_name)
+    image_list = OPTIONS.info_dict.get(
+          "{}_image_list".format(partition_name)).split()
+    partitions[partition_name] = AddCustomImages(output_zip, partition_name, image_list)
+
+  avb_custom_partitions = OPTIONS.info_dict.get(
+      "avb_custom_images_partition_list", "").strip().split()
+  for partition_name in avb_custom_partitions:
+    partition_name = partition_name.strip()
+    banner("avb custom images for " + partition_name)
+    image_list = OPTIONS.info_dict.get(
+          "avb_{}_image_list".format(partition_name)).split()
+    partitions[partition_name] = AddCustomImages(output_zip, partition_name, image_list)
 
   if OPTIONS.info_dict.get("avb_enable") == "true":
     # vbmeta_partitions includes the partitions that should be included into
     # top-level vbmeta.img, which are the ones that are not included in any
     # chained VBMeta image plus the chained VBMeta images themselves.
-    # Currently custom_partitions are all chained to VBMeta image.
-    vbmeta_partitions = common.AVB_PARTITIONS[:] + tuple(custom_partitions)
+    # Currently avb_custom_partitions are all chained to VBMeta image.
+    vbmeta_partitions = common.AVB_PARTITIONS[:] + tuple(avb_custom_partitions)
 
     vbmeta_system = OPTIONS.info_dict.get("avb_vbmeta_system", "").strip()
     if vbmeta_system:
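For reference, a hedged sketch of the misc_info entries the two loops above consume; the partition names and image file names below are invented, and only the key patterns come from the code in this hunk.

# Hypothetical misc_info values illustrating the two custom-image key families
# read by the AddCustomImages() callers above (partition names are made up):
info_dict = {
    # Non-AVB custom partitions: per-partition "<name>_image_list" keys.
    "custom_images_partition_list": "oem",
    "oem_image_list": "oem.img oem_a.img",
    # AVB custom partitions: "avb_<name>_*" keys, chained into vbmeta.
    "avb_custom_images_partition_list": "extra",
    "avb_extra_image_list": "extra.img",
    "avb_extra_key_path": "external/avb/test/data/testkey_rsa4096.pem",
}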
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 8c6d597..5e4130c 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2011 The Android Open Source Project
 #
@@ -22,9 +22,9 @@
             target_output_directory
 """
 
-from __future__ import print_function
 import datetime
 
+import argparse
 import glob
 import logging
 import os
@@ -34,6 +34,7 @@
 import shutil
 import sys
 import uuid
+import tempfile
 
 import common
 import verity_utils
@@ -919,27 +920,69 @@
   common.BuildVBMeta(output_path, partitions, name, vbmeta_partitions)
 
 
-def main(argv):
-  args = common.ParseOptions(argv, __doc__)
+def BuildImageOrVBMeta(input_directory, target_out, glob_dict, image_properties, out_file):
+  try:
+    if "vbmeta" in os.path.basename(out_file):
+      OPTIONS.info_dict = glob_dict
+      BuildVBMeta(input_directory, glob_dict, out_file)
+    else:
+      BuildImage(input_directory, image_properties, out_file, target_out)
+  except:
+    logger.error("Failed to build %s from %s", out_file, input_directory)
+    raise
 
-  if len(args) != 4:
-    print(__doc__)
-    sys.exit(1)
+
+def CopyInputDirectory(src, dst, filter_file):
+  with open(filter_file, 'r') as f:
+    for line in f:
+      line = line.strip()
+      if not line:
+        return
+      if line != os.path.normpath(line):
+        sys.exit(f"{line}: not normalized")
+      if line.startswith("../") or line.startswith('/'):
+        sys.exit(f"{line}: escapes staging directory by starting with ../ or /")
+      full_src = os.path.join(src, line)
+      full_dst = os.path.join(dst, line)
+      if os.path.isdir(full_src):
+        os.makedirs(full_dst, exist_ok=True)
+      else:
+        os.makedirs(os.path.dirname(full_dst), exist_ok=True)
+        os.link(full_src, full_dst, follow_symlinks=False)
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(
+    description="Builds output_image from the given input_directory and properties_file, and "
+    "writes the image to target_output_directory.")
+  parser.add_argument("--input-directory-filter-file",
+    help="the path to a file that contains a list of all files in the input_directory. If this "
+    "option is provided, all files under the input_directory that are not listed in this file will "
+    "be deleted before building the image. This is to work around the fact that building a module "
+    "will install in by default, so there could be files in the input_directory that are not "
+    "actually supposed to be part of the partition. The paths in this file must be relative to "
+    "input_directory.")
+  parser.add_argument("input_directory",
+    help="the staging directory to be converted to an image file")
+  parser.add_argument("properties_file",
+    help="a file containing the 'global dictionary' of properties that affect how the image is "
+    "built")
+  parser.add_argument("out_file",
+    help="the output file to write")
+  parser.add_argument("target_out",
+    help="the path to $(TARGET_OUT). Certain tools will use this to look through multiple staging "
+    "directories for fs config files.")
+  args = parser.parse_args()
 
   common.InitLogging()
 
-  in_dir = args[0]
-  glob_dict_file = args[1]
-  out_file = args[2]
-  target_out = args[3]
-
-  glob_dict = LoadGlobalDict(glob_dict_file)
+  glob_dict = LoadGlobalDict(args.properties_file)
   if "mount_point" in glob_dict:
     # The caller knows the mount point and provides a dictionary needed by
     # BuildImage().
     image_properties = glob_dict
   else:
-    image_filename = os.path.basename(out_file)
+    image_filename = os.path.basename(args.out_file)
     mount_point = ""
     if image_filename == "system.img":
       mount_point = "system"
@@ -974,15 +1017,12 @@
     if "vbmeta" != mount_point:
       image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
 
-  try:
-    if "vbmeta" in os.path.basename(out_file):
-      OPTIONS.info_dict = glob_dict
-      BuildVBMeta(in_dir, glob_dict, out_file)
-    else:
-      BuildImage(in_dir, image_properties, out_file, target_out)
-  except:
-    logger.error("Failed to build %s from %s", out_file, in_dir)
-    raise
+  if args.input_directory_filter_file and not os.environ.get("BUILD_BROKEN_INCORRECT_PARTITION_IMAGES"):
+    with tempfile.TemporaryDirectory(dir=os.path.dirname(args.input_directory)) as new_input_directory:
+      CopyInputDirectory(args.input_directory, new_input_directory, args.input_directory_filter_file)
+      BuildImageOrVBMeta(new_input_directory, args.target_out, glob_dict, image_properties, args.out_file)
+  else:
+    BuildImageOrVBMeta(args.input_directory, args.target_out, glob_dict, image_properties, args.out_file)
 
 
 if __name__ == '__main__':
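As a rough, self-contained sketch of the constraints each filter-file line must satisfy (mirroring the checks in CopyInputDirectory above; the sample entries are hypothetical): every line is a normalized path relative to the staging directory and must not escape it.

import os
import sys

def check_filter_entry(line):
  # Same checks CopyInputDirectory applies to every filter-file line (sketch only).
  line = line.strip()
  if not line:
    return
  if line != os.path.normpath(line):
    sys.exit(f"{line}: not normalized")
  if line.startswith("../") or line.startswith("/"):
    sys.exit(f"{line}: escapes staging directory by starting with ../ or /")

for entry in ["etc/fstab", "bin/toybox", "lib64/libexample.so"]:  # hypothetical entries
  check_filter_entry(entry)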
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 732b5e9..4bcb8fc 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -96,7 +96,6 @@
     self.cache_size = None
     self.stash_threshold = 0.8
     self.logfile = None
-    self.host_tools = {}
     self.sepolicy_name = 'sepolicy.apex'
 
 
@@ -195,7 +194,7 @@
           '': {
               'handlers': ['default'],
               'propagate': True,
-              'level': 'INFO',
+              'level': 'WARNING',
           }
       }
   }
@@ -225,23 +224,15 @@
   logging.config.dictConfig(config)
 
 
-def SetHostToolLocation(tool_name, location):
-  OPTIONS.host_tools[tool_name] = location
-
-
 def FindHostToolPath(tool_name):
   """Finds the path to the host tool.
 
   Args:
     tool_name: name of the tool to find
   Returns:
-    path to the tool if found under either one of the host_tools map or under
-    the same directory as this binary is located at. If not found, tool_name
-    is returned.
+    path to the tool if found under the same directory as this binary is located at. If not found,
+    tool_name is returned.
   """
-  if tool_name in OPTIONS.host_tools:
-    return OPTIONS.host_tools[tool_name]
-
   my_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
   tool_path = os.path.join(my_dir, tool_name)
   if os.path.exists(tool_path):
@@ -459,6 +450,10 @@
     return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
 
   @property
+  def vabc_compression_param(self):
+    return self.get("virtual_ab_compression_method", "")
+
+  @property
   def vendor_api_level(self):
     vendor_prop = self.info_dict.get("vendor.build.prop")
     if not vendor_prop:
@@ -2438,8 +2433,9 @@
             apk_name, proc.returncode, stdoutdata, stderrdata))
 
   for line in stdoutdata.split("\n"):
-    # Looking for lines such as sdkVersion:'23' or sdkVersion:'M'.
-    m = re.match(r'sdkVersion:\'([^\']*)\'', line)
+    # Due to ag/24161708, looking for lines such as minSdkVersion:'23',minSdkVersion:'M'
+    # or sdkVersion:'23', sdkVersion:'M'.
+    m = re.match(r'(?:minSdkVersion|sdkVersion):\'([^\']*)\'', line)
     if m:
       return m.group(1)
   raise ExternalError("No minSdkVersion returned by aapt2")
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index fa53ad2..a3e3681 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -64,7 +64,8 @@
 OPTIONS.retrofit_dap = None
 OPTIONS.build_super = None
 OPTIONS.sparse_userimages = None
-OPTIONS.use_fastboot_info = False
+OPTIONS.use_fastboot_info = True
+OPTIONS.build_super_image = None
 
 def LoadOptions(input_file):
   """Loads information from input_file to OPTIONS.
@@ -174,7 +175,13 @@
   input_tmp = common.UnzipTemp(input_file)
 
   super_file = common.MakeTempFile('super_', '.img')
-  BuildSuperImage(input_tmp, super_file)
+
+  # Allow overriding the BUILD_SUPER_IMAGE binary
+  if OPTIONS.build_super_image:
+    command = [OPTIONS.build_super_image, input_tmp, super_file]
+    common.RunAndCheckOutput(command)
+  else:
+    BuildSuperImage(input_tmp, super_file)
 
   logger.info('Writing super.img to archive...')
   with zipfile.ZipFile(
@@ -231,6 +238,8 @@
       OPTIONS.bootable_only = True
     elif o == '--additional':
       OPTIONS.additional_entries.append(a)
+    elif o == '--build_super_image':
+      OPTIONS.build_super_image = a
     else:
       return False
     return True
@@ -240,6 +249,7 @@
                              extra_long_opts=[
                                  'additional=',
                                  'bootable_zip',
+                                 'build_super_image=',
                              ],
                              extra_option_handler=option_handler)
   if len(args) != 2:
diff --git a/tools/releasetools/merge/merge_meta.py b/tools/releasetools/merge/merge_meta.py
index b61f039..198c973 100644
--- a/tools/releasetools/merge/merge_meta.py
+++ b/tools/releasetools/merge/merge_meta.py
@@ -53,8 +53,6 @@
 MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
 
 
-
-
 def MergeUpdateEngineConfig(input_metadir1, input_metadir2, merged_meta_dir):
   UPDATE_ENGINE_CONFIG_NAME = "update_engine_config.txt"
   config1_path = os.path.join(
@@ -74,7 +72,7 @@
         merged_meta_dir, UPDATE_ENGINE_CONFIG_NAME))
 
 
-def MergeMetaFiles(temp_dir, merged_dir):
+def MergeMetaFiles(temp_dir, merged_dir, framework_partitions):
   """Merges various files in META/*."""
 
   framework_meta_dir = os.path.join(temp_dir, 'framework_meta', 'META')
@@ -114,7 +112,8 @@
     MergeAbPartitions(
         framework_meta_dir=framework_meta_dir,
         vendor_meta_dir=vendor_meta_dir,
-        merged_meta_dir=merged_meta_dir)
+        merged_meta_dir=merged_meta_dir,
+        framework_partitions=framework_partitions)
     UpdateCareMapImageSizeProps(images_dir=os.path.join(merged_dir, 'IMAGES'))
 
   for file_name in ('apkcerts.txt', 'apexkeys.txt'):
@@ -124,10 +123,10 @@
         merged_meta_dir=merged_meta_dir,
         file_name=file_name)
 
-  MergeUpdateEngineConfig(
-      framework_meta_dir,
-      vendor_meta_dir, merged_meta_dir,
-  )
+  if OPTIONS.merged_misc_info.get('ab_update') == 'true':
+    MergeUpdateEngineConfig(
+        framework_meta_dir,
+        vendor_meta_dir, merged_meta_dir)
 
   # Write the now-finalized OPTIONS.merged_misc_info.
   merge_utils.WriteSortedData(
@@ -135,13 +134,22 @@
       path=os.path.join(merged_meta_dir, 'misc_info.txt'))
 
 
-def MergeAbPartitions(framework_meta_dir, vendor_meta_dir, merged_meta_dir):
+def MergeAbPartitions(framework_meta_dir, vendor_meta_dir, merged_meta_dir,
+                      framework_partitions):
   """Merges META/ab_partitions.txt.
 
   The output contains the union of the partition names.
   """
   with open(os.path.join(framework_meta_dir, 'ab_partitions.txt')) as f:
-    framework_ab_partitions = f.read().splitlines()
+    # Filter out some partitions here to support the case where the
+    # ab_partitions.txt of the framework target files contains non-framework
+    # partitions. This happens when a complete merged target-files package is
+    # used as the framework target files.
+    framework_ab_partitions = [
+        partition
+        for partition in f.read().splitlines()
+        if partition in framework_partitions
+    ]
 
   with open(os.path.join(vendor_meta_dir, 'ab_partitions.txt')) as f:
     vendor_ab_partitions = f.read().splitlines()
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index d8f7b15..a0d3a1c 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -209,7 +209,9 @@
   # After this function completes successfully, all the files we need to create
   # the output target files package are in place.
   merge_meta.MergeMetaFiles(
-      temp_dir=temp_dir, merged_dir=output_target_files_temp_dir)
+      temp_dir=temp_dir,
+      merged_dir=output_target_files_temp_dir,
+      framework_partitions=OPTIONS.framework_partition_set)
 
   merge_dexopt.MergeDexopt(
       temp_dir=temp_dir, output_target_files_dir=output_target_files_temp_dir)
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index b5683a8..d446fc0 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -217,7 +217,7 @@
 
 # Partitions that are grabbed from the framework partial build by default.
 _FRAMEWORK_PARTITIONS = {
-    'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm',
+    'system', 'product', 'system_ext', 'system_other', 'root',
     'vbmeta_system', 'pvmfw'
 }
 
diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py
index 441312c..24d9ea9 100644
--- a/tools/releasetools/merge_ota.py
+++ b/tools/releasetools/merge_ota.py
@@ -14,7 +14,6 @@
 
 import argparse
 import logging
-import shlex
 import struct
 import sys
 import update_payload
@@ -31,6 +30,7 @@
 
 from payload_signer import PayloadSigner
 from ota_utils import PayloadGenerator, METADATA_PROTO_NAME, FinalizeMetadata
+from ota_signing_utils import AddSigningArgumentParse
 
 logger = logging.getLogger(__name__)
 
@@ -126,7 +126,7 @@
     ExtendPartitionUpdates(output_manifest.partitions, manifest.partitions)
     try:
       MergeDynamicPartitionMetadata(
-        output_manifest.dynamic_partition_metadata, manifest.dynamic_partition_metadata)
+          output_manifest.dynamic_partition_metadata, manifest.dynamic_partition_metadata)
     except DuplicatePartitionError:
       logger.error(
           "OTA %s has duplicate partition with some of the previous OTAs", payload.name)
@@ -190,6 +190,7 @@
               f"OTA {partition_to_ota[part].name} and {payload.name} have duplicating partition {part}")
         partition_to_ota[part] = payload
 
+
 def ApexInfo(file_paths):
   if len(file_paths) > 1:
     logger.info("More than one target file specified, will ignore "
@@ -201,33 +202,19 @@
       return apex_info_bytes
   return None
 
-def ParseSignerArgs(args):
-  if args is None:
-    return None
-  return shlex.split(args)
 
 def main(argv):
   parser = argparse.ArgumentParser(description='Merge multiple partial OTAs')
   parser.add_argument('packages', type=str, nargs='+',
                       help='Paths to OTA packages to merge')
-  parser.add_argument('--package_key', type=str,
-                      help='Paths to private key for signing payload')
-  parser.add_argument('--search_path', type=str,
-                      help='Search path for framework/signapk.jar')
-  parser.add_argument('--payload_signer', type=str,
-                      help='Path to custom payload signer')
-  parser.add_argument('--payload_signer_args', type=ParseSignerArgs,
-                      help='Arguments for payload signer if necessary')
-  parser.add_argument('--payload_signer_maximum_signature_size', type=str,
-                      help='Maximum signature size (in bytes) that would be '
-                      'generated by the given payload signer')
   parser.add_argument('--output', type=str,
                       help='Paths to output merged ota', required=True)
   parser.add_argument('--metadata_ota', type=str,
                       help='Output zip will use build metadata from this OTA package, if unspecified, use the last OTA package in merge list')
-  parser.add_argument('--private_key_suffix', type=str,
-                      help='Suffix to be appended to package_key path', default=".pk8")
-  parser.add_argument('-v', action="store_true", help="Enable verbose logging", dest="verbose")
+  parser.add_argument('-v', action="store_true",
+                      help="Enable verbose logging", dest="verbose")
+  AddSigningArgumentParse(parser)
+
   parser.epilog = ('This tool can also be used to resign a regular OTA. For a single regular OTA, '
                    'apex_info.pb will be written to output. When merging multiple OTAs, '
                    'apex_info.pb will not be written.')
@@ -301,8 +288,6 @@
   return 0
 
 
-
-
 if __name__ == '__main__':
   logging.basicConfig()
   sys.exit(main(sys.argv))
diff --git a/tools/releasetools/ota_from_raw_img.py b/tools/releasetools/ota_from_raw_img.py
new file mode 100644
index 0000000..0c1c05a
--- /dev/null
+++ b/tools/releasetools/ota_from_raw_img.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Given a series of .img files, produces an OTA package that installs those images.
+"""
+
+import sys
+import os
+import argparse
+import subprocess
+import tempfile
+import logging
+import zipfile
+
+import common
+from payload_signer import PayloadSigner
+from ota_utils import PayloadGenerator
+from ota_signing_utils import AddSigningArgumentParse
+
+
+logger = logging.getLogger(__name__)
+
+
+def ResolveBinaryPath(filename, search_path):
+  if not search_path:
+    return filename
+  if not os.path.exists(search_path):
+    return filename
+  path = os.path.join(search_path, "bin", filename)
+  if os.path.exists(path):
+    return path
+  path = os.path.join(search_path, filename)
+  if os.path.exists(path):
+    return path
+  return path
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(
+      prog=argv[0], description="Given a series of .img files, produces a full OTA package that installs those images")
+  parser.add_argument("images", nargs="+", type=str,
+                      help="List of images to generate OTA")
+  parser.add_argument("--partition_names", nargs='+', type=str,
+                      help="Partition names to install the images, default to basename of the image(no file name extension)")
+  parser.add_argument('--output', type=str,
+                      help='Path to the output OTA package', required=True)
+  parser.add_argument('--max_timestamp', type=int,
+                      help='Maximum build timestamp allowed to install this OTA')
+  parser.add_argument("-v", action="store_true",
+                      help="Enable verbose logging", dest="verbose")
+  AddSigningArgumentParse(parser)
+
+  args = parser.parse_args(argv[1:])
+  if args.verbose:
+    logger.setLevel(logging.INFO)
+  logger.info(args)
+  if not args.partition_names:
+    args.partition_names = [os.path.splitext(os.path.basename(path))[0]
+                            for path in args.images]
+  with tempfile.NamedTemporaryFile() as unsigned_payload, tempfile.NamedTemporaryFile() as dynamic_partition_info_file:
+    dynamic_partition_info_file.writelines(
+        [b"virtual_ab=true\n", b"super_partition_groups=\n"])
+    dynamic_partition_info_file.flush()
+    cmd = [ResolveBinaryPath("delta_generator", args.search_path)]
+    cmd.append("--partition_names=" + ",".join(args.partition_names))
+    cmd.append("--dynamic_partition_info_file=" +
+               dynamic_partition_info_file.name)
+    cmd.append("--new_partitions=" + ",".join(args.images))
+    cmd.append("--out_file=" + unsigned_payload.name)
+    cmd.append("--is_partial_update")
+    if args.max_timestamp:
+      cmd.append("--max_timestamp=" + str(args.max_timestamp))
+      cmd.append("--partition_timestamps=boot:" + str(args.max_timestamp))
+    logger.info("Running %s", cmd)
+
+    subprocess.check_call(cmd)
+    generator = PayloadGenerator()
+    generator.payload_file = unsigned_payload.name
+    logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
+
+    # Get signing keys
+    key_passwords = common.GetKeyPasswords([args.package_key])
+
+    if args.package_key:
+      logger.info("Signing payload...")
+      # TODO: remove OPTIONS when no longer used as fallback in payload_signer
+      common.OPTIONS.payload_signer_args = None
+      common.OPTIONS.payload_signer_maximum_signature_size = None
+      signer = PayloadSigner(args.package_key, args.private_key_suffix,
+                             key_passwords[args.package_key],
+                             payload_signer=args.payload_signer,
+                             payload_signer_args=args.payload_signer_args,
+                             payload_signer_maximum_signature_size=args.payload_signer_maximum_signature_size)
+      generator.payload_file = unsigned_payload.name
+      generator.Sign(signer)
+
+    logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
+
+    logger.info("Writing to %s", args.output)
+    with zipfile.ZipFile(args.output, "w") as zfp:
+      generator.WriteToZip(zfp)
+
+
+if __name__ == "__main__":
+  logging.basicConfig()
+  main(sys.argv)
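A hedged usage sketch for the new tool; the image path, key, timestamp, and output name are hypothetical, and the flags mirror the argparse definitions above.

import subprocess

# Hypothetical invocation; "ota_from_raw_img" is the python_binary_host added in Android.bp.
subprocess.check_call([
    "ota_from_raw_img",
    "boot.img",                      # image(s) to package
    "--partition_names", "boot",     # defaults to the image basename if omitted
    "--max_timestamp", "1700000000",
    "--package_key", "build/make/target/product/security/testkey",
    "--output", "boot_ota.zip",
])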
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index f3e6f1e..56ec929 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -147,7 +147,7 @@
 A/B OTA specific options
 
   --disable_fec_computation
-      Disable the on device FEC data computation for incremental updates.
+      Disable the on-device FEC data computation for incremental updates. The OTA will be larger, but installation will be faster.
 
   --include_secondary
       Additionally include the payload for secondary slot images (default:
@@ -224,7 +224,7 @@
       wait time in recovery.
 
   --enable_vabc_xor
-      Enable the VABC xor feature. Will reduce space requirements for OTA
+      Enable the VABC XOR feature. Will reduce space requirements for the OTA, but OTA installation will be slower.
 
   --force_minor_version
       Override the update_engine minor version for delta generation.
@@ -233,7 +233,10 @@
       A colon ':' separated list of compressors. Allowed values are bz2 and brotli.
 
   --enable_zucchini
-      Whether to enable to zucchini feature. Will generate smaller OTA but uses more memory.
+      Whether to enable the zucchini feature. Will generate a smaller OTA but uses more memory; OTA generation will take longer.
+
+  --enable_puffdiff
+      Whether to enable the puffdiff feature. Will generate a smaller OTA but uses more memory; OTA generation will take longer.
 
   --enable_lz4diff
       Whether to enable lz4diff feature. Will generate smaller OTA for EROFS but
@@ -244,7 +247,9 @@
       older SPL.
 
   --vabc_compression_param
-      Compression algorithm to be used for VABC. Available options: gz, brotli, none
+      Compression algorithm to be used for VABC. Available options: gz, lz4, zstd, brotli, none.
+      The compression level can be specified by appending ",$LEVEL" to the option,
+      e.g. --vabc_compression_param=gz,9 specifies level 9 compression with the gz algorithm.
 
   --security_patch_level
       Override the security patch level in target files
@@ -269,7 +274,7 @@
 import care_map_pb2
 import common
 import ota_utils
-from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
+from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata,
                        PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir)
 from common import DoesInputFileContain, IsSparseImage
 import target_files_diff
@@ -320,6 +325,7 @@
 OPTIONS.force_minor_version = None
 OPTIONS.compressor_types = None
 OPTIONS.enable_zucchini = True
+OPTIONS.enable_puffdiff = None
 OPTIONS.enable_lz4diff = False
 OPTIONS.vabc_compression_param = None
 OPTIONS.security_patch_level = None
@@ -456,48 +462,51 @@
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
 
-  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
-    infolist = input_zip.infolist()
+  fileslist = []
+  for (root, dirs, files) in os.walk(input_file):
+    root = root[len(input_file):].lstrip("/")
+    fileslist.extend([os.path.join(root, d) for d in dirs])
+    fileslist.extend([os.path.join(root, d) for d in files])
 
-  input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
-  for info in infolist:
-    unzipped_file = os.path.join(input_tmp, *info.filename.split('/'))
-    if info.filename == 'IMAGES/system_other.img':
+  input_tmp = input_file
+  for filename in fileslist:
+    unzipped_file = os.path.join(input_tmp, *filename.split('/'))
+    if filename == 'IMAGES/system_other.img':
       common.ZipWrite(target_zip, unzipped_file, arcname='IMAGES/system.img')
 
     # Primary images and friends need to be skipped explicitly.
-    elif info.filename in ('IMAGES/system.img',
-                           'IMAGES/system.map'):
+    elif filename in ('IMAGES/system.img',
+                      'IMAGES/system.map'):
       pass
 
     # Copy images that are not in SECONDARY_PAYLOAD_SKIPPED_IMAGES.
-    elif info.filename.startswith(('IMAGES/', 'RADIO/')):
-      image_name = os.path.basename(info.filename)
+    elif filename.startswith(('IMAGES/', 'RADIO/')):
+      image_name = os.path.basename(filename)
       if image_name not in ['{}.img'.format(partition) for partition in
                             SECONDARY_PAYLOAD_SKIPPED_IMAGES]:
-        common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
+        common.ZipWrite(target_zip, unzipped_file, arcname=filename)
 
     # Skip copying the postinstall config if requested.
-    elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
+    elif skip_postinstall and filename == POSTINSTALL_CONFIG:
       pass
 
-    elif info.filename.startswith('META/'):
+    elif filename.startswith('META/'):
       # Remove the unnecessary partitions for secondary images from the
       # ab_partitions file.
-      if info.filename == AB_PARTITIONS:
+      if filename == AB_PARTITIONS:
         with open(unzipped_file) as f:
           partition_list = f.read().splitlines()
         partition_list = [partition for partition in partition_list if partition
                           and partition not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
-        common.ZipWriteStr(target_zip, info.filename,
+        common.ZipWriteStr(target_zip, filename,
                            '\n'.join(partition_list))
       # Remove the unnecessary partitions from the dynamic partitions list.
-      elif (info.filename == 'META/misc_info.txt' or
-            info.filename == DYNAMIC_PARTITION_INFO):
+      elif (filename == 'META/misc_info.txt' or
+            filename == DYNAMIC_PARTITION_INFO):
         modified_info = GetInfoForSecondaryImages(unzipped_file)
-        common.ZipWriteStr(target_zip, info.filename, modified_info)
+        common.ZipWriteStr(target_zip, filename, modified_info)
       else:
-        common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
+        common.ZipWrite(target_zip, unzipped_file, arcname=filename)
 
   common.ZipClose(target_zip)
 
@@ -863,6 +872,10 @@
     if not source_info.is_vabc or not target_info.is_vabc:
       logger.info("Either source or target does not support VABC, disabling.")
       OPTIONS.disable_vabc = True
+    if source_info.vabc_compression_param != target_info.vabc_compression_param:
+      logger.info("Source build and target build use different compression methods {} vs {}, default to source builds parameter {}".format(
+          source_info.vabc_compression_param, target_info.vabc_compression_param, source_info.vabc_compression_param))
+      OPTIONS.vabc_compression_param = source_info.vabc_compression_param
 
     # Virtual AB Compression was introduced in Android S.
     # Later, we backported VABC to Android R. But verity support was not
@@ -877,6 +890,22 @@
         "META/ab_partitions.txt is required for ab_update."
     target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
     source_info = None
+    if target_info.vabc_compression_param:
+      minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[
+          target_info.vabc_compression_param]
+      if target_info.vendor_api_level < minimum_api_level_required:
+        logger.warning(
+            "This full OTA is configured to use VABC compression algorithm"
+            " {}, which is supported since"
+            " Android API level {}, but device is "
+            "launched with {} . If this full OTA is"
+            " served to a device running old build, OTA might fail due to "
+            "unsupported compression parameter. For safety, gz is used because "
+            "it's supported since day 1.".format(
+                target_info.vabc_compression_param,
+                minimum_api_level_required,
+                target_info.vendor_api_level))
+        OPTIONS.vabc_compression_param = "gz"
 
   if OPTIONS.partial == []:
     logger.info(
@@ -994,6 +1023,9 @@
 
   additional_args += ["--enable_zucchini=" +
                       str(OPTIONS.enable_zucchini).lower()]
+  if OPTIONS.enable_puffdiff is not None:
+    additional_args += ["--enable_puffdiff=" +
+                        str(OPTIONS.enable_puffdiff).lower()]
 
   if not ota_utils.IsLz4diffCompatible(source_file, target_file):
     logger.warning(
@@ -1193,11 +1225,20 @@
     elif o == "--enable_zucchini":
       assert a.lower() in ["true", "false"]
       OPTIONS.enable_zucchini = a.lower() != "false"
+    elif o == "--enable_puffdiff":
+      assert a.lower() in ["true", "false"]
+      OPTIONS.enable_puffdiff = a.lower() != "false"
     elif o == "--enable_lz4diff":
       assert a.lower() in ["true", "false"]
       OPTIONS.enable_lz4diff = a.lower() != "false"
     elif o == "--vabc_compression_param":
+      words = a.split(",")
+      assert len(words) >= 1 and len(words) <= 2
       OPTIONS.vabc_compression_param = a.lower()
+      if len(words) == 2:
+        if not words[1].isdigit():
+          raise ValueError("Cannot parse value %r for option $COMPRESSION_LEVEL - only "
+                         "integers are allowed." % words[1])
     elif o == "--security_patch_level":
       OPTIONS.security_patch_level = a
     elif o in ("--max_threads"):
@@ -1254,6 +1295,7 @@
                                  "force_minor_version=",
                                  "compressor_types=",
                                  "enable_zucchini=",
+                                 "enable_puffdiff=",
                                  "enable_lz4diff=",
                                  "vabc_compression_param=",
                                  "security_patch_level=",
diff --git a/tools/releasetools/ota_signing_utils.py b/tools/releasetools/ota_signing_utils.py
new file mode 100644
index 0000000..60c8c94
--- /dev/null
+++ b/tools/releasetools/ota_signing_utils.py
@@ -0,0 +1,38 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import shlex
+
+
+def ParseSignerArgs(args):
+  if args is None:
+    return None
+  return shlex.split(args)
+
+
+def AddSigningArgumentParse(parser: argparse.ArgumentParser):
+  parser.add_argument('--package_key', type=str,
+                      help='Paths to private key for signing payload')
+  parser.add_argument('--search_path', '--path', type=str,
+                      help='Search path for framework/signapk.jar')
+  parser.add_argument('--payload_signer', type=str,
+                      help='Path to custom payload signer')
+  parser.add_argument('--payload_signer_args', type=ParseSignerArgs,
+                      help='Arguments for payload signer if necessary')
+  parser.add_argument('--payload_signer_maximum_signature_size', type=str,
+                      help='Maximum signature size (in bytes) that would be '
+                      'generated by the given payload signer')
+  parser.add_argument('--private_key_suffix', type=str,
+                      help='Suffix to be appended to package_key path', default=".pk8")
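For illustration only, this is how the shared helper is consumed (mirroring its use in merge_ota.py above); the program name and argument values are hypothetical.

import argparse
from ota_signing_utils import AddSigningArgumentParse

parser = argparse.ArgumentParser(prog='example_ota_tool')  # hypothetical tool name
AddSigningArgumentParse(parser)
args = parser.parse_args(['--package_key', 'testkey',
                          '--payload_signer_args=--some_flag value'])
print(args.payload_signer_args)   # ['--some_flag', 'value'], split by ParseSignerArgs
print(args.private_key_suffix)    # '.pk8' (the default)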
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 68c6887..5c70223 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -51,6 +51,19 @@
 TARGET_FILES_IMAGES_SUBDIR = ["IMAGES", "PREBUILT_IMAGES", "RADIO"]
 
 
+# Key is the compression algorithm, value is minimum API level required to
+# use this compression algorithm for VABC OTA on device.
+VABC_COMPRESSION_PARAM_SUPPORT = {
+    "gz": 31,
+    "brotli": 31,
+    "none": 31,
+    # lz4 support is added in Android U
+    "lz4": 34,
+    # zstd support is added in Android V
+    "zstd": 35,
+}
+
+
 def FinalizeMetadata(metadata, input_file, output_file, needed_property_files=None, package_key=None, pw=None):
   """Finalizes the metadata and signs an A/B OTA package.
 
@@ -727,6 +740,7 @@
     logger.info("target files %s is already extracted", path)
     return path
   extracted_dir = common.MakeTempDir("target_files")
+  logger.info(f"Extracting target files {path} to {extracted_dir}")
   common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN + [""])
   for subdir in TARGET_FILES_IMAGES_SUBDIR:
     image_dir = os.path.join(extracted_dir, subdir)
@@ -741,12 +755,10 @@
 
 
 def LocatePartitionPath(target_files_dir: str, partition: str, allow_empty):
-  path = os.path.join(target_files_dir, "RADIO", partition + ".img")
-  if os.path.exists(path):
-    return path
-  path = os.path.join(target_files_dir, "IMAGES", partition + ".img")
-  if os.path.exists(path):
-    return path
+  for subdir in TARGET_FILES_IMAGES_SUBDIR:
+    path = os.path.join(target_files_dir, subdir, partition + ".img")
+    if os.path.exists(path):
+      return path
   if allow_empty:
     return ""
   raise common.ExternalError(
@@ -759,12 +771,10 @@
 
 
 def LocatePartitionMap(target_files_dir: str, partition: str):
-  path = os.path.join(target_files_dir, "RADIO", partition + ".map")
-  if os.path.exists(path):
-    return path
-  path = os.path.join(target_files_dir, "IMAGES", partition + ".map")
-  if os.path.exists(path):
-    return path
+  for subdir in TARGET_FILES_IMAGES_SUBDIR:
+    path = os.path.join(target_files_dir, subdir, partition + ".map")
+    if os.path.exists(path):
+      return path
   return ""
 
 
@@ -850,7 +860,7 @@
       cmd.extend(["--dynamic_partition_info_file", dynamic_partition_info])
 
     apex_info = os.path.join(
-      target_dir, "META", "apex_info.pb")
+        target_dir, "META", "apex_info.pb")
     if os.path.exists(apex_info):
       cmd.extend(["--apex_info_file", apex_info])
 
@@ -883,30 +893,7 @@
     """
     assert isinstance(payload_signer, PayloadSigner)
 
-    # 1. Generate hashes of the payload and metadata files.
-    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-    cmd = ["brillo_update_payload", "hash",
-           "--unsigned_payload", self.payload_file,
-           "--signature_size", str(payload_signer.maximum_signature_size),
-           "--metadata_hash_file", metadata_sig_file,
-           "--payload_hash_file", payload_sig_file]
-    self._Run(cmd)
-
-    # 2. Sign the hashes.
-    signed_payload_sig_file = payload_signer.SignHashFile(payload_sig_file)
-    signed_metadata_sig_file = payload_signer.SignHashFile(metadata_sig_file)
-
-    # 3. Insert the signatures back into the payload file.
-    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
-                                              suffix=".bin")
-    cmd = ["brillo_update_payload", "sign",
-           "--unsigned_payload", self.payload_file,
-           "--payload", signed_payload_file,
-           "--signature_size", str(payload_signer.maximum_signature_size),
-           "--metadata_signature_file", signed_metadata_sig_file,
-           "--payload_signature_file", signed_payload_sig_file]
-    self._Run(cmd)
+    signed_payload_file = payload_signer.SignPayload(self.payload_file)
 
     self.payload_file = signed_payload_file
 
@@ -920,9 +907,9 @@
     # 4. Dump the signed payload properties.
     properties_file = common.MakeTempFile(prefix="payload-properties-",
                                           suffix=".txt")
-    cmd = ["brillo_update_payload", "properties",
-           "--payload", self.payload_file,
-           "--properties_file", properties_file]
+    cmd = ["delta_generator",
+           "--in_file=" + self.payload_file,
+           "--properties_file=" + properties_file]
     self._Run(cmd)
 
     if self.secondary:
@@ -1070,7 +1057,7 @@
     if common.IsSparseImage(src):
       return common.UnsparseImage(src, dst)
     else:
-      return os.link(src, dst)
+      return os.symlink(os.path.realpath(src), dst)
 
   for subdir in TARGET_FILES_IMAGES_SUBDIR:
     if not os.path.exists(os.path.join(input_dir, subdir)):
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
index 9933aef..bbd2896 100644
--- a/tools/releasetools/payload_signer.py
+++ b/tools/releasetools/payload_signer.py
@@ -95,11 +95,11 @@
     # 1. Generate hashes of the payload and metadata files.
     payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
     metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-    cmd = ["brillo_update_payload", "hash",
-           "--unsigned_payload", unsigned_payload,
-           "--signature_size", str(self.maximum_signature_size),
-           "--metadata_hash_file", metadata_sig_file,
-           "--payload_hash_file", payload_sig_file]
+    cmd = ["delta_generator",
+           "--in_file=" + unsigned_payload,
+           "--signature_size=" + str(self.maximum_signature_size),
+           "--out_metadata_hash_file=" + metadata_sig_file,
+           "--out_hash_file=" + payload_sig_file]
     self._Run(cmd)
 
     # 2. Sign the hashes.
@@ -109,16 +109,15 @@
     # 3. Insert the signatures back into the payload file.
     signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
                                               suffix=".bin")
-    cmd = ["brillo_update_payload", "sign",
-           "--unsigned_payload", unsigned_payload,
-           "--payload", signed_payload_file,
-           "--signature_size", str(self.maximum_signature_size),
-           "--metadata_signature_file", signed_metadata_sig_file,
-           "--payload_signature_file", signed_payload_sig_file]
+    cmd = ["delta_generator",
+           "--in_file=" + unsigned_payload,
+           "--out_file=" + signed_payload_file,
+           "--signature_size=" + str(self.maximum_signature_size),
+           "--metadata_signature_file=" + signed_metadata_sig_file,
+           "--payload_signature_file=" + signed_payload_sig_file]
     self._Run(cmd)
     return signed_payload_file
 
-
   def SignHashFile(self, in_file):
     """Signs the given input file. Returns the output filename."""
     out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 2b65e47..2b45825 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -1236,8 +1236,9 @@
 
   vendor_misc_info["has_dtbo"] = "false"  # dtbo
   vendor_misc_info["has_pvmfw"] = "false"  # pvmfw
-  vendor_misc_info["avb_custom_images_partition_list"] = ""  # custom images
+  vendor_misc_info["avb_custom_images_partition_list"] = ""  # avb custom images
   vendor_misc_info["avb_building_vbmeta_image"] = "false" # skip building vbmeta
+  vendor_misc_info["custom_images_partition_list"] = ""  # custom images
   vendor_misc_info["use_dynamic_partitions"] = "false"  # super_empty
   vendor_misc_info["build_super_partition"] = "false"  # super split
   vendor_misc_info["avb_vbmeta_system"] = ""  # skip building vbmeta_system
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 86fb480..c69a13d 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -15,14 +15,13 @@
 #
 
 import copy
-import json
 import os
 import subprocess
 import tempfile
-import time
 import unittest
 import zipfile
 from hashlib import sha1
+from typing import BinaryIO
 
 import common
 import test_utils
@@ -36,14 +35,24 @@
 GiB = 1024 * MiB
 
 
-def get_2gb_string():
+def get_2gb_file():
   size = int(2 * GiB + 1)
   block_size = 4 * KiB
   step_size = 4 * MiB
-  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
+  tmpfile = tempfile.NamedTemporaryFile()
+  tmpfile.truncate(size)
   for _ in range(0, size, step_size):
-    yield os.urandom(block_size)
-    yield b'\0' * (step_size - block_size)
+    tmpfile.write(os.urandom(block_size))
+    tmpfile.seek(step_size - block_size, os.SEEK_CUR)
+  return tmpfile
+
+
+def hash_file(filename):
+  sha1_hash = sha1()
+  with open(filename, "rb") as fp:
+    for data in iter(lambda: fp.read(4*MiB), b''):
+      sha1_hash.update(data)
+  return sha1_hash
 
 
 class BuildInfoTest(test_utils.ReleaseToolsTestCase):
@@ -222,17 +231,17 @@
     info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
     build_info = common.BuildInfo(info_dict)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/build-id/'
-      'version-incremental:build-type/build-tags', build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/build-id/'
+        'version-incremental:build-type/build-tags', build_info.fingerprint)
 
     build_props = info_dict['build.prop'].build_props
     del build_props['ro.build.id']
     build_props['ro.build.legacy.id'] = 'legacy-build-id'
     build_info = common.BuildInfo(info_dict, use_legacy_id=True)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/'
-      'legacy-build-id/version-incremental:build-type/build-tags',
-      build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/'
+        'legacy-build-id/version-incremental:build-type/build-tags',
+        build_info.fingerprint)
 
     self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
                       False)
@@ -241,9 +250,9 @@
     info_dict['vbmeta_digest'] = 'abcde12345'
     build_info = common.BuildInfo(info_dict, use_legacy_id=False)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/'
-      'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
-      build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/'
+        'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+        build_info.fingerprint)
 
   def test___getitem__(self):
     target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +385,7 @@
     info_dict['build.prop'].build_props[
         'ro.product.property_source_order'] = 'bad-source'
     with self.assertRaisesRegexp(common.ExternalError,
-        'Invalid ro.product.property_source_order'):
+                                 'Invalid ro.product.property_source_order'):
       info = common.BuildInfo(info_dict, None)
       info.GetBuildProp('ro.product.device')
 
@@ -429,6 +438,13 @@
     self.assertIsNone(zip_file.testzip())
 
   def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
+    with tempfile.NamedTemporaryFile() as test_file:
+      test_file_name = test_file.name
+      for data in contents:
+        test_file.write(bytes(data))
+      return self._test_ZipWriteFile(test_file_name, extra_zipwrite_args)
+
+  def _test_ZipWriteFile(self, test_file_name, extra_zipwrite_args=None):
     extra_zipwrite_args = dict(extra_zipwrite_args or {})
 
     test_file = tempfile.NamedTemporaryFile(delete=False)
@@ -441,17 +457,12 @@
     arcname = extra_zipwrite_args.get("arcname", test_file_name)
     if arcname[0] == "/":
       arcname = arcname[1:]
+    sha1_hash = hash_file(test_file_name)
 
     zip_file.close()
     zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
 
     try:
-      sha1_hash = sha1()
-      for data in contents:
-        sha1_hash.update(bytes(data))
-        test_file.write(bytes(data))
-      test_file.close()
-
       expected_mode = extra_zipwrite_args.get("perms", 0o644)
       expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                        zipfile.ZIP_STORED)
@@ -467,7 +478,6 @@
                    test_file_name, expected_stat, expected_mode,
                    expected_compress_type)
     finally:
-      os.remove(test_file_name)
       os.remove(zip_file_name)
 
   def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
@@ -502,14 +512,13 @@
     finally:
       os.remove(zip_file_name)
 
-  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
+  def _test_ZipWriteStr_large_file(self, large_file: BinaryIO, small, extra_args=None):
     extra_args = dict(extra_args or {})
 
     zip_file = tempfile.NamedTemporaryFile(delete=False)
     zip_file_name = zip_file.name
 
-    test_file = tempfile.NamedTemporaryFile(delete=False)
-    test_file_name = test_file.name
+    test_file_name = large_file.name
 
     arcname_large = test_file_name
     arcname_small = "bar"
@@ -522,11 +531,7 @@
     zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
 
     try:
-      sha1_hash = sha1()
-      for data in large:
-        sha1_hash.update(data)
-        test_file.write(data)
-      test_file.close()
+      sha1_hash = hash_file(test_file_name)
 
       # Arbitrary timestamp, just to make sure common.ZipWrite() restores
       # the timestamp after writing.
@@ -551,7 +556,6 @@
                    expected_compress_type=expected_compress_type)
     finally:
       os.remove(zip_file_name)
-      os.remove(test_file_name)
 
   def _test_reset_ZIP64_LIMIT(self, func, *args):
     default_limit = (1 << 31) - 1
@@ -577,10 +581,10 @@
     })
 
   def test_ZipWrite_large_file(self):
-    file_contents = get_2gb_string()
-    self._test_ZipWrite(file_contents, {
-        "compress_type": zipfile.ZIP_DEFLATED,
-    })
+    with get_2gb_file() as tmpfile:
+      self._test_ZipWriteFile(tmpfile.name, {
+          "compress_type": zipfile.ZIP_DEFLATED,
+      })
 
   def test_ZipWrite_resets_ZIP64_LIMIT(self):
     self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
@@ -627,11 +631,11 @@
     # zipfile.writestr() doesn't work when the str size is over 2GiB even with
     # the workaround. We will only test the case of writing a string into a
     # large archive.
-    long_string = get_2gb_string()
     short_string = os.urandom(1024)
-    self._test_ZipWriteStr_large_file(long_string, short_string, {
-        "compress_type": zipfile.ZIP_DEFLATED,
-    })
+    with get_2gb_file() as large_file:
+      self._test_ZipWriteStr_large_file(large_file, short_string, {
+          "compress_type": zipfile.ZIP_DEFLATED,
+      })
 
   def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
     self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
@@ -821,9 +825,9 @@
   )
 
   APKCERTS_CERTMAP1 = {
-      'RecoveryLocalizer.apk' : 'certs/devkey',
-      'Settings.apk' : 'build/make/target/product/security/platform',
-      'TV.apk' : 'PRESIGNED',
+      'RecoveryLocalizer.apk': 'certs/devkey',
+      'Settings.apk': 'build/make/target/product/security/platform',
+      'TV.apk': 'PRESIGNED',
   }
 
   APKCERTS_TXT2 = (
@@ -838,10 +842,10 @@
   )
 
   APKCERTS_CERTMAP2 = {
-      'Compressed1.apk' : 'certs/compressed1',
-      'Compressed2a.apk' : 'certs/compressed2',
-      'Compressed2b.apk' : 'certs/compressed2',
-      'Compressed3.apk' : 'certs/compressed3',
+      'Compressed1.apk': 'certs/compressed1',
+      'Compressed2a.apk': 'certs/compressed2',
+      'Compressed2b.apk': 'certs/compressed2',
+      'Compressed3.apk': 'certs/compressed3',
   }
 
   APKCERTS_TXT3 = (
@@ -850,7 +854,7 @@
   )
 
   APKCERTS_CERTMAP3 = {
-      'Compressed4.apk' : 'certs/compressed4',
+      'Compressed4.apk': 'certs/compressed4',
   }
 
   # Test parsing with no optional fields, both optional fields, and only the
@@ -867,9 +871,9 @@
   )
 
   APKCERTS_CERTMAP4 = {
-      'RecoveryLocalizer.apk' : 'certs/devkey',
-      'Settings.apk' : 'build/make/target/product/security/platform',
-      'TV.apk' : 'PRESIGNED',
+      'RecoveryLocalizer.apk': 'certs/devkey',
+      'Settings.apk': 'build/make/target/product/security/platform',
+      'TV.apk': 'PRESIGNED',
   }
 
   def setUp(self):
@@ -973,7 +977,7 @@
     extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
     extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
     with open(extracted_from_privkey, 'rb') as privkey_fp, \
-        open(extracted_from_pubkey, 'rb') as pubkey_fp:
+            open(extracted_from_pubkey, 'rb') as pubkey_fp:
       self.assertEqual(privkey_fp.read(), pubkey_fp.read())
 
   def test_ParseCertificate(self):
@@ -1237,7 +1241,8 @@
     self.assertEqual(
         '1-5 9-10',
         sparse_image.file_map['//system/file1'].extra['text_str'])
-    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
+    self.assertTrue(
+        sparse_image.file_map['//system/file2'].extra['incomplete'])
     self.assertTrue(
         sparse_image.file_map['/system/app/file3'].extra['incomplete'])
 
@@ -1345,7 +1350,7 @@
       'recovery_api_version': 3,
       'fstab_version': 2,
       'system_root_image': 'true',
-      'no_recovery' : 'true',
+      'no_recovery': 'true',
       'recovery_as_boot': 'true',
   }
 
@@ -1667,6 +1672,7 @@
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                       test_file.name, 'generic_kernel')
 
+
 class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
   """Checks the format of install-recovery.sh.
 
@@ -1676,7 +1682,7 @@
   def setUp(self):
     self._tempdir = common.MakeTempDir()
     # Create a fake dict that contains the fstab info for boot&recovery.
-    self._info = {"fstab" : {}}
+    self._info = {"fstab": {}}
     fake_fstab = [
         "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
         "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2023,11 +2029,11 @@
           input_zip, 'odm', placeholder_values)
 
     self.assertEqual({
-      'ro.odm.build.date.utc': '1578430045',
-      'ro.odm.build.fingerprint':
-      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-      'ro.product.odm.device': 'coral',
-      'ro.product.odm.name': 'product1',
+        'ro.odm.build.date.utc': '1578430045',
+        'ro.odm.build.fingerprint':
+        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+        'ro.product.odm.device': 'coral',
+        'ro.product.odm.name': 'product1',
     }, partition_props.build_props)
 
     with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2210,8 +2216,8 @@
 
     copied_props = copy.deepcopy(partition_props)
     self.assertEqual({
-      'ro.odm.build.date.utc': '1578430045',
-      'ro.odm.build.fingerprint':
-      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-      'ro.product.odm.device': 'coral',
+        'ro.odm.build.date.utc': '1578430045',
+        'ro.odm.build.fingerprint':
+        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+        'ro.product.odm.device': 'coral',
     }, copied_props.build_props)
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
index 4837dde..519251e 100644
--- a/tools/sbom/Android.bp
+++ b/tools/sbom/Android.bp
@@ -53,5 +53,27 @@
     libs: [
         "sbom_lib",
     ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    test_suites: ["general-tests"],
+}
+
+python_test_host {
+    name: "sbom_data_test",
+    main: "sbom_data_test.py",
+    srcs: [
+        "sbom_data_test.py",
+    ],
+    libs: [
+        "sbom_lib",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
     test_suites: ["general-tests"],
 }
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
index ea38e36..71f8660 100644
--- a/tools/sbom/sbom_data.py
+++ b/tools/sbom/sbom_data.py
@@ -133,7 +133,7 @@
       checksums = []
       for file in self.files:
         if file.id in package.file_ids:
-          checksums.append(file.checksum)
+          checksums.append(file.checksum.split(': ')[1])
       checksums.sort()
       h = hashlib.sha1()
       h.update(''.join(checksums).encode(encoding='utf-8'))
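
The checksum fields above are stored as 'SHA1: <hex digest>' strings, so the fix concatenates only the bare digests when deriving the SPDX package verification code. A minimal illustrative sketch of that computation under that assumption; the helper name below is hypothetical and not part of the patch:

    import hashlib

    def package_verification_code(checksum_strings):
      # Keep only the hex digest: 'SHA1: abc...' -> 'abc...'
      digests = sorted(s.split(': ')[1] for s in checksum_strings)
      # Verification code: SHA1 over the sorted, concatenated digests.
      return hashlib.sha1(''.join(digests).encode('utf-8')).hexdigest()

    # e.g. package_verification_code(['SHA1: 356a192b7913b04c54574d18c28d46e6395428ab',
    #                                 'SHA1: da4b9237bacccdf19c0760cab7aec4a8359010b0'])
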
diff --git a/tools/sbom/sbom_data_test.py b/tools/sbom/sbom_data_test.py
new file mode 100644
index 0000000..69bc9d2
--- /dev/null
+++ b/tools/sbom/sbom_data_test.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hashlib
+import unittest
+import sbom_data
+
+BUILD_FINGER_PRINT = 'build_finger_print'
+SUPPLIER_GOOGLE = 'Organization: Google'
+SUPPLIER_UPSTREAM = 'Organization: upstream'
+
+SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
+SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
+SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'
+
+SPDXID_FILE1 = 'SPDXRef-file1'
+SPDXID_FILE2 = 'SPDXRef-file2'
+SPDXID_FILE3 = 'SPDXRef-file3'
+SPDXID_FILE4 = 'SPDXRef-file4'
+
+
+class SBOMDataTest(unittest.TestCase):
+
+  def setUp(self):
+    # SBOM of a product
+    self.sbom_doc = sbom_data.Document(name='test doc',
+                                       namespace='http://www.google.com/sbom/spdx/android',
+                                       creators=[SUPPLIER_GOOGLE],
+                                       created='2023-03-31T22:17:58Z',
+                                       describes=sbom_data.SPDXID_PRODUCT)
+    self.sbom_doc.add_external_ref(
+        sbom_data.DocumentExternalReference(id='DocumentRef-external_doc_ref',
+                                            uri='external_doc_uri',
+                                            checksum='SHA1: 1234567890'))
+    self.sbom_doc.add_package(
+        sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+                          name=sbom_data.PACKAGE_NAME_PRODUCT,
+                          download_location=sbom_data.VALUE_NONE,
+                          supplier=SUPPLIER_GOOGLE,
+                          version=BUILD_FINGER_PRINT,
+                          files_analyzed=True,
+                          verification_code='',
+                          file_ids=[SPDXID_FILE1, SPDXID_FILE2, SPDXID_FILE3, SPDXID_FILE4]))
+
+    self.sbom_doc.add_package(
+        sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                          name=sbom_data.PACKAGE_NAME_PLATFORM,
+                          download_location=sbom_data.VALUE_NONE,
+                          supplier=SUPPLIER_GOOGLE,
+                          version=BUILD_FINGER_PRINT,
+                          ))
+
+    self.sbom_doc.add_package(
+        sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE1,
+                          name='Prebuilt package1',
+                          download_location=sbom_data.VALUE_NONE,
+                          supplier=SUPPLIER_GOOGLE,
+                          version=BUILD_FINGER_PRINT,
+                          ))
+
+    self.sbom_doc.add_package(
+        sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+                          name='Source package1',
+                          download_location=sbom_data.VALUE_NONE,
+                          supplier=SUPPLIER_GOOGLE,
+                          version=BUILD_FINGER_PRINT,
+                          external_refs=[sbom_data.PackageExternalRef(
+                              category=sbom_data.PackageExternalRefCategory.SECURITY,
+                              type=sbom_data.PackageExternalRefType.cpe22Type,
+                              locator='cpe:/a:jsoncpp_project:jsoncpp:1.9.4')]
+                          ))
+
+    self.sbom_doc.add_package(
+        sbom_data.Package(id=SPDXID_UPSTREAM_PACKAGE1,
+                          name='Upstream package1',
+                          supplier=SUPPLIER_UPSTREAM,
+                          version='1.1',
+                          ))
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
+                                                          relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                          id2=SPDXID_UPSTREAM_PACKAGE1))
+
+    self.sbom_doc.files.append(
+        sbom_data.File(id=SPDXID_FILE1, name='/bin/file1',
+                       checksum='SHA1: 356a192b7913b04c54574d18c28d46e6395428ab'))  # sha1 hash of 1
+    self.sbom_doc.files.append(
+        sbom_data.File(id=SPDXID_FILE2, name='/bin/file2',
+                       checksum='SHA1: da4b9237bacccdf19c0760cab7aec4a8359010b0'))  # sha1 hash of 2
+    self.sbom_doc.files.append(
+        sbom_data.File(id=SPDXID_FILE3, name='/bin/file3',
+                       checksum='SHA1: 77de68daecd823babbb58edb1c8e14d7106e83bb'))  # sha1 hash of 3
+    self.sbom_doc.files.append(
+        sbom_data.File(id=SPDXID_FILE4, name='file4.a',
+                       checksum='SHA1: 1b6453892473a467d07372d45eb05abc2031647a'))  # sha1 of 4
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=sbom_data.SPDXID_PLATFORM))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE2,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_PREBUILT_PACKAGE1))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE3,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_SOURCE_PACKAGE1
+                                                          ))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                          relationship=sbom_data.RelationshipType.STATIC_LINK,
+                                                          id2=SPDXID_FILE4
+                                                          ))
+
+  def test_package_verification_code(self):
+    checksums = []
+    for file in self.sbom_doc.files:
+      checksums.append(file.checksum.split(': ')[1])
+    checksums.sort()
+    h = hashlib.sha1()
+    h.update(''.join(checksums).encode(encoding='utf-8'))
+    expected_package_verification_code = h.hexdigest()
+
+    self.sbom_doc.generate_packages_verification_code()
+    self.assertEqual(expected_package_verification_code, self.sbom_doc.packages[0].verification_code)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/tools/zipalign/Android.bp b/tools/zipalign/Android.bp
index 0e1d58e..8be7e25 100644
--- a/tools/zipalign/Android.bp
+++ b/tools/zipalign/Android.bp
@@ -70,6 +70,7 @@
         "libgmock",
     ],
     data: [
+         "tests/data/apkWithUncompressedSharedLibs.zip",
          "tests/data/archiveWithOneDirectoryEntry.zip",
          "tests/data/diffOrders.zip",
          "tests/data/holes.zip",
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index 23840e3..f32f90b 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -17,6 +17,7 @@
 #include "ZipFile.h"
 
 #include <stdio.h>
+#include <string.h>
 #include <stdlib.h>
 #include <unistd.h>
 
@@ -36,17 +37,14 @@
 }
 
 static int getAlignment(bool pageAlignSharedLibs, int defaultAlignment,
-    ZipEntry* pEntry) {
-
-    static const int kPageAlignment = 4096;
-
+    ZipEntry* pEntry, int pageSize) {
     if (!pageAlignSharedLibs) {
         return defaultAlignment;
     }
 
     const char* ext = strrchr(pEntry->getFileName(), '.');
     if (ext && strcmp(ext, ".so") == 0) {
-        return kPageAlignment;
+        return pageSize;
     }
 
     return defaultAlignment;
@@ -56,7 +54,7 @@
  * Copy all entries from "pZin" to "pZout", aligning as needed.
  */
 static int copyAndAlign(ZipFile* pZin, ZipFile* pZout, int alignment, bool zopfli,
-    bool pageAlignSharedLibs)
+    bool pageAlignSharedLibs, int pageSize)
 {
     int numEntries = pZin->getNumEntries();
     ZipEntry* pEntry;
@@ -84,7 +82,8 @@
                 status = pZout->add(pZin, pEntry, padding, &pNewEntry);
             }
         } else {
-            const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry);
+            const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry,
+                                             pageSize);
 
             //printf("--- %s: orig at %ld(+%d) len=%ld, adding pad=%d\n",
             //    pEntry->getFileName(), (long) pEntry->getFileOffset(),
@@ -107,7 +106,7 @@
  * output file exists and "force" wasn't specified.
  */
 int process(const char* inFileName, const char* outFileName,
-    int alignment, bool force, bool zopfli, bool pageAlignSharedLibs)
+    int alignment, bool force, bool zopfli, bool pageAlignSharedLibs, int pageSize)
 {
     ZipFile zin, zout;
 
@@ -127,7 +126,7 @@
     }
 
     if (zin.open(inFileName, ZipFile::kOpenReadOnly) != OK) {
-        fprintf(stderr, "Unable to open '%s' as zip archive\n", inFileName);
+        fprintf(stderr, "Unable to open '%s' as zip archive: %s\n", inFileName, strerror(errno));
         return 1;
     }
     if (zout.open(outFileName,
@@ -138,7 +137,8 @@
         return 1;
     }
 
-    int result = copyAndAlign(&zin, &zout, alignment, zopfli, pageAlignSharedLibs);
+    int result = copyAndAlign(&zin, &zout, alignment, zopfli, pageAlignSharedLibs,
+                              pageSize);
     if (result != 0) {
         printf("zipalign: failed rewriting '%s' to '%s'\n",
             inFileName, outFileName);
@@ -150,7 +150,7 @@
  * Verify the alignment of a zip archive.
  */
 int verify(const char* fileName, int alignment, bool verbose,
-    bool pageAlignSharedLibs)
+    bool pageAlignSharedLibs, int pageSize)
 {
     ZipFile zipFile;
     bool foundBad = false;
@@ -181,7 +181,8 @@
             continue;
        } else {
             off_t offset = pEntry->getFileOffset();
-            const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry);
+            const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry,
+                                             pageSize);
             if ((offset % alignTo) != 0) {
                 if (verbose) {
                     printf("%8jd %s (BAD - %jd)\n",
diff --git a/tools/zipalign/ZipAlignMain.cpp b/tools/zipalign/ZipAlignMain.cpp
index 53fc8d4..2f24403 100644
--- a/tools/zipalign/ZipAlignMain.cpp
+++ b/tools/zipalign/ZipAlignMain.cpp
@@ -34,15 +34,18 @@
     fprintf(stderr, "Zip alignment utility\n");
     fprintf(stderr, "Copyright (C) 2009 The Android Open Source Project\n\n");
     fprintf(stderr,
-        "Usage: zipalign [-f] [-p] [-v] [-z] <align> infile.zip outfile.zip\n"
-        "       zipalign -c [-p] [-v] <align> infile.zip\n\n" );
+        "Usage: zipalign [-f] [-p] [-P <pagesize_kb>] [-v] [-z] <align> infile.zip outfile.zip\n"
+        "       zipalign -c [-p] [-P <pagesize_kb>] [-v] <align> infile.zip\n\n" );
     fprintf(stderr,
         "  <align>: alignment in bytes, e.g. '4' provides 32-bit alignment\n");
     fprintf(stderr, "  -c: check alignment only (does not modify file)\n");
     fprintf(stderr, "  -f: overwrite existing outfile.zip\n");
-    fprintf(stderr, "  -p: page-align uncompressed .so files\n");
+    fprintf(stderr, "  -p: 4kb page-align uncompressed .so files\n");
     fprintf(stderr, "  -v: verbose output\n");
     fprintf(stderr, "  -z: recompress using Zopfli\n");
+    fprintf(stderr, "  -P <pagesize_kb>: Align uncompressed .so files to the specified\n");
+    fprintf(stderr, "                    page size. Valid values for <pagesize_kb> are 4, 16\n");
+    fprintf(stderr, "                    and 64. '-P' cannot be used in combination with '-p'.\n");
 }
 
 
@@ -57,12 +60,16 @@
     bool verbose = false;
     bool zopfli = false;
     bool pageAlignSharedLibs = false;
+    int pageSize = 4096;
+    bool legacyPageAlignmentFlag = false;   // -p
+    bool pageAlignmentFlag = false;         // -P <pagesize_kb>
     int result = 1;
     int alignment;
     char* endp;
 
     int opt;
-    while ((opt = getopt(argc, argv, "fcpvz")) != -1) {
+
+    while ((opt = getopt(argc, argv, "fcpvzP:")) != -1) {
         switch (opt) {
         case 'c':
             check = true;
@@ -77,7 +84,29 @@
             zopfli = true;
             break;
         case 'p':
+            legacyPageAlignmentFlag = true;
             pageAlignSharedLibs = true;
+            pageSize = 4096;
+            break;
+        case 'P':
+            pageAlignmentFlag = true;
+            pageAlignSharedLibs = true;
+
+            if (!optarg) {
+                fprintf(stderr, "ERROR: -P requires an argument\n");
+                wantUsage = true;
+                goto bail;
+            }
+
+            pageSize = atoi(optarg);
+            if (pageSize != 4 && pageSize != 16 && pageSize != 64) {
+                fprintf(stderr, "ERROR: Invalid argument for -P: %s\n", optarg);
+                wantUsage = true;
+                goto bail;
+            }
+
+            pageSize *= 1024;  // Convert from kB to bytes.
+
             break;
         default:
             fprintf(stderr, "ERROR: unknown flag -%c\n", opt);
@@ -86,6 +115,13 @@
         }
     }
 
+    if (legacyPageAlignmentFlag && pageAlignmentFlag) {
+            fprintf(stderr, "ERROR: Invalid options: '-P <pagesize_kb>' and '-p'"
+                            "cannot be used in combination.\n");
+            wantUsage = true;
+            goto bail;
+    }
+
     if (!((check && (argc - optind) == 2) || (!check && (argc - optind) == 3))) {
         wantUsage = true;
         goto bail;
@@ -100,14 +136,15 @@
 
     if (check) {
         /* check existing archive for correct alignment */
-        result = verify(argv[optind + 1], alignment, verbose, pageAlignSharedLibs);
+        result = verify(argv[optind + 1], alignment, verbose, pageAlignSharedLibs, pageSize);
     } else {
         /* create the new archive */
-        result = process(argv[optind + 1], argv[optind + 2], alignment, force, zopfli, pageAlignSharedLibs);
+        result = process(argv[optind + 1], argv[optind + 2], alignment, force, zopfli,
+                         pageAlignSharedLibs, pageSize);
 
         /* trust, but verify */
         if (result == 0) {
-            result = verify(argv[optind + 2], alignment, verbose, pageAlignSharedLibs);
+            result = verify(argv[optind + 2], alignment, verbose, pageAlignSharedLibs, pageSize);
         }
     }
 
diff --git a/tools/zipalign/ZipEntry.cpp b/tools/zipalign/ZipEntry.cpp
index 689999e..0355e56 100644
--- a/tools/zipalign/ZipEntry.cpp
+++ b/tools/zipalign/ZipEntry.cpp
@@ -18,6 +18,8 @@
 // Access to entries in a Zip archive.
 //
 
+#define _POSIX_THREAD_SAFE_FUNCTIONS  // For mingw localtime_r().
+
 #define LOG_TAG "zip"
 
 #include "ZipEntry.h"
@@ -354,31 +356,29 @@
  */
 void ZipEntry::setModWhen(time_t when)
 {
-#if !defined(_WIN32)
-    struct tm tmResult;
-#endif
-    time_t even;
-    uint16_t zdate, ztime;
-
-    struct tm* ptm;
-
     /* round up to an even number of seconds */
-    even = (when & 1) ? (when + 1) : when;
+    time_t even = (when & 1) ? (when + 1) : when;
 
     /* expand */
-#if !defined(_WIN32)
-    ptm = localtime_r(&even, &tmResult);
-#else
-    ptm = localtime(&even);
-#endif
+    struct tm tmResult;
+    struct tm* ptm = localtime_r(&even, &tmResult);
 
-    int year;
-    year = ptm->tm_year;
-    if (year < 80)
-        year = 80;
+    // The earliest valid time for ZIP file entries is 1980-01-01. See:
+    // https://users.cs.jmu.edu/buchhofp/forensics/formats/pkzip.html.
+    // Set any time before 1980 to 1980-01-01.
+    if (ptm->tm_year < 80) {
+        ptm->tm_year = 80;
+        ptm->tm_mon = 0;
+        ptm->tm_mday = 1;
+        ptm->tm_hour = 0;
+        ptm->tm_min = 0;
+        ptm->tm_sec = 0;
+    }
 
-    zdate = (year - 80) << 9 | (ptm->tm_mon+1) << 5 | ptm->tm_mday;
-    ztime = ptm->tm_hour << 11 | ptm->tm_min << 5 | ptm->tm_sec >> 1;
+    uint16_t zdate = static_cast<uint16_t>(
+        (ptm->tm_year - 80) << 9 | (ptm->tm_mon + 1) << 5 | ptm->tm_mday);
+    uint16_t ztime = static_cast<uint16_t>(
+        ptm->tm_hour << 11 | ptm->tm_min << 5 | ptm->tm_sec >> 1);
 
     mCDE.mLastModFileTime = mLFH.mLastModFileTime = ztime;
     mCDE.mLastModFileDate = mLFH.mLastModFileDate = zdate;
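
The rewritten packing above encodes the timestamp in the MS-DOS format ZIP uses: years counted from 1980, a 1-based month, and 2-second resolution for seconds. A small worked sketch of the same bit layout using plain calendar values instead of struct tm fields (tm_year - 80 corresponds to year - 1980, tm_mon + 1 to the 1-based month); illustration only, not part of the patch:

    def dos_date_time(year, month, day, hour, minute, second):
      # month is 1-12; seconds are stored with 2-second resolution.
      zdate = (year - 1980) << 9 | month << 5 | day
      ztime = hour << 11 | minute << 5 | second >> 1
      return zdate, ztime

    # e.g. dos_date_time(2023, 3, 31, 22, 17, 58) -> (0x567f, 0xb23d)
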
diff --git a/tools/zipalign/include/ZipAlign.h b/tools/zipalign/include/ZipAlign.h
index ab36086..85dda14 100644
--- a/tools/zipalign/include/ZipAlign.h
+++ b/tools/zipalign/include/ZipAlign.h
@@ -25,24 +25,28 @@
  * - force  : Overwrite output if it exists, fail otherwise.
  * - zopfli : Recompress compressed entries with more efficient algorithm.
  *            Copy compressed entries as-is, and unaligned, otherwise.
- * - pageAlignSharedLibs: Align .so files to 4096 and other files to
+ * - pageAlignSharedLibs: Align .so files to @pageSize and other files to
  *   alignTo, or all files to alignTo if false..
+ * - pageSize: Specifies the page size of the target device. This is used
+ *             to correctly page-align shared libraries.
  *
  * Returns 0 on success.
  */
 int process(const char* input, const char* output, int alignTo, bool force,
-    bool zopfli, bool pageAlignSharedLibs);
+    bool zopfli, bool pageAlignSharedLibs, int pageSize);
 
 /*
  * Verify the alignment of a zip archive.
  * - alignTo: Alignment (in bytes) for uncompressed entries.
- * - pageAlignSharedLibs: Align .so files to 4096 and other files to
+ * - pageAlignSharedLibs: Align .so files to @pageSize and other files to
  *   alignTo, or all files to alignTo if false..
+ * - pageSize: Specifies the page size of the target device. This is used
+ *             to correctly page-align shared libraries.
  *
  * Returns 0 on success.
  */
 int verify(const char* fileName, int alignTo, bool verbose,
-    bool pageAlignSharedLibs);
+    bool pageAlignSharedLibs, int pageSize);
 
 } // namespace android
 
diff --git a/tools/zipalign/tests/data/apkWithUncompressedSharedLibs.zip b/tools/zipalign/tests/data/apkWithUncompressedSharedLibs.zip
new file mode 100644
index 0000000..930e3b5
--- /dev/null
+++ b/tools/zipalign/tests/data/apkWithUncompressedSharedLibs.zip
Binary files differ
diff --git a/tools/zipalign/tests/src/align_test.cpp b/tools/zipalign/tests/src/align_test.cpp
index a8433fa..07ad7cc 100644
--- a/tools/zipalign/tests/src/align_test.cpp
+++ b/tools/zipalign/tests/src/align_test.cpp
@@ -48,11 +48,12 @@
 TEST(Align, Unaligned) {
   const std::string src = GetTestPath("unaligned.zip");
   const std::string dst = GetTempPath("unaligned_out.zip");
+  int pageSize = 4096;
 
-  int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, false, pageSize);
   ASSERT_EQ(0, processed);
 
-  int verified = verify(dst.c_str(), 4, true, false);
+  int verified = verify(dst.c_str(), 4, true, false, pageSize);
   ASSERT_EQ(0, verified);
 }
 
@@ -60,18 +61,19 @@
   const std::string src = GetTestPath("unaligned.zip");
   const std::string tmp = GetTempPath("da_aligned.zip");
   const std::string dst = GetTempPath("da_d_aligner.zip");
+  int pageSize = 4096;
 
-  int processed = process(src.c_str(), tmp.c_str(), 4, true, false, 4096);
+  int processed = process(src.c_str(), tmp.c_str(), 4, true, false, false, pageSize);
   ASSERT_EQ(0, processed);
 
-  int verified = verify(tmp.c_str(), 4, true, false);
+  int verified = verify(tmp.c_str(), 4, true, false, pageSize);
   ASSERT_EQ(0, verified);
 
   // Align the result of the previous run. Essentially double aligning.
-  processed = process(tmp.c_str(), dst.c_str(), 4, true, false, 4096);
+  processed = process(tmp.c_str(), dst.c_str(), 4, true, false, false, pageSize);
   ASSERT_EQ(0, processed);
 
-  verified = verify(dst.c_str(), 4, true, false);
+  verified = verify(dst.c_str(), 4, true, false, pageSize);
   ASSERT_EQ(0, verified);
 
   // Nothing should have changed between tmp and dst.
@@ -90,11 +92,12 @@
 TEST(Align, Holes) {
   const std::string src = GetTestPath("holes.zip");
   const std::string dst = GetTempPath("holes_out.zip");
+  int pageSize = 4096;
 
-  int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, true, pageSize);
   ASSERT_EQ(0, processed);
 
-  int verified = verify(dst.c_str(), 4, false, true);
+  int verified = verify(dst.c_str(), 4, false, true, pageSize);
   ASSERT_EQ(0, verified);
 }
 
@@ -102,28 +105,85 @@
 TEST(Align, DifferenteOrders) {
   const std::string src = GetTestPath("diffOrders.zip");
   const std::string dst = GetTempPath("diffOrders_out.zip");
+  int pageSize = 4096;
 
-  int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, true, pageSize);
   ASSERT_EQ(0, processed);
 
-  int verified = verify(dst.c_str(), 4, false, true);
+  int verified = verify(dst.c_str(), 4, false, true, pageSize);
   ASSERT_EQ(0, verified);
 }
 
 TEST(Align, DirectoryEntryDoNotRequireAlignment) {
   const std::string src = GetTestPath("archiveWithOneDirectoryEntry.zip");
-  int verified = verify(src.c_str(), 4, false, true);
+  int pageSize = 4096;
+  int verified = verify(src.c_str(), 4, false, true, pageSize);
   ASSERT_EQ(0, verified);
 }
 
 TEST(Align, DirectoryEntry) {
   const std::string src = GetTestPath("archiveWithOneDirectoryEntry.zip");
   const std::string dst = GetTempPath("archiveWithOneDirectoryEntry_out.zip");
+  int pageSize = 4096;
 
-  int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, true, pageSize);
   ASSERT_EQ(0, processed);
   ASSERT_EQ(true, sameContent(src, dst));
 
-  int verified = verify(dst.c_str(), 4, false, true);
+  int verified = verify(dst.c_str(), 4, false, true, pageSize);
+  ASSERT_EQ(0, verified);
+}
+
+class UncompressedSharedLibsTest : public ::testing::Test {
+  protected:
+    static void SetUpTestSuite() {
+      src = GetTestPath("apkWithUncompressedSharedLibs.zip");
+      dst = GetTempPath("apkWithUncompressedSharedLibs_out.zip");
+    }
+
+    static std::string src;
+    static std::string dst;
+};
+
+std::string UncompressedSharedLibsTest::src;
+std::string UncompressedSharedLibsTest::dst;
+
+TEST_F(UncompressedSharedLibsTest, Unaligned) {
+  int pageSize = 4096;
+
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, false, pageSize);
+  ASSERT_EQ(0, processed);
+
+  int verified = verify(dst.c_str(), 4, true, true, pageSize);
+  ASSERT_NE(0, verified); // .so's not page-aligned
+}
+
+TEST_F(UncompressedSharedLibsTest, AlignedPageSize4kB) {
+  int pageSize = 4096;
+
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, true, pageSize);
+  ASSERT_EQ(0, processed);
+
+  int verified = verify(dst.c_str(), 4, true, true, pageSize);
+  ASSERT_EQ(0, verified);
+}
+
+TEST_F(UncompressedSharedLibsTest, AlignedPageSize16kB) {
+  int pageSize = 16384;
+
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, true, pageSize);
+  ASSERT_EQ(0, processed);
+
+  int verified = verify(dst.c_str(), 4, true, true, pageSize);
+  ASSERT_EQ(0, verified);
+}
+
+TEST_F(UncompressedSharedLibsTest, AlignedPageSize64kB) {
+  int pageSize = 65536;
+
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, true, pageSize);
+  ASSERT_EQ(0, processed);
+
+  int verified = verify(dst.c_str(), 4, true, true, pageSize);
   ASSERT_EQ(0, verified);
 }