Merge "core: Add script to check for possible shared library duplication" into main
diff --git a/core/Makefile b/core/Makefile
index c3c5fb3..7c124a9 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -27,26 +27,28 @@
 $(eval $(strip $(1)): PRIVATE_FLAG_NAMES := $(strip $(2)))
 $(strip $(1)):
 	mkdir -p $$(dir $$(PRIVATE_OUT))
-	( \
-		echo '{' ; \
-		echo 'flags: [' ; \
-		$$(foreach flag, $$(PRIVATE_FLAG_NAMES), \
+	echo '{' > $$(PRIVATE_OUT)
+	echo '"flags": [' >> $$(PRIVATE_OUT)
+	$$(foreach flag, $$(PRIVATE_FLAG_NAMES), \
+		( \
 			printf '  { "name": "%s", "value": "%s", ' \
 					'$$(flag)' \
 					'$$(_ALL_RELEASE_FLAGS.$$(flag).VALUE)' \
 					; \
-			printf '"set": "%s", "default": "%s", "declared": "%s", }' \
+			printf '"set": "%s", "default": "%s", "declared": "%s" }' \
 					'$$(_ALL_RELEASE_FLAGS.$$(flag).SET_IN)' \
 					'$$(_ALL_RELEASE_FLAGS.$$(flag).DEFAULT)' \
 					'$$(_ALL_RELEASE_FLAGS.$$(flag).DECLARED_IN)' \
 					; \
 			printf '$$(if $$(filter $$(lastword $$(PRIVATE_FLAG_NAMES)),$$(flag)),,$$(comma))\n' ; \
-		) \
-		echo "]" ; \
-		echo "}" \
-	) >> $$(PRIVATE_OUT)
+		) >> $$(PRIVATE_OUT) \
+	)
+	echo "]" >> $$(PRIVATE_OUT)
+	echo "}" >> $$(PRIVATE_OUT)
 endef
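+# For reference, each generated build_flags.json is a small JSON document of the
+# form (values illustrative, flag name hypothetical):
+#   { "flags": [ { "name": "RELEASE_MY_FEATURE", "value": "true",
+#                  "set": "...", "default": "...", "declared": "..." } ] }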
 
+_FLAG_PARTITIONS := product system system_ext vendor
+
 $(foreach partition, $(_FLAG_PARTITIONS), \
 	$(eval BUILD_FLAG_SUMMARIES.$(partition) \
 			:= $(TARGET_OUT_FLAGS)/$(partition)/etc/build_flags.json) \
@@ -410,7 +412,7 @@
 	  unzip -qoDD -d $$(PRIVATE_MODULE_DIR) $$(PRIVATE_MODULE_ARCHIVE); \
 	  mkdir -p $$(PRIVATE_OUTPUT_DIR)/lib; \
 	  cp -r  $(3)/$(DEPMOD_STAGING_SUBDIR)/$(2)/lib/modules $$(PRIVATE_OUTPUT_DIR)/lib/; \
-	  find $$(PRIVATE_MODULE_DIR) -type f -name *.ko | xargs basename -a > $$(PRIVATE_LOAD_FILE); \
+	  find $$(PRIVATE_MODULE_DIR) -type f -name '*.ko' | xargs basename -a > $$(PRIVATE_LOAD_FILE); \
 	)
 	$(if $(1),\
 	  cp $$(PRIVATE_MODULES) $$(PRIVATE_MODULE_DIR)/; \
@@ -2469,7 +2471,11 @@
     TARGET_RECOVERY_UI_PROGRESS_BAR_BASELINE:progress_bar_baseline \
     TARGET_RECOVERY_UI_TOUCH_LOW_THRESHOLD:touch_low_threshold \
     TARGET_RECOVERY_UI_TOUCH_HIGH_THRESHOLD:touch_high_threshold \
-    TARGET_RECOVERY_UI_VR_STEREO_OFFSET:vr_stereo_offset
+    TARGET_RECOVERY_UI_VR_STEREO_OFFSET:vr_stereo_offset \
+    TARGET_RECOVERY_UI_BRIGHTNESS_FILE:brightness_file \
+    TARGET_RECOVERY_UI_MAX_BRIGHTNESS_FILE:max_brightness_file \
+    TARGET_RECOVERY_UI_BRIGHTNESS_NORMAL:brightness_normal_percent \
+    TARGET_RECOVERY_UI_BRIGHTNESS_DIMMED:brightness_dimmed_percent
 
 # Parses the given list of build variables and writes their values as build properties if defined.
 # For example, if a target defines `TARGET_RECOVERY_UI_MARGIN_HEIGHT := 100`,
@@ -4825,25 +4831,24 @@
 intermediates := $(call intermediates-dir-for,PACKAGING,check_vintf_all)
 check_vintf_all_deps :=
 
-APEX_OUT := $(PRODUCT_OUT)/apex
 # -----------------------------------------------------------------
-# Create apex-info-file.xml
+# Activate vendor APEXes for checkvintf
 
 apex_dirs := \
-  $(TARGET_OUT)/apex/% \
-  $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
-  $(TARGET_OUT_ODM)/apex/% \
-  $(TARGET_OUT_PRODUCT)/apex/% \
 
 apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
+
+APEX_OUT := $(intermediates)/apex
 APEX_INFO_FILE := $(APEX_OUT)/apex-info-list.xml
 
-# dump_apex_info scans $(PRODUCT_OUT)/apex and writes apex-info-list.xml there.
-# This relies on the fact that rules for .apex files install the contents in $(PRODUCT_OUT)/apex.
-$(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/dump_apex_info $(apex_files)
-	@echo "Creating apex-info-file in $(PRODUCT_OUT) "
-	$< --root_dir $(PRODUCT_OUT)
+# apexd_host scans/activates APEX files and writes /apex/apex-info-list.xml
+$(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/apexd_host $(apex_files)
+	@echo "Extracting apexes..."
+	@rm -rf $(APEX_OUT)
+	@mkdir -p $(APEX_OUT)
+	$< --vendor_path $(TARGET_OUT_VENDOR) \
+	   --apex_path $(APEX_OUT)
 
 apex_files :=
 apex_dirs :=
@@ -4891,6 +4896,8 @@
 check_vintf_all_deps += $(vintffm_log)
 $(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps)
 	@( $< --check --dirmap /system:$(TARGET_OUT) \
+	  --dirmap /system_ext:$(TARGET_OUT_SYSTEM_EXT) \
+	  --dirmap /product:$(TARGET_OUT_PRODUCT) \
 	  $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
 
 $(call declare-1p-target,$(vintffm_log))
@@ -5293,12 +5300,11 @@
 
 # Additional tools to unpack and repack the apex file.
 INTERNAL_OTATOOLS_MODULES += \
+  apexd_host \
   apexer \
   apex_compression_tool \
-  blkid_static \
   deapexer \
   debugfs_static \
-  dump_apex_info \
   fsck.erofs \
   make_erofs \
   merge_zips \
@@ -5401,9 +5407,15 @@
 ifdef BOARD_PREBUILT_DTBOIMAGE
 	$(hide) echo "flash dtbo" >> $@
 endif
+ifneq ($(INSTALLED_DTIMAGE_TARGET),)
+	$(hide) echo "flash dts dt.img" >> $@
+endif
 ifneq ($(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),)
 	$(hide) echo "flash vendor_kernel_boot" >> $@
 endif
+ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
+	$(hide) echo "flash recovery" >> $@
+endif
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 	$(hide) echo "flash pvmfw" >> $@
 endif
@@ -5412,7 +5424,7 @@
 	$(hide) echo "flash --apply-vbmeta vbmeta" >> $@
 endif
 ifneq (,$(strip $(BOARD_AVB_VBMETA_SYSTEM)))
-	$(hide) echo "flash --apply-vbmeta vbmeta_system" >> $@
+	$(hide) echo "flash vbmeta_system" >> $@
 endif
 ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
 	$(hide) echo "flash --apply-vbmeta vbmeta_vendor" >> $@
@@ -5422,7 +5434,7 @@
 endif
 ifneq (,$(strip $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)))
 	$(hide) $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS), \
-	  echo "flash --apply-vbmeta vbmeta_$(partition)" >> $@;)
+	  echo "flash vbmeta_$(partition)" >> $@;)
 endif
 endif # BOARD_AVB_ENABLE
 	$(hide) echo "reboot fastboot" >> $@
@@ -5882,11 +5894,33 @@
     echo "virtual_ab_cow_version=$(PRODUCT_VIRTUAL_AB_COW_VERSION)" >> $(1))
 endef
 
+# Copy an image file to a directory and generate a block list map file from the image,
+# only if the map_file_generator supports the file system.
+# Otherwise, skip generating map files as well as copying images; in that case the
+# image is generated by $(ADD_IMG_TO_TARGET_FILES), which also generates the map file for it.
+# $(1): path of the image file
+# $(2): target out directory
+# $(3): image name used for the map file; skip generating a map file if empty
+define copy-image-and-generate-map
+  $(eval _supported_fs_for_map_file_generator := erofs ext%)
+  $(eval _img := $(call to-upper,$(3)))
+  $(if $(3),$(eval _map_fs_type := $(BOARD_$(_img)IMAGE_FILE_SYSTEM_TYPE)),\
+    $(eval _no_map_file := "true"))
+  $(if $(filter $(_supported_fs_for_map_file_generator),$(_map_fs_type))$(_no_map_file),\
+    mkdir -p $(2); \
+    cp $(1) $(2); \
+    $(if $(3),$(HOST_OUT_EXECUTABLES)/map_file_generator $(1) $(2)/$(3).map))
+  $(eval _img :=)
+  $(eval _map_fs_type :=)
+  $(eval _no_map_file :=)
+endef
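+# Illustrative usage (mirroring the calls in the target-files rules below):
+#   $(call copy-image-and-generate-map,$(BUILT_SYSTEMIMAGE),$(zip_root)/IMAGES,system)
+# copies the built system image into IMAGES/ and, when the filesystem type is
+# supported, writes IMAGES/system.map next to it.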
+
 # By conditionally including the dependency of the target files package on the
 # full system image deps, we speed up builds that do not build the system
 # image.
 ifdef BUILDING_SYSTEM_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(FULL_SYSTEMIMAGE_DEPS)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEMIMAGE)
 else
   # releasetools may need the system build.prop even when building a
   # system-image-less product.
@@ -5899,6 +5933,7 @@
 
 ifdef BUILDING_SYSTEM_OTHER_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEMOTHERIMAGE_TARGET)
 endif
 
 ifdef BUILDING_VENDOR_BOOT_IMAGE
@@ -5930,18 +5965,21 @@
 
 ifdef BUILDING_VENDOR_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDORIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_VENDORIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_VENDORIMAGE
   $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDORIMAGE_TARGET)
 endif
 
 ifdef BUILDING_PRODUCT_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_PRODUCTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_PRODUCTIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_PRODUCTIMAGE
   $(BUILT_TARGET_FILES_DIR): $(INSTALLED_PRODUCTIMAGE_TARGET)
 endif
 
 ifdef BUILDING_SYSTEM_EXT_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEM_EXTIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
   $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
 endif
@@ -5956,24 +5994,28 @@
 
 ifdef BUILDING_ODM_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_ODMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_ODMIMAGE
   $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_VENDOR_DLKM_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_VENDOR_DLKMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
   $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_ODM_DLKM_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_ODM_DLKMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
   $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_SYSTEM_DLKM_IMAGE
   $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEM_DLKMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
   $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
 endif
@@ -6025,6 +6067,7 @@
 	    $(SOONG_APEX_KEYS_FILE) \
 	    $(SOONG_ZIP) \
 	    $(HOST_OUT_EXECUTABLES)/fs_config \
+	    $(HOST_OUT_EXECUTABLES)/map_file_generator \
 	    $(ADD_IMG_TO_TARGET_FILES) \
 	    $(MAKE_RECOVERY_PATCH) \
 	    $(BUILT_KERNEL_CONFIGS_FILE) \
@@ -6381,27 +6424,35 @@
 	@# Run fs_config on all the system, vendor, boot ramdisk,
 	@# and recovery ramdisk files in the zip, and save the output
 ifdef BUILDING_SYSTEM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEMIMAGE),$(zip_root)/IMAGES,system)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM,system/) > $(zip_root)/META/filesystem_config.txt
 endif
 ifdef BUILDING_VENDOR_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_VENDORIMAGE_TARGET),$(zip_root)/IMAGES,vendor)
 	$(hide) $(call fs_config,$(zip_root)/VENDOR,vendor/) > $(zip_root)/META/vendor_filesystem_config.txt
 endif
 ifdef BUILDING_PRODUCT_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_PRODUCTIMAGE_TARGET),$(zip_root)/IMAGES,product)
 	$(hide) $(call fs_config,$(zip_root)/PRODUCT,product/) > $(zip_root)/META/product_filesystem_config.txt
 endif
 ifdef BUILDING_SYSTEM_EXT_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEM_EXTIMAGE_TARGET),$(zip_root)/IMAGES,system_ext)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM_EXT,system_ext/) > $(zip_root)/META/system_ext_filesystem_config.txt
 endif
 ifdef BUILDING_ODM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_ODMIMAGE_TARGET),$(zip_root)/IMAGES,odm)
 	$(hide) $(call fs_config,$(zip_root)/ODM,odm/) > $(zip_root)/META/odm_filesystem_config.txt
 endif
 ifdef BUILDING_VENDOR_DLKM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_VENDOR_DLKMIMAGE_TARGET),$(zip_root)/IMAGES,vendor_dlkm)
 	$(hide) $(call fs_config,$(zip_root)/VENDOR_DLKM,vendor_dlkm/) > $(zip_root)/META/vendor_dlkm_filesystem_config.txt
 endif
 ifdef BUILDING_ODM_DLKM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_ODM_DLKMIMAGE_TARGET),$(zip_root)/IMAGES,odm_dlkm)
 	$(hide) $(call fs_config,$(zip_root)/ODM_DLKM,odm_dlkm/) > $(zip_root)/META/odm_dlkm_filesystem_config.txt
 endif
 ifdef BUILDING_SYSTEM_DLKM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEM_DLKMIMAGE_TARGET),$(zip_root)/IMAGES,system_dlkm)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM_DLKM,system_dlkm/) > $(zip_root)/META/system_dlkm_filesystem_config.txt
 endif
 	@# ROOT always contains the files for the root under normal boot.
@@ -6423,6 +6474,7 @@
 	$(hide) $(call fs_config,$(zip_root)/RECOVERY/RAMDISK,) > $(zip_root)/META/recovery_filesystem_config.txt
 endif
 ifdef BUILDING_SYSTEM_OTHER_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEMOTHERIMAGE_TARGET),$(zip_root)/IMAGES)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM_OTHER,system/) > $(zip_root)/META/system_other_filesystem_config.txt
 endif
 	@# Metadata for compatibility verification.
@@ -6488,6 +6540,7 @@
             $(APKCERTS_FILE) \
             $(SOONG_APEX_KEYS_FILE) \
             $(HOST_OUT_EXECUTABLES)/fs_config \
+            $(HOST_OUT_EXECUTABLES)/map_file_generator \
             $(ADD_IMG_TO_TARGET_FILES) \
             $(MAKE_RECOVERY_PATCH) \
             $(BUILT_KERNEL_CONFIGS_FILE) \
@@ -6728,7 +6781,7 @@
 ifeq (true,$(CLANG_COVERAGE))
   LLVM_PROFDATA := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-profdata
   LLVM_COV := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-cov
-  LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib/x86_64-unknown-linux-gnu/libc++.so.1
+  LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib/x86_64-unknown-linux-gnu/libc++.so
   # Use llvm-profdata.zip for backwards compatibility with tradefed code.
   LLVM_COVERAGE_TOOLS_ZIP := $(PRODUCT_OUT)/llvm-profdata.zip
 
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 6ba539c..f132d13 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -114,7 +114,7 @@
 # are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
 # default.
 INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
-  bluetooth \
+  btservices \
   permission \
   rkpd \
   uwb \
@@ -154,6 +154,10 @@
 $(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
 endif
 
+ifdef PRODUCT_AVF_KERNEL_MODULES_ENABLED
+$(call add_soong_config_var_value,ANDROID,avf_kernel_modules_enabled,$(PRODUCT_AVF_KERNEL_MODULES_ENABLED))
+endif
+
 # Enable system_server optimizations by default unless explicitly set or if
 # there may be dependent runtime jars.
 # TODO(b/240588226): Remove the off-by-default exceptions after handling
diff --git a/core/base_rules.mk b/core/base_rules.mk
index c61c653..9ad1cc5 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -450,6 +450,12 @@
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_IS_HOST_MODULE := $(LOCAL_IS_HOST_MODULE)
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_HOST:= $(my_host)
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_PREFIX := $(my_prefix)
+$(LOCAL_INTERMEDIATE_TARGETS) : .KATI_TAGS += ;module_name=$(LOCAL_MODULE)
+ifeq ($(LOCAL_MODULE_CLASS),)
+$(error "$(LOCAL_MODULE) in $(LOCAL_PATH) does not set $(LOCAL_MODULE_CLASS)")
+else
+$(LOCAL_INTERMEDIATE_TARGETS) : .KATI_TAGS += ;module_type=$(LOCAL_MODULE_CLASS)
+endif
 
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_INTERMEDIATES_DIR:= $(intermediates)
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
diff --git a/core/binary.mk b/core/binary.mk
index 579e6b5..e2e5be4 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1572,15 +1572,10 @@
 else ifdef LOCAL_SDK_VERSION
   my_target_global_c_includes :=
   my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include)
-else ifdef BOARD_VNDK_VERSION
-  my_target_global_c_includes := $(SRC_HEADERS) \
-    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
-    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
 else
   my_target_global_c_includes := $(SRC_HEADERS) \
     $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) $(TARGET_OUT_HEADERS) \
+  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
     $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
 endif
 
@@ -1667,14 +1662,8 @@
 
 ifdef LOCAL_USE_VNDK
   imported_includes += $(call intermediates-dir-for,HEADER_LIBRARIES,device_kernel_headers,$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))
-else ifdef LOCAL_SDK_VERSION
-  # Apps shouldn't need device-specific kernel headers
-else ifdef BOARD_VNDK_VERSION
-  # For devices building with the VNDK, only the VNDK gets device-specific kernel headers by default
-  # In soong, it's entirely opt-in
 else
-  # For older non-VNDK builds, continue adding in kernel headers to everything like we used to
-  imported_includes += $(call intermediates-dir-for,HEADER_LIBRARIES,device_kernel_headers,$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))
+  # everything else should manually specify headers
 endif
 
 imported_includes := $(strip \
diff --git a/core/board_config.mk b/core/board_config.mk
index f459d83..663ec7c 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -968,27 +968,13 @@
   $(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
 endef
 
-ifdef BOARD_VNDK_VERSION
-  ifeq ($(BOARD_VNDK_VERSION),$(PLATFORM_VNDK_VERSION))
-    $(error BOARD_VNDK_VERSION is equal to PLATFORM_VNDK_VERSION; use BOARD_VNDK_VERSION := current)
-  endif
-  ifneq ($(BOARD_VNDK_VERSION),current)
-    $(call check_vndk_version,$(BOARD_VNDK_VERSION))
-  endif
-  TARGET_VENDOR_TEST_SUFFIX := /vendor
-else
-  TARGET_VENDOR_TEST_SUFFIX :=
+ifeq ($(BOARD_VNDK_VERSION),$(PLATFORM_VNDK_VERSION))
+  $(error BOARD_VNDK_VERSION is equal to PLATFORM_VNDK_VERSION; use BOARD_VNDK_VERSION := current)
 endif
-
-# If PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is set,
-# BOARD_VNDK_VERSION must be set because PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY
-# is a enforcement of inter-partition dependency, and it doesn't have any meaning
-# when BOARD_VNDK_VERSION isn't set.
-ifeq ($(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY),true)
-  ifeq ($(BOARD_VNDK_VERSION),)
-    $(error BOARD_VNDK_VERSION must be set when PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true)
-  endif
+ifneq ($(BOARD_VNDK_VERSION),current)
+  $(call check_vndk_version,$(BOARD_VNDK_VERSION))
 endif
+TARGET_VENDOR_TEST_SUFFIX := /vendor
 
 ###########################################
 # APEXes are by default not flattened, i.e. updatable.
diff --git a/core/config.mk b/core/config.mk
index c549296..c166ef7 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -356,6 +356,16 @@
 endif
 -include $(ANDROID_BUILDSPEC)
 
+# Starting in Android U, non-VNDK devices are not supported.
+# WARNING: DO NOT CHANGE. If you are downstream of AOSP and you change this without
+# letting upstream know it's important to you, we may do cleanup which breaks this
+# significantly. Please let us know if you are changing this.
+ifndef BOARD_VNDK_VERSION
+# READ WARNING - DO NOT CHANGE
+BOARD_VNDK_VERSION := current
+# READ WARNING - DO NOT CHANGE
+endif
+
 # ---------------------------------------------------------------
 # Define most of the global variables.  These are the ones that
 # are specific to the user's build configuration.
@@ -775,24 +785,6 @@
   BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
 endif
 
-# Starting in Android U, non-VNDK devices not supported
-# WARNING: DO NOT CHANGE: if you are downstream of AOSP, and you change this, without
-# letting upstream know it's important to you, we may do cleanup which breaks this
-# significantly. Please let us know if you are changing this.
-ifndef BOARD_VNDK_VERSION
-# READ WARNING - DO NOT CHANGE
-BOARD_VNDK_VERSION := current
-# READ WARNING - DO NOT CHANGE
-endif
-
-ifdef PRODUCT_PRODUCT_VNDK_VERSION
-  ifndef BOARD_VNDK_VERSION
-    # VNDK for product partition is not available unless BOARD_VNDK_VERSION
-    # defined.
-    $(error PRODUCT_PRODUCT_VNDK_VERSION cannot be defined without defining BOARD_VNDK_VERSION)
-  endif
-endif
-
 # Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
 # devices if unset.
 ifndef BOARD_SYSTEMSDK_VERSIONS
@@ -907,6 +899,7 @@
     31.0 \
     32.0 \
     33.0 \
+    34.0 \
 
 .KATI_READONLY := \
     PLATFORM_SEPOLICY_COMPAT_VERSIONS \
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index aa638d4..7fa190f 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -140,6 +140,10 @@
                                     $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS)
     combined_exclude_paths := $(MEMTAG_HEAP_EXCLUDE_PATHS) \
                               $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS)
+    ifneq ($(PRODUCT_MEMTAG_HEAP_SKIP_DEFAULT_PATHS),true)
+      combined_sync_include_paths += $(PRODUCT_MEMTAG_HEAP_SYNC_DEFAULT_INCLUDE_PATHS)
+      combined_async_include_paths += $(PRODUCT_MEMTAG_HEAP_ASYNC_DEFAULT_INCLUDE_PATHS)
+    endif
 
     ifeq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_exclude_paths)),\
           $(filter $(dir)%,$(LOCAL_PATH)))),)
@@ -176,6 +180,7 @@
 ifneq ($(filter address,$(my_sanitize)),)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
   my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_globals,$(my_sanitize))
   my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
@@ -183,8 +188,8 @@
 # Disable memtag for host targets. Host executables in AndroidMk files are
 # deprecated, but some partners still have them floating around.
 ifdef LOCAL_IS_HOST_MODULE
-  my_sanitize := $(filter-out memtag_heap memtag_stack,$(my_sanitize))
-  my_sanitize_diag := $(filter-out memtag_heap memtag_stack,$(my_sanitize_diag))
+  my_sanitize := $(filter-out memtag_heap memtag_stack memtag_globals,$(my_sanitize))
+  my_sanitize_diag := $(filter-out memtag_heap memtag_stack memtag_globals,$(my_sanitize_diag))
 endif
 
 # Disable sanitizers which need the UBSan runtime for host targets.
@@ -219,11 +224,13 @@
   my_sanitize := $(filter-out hwaddress,$(my_sanitize))
   my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
   my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_globals,$(my_sanitize))
 endif
 
 ifneq ($(filter hwaddress,$(my_sanitize)),)
   my_sanitize := $(filter-out address,$(my_sanitize))
   my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_globals,$(my_sanitize))
   my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
   my_sanitize := $(filter-out thread,$(my_sanitize))
   my_sanitize := $(filter-out cfi,$(my_sanitize))
@@ -240,7 +247,7 @@
   endif
 endif
 
-ifneq ($(filter memtag_heap memtag_stack,$(my_sanitize)),)
+ifneq ($(filter memtag_heap memtag_stack memtag_globals,$(my_sanitize)),)
   ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
     my_cflags += -fsanitize-memtag-mode=sync
     my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
@@ -249,6 +256,13 @@
   endif
 endif
 
+# Ignore SANITIZE_TARGET_DIAG=memtag_heap without SANITIZE_TARGET=memtag_heap
+# This can happen if a condition above filters out memtag_heap from
+# my_sanitize. It is easier to handle all of these cases here centrally.
+ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
+  my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+endif
+
 ifneq ($(filter memtag_heap,$(my_sanitize)),)
   my_cflags += -fsanitize=memtag-heap
   my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
@@ -262,6 +276,14 @@
   my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
 endif
 
+ifneq ($(filter memtag_globals,$(my_sanitize)),)
+  my_cflags += -fsanitize=memtag-globals
+  # TODO(mitchp): For now, enable memtag-heap with memtag-globals because the
+  # linker isn't new enough
+  # (https://reviews.llvm.org/differential/changeset/?ref=4243566).
+  my_sanitize := $(filter-out memtag_globals,$(my_sanitize))
+endif
+
 # TSAN is not supported on 32-bit architectures. For non-multilib cases, make
 # its use an error. For multilib cases, don't use it for the 32-bit case.
 ifneq ($(filter thread,$(my_sanitize)),)
diff --git a/core/copy_headers.mk b/core/copy_headers.mk
index 054d271..c457eb0 100644
--- a/core/copy_headers.mk
+++ b/core/copy_headers.mk
@@ -18,11 +18,9 @@
 # If we're using the VNDK, only vendor modules using the VNDK may use
 # LOCAL_COPY_HEADERS. Platform libraries will not have the include path
 # present.
-ifdef BOARD_VNDK_VERSION
 ifndef LOCAL_USE_VNDK
   $(call pretty-error,Only vendor modules using LOCAL_USE_VNDK may use LOCAL_COPY_HEADERS)
 endif
-endif
 
 # Clean up LOCAL_COPY_HEADERS_TO, since soong_ui will be comparing cleaned
 # paths to figure out which headers are obsolete and should be removed.
diff --git a/core/definitions.mk b/core/definitions.mk
index 7697211..be40584 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2941,7 +2941,7 @@
 define compress-package
 $(hide) \
   mv $@ $@.uncompressed; \
-  $(MINIGZIP) -c $@.uncompressed > $@.compressed; \
+  $(MINIGZIP) -9 -c $@.uncompressed > $@.compressed; \
   rm -f $@.uncompressed; \
   mv $@.compressed $@;
 endef
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 62c3ba3..6ac169b 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -80,15 +80,42 @@
   $(foreach m,$(other_system_server_jars),\
     $(PRODUCT_OUT)/$(call word-colon,1,$(m))/framework/$(call word-colon,2,$(m)).jar)
 
+# Infix can be 'art' (ART image for testing), 'boot' (primary), or 'mainline' (mainline extension).
+# Soong creates a set of variables for Make, one for each boot image. The only reason why the ART
+# image is exposed to Make is testing (art gtests) and benchmarking (art golem benchmarks). Install
+# rules that use those variables are in dex_preopt_libart.mk. Here for dexpreopt purposes the infix
+# is always 'boot' or 'mainline'.
+DEXPREOPT_INFIX := $(if $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)),mainline,boot)
+
+# The input variables are written by build/soong/java/dexpreopt_bootjars.go. Examples can be found
+# at the bottom of build/soong/java/dexpreopt_config_testing.go.
+dexpreopt_root_dir := $(dir $(patsubst %/,%,$(dir $(firstword $(bootclasspath_jars)))))
+booclasspath_arg := $(subst $(space),:,$(patsubst $(dexpreopt_root_dir)%,%,$(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)))
+booclasspath_locations_arg := $(subst $(space),:,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS))
+boot_images := $(subst :,$(space),$(DEXPREOPT_IMAGE_LOCATIONS_ON_DEVICE$(DEXPREOPT_INFIX)))
+boot_image_arg := $(subst $(space),:,$(patsubst /%,%,$(boot_images)))
+dex2oat_extra_args := $(if $(filter true,$(ENABLE_UFFD_GC)),--runtime-arg -Xgc:CMC)
+
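+# METADATA.txt is a simple "key = value" file; illustrative content (actual
+# values depend on the product's boot classpath and boot image configuration):
+#   booclasspath = <path/to/jar1>:<path/to/jar2>
+#   booclasspath-locations = /apex/...:/system/framework/...
+#   boot-image = <boot image path>:...
+#   extra-args = --runtime-arg -Xgc:CMC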
+boot_zip_metadata_txt := $(dir $(boot_zip))boot_zip/METADATA.txt
+$(boot_zip_metadata_txt):
+	rm -f $@
+	echo "booclasspath = $(booclasspath_arg)" >> $@
+	echo "booclasspath-locations = $(booclasspath_locations_arg)" >> $@
+	echo "boot-image = $(boot_image_arg)" >> $@
+	echo "extra-args = $(dex2oat_extra_args)" >> $@
+
+$(call dist-for-goals, droidcore, $(boot_zip_metadata_txt))
+
 $(boot_zip): PRIVATE_BOOTCLASSPATH_JARS := $(bootclasspath_jars)
 $(boot_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
-$(boot_zip): $(bootclasspath_jars) $(system_server_jars) $(SOONG_ZIP) $(MERGE_ZIPS) $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art)
+$(boot_zip): $(bootclasspath_jars) $(system_server_jars) $(SOONG_ZIP) $(MERGE_ZIPS) $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art) $(DEXPREOPT_IMAGE_ZIP_mainline) $(boot_zip_metadata_txt)
 	@echo "Create boot package: $@"
 	rm -f $@
 	$(SOONG_ZIP) -o $@.tmp \
 	  -C $(dir $(firstword $(PRIVATE_BOOTCLASSPATH_JARS)))/.. $(addprefix -f ,$(PRIVATE_BOOTCLASSPATH_JARS)) \
-	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS))
-	$(MERGE_ZIPS) $@ $@.tmp $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art)
+	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS)) \
+	  -j -f $(boot_zip_metadata_txt)
+	$(MERGE_ZIPS) $@ $@.tmp $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art) $(DEXPREOPT_IMAGE_ZIP_mainline)
 	rm -f $@.tmp
 
 $(call dist-for-goals, droidcore, $(boot_zip))
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 14fafa1..288f81f 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -152,6 +152,9 @@
   $(LOCAL_USES_LIBRARIES) \
   $(my_filtered_optional_uses_libraries)
 
+# The order needs to be deterministic.
+my_dexpreopt_libs_all := $(sort $(my_dexpreopt_libs) $(my_dexpreopt_libs_compat))
+
 # Module dexpreopt.config depends on dexpreopt.config files of each
 # <uses-library> dependency, because these libraries may be processed after
 # the current module by Make (there's no topological order), so the dependency
@@ -274,13 +277,7 @@
 my_dexpreopt_images_deps :=
 my_dexpreopt_image_locations_on_host :=
 my_dexpreopt_image_locations_on_device :=
-# Infix can be 'art', 'boot', or 'mainline'. Soong creates a set of variables
-# for Make, one or each boot image (primary, the framework extension, and the
-# mainline extension). The only reason why the primary image is exposed to Make
-# is testing (art gtests) and benchmarking (art golem benchmarks). Install rules
-# that use those variables are in dex_preopt_libart.mk. Here for dexpreopt
-# purposes the infix is always 'boot' or 'mainline'.
-my_dexpreopt_infix := $(if $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)),mainline,boot)
+my_dexpreopt_infix := $(DEXPREOPT_INFIX)
 my_create_dexpreopt_config :=
 
 ifdef LOCAL_DEX_PREOPT
@@ -448,6 +445,28 @@
 	  @cp $(PRIVATE_BUILT_MODULE) $@
   endif
 
+  # The root "product_packages.txt" is generated by `build/make/core/Makefile`. It contains a list
+  # of all packages that are installed on the device. We use `grep` to filter the list by the app's
+  # dependencies to create a per-app list, and use `rsync --checksum` to prevent the file's mtime
+  # from being changed if the contents don't change. This avoids unnecessary dexpreopt reruns.
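+  # For example (hypothetical libraries): with PRIVATE_LIBS set to
+  # "android.test.base org.apache.http.legacy", the grep below keeps only the lines
+  # of product_packages.txt that exactly match one of those two names.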
+  my_dexpreopt_product_packages := $(intermediates)/product_packages.txt
+  .KATI_RESTAT: $(my_dexpreopt_product_packages)
+  $(my_dexpreopt_product_packages): PRIVATE_MODULE := $(LOCAL_MODULE)
+  $(my_dexpreopt_product_packages): PRIVATE_LIBS := $(my_dexpreopt_libs_all)
+  $(my_dexpreopt_product_packages): PRIVATE_STAGING := $(my_dexpreopt_product_packages).tmp
+  $(my_dexpreopt_product_packages): $(PRODUCT_OUT)/product_packages.txt
+	@echo "$(PRIVATE_MODULE) dexpreopt product_packages"
+  ifneq (,$(my_dexpreopt_libs_all))
+		grep -F -x \
+			$(addprefix -e ,$(PRIVATE_LIBS)) \
+			$(PRODUCT_OUT)/product_packages.txt \
+			> $(PRIVATE_STAGING) \
+			|| true
+  else
+		rm -f $(PRIVATE_STAGING) && touch $(PRIVATE_STAGING)
+  endif
+	rsync --checksum $(PRIVATE_STAGING) $@
+
   my_dexpreopt_script := $(intermediates)/dexpreopt.sh
   my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
   DEXPREOPT.$(LOCAL_MODULE).POST_INSTALLED_DEXPREOPT_ZIP := $(my_dexpreopt_zip)
@@ -456,9 +475,10 @@
   $(my_dexpreopt_script): PRIVATE_GLOBAL_SOONG_CONFIG := $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
   $(my_dexpreopt_script): PRIVATE_GLOBAL_CONFIG := $(DEX_PREOPT_CONFIG_FOR_MAKE)
   $(my_dexpreopt_script): PRIVATE_MODULE_CONFIG := $(my_dexpreopt_config)
+  $(my_dexpreopt_script): PRIVATE_PRODUCT_PACKAGES := $(my_dexpreopt_product_packages)
   $(my_dexpreopt_script): $(DEXPREOPT_GEN)
   $(my_dexpreopt_script): $(my_dexpreopt_jar_copy)
-  $(my_dexpreopt_script): $(my_dexpreopt_config) $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE) $(DEX_PREOPT_CONFIG_FOR_MAKE)
+  $(my_dexpreopt_script): $(my_dexpreopt_config) $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE) $(DEX_PREOPT_CONFIG_FOR_MAKE) $(my_dexpreopt_product_packages)
 	@echo "$(PRIVATE_MODULE) dexpreopt gen"
 	$(DEXPREOPT_GEN) \
 	-global_soong $(PRIVATE_GLOBAL_SOONG_CONFIG) \
@@ -466,12 +486,12 @@
 	-module $(PRIVATE_MODULE_CONFIG) \
 	-dexpreopt_script $@ \
 	-out_dir $(OUT_DIR) \
-	-product_packages $(PRODUCT_OUT)/product_packages.txt
+	-product_packages $(PRIVATE_PRODUCT_PACKAGES)
 
   my_dexpreopt_deps := $(my_dex_jar)
   my_dexpreopt_deps += $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE))
   my_dexpreopt_deps += \
-    $(foreach lib, $(my_dexpreopt_libs) $(my_dexpreopt_libs_compat), \
+    $(foreach lib, $(my_dexpreopt_libs_all), \
       $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
   my_dexpreopt_deps += $(my_dexpreopt_images_deps)
   my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
@@ -507,8 +527,11 @@
   $(my_all_targets): $(my_dexpreopt_zip)
 
   my_dexpreopt_config :=
+  my_dexpreopt_product_packages :=
   my_dexpreopt_script :=
   my_dexpreopt_zip :=
   my_dexpreopt_config_for_postprocessing :=
 endif # LOCAL_DEX_PREOPT
 endif # my_create_dexpreopt_config
+
+my_dexpreopt_libs_all :=
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 8887ddc..f5a2022 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -24,19 +24,24 @@
 #$(warning $(call find_and_earlier,A B C,C))
 #$(warning $(call find_and_earlier,A B C,D))
 
-# Runs the starlark file given in $(1), and sets all the variables in its top-level
+# Runs a starlark file, and sets all the variables in its top-level
 # variables_to_export_to_make variable as make variables.
 #
 # In order to avoid running starlark every time the stamp file is checked, we use
 # $(KATI_shell_no_rerun). Then, to make sure that we actually do rerun kati when
 # modifying the starlark files, we add the starlark files to the kati stamp file with
 # $(KATI_extra_file_deps).
+#
+# Arguments:
+#  $(1): A single starlark file to use as the entrypoint
+#  $(2): An optional list of starlark files to NOT include as kati dependencies.
+#  $(3): An optional list of extra flags to pass to rbcrun
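+#
+# Example (core/release_config.mk passes the entrypoint it generates as both
+# $(1) and $(2), plus --allow_external_entrypoint as $(3)):
+#   $(call run-starlark,$(OUT_DIR)/release_config_entrypoint.bzl,$(OUT_DIR)/release_config_entrypoint.bzl,--allow_external_entrypoint)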
 define run-starlark
 $(eval _starlark_results := $(OUT_DIR)/starlark_results/$(subst /,_,$(1)).mk)
-$(KATI_shell_no_rerun mkdir -p $(OUT_DIR)/starlark_results && $(OUT_DIR)/rbcrun --mode=make $(1) >$(_starlark_results) && touch -t 200001010000 $(_starlark_results))
+$(KATI_shell_no_rerun mkdir -p $(OUT_DIR)/starlark_results && $(OUT_DIR)/rbcrun --mode=make $(3) $(1) >$(_starlark_results) && touch -t 200001010000 $(_starlark_results))
 $(if $(filter-out 0,$(.SHELLSTATUS)),$(error Starlark failed to run))
 $(eval include $(_starlark_results))
-$(KATI_extra_file_deps $(LOADED_STARLARK_FILES))
+$(KATI_extra_file_deps $(filter-out $(2),$(LOADED_STARLARK_FILES)))
 $(eval LOADED_STARLARK_FILES :=)
 $(eval _starlark_results :=)
 endef
@@ -61,7 +66,7 @@
   $(if $(filter $(ALL_VERSIONS),$(2)),,
     $(error Invalid MAX_PLATFORM_VERSION '$(2)'))
   $(if $(filter $(ALL_VERSIONS),$(3)),,
-    $(error Invalid DEFAULT_PLATFORM_VERSION '$(3)'))
+    $(error Invalid RELEASE_PLATFORM_VERSION '$(3)'))
 
   $(eval allowed_versions_ := $(call find_and_earlier,$(ALL_VERSIONS),$(2)))
 
@@ -72,7 +77,7 @@
     $(filter-out $(call find_and_earlier,$(allowed_versions_),$(1)),$(allowed_versions_)))
 
   $(if $(filter $(allowed_versions_),$(3)),,
-    $(error DEFAULT_PLATFORM_VERSION '$(3)' must be between MIN_PLATFORM_VERSION '$(1)' and MAX_PLATFORM_VERSION '$(2)'))
+    $(error RELEASE_PLATFORM_VERSION '$(3)' must be between MIN_PLATFORM_VERSION '$(1)' and MAX_PLATFORM_VERSION '$(2)'))
 
   $(allowed_versions_))
 endef
diff --git a/core/local_vndk.mk b/core/local_vndk.mk
index befbc59..eb8f2c0 100644
--- a/core/local_vndk.mk
+++ b/core/local_vndk.mk
@@ -37,12 +37,5 @@
     $(shell echo $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): LOCAL_USE_VNDK must not be used with LOCAL_SDK_VERSION >&2)
     $(error done)
   endif
-
-  # If we're not using the VNDK, drop all restrictions
-  ifndef BOARD_VNDK_VERSION
-    LOCAL_USE_VNDK:=
-    LOCAL_USE_VNDK_VENDOR:=
-    LOCAL_USE_VNDK_PRODUCT:=
-  endif
 endif
 
diff --git a/core/main.mk b/core/main.mk
index 40e690d..5a591f9 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -4,7 +4,7 @@
 $(error done)
 endif
 
-$(info [1/1] initializing build system ...)
+$(info [1/1] initializing legacy Make module parser ...)
 
 # Absolute path of the present working directory.
 # This overrides the shell variable $PWD, which does not necessarily point to
@@ -554,7 +554,7 @@
 subdir_makefiles_total := $(words init post finish)
 endif
 
-$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] finishing build rules ...)
+$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] finishing legacy Make module parsing ...)
 
 # -------------------------------------------------------------------
 # All module makefiles have been included at this point.
@@ -2151,13 +2151,21 @@
 #       is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile.
 #       is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile.
 #       is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile.
+#   build_output_path: the path of the built file, used to calculate checksum
+#   static_libraries/whole_static_libraries: list of module names of the static libraries the file links against, e.g. libclang_rt.builtins or libclang_rt.builtins_32
+#       Info about the static libraries of all installed files is collected in the variable _all_static_libs, which is used to list all static library files in sbom-metadata.csv.
+#       See the second foreach loop in the sbom-metadata.csv rule for the details of the static library info collected in _all_static_libs.
+#   is_static_lib: whether the file is a static library
 
+metadata_list := $(OUT_DIR)/.module_paths/METADATA.list
+metadata_files := $(subst $(newline),$(space),$(file <$(metadata_list)))
 # (TODO: b/272358583 find another way of always rebuilding this target)
 # Remove the sbom-metadata.csv whenever makefile is evaluated
 $(shell rm $(PRODUCT_OUT)/sbom-metadata.csv >/dev/null 2>&1)
-$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files)
+$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files) $(metadata_list) $(metadata_files)
 	rm -f $@
-	@echo installed_file$(comma)module_path$(comma)soong_module_type$(comma)is_prebuilt_make_module$(comma)product_copy_files$(comma)kernel_module_copy_files$(comma)is_platform_generated,build_output_path >> $@
+	echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib >> $@
+	$(eval _all_static_libs :=)
 	$(foreach f,$(installed_files),\
 	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
 	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
@@ -2179,11 +2187,25 @@
 	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
 	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
 	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)) \
-	  @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(_build_output_path) >> $@ $(newline) \
+	  $(eval _static_libs := $(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES)) \
+	  $(eval _whole_static_libs := $(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES)) \
+	  $(foreach l,$(_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
+	  $(foreach l,$(_whole_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
+	  echo /$(_path_on_device),$(_module_path),$(_soong_module_type),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_build_output_path),$(_static_libs),$(_whole_static_libs), >> $@; \
 	  $(if $(_post_installed_dexpreopt_zip), \
-	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(PRODUCT_OUT)/$$i >> $@ ; done $(newline) \
+	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(PRODUCT_OUT)/$$i$(comma)$(_static_libs)$(comma)$(_whole_static_libs)$(comma) >> $@ ; done ; \
 	  ) \
 	)
+	$(foreach l,$(sort $(_all_static_libs)), \
+	  $(eval _lib_stem := $(call word-colon,1,$l)) \
+	  $(eval _module_path := $(call word-colon,2,$l)) \
+	  $(eval _soong_module_type := $(call word-colon,3,$l)) \
+	  $(eval _built_file := $(call word-colon,4,$l)) \
+	  $(eval _static_libs := $(ALL_STATIC_LIBRARIES.$l.STATIC_LIBRARIES)) \
+	  $(eval _whole_static_libs := $(ALL_STATIC_LIBRARIES.$l.WHOLE_STATIC_LIBRARIES)) \
+	  $(eval _is_static_lib := Y) \
+	  echo $(_lib_stem).a,$(_module_path),$(_soong_module_type),,,,,$(_built_file),$(_static_libs),$(_whole_static_libs),$(_is_static_lib) >> $@; \
+	)
 
 .PHONY: sbom
 ifeq ($(TARGET_BUILD_APPS),)
@@ -2195,21 +2217,51 @@
 
 $(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
 else
-apps_only_sbom_files := $(sort $(patsubst %,%.spdx.json,$(filter %.apk,$(apps_only_installed_files))))
-$(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
-	rm -rf $@
-	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --unbundled_apk
+# Create build rules for generating SBOMs of unbundled APKs and APEXs
+# $1: sbom file
+# $2: sbom fragment file
+# $3: installed file
+# $4: sbom-metadata.csv file
+define generate-app-sbom
+$(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$(3)))
+$(eval _module_name := $(ALL_INSTALLED_FILES.$(3)))
+$(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH))))
+$(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE))))
+$(eval _dep_modules := $(filter %.$(_module_name),$(ALL_MODULES)) $(filter %.$(_module_name)$(TARGET_2ND_ARCH_MODULE_SUFFIX),$(ALL_MODULES)))
+$(eval _is_apex := $(filter %.apex,$(3)))
+
+$(4): $(3) $(metadata_list) $(metadata_files)
+	rm -rf $$@
+	echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib >> $$@
+	echo /$(_path_on_device),$(_module_path),$(_soong_module_type),,,,,$(3),,, >> $$@
+	$(if $(filter %.apex,$(3)),\
+	  $(foreach m,$(_dep_modules),\
+	    echo $(patsubst $(PRODUCT_OUT)/apex/$(_module_name)/%,%,$(ALL_MODULES.$m.INSTALLED)),$(sort $(ALL_MODULES.$m.PATH)),$(sort $(ALL_MODULES.$m.SOONG_MODULE_TYPE)),,,,,$(strip $(ALL_MODULES.$m.BUILT)),,, >> $$@;))
+
+$(2): $(1)
+$(1): $(4) $(GEN_SBOM)
+	rm -rf $$@
+	$(GEN_SBOM) --output_file $$@ --metadata $(4) --build_version $$(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json $(if $(filter %.apk,$(3)),--unbundled_apk,--unbundled_apex)
+endef
+
+apps_only_sbom_files :=
+apps_only_fragment_files :=
+$(foreach f,$(filter %.apk %.apex,$(installed_files)), \
+  $(eval _metadata_csv_file := $(patsubst %,%-sbom-metadata.csv,$f)) \
+  $(eval _sbom_file := $(patsubst %,%.spdx.json,$f)) \
+  $(eval _fragment_file := $(patsubst %,%-fragment.spdx,$f)) \
+  $(eval apps_only_sbom_files += $(_sbom_file)) \
+  $(eval apps_only_fragment_files += $(_fragment_file)) \
+  $(eval $(call generate-app-sbom,$(_sbom_file),$(_fragment_file),$f,$(_metadata_csv_file))) \
+)
 
 sbom: $(apps_only_sbom_files)
 
-$(foreach f,$(apps_only_sbom_files),$(eval $(patsubst %.spdx.json,%-fragment.spdx,$f): $f))
-apps_only_fragment_files := $(patsubst %.spdx.json,%-fragment.spdx,$(apps_only_sbom_files))
 $(foreach f,$(apps_only_fragment_files),$(eval apps_only_fragment_dist_files += :sbom/$(notdir $f)))
-
 $(foreach f,$(apps_only_sbom_files),$(eval apps_only_sbom_dist_files += :sbom/$(notdir $f)))
 $(call dist-for-goals,apps_only,$(join $(apps_only_sbom_files),$(apps_only_sbom_dist_files)) $(join $(apps_only_fragment_files),$(apps_only_fragment_dist_files)))
 endif
 
 $(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
 
-$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
+$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing legacy Make module rules ...)
diff --git a/core/product.mk b/core/product.mk
index 818aac2..8f4db38 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -247,6 +247,16 @@
 # Whether any paths should have HWASan enabled for components
 _product_list_vars += PRODUCT_HWASAN_INCLUDE_PATHS
 
+# Whether any paths should have Memtag_heap enabled for components
+_product_list_vars += PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS
+_product_list_vars += PRODUCT_MEMTAG_HEAP_ASYNC_DEFAULT_INCLUDE_PATHS
+_product_list_vars += PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS
+_product_list_vars += PRODUCT_MEMTAG_HEAP_SYNC_DEFAULT_INCLUDE_PATHS
+_product_list_vars += PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS
+
+# Whether this product wants to start with an empty list of default memtag_heap include paths
+_product_single_value_vars += PRODUCT_MEMTAG_HEAP_SKIP_DEFAULT_PATHS
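+# For example, a product makefile might set (path is hypothetical):
+#   PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS := vendor/acme/critical_service
+#   PRODUCT_MEMTAG_HEAP_SKIP_DEFAULT_PATHS := true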
+
 # Whether the Scudo hardened allocator is disabled platform-wide
 _product_single_value_vars += PRODUCT_DISABLE_SCUDO
 
@@ -368,8 +378,6 @@
 # a java_sdk_library module.
 _product_list_vars += PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST
 
-_product_single_value_vars += PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES
-
 # Install a copy of the debug policy to the system_ext partition, and allow
 # init-second-stage to load debug policy from system_ext.
 # This option is only meant to be set by compliance GSI targets.
@@ -389,6 +397,9 @@
 # If true, installs a full version of com.android.virt APEX.
 _product_single_value_vars += PRODUCT_AVF_ENABLED
 
+# If true, a kernel with modules will be used for Microdroid VMs.
+_product_single_value_vars += PRODUCT_AVF_KERNEL_MODULES_ENABLED
+
 # List of .json files to be merged/compiled into vendor/etc/linker.config.pb
 _product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS
 
diff --git a/core/release_config.bzl b/core/release_config.bzl
new file mode 100644
index 0000000..805106f
--- /dev/null
+++ b/core/release_config.bzl
@@ -0,0 +1,121 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Partitions that get build system flag summaries
+_flag_partitions = [
+    "product",
+    "system",
+    "system_ext",
+    "vendor",
+]
+
+ALL = ["all"]
+PRODUCT = ["product"]
+SYSTEM = ["system"]
+SYSTEM_EXT = ["system_ext"]
+VENDOR = ["vendor"]
+
+_valid_types = ["NoneType", "bool", "list", "string", "int"]
+
+def flag(name, partitions, default):
+    "Declare a flag."
+    if not partitions:
+        fail("At least 1 partition is required")
+    if not name.startswith("RELEASE_"):
+        fail("Release flag names must start with RELEASE_")
+    if " " in name or "\t" in name or "\n" in name:
+        fail("Flag names must not contain whitespace: \"" + name + "\"")
+    for partition in partitions:
+        if partition == "all":
+            if len(partitions) > 1:
+                fail("\"all\" can't be combined with other partitions: " + str(partitions))
+        elif partition not in _flag_partitions:
+            fail("Invalid partition: " + partition + ", allowed partitions: " +
+                 str(_flag_partitions))
+    if type(default) not in _valid_types:
+        fail("Invalid type of default for flag \"" + name + "\" (" + type(default) + ")")
+    return {
+        "name": name,
+        "partitions": partitions,
+        "default": default,
+    }
+
+def value(name, value):
+    "Define the flag value for a particular configuration."
+    return {
+        "name": name,
+        "value": value,
+    }
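+# Illustrative usage from a flag declaration file and a value file (the flag
+# name is hypothetical):
+#
+#   flags = [
+#       flag("RELEASE_MY_FEATURE", ALL, False),
+#   ]
+#
+#   values = [
+#       value("RELEASE_MY_FEATURE", True),
+#   ]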
+
+def _format_value(val):
+    "Format the starlark type correctly for make"
+    if type(val) == "NoneType":
+        return ""
+    elif type(val) == "bool":
+        return "true" if val else ""
+    else:
+        return val
+
+def release_config(all_flags, all_values):
+    "Return the make variables that should be set for this release config."
+
+    # Validate flags
+    flag_names = []
+    for flag in all_flags:
+        if flag["name"] in flag_names:
+            fail(flag["declared_in"] + ": Duplicate declaration of flag " + flag["name"])
+        flag_names.append(flag["name"])
+
+    # Record which flags go on which partition
+    partitions = {}
+    for flag in all_flags:
+        for partition in flag["partitions"]:
+            if partition == "all":
+                for partition in _flag_partitions:
+                    partitions.setdefault(partition, []).append(flag["name"])
+            else:
+                partitions.setdefault(partition, []).append(flag["name"])
+
+    # Validate values
+    # TODO(joeo): Disallow duplicate values after we've split AOSP and vendor flags.
+    values = {}
+    for value in all_values:
+        if value["name"] not in flag_names:
+            fail(value["set_in"] + ": Value set for undeclared build flag: " + value["name"])
+        values[value["name"]] = value
+
+    # Collect values
+    result = {
+        "_ALL_RELEASE_FLAGS": sorted(flag_names),
+    }
+    for partition, names in partitions.items():
+        result["_ALL_RELEASE_FLAGS.PARTITIONS." + partition] = names
+    for flag in all_flags:
+        if flag["name"] in values:
+            val = values[flag["name"]]["value"]
+            set_in = values[flag["name"]]["set_in"]
+            if type(val) not in _valid_types:
+                fail("Invalid type of value for flag \"" + flag["name"] + "\" (" + type(val) + ")")
+        else:
+            val = flag["default"]
+            set_in = flag["declared_in"]
+        val = _format_value(val)
+        result[flag["name"]] = val
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".PARTITIONS"] = flag["partitions"]
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".DEFAULT"] = _format_value(flag["default"])
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".VALUE"] = val
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".DECLARED_IN"] = flag["declared_in"]
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".SET_IN"] = set_in
+
+    return result
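+
+# For example (hypothetical flag), declaring flag("RELEASE_MY_FEATURE", ALL, False)
+# with no matching value() yields, roughly, these entries in the returned dict:
+#   "RELEASE_MY_FEATURE": ""            (False is formatted as the empty string)
+#   "_ALL_RELEASE_FLAGS.RELEASE_MY_FEATURE.DEFAULT": ""
+#   "_ALL_RELEASE_FLAGS.RELEASE_MY_FEATURE.VALUE": ""
+# which run-starlark then exports to make as variables of the same names.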
diff --git a/core/release_config.mk b/core/release_config.mk
index fdfc6a0..b72ee89 100644
--- a/core/release_config.mk
+++ b/core/release_config.mk
@@ -12,17 +12,30 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# Partitions that get build system flag summaries
-_FLAG_PARTITIONS := system vendor system_ext product
-
-# All possible release flags. Defined in the build_flags.mk files
-# throughout the tree
-_ALL_RELEASE_FLAGS :=
 
 # -----------------------------------------------------------------
 # Choose the flag files
+# -----------------------------------------------------------------
+# Release configs are defined in release_config_map files, which map
+# the short name (e.g. -next) used in lunch to the starlark files
+# defining the build flag values.
+#
+# (If you're thinking about aconfig flags, there is one build flag,
+# RELEASE_ACONFIG_VALUE_SETS, that sets which aconfig_value_set
+# module to use to set the aconfig flag values.)
+#
+# The short release config names *can* appear multiple times, to allow
+# for AOSP and vendor specific flags under the same name, but the
+# individual flag values must appear in exactly one config.  Vendor
+# does not override AOSP, or anything like that.  This is because
+# vendor code usually includes prebuilts, and having vendor compile
+# with different flags from AOSP increases the likelihood of flag
+# mismatch.
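+#
+# A release_config_map.mk simply maps config names to flag value files, e.g.
+# (hypothetical vendor path):
+#   $(call declare-release-config, next, vendor/acme/release/release_configs/next.bzl)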
+
 # Do this first, because we're going to unset TARGET_RELEASE before
 # including anyone, so they don't start making conditionals based on it.
+# This logic is in make because starlark doesn't understand optional
+# vendor files.
 
 # If this is a google source tree, restrict it to only the one file
 # which has OWNERS control.  If it isn't let others define their own.
@@ -42,17 +55,12 @@
 # $1 config name
 # $2 release config files
 define declare-release-config
-    $(eval # No duplicates)
-    $(if $(filter $(_all_release_configs), $(strip $(1))), \
-        $(error declare-release-config: config $(strip $(1)) declared in: $(_included) Previously declared here: $(_all_release_configs.$(strip $(1)).DECLARED_IN)) \
-    )
-    $(eval # Must have release config files)
     $(if $(strip $(2)),,  \
         $(error declare-release-config: config $(strip $(1)) must have release config files) \
     )
     $(eval _all_release_configs := $(sort $(_all_release_configs) $(strip $(1))))
-    $(eval _all_release_configs.$(strip $(1)).DECLARED_IN := $(_included))
-    $(eval _all_release_configs.$(strip $(1)).FILES := $(strip $(2)))
+    $(eval _all_release_configs.$(strip $(1)).DECLARED_IN := $(_included) $(_all_release_configs.$(strip $(1)).DECLARED_IN))
+    $(eval _all_release_configs.$(strip $(1)).FILES := $(_all_release_configs.$(strip $(1)).FILES) $(strip $(2)))
 endef
 
 # Include the config map files
@@ -70,17 +78,17 @@
 else
     # Choose flag files
     # Don't sort this, use it in the order they gave us.
-    _release_config_files := $(_all_release_configs.$(TARGET_RELEASE).FILES)
+    flag_value_files := $(_all_release_configs.$(TARGET_RELEASE).FILES)
 endif
 else
 # Useful for finding scripts etc that aren't passing or setting TARGET_RELEASE
 ifneq ($(FAIL_IF_NO_RELEASE_CONFIG),)
     $(error FAIL_IF_NO_RELEASE_CONFIG was set and TARGET_RELEASE was not)
 endif
-_release_config_files :=
+flag_value_files :=
 endif
 
-# Unset variables so they can't use it
+# Unset variables so they can't use them
 define declare-release-config
 $(error declare-release-config can only be called from inside release_config_map.mk files)
 endef
@@ -96,6 +104,7 @@
 endif
 .KATI_READONLY := TARGET_RELEASE
 
+
 $(foreach config, $(_all_release_configs), \
     $(eval _all_release_configs.$(config).DECLARED_IN:= ) \
     $(eval _all_release_configs.$(config).FILES:= ) \
@@ -103,120 +112,53 @@
 _all_release_configs:=
 config_map_files:=
 
+
 # -----------------------------------------------------------------
-# Declare the flags
+# Flag declarations and values
+# -----------------------------------------------------------------
+# This part is in starlark.  We generate a root starlark file that loads
+# all of the flag declaration files that we found, and the flag_value_files
+# that we chose from the config map above.  Then we run that, and load the
+# results of that into the make environment.
 
-# $1 partition(s)
-# $2 flag name. Must start with RELEASE_
-# $3 default. True or false
-define declare-build-flag
-    $(if $(filter-out all $(_FLAG_PARTITIONS), $(strip $(1))), \
-        $(error declare-build-flag: invalid partitions: $(strip $(1))) \
-    )
-    $(if $(and $(filter all,$(strip $(1))),$(filter-out all, $(strip $(1)))), \
-        $(error declare-build-flag: "all" can't be combined with other partitions: $(strip $(1))), \
-        $(eval declare-build-flag.partition := $(_FLAG_PARTITIONS)) \
-    )
-    $(if $(filter-out RELEASE_%, $(strip $(2))), \
-        $(error declare-build-flag: Release flag names must start with RELEASE_: $(strip $(2))) \
-    )
-    $(eval _ALL_RELEASE_FLAGS += $(strip $(2)))
-    $(foreach partition, $(declare-build-flag.partition), \
-        $(eval _ALL_RELEASE_FLAGS.PARTITIONS.$(partition) := $(sort \
-            $(_ALL_RELEASE_FLAGS.PARTITIONS.$(partition)) $(strip $(2)))) \
-    )
-    $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).PARTITIONS := $(declare-build-flag.partition))
-    $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).DEFAULT := $(strip $(3)))
-    $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).DECLARED_IN := $(_included))
-    $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).VALUE := $(strip $(3)))
-    $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).SET_IN := $(_included))
-    $(eval declare-build-flag.partition:=)
-endef
-
-
-# Choose the files
 # If this is a google source tree, restrict it to only the one file
 # which has OWNERS control.  If it isn't let others define their own.
-flag_declaration_files := $(wildcard build/release/build_flags.mk) \
-    $(if $(wildcard vendor/google/release/build_flags.mk), \
-        vendor/google/release/build_flags.mk, \
+# TODO: Remove the wildcard on the build/release path once all branch manifests
+# have been updated.
+flag_declaration_files := $(wildcard build/release/build_flags.bzl) \
+    $(if $(wildcard vendor/google/release/build_flags.bzl), \
+        vendor/google/release/build_flags.bzl, \
         $(sort \
-            $(wildcard device/*/release/build_flags.mk) \
-            $(wildcard device/*/*/release/build_flags.mk) \
-            $(wildcard vendor/*/release/build_flags.mk) \
-            $(wildcard vendor/*/*/release/build_flags.mk) \
+            $(wildcard device/*/release/build_flags.bzl) \
+            $(wildcard device/*/*/release/build_flags.bzl) \
+            $(wildcard vendor/*/release/build_flags.bzl) \
+            $(wildcard vendor/*/*/release/build_flags.bzl) \
         ) \
     )
 
-# Include the files
-$(foreach f, $(flag_declaration_files), \
-    $(eval _included := $(f)) \
-    $(eval include $(f)) \
-)
 
-# Don't let anyone declare build flags after here
-define declare-build-flag
-$(error declare-build-flag can only be called from inside flag definition files.)
-endef
+# Because starlark has no equivalent of $(wildcard), write an entrypoint starlark script that
+# contains the results of the above wildcards for the starlark code to use.
+filename_to_starlark=$(subst /,_,$(subst .,_,$(1)))
+_c:=load("//build/make/core/release_config.bzl", "release_config")
+_c+=$(newline)def add(d, k, v):
+_c+=$(newline)$(space)d = dict(d)
+_c+=$(newline)$(space)d[k] = v
+_c+=$(newline)$(space)return d
+_c+=$(foreach f,$(flag_declaration_files),$(newline)load("$(f)", flags_$(call filename_to_starlark,$(f)) = "flags"))
+_c+=$(newline)all_flags = [] $(foreach f,$(flag_declaration_files),+ [add(x, "declared_in", "$(f)") for x in flags_$(call filename_to_starlark,$(f))])
+_c+=$(foreach f,$(flag_value_files),$(newline)load("//$(f)", values_$(call filename_to_starlark,$(f)) = "values"))
+_c+=$(newline)all_values = [] $(foreach f,$(flag_value_files),+ [add(x, "set_in", "$(f)") for x in values_$(call filename_to_starlark,$(f))])
+_c+=$(newline)variables_to_export_to_make = release_config(all_flags, all_values)
+$(file >$(OUT_DIR)/release_config_entrypoint.bzl,$(_c))
+_c:=
+filename_to_starlark:=
 
-# No more flags from here on
-.KATI_READONLY := _ALL_RELEASE_FLAGS
+# Exclude the entrypoint file as a dependency (by passing it as the 2nd argument) so that we don't
+# rerun kati every build. Kati will replay the $(file) command that generates it every build,
+# updating its timestamp.
+#
+# We also need to pass --allow_external_entrypoint to rbcrun in case the OUT_DIR is set to something
+# outside of the source tree.
+$(call run-starlark,$(OUT_DIR)/release_config_entrypoint.bzl,$(OUT_DIR)/release_config_entrypoint.bzl,--allow_external_entrypoint)
 
-# -----------------------------------------------------------------
-# Set the flags
-
-# $(1): Flag name. Must start with RELEASE_ and have been defined by declare-build-flag
-# $(2): Value. True or false
-define set-build-flag
-    $(if $(filter-out $(_ALL_RELEASE_FLAGS), $(strip $(1))), \
-        $(error set-build-flag: Undeclared build flag: $(strip $(1))) \
-    )
-    $(eval _ALL_RELEASE_FLAGS.$(strip $(1)).VALUE := $(strip $(2)))
-    $(eval _ALL_RELEASE_FLAGS.$(strip $(1)).SET_IN := $(_included))
-endef
-
-# This writes directly to a file so that the version never exists in make for
-# people to write conditionals upon.
-define set-release-version
-    $(eval _RELEASE_VERSION := $(strip $(1)))
-endef
-
-# Include the files (if there are any)
-ifneq ($(strip $(_release_config_files)),)
-    $(foreach f, $(_release_config_files), \
-        $(eval _included := $(f)) \
-        $(eval include $(f)) \
-    )
-else
-    # No TARGET_RELEASE means release version 0
-    $(call set-release-version, 0)
-endif
-
-
-ifeq ($(_RELEASE_VERSION)),)
-    $(error No release config file called set-release-version. Included files were: $(_release_config_files))
-endif
-
-# Don't let anyone declare build flags after here
-define set-build-flag
-$(error set-build-flag can only be called from inside release config files.)
-endef
-
-# Don't let anyone set the release version after here
-define set-release-version
-$(error set-release-version can only be called from inside release config files.)
-endef
-
-# Set the flag values, and don't allow any one to modify them.
-$(foreach flag, $(_ALL_RELEASE_FLAGS), \
-    $(eval $(flag) := $(_ALL_RELEASE_FLAGS.$(flag).VALUE)) \
-    $(eval .KATI_READONLY := $(flag)) \
-)
-
-
-# -----------------------------------------------------------------
-# Clear out vars
-flag_declaration_files:=
-flag_files:=
-_included:=
-_release_config_files:=
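
For reference, the entrypoint written by the $(file ...) call above has roughly the
following shape. The paths are hypothetical examples (one load() is emitted per
wildcard match), the real file uses single-space indentation, and each path is
mangled into a load alias by replacing '.' and '/' with '_':

    # Sketch of $(OUT_DIR)/release_config_entrypoint.bzl (paths are examples only).
    load("//build/make/core/release_config.bzl", "release_config")

    # Helper emitted by the makefile: copy a dict and tag it with one extra key.
    def add(d, k, v):
        d = dict(d)
        d[k] = v
        return d

    # One load() per flag declaration file found by the wildcards above.
    load("build/release/build_flags.bzl", flags_build_release_build_flags_bzl = "flags")
    all_flags = [] + [add(x, "declared_in", "build/release/build_flags.bzl")
                      for x in flags_build_release_build_flags_bzl]

    # One load() per value file chosen via the release config map (path is hypothetical).
    load("//device/acme/release/trunk.bzl", values_device_acme_release_trunk_bzl = "values")
    all_values = [] + [add(x, "set_in", "device/acme/release/trunk.bzl")
                       for x in values_device_acme_release_trunk_bzl]

    variables_to_export_to_make = release_config(all_flags, all_values)

The generated file interleaves load() statements with other statements, which the
rbcrun dialect invoked by run-starlark accepts; release_config() itself lives in
build/make/core/release_config.bzl and is not part of this change.
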
diff --git a/core/sbom.mk b/core/sbom.mk
index e23bbc1..39c251a 100644
--- a/core/sbom.mk
+++ b/core/sbom.mk
@@ -3,9 +3,20 @@
 # unless a .mk file changes its installed file after including base_rules.mk.
 
 ifdef my_register_name
+  # ALL_INSTALLED_FILES.$(installed_file).STATIC_LIBRARIES: list of module names of static libraries, e.g. libc++demangle libclang_rt.builtins, for the primary arch
+  # ALL_INSTALLED_FILES.$(installed_file).WHOLE_STATIC_LIBRARIES: list of module names of whole static libraries, e.g. libc++demangle_32 libclang_rt.builtins_32, for the 2nd arch.
   ifneq (, $(strip $(ALL_MODULES.$(my_register_name).INSTALLED)))
     $(foreach installed_file,$(ALL_MODULES.$(my_register_name).INSTALLED),\
       $(eval ALL_INSTALLED_FILES.$(installed_file) := $(my_register_name))\
+      $(eval ALL_INSTALLED_FILES.$(installed_file).STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_STATIC_LIBRARIES))),$l$(if $(LOCAL_2ND_ARCH_VAR_PREFIX),$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))))\
+      $(eval ALL_INSTALLED_FILES.$(installed_file).WHOLE_STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_WHOLE_STATIC_LIBRARIES))),$l$(if $(LOCAL_2ND_ARCH_VAR_PREFIX),$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))))\
     )
   endif
+  ifeq (STATIC_LIBRARIES,$(LOCAL_MODULE_CLASS))
+  ALL_STATIC_LIBRARIES.$(my_register_name).STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_STATIC_LIBRARIES))),$l$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))
+  ALL_STATIC_LIBRARIES.$(my_register_name).WHOLE_STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_WHOLE_STATIC_LIBRARIES))),$l$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))
+  ifdef LOCAL_SOONG_MODULE_TYPE
+    ALL_STATIC_LIBRARIES.$(my_register_name).BUILT_FILE := $(LOCAL_PREBUILT_MODULE_FILE)
+  endif
+  endif
 endif
\ No newline at end of file
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 6383393..0d5799c 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -116,8 +116,8 @@
 $(call add_json_list, HWASanIncludePaths,                $(HWASAN_INCLUDE_PATHS) $(PRODUCT_HWASAN_INCLUDE_PATHS))
 
 $(call add_json_list, MemtagHeapExcludePaths,            $(MEMTAG_HEAP_EXCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS))
-$(call add_json_list, MemtagHeapAsyncIncludePaths,       $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS))
-$(call add_json_list, MemtagHeapSyncIncludePaths,        $(MEMTAG_HEAP_SYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS))
+$(call add_json_list, MemtagHeapAsyncIncludePaths,       $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(if $(filter true,$(PRODUCT_MEMTAG_HEAP_SKIP_DEFAULT_PATHS)),,$(PRODUCT_MEMTAG_HEAP_ASYNC_DEFAULT_INCLUDE_PATHS)))
+$(call add_json_list, MemtagHeapSyncIncludePaths,        $(MEMTAG_HEAP_SYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS) $(if $(filter true,$(PRODUCT_MEMTAG_HEAP_SKIP_DEFAULT_PATHS)),,$(PRODUCT_MEMTAG_HEAP_SYNC_DEFAULT_INCLUDE_PATHS)))
 
 $(call add_json_bool, DisableScudo,                      $(filter true,$(PRODUCT_DISABLE_SCUDO)))
 
@@ -226,7 +226,6 @@
 $(call add_json_str,  TotSepolicyVersion,                $(TOT_SEPOLICY_VERSION))
 $(call add_json_list, PlatformSepolicyCompatVersions,    $(PLATFORM_SEPOLICY_COMPAT_VERSIONS))
 
-$(call add_json_bool, Flatten_apex,                      $(filter true,$(TARGET_FLATTEN_APEX)))
 $(call add_json_bool, ForceApexSymlinkOptimization,      $(filter true,$(TARGET_FORCE_APEX_SYMLINK_OPTIMIZATION)))
 
 $(call add_json_str,  DexpreoptGlobalConfig,             $(DEX_PREOPT_CONFIG))
@@ -265,8 +264,6 @@
 $(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(filter true,$(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY)))
 $(call add_json_list, InterPartitionJavaLibraryAllowList, $(PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST))
 
-$(call add_json_bool, InstallExtraFlattenedApexes, $(PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES))
-
 $(call add_json_bool, CompressedApex, $(filter true,$(PRODUCT_COMPRESSED_APEX)))
 
 ifndef APEX_BUILD_FOR_PRE_S_DEVICES
@@ -319,7 +316,7 @@
 $(call add_json_list, BuildVersionTags,    $(BUILD_VERSION_TAGS))
 
 $(call add_json_str, ReleaseVersion,    $(_RELEASE_VERSION))
-$(call add_json_list, ReleaseDeviceConfigValueSets,    $(RELEASE_DEVICE_CONFIG_VALUE_SETS))
+$(call add_json_list, ReleaseAconfigValueSets,    $(RELEASE_ACONFIG_VALUE_SETS))
 
 $(call json_end)
 
diff --git a/core/sysprop.mk b/core/sysprop.mk
index cf4b1f6..a2296a8 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -174,7 +174,7 @@
   ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
     BF_BUILD_NUMBER := $(BUILD_USERNAME)$$($(DATE_FROM_FILE) +%m%d%H%M)
   else
-    BF_BUILD_NUMBER := $$(cat $(SOONG_OUT_DIR)/build_hostname.txt)
+    BF_BUILD_NUMBER := $(BUILD_NUMBER_FROM_FILE)
   endif
   BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
 endif
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index ff9eb09..c95f6e7 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -50,7 +50,8 @@
 	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
 	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
-	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list
+	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list \
+	  -sha256
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \
diff --git a/core/tasks/sts-lite.mk b/core/tasks/sts-lite.mk
index dee25d4..65c65c3 100644
--- a/core/tasks/sts-lite.mk
+++ b/core/tasks/sts-lite.mk
@@ -29,7 +29,8 @@
 	$(ZIP2ZIP) -i $(STS_LITE_ZIP) -o $(STS_LITE_ZIP)_filtered \
 		-x android-sts-lite/tools/sts-tradefed-tests.jar \
 		'android-sts-lite/tools/*:sts-test/libs/' \
-		'android-sts-lite/testcases/*:sts-test/utils/'
+		'android-sts-lite/testcases/*:sts-test/utils/' \
+		'android-sts-lite/jdk/**/*:sts-test/jdk/'
 	$(MERGE_ZIPS) $@ $(STS_LITE_ZIP)_filtered $(STS_SDK_SAMPLES)
 	rm -f $(STS_LITE_ZIP)_filtered
 
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index dd2305e..4e78d89 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -36,6 +36,7 @@
   $(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed).jar \
   $(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed)-tests.jar \
   $(HOST_OUT_EXECUTABLES)/$(test_suite_tradefed) \
+  $(HOST_OUT_EXECUTABLES)/test-utils-script \
   $(test_suite_readme)
 
 $(foreach f,$(test_suite_readme),$(if $(strip $(ALL_TARGETS.$(f).META_LIC)),,$(eval ALL_TARGETS.$(f).META_LIC := $(module_license_metadata))))
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index f9175e45..c107254 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,8 +40,7 @@
   include $(INTERNAL_BUILD_ID_MAKEFILE)
 endif
 
-DEFAULT_PLATFORM_VERSION := VP1A
-.KATI_READONLY := DEFAULT_PLATFORM_VERSION
+# Set release configuration. The default resides in build/release/build_flags.mk.
 MIN_PLATFORM_VERSION := UP1A
 MAX_PLATFORM_VERSION := VP1A
 
@@ -104,7 +103,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2023-05-05
+    PLATFORM_SECURITY_PATCH := 2023-06-05
 endif
 
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/core/version_util.mk b/core/version_util.mk
index d4ce113..d3fcdc2 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -14,17 +14,17 @@
 # limitations under the License.
 #
 
-#
-
 ALLOWED_VERSIONS := $(call allowed-platform-versions,\
   $(MIN_PLATFORM_VERSION),\
   $(MAX_PLATFORM_VERSION),\
-  $(DEFAULT_PLATFORM_VERSION))
+  $(RELEASE_PLATFORM_VERSION))
 
-ifndef TARGET_PLATFORM_VERSION
-  TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
+ifdef TARGET_PLATFORM_VERSION
+  $(error Do not set TARGET_PLATFORM_VERSION directly. Use RELEASE_PLATFORM_VERSION. value: $(TARGET_PLATFORM_VERSION))
 endif
 
+TARGET_PLATFORM_VERSION := $(RELEASE_PLATFORM_VERSION)
+
 ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
   $(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
   $(error $(ALLOWED_VERSIONS))
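
With this change TARGET_PLATFORM_VERSION is derived from RELEASE_PLATFORM_VERSION,
which now has to be supplied by the starlark flag files selected through the release
config machinery above. The schema of those files is defined by
build/make/core/release_config.bzl, which is not part of this change; the sketch
below only illustrates the general shape, and the field names are assumptions:

    # Hypothetical build_flags.bzl fragment: "flags" is the list the generated
    # entrypoint loads and tags with "declared_in". Field names are assumed.
    flags = [
        {
            "name": "RELEASE_PLATFORM_VERSION",
            "partitions": ["all"],
            "default": "UP1A",
        },
    ]

    # Hypothetical release config value file: "values" is the list the entrypoint
    # loads and tags with "set_in". Field names are assumed.
    values = [
        {
            "name": "RELEASE_PLATFORM_VERSION",
            "value": "VP1A",
        },
    ]

Each entry must be a dict, since the generated add() helper calls dict() on it, and
the value chosen for this particular flag must still be one of ALLOWED_VERSIONS
(MIN_PLATFORM_VERSION through MAX_PLATFORM_VERSION), because version_util.mk
validates the result as shown above.
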
diff --git a/envsetup.sh b/envsetup.sh
index d292dbb..8b70dc4 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -62,8 +62,8 @@
               invocations of 'm' etc.
 - tapas:      tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
               Sets up the build environment for building unbundled apps (APKs).
-- banchan:    banchan <module1> [<module2> ...] [arm|x86|arm64|x86_64|arm64_only|x86_64only] \
-                      [eng|userdebug|user]
+- banchan:    banchan <module1> [<module2> ...] \
+                      [arm|x86|arm64|riscv64|x86_64|arm64_only|x86_64only] [eng|userdebug|user]
               Sets up the build environment for building unbundled modules (APEXes).
 - croot:      Changes directory to the top of the tree, or a subdirectory thereof.
 - m:          Makes from the top of the tree.
@@ -952,9 +952,9 @@
 function banchan()
 {
     local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
-    local product="$(echo $* | xargs -n 1 echo | \grep -E '^(.*_)?(arm|x86|arm64|x86_64|arm64only|x86_64only)$' | xargs)"
+    local product="$(echo $* | xargs -n 1 echo | \grep -E '^(.*_)?(arm|x86|arm64|riscv64|x86_64|arm64only|x86_64only)$' | xargs)"
     local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
-    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|(.*_)?(arm|x86|arm64|x86_64))$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|(.*_)?(arm|x86|arm64|riscv64|x86_64))$' | xargs)"
 
     if [ "$showHelp" != "" ]; then
       $(gettop)/build/make/banchanHelp.sh
@@ -980,6 +980,7 @@
       arm)    product=module_arm;;
       x86)    product=module_x86;;
       arm64)  product=module_arm64;;
+      riscv64) product=module_riscv64;;
       x86_64) product=module_x86_64;;
       arm64only)  product=module_arm64only;;
       x86_64only) product=module_x86_64only;;
@@ -1357,53 +1358,6 @@
     get_abs_build_var ANDROID_PREBUILTS
 }
 
-function tracedmdump()
-{
-    local T=$(gettop)
-    if [ ! "$T" ]; then
-        echo "Couldn't locate the top of the tree.  Try setting TOP."
-        return
-    fi
-    local prebuiltdir=$(getprebuilt)
-    local arch=$(gettargetarch)
-    local KERNEL=$T/prebuilts/qemu-kernel/$arch/vmlinux-qemu
-
-    local TRACE=$1
-    if [ ! "$TRACE" ] ; then
-        echo "usage:  tracedmdump  tracename"
-        return
-    fi
-
-    if [ ! -r "$KERNEL" ] ; then
-        echo "Error: cannot find kernel: '$KERNEL'"
-        return
-    fi
-
-    local BASETRACE=$(basename $TRACE)
-    if [ "$BASETRACE" = "$TRACE" ] ; then
-        TRACE=$ANDROID_PRODUCT_OUT/traces/$TRACE
-    fi
-
-    echo "post-processing traces..."
-    rm -f $TRACE/qtrace.dexlist
-    post_trace $TRACE
-    if [ $? -ne 0 ]; then
-        echo "***"
-        echo "*** Error: malformed trace.  Did you remember to exit the emulator?"
-        echo "***"
-        return
-    fi
-    echo "generating dexlist output..."
-    /bin/ls $ANDROID_PRODUCT_OUT/system/framework/*.jar $ANDROID_PRODUCT_OUT/system/app/*.apk $ANDROID_PRODUCT_OUT/data/app/*.apk 2>/dev/null | xargs dexlist > $TRACE/qtrace.dexlist
-    echo "generating dmtrace data..."
-    q2dm -r $ANDROID_PRODUCT_OUT/symbols $TRACE $KERNEL $TRACE/dmtrace || return
-    echo "generating html file..."
-    dmtracedump -h $TRACE/dmtrace >| $TRACE/dmtrace.html || return
-    echo "done, see $TRACE/dmtrace.html for details"
-    echo "or run:"
-    echo "    traceview $TRACE/dmtrace"
-}
-
 # communicate with a running device or emulator, set up necessary state,
 # and run the hat command.
 function runhat()
diff --git a/packaging/distdir.mk b/packaging/distdir.mk
index c9508af..153ecf6 100644
--- a/packaging/distdir.mk
+++ b/packaging/distdir.mk
@@ -30,6 +30,7 @@
     $(eval _dist_$$(goal):)))
 
 define copy-one-dist-file
+$(2): .KATI_TAGS += ;rule_name=dist-cp
 $(2): $(1)
 	@echo "Dist: $$@"
 	rm -f $$@
diff --git a/target/board/module_riscv64/BoardConfig.mk b/target/board/module_riscv64/BoardConfig.mk
new file mode 100644
index 0000000..8bc1999
--- /dev/null
+++ b/target/board/module_riscv64/BoardConfig.mk
@@ -0,0 +1,22 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+TARGET_ARCH := riscv64
+TARGET_ARCH_VARIANT :=
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := riscv64
+
+# Temporary hack while prebuilt modules are missing riscv64.
+ALLOW_MISSING_DEPENDENCIES := true
diff --git a/target/board/module_riscv64/README.md b/target/board/module_riscv64/README.md
new file mode 100644
index 0000000..edebaa9
--- /dev/null
+++ b/target/board/module_riscv64/README.md
@@ -0,0 +1,2 @@
+This device is suitable for an unbundled module targeted specifically to a
+riscv64 device. This is a 64-bit only device (no 32-bit support).
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 1e0ce19..473a275 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -35,6 +35,7 @@
 ifneq ($(TARGET_BUILD_APPS),)
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_arm64.mk \
+    $(LOCAL_DIR)/aosp_arm64_fullmte.mk \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/aosp_riscv64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
@@ -46,6 +47,7 @@
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_64bitonly_x86_64.mk \
     $(LOCAL_DIR)/aosp_arm64.mk \
+    $(LOCAL_DIR)/aosp_arm64_fullmte.mk \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/aosp_riscv64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
@@ -81,6 +83,7 @@
     $(LOCAL_DIR)/module_arm.mk \
     $(LOCAL_DIR)/module_arm64.mk \
     $(LOCAL_DIR)/module_arm64only.mk \
+    $(LOCAL_DIR)/module_riscv64.mk \
     $(LOCAL_DIR)/module_x86.mk \
     $(LOCAL_DIR)/module_x86_64.mk \
     $(LOCAL_DIR)/module_x86_64only.mk \
diff --git a/target/product/aosp_arm64_fullmte.mk b/target/product/aosp_arm64_fullmte.mk
new file mode 100644
index 0000000..ed6bd4a
--- /dev/null
+++ b/target/product/aosp_arm64_fullmte.mk
@@ -0,0 +1,27 @@
+# Copyright (C) 2023 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+include $(SRC_TARGET_DIR)/product/fullmte.mk
+
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_arm64.mk)
+
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
+
+PRODUCT_NAME := aosp_arm64_fullmte
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index a23fdd5..7fc33b0 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -70,6 +70,7 @@
     com.android.scheduling \
     com.android.sdkext \
     com.android.tethering \
+    com.android.threadnetwork \
     com.android.tzdata \
     com.android.uwb \
     com.android.virt \
diff --git a/target/product/cfi-common.mk b/target/product/cfi-common.mk
index 11c01a2..5cc7ae5 100644
--- a/target/product/cfi-common.mk
+++ b/target/product/cfi-common.mk
@@ -28,11 +28,12 @@
     hardware/broadcom/wlan/bcmdhd/wpa_supplicant_8_lib \
     hardware/synaptics/wlan/synadhd/wpa_supplicant_8_lib \
     hardware/interfaces/nfc \
+    hardware/qcom/wlan/qcwcn/wpa_supplicant_8_lib \
     hardware/qcom/wlan/legacy/qcwcn/wpa_supplicant_8_lib \
     hardware/qcom/wlan/wcn6740/qcwcn/wpa_supplicant_8_lib \
     hardware/interfaces/keymaster \
     hardware/interfaces/security \
-    packages/modules/Bluetooth/system \
+    packages/modules/Bluetooth \
     system/chre \
     system/core/libnetutils \
     system/libziparchive \
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 107c94f..86d4622 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -33,9 +33,6 @@
 check-vndk-list: ;
 else ifeq ($(TARGET_SKIP_CURRENT_VNDK),true)
 check-vndk-list: ;
-else ifeq ($(BOARD_VNDK_VERSION),)
-# b/143233626 do not check vndk-list when vndk libs are not built
-check-vndk-list: ;
 else
 check-vndk-list: $(check-vndk-list-timestamp)
 ifneq ($(SKIP_ABI_CHECKS),true)
@@ -172,8 +169,6 @@
 #####################################################################
 # VNDK package and snapshot.
 
-ifneq ($(BOARD_VNDK_VERSION),)
-
 include $(CLEAR_VARS)
 LOCAL_MODULE := vndk_package
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
@@ -214,8 +209,6 @@
 
 _vndk_versions :=
 
-endif # BOARD_VNDK_VERSION is set
-
 #####################################################################
 # skip_mount.cfg, read by init to skip mounting some partitions when GSI is used.
 
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 2df85e5..c747d89 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -4,6 +4,7 @@
 LLNDK: libGLESv3.so
 LLNDK: libRS.so
 LLNDK: libandroid_net.so
+LLNDK: libapexsupport.so
 LLNDK: libbinder_ndk.so
 LLNDK: libc.so
 LLNDK: libcgrouprc.so
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 3b97792..e39af92 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -47,17 +47,6 @@
 # Disable the build-time debugfs restrictions on GSI builds
 PRODUCT_SET_DEBUGFS_RESTRICTIONS := false
 
-# GSI targets should install "unflattened" APEXes in /system
-TARGET_FLATTEN_APEX := false
-
-# GSI targets should install "flattened" APEXes in /system_ext as well
-PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES := true
-
-# The flattened version of com.android.apex.cts.shim.v1 should be explicitly installed
-# because the shim apex is prebuilt one and PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES is not
-# supported for prebuilt_apex modules yet.
-PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_with_prebuilts.flattened
-
 # GSI specific tasks on boot
 PRODUCT_PACKAGES += \
     gsi_skip_mount.cfg \
@@ -91,3 +80,6 @@
 # Additional settings used in all GSI builds
 PRODUCT_PRODUCT_PROPERTIES += \
     ro.crypto.metadata_init_delete_all_keys.enabled=false \
+
+# Window Extensions
+$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk)
\ No newline at end of file
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 41233b2..d965367 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -27,6 +27,7 @@
 $(call inherit-product-if-exists, external/google-fonts/source-sans-pro/fonts.mk)
 $(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
 $(call inherit-product-if-exists, external/roboto-fonts/fonts.mk)
+$(call inherit-product-if-exists, external/roboto-flex-fonts/fonts.mk)
 $(call inherit-product-if-exists, external/hyphenation-patterns/patterns.mk)
 $(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
 $(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 79bd74a..38ba219 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -76,3 +76,7 @@
 # Enable CFI for security-sensitive components
 $(call inherit-product, $(SRC_TARGET_DIR)/product/cfi-common.mk)
 $(call inherit-product-if-exists, vendor/google/products/cfi-vendor.mk)
+
+# Enable MTE for security-sensitive components
+$(call inherit-product, $(SRC_TARGET_DIR)/product/memtag-common.mk)
+$(call inherit-product-if-exists, vendor/google/products/memtag-vendor.mk)
diff --git a/target/product/memtag-common.mk b/target/product/memtag-common.mk
new file mode 100644
index 0000000..829cb41
--- /dev/null
+++ b/target/product/memtag-common.mk
@@ -0,0 +1,30 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a recommended set of common components to enable MTE for.
+
+PRODUCT_MEMTAG_HEAP_ASYNC_DEFAULT_INCLUDE_PATHS := \
+    external/android-clat \
+    external/iproute2 \
+    external/iptables \
+    external/mtpd \
+    external/ppp \
+    hardware/st/nfc \
+    hardware/st/secure_element \
+    hardware/st/secure_element2 \
+    packages/modules/StatsD \
+    system/bpf \
+    system/netd/netutil_wrappers \
+    system/netd/server
diff --git a/target/product/module_common.mk b/target/product/module_common.mk
index 84bd799..53b2ca6 100644
--- a/target/product/module_common.mk
+++ b/target/product/module_common.mk
@@ -17,6 +17,7 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/cfi-common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/memtag-common.mk)
 
 # Enables treble, which enabled certain -D compilation flags. In particular, libhidlbase
 # uses -DENFORCE_VINTF_MANIFEST. See b/185759877
diff --git a/target/product/module_riscv64.mk b/target/product/module_riscv64.mk
new file mode 100644
index 0000000..4fd38c0
--- /dev/null
+++ b/target/product/module_riscv64.mk
@@ -0,0 +1,21 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+
+PRODUCT_NAME := module_riscv64
+PRODUCT_DEVICE := module_riscv64
diff --git a/target/product/updatable_apex.mk b/target/product/updatable_apex.mk
index d606e00..c19982b 100644
--- a/target/product/updatable_apex.mk
+++ b/target/product/updatable_apex.mk
@@ -20,7 +20,7 @@
   # com.android.apex.cts.shim.v1_prebuilt overrides CtsShimPrebuilt
   # and CtsShimPrivPrebuilt since they are packaged inside the APEX.
   PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_prebuilt
-  PRODUCT_VENDOR_PROPERTIES := ro.apex.updatable=true
+  PRODUCT_SYSTEM_PROPERTIES := ro.apex.updatable=true
   TARGET_FLATTEN_APEX := false
   # Use compressed apexes in pre-installed partitions.
   # Note: this doesn't mean that all pre-installed apexes will be compressed.
diff --git a/tests/b_tests.sh b/tests/b_tests.sh
index 491d762..68a13e3 100755
--- a/tests/b_tests.sh
+++ b/tests/b_tests.sh
@@ -23,6 +23,10 @@
 
 test_target=//build/bazel/scripts/difftool:difftool
 
+if b build //build/bazel:nonexistent_module &>/dev/null ; then
+    echo "b did not fail when building a nonexistent module" >&2
+    exit 1
+fi
 b build "$test_target"
 b build -- "$test_target"
 b build "$test_target" --run-soong-tests
diff --git a/tests/lunch_tests.sh b/tests/lunch_tests.sh
index 4285d13..9b142ee 100755
--- a/tests/lunch_tests.sh
+++ b/tests/lunch_tests.sh
@@ -28,7 +28,7 @@
     [ "$TARGET_PLATFORM_VERSION" = "$4" ] || ( echo "lunch $1: expected TARGET_PLATFORM_VERSION='$4', got '$TARGET_PLATFORM_VERSION'" && exit 1 )
 )
 
-default_version=$(get_build_var DEFAULT_PLATFORM_VERSION)
+default_version=$(get_build_var RELEASE_PLATFORM_VERSION)
 
 # lunch tests
 check_lunch "aosp_arm64"                                "aosp_arm64" "eng"       ""
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
index 9617e0e..c349907 100644
--- a/tools/aconfig/Android.bp
+++ b/tools/aconfig/Android.bp
@@ -2,6 +2,30 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
+// proto libraries for consumers of `aconfig dump --format=protobuf` output
+
+java_library {
+    name: "libaconfig_java_proto_lite",
+    host_supported: true,
+    srcs: ["protos/aconfig.proto"],
+    static_libs: ["libprotobuf-java-lite"],
+    proto: {
+        type: "lite",
+    },
+    sdk_version: "current",
+}
+
+java_library_host {
+    name: "libaconfig_java_proto_full",
+    srcs: ["protos/aconfig.proto"],
+    static_libs: ["libprotobuf-java-full"],
+    proto: {
+        type: "full",
+    },
+}
+
+// host binary: aconfig
+
 rust_protobuf_host {
     name: "libaconfig_protos",
     protos: ["protos/aconfig.proto"],
@@ -25,6 +49,9 @@
         "libserde_json",
         "libtinytemplate",
     ],
+    proc_macros: [
+        "libpaste",
+    ]
 }
 
 rust_binary_host {
@@ -35,4 +62,51 @@
 rust_test_host {
     name: "aconfig.test",
     defaults: ["aconfig.defaults"],
+    rustlibs: [
+        "libitertools",
+    ],
+}
+
+// integration tests: java
+
+aconfig_declarations {
+    name: "aconfig.test.flags",
+    package: "com.android.aconfig.test",
+    srcs: ["tests/test.aconfig"],
+}
+
+aconfig_values {
+    name: "aconfig.test.flag.values",
+    package: "com.android.aconfig.test",
+    srcs: [
+        "tests/first.values",
+        "tests/second.values",
+    ],
+}
+
+aconfig_value_set {
+    name: "aconfig.test.flag.value_set",
+    values: [
+        "aconfig.test.flag.values",
+    ],
+}
+
+java_aconfig_library {
+    name: "aconfig_test_java",
+    aconfig_declarations: "aconfig.test.flags",
+}
+
+android_test {
+    name: "aconfig.test.java",
+    srcs: [
+        "tests/**/*.java",
+    ],
+    manifest: "tests/AndroidManifest.xml",
+    certificate: "platform",
+    static_libs: [
+        "androidx.test.rules",
+        "testng",
+        "aconfig_test_java",
+    ],
+    test_suites: ["device-tests"],
 }
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
index 8517dd2..941b30d 100644
--- a/tools/aconfig/Cargo.toml
+++ b/tools/aconfig/Cargo.toml
@@ -11,6 +11,7 @@
 [dependencies]
 anyhow = "1.0.69"
 clap = { version = "4.1.8", features = ["derive"] }
+paste = "1.0.11"
 protobuf = "3.2.0"
 serde = { version = "1.0.152", features = ["derive"] }
 serde_json = "1.0.93"
@@ -18,3 +19,6 @@
 
 [build-dependencies]
 protobuf-codegen = "3.2.0"
+
+[dev-dependencies]
+itertools = "0.10.5"
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
new file mode 100644
index 0000000..86124dd
--- /dev/null
+++ b/tools/aconfig/TEST_MAPPING
@@ -0,0 +1,15 @@
+{
+  "presubmit": [
+    {
+      // Ensure changes to the aconfig auto-generated library are compatible with
+      // the test filtering logic. Breakage in this test means all tests
+      // that use the flag annotations to do filtering will be affected.
+      "name": "FlagAnnotationTests",
+      "options": [
+        {
+          "include-filter": "android.cts.flags.tests.FlagAnnotationTest"
+        }
+      ]
+    }
+  ]
+}
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/protos/aconfig.proto
index 9d36a9e..4cad69a 100644
--- a/tools/aconfig/protos/aconfig.proto
+++ b/tools/aconfig/protos/aconfig.proto
@@ -35,20 +35,22 @@
 // aconfig input messages: flag declarations and values
 
 message flag_declaration {
-  required string name = 1;
-  required string description = 2;
+  optional string name = 1;
+  optional string namespace = 2;
+  optional string description = 3;
+  repeated string bug = 4;
 };
 
 message flag_declarations {
-  required string namespace = 1;
+  optional string package = 1;
   repeated flag_declaration flag = 2;
 };
 
 message flag_value {
-  required string namespace = 1;
-  required string name = 2;
-  required flag_state state = 3;
-  required flag_permission permission = 4;
+  optional string package = 1;
+  optional string name = 2;
+  optional flag_state state = 3;
+  optional flag_permission permission = 4;
 };
 
 message flag_values {
@@ -59,18 +61,20 @@
 
 message tracepoint {
   // path to declaration or value file relative to $TOP
-  required string source = 1;
-  required flag_state state = 2;
-  required flag_permission permission = 3;
+  optional string source = 1;
+  optional flag_state state = 2;
+  optional flag_permission permission = 3;
 }
 
 message parsed_flag {
-  required string namespace = 1;
-  required string name = 2;
-  required string description = 3;
-  required flag_state state = 4;
-  required flag_permission permission = 5;
-  repeated tracepoint trace = 6;
+  optional string package = 1;
+  optional string name = 2;
+  optional string namespace = 3;
+  optional string description = 4;
+  repeated string bug = 5;
+  optional flag_state state = 6;
+  optional flag_permission permission = 7;
+  repeated tracepoint trace = 8;
 }
 
 message parsed_flags {
diff --git a/tools/aconfig/src/aconfig.rs b/tools/aconfig/src/aconfig.rs
deleted file mode 100644
index b9fa324..0000000
--- a/tools/aconfig/src/aconfig.rs
+++ /dev/null
@@ -1,289 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-use anyhow::{anyhow, bail, Context, Error, Result};
-use protobuf::{Enum, EnumOrUnknown};
-use serde::{Deserialize, Serialize};
-
-use crate::cache::{Cache, Item, Tracepoint};
-use crate::protos::{
-    ProtoFlagDeclaration, ProtoFlagDeclarations, ProtoFlagPermission, ProtoFlagState,
-    ProtoFlagValue, ProtoFlagValues, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint,
-};
-
-#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
-pub enum FlagState {
-    Enabled,
-    Disabled,
-}
-
-impl TryFrom<EnumOrUnknown<ProtoFlagState>> for FlagState {
-    type Error = Error;
-
-    fn try_from(proto: EnumOrUnknown<ProtoFlagState>) -> Result<Self, Self::Error> {
-        match ProtoFlagState::from_i32(proto.value()) {
-            Some(ProtoFlagState::ENABLED) => Ok(FlagState::Enabled),
-            Some(ProtoFlagState::DISABLED) => Ok(FlagState::Disabled),
-            None => Err(anyhow!("unknown flag state enum value {}", proto.value())),
-        }
-    }
-}
-
-impl From<FlagState> for ProtoFlagState {
-    fn from(state: FlagState) -> Self {
-        match state {
-            FlagState::Enabled => ProtoFlagState::ENABLED,
-            FlagState::Disabled => ProtoFlagState::DISABLED,
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
-pub enum Permission {
-    ReadOnly,
-    ReadWrite,
-}
-
-impl TryFrom<EnumOrUnknown<ProtoFlagPermission>> for Permission {
-    type Error = Error;
-
-    fn try_from(proto: EnumOrUnknown<ProtoFlagPermission>) -> Result<Self, Self::Error> {
-        match ProtoFlagPermission::from_i32(proto.value()) {
-            Some(ProtoFlagPermission::READ_ONLY) => Ok(Permission::ReadOnly),
-            Some(ProtoFlagPermission::READ_WRITE) => Ok(Permission::ReadWrite),
-            None => Err(anyhow!("unknown permission enum value {}", proto.value())),
-        }
-    }
-}
-
-impl From<Permission> for ProtoFlagPermission {
-    fn from(permission: Permission) -> Self {
-        match permission {
-            Permission::ReadOnly => ProtoFlagPermission::READ_ONLY,
-            Permission::ReadWrite => ProtoFlagPermission::READ_WRITE,
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct FlagDeclaration {
-    pub name: String,
-    pub description: String,
-}
-
-impl FlagDeclaration {
-    #[allow(dead_code)] // only used in unit tests
-    pub fn try_from_text_proto(text_proto: &str) -> Result<FlagDeclaration> {
-        let proto: ProtoFlagDeclaration = crate::protos::try_from_text_proto(text_proto)
-            .with_context(|| text_proto.to_owned())?;
-        proto.try_into()
-    }
-}
-
-impl TryFrom<ProtoFlagDeclaration> for FlagDeclaration {
-    type Error = Error;
-
-    fn try_from(proto: ProtoFlagDeclaration) -> Result<Self, Self::Error> {
-        let Some(name) = proto.name else {
-            bail!("missing 'name' field");
-        };
-        let Some(description) = proto.description else {
-            bail!("missing 'description' field");
-        };
-        Ok(FlagDeclaration { name, description })
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct FlagDeclarations {
-    pub namespace: String,
-    pub flags: Vec<FlagDeclaration>,
-}
-
-impl FlagDeclarations {
-    pub fn try_from_text_proto(text_proto: &str) -> Result<FlagDeclarations> {
-        let proto: ProtoFlagDeclarations = crate::protos::try_from_text_proto(text_proto)
-            .with_context(|| text_proto.to_owned())?;
-        let Some(namespace) = proto.namespace else {
-            bail!("missing 'namespace' field");
-        };
-        let mut flags = vec![];
-        for proto_flag in proto.flag.into_iter() {
-            flags.push(proto_flag.try_into()?);
-        }
-        Ok(FlagDeclarations { namespace, flags })
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct FlagValue {
-    pub namespace: String,
-    pub name: String,
-    pub state: FlagState,
-    pub permission: Permission,
-}
-
-impl FlagValue {
-    #[allow(dead_code)] // only used in unit tests
-    pub fn try_from_text_proto(text_proto: &str) -> Result<FlagValue> {
-        let proto: ProtoFlagValue = crate::protos::try_from_text_proto(text_proto)?;
-        proto.try_into()
-    }
-
-    pub fn try_from_text_proto_list(text_proto: &str) -> Result<Vec<FlagValue>> {
-        let proto: ProtoFlagValues = crate::protos::try_from_text_proto(text_proto)?;
-        proto.flag_value.into_iter().map(|proto_flag| proto_flag.try_into()).collect()
-    }
-}
-
-impl TryFrom<ProtoFlagValue> for FlagValue {
-    type Error = Error;
-
-    fn try_from(proto: ProtoFlagValue) -> Result<Self, Self::Error> {
-        let Some(namespace) = proto.namespace else {
-            bail!("missing 'namespace' field");
-        };
-        let Some(name) = proto.name else {
-            bail!("missing 'name' field");
-        };
-        let Some(proto_state) = proto.state else {
-            bail!("missing 'state' field");
-        };
-        let state = proto_state.try_into()?;
-        let Some(proto_permission) = proto.permission else {
-            bail!("missing 'permission' field");
-        };
-        let permission = proto_permission.try_into()?;
-        Ok(FlagValue { namespace, name, state, permission })
-    }
-}
-
-impl From<Cache> for ProtoParsedFlags {
-    fn from(cache: Cache) -> Self {
-        let mut proto = ProtoParsedFlags::new();
-        for item in cache.into_iter() {
-            proto.parsed_flag.push(item.into());
-        }
-        proto
-    }
-}
-
-impl From<Item> for ProtoParsedFlag {
-    fn from(item: Item) -> Self {
-        let mut proto = crate::protos::ProtoParsedFlag::new();
-        proto.set_namespace(item.namespace.to_owned());
-        proto.set_name(item.name.clone());
-        proto.set_description(item.description.clone());
-        proto.set_state(item.state.into());
-        proto.set_permission(item.permission.into());
-        for trace in item.trace.into_iter() {
-            proto.trace.push(trace.into());
-        }
-        proto
-    }
-}
-
-impl From<Tracepoint> for ProtoTracepoint {
-    fn from(tracepoint: Tracepoint) -> Self {
-        let mut proto = ProtoTracepoint::new();
-        proto.set_source(format!("{}", tracepoint.source));
-        proto.set_state(tracepoint.state.into());
-        proto.set_permission(tracepoint.permission.into());
-        proto
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_flag_try_from_text_proto() {
-        let expected = FlagDeclaration {
-            name: "1234".to_owned(),
-            description: "Description of the flag".to_owned(),
-        };
-
-        let s = r#"
-        name: "1234"
-        description: "Description of the flag"
-        "#;
-        let actual = FlagDeclaration::try_from_text_proto(s).unwrap();
-
-        assert_eq!(expected, actual);
-    }
-
-    #[test]
-    fn test_flag_try_from_text_proto_bad_input() {
-        let s = r#"
-        name: "a"
-        "#;
-        let error = FlagDeclaration::try_from_text_proto(s).unwrap_err();
-        assert!(format!("{:?}", error).contains("Message not initialized"));
-
-        let s = r#"
-        description: "Description of the flag"
-        "#;
-        let error = FlagDeclaration::try_from_text_proto(s).unwrap_err();
-        assert!(format!("{:?}", error).contains("Message not initialized"));
-    }
-
-    #[test]
-    fn test_namespace_try_from_text_proto() {
-        let expected = FlagDeclarations {
-            namespace: "ns".to_owned(),
-            flags: vec![
-                FlagDeclaration { name: "a".to_owned(), description: "A".to_owned() },
-                FlagDeclaration { name: "b".to_owned(), description: "B".to_owned() },
-            ],
-        };
-
-        let s = r#"
-        namespace: "ns"
-        flag {
-            name: "a"
-            description: "A"
-        }
-        flag {
-            name: "b"
-            description: "B"
-        }
-        "#;
-        let actual = FlagDeclarations::try_from_text_proto(s).unwrap();
-
-        assert_eq!(expected, actual);
-    }
-
-    #[test]
-    fn test_flag_declaration_try_from_text_proto_list() {
-        let expected = FlagValue {
-            namespace: "ns".to_owned(),
-            name: "1234".to_owned(),
-            state: FlagState::Enabled,
-            permission: Permission::ReadOnly,
-        };
-
-        let s = r#"
-        namespace: "ns"
-        name: "1234"
-        state: ENABLED
-        permission: READ_ONLY
-        "#;
-        let actual = FlagValue::try_from_text_proto(s).unwrap();
-
-        assert_eq!(expected, actual);
-    }
-}
diff --git a/tools/aconfig/src/cache.rs b/tools/aconfig/src/cache.rs
deleted file mode 100644
index 44ad3dd..0000000
--- a/tools/aconfig/src/cache.rs
+++ /dev/null
@@ -1,358 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-use anyhow::{bail, ensure, Result};
-use serde::{Deserialize, Serialize};
-use std::io::{Read, Write};
-
-use crate::aconfig::{FlagDeclaration, FlagState, FlagValue, Permission};
-use crate::codegen;
-use crate::commands::Source;
-
-const DEFAULT_FLAG_STATE: FlagState = FlagState::Disabled;
-const DEFAULT_FLAG_PERMISSION: Permission = Permission::ReadWrite;
-
-#[derive(Serialize, Deserialize, Debug)]
-pub struct Tracepoint {
-    pub source: Source,
-    pub state: FlagState,
-    pub permission: Permission,
-}
-
-#[derive(Serialize, Deserialize, Debug)]
-pub struct Item {
-    // TODO: duplicating the Cache.namespace as Item.namespace makes the internal representation
-    // closer to the proto message `parsed_flag`; hopefully this will enable us to replace the Item
-    // struct and use a newtype instead once aconfig has matured. Until then, namespace should
-    // really be a Cow<String>.
-    pub namespace: String,
-    pub name: String,
-    pub description: String,
-    pub state: FlagState,
-    pub permission: Permission,
-    pub trace: Vec<Tracepoint>,
-}
-
-#[derive(Serialize, Deserialize, Debug)]
-pub struct Cache {
-    namespace: String,
-    items: Vec<Item>,
-}
-
-// TODO: replace this function with Iterator.is_sorted_by_key(...)) when that API becomes stable
-fn iter_is_sorted_by_key<'a, T: 'a, F, K>(iter: impl Iterator<Item = &'a T>, f: F) -> bool
-where
-    F: FnMut(&'a T) -> K,
-    K: PartialOrd<K>,
-{
-    let mut last: Option<K> = None;
-    for current in iter.map(f) {
-        if let Some(l) = last {
-            if l > current {
-                return false;
-            }
-        }
-        last = Some(current);
-    }
-    true
-}
-
-impl Cache {
-    pub fn read_from_reader(reader: impl Read) -> Result<Cache> {
-        let cache: Cache = serde_json::from_reader(reader)?;
-        ensure!(
-            iter_is_sorted_by_key(cache.iter(), |item| &item.name),
-            "internal error: flags in cache file not sorted"
-        );
-        Ok(cache)
-    }
-
-    pub fn write_to_writer(&self, writer: impl Write) -> Result<()> {
-        ensure!(
-            iter_is_sorted_by_key(self.iter(), |item| &item.name),
-            "internal error: flags in cache file not sorted"
-        );
-        serde_json::to_writer(writer, self).map_err(|e| e.into())
-    }
-
-    pub fn iter(&self) -> impl Iterator<Item = &Item> {
-        self.items.iter()
-    }
-
-    pub fn into_iter(self) -> impl Iterator<Item = Item> {
-        self.items.into_iter()
-    }
-
-    pub fn namespace(&self) -> &str {
-        debug_assert!(!self.namespace.is_empty());
-        &self.namespace
-    }
-}
-
-#[derive(Debug)]
-pub struct CacheBuilder {
-    cache: Cache,
-}
-
-impl CacheBuilder {
-    pub fn new(namespace: String) -> Result<CacheBuilder> {
-        ensure!(codegen::is_valid_identifier(&namespace), "bad namespace");
-        let cache = Cache { namespace, items: vec![] };
-        Ok(CacheBuilder { cache })
-    }
-
-    pub fn add_flag_declaration(
-        &mut self,
-        source: Source,
-        declaration: FlagDeclaration,
-    ) -> Result<&mut CacheBuilder> {
-        ensure!(codegen::is_valid_identifier(&declaration.name), "bad flag name");
-        ensure!(!declaration.description.is_empty(), "empty flag description");
-        ensure!(
-            self.cache.items.iter().all(|item| item.name != declaration.name),
-            "failed to declare flag {} from {}: flag already declared",
-            declaration.name,
-            source
-        );
-        self.cache.items.push(Item {
-            namespace: self.cache.namespace.clone(),
-            name: declaration.name.clone(),
-            description: declaration.description,
-            state: DEFAULT_FLAG_STATE,
-            permission: DEFAULT_FLAG_PERMISSION,
-            trace: vec![Tracepoint {
-                source,
-                state: DEFAULT_FLAG_STATE,
-                permission: DEFAULT_FLAG_PERMISSION,
-            }],
-        });
-        Ok(self)
-    }
-
-    pub fn add_flag_value(
-        &mut self,
-        source: Source,
-        value: FlagValue,
-    ) -> Result<&mut CacheBuilder> {
-        ensure!(codegen::is_valid_identifier(&value.namespace), "bad flag namespace");
-        ensure!(codegen::is_valid_identifier(&value.name), "bad flag name");
-        ensure!(
-            value.namespace == self.cache.namespace,
-            "failed to set values for flag {}/{} from {}: expected namespace {}",
-            value.namespace,
-            value.name,
-            source,
-            self.cache.namespace
-        );
-        let Some(existing_item) = self.cache.items.iter_mut().find(|item| item.name == value.name) else {
-            bail!("failed to set values for flag {}/{} from {}: flag not declared", value.namespace, value.name, source);
-        };
-        existing_item.state = value.state;
-        existing_item.permission = value.permission;
-        existing_item.trace.push(Tracepoint {
-            source,
-            state: value.state,
-            permission: value.permission,
-        });
-        Ok(self)
-    }
-
-    pub fn build(mut self) -> Cache {
-        self.cache.items.sort_by_cached_key(|item| item.name.clone());
-        self.cache
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::aconfig::{FlagState, Permission};
-
-    #[test]
-    fn test_add_flag_declaration() {
-        let mut builder = CacheBuilder::new("ns".to_string()).unwrap();
-        builder
-            .add_flag_declaration(
-                Source::File("first.txt".to_string()),
-                FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
-            )
-            .unwrap();
-        let error = builder
-            .add_flag_declaration(
-                Source::File("second.txt".to_string()),
-                FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
-            )
-            .unwrap_err();
-        assert_eq!(
-            &format!("{:?}", error),
-            "failed to declare flag foo from second.txt: flag already declared"
-        );
-        builder
-            .add_flag_declaration(
-                Source::File("first.txt".to_string()),
-                FlagDeclaration { name: "bar".to_string(), description: "desc".to_string() },
-            )
-            .unwrap();
-
-        let cache = builder.build();
-
-        // check flags are sorted by name
-        assert_eq!(
-            cache.into_iter().map(|item| item.name).collect::<Vec<_>>(),
-            vec!["bar".to_string(), "foo".to_string()]
-        );
-    }
-
-    #[test]
-    fn test_add_flag_value() {
-        let mut builder = CacheBuilder::new("ns".to_string()).unwrap();
-        let error = builder
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: "ns".to_string(),
-                    name: "foo".to_string(),
-                    state: FlagState::Enabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap_err();
-        assert_eq!(
-            &format!("{:?}", error),
-            "failed to set values for flag ns/foo from <memory>: flag not declared"
-        );
-
-        builder
-            .add_flag_declaration(
-                Source::File("first.txt".to_string()),
-                FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
-            )
-            .unwrap();
-
-        builder
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: "ns".to_string(),
-                    name: "foo".to_string(),
-                    state: FlagState::Disabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap();
-
-        builder
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: "ns".to_string(),
-                    name: "foo".to_string(),
-                    state: FlagState::Enabled,
-                    permission: Permission::ReadWrite,
-                },
-            )
-            .unwrap();
-
-        // different namespace -> no-op
-        let error = builder
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: "some_other_namespace".to_string(),
-                    name: "foo".to_string(),
-                    state: FlagState::Enabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap_err();
-        assert_eq!(&format!("{:?}", error), "failed to set values for flag some_other_namespace/foo from <memory>: expected namespace ns");
-
-        let cache = builder.build();
-        let item = cache.iter().find(|&item| item.name == "foo").unwrap();
-        assert_eq!(FlagState::Enabled, item.state);
-        assert_eq!(Permission::ReadWrite, item.permission);
-    }
-
-    #[test]
-    fn test_reject_empty_cache_namespace() {
-        CacheBuilder::new("".to_string()).unwrap_err();
-    }
-
-    #[test]
-    fn test_reject_empty_flag_declaration_fields() {
-        let mut builder = CacheBuilder::new("ns".to_string()).unwrap();
-
-        let error = builder
-            .add_flag_declaration(
-                Source::Memory,
-                FlagDeclaration { name: "".to_string(), description: "Description".to_string() },
-            )
-            .unwrap_err();
-        assert_eq!(&format!("{:?}", error), "bad flag name");
-
-        let error = builder
-            .add_flag_declaration(
-                Source::Memory,
-                FlagDeclaration { name: "foo".to_string(), description: "".to_string() },
-            )
-            .unwrap_err();
-        assert_eq!(&format!("{:?}", error), "empty flag description");
-    }
-
-    #[test]
-    fn test_reject_empty_flag_value_files() {
-        let mut builder = CacheBuilder::new("ns".to_string()).unwrap();
-        builder
-            .add_flag_declaration(
-                Source::Memory,
-                FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
-            )
-            .unwrap();
-
-        let error = builder
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: "".to_string(),
-                    name: "foo".to_string(),
-                    state: FlagState::Enabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap_err();
-        assert_eq!(&format!("{:?}", error), "bad flag namespace");
-
-        let error = builder
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: "ns".to_string(),
-                    name: "".to_string(),
-                    state: FlagState::Enabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap_err();
-        assert_eq!(&format!("{:?}", error), "bad flag name");
-    }
-
-    #[test]
-    fn test_iter_is_sorted_by_key() {
-        assert!(iter_is_sorted_by_key(["a", "b", "c"].iter(), |s| s));
-        assert!(iter_is_sorted_by_key(Vec::<&str>::new().iter(), |s| s));
-        assert!(!iter_is_sorted_by_key(["a", "c", "b"].iter(), |s| s));
-    }
-}
diff --git a/tools/aconfig/src/codegen.rs b/tools/aconfig/src/codegen.rs
index b60ec51..b7fb08f 100644
--- a/tools/aconfig/src/codegen.rs
+++ b/tools/aconfig/src/codegen.rs
@@ -14,8 +14,13 @@
  * limitations under the License.
  */
 
-pub fn is_valid_identifier(s: &str) -> bool {
-    // Identifiers must match [a-z][a-z0-9_]*
+use anyhow::{ensure, Result};
+
+pub fn is_valid_name_ident(s: &str) -> bool {
+    // Identifiers must match [a-z][a-z0-9_]*, except consecutive underscores are not allowed
+    if s.contains("__") {
+        return false;
+    }
     let mut chars = s.chars();
     let Some(first) = chars.next() else {
         return false;
@@ -26,18 +31,64 @@
     chars.all(|ch| ch.is_ascii_lowercase() || ch.is_ascii_digit() || ch == '_')
 }
 
+pub fn is_valid_package_ident(s: &str) -> bool {
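+    // A valid package is two or more valid name idents joined by '.', e.g. "com.android.foo".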
+    if !s.contains('.') {
+        return false;
+    }
+    s.split('.').all(is_valid_name_ident)
+}
+
+pub fn create_device_config_ident(package: &str, flag_name: &str) -> Result<String> {
+    ensure!(is_valid_package_ident(package), "bad package");
+    ensure!(is_valid_name_ident(flag_name), "bad flag name");
+    Ok(format!("{}.{}", package, flag_name))
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
 
     #[test]
-    fn test_is_valid_identifier() {
-        assert!(is_valid_identifier("foo"));
-        assert!(is_valid_identifier("foo_bar_123"));
+    fn test_is_valid_name_ident() {
+        assert!(is_valid_name_ident("foo"));
+        assert!(is_valid_name_ident("foo_bar_123"));
+        assert!(is_valid_name_ident("foo_"));
 
-        assert!(!is_valid_identifier(""));
-        assert!(!is_valid_identifier("123_foo"));
-        assert!(!is_valid_identifier("foo-bar"));
-        assert!(!is_valid_identifier("foo-b\u{00e5}r"));
+        assert!(!is_valid_name_ident(""));
+        assert!(!is_valid_name_ident("123_foo"));
+        assert!(!is_valid_name_ident("foo-bar"));
+        assert!(!is_valid_name_ident("foo-b\u{00e5}r"));
+        assert!(!is_valid_name_ident("foo__bar"));
+        assert!(!is_valid_name_ident("_foo"));
+    }
+
+    #[test]
+    fn test_is_valid_package_ident() {
+        assert!(is_valid_package_ident("foo.bar"));
+        assert!(is_valid_package_ident("foo.bar_baz"));
+        assert!(is_valid_package_ident("foo.bar.a123"));
+
+        assert!(!is_valid_package_ident("foo_bar_123"));
+        assert!(!is_valid_package_ident("foo"));
+        assert!(!is_valid_package_ident("foo._bar"));
+        assert!(!is_valid_package_ident(""));
+        assert!(!is_valid_package_ident("123_foo"));
+        assert!(!is_valid_package_ident("foo-bar"));
+        assert!(!is_valid_package_ident("foo-b\u{00e5}r"));
+        assert!(!is_valid_package_ident("foo.bar.123"));
+        assert!(!is_valid_package_ident(".foo.bar"));
+        assert!(!is_valid_package_ident("foo.bar."));
+        assert!(!is_valid_package_ident("."));
+        assert!(!is_valid_package_ident(".."));
+        assert!(!is_valid_package_ident("foo..bar"));
+        assert!(!is_valid_package_ident("foo.__bar"));
+    }
+
+    #[test]
+    fn test_create_device_config_ident() {
+        assert_eq!(
+            "com.foo.bar.some_flag",
+            create_device_config_ident("com.foo.bar", "some_flag").unwrap()
+        );
     }
 }
diff --git a/tools/aconfig/src/codegen_cpp.rs b/tools/aconfig/src/codegen_cpp.rs
index 2aeea6a..a802725 100644
--- a/tools/aconfig/src/codegen_cpp.rs
+++ b/tools/aconfig/src/codegen_cpp.rs
@@ -14,201 +14,436 @@
  * limitations under the License.
  */
 
-use anyhow::Result;
+use anyhow::{ensure, Result};
 use serde::Serialize;
+use std::path::PathBuf;
 use tinytemplate::TinyTemplate;
 
-use crate::aconfig::{FlagState, Permission};
-use crate::cache::{Cache, Item};
-use crate::commands::OutputFile;
+use crate::codegen;
+use crate::commands::{CodegenMode, OutputFile};
+use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
 
-pub fn generate_cpp_code(cache: &Cache) -> Result<OutputFile> {
-    let class_elements: Vec<ClassElement> = cache.iter().map(create_class_element).collect();
+pub fn generate_cpp_code<'a, I>(
+    package: &str,
+    parsed_flags_iter: I,
+    codegen_mode: CodegenMode,
+) -> Result<Vec<OutputFile>>
+where
+    I: Iterator<Item = &'a ProtoParsedFlag>,
+{
+    let class_elements: Vec<ClassElement> =
+        parsed_flags_iter.map(|pf| create_class_element(package, pf)).collect();
     let readwrite = class_elements.iter().any(|item| item.readwrite);
-    let namespace = cache.namespace().to_lowercase();
-    let context = Context { namespace: namespace.clone(), readwrite, class_elements };
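+    // Derive C++ naming from the aconfig package: "com.android.foo" yields header base name
+    // "com_android_foo" and C++ namespace "com::android::foo".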
+    let header = package.replace('.', "_");
+    let cpp_namespace = package.replace('.', "::");
+    ensure!(codegen::is_valid_name_ident(&header), "bad package: {}", package);
+    let context = Context {
+        header: header.clone(),
+        cpp_namespace,
+        package: package.to_string(),
+        readwrite,
+        for_prod: codegen_mode == CodegenMode::Production,
+        class_elements,
+    };
+
+    let files = [
+        FileSpec {
+            name: &format!("{}.h", header),
+            template: include_str!("../templates/cpp_exported_header.template"),
+            dir: "include",
+        },
+        FileSpec {
+            name: &format!("{}.cc", header),
+            template: include_str!("../templates/cpp_source_file.template"),
+            dir: "",
+        },
+        FileSpec {
+            name: &format!("{}_flag_provider.h", header),
+            template: match codegen_mode {
+                CodegenMode::Production => {
+                    include_str!("../templates/cpp_prod_flag_provider.template")
+                }
+                CodegenMode::Test => include_str!("../templates/cpp_test_flag_provider.template"),
+            },
+            dir: "",
+        },
+    ];
+    files.iter().map(|file| generate_file(file, &context)).collect()
+}
+
+pub fn generate_file(file: &FileSpec, context: &Context) -> Result<OutputFile> {
     let mut template = TinyTemplate::new();
-    template.add_template("cpp_code_gen", include_str!("../templates/cpp.template"))?;
-    let contents = template.render("cpp_code_gen", &context)?;
-    let path = ["aconfig", &(namespace + ".h")].iter().collect();
+    template.add_template(file.name, file.template)?;
+    let contents = template.render(file.name, &context)?;
+    let path: PathBuf = [&file.dir, &file.name].iter().collect();
     Ok(OutputFile { contents: contents.into(), path })
 }
 
 #[derive(Serialize)]
-struct Context {
-    pub namespace: String,
+pub struct FileSpec<'a> {
+    pub name: &'a str,
+    pub template: &'a str,
+    pub dir: &'a str,
+}
+
+#[derive(Serialize)]
+pub struct Context {
+    pub header: String,
+    pub cpp_namespace: String,
+    pub package: String,
     pub readwrite: bool,
+    pub for_prod: bool,
     pub class_elements: Vec<ClassElement>,
 }
 
 #[derive(Serialize)]
-struct ClassElement {
+pub struct ClassElement {
     pub readwrite: bool,
     pub default_value: String,
     pub flag_name: String,
+    pub uppercase_flag_name: String,
+    pub device_config_namespace: String,
+    pub device_config_flag: String,
 }
 
-fn create_class_element(item: &Item) -> ClassElement {
+fn create_class_element(package: &str, pf: &ProtoParsedFlag) -> ClassElement {
     ClassElement {
-        readwrite: item.permission == Permission::ReadWrite,
-        default_value: if item.state == FlagState::Enabled {
+        readwrite: pf.permission() == ProtoFlagPermission::READ_WRITE,
+        default_value: if pf.state() == ProtoFlagState::ENABLED {
             "true".to_string()
         } else {
             "false".to_string()
         },
-        flag_name: item.name.clone(),
+        flag_name: pf.name().to_string(),
+        uppercase_flag_name: pf.name().to_string().to_ascii_uppercase(),
+        device_config_namespace: pf.namespace().to_string(),
+        device_config_flag: codegen::create_device_config_ident(package, pf.name())
+            .expect("values checked at flag parse time"),
     }
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::aconfig::{FlagDeclaration, FlagState, FlagValue, Permission};
-    use crate::cache::CacheBuilder;
-    use crate::commands::Source;
+    use std::collections::HashMap;
 
-    #[test]
-    fn test_cpp_codegen_build_time_flag_only() {
-        let namespace = "my_namespace";
-        let mut builder = CacheBuilder::new(namespace.to_string()).unwrap();
-        builder
-            .add_flag_declaration(
-                Source::File("aconfig_one.txt".to_string()),
-                FlagDeclaration {
-                    name: "my_flag_one".to_string(),
-                    description: "buildtime disable".to_string(),
-                },
-            )
-            .unwrap()
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: namespace.to_string(),
-                    name: "my_flag_one".to_string(),
-                    state: FlagState::Disabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap()
-            .add_flag_declaration(
-                Source::File("aconfig_two.txt".to_string()),
-                FlagDeclaration {
-                    name: "my_flag_two".to_string(),
-                    description: "buildtime enable".to_string(),
-                },
-            )
-            .unwrap()
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: namespace.to_string(),
-                    name: "my_flag_two".to_string(),
-                    state: FlagState::Enabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap();
-        let cache = builder.build();
-        let expect_content = r#"#ifndef my_namespace_HEADER_H
-        #define my_namespace_HEADER_H
-        #include "my_namespace.h"
+    const EXPORTED_PROD_HEADER_EXPECTED: &str = r#"
+#ifndef com_android_aconfig_test_HEADER_H
+#define com_android_aconfig_test_HEADER_H
 
-        namespace my_namespace {
+#include <string>
+#include <memory>
+#include <server_configurable_flags/get_flags.h>
+using namespace server_configurable_flags;
 
-            class my_flag_one {
-                public:
-                    virtual const bool value() {
-                        return false;
-                    }
-            }
+namespace com::android::aconfig::test {
+class flag_provider_interface {
+public:
 
-            class my_flag_two {
-                public:
-                    virtual const bool value() {
-                        return true;
-                    }
-            }
+    virtual ~flag_provider_interface() = default;
 
+    virtual bool disabled_ro() = 0;
+
+    virtual bool disabled_rw() = 0;
+
+    virtual bool enabled_ro() = 0;
+
+    virtual bool enabled_rw() = 0;
+
+    virtual void override_flag(std::string const&, bool) {}
+
+    virtual void reset_overrides() {}
+};
+
+extern std::unique_ptr<flag_provider_interface> provider_;
+
+extern std::string const DISABLED_RO;
+extern std::string const DISABLED_RW;
+extern std::string const ENABLED_RO;
+extern std::string const ENABLED_RW;
+
+inline bool disabled_ro() {
+    return false;
+}
+
+inline bool disabled_rw() {
+    return provider_->disabled_rw();
+}
+
+inline bool enabled_ro() {
+    return true;
+}
+
+inline bool enabled_rw() {
+    return provider_->enabled_rw();
+}
+
+inline void override_flag(std::string const& name, bool val) {
+    return provider_->override_flag(name, val);
+}
+
+inline void reset_overrides() {
+    return provider_->reset_overrides();
+}
+
+}
+#endif
+"#;
+
+    const EXPORTED_TEST_HEADER_EXPECTED: &str = r#"
+#ifndef com_android_aconfig_test_HEADER_H
+#define com_android_aconfig_test_HEADER_H
+
+#include <string>
+#include <memory>
+#include <server_configurable_flags/get_flags.h>
+using namespace server_configurable_flags;
+
+namespace com::android::aconfig::test {
+class flag_provider_interface {
+public:
+
+    virtual ~flag_provider_interface() = default;
+
+    virtual bool disabled_ro() = 0;
+
+    virtual bool disabled_rw() = 0;
+
+    virtual bool enabled_ro() = 0;
+
+    virtual bool enabled_rw() = 0;
+
+    virtual void override_flag(std::string const&, bool) {}
+
+    virtual void reset_overrides() {}
+};
+
+extern std::unique_ptr<flag_provider_interface> provider_;
+
+extern std::string const DISABLED_RO;
+extern std::string const DISABLED_RW;
+extern std::string const ENABLED_RO;
+extern std::string const ENABLED_RW;
+
+inline bool disabled_ro() {
+    return provider_->disabled_ro();
+}
+
+inline bool disabled_rw() {
+    return provider_->disabled_rw();
+}
+
+inline bool enabled_ro() {
+    return provider_->enabled_ro();
+}
+
+inline bool enabled_rw() {
+    return provider_->enabled_rw();
+}
+
+inline void override_flag(std::string const& name, bool val) {
+    return provider_->override_flag(name, val);
+}
+
+inline void reset_overrides() {
+    return provider_->reset_overrides();
+}
+
+}
+#endif
+"#;
+
+    const PROD_FLAG_PROVIDER_HEADER_EXPECTED: &str = r#"
+#ifndef com_android_aconfig_test_flag_provider_HEADER_H
+#define com_android_aconfig_test_flag_provider_HEADER_H
+
+#include "com_android_aconfig_test.h"
+
+namespace com::android::aconfig::test {
+class flag_provider : public flag_provider_interface {
+public:
+
+    virtual bool disabled_ro() override {
+        return false;
+    }
+
+    virtual bool disabled_rw() override {
+        return GetServerConfigurableFlag(
+            "aconfig_test",
+            "com.android.aconfig.test.disabled_rw",
+            "false") == "true";
+    }
+
+    virtual bool enabled_ro() override {
+        return true;
+    }
+
+    virtual bool enabled_rw() override {
+        return GetServerConfigurableFlag(
+            "aconfig_test",
+            "com.android.aconfig.test.enabled_rw",
+            "true") == "true";
+    }
+};
+}
+#endif
+"#;
+
+    const TEST_FLAG_PROVIDER_HEADER_EXPECTED: &str = r#"
+#ifndef com_android_aconfig_test_flag_provider_HEADER_H
+#define com_android_aconfig_test_flag_provider_HEADER_H
+
+#include "com_android_aconfig_test.h"
+
+#include <unordered_map>
+#include <unordered_set>
+#include <cassert>
+
+namespace com::android::aconfig::test {
+class flag_provider : public flag_provider_interface {
+private:
+    std::unordered_map<std::string, bool> overrides_;
+    std::unordered_set<std::string> flag_names_;
+
+public:
+
+    flag_provider()
+        : overrides_(),
+          flag_names_() {
+        flag_names_.insert(DISABLED_RO);
+        flag_names_.insert(DISABLED_RW);
+        flag_names_.insert(ENABLED_RO);
+        flag_names_.insert(ENABLED_RW);
+    }
+
+    virtual bool disabled_ro() override {
+        auto it = overrides_.find(DISABLED_RO);
+        if (it != overrides_.end()) {
+            return it->second;
+        } else {
+            return false;
         }
-        #endif
-        "#;
-        let file = generate_cpp_code(&cache).unwrap();
-        assert_eq!("aconfig/my_namespace.h", file.path.to_str().unwrap());
+    }
+
+    virtual bool disabled_rw() override {
+        auto it = overrides_.find(DISABLED_RW);
+        if (it != overrides_.end()) {
+            return it->second;
+        } else {
+            return GetServerConfigurableFlag(
+                "aconfig_test",
+                "com.android.aconfig.test.disabled_rw",
+                "false") == "true";
+        }
+    }
+
+    virtual bool enabled_ro() override {
+        auto it = overrides_.find(ENABLED_RO);
+        if (it != overrides_.end()) {
+            return it->second;
+        } else {
+            return true;
+        }
+    }
+
+    virtual bool enabled_rw() override {
+        auto it = overrides_.find(ENABLED_RW);
+        if (it != overrides_.end()) {
+            return it->second;
+        } else {
+            return GetServerConfigurableFlag(
+                "aconfig_test",
+                "com.android.aconfig.test.enabled_rw",
+                "true") == "true";
+        }
+    }
+
+    virtual void override_flag(std::string const& flag, bool val) override {
+        assert(flag_names_.count(flag));
+        overrides_[flag] = val;
+    }
+
+    virtual void reset_overrides() override {
+        overrides_.clear();
+    }
+};
+}
+#endif
+"#;
+
+    const SOURCE_FILE_EXPECTED: &str = r#"
+#include "com_android_aconfig_test.h"
+#include "com_android_aconfig_test_flag_provider.h"
+
+namespace com::android::aconfig::test {
+
+    std::string const DISABLED_RO = "com.android.aconfig.test.disabled_ro";
+    std::string const DISABLED_RW = "com.android.aconfig.test.disabled_rw";
+    std::string const ENABLED_RO = "com.android.aconfig.test.enabled_ro";
+    std::string const ENABLED_RW = "com.android.aconfig.test.enabled_rw";
+
+    std::unique_ptr<flag_provider_interface> provider_ =
+        std::make_unique<flag_provider>();
+}
+"#;
+
+    fn test_generate_cpp_code(mode: CodegenMode) {
+        let parsed_flags = crate::test::parse_test_flags();
+        let generated =
+            generate_cpp_code(crate::test::TEST_PACKAGE, parsed_flags.parsed_flag.iter(), mode)
+                .unwrap();
+        let mut generated_files_map = HashMap::new();
+        for file in generated {
+            generated_files_map.insert(
+                String::from(file.path.to_str().unwrap()),
+                String::from_utf8(file.contents.clone()).unwrap(),
+            );
+        }
+
+        let mut target_file_path = String::from("include/com_android_aconfig_test.h");
+        assert!(generated_files_map.contains_key(&target_file_path));
         assert_eq!(
-            expect_content.replace(' ', ""),
-            String::from_utf8(file.contents).unwrap().replace(' ', "")
+            None,
+            crate::test::first_significant_code_diff(
+                match mode {
+                    CodegenMode::Production => EXPORTED_PROD_HEADER_EXPECTED,
+                    CodegenMode::Test => EXPORTED_TEST_HEADER_EXPECTED,
+                },
+                generated_files_map.get(&target_file_path).unwrap()
+            )
+        );
+
+        target_file_path = String::from("com_android_aconfig_test_flag_provider.h");
+        assert!(generated_files_map.contains_key(&target_file_path));
+        assert_eq!(
+            None,
+            crate::test::first_significant_code_diff(
+                match mode {
+                    CodegenMode::Production => PROD_FLAG_PROVIDER_HEADER_EXPECTED,
+                    CodegenMode::Test => TEST_FLAG_PROVIDER_HEADER_EXPECTED,
+                },
+                generated_files_map.get(&target_file_path).unwrap()
+            )
+        );
+
+        target_file_path = String::from("com_android_aconfig_test.cc");
+        assert!(generated_files_map.contains_key(&target_file_path));
+        assert_eq!(
+            None,
+            crate::test::first_significant_code_diff(
+                SOURCE_FILE_EXPECTED,
+                generated_files_map.get(&target_file_path).unwrap()
+            )
         );
     }
 
     #[test]
-    fn test_cpp_codegen_runtime_flag() {
-        let namespace = "my_namespace";
-        let mut builder = CacheBuilder::new(namespace.to_string()).unwrap();
-        builder
-            .add_flag_declaration(
-                Source::File("aconfig_one.txt".to_string()),
-                FlagDeclaration {
-                    name: "my_flag_one".to_string(),
-                    description: "buildtime disable".to_string(),
-                },
-            )
-            .unwrap()
-            .add_flag_declaration(
-                Source::File("aconfig_two.txt".to_string()),
-                FlagDeclaration {
-                    name: "my_flag_two".to_string(),
-                    description: "runtime enable".to_string(),
-                },
-            )
-            .unwrap()
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: namespace.to_string(),
-                    name: "my_flag_two".to_string(),
-                    state: FlagState::Enabled,
-                    permission: Permission::ReadWrite,
-                },
-            )
-            .unwrap();
-        let cache = builder.build();
-        let expect_content = r#"#ifndef my_namespace_HEADER_H
-        #define my_namespace_HEADER_H
-        #include "my_namespace.h"
+    fn test_generate_cpp_code_for_prod() {
+        test_generate_cpp_code(CodegenMode::Production);
+    }
 
-        #include <server_configurable_flags/get_flags.h>
-        using namespace server_configurable_flags;
-
-        namespace my_namespace {
-
-            class my_flag_one {
-                public:
-                    virtual const bool value() {
-                        return GetServerConfigurableFlag(
-                            "my_namespace",
-                            "my_flag_one",
-                            "false") == "true";
-                    }
-            }
-
-            class my_flag_two {
-                public:
-                    virtual const bool value() {
-                        return GetServerConfigurableFlag(
-                            "my_namespace",
-                            "my_flag_two",
-                            "true") == "true";
-                    }
-            }
-
-        }
-        #endif
-        "#;
-        let file = generate_cpp_code(&cache).unwrap();
-        assert_eq!("aconfig/my_namespace.h", file.path.to_str().unwrap());
-        assert_eq!(
-            expect_content.replace(' ', ""),
-            String::from_utf8(file.contents).unwrap().replace(' ', "")
-        );
+    #[test]
+    fn test_generate_cpp_code_for_test() {
+        test_generate_cpp_code(CodegenMode::Test);
     }
 }
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen_java.rs
index 98288e7..8ab6ffa 100644
--- a/tools/aconfig/src/codegen_java.rs
+++ b/tools/aconfig/src/codegen_java.rs
@@ -19,118 +19,300 @@
 use std::path::PathBuf;
 use tinytemplate::TinyTemplate;
 
-use crate::aconfig::{FlagState, Permission};
-use crate::cache::{Cache, Item};
-use crate::commands::OutputFile;
+use crate::codegen;
+use crate::commands::{CodegenMode, OutputFile};
+use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
 
-pub fn generate_java_code(cache: &Cache) -> Result<OutputFile> {
-    let class_elements: Vec<ClassElement> = cache.iter().map(create_class_element).collect();
-    let readwrite = class_elements.iter().any(|item| item.readwrite);
-    let namespace = cache.namespace();
-    let context = Context { namespace: namespace.to_string(), readwrite, class_elements };
+pub fn generate_java_code<'a, I>(
+    package: &str,
+    parsed_flags_iter: I,
+    codegen_mode: CodegenMode,
+) -> Result<Vec<OutputFile>>
+where
+    I: Iterator<Item = &'a ProtoParsedFlag>,
+{
+    let class_elements: Vec<ClassElement> =
+        parsed_flags_iter.map(|pf| create_class_element(package, pf)).collect();
+    let is_read_write = class_elements.iter().any(|elem| elem.is_read_write);
+    let is_test_mode = codegen_mode == CodegenMode::Test;
+    let context =
+        Context { class_elements, is_test_mode, is_read_write, package_name: package.to_string() };
     let mut template = TinyTemplate::new();
-    template.add_template("java_code_gen", include_str!("../templates/java.template"))?;
-    let contents = template.render("java_code_gen", &context)?;
-    let mut path: PathBuf = ["aconfig", namespace].iter().collect();
-    // TODO: Allow customization of the java class name
-    path.push("Flags.java");
-    Ok(OutputFile { contents: contents.into(), path })
+    template.add_template("Flags.java", include_str!("../templates/Flags.java.template"))?;
+    template.add_template(
+        "FeatureFlagsImpl.java",
+        include_str!("../templates/FeatureFlagsImpl.java.template"),
+    )?;
+    template.add_template(
+        "FeatureFlags.java",
+        include_str!("../templates/FeatureFlags.java.template"),
+    )?;
+
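+    // Generated files mirror the Java package structure, e.g. com/android/foo/Flags.java.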
+    let path: PathBuf = package.split('.').collect();
+    ["Flags.java", "FeatureFlagsImpl.java", "FeatureFlags.java"]
+        .iter()
+        .map(|file| {
+            Ok(OutputFile {
+                contents: template.render(file, &context)?.into(),
+                path: path.join(file),
+            })
+        })
+        .collect::<Result<Vec<OutputFile>>>()
 }
 
 #[derive(Serialize)]
 struct Context {
-    pub namespace: String,
-    pub readwrite: bool,
     pub class_elements: Vec<ClassElement>,
+    pub is_test_mode: bool,
+    pub is_read_write: bool,
+    pub package_name: String,
 }
 
 #[derive(Serialize)]
 struct ClassElement {
+    pub default_value: bool,
+    pub device_config_namespace: String,
+    pub device_config_flag: String,
+    pub flag_name_constant_suffix: String,
+    pub is_read_write: bool,
     pub method_name: String,
-    pub readwrite: bool,
-    pub default_value: String,
-    pub feature_name: String,
-    pub flag_name: String,
 }
 
-fn create_class_element(item: &Item) -> ClassElement {
+fn create_class_element(package: &str, pf: &ProtoParsedFlag) -> ClassElement {
+    let device_config_flag = codegen::create_device_config_ident(package, pf.name())
+        .expect("values checked at flag parse time");
     ClassElement {
-        method_name: item.name.clone(),
-        readwrite: item.permission == Permission::ReadWrite,
-        default_value: if item.state == FlagState::Enabled {
-            "true".to_string()
-        } else {
-            "false".to_string()
-        },
-        feature_name: item.name.clone(),
-        flag_name: item.name.clone(),
+        default_value: pf.state() == ProtoFlagState::ENABLED,
+        device_config_namespace: pf.namespace().to_string(),
+        device_config_flag,
+        flag_name_constant_suffix: pf.name().to_ascii_uppercase(),
+        is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
+        method_name: format_java_method_name(pf.name()),
     }
 }
 
+fn format_java_method_name(flag_name: &str) -> String {
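+    // Convert a snake_case flag name to a camelCase method name: split on '_', skip empty
+    // segments, lowercase the first word, and capitalize only the first letter of later words.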
+    flag_name
+        .split('_')
+        .filter(|&word| !word.is_empty())
+        .enumerate()
+        .map(|(index, word)| {
+            if index == 0 {
+                word.to_ascii_lowercase()
+            } else {
+                word[0..1].to_ascii_uppercase() + &word[1..].to_ascii_lowercase()
+            }
+        })
+        .collect::<Vec<String>>()
+        .join("")
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::aconfig::{FlagDeclaration, FlagValue};
-    use crate::cache::CacheBuilder;
-    use crate::commands::Source;
+    use std::collections::HashMap;
+
+    const EXPECTED_FEATUREFLAGS_CONTENT: &str = r#"
+    package com.android.aconfig.test;
+    public interface FeatureFlags {
+        boolean disabledRo();
+        boolean disabledRw();
+        boolean enabledRo();
+        boolean enabledRw();
+    }"#;
+
+    const EXPECTED_FLAG_COMMON_CONTENT: &str = r#"
+    package com.android.aconfig.test;
+    public final class Flags {
+        public static final String FLAG_DISABLED_RO = "com.android.aconfig.test.disabled_ro";
+        public static final String FLAG_DISABLED_RW = "com.android.aconfig.test.disabled_rw";
+        public static final String FLAG_ENABLED_RO = "com.android.aconfig.test.enabled_ro";
+        public static final String FLAG_ENABLED_RW = "com.android.aconfig.test.enabled_rw";
+
+        public static boolean disabledRo() {
+            return FEATURE_FLAGS.disabledRo();
+        }
+        public static boolean disabledRw() {
+            return FEATURE_FLAGS.disabledRw();
+        }
+        public static boolean enabledRo() {
+            return FEATURE_FLAGS.enabledRo();
+        }
+        public static boolean enabledRw() {
+            return FEATURE_FLAGS.enabledRw();
+        }
+    "#;
 
     #[test]
-    fn test_generate_java_code() {
-        let namespace = "example";
-        let mut builder = CacheBuilder::new(namespace.to_string()).unwrap();
-        builder
-            .add_flag_declaration(
-                Source::File("test.txt".to_string()),
-                FlagDeclaration {
-                    name: "test".to_string(),
-                    description: "buildtime enable".to_string(),
-                },
-            )
-            .unwrap()
-            .add_flag_declaration(
-                Source::File("test2.txt".to_string()),
-                FlagDeclaration {
-                    name: "test2".to_string(),
-                    description: "runtime disable".to_string(),
-                },
-            )
-            .unwrap()
-            .add_flag_value(
-                Source::Memory,
-                FlagValue {
-                    namespace: namespace.to_string(),
-                    name: "test".to_string(),
-                    state: FlagState::Disabled,
-                    permission: Permission::ReadOnly,
-                },
-            )
-            .unwrap();
-        let cache = builder.build();
-        let expect_content = r#"package aconfig.example;
-
+    fn test_generate_java_code_production() {
+        let parsed_flags = crate::test::parse_test_flags();
+        let generated_files = generate_java_code(
+            crate::test::TEST_PACKAGE,
+            parsed_flags.parsed_flag.iter(),
+            CodegenMode::Production,
+        )
+        .unwrap();
+        let expect_flags_content = EXPECTED_FLAG_COMMON_CONTENT.to_string()
+            + r#"
+            private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
+        }"#;
+        let expected_featureflagsimpl_content = r#"
+        package com.android.aconfig.test;
         import android.provider.DeviceConfig;
-
-        public final class Flags {
-
-            public static boolean test() {
+        public final class FeatureFlagsImpl implements FeatureFlags {
+            @Override
+            public boolean disabledRo() {
                 return false;
             }
-
-            public static boolean test2() {
+            @Override
+            public boolean disabledRw() {
                 return DeviceConfig.getBoolean(
-                    "example",
-                    "test2__test2",
+                    "aconfig_test",
+                    "com.android.aconfig.test.disabled_rw",
                     false
                 );
             }
-
+            @Override
+            public boolean enabledRo() {
+                return true;
+            }
+            @Override
+            public boolean enabledRw() {
+                return DeviceConfig.getBoolean(
+                    "aconfig_test",
+                    "com.android.aconfig.test.enabled_rw",
+                    true
+                );
+            }
         }
         "#;
-        let file = generate_java_code(&cache).unwrap();
-        assert_eq!("aconfig/example/Flags.java", file.path.to_str().unwrap());
-        assert_eq!(
-            expect_content.replace(' ', ""),
-            String::from_utf8(file.contents).unwrap().replace(' ', "")
-        );
+        let mut file_set = HashMap::from([
+            ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
+            ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsimpl_content),
+            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_CONTENT),
+        ]);
+
+        for file in generated_files {
+            let file_path = file.path.to_str().unwrap();
+            assert!(file_set.contains_key(file_path), "Cannot find {}", file_path);
+            assert_eq!(
+                None,
+                crate::test::first_significant_code_diff(
+                    file_set.get(file_path).unwrap(),
+                    &String::from_utf8(file.contents.clone()).unwrap()
+                ),
+                "File {} content is not correct",
+                file_path
+            );
+            file_set.remove(file_path);
+        }
+
+        assert!(file_set.is_empty());
+    }
+
+    #[test]
+    fn test_generate_java_code_test() {
+        let parsed_flags = crate::test::parse_test_flags();
+        let generated_files = generate_java_code(
+            crate::test::TEST_PACKAGE,
+            parsed_flags.parsed_flag.iter(),
+            CodegenMode::Test,
+        )
+        .unwrap();
+        let expect_flags_content = EXPECTED_FLAG_COMMON_CONTENT.to_string()
+            + r#"
+            public static void setFeatureFlagsImpl(FeatureFlags featureFlags) {
+                Flags.FEATURE_FLAGS = featureFlags;
+            }
+            public static void unsetFeatureFlagsImpl() {
+                Flags.FEATURE_FLAGS = null;
+            }
+            private static FeatureFlags FEATURE_FLAGS;
+        }
+        "#;
+        let expected_featureflagsimpl_content = r#"
+        package com.android.aconfig.test;
+        import static java.util.stream.Collectors.toMap;
+        import java.util.HashMap;
+        import java.util.Map;
+        import java.util.stream.Stream;
+        public final class FeatureFlagsImpl implements FeatureFlags {
+            @Override
+            public boolean disabledRo() {
+                return getFlag(Flags.FLAG_DISABLED_RO);
+            }
+            @Override
+            public boolean disabledRw() {
+                return getFlag(Flags.FLAG_DISABLED_RW);
+            }
+            @Override
+            public boolean enabledRo() {
+                return getFlag(Flags.FLAG_ENABLED_RO);
+            }
+            @Override
+            public boolean enabledRw() {
+                return getFlag(Flags.FLAG_ENABLED_RW);
+            }
+            public void setFlag(String flagName, boolean value) {
+                if (!this.mFlagMap.containsKey(flagName)) {
+                    throw new IllegalArgumentException("no such flag" + flagName);
+                }
+                this.mFlagMap.put(flagName, value);
+            }
+            public void resetAll() {
+                for (Map.Entry entry : mFlagMap.entrySet()) {
+                    entry.setValue(null);
+                }
+            }
+            private boolean getFlag(String flagName) {
+                Boolean value = this.mFlagMap.get(flagName);
+                if (value == null) {
+                    throw new IllegalArgumentException(flagName + " is not set");
+                }
+                return value;
+            }
+            private HashMap<String, Boolean> mFlagMap = Stream.of(
+                    Flags.FLAG_DISABLED_RO,
+                    Flags.FLAG_DISABLED_RW,
+                    Flags.FLAG_ENABLED_RO,
+                    Flags.FLAG_ENABLED_RW
+                )
+                .collect(
+                    HashMap::new,
+                    (map, elem) -> map.put(elem, null),
+                    HashMap::putAll
+                );
+        }
+        "#;
+        let mut file_set = HashMap::from([
+            ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
+            ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsimpl_content),
+            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_CONTENT),
+        ]);
+
+        for file in generated_files {
+            let file_path = file.path.to_str().unwrap();
+            assert!(file_set.contains_key(file_path), "Cannot find {}", file_path);
+            assert_eq!(
+                None,
+                crate::test::first_significant_code_diff(
+                    file_set.get(file_path).unwrap(),
+                    &String::from_utf8(file.contents.clone()).unwrap()
+                ),
+                "File {} content is not correct",
+                file_path
+            );
+            file_set.remove(file_path);
+        }
+
+        assert!(file_set.is_empty());
+    }
+
+    #[test]
+    fn test_format_java_method_name() {
+        let input = "____some_snake___name____";
+        let expected = "someSnakeName";
+        let formatted_name = format_java_method_name(input);
+        assert_eq!(expected, formatted_name);
     }
 }
diff --git a/tools/aconfig/src/codegen_rust.rs b/tools/aconfig/src/codegen_rust.rs
index fe4231b..f931418 100644
--- a/tools/aconfig/src/codegen_rust.rs
+++ b/tools/aconfig/src/codegen_rust.rs
@@ -18,15 +18,21 @@
 use serde::Serialize;
 use tinytemplate::TinyTemplate;
 
-use crate::aconfig::{FlagState, Permission};
-use crate::cache::{Cache, Item};
+use crate::codegen;
 use crate::commands::OutputFile;
+use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
 
-pub fn generate_rust_code(cache: &Cache) -> Result<OutputFile> {
-    let namespace = cache.namespace();
-    let parsed_flags: Vec<TemplateParsedFlag> =
-        cache.iter().map(|item| create_template_parsed_flag(namespace, item)).collect();
-    let context = TemplateContext { namespace: namespace.to_string(), parsed_flags };
+pub fn generate_rust_code<'a, I>(package: &str, parsed_flags_iter: I) -> Result<OutputFile>
+where
+    I: Iterator<Item = &'a ProtoParsedFlag>,
+{
+    let template_flags: Vec<TemplateParsedFlag> =
+        parsed_flags_iter.map(|pf| TemplateParsedFlag::new(package, pf)).collect();
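+    // Each '.'-separated package segment becomes a nested Rust module in the generated code,
+    // e.g. "com.android.foo" -> pub mod com { pub mod android { pub mod foo { ... } } }.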
+    let context = TemplateContext {
+        package: package.to_string(),
+        template_flags,
+        modules: package.split('.').map(|s| s.to_string()).collect::<Vec<_>>(),
+    };
     let mut template = TinyTemplate::new();
     template.add_template("rust_code_gen", include_str!("../templates/rust.template"))?;
     let contents = template.render("rust_code_gen", &context)?;
@@ -36,14 +42,16 @@
 
 #[derive(Serialize)]
 struct TemplateContext {
-    pub namespace: String,
-    pub parsed_flags: Vec<TemplateParsedFlag>,
+    pub package: String,
+    pub template_flags: Vec<TemplateParsedFlag>,
+    pub modules: Vec<String>,
 }
 
 #[derive(Serialize)]
 struct TemplateParsedFlag {
     pub name: String,
-    pub fn_name: String,
+    pub device_config_namespace: String,
+    pub device_config_flag: String,
 
     // TinyTemplate's conditionals are limited to single <bool> expressions; list all options here
     // Invariant: exactly one of these fields will be true
@@ -52,78 +60,80 @@
     pub is_read_write: bool,
 }
 
-#[allow(clippy::nonminimal_bool)]
-fn create_template_parsed_flag(namespace: &str, item: &Item) -> TemplateParsedFlag {
-    let template = TemplateParsedFlag {
-        name: item.name.clone(),
-        fn_name: format!("{}_{}", namespace, &item.name),
-        is_read_only_enabled: item.permission == Permission::ReadOnly
-            && item.state == FlagState::Enabled,
-        is_read_only_disabled: item.permission == Permission::ReadOnly
-            && item.state == FlagState::Disabled,
-        is_read_write: item.permission == Permission::ReadWrite,
-    };
-    #[rustfmt::skip]
-    debug_assert!(
-        (template.is_read_only_enabled && !template.is_read_only_disabled && !template.is_read_write) ||
-        (!template.is_read_only_enabled && template.is_read_only_disabled && !template.is_read_write) ||
-        (!template.is_read_only_enabled && !template.is_read_only_disabled && template.is_read_write),
-        "TemplateParsedFlag invariant failed: {} {} {}",
-        template.is_read_only_enabled,
-        template.is_read_only_disabled,
-        template.is_read_write,
-    );
-    template
+impl TemplateParsedFlag {
+    #[allow(clippy::nonminimal_bool)]
+    fn new(package: &str, pf: &ProtoParsedFlag) -> Self {
+        let template = TemplateParsedFlag {
+            name: pf.name().to_string(),
+            device_config_namespace: pf.namespace().to_string(),
+            device_config_flag: codegen::create_device_config_ident(package, pf.name())
+                .expect("values checked at flag parse time"),
+            is_read_only_enabled: pf.permission() == ProtoFlagPermission::READ_ONLY
+                && pf.state() == ProtoFlagState::ENABLED,
+            is_read_only_disabled: pf.permission() == ProtoFlagPermission::READ_ONLY
+                && pf.state() == ProtoFlagState::DISABLED,
+            is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
+        };
+        #[rustfmt::skip]
+        debug_assert!(
+            (template.is_read_only_enabled && !template.is_read_only_disabled && !template.is_read_write) ||
+            (!template.is_read_only_enabled && template.is_read_only_disabled && !template.is_read_write) ||
+            (!template.is_read_only_enabled && !template.is_read_only_disabled && template.is_read_write),
+            "TemplateParsedFlag invariant failed: {} {} {}",
+            template.is_read_only_enabled,
+            template.is_read_only_disabled,
+            template.is_read_write,
+        );
+        template
+    }
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::commands::{create_cache, Input, Source};
 
     #[test]
     fn test_generate_rust_code() {
-        let cache = create_cache(
-            "test",
-            vec![Input {
-                source: Source::File("testdata/test.aconfig".to_string()),
-                reader: Box::new(include_bytes!("../testdata/test.aconfig").as_slice()),
-            }],
-            vec![
-                Input {
-                    source: Source::File("testdata/first.values".to_string()),
-                    reader: Box::new(include_bytes!("../testdata/first.values").as_slice()),
-                },
-                Input {
-                    source: Source::File("testdata/test.aconfig".to_string()),
-                    reader: Box::new(include_bytes!("../testdata/second.values").as_slice()),
-                },
-            ],
-        )
-        .unwrap();
-        let generated = generate_rust_code(&cache).unwrap();
+        let parsed_flags = crate::test::parse_test_flags();
+        let generated =
+            generate_rust_code(crate::test::TEST_PACKAGE, parsed_flags.parsed_flag.iter()).unwrap();
         assert_eq!("src/lib.rs", format!("{}", generated.path.display()));
         let expected = r#"
+pub mod com {
+pub mod android {
+pub mod aconfig {
+pub mod test {
 #[inline(always)]
-pub const fn r#test_disabled_ro() -> bool {
+pub const fn r#disabled_ro() -> bool {
     false
 }
 
 #[inline(always)]
-pub fn r#test_disabled_rw() -> bool {
-    flags_rust::GetServerConfigurableFlag("test", "disabled_rw", "false") == "true"
+pub fn r#disabled_rw() -> bool {
+    flags_rust::GetServerConfigurableFlag("aconfig_test", "com.android.aconfig.test.disabled_rw", "false") == "true"
 }
 
 #[inline(always)]
-pub const fn r#test_enabled_ro() -> bool {
+pub const fn r#enabled_ro() -> bool {
     true
 }
 
 #[inline(always)]
-pub fn r#test_enabled_rw() -> bool {
-    flags_rust::GetServerConfigurableFlag("test", "enabled_rw", "false") == "true"
+pub fn r#enabled_rw() -> bool {
+    flags_rust::GetServerConfigurableFlag("aconfig_test", "com.android.aconfig.test.enabled_rw", "false") == "true"
+}
+
+}
+}
+}
 }
 "#;
-        assert_eq!(expected.trim(), String::from_utf8(generated.contents).unwrap().trim());
+        assert_eq!(
+            None,
+            crate::test::first_significant_code_diff(
+                expected,
+                &String::from_utf8(generated.contents).unwrap()
+            )
+        );
     }
 }
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
index cce1d7f..687f319 100644
--- a/tools/aconfig/src/commands.rs
+++ b/tools/aconfig/src/commands.rs
@@ -14,95 +14,194 @@
  * limitations under the License.
  */
 
-use anyhow::{ensure, Context, Result};
+use anyhow::{bail, ensure, Context, Result};
 use clap::ValueEnum;
 use protobuf::Message;
-use serde::{Deserialize, Serialize};
-use std::fmt;
 use std::io::Read;
 use std::path::PathBuf;
 
-use crate::aconfig::{FlagDeclarations, FlagValue};
-use crate::cache::{Cache, CacheBuilder};
 use crate::codegen_cpp::generate_cpp_code;
 use crate::codegen_java::generate_java_code;
 use crate::codegen_rust::generate_rust_code;
-use crate::protos::ProtoParsedFlags;
-
-#[derive(Serialize, Deserialize, Clone, Debug)]
-pub enum Source {
-    #[allow(dead_code)] // only used in unit tests
-    Memory,
-    File(String),
-}
-
-impl fmt::Display for Source {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match self {
-            Self::Memory => write!(f, "<memory>"),
-            Self::File(path) => write!(f, "{}", path),
-        }
-    }
-}
+use crate::protos::{
+    ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint,
+};
 
 pub struct Input {
-    pub source: Source,
+    pub source: String,
     pub reader: Box<dyn Read>,
 }
 
+impl Input {
+    fn try_parse_flags(&mut self) -> Result<ProtoParsedFlags> {
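+        // Read the whole input and decode it as a binary ProtoParsedFlags message.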
+        let mut buffer = Vec::new();
+        self.reader.read_to_end(&mut buffer)?;
+        crate::protos::parsed_flags::try_from_binary_proto(&buffer)
+    }
+}
+
 pub struct OutputFile {
     pub path: PathBuf, // relative to some root directory only main knows about
     pub contents: Vec<u8>,
 }
 
-pub fn create_cache(
-    namespace: &str,
-    declarations: Vec<Input>,
-    values: Vec<Input>,
-) -> Result<Cache> {
-    let mut builder = CacheBuilder::new(namespace.to_owned())?;
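+// State and permission assigned to a newly declared flag until a flag value file overrides them.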
+const DEFAULT_FLAG_STATE: ProtoFlagState = ProtoFlagState::DISABLED;
+const DEFAULT_FLAG_PERMISSION: ProtoFlagPermission = ProtoFlagPermission::READ_WRITE;
+
+pub fn parse_flags(package: &str, declarations: Vec<Input>, values: Vec<Input>) -> Result<Vec<u8>> {
+    let mut parsed_flags = ProtoParsedFlags::new();
 
     for mut input in declarations {
         let mut contents = String::new();
         input.reader.read_to_string(&mut contents)?;
-        let dec_list = FlagDeclarations::try_from_text_proto(&contents)
+
+        let flag_declarations = crate::protos::flag_declarations::try_from_text_proto(&contents)
             .with_context(|| format!("Failed to parse {}", input.source))?;
         ensure!(
-            namespace == dec_list.namespace,
-            "Failed to parse {}: expected namespace {}, got {}",
+            package == flag_declarations.package(),
+            "Failed to parse {}: expected package {}, got {}",
             input.source,
-            namespace,
-            dec_list.namespace
+            package,
+            flag_declarations.package()
         );
-        for d in dec_list.flags.into_iter() {
-            builder.add_flag_declaration(input.source.clone(), d)?;
+        for mut flag_declaration in flag_declarations.flag.into_iter() {
+            crate::protos::flag_declaration::verify_fields(&flag_declaration)
+                .with_context(|| format!("Failed to parse {}", input.source))?;
+
+            // create ParsedFlag using FlagDeclaration and default values
+            let mut parsed_flag = ProtoParsedFlag::new();
+            parsed_flag.set_package(package.to_string());
+            parsed_flag.set_name(flag_declaration.take_name());
+            parsed_flag.set_namespace(flag_declaration.take_namespace());
+            parsed_flag.set_description(flag_declaration.take_description());
+            parsed_flag.bug.append(&mut flag_declaration.bug);
+            parsed_flag.set_state(DEFAULT_FLAG_STATE);
+            parsed_flag.set_permission(DEFAULT_FLAG_PERMISSION);
+            let mut tracepoint = ProtoTracepoint::new();
+            tracepoint.set_source(input.source.clone());
+            tracepoint.set_state(DEFAULT_FLAG_STATE);
+            tracepoint.set_permission(DEFAULT_FLAG_PERMISSION);
+            parsed_flag.trace.push(tracepoint);
+
+            // verify ParsedFlag looks reasonable
+            crate::protos::parsed_flag::verify_fields(&parsed_flag)?;
+
+            // verify ParsedFlag can be added
+            ensure!(
+                parsed_flags.parsed_flag.iter().all(|other| other.name() != parsed_flag.name()),
+                "failed to declare flag {} from {}: flag already declared",
+                parsed_flag.name(),
+                input.source
+            );
+
+            // add ParsedFlag to ParsedFlags
+            parsed_flags.parsed_flag.push(parsed_flag);
         }
     }
 
     for mut input in values {
         let mut contents = String::new();
         input.reader.read_to_string(&mut contents)?;
-        let values_list = FlagValue::try_from_text_proto_list(&contents)
+        let flag_values = crate::protos::flag_values::try_from_text_proto(&contents)
             .with_context(|| format!("Failed to parse {}", input.source))?;
-        for v in values_list {
-            // TODO: warn about flag values that do not take effect?
-            let _ = builder.add_flag_value(input.source.clone(), v);
+        for flag_value in flag_values.flag_value.into_iter() {
+            crate::protos::flag_value::verify_fields(&flag_value)
+                .with_context(|| format!("Failed to parse {}", input.source))?;
+
+            let Some(parsed_flag) = parsed_flags.parsed_flag.iter_mut().find(|pf| pf.package() == flag_value.package() && pf.name() == flag_value.name()) else {
+                // (silently) skip unknown flags
+                continue;
+            };
+
+            parsed_flag.set_state(flag_value.state());
+            parsed_flag.set_permission(flag_value.permission());
+            let mut tracepoint = ProtoTracepoint::new();
+            tracepoint.set_source(input.source.clone());
+            tracepoint.set_state(flag_value.state());
+            tracepoint.set_permission(flag_value.permission());
+            parsed_flag.trace.push(tracepoint);
         }
     }
 
-    Ok(builder.build())
+    crate::protos::parsed_flags::verify_fields(&parsed_flags)?;
+    let mut output = Vec::new();
+    parsed_flags.write_to_vec(&mut output)?;
+    Ok(output)
 }
 
-pub fn create_java_lib(cache: &Cache) -> Result<OutputFile> {
-    generate_java_code(cache)
+#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
+pub enum CodegenMode {
+    Production,
+    Test,
 }
 
-pub fn create_cpp_lib(cache: &Cache) -> Result<OutputFile> {
-    generate_cpp_code(cache)
+pub fn create_java_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<Vec<OutputFile>> {
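+    // Codegen is per-package: all parsed flags in the input must share a single package.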
+    let parsed_flags = input.try_parse_flags()?;
+    let Some(package) = find_unique_package(&parsed_flags) else {
+        bail!("no parsed flags, or the parsed flags use different packages");
+    };
+    generate_java_code(package, parsed_flags.parsed_flag.iter(), codegen_mode)
 }
 
-pub fn create_rust_lib(cache: &Cache) -> Result<OutputFile> {
-    generate_rust_code(cache)
+pub fn create_cpp_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<Vec<OutputFile>> {
+    let parsed_flags = input.try_parse_flags()?;
+    let Some(package) = find_unique_package(&parsed_flags) else {
+        bail!("no parsed flags, or the parsed flags use different packages");
+    };
+    generate_cpp_code(package, parsed_flags.parsed_flag.iter(), codegen_mode)
+}
+
+pub fn create_rust_lib(mut input: Input) -> Result<OutputFile> {
+    let parsed_flags = input.try_parse_flags()?;
+    let Some(package) = find_unique_package(&parsed_flags) else {
+        bail!("no parsed flags, or the parsed flags use different packages");
+    };
+    generate_rust_code(package, parsed_flags.parsed_flag.iter())
+}
+
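+// Emit one "<namespace>:<package>.<flag>=enabled|disabled" line per READ_WRITE flag.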
+pub fn create_device_config_defaults(mut input: Input) -> Result<Vec<u8>> {
+    let parsed_flags = input.try_parse_flags()?;
+    let mut output = Vec::new();
+    for parsed_flag in parsed_flags
+        .parsed_flag
+        .into_iter()
+        .filter(|pf| pf.permission() == ProtoFlagPermission::READ_WRITE)
+    {
+        let line = format!(
+            "{}:{}.{}={}\n",
+            parsed_flag.namespace(),
+            parsed_flag.package(),
+            parsed_flag.name(),
+            match parsed_flag.state() {
+                ProtoFlagState::ENABLED => "enabled",
+                ProtoFlagState::DISABLED => "disabled",
+            }
+        );
+        output.extend_from_slice(line.as_bytes());
+    }
+    Ok(output)
+}
+
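+// Emit one "persist.device_config.<package>.<flag>=true|false" line per READ_WRITE flag.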
+pub fn create_device_config_sysprops(mut input: Input) -> Result<Vec<u8>> {
+    let parsed_flags = input.try_parse_flags()?;
+    let mut output = Vec::new();
+    for parsed_flag in parsed_flags
+        .parsed_flag
+        .into_iter()
+        .filter(|pf| pf.permission() == ProtoFlagPermission::READ_WRITE)
+    {
+        let line = format!(
+            "persist.device_config.{}.{}={}\n",
+            parsed_flag.package(),
+            parsed_flag.name(),
+            match parsed_flag.state() {
+                ProtoFlagState::ENABLED => "true",
+                ProtoFlagState::DISABLED => "false",
+            }
+        );
+        output.extend_from_slice(line.as_bytes());
+    }
+    Ok(output)
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
@@ -110,150 +209,148 @@
     Text,
     Debug,
     Protobuf,
+    Textproto,
 }
 
-pub fn dump_cache(mut caches: Vec<Cache>, format: DumpFormat) -> Result<Vec<u8>> {
+pub fn dump_parsed_flags(mut input: Vec<Input>, format: DumpFormat) -> Result<Vec<u8>> {
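+    // Parse each input independently, then merge them into a single ProtoParsedFlags before dumping.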
+    let individually_parsed_flags: Result<Vec<ProtoParsedFlags>> =
+        input.iter_mut().map(|i| i.try_parse_flags()).collect();
+    let parsed_flags: ProtoParsedFlags =
+        crate::protos::parsed_flags::merge(individually_parsed_flags?)?;
+
     let mut output = Vec::new();
-    caches.sort_by_cached_key(|cache| cache.namespace().to_string());
-    for cache in caches.into_iter() {
-        match format {
-            DumpFormat::Text => {
-                let mut lines = vec![];
-                for item in cache.iter() {
-                    lines.push(format!(
-                        "{}/{}: {:?} {:?}\n",
-                        item.namespace, item.name, item.state, item.permission
-                    ));
-                }
-                output.append(&mut lines.concat().into());
+    match format {
+        DumpFormat::Text => {
+            for parsed_flag in parsed_flags.parsed_flag.into_iter() {
+                let line = format!(
+                    "{}/{}: {:?} {:?}\n",
+                    parsed_flag.package(),
+                    parsed_flag.name(),
+                    parsed_flag.state(),
+                    parsed_flag.permission()
+                );
+                output.extend_from_slice(line.as_bytes());
             }
-            DumpFormat::Debug => {
-                let mut lines = vec![];
-                for item in cache.iter() {
-                    lines.push(format!("{:#?}\n", item));
-                }
-                output.append(&mut lines.concat().into());
+        }
+        DumpFormat::Debug => {
+            for parsed_flag in parsed_flags.parsed_flag.into_iter() {
+                let line = format!("{:#?}\n", parsed_flag);
+                output.extend_from_slice(line.as_bytes());
             }
-            DumpFormat::Protobuf => {
-                let parsed_flags: ProtoParsedFlags = cache.into();
-                parsed_flags.write_to_vec(&mut output)?;
-            }
+        }
+        DumpFormat::Protobuf => {
+            parsed_flags.write_to_vec(&mut output)?;
+        }
+        DumpFormat::Textproto => {
+            let s = protobuf::text_format::print_to_string_pretty(&parsed_flags);
+            output.extend_from_slice(s.as_bytes());
         }
     }
     Ok(output)
 }
 
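+/// Returns the package name shared by all parsed flags, or None if the flags belong to different
+/// packages (or if there are no flags at all).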
+fn find_unique_package(parsed_flags: &ProtoParsedFlags) -> Option<&str> {
+    let Some(package) = parsed_flags.parsed_flag.first().map(|pf| pf.package()) else {
+        return None;
+    };
+    if parsed_flags.parsed_flag.iter().any(|pf| pf.package() != package) {
+        return None;
+    }
+    Some(package)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::aconfig::{FlagState, Permission};
 
-    fn create_test_cache_ns1() -> Cache {
-        let s = r#"
-        namespace: "ns1"
-        flag {
-            name: "a"
-            description: "Description of a"
-        }
-        flag {
-            name: "b"
-            description: "Description of b"
-        }
-        "#;
-        let declarations = vec![Input { source: Source::Memory, reader: Box::new(s.as_bytes()) }];
-        let o = r#"
-        flag_value {
-            namespace: "ns1"
-            name: "a"
-            state: DISABLED
-            permission: READ_ONLY
-        }
-        "#;
-        let values = vec![Input { source: Source::Memory, reader: Box::new(o.as_bytes()) }];
-        create_cache("ns1", declarations, values).unwrap()
-    }
+    #[test]
+    fn test_parse_flags() {
+        let parsed_flags = crate::test::parse_test_flags(); // calls parse_flags
+        crate::protos::parsed_flags::verify_fields(&parsed_flags).unwrap();
 
-    fn create_test_cache_ns2() -> Cache {
-        let s = r#"
-        namespace: "ns2"
-        flag {
-            name: "c"
-            description: "Description of c"
+        let enabled_ro =
+            parsed_flags.parsed_flag.iter().find(|pf| pf.name() == "enabled_ro").unwrap();
+        assert!(crate::protos::parsed_flag::verify_fields(enabled_ro).is_ok());
+        assert_eq!("com.android.aconfig.test", enabled_ro.package());
+        assert_eq!("enabled_ro", enabled_ro.name());
+        assert_eq!("This flag is ENABLED + READ_ONLY", enabled_ro.description());
+        assert_eq!(ProtoFlagState::ENABLED, enabled_ro.state());
+        assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.permission());
+        assert_eq!(3, enabled_ro.trace.len());
+        assert_eq!("tests/test.aconfig", enabled_ro.trace[0].source());
+        assert_eq!(ProtoFlagState::DISABLED, enabled_ro.trace[0].state());
+        assert_eq!(ProtoFlagPermission::READ_WRITE, enabled_ro.trace[0].permission());
+        assert_eq!("tests/first.values", enabled_ro.trace[1].source());
+        assert_eq!(ProtoFlagState::DISABLED, enabled_ro.trace[1].state());
+        assert_eq!(ProtoFlagPermission::READ_WRITE, enabled_ro.trace[1].permission());
+        assert_eq!("tests/second.values", enabled_ro.trace[2].source());
+        assert_eq!(ProtoFlagState::ENABLED, enabled_ro.trace[2].state());
+        assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.trace[2].permission());
+
+        assert_eq!(4, parsed_flags.parsed_flag.len());
+        for pf in parsed_flags.parsed_flag.iter() {
+            let first = pf.trace.first().unwrap();
+            assert_eq!(DEFAULT_FLAG_STATE, first.state());
+            assert_eq!(DEFAULT_FLAG_PERMISSION, first.permission());
+
+            let last = pf.trace.last().unwrap();
+            assert_eq!(pf.state(), last.state());
+            assert_eq!(pf.permission(), last.permission());
         }
-        "#;
-        let declarations = vec![Input { source: Source::Memory, reader: Box::new(s.as_bytes()) }];
-        let o = r#"
-        flag_value {
-            namespace: "ns2"
-            name: "c"
-            state: DISABLED
-            permission: READ_ONLY
-        }
-        "#;
-        let values = vec![Input { source: Source::Memory, reader: Box::new(o.as_bytes()) }];
-        create_cache("ns2", declarations, values).unwrap()
     }
 
     #[test]
-    fn test_create_cache() {
-        let caches = create_test_cache_ns1(); // calls create_cache
-        let item = caches.iter().find(|&item| item.name == "a").unwrap();
-        assert_eq!(FlagState::Disabled, item.state);
-        assert_eq!(Permission::ReadOnly, item.permission);
+    fn test_create_device_config_defaults() {
+        let input = parse_test_flags_as_input();
+        let bytes = create_device_config_defaults(input).unwrap();
+        let text = std::str::from_utf8(&bytes).unwrap();
+        assert_eq!("aconfig_test:com.android.aconfig.test.disabled_rw=disabled\naconfig_test:com.android.aconfig.test.enabled_rw=enabled\n", text);
+    }
+
+    #[test]
+    fn test_create_device_config_sysprops() {
+        let input = parse_test_flags_as_input();
+        let bytes = create_device_config_sysprops(input).unwrap();
+        let text = std::str::from_utf8(&bytes).unwrap();
+        assert_eq!("persist.device_config.com.android.aconfig.test.disabled_rw=false\npersist.device_config.com.android.aconfig.test.enabled_rw=true\n", text);
     }
 
     #[test]
     fn test_dump_text_format() {
-        let caches = vec![create_test_cache_ns1()];
-        let bytes = dump_cache(caches, DumpFormat::Text).unwrap();
+        let input = parse_test_flags_as_input();
+        let bytes = dump_parsed_flags(vec![input], DumpFormat::Text).unwrap();
         let text = std::str::from_utf8(&bytes).unwrap();
-        assert!(text.contains("a: Disabled"));
+        assert!(text.contains("com.android.aconfig.test/disabled_ro: DISABLED READ_ONLY"));
     }
 
     #[test]
     fn test_dump_protobuf_format() {
-        use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoTracepoint};
-        use protobuf::Message;
+        let expected = protobuf::text_format::parse_from_str::<ProtoParsedFlags>(
+            crate::test::TEST_FLAGS_TEXTPROTO,
+        )
+        .unwrap()
+        .write_to_bytes()
+        .unwrap();
 
-        let caches = vec![create_test_cache_ns1()];
-        let bytes = dump_cache(caches, DumpFormat::Protobuf).unwrap();
-        let actual = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
+        let input = parse_test_flags_as_input();
+        let actual = dump_parsed_flags(vec![input], DumpFormat::Protobuf).unwrap();
 
-        assert_eq!(
-            vec!["a".to_string(), "b".to_string()],
-            actual.parsed_flag.iter().map(|item| item.name.clone().unwrap()).collect::<Vec<_>>()
-        );
-
-        let item =
-            actual.parsed_flag.iter().find(|item| item.name == Some("b".to_string())).unwrap();
-        assert_eq!(item.namespace(), "ns1");
-        assert_eq!(item.name(), "b");
-        assert_eq!(item.description(), "Description of b");
-        assert_eq!(item.state(), ProtoFlagState::DISABLED);
-        assert_eq!(item.permission(), ProtoFlagPermission::READ_WRITE);
-        let mut tp = ProtoTracepoint::new();
-        tp.set_source("<memory>".to_string());
-        tp.set_state(ProtoFlagState::DISABLED);
-        tp.set_permission(ProtoFlagPermission::READ_WRITE);
-        assert_eq!(item.trace, vec![tp]);
+        assert_eq!(expected, actual);
     }
 
     #[test]
-    fn test_dump_multiple_caches() {
-        let caches = vec![create_test_cache_ns1(), create_test_cache_ns2()];
-        let bytes = dump_cache(caches, DumpFormat::Protobuf).unwrap();
-        let dump = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
-        assert_eq!(
-            dump.parsed_flag
-                .iter()
-                .map(|parsed_flag| format!("{}/{}", parsed_flag.namespace(), parsed_flag.name()))
-                .collect::<Vec<_>>(),
-            vec!["ns1/a".to_string(), "ns1/b".to_string(), "ns2/c".to_string()]
-        );
+    fn test_dump_textproto_format() {
+        let input = parse_test_flags_as_input();
+        let bytes = dump_parsed_flags(vec![input], DumpFormat::Textproto).unwrap();
+        let text = std::str::from_utf8(&bytes).unwrap();
+        assert_eq!(crate::test::TEST_FLAGS_TEXTPROTO.trim(), text.trim());
+    }
 
-        let caches = vec![create_test_cache_ns2(), create_test_cache_ns1()];
-        let bytes = dump_cache(caches, DumpFormat::Protobuf).unwrap();
-        let dump_reversed_input = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
-        assert_eq!(dump, dump_reversed_input);
+    fn parse_test_flags_as_input() -> Input {
+        let parsed_flags = crate::test::parse_test_flags();
+        let binary_proto = parsed_flags.write_to_bytes().unwrap();
+        let cursor = std::io::Cursor::new(binary_proto);
+        let reader = Box::new(cursor);
+        Input { source: "test.data".to_string(), reader }
     }
 }
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
index 1d2ec95..72feb94 100644
--- a/tools/aconfig/src/main.rs
+++ b/tools/aconfig/src/main.rs
@@ -16,7 +16,7 @@
 
 //! `aconfig` is a build time tool to manage build time configurations, such as feature flags.
 
-use anyhow::{anyhow, ensure, Result};
+use anyhow::{anyhow, bail, ensure, Result};
 use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command};
 use core::any::Any;
 use std::fs;
@@ -24,8 +24,6 @@
 use std::io::Write;
 use std::path::{Path, PathBuf};
 
-mod aconfig;
-mod cache;
 mod codegen;
 mod codegen_cpp;
 mod codegen_java;
@@ -33,15 +31,17 @@
 mod commands;
 mod protos;
 
-use crate::cache::Cache;
-use commands::{DumpFormat, Input, OutputFile, Source};
+#[cfg(test)]
+mod test;
+
+use commands::{CodegenMode, DumpFormat, Input, OutputFile};
 
 fn cli() -> Command {
     Command::new("aconfig")
         .subcommand_required(true)
         .subcommand(
             Command::new("create-cache")
-                .arg(Arg::new("namespace").long("namespace").required(true))
+                .arg(Arg::new("package").long("package").required(true))
                 .arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
                 .arg(Arg::new("values").long("values").action(ArgAction::Append))
                 .arg(Arg::new("cache").long("cache").required(true)),
@@ -49,12 +49,24 @@
         .subcommand(
             Command::new("create-java-lib")
                 .arg(Arg::new("cache").long("cache").required(true))
-                .arg(Arg::new("out").long("out").required(true)),
+                .arg(Arg::new("out").long("out").required(true))
+                .arg(
+                    Arg::new("mode")
+                        .long("mode")
+                        .value_parser(EnumValueParser::<commands::CodegenMode>::new())
+                        .default_value("production"),
+                ),
         )
         .subcommand(
             Command::new("create-cpp-lib")
                 .arg(Arg::new("cache").long("cache").required(true))
-                .arg(Arg::new("out").long("out").required(true)),
+                .arg(Arg::new("out").long("out").required(true))
+                .arg(
+                    Arg::new("mode")
+                        .long("mode")
+                        .value_parser(EnumValueParser::<commands::CodegenMode>::new())
+                        .default_value("production"),
+                ),
         )
         .subcommand(
             Command::new("create-rust-lib")
@@ -62,6 +74,16 @@
                 .arg(Arg::new("out").long("out").required(true)),
         )
         .subcommand(
+            Command::new("create-device-config-defaults")
+                .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
+                .arg(Arg::new("out").long("out").default_value("-")),
+        )
+        .subcommand(
+            Command::new("create-device-config-sysprops")
+                .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
+                .arg(Arg::new("out").long("out").default_value("-")),
+        )
+        .subcommand(
             Command::new("dump")
                 .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
                 .arg(
@@ -87,11 +109,19 @@
     let mut opened_files = vec![];
     for path in matches.get_many::<String>(arg_name).unwrap_or_default() {
         let file = Box::new(fs::File::open(path)?);
-        opened_files.push(Input { source: Source::File(path.to_string()), reader: file });
+        opened_files.push(Input { source: path.to_string(), reader: file });
     }
     Ok(opened_files)
 }
 
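+/// Opens the file named by the given command line argument and wraps it in an Input; errors if
+/// the argument was not provided.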
+fn open_single_file(matches: &ArgMatches, arg_name: &str) -> Result<Input> {
+    let Some(path) = matches.get_one::<String>(arg_name) else {
+        bail!("missing argument {}", arg_name);
+    };
+    let file = Box::new(fs::File::open(path)?);
+    Ok(Input { source: path.to_string(), reader: file })
+}
+
 fn write_output_file_realtive_to_dir(root: &Path, output_file: &OutputFile) -> Result<()> {
     ensure!(
         root.is_dir(),
@@ -108,58 +138,68 @@
     Ok(())
 }
 
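+/// Writes the data to the file at the given path, or to stdout if the path is "-".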
+fn write_output_to_file_or_stdout(path: &str, data: &[u8]) -> Result<()> {
+    if path == "-" {
+        io::stdout().write_all(data)?;
+    } else {
+        fs::File::create(path)?.write_all(data)?;
+    }
+    Ok(())
+}
+
 fn main() -> Result<()> {
     let matches = cli().get_matches();
     match matches.subcommand() {
         Some(("create-cache", sub_matches)) => {
-            let namespace = get_required_arg::<String>(sub_matches, "namespace")?;
+            let package = get_required_arg::<String>(sub_matches, "package")?;
             let declarations = open_zero_or_more_files(sub_matches, "declarations")?;
             let values = open_zero_or_more_files(sub_matches, "values")?;
-            let cache = commands::create_cache(namespace, declarations, values)?;
+            let output = commands::parse_flags(package, declarations, values)?;
             let path = get_required_arg::<String>(sub_matches, "cache")?;
-            let file = fs::File::create(path)?;
-            cache.write_to_writer(file)?;
+            write_output_to_file_or_stdout(path, &output)?;
         }
         Some(("create-java-lib", sub_matches)) => {
-            let path = get_required_arg::<String>(sub_matches, "cache")?;
-            let file = fs::File::open(path)?;
-            let cache = Cache::read_from_reader(file)?;
+            let cache = open_single_file(sub_matches, "cache")?;
+            let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?;
+            let generated_files = commands::create_java_lib(cache, *mode)?;
             let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
-            let generated_file = commands::create_java_lib(&cache)?;
-            write_output_file_realtive_to_dir(&dir, &generated_file)?;
+            generated_files
+                .iter()
+                .try_for_each(|file| write_output_file_realtive_to_dir(&dir, file))?;
         }
         Some(("create-cpp-lib", sub_matches)) => {
-            let path = get_required_arg::<String>(sub_matches, "cache")?;
-            let file = fs::File::open(path)?;
-            let cache = Cache::read_from_reader(file)?;
+            let cache = open_single_file(sub_matches, "cache")?;
+            let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?;
+            let generated_files = commands::create_cpp_lib(cache, *mode)?;
             let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
-            let generated_file = commands::create_cpp_lib(&cache)?;
-            write_output_file_realtive_to_dir(&dir, &generated_file)?;
+            generated_files
+                .iter()
+                .try_for_each(|file| write_output_file_realtive_to_dir(&dir, file))?;
         }
         Some(("create-rust-lib", sub_matches)) => {
-            let path = get_required_arg::<String>(sub_matches, "cache")?;
-            let file = fs::File::open(path)?;
-            let cache = Cache::read_from_reader(file)?;
+            let cache = open_single_file(sub_matches, "cache")?;
+            let generated_file = commands::create_rust_lib(cache)?;
             let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
-            let generated_file = commands::create_rust_lib(&cache)?;
             write_output_file_realtive_to_dir(&dir, &generated_file)?;
         }
-        Some(("dump", sub_matches)) => {
-            let mut caches = Vec::new();
-            for path in sub_matches.get_many::<String>("cache").unwrap_or_default() {
-                let file = fs::File::open(path)?;
-                let cache = Cache::read_from_reader(file)?;
-                caches.push(cache);
-            }
-            let format = get_required_arg::<DumpFormat>(sub_matches, "format")?;
-            let output = commands::dump_cache(caches, *format)?;
+        Some(("create-device-config-defaults", sub_matches)) => {
+            let cache = open_single_file(sub_matches, "cache")?;
+            let output = commands::create_device_config_defaults(cache)?;
             let path = get_required_arg::<String>(sub_matches, "out")?;
-            let mut file: Box<dyn Write> = if *path == "-" {
-                Box::new(io::stdout())
-            } else {
-                Box::new(fs::File::create(path)?)
-            };
-            file.write_all(&output)?;
+            write_output_to_file_or_stdout(path, &output)?;
+        }
+        Some(("create-device-config-sysprops", sub_matches)) => {
+            let cache = open_single_file(sub_matches, "cache")?;
+            let output = commands::create_device_config_sysprops(cache)?;
+            let path = get_required_arg::<String>(sub_matches, "out")?;
+            write_output_to_file_or_stdout(path, &output)?;
+        }
+        Some(("dump", sub_matches)) => {
+            let input = open_zero_or_more_files(sub_matches, "cache")?;
+            let format = get_required_arg::<DumpFormat>(sub_matches, "format")?;
+            let output = commands::dump_parsed_flags(input, *format)?;
+            let path = get_required_arg::<String>(sub_matches, "out")?;
+            write_output_to_file_or_stdout(path, &output)?;
         }
         _ => unreachable!(),
     }
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/src/protos.rs
index cb75692..a621b87 100644
--- a/tools/aconfig/src/protos.rs
+++ b/tools/aconfig/src/protos.rs
@@ -28,70 +28,743 @@
 
 // ---- When building with the Android tool-chain ----
 #[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Flag_declaration as ProtoFlagDeclaration;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Flag_declarations as ProtoFlagDeclarations;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Flag_value as ProtoFlagValue;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Flag_values as ProtoFlagValues;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Parsed_flag as ProtoParsedFlag;
-
-#[cfg(not(feature = "cargo"))]
-pub use aconfig_protos::aconfig::Tracepoint as ProtoTracepoint;
+mod auto_generated {
+    pub use aconfig_protos::aconfig::Flag_declaration as ProtoFlagDeclaration;
+    pub use aconfig_protos::aconfig::Flag_declarations as ProtoFlagDeclarations;
+    pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
+    pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
+    pub use aconfig_protos::aconfig::Flag_value as ProtoFlagValue;
+    pub use aconfig_protos::aconfig::Flag_values as ProtoFlagValues;
+    pub use aconfig_protos::aconfig::Parsed_flag as ProtoParsedFlag;
+    pub use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
+    pub use aconfig_protos::aconfig::Tracepoint as ProtoTracepoint;
+}
 
 // ---- When building with cargo ----
 #[cfg(feature = "cargo")]
-include!(concat!(env!("OUT_DIR"), "/aconfig_proto/mod.rs"));
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Flag_declaration as ProtoFlagDeclaration;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Flag_declarations as ProtoFlagDeclarations;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Flag_value as ProtoFlagValue;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Flag_values as ProtoFlagValues;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Flag_permission as ProtoFlagPermission;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Flag_state as ProtoFlagState;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Parsed_flags as ProtoParsedFlags;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Parsed_flag as ProtoParsedFlag;
-
-#[cfg(feature = "cargo")]
-pub use aconfig::Tracepoint as ProtoTracepoint;
+mod auto_generated {
+    // include! statements should generally be avoided because they import file contents verbatim,
+    // but since this code path is only used during local development, and only when building with
+    // cargo instead of the Android tool-chain, we allow it here
+    include!(concat!(env!("OUT_DIR"), "/aconfig_proto/mod.rs"));
+    pub use aconfig::Flag_declaration as ProtoFlagDeclaration;
+    pub use aconfig::Flag_declarations as ProtoFlagDeclarations;
+    pub use aconfig::Flag_permission as ProtoFlagPermission;
+    pub use aconfig::Flag_state as ProtoFlagState;
+    pub use aconfig::Flag_value as ProtoFlagValue;
+    pub use aconfig::Flag_values as ProtoFlagValues;
+    pub use aconfig::Parsed_flag as ProtoParsedFlag;
+    pub use aconfig::Parsed_flags as ProtoParsedFlags;
+    pub use aconfig::Tracepoint as ProtoTracepoint;
+}
 
 // ---- Common for both the Android tool-chain and cargo ----
-use anyhow::Result;
+pub use auto_generated::*;
 
-pub fn try_from_text_proto<T>(s: &str) -> Result<T>
+use anyhow::Result;
+use paste::paste;
+
+fn try_from_text_proto<T>(s: &str) -> Result<T>
 where
     T: protobuf::MessageFull,
 {
-    // warning: parse_from_str does not check if required fields are set
     protobuf::text_format::parse_from_str(s).map_err(|e| e.into())
 }
+
+macro_rules! ensure_required_fields {
+    ($type:expr, $struct:expr, $($field:expr),+) => {
+        $(
+        paste! {
+            ensure!($struct.[<has_ $field>](), "bad {}: missing {}", $type, $field);
+        }
+        )+
+    };
+}
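+
+// For illustration only (this comment is not generated by the macro): a call such as
+// `ensure_required_fields!("flag value", fv, "name", "state")` should expand, via paste!, to
+// roughly:
+//
+//     ensure!(fv.has_name(), "bad {}: missing {}", "flag value", "name");
+//     ensure!(fv.has_state(), "bad {}: missing {}", "flag value", "state");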
+
+pub mod flag_declaration {
+    use super::*;
+    use crate::codegen;
+    use anyhow::ensure;
+
+    pub fn verify_fields(pdf: &ProtoFlagDeclaration) -> Result<()> {
+        ensure_required_fields!("flag declaration", pdf, "name", "namespace", "description");
+
+        ensure!(codegen::is_valid_name_ident(pdf.name()), "bad flag declaration: bad name");
+        ensure!(codegen::is_valid_name_ident(pdf.namespace()), "bad flag declaration: bad namespace");
+        ensure!(!pdf.description().is_empty(), "bad flag declaration: empty description");
+
+        // ProtoFlagDeclaration.bug: Vec<String>: may be empty, no checks needed
+
+        Ok(())
+    }
+}
+
+pub mod flag_declarations {
+    use super::*;
+    use crate::codegen;
+    use anyhow::ensure;
+
+    pub fn try_from_text_proto(s: &str) -> Result<ProtoFlagDeclarations> {
+        let pdf: ProtoFlagDeclarations = super::try_from_text_proto(s)?;
+        verify_fields(&pdf)?;
+        Ok(pdf)
+    }
+
+    pub fn verify_fields(pdf: &ProtoFlagDeclarations) -> Result<()> {
+        ensure_required_fields!("flag declarations", pdf, "package");
+
+        ensure!(
+            codegen::is_valid_package_ident(pdf.package()),
+            "bad flag declarations: bad package"
+        );
+        for flag_declaration in pdf.flag.iter() {
+            super::flag_declaration::verify_fields(flag_declaration)?;
+        }
+
+        Ok(())
+    }
+}
+
+pub mod flag_value {
+    use super::*;
+    use crate::codegen;
+    use anyhow::ensure;
+
+    pub fn verify_fields(fv: &ProtoFlagValue) -> Result<()> {
+        ensure_required_fields!("flag value", fv, "package", "name", "state", "permission");
+
+        ensure!(codegen::is_valid_package_ident(fv.package()), "bad flag value: bad package");
+        ensure!(codegen::is_valid_name_ident(fv.name()), "bad flag value: bad name");
+
+        Ok(())
+    }
+}
+
+pub mod flag_values {
+    use super::*;
+
+    pub fn try_from_text_proto(s: &str) -> Result<ProtoFlagValues> {
+        let pfv: ProtoFlagValues = super::try_from_text_proto(s)?;
+        verify_fields(&pfv)?;
+        Ok(pfv)
+    }
+
+    pub fn verify_fields(pfv: &ProtoFlagValues) -> Result<()> {
+        for flag_value in pfv.flag_value.iter() {
+            super::flag_value::verify_fields(flag_value)?;
+        }
+        Ok(())
+    }
+}
+
+pub mod tracepoint {
+    use super::*;
+    use anyhow::ensure;
+
+    pub fn verify_fields(tp: &ProtoTracepoint) -> Result<()> {
+        ensure_required_fields!("tracepoint", tp, "source", "state", "permission");
+
+        ensure!(!tp.source().is_empty(), "bad tracepoint: empty source");
+
+        Ok(())
+    }
+}
+
+pub mod parsed_flag {
+    use super::*;
+    use crate::codegen;
+    use anyhow::ensure;
+
+    pub fn verify_fields(pf: &ProtoParsedFlag) -> Result<()> {
+        ensure_required_fields!(
+            "parsed flag",
+            pf,
+            "package",
+            "name",
+            "namespace",
+            "description",
+            "state",
+            "permission"
+        );
+
+        ensure!(codegen::is_valid_package_ident(pf.package()), "bad parsed flag: bad package");
+        ensure!(codegen::is_valid_name_ident(pf.name()), "bad parsed flag: bad name");
+        ensure!(codegen::is_valid_name_ident(pf.namespace()), "bad parsed flag: bad namespace");
+        ensure!(!pf.description().is_empty(), "bad parsed flag: empty description");
+        ensure!(!pf.trace.is_empty(), "bad parsed flag: empty trace");
+        for tp in pf.trace.iter() {
+            super::tracepoint::verify_fields(tp)?;
+        }
+
+        // ProtoParsedFlag.bug: Vec<String>: may be empty, no checks needed
+
+        Ok(())
+    }
+
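+    /// Returns the source of the first tracepoint, i.e. the path to the file in which the flag
+    /// was declared.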
+    pub fn path_to_declaration(pf: &ProtoParsedFlag) -> &str {
+        debug_assert!(!pf.trace.is_empty());
+        pf.trace[0].source()
+    }
+}
+
+pub mod parsed_flags {
+    use super::*;
+    use anyhow::bail;
+    use std::cmp::Ordering;
+
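+    /// Parses a binary protobuf into a ProtoParsedFlags and verifies the result (required fields,
+    /// sort order, no duplicates).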
+    pub fn try_from_binary_proto(bytes: &[u8]) -> Result<ProtoParsedFlags> {
+        let message: ProtoParsedFlags = protobuf::Message::parse_from_bytes(bytes)?;
+        verify_fields(&message)?;
+        Ok(message)
+    }
+
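+    /// Verifies that the flags are sorted by package and name, contain no duplicates, and that
+    /// every individual flag has all required fields set.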
+    pub fn verify_fields(pf: &ProtoParsedFlags) -> Result<()> {
+        use crate::protos::parsed_flag::path_to_declaration;
+
+        let mut previous: Option<&ProtoParsedFlag> = None;
+        for parsed_flag in pf.parsed_flag.iter() {
+            if let Some(prev) = previous {
+                let a = create_sorting_key(prev);
+                let b = create_sorting_key(parsed_flag);
+                match a.cmp(&b) {
+                    Ordering::Less => {}
+                    Ordering::Equal => bail!(
+                        "bad parsed flags: duplicate flag {} (defined in {} and {})",
+                        a,
+                        path_to_declaration(prev),
+                        path_to_declaration(parsed_flag)
+                    ),
+                    Ordering::Greater => {
+                        bail!("bad parsed flags: not sorted: {} comes before {}", a, b)
+                    }
+                }
+            }
+            super::parsed_flag::verify_fields(parsed_flag)?;
+            previous = Some(parsed_flag);
+        }
+        Ok(())
+    }
+
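+    /// Concatenates several ProtoParsedFlags into one, sorted by package and name; fails if the
+    /// combined result contains duplicates or otherwise invalid flags.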
+    pub fn merge(parsed_flags: Vec<ProtoParsedFlags>) -> Result<ProtoParsedFlags> {
+        let mut merged = ProtoParsedFlags::new();
+        for mut pfs in parsed_flags.into_iter() {
+            merged.parsed_flag.append(&mut pfs.parsed_flag);
+        }
+        merged.parsed_flag.sort_by_cached_key(create_sorting_key);
+        verify_fields(&merged)?;
+        Ok(merged)
+    }
+
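+    /// The key used for sorting and duplicate detection: "<package>.<name>".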
+    fn create_sorting_key(pf: &ProtoParsedFlag) -> String {
+        format!("{}.{}", pf.package(), pf.name())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_flag_declarations_try_from_text_proto() {
+        // valid input
+        let flag_declarations = flag_declarations::try_from_text_proto(
+            r#"
+package: "com.foo.bar"
+flag {
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    bug: "123"
+    bug: "abc"
+}
+flag {
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+}
+"#,
+        )
+        .unwrap();
+        assert_eq!(flag_declarations.package(), "com.foo.bar");
+        let first = flag_declarations.flag.iter().find(|pf| pf.name() == "first").unwrap();
+        assert_eq!(first.name(), "first");
+        assert_eq!(first.namespace(), "first_ns");
+        assert_eq!(first.description(), "This is the description of the first flag.");
+        assert_eq!(first.bug.len(), 2);
+        assert_eq!(first.bug[0], "123");
+        assert_eq!(first.bug[1], "abc");
+        let second = flag_declarations.flag.iter().find(|pf| pf.name() == "second").unwrap();
+        assert_eq!(second.name(), "second");
+        assert_eq!(second.namespace(), "second_ns");
+        assert_eq!(second.description(), "This is the description of the second flag.");
+        assert_eq!(second.bug.len(), 0);
+
+        // bad input: missing package in flag declarations
+        let error = flag_declarations::try_from_text_proto(
+            r#"
+flag {
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+}
+flag {
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+}
+"#,
+        )
+        .unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad flag declarations: missing package");
+
+        // bad input: missing namespace in flag declaration
+        let error = flag_declarations::try_from_text_proto(
+            r#"
+package: "com.foo.bar"
+flag {
+    name: "first"
+    description: "This is the description of the first flag."
+}
+flag {
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+}
+"#,
+        )
+        .unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad flag declaration: missing namespace");
+
+        // bad input: bad package name in flag declarations
+        let error = flag_declarations::try_from_text_proto(
+            r#"
+package: "_com.FOO__BAR"
+flag {
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+}
+flag {
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+}
+"#,
+        )
+        .unwrap_err();
+        assert!(format!("{:?}", error).contains("bad flag declarations: bad package"));
+
+        // bad input: bad name in flag declaration
+        let error = flag_declarations::try_from_text_proto(
+            r#"
+package: "com.foo.bar"
+flag {
+    name: "FIRST"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+}
+flag {
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+}
+"#,
+        )
+        .unwrap_err();
+        assert!(format!("{:?}", error).contains("bad flag declaration: bad name"));
+    }
+
+    #[test]
+    fn test_flag_values_try_from_text_proto() {
+        // valid input
+        let flag_values = flag_values::try_from_text_proto(
+            r#"
+flag_value {
+    package: "com.first"
+    name: "first"
+    state: DISABLED
+    permission: READ_ONLY
+}
+flag_value {
+    package: "com.second"
+    name: "second"
+    state: ENABLED
+    permission: READ_WRITE
+}
+"#,
+        )
+        .unwrap();
+        let first = flag_values.flag_value.iter().find(|fv| fv.name() == "first").unwrap();
+        assert_eq!(first.package(), "com.first");
+        assert_eq!(first.name(), "first");
+        assert_eq!(first.state(), ProtoFlagState::DISABLED);
+        assert_eq!(first.permission(), ProtoFlagPermission::READ_ONLY);
+        let second = flag_values.flag_value.iter().find(|fv| fv.name() == "second").unwrap();
+        assert_eq!(second.package(), "com.second");
+        assert_eq!(second.name(), "second");
+        assert_eq!(second.state(), ProtoFlagState::ENABLED);
+        assert_eq!(second.permission(), ProtoFlagPermission::READ_WRITE);
+
+        // bad input: bad package in flag value
+        let error = flag_values::try_from_text_proto(
+            r#"
+flag_value {
+    package: "COM.FIRST"
+    name: "first"
+    state: DISABLED
+    permission: READ_ONLY
+}
+"#,
+        )
+        .unwrap_err();
+        assert!(format!("{:?}", error).contains("bad flag value: bad package"));
+
+        // bad input: bad name in flag value
+        let error = flag_values::try_from_text_proto(
+            r#"
+flag_value {
+    package: "com.first"
+    name: "FIRST"
+    state: DISABLED
+    permission: READ_ONLY
+}
+"#,
+        )
+        .unwrap_err();
+        assert!(format!("{:?}", error).contains("bad flag value: bad name"));
+
+        // bad input: missing state in flag value
+        let error = flag_values::try_from_text_proto(
+            r#"
+flag_value {
+    package: "com.first"
+    name: "first"
+    permission: READ_ONLY
+}
+"#,
+        )
+        .unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad flag value: missing state");
+
+        // bad input: missing permission in flag value
+        let error = flag_values::try_from_text_proto(
+            r#"
+flag_value {
+    package: "com.first"
+    name: "first"
+    state: DISABLED
+}
+"#,
+        )
+        .unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad flag value: missing permission");
+    }
+
+    fn try_from_binary_proto_from_text_proto(text_proto: &str) -> Result<ProtoParsedFlags> {
+        use protobuf::Message;
+
+        let parsed_flags: ProtoParsedFlags = try_from_text_proto(text_proto)?;
+        let mut binary_proto = Vec::new();
+        parsed_flags.write_to_vec(&mut binary_proto)?;
+        parsed_flags::try_from_binary_proto(&binary_proto)
+    }
+
+    #[test]
+    fn test_parsed_flags_try_from_text_proto() {
+        // valid input
+        let text_proto = r#"
+parsed_flag {
+    package: "com.first"
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+parsed_flag {
+    package: "com.second"
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+    state: ENABLED
+    permission: READ_WRITE
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+    trace {
+        source: "flags.values"
+        state: ENABLED
+        permission: READ_WRITE
+    }
+}
+"#;
+        let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+        assert_eq!(parsed_flags.parsed_flag.len(), 2);
+        let second = parsed_flags.parsed_flag.iter().find(|fv| fv.name() == "second").unwrap();
+        assert_eq!(second.package(), "com.second");
+        assert_eq!(second.name(), "second");
+        assert_eq!(second.namespace(), "second_ns");
+        assert_eq!(second.description(), "This is the description of the second flag.");
+        assert_eq!(second.state(), ProtoFlagState::ENABLED);
+        assert_eq!(second.permission(), ProtoFlagPermission::READ_WRITE);
+        assert_eq!(2, second.trace.len());
+        assert_eq!(second.trace[0].source(), "flags.declarations");
+        assert_eq!(second.trace[0].state(), ProtoFlagState::DISABLED);
+        assert_eq!(second.trace[0].permission(), ProtoFlagPermission::READ_ONLY);
+        assert_eq!(second.trace[1].source(), "flags.values");
+        assert_eq!(second.trace[1].state(), ProtoFlagState::ENABLED);
+        assert_eq!(second.trace[1].permission(), ProtoFlagPermission::READ_WRITE);
+
+        // valid input: empty
+        let parsed_flags = try_from_binary_proto_from_text_proto("").unwrap();
+        assert!(parsed_flags.parsed_flag.is_empty());
+
+        // bad input: empty trace
+        let text_proto = r#"
+parsed_flag {
+    package: "com.first"
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+}
+"#;
+        let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad parsed flag: empty trace");
+
+        // bad input: missing namespace in parsed_flag
+        let text_proto = r#"
+parsed_flag {
+    package: "com.first"
+    name: "first"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad parsed flag: missing namespace");
+
+        // bad input: parsed_flag not sorted by package
+        let text_proto = r#"
+parsed_flag {
+    package: "bbb.bbb"
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+parsed_flag {
+    package: "aaa.aaa"
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+    state: ENABLED
+    permission: READ_WRITE
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
+        assert_eq!(
+            format!("{:?}", error),
+            "bad parsed flags: not sorted: bbb.bbb.first comes before aaa.aaa.second"
+        );
+
+        // bad input: parsed_flag not sorted by name
+        let text_proto = r#"
+parsed_flag {
+    package: "com.foo"
+    name: "bbb"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+parsed_flag {
+    package: "com.foo"
+    name: "aaa"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+    state: ENABLED
+    permission: READ_WRITE
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
+        assert_eq!(
+            format!("{:?}", error),
+            "bad parsed flags: not sorted: com.foo.bbb comes before com.foo.aaa"
+        );
+
+        // bad input: duplicate flags
+        let text_proto = r#"
+parsed_flag {
+    package: "com.foo"
+    name: "bar"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+parsed_flag {
+    package: "com.foo"
+    name: "bar"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+    state: ENABLED
+    permission: READ_WRITE
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.foo.bar (defined in flags.declarations and flags.declarations)");
+    }
+
+    #[test]
+    fn test_parsed_flag_path_to_declaration() {
+        let text_proto = r#"
+parsed_flag {
+    package: "com.foo"
+    name: "bar"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+    trace {
+        source: "flags.values"
+        state: ENABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+        let parsed_flag = &parsed_flags.parsed_flag[0];
+        assert_eq!(
+            crate::protos::parsed_flag::path_to_declaration(parsed_flag),
+            "flags.declarations"
+        );
+    }
+
+    #[test]
+    fn test_parsed_flags_merge() {
+        let text_proto = r#"
+parsed_flag {
+    package: "com.first"
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+parsed_flag {
+    package: "com.second"
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+    state: ENABLED
+    permission: READ_WRITE
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let expected = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+
+        let text_proto = r#"
+parsed_flag {
+    package: "com.first"
+    name: "first"
+    namespace: "first_ns"
+    description: "This is the description of the first flag."
+    state: DISABLED
+    permission: READ_ONLY
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let first = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+
+        let text_proto = r#"
+parsed_flag {
+    package: "com.second"
+    name: "second"
+    namespace: "second_ns"
+    description: "This is the description of the second flag."
+    state: ENABLED
+    permission: READ_WRITE
+    trace {
+        source: "flags.declarations"
+        state: DISABLED
+        permission: READ_ONLY
+    }
+}
+"#;
+        let second = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+
+        // bad cases
+        let error = parsed_flags::merge(vec![first.clone(), first.clone()]).unwrap_err();
+        assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.first.first (defined in flags.declarations and flags.declarations)");
+
+        // valid cases
+        assert!(parsed_flags::merge(vec![]).unwrap().parsed_flag.is_empty());
+        assert_eq!(first, parsed_flags::merge(vec![first.clone()]).unwrap());
+        assert_eq!(expected, parsed_flags::merge(vec![first.clone(), second.clone()]).unwrap());
+        assert_eq!(expected, parsed_flags::merge(vec![second, first]).unwrap());
+    }
+}
diff --git a/tools/aconfig/src/test.rs b/tools/aconfig/src/test.rs
new file mode 100644
index 0000000..04bbe28
--- /dev/null
+++ b/tools/aconfig/src/test.rs
@@ -0,0 +1,176 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#[cfg(test)]
+pub mod test_utils {
+    use crate::commands::Input;
+    use crate::protos::ProtoParsedFlags;
+    use itertools;
+
+    pub const TEST_PACKAGE: &str = "com.android.aconfig.test";
+
+    pub const TEST_FLAGS_TEXTPROTO: &str = r#"
+parsed_flag {
+  package: "com.android.aconfig.test"
+  name: "disabled_ro"
+  namespace: "aconfig_test"
+  description: "This flag is DISABLED + READ_ONLY"
+  bug: "123"
+  state: DISABLED
+  permission: READ_ONLY
+  trace {
+    source: "tests/test.aconfig"
+    state: DISABLED
+    permission: READ_WRITE
+  }
+  trace {
+    source: "tests/first.values"
+    state: DISABLED
+    permission: READ_ONLY
+  }
+}
+parsed_flag {
+  package: "com.android.aconfig.test"
+  name: "disabled_rw"
+  namespace: "aconfig_test"
+  description: "This flag is DISABLED + READ_WRITE"
+  bug: "456"
+  state: DISABLED
+  permission: READ_WRITE
+  trace {
+    source: "tests/test.aconfig"
+    state: DISABLED
+    permission: READ_WRITE
+  }
+}
+parsed_flag {
+  package: "com.android.aconfig.test"
+  name: "enabled_ro"
+  namespace: "aconfig_test"
+  description: "This flag is ENABLED + READ_ONLY"
+  bug: "789"
+  bug: "abc"
+  state: ENABLED
+  permission: READ_ONLY
+  trace {
+    source: "tests/test.aconfig"
+    state: DISABLED
+    permission: READ_WRITE
+  }
+  trace {
+    source: "tests/first.values"
+    state: DISABLED
+    permission: READ_WRITE
+  }
+  trace {
+    source: "tests/second.values"
+    state: ENABLED
+    permission: READ_ONLY
+  }
+}
+parsed_flag {
+  package: "com.android.aconfig.test"
+  name: "enabled_rw"
+  namespace: "aconfig_test"
+  description: "This flag is ENABLED + READ_WRITE"
+  state: ENABLED
+  permission: READ_WRITE
+  trace {
+    source: "tests/test.aconfig"
+    state: DISABLED
+    permission: READ_WRITE
+  }
+  trace {
+    source: "tests/first.values"
+    state: ENABLED
+    permission: READ_WRITE
+  }
+}
+"#;
+
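+    /// Parses the checked-in test flag declarations and value files into a ProtoParsedFlags,
+    /// panicking on any error.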
+    pub fn parse_test_flags() -> ProtoParsedFlags {
+        let bytes = crate::commands::parse_flags(
+            "com.android.aconfig.test",
+            vec![Input {
+                source: "tests/test.aconfig".to_string(),
+                reader: Box::new(include_bytes!("../tests/test.aconfig").as_slice()),
+            }],
+            vec![
+                Input {
+                    source: "tests/first.values".to_string(),
+                    reader: Box::new(include_bytes!("../tests/first.values").as_slice()),
+                },
+                Input {
+                    source: "tests/second.values".to_string(),
+                    reader: Box::new(include_bytes!("../tests/second.values").as_slice()),
+                },
+            ],
+        )
+        .unwrap();
+        crate::protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
+    }
+
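+    /// Compares two pieces of generated code line by line, ignoring leading whitespace and blank
+    /// lines; returns a description of the first difference, or None if they are equivalent.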
+    pub fn first_significant_code_diff(a: &str, b: &str) -> Option<String> {
+        let a = a.lines().map(|line| line.trim_start()).filter(|line| !line.is_empty());
+        let b = b.lines().map(|line| line.trim_start()).filter(|line| !line.is_empty());
+        match itertools::diff_with(a, b, |left, right| left == right) {
+            Some(itertools::Diff::FirstMismatch(_, mut left, mut right)) => {
+                Some(format!("'{}' vs '{}'", left.next().unwrap(), right.next().unwrap()))
+            }
+            Some(itertools::Diff::Shorter(_, mut left)) => {
+                Some(format!("LHS trailing data: '{}'", left.next().unwrap()))
+            }
+            Some(itertools::Diff::Longer(_, mut right)) => {
+                Some(format!("RHS trailing data: '{}'", right.next().unwrap()))
+            }
+            None => None,
+        }
+    }
+
+    #[test]
+    fn test_first_significant_code_diff() {
+        assert!(first_significant_code_diff("", "").is_none());
+        assert!(first_significant_code_diff("   a", "\n\na\n").is_none());
+        let a = r#"
+        public class A {
+            private static final String FOO = "FOO";
+            public static void main(String[] args) {
+                System.out.println("FOO=" + FOO);
+            }
+        }
+        "#;
+        let b = r#"
+        public class A {
+            private static final String FOO = "BAR";
+            public static void main(String[] args) {
+                System.out.println("foo=" + FOO);
+            }
+        }
+        "#;
+        assert_eq!(Some(r#"'private static final String FOO = "FOO";' vs 'private static final String FOO = "BAR";'"#.to_string()), first_significant_code_diff(a, b));
+        assert_eq!(
+            Some("LHS trailing data: 'b'".to_string()),
+            first_significant_code_diff("a\nb", "a")
+        );
+        assert_eq!(
+            Some("RHS trailing data: 'b'".to_string()),
+            first_significant_code_diff("a", "a\nb")
+        );
+    }
+}
+
+#[cfg(test)]
+pub use test_utils::*;
diff --git a/tools/aconfig/templates/FeatureFlags.java.template b/tools/aconfig/templates/FeatureFlags.java.template
new file mode 100644
index 0000000..e0f201f
--- /dev/null
+++ b/tools/aconfig/templates/FeatureFlags.java.template
@@ -0,0 +1,7 @@
+package {package_name};
+
+public interface FeatureFlags \{
+{{ for item in class_elements}}
+    boolean {item.method_name}();
+{{ endfor }}
+}
diff --git a/tools/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/templates/FeatureFlagsImpl.java.template
new file mode 100644
index 0000000..082d476
--- /dev/null
+++ b/tools/aconfig/templates/FeatureFlagsImpl.java.template
@@ -0,0 +1,65 @@
+package {package_name};
+{{ -if is_test_mode }}
+import static java.util.stream.Collectors.toMap;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.Stream;
+{{ else}}
+{{ if is_read_write- }}
+import android.provider.DeviceConfig;
+{{ -endif- }}
+{{ endif }}
+public final class FeatureFlagsImpl implements FeatureFlags \{
+{{ for item in class_elements}}
+    @Override
+    public boolean {item.method_name}() \{
+        {{ -if not is_test_mode- }}
+        {{ if item.is_read_write }}
+        return DeviceConfig.getBoolean(
+            "{item.device_config_namespace}",
+            "{item.device_config_flag}",
+            {item.default_value}
+        );
+        {{ -else }}
+        return {item.default_value};
+        {{ -endif- }}
+        {{ else }}
+        return getFlag(Flags.FLAG_{item.flag_name_constant_suffix});
+        {{ -endif }}
+    }
+{{ endfor- }}
+{{ if is_test_mode }}
+    public void setFlag(String flagName, boolean value) \{
+        if (!this.mFlagMap.containsKey(flagName)) \{
+            throw new IllegalArgumentException("no such flag " + flagName);
+        }
+        this.mFlagMap.put(flagName, value);
+    }
+
+    public void resetAll() \{
+        for (Map.Entry entry : mFlagMap.entrySet()) \{
+            entry.setValue(null);
+        }
+    }
+
+    private boolean getFlag(String flagName) \{
+        Boolean value = this.mFlagMap.get(flagName);
+        if (value == null) \{
+            throw new IllegalArgumentException(flagName + " is not set");
+        }
+        return value;
+    }
+
+    private HashMap<String, Boolean> mFlagMap = Stream.of(
+            {{-for item in class_elements}}
+            Flags.FLAG_{item.flag_name_constant_suffix}{{ if not @last }},{{ endif }}
+            {{ -endfor }}
+        )
+        .collect(
+            HashMap::new,
+            (map, elem) -> map.put(elem, null),
+            HashMap::putAll
+        );
+{{ -endif }}
+}
diff --git a/tools/aconfig/templates/Flags.java.template b/tools/aconfig/templates/Flags.java.template
new file mode 100644
index 0000000..c244b15
--- /dev/null
+++ b/tools/aconfig/templates/Flags.java.template
@@ -0,0 +1,23 @@
+package {package_name};
+
+public final class Flags \{
+{{- for item in class_elements}}
+    public static final String FLAG_{item.flag_name_constant_suffix} = "{item.device_config_flag}";
+{{- endfor }}
+{{ for item in class_elements}}
+    public static boolean {item.method_name}() \{
+        return FEATURE_FLAGS.{item.method_name}();
+    }
+{{ endfor }}
+{{ -if is_test_mode }}
+    public static void setFeatureFlagsImpl(FeatureFlags featureFlags) \{
+        Flags.FEATURE_FLAGS = featureFlags;
+    }
+
+    public static void unsetFeatureFlagsImpl() \{
+        Flags.FEATURE_FLAGS = null;
+    }
+{{ endif}}
+    private static FeatureFlags FEATURE_FLAGS{{ -if not is_test_mode }} = new FeatureFlagsImpl(){{ -endif- }};
+
+}
diff --git a/tools/aconfig/templates/cpp.template b/tools/aconfig/templates/cpp.template
deleted file mode 100644
index ae8b59f..0000000
--- a/tools/aconfig/templates/cpp.template
+++ /dev/null
@@ -1,25 +0,0 @@
-#ifndef {namespace}_HEADER_H
-#define {namespace}_HEADER_H
-#include "{namespace}.h"
-{{ if readwrite }}
-#include <server_configurable_flags/get_flags.h>
-using namespace server_configurable_flags;
-{{ endif }}
-namespace {namespace} \{
-    {{ for item in class_elements}}
-    class {item.flag_name} \{
-        public:
-            virtual const bool value() \{
-                {{ if item.readwrite- }}
-                return GetServerConfigurableFlag(
-                    "{namespace}",
-                    "{item.flag_name}",
-                    "{item.default_value}") == "true";
-                {{ -else- }}
-                return {item.default_value};
-                {{ -endif }}
-            }
-    }
-    {{ endfor }}
-}
-#endif
diff --git a/tools/aconfig/templates/cpp_exported_header.template b/tools/aconfig/templates/cpp_exported_header.template
new file mode 100644
index 0000000..e244de3
--- /dev/null
+++ b/tools/aconfig/templates/cpp_exported_header.template
@@ -0,0 +1,48 @@
+#ifndef {header}_HEADER_H
+#define {header}_HEADER_H
+
+#include <string>
+#include <memory>
+{{ if readwrite }}
+#include <server_configurable_flags/get_flags.h>
+using namespace server_configurable_flags;
+{{ endif }}
+namespace {cpp_namespace} \{
+
+class flag_provider_interface \{
+public:
+    virtual ~flag_provider_interface() = default;
+    {{ for item in class_elements}}
+    virtual bool {item.flag_name}() = 0;
+    {{ endfor }}
+    virtual void override_flag(std::string const&, bool) \{}
+
+    virtual void reset_overrides() \{}
+};
+
+extern std::unique_ptr<flag_provider_interface> provider_;
+{{ for item in class_elements}}
+extern std::string const {item.uppercase_flag_name};{{ endfor }}
+{{ for item in class_elements}}
+inline bool {item.flag_name}() \{
+    {{ if for_prod }}
+    {{ if not item.readwrite- }}
+    return {item.default_value};
+    {{ -else- }}
+    return provider_->{item.flag_name}();
+    {{ -endif }}
+    {{ -else- }}
+    return provider_->{item.flag_name}();
+    {{ -endif }}
+}
+{{ endfor }}
+inline void override_flag(std::string const& name, bool val) \{
+    return provider_->override_flag(name, val);
+}
+
+inline void reset_overrides() \{
+    return provider_->reset_overrides();
+}
+
+}
+#endif
diff --git a/tools/aconfig/templates/cpp_prod_flag_provider.template b/tools/aconfig/templates/cpp_prod_flag_provider.template
new file mode 100644
index 0000000..c966ed4
--- /dev/null
+++ b/tools/aconfig/templates/cpp_prod_flag_provider.template
@@ -0,0 +1,22 @@
+#ifndef {header}_flag_provider_HEADER_H
+#define {header}_flag_provider_HEADER_H
+#include "{header}.h"
+
+namespace {cpp_namespace} \{
+class flag_provider : public flag_provider_interface \{
+public:
+    {{ for item in class_elements}}
+    virtual bool {item.flag_name}() override \{
+        {{ if item.readwrite- }}
+        return GetServerConfigurableFlag(
+            "{item.device_config_namespace}",
+            "{item.device_config_flag}",
+            "{item.default_value}") == "true";
+        {{ -else- }}
+            return {item.default_value};
+        {{ -endif }}
+    }
+    {{ endfor }}
+};
+}
+#endif
diff --git a/tools/aconfig/templates/cpp_source_file.template b/tools/aconfig/templates/cpp_source_file.template
new file mode 100644
index 0000000..1b4f336
--- /dev/null
+++ b/tools/aconfig/templates/cpp_source_file.template
@@ -0,0 +1,10 @@
+
+#include "{header}.h"
+#include "{header}_flag_provider.h"
+
+namespace {cpp_namespace} \{
+{{ for item in class_elements}}
+std::string const {item.uppercase_flag_name} = "{item.device_config_flag}";{{ endfor }}
+std::unique_ptr<flag_provider_interface> provider_ =
+    std::make_unique<flag_provider>();
+}
diff --git a/tools/aconfig/templates/cpp_test_flag_provider.template b/tools/aconfig/templates/cpp_test_flag_provider.template
new file mode 100644
index 0000000..bd597e7
--- /dev/null
+++ b/tools/aconfig/templates/cpp_test_flag_provider.template
@@ -0,0 +1,49 @@
+#ifndef {header}_flag_provider_HEADER_H
+#define {header}_flag_provider_HEADER_H
+#include "{header}.h"
+
+#include <unordered_map>
+#include <unordered_set>
+#include <cassert>
+
+namespace {cpp_namespace} \{
+class flag_provider : public flag_provider_interface \{
+private:
+    std::unordered_map<std::string, bool> overrides_;
+    std::unordered_set<std::string> flag_names_;
+
+public:
+    flag_provider()
+        : overrides_(),
+        flag_names_() \{
+        {{ for item in class_elements}}
+        flag_names_.insert({item.uppercase_flag_name});{{ endfor }}
+    }
+    {{ for item in class_elements}}
+    virtual bool {item.flag_name}() override \{
+        auto it = overrides_.find({item.uppercase_flag_name});
+	      if (it != overrides_.end()) \{
+	          return it->second;
+        } else \{
+          {{ if item.readwrite- }}
+          return GetServerConfigurableFlag(
+              "{item.device_config_namespace}",
+              "{item.device_config_flag}",
+              "{item.default_value}") == "true";
+          {{ -else- }}
+              return {item.default_value};
+          {{ -endif }}
+        }
+    }
+    {{ endfor }}
+    virtual void override_flag(std::string const& flag, bool val) override \{
+        assert(flag_names_.count(flag));
+        overrides_[flag] = val;
+    }
+
+    virtual void reset_overrides() override \{
+        overrides_.clear();
+    }
+};
+}
+#endif
diff --git a/tools/aconfig/templates/java.template b/tools/aconfig/templates/java.template
deleted file mode 100644
index 30c7ad7..0000000
--- a/tools/aconfig/templates/java.template
+++ /dev/null
@@ -1,19 +0,0 @@
-package aconfig.{namespace};
-{{ if readwrite }}
-import android.provider.DeviceConfig;
-{{ endif }}
-public final class Flags \{
-    {{ for item in class_elements}}
-    public static boolean {item.method_name}() \{
-        {{ if item.readwrite- }}
-        return DeviceConfig.getBoolean(
-            "{namespace}",
-            "{item.feature_name}__{item.flag_name}",
-            {item.default_value}
-        );
-        {{ -else- }}
-        return {item.default_value};
-        {{ -endif }}
-    }
-    {{ endfor }}
-}
diff --git a/tools/aconfig/templates/rust.template b/tools/aconfig/templates/rust.template
index d7f4e8d..960c494 100644
--- a/tools/aconfig/templates/rust.template
+++ b/tools/aconfig/templates/rust.template
@@ -1,23 +1,29 @@
-{{- for parsed_flag in parsed_flags -}}
-{{- if parsed_flag.is_read_only_disabled -}}
+{{- for mod in modules -}}
+pub mod {mod} \{
+{{ endfor -}}
+{{- for flag in template_flags -}}
+{{- if flag.is_read_only_disabled -}}
 #[inline(always)]
-pub const fn r#{parsed_flag.fn_name}() -> bool \{
+pub const fn r#{flag.name}() -> bool \{
     false
 }
 
 {{ endif -}}
-{{- if parsed_flag.is_read_only_enabled -}}
+{{- if flag.is_read_only_enabled -}}
 #[inline(always)]
-pub const fn r#{parsed_flag.fn_name}() -> bool \{
+pub const fn r#{flag.name}() -> bool \{
     true
 }
 
 {{ endif -}}
-{{- if parsed_flag.is_read_write -}}
+{{- if flag.is_read_write -}}
 #[inline(always)]
-pub fn r#{parsed_flag.fn_name}() -> bool \{
-    flags_rust::GetServerConfigurableFlag("{namespace}", "{parsed_flag.name}", "false") == "true"
+pub fn r#{flag.name}() -> bool \{
+    flags_rust::GetServerConfigurableFlag("{flag.device_config_namespace}", "{flag.device_config_flag}", "false") == "true"
 }
 
 {{ endif -}}
 {{- endfor -}}
+{{- for mod in modules -}}
+}
+{{ endfor -}}
diff --git a/tools/aconfig/tests/AconfigTest.java b/tools/aconfig/tests/AconfigTest.java
new file mode 100644
index 0000000..778a4c6
--- /dev/null
+++ b/tools/aconfig/tests/AconfigTest.java
@@ -0,0 +1,37 @@
+import static com.android.aconfig.test.Flags.disabledRo;
+import static com.android.aconfig.test.Flags.disabledRw;
+import static com.android.aconfig.test.Flags.enabledRo;
+import static com.android.aconfig.test.Flags.enabledRw;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public final class AconfigTest {
+    @Test
+    public void testDisabledReadOnlyFlag() {
+        assertFalse(disabledRo());
+    }
+
+    @Test
+    public void testEnabledReadOnlyFlag() {
+        // TODO: change to assertTrue(enabledRo()) when the build supports reading tests/*.values
+        // (currently all flags are assigned the default READ_ONLY + DISABLED)
+        assertFalse(enabledRo());
+    }
+
+    @Test
+    public void testDisabledReadWriteFlag() {
+        assertFalse(disabledRw());
+    }
+
+    @Test
+    public void testEnabledReadWriteFlag() {
+        // TODO: change to assertTrue(enabledRw()) when the build supports reading tests/*.values
+        // (currently all flags are assigned the default READ_ONLY + DISABLED)
+        assertFalse(enabledRw());
+    }
+}
diff --git a/tools/aconfig/tests/AndroidManifest.xml b/tools/aconfig/tests/AndroidManifest.xml
new file mode 100644
index 0000000..04002e6
--- /dev/null
+++ b/tools/aconfig/tests/AndroidManifest.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="aconfig.test.java">
+
+    <uses-permission android:name="android.permission.READ_DEVICE_CONFIG" />
+
+    <application>
+        <uses-library android:name="android.test.runner"/>
+    </application>
+
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+        android:targetPackage="aconfig.test.java"
+        android:label="aconfig integration tests (java)" />
+</manifest>
diff --git a/tools/aconfig/testdata/first.values b/tools/aconfig/tests/first.values
similarity index 67%
rename from tools/aconfig/testdata/first.values
rename to tools/aconfig/tests/first.values
index 3c49111..e524404 100644
--- a/tools/aconfig/testdata/first.values
+++ b/tools/aconfig/tests/first.values
@@ -1,17 +1,17 @@
 flag_value {
-    namespace: "test"
+    package: "com.android.aconfig.test"
     name: "disabled_ro"
     state: DISABLED
     permission: READ_ONLY
 }
 flag_value {
-    namespace: "test"
+    package: "com.android.aconfig.test"
     name: "enabled_ro"
     state: DISABLED
     permission: READ_WRITE
 }
 flag_value {
-    namespace: "test"
+    package: "com.android.aconfig.test"
     name: "enabled_rw"
     state: ENABLED
     permission: READ_WRITE
diff --git a/tools/aconfig/testdata/second.values b/tools/aconfig/tests/second.values
similarity index 67%
rename from tools/aconfig/testdata/second.values
rename to tools/aconfig/tests/second.values
index 3fe11ab..aa09cf6 100644
--- a/tools/aconfig/testdata/second.values
+++ b/tools/aconfig/tests/second.values
@@ -1,5 +1,5 @@
 flag_value {
-    namespace: "test"
+    package: "com.android.aconfig.test"
     name: "enabled_ro"
     state: ENABLED
     permission: READ_ONLY
diff --git a/tools/aconfig/testdata/test.aconfig b/tools/aconfig/tests/test.aconfig
similarity index 77%
rename from tools/aconfig/testdata/test.aconfig
rename to tools/aconfig/tests/test.aconfig
index 986a526..a8f6652 100644
--- a/tools/aconfig/testdata/test.aconfig
+++ b/tools/aconfig/tests/test.aconfig
@@ -1,18 +1,22 @@
-namespace: "test"
+package: "com.android.aconfig.test"
 
 # This flag's final value is calculated from:
 # - test.aconfig: DISABLED + READ_WRITE (default)
 # - first.values: DISABLED + READ_ONLY
 flag {
     name: "disabled_ro"
+    namespace: "aconfig_test"
     description: "This flag is DISABLED + READ_ONLY"
+    bug: "123"
 }
 
 # This flag's final value is calculated from:
 # - test.aconfig: DISABLED + READ_WRITE (default)
 flag {
     name: "disabled_rw"
+    namespace: "aconfig_test"
     description: "This flag is DISABLED + READ_WRITE"
+    bug: "456"
 }
 
 # This flag's final value is calculated from:
@@ -21,7 +25,10 @@
 # - second.values: ENABLED + READ_ONLY
 flag {
     name: "enabled_ro"
+    namespace: "aconfig_test"
     description: "This flag is ENABLED + READ_ONLY"
+    bug: "789"
+    bug: "abc"
 }
 
 # This flag's final value is calculated from:
@@ -29,5 +36,7 @@
 # - first.values: ENABLED + READ_WRITE
 flag {
     name: "enabled_rw"
+    namespace: "aconfig_test"
     description: "This flag is ENABLED + READ_WRITE"
+    # no bug field: bug is not mandatory
 }
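
The comments in test.aconfig describe the resolution order: each flag starts from the .aconfig default (DISABLED + READ_WRITE) and the .values files applied afterwards override state and permission in order. A minimal Python sketch of that resolution, using the test data above purely as an illustration (aconfig itself is implemented in Rust):

    # Sketch of flag-value resolution as described in the comments above.
    DEFAULT = {"state": "DISABLED", "permission": "READ_WRITE"}

    def resolve(flag_name, value_files):
        """value_files is an ordered list of dicts keyed by flag name."""
        final = dict(DEFAULT)
        for values in value_files:  # e.g. first.values, then second.values
            final.update(values.get(flag_name, {}))
        return final

    first_values = {
        "disabled_ro": {"state": "DISABLED", "permission": "READ_ONLY"},
        "enabled_ro": {"state": "DISABLED", "permission": "READ_WRITE"},
        "enabled_rw": {"state": "ENABLED", "permission": "READ_WRITE"},
    }
    second_values = {"enabled_ro": {"state": "ENABLED", "permission": "READ_ONLY"}}

    print(resolve("enabled_ro", [first_values, second_values]))
    # {'state': 'ENABLED', 'permission': 'READ_ONLY'}
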
diff --git a/tools/compliance/cmd/rtrace/rtrace.go b/tools/compliance/cmd/rtrace/rtrace.go
index 667cdce..3e7e69b 100644
--- a/tools/compliance/cmd/rtrace/rtrace.go
+++ b/tools/compliance/cmd/rtrace/rtrace.go
@@ -93,17 +93,17 @@
 	flags.Usage = func() {
 		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
 
-Outputs a space-separated Target ActsOn Origin Condition tuple for each
-resolution in the graph. When -dot flag given, outputs nodes and edges
-in graphviz directed graph format.
+Calculates the source-sharing requirements in reverse starting at the
+-rtrace projects or metadata files that inherited source-sharing and
+working back to the targets where the source-sharing requirements
+originate.
 
-If one or more '-c condition' conditions are given, outputs the
-resolution for the union of the conditions. Otherwise, outputs the
-resolution for all conditions.
+Outputs a space-separated pair where the first field is an originating
+target with one or more restricted conditions and where the second
+field is a colon-separated list of the restricted conditions.
 
-In plain text mode, when '-label_conditions' is requested, the Target
-and Origin have colon-separated license conditions appended:
-i.e. target:condition1:condition2 etc.
+Outputs a count of the originating targets, and if the count is zero,
+outputs a warning to check the -rtrace projects and/or filenames.
 
 Options:
 `, filepath.Base(os.Args[0]))
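
The rewritten usage text describes the output as one space-separated pair per originating target, with the restricted conditions joined by colons. A tiny, purely illustrative snippet (made-up paths and condition names) showing how such lines could be split back apart:

    # Split the documented "target condition1:condition2" pairs; sample data
    # is hypothetical, not real rtrace output.
    lines = [
        "out/.../libfoo.so.meta_lic restricted",
        "out/.../bar.meta_lic restricted:weakly_restricted",
    ]
    for line in lines:
        target, conditions = line.split(" ", 1)
        print(target, "->", conditions.split(":"))
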
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
index f61289e..a53741f 100644
--- a/tools/compliance/cmd/sbom/sbom.go
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -35,7 +35,7 @@
 	"github.com/google/blueprint/deptools"
 
 	"github.com/spdx/tools-golang/builder/builder2v2"
-	"github.com/spdx/tools-golang/json"
+	spdx_json "github.com/spdx/tools-golang/json"
 	"github.com/spdx/tools-golang/spdx/common"
 	spdx "github.com/spdx/tools-golang/spdx/v2_2"
 	"github.com/spdx/tools-golang/spdxlib"
@@ -274,7 +274,7 @@
 	tn *compliance.TargetNode) (*projectmetadata.ProjectMetadata, error) {
 	pms, err := pmix.MetadataForProjects(tn.Projects()...)
 	if err != nil {
-		return nil, fmt.Errorf("Unable to read projects for %q: %w\n", tn, err)
+		return nil, fmt.Errorf("Unable to read projects for %q: %w\n", tn.Name(), err)
 	}
 	if len(pms) == 0 {
 		return nil, nil
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
index 8a62713..13ba66d 100644
--- a/tools/compliance/cmd/sbom/sbom_test.go
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -25,6 +25,7 @@
 	"time"
 
 	"android/soong/tools/compliance"
+
 	"github.com/spdx/tools-golang/builder/builder2v2"
 	"github.com/spdx/tools-golang/spdx/common"
 	spdx "github.com/spdx/tools-golang/spdx/v2_2"
@@ -2375,8 +2376,8 @@
 	if doc.DocumentName == "" {
 		return fmt.Errorf("DocumentName: got nothing, want Document Name")
 	}
-	if fmt.Sprintf("%v", doc.CreationInfo.Creators[1].Creator) != "Google LLC" {
-		return fmt.Errorf("Creator: got %v, want  'Google LLC'")
+	if c := fmt.Sprintf("%v", doc.CreationInfo.Creators[1].Creator); c != "Google LLC" {
+		return fmt.Errorf("Creator: got %v, want  'Google LLC'", c)
 	}
 	_, err := time.Parse(time.RFC3339, doc.CreationInfo.Created)
 	if err != nil {
diff --git a/tools/compliance/go.mod b/tools/compliance/go.mod
index 088915a..1928189 100644
--- a/tools/compliance/go.mod
+++ b/tools/compliance/go.mod
@@ -7,8 +7,11 @@
 require (
 	android/soong v0.0.0
 	github.com/google/blueprint v0.0.0
+	github.com/spdx/tools-golang v0.0.0
 )
 
+replace github.com/spdx/tools-golang v0.0.0 => ../../../../external/spdx-tools
+
 require golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
 
 replace android/soong v0.0.0 => ../../../soong
diff --git a/tools/compliance/projectmetadata/projectmetadata.go b/tools/compliance/projectmetadata/projectmetadata.go
index b137a12..30a6325 100644
--- a/tools/compliance/projectmetadata/projectmetadata.go
+++ b/tools/compliance/projectmetadata/projectmetadata.go
@@ -63,12 +63,12 @@
 	return pm.project
 }
 
-// ProjectName returns the name of the project.
+// Name returns the name of the project.
 func (pm *ProjectMetadata) Name() string {
 	return pm.proto.GetName()
 }
 
-// ProjectVersion returns the version of the project if available.
+// Version returns the version of the project if available.
 func (pm *ProjectMetadata) Version() string {
 	tp := pm.proto.GetThirdParty()
 	if tp != nil {
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
index fa33986..6d13325 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -5,14 +5,14 @@
 function apply_droidstubs_hack() {
     if ! grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
         local build_soong_git_root="$(readlink -f $top/build/soong)"
-        git -C "$build_soong_git_root" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+        patch --strip=1 --no-backup-if-mismatch --directory="$build_soong_git_root" --input=../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
     fi
 }
 
 function apply_resources_sdk_int_fix() {
     if ! grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then
         local base_git_root="$(readlink -f $top/frameworks/base)"
-        git -C "$base_git_root" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
+        patch --strip=1 --no-backup-if-mismatch --directory="$base_git_root" --input=../../build/make/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
     fi
 }
 
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
index 62e5ee5..cb7d1fc 100755
--- a/tools/finalization/finalize-sdk-rel.sh
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -4,19 +4,19 @@
 
 function revert_droidstubs_hack() {
     if grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
-        git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
+        patch --strip=1 --no-backup-if-mismatch --directory="$top/build/soong" --input=../../build/make/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
     fi
 }
 
 function revert_resources_sdk_int_fix() {
     if grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then
-        git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
+        patch --strip=1 --no-backup-if-mismatch --directory="$top/frameworks/base" --input=../../build/make/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
     fi
 }
 
 function apply_prerelease_sdk_hack() {
     if ! grep -q 'STOPSHIP: hack for the pre-release SDK' "$top/frameworks/base/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java" ; then
-        git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_hack.diff
+        patch --strip=1 --no-backup-if-mismatch --directory="$top/frameworks/base" --input=../../build/make/tools/finalization/frameworks_base.apply_hack.diff
     fi
 }
 
diff --git a/tools/find_static_candidates.py b/tools/find_static_candidates.py
new file mode 100644
index 0000000..7511b36
--- /dev/null
+++ b/tools/find_static_candidates.py
@@ -0,0 +1,232 @@
+#!/usr/bin/env python3
+
+"""Tool to find static libraries that maybe should be shared libraries and shared libraries that maybe should be static libraries.
+
+This tool only looks at the module-info.json for the current target.
+
+Example of "class" types for each of the modules in module-info.json
+  "EXECUTABLES": 2307,
+  "ETC": 9094,
+  "NATIVE_TESTS": 10461,
+  "APPS": 2885,
+  "JAVA_LIBRARIES": 5205,
+  "EXECUTABLES/JAVA_LIBRARIES": 119,
+  "FAKE": 553,
+  "SHARED_LIBRARIES/STATIC_LIBRARIES": 7591,
+  "STATIC_LIBRARIES": 11535,
+  "SHARED_LIBRARIES": 10852,
+  "HEADER_LIBRARIES": 1897,
+  "DYLIB_LIBRARIES": 1262,
+  "RLIB_LIBRARIES": 3413,
+  "ROBOLECTRIC": 39,
+  "PACKAGING": 5,
+  "PROC_MACRO_LIBRARIES": 36,
+  "RENDERSCRIPT_BITCODE": 17,
+  "DYLIB_LIBRARIES/RLIB_LIBRARIES": 8,
+  "ETC/FAKE": 1
+
+None of the "SHARED_LIBRARIES/STATIC_LIBRARIES" are double counted in the
+modules with one class
+RLIB/
+
+All of these classes have shared_libs and/or static_libs
+    "EXECUTABLES",
+    "SHARED_LIBRARIES",
+    "STATIC_LIBRARIES",
+    "SHARED_LIBRARIES/STATIC_LIBRARIES", # cc_library
+    "HEADER_LIBRARIES",
+    "NATIVE_TESTS", # test modules
+    "DYLIB_LIBRARIES", # rust
+    "RLIB_LIBRARIES", # rust
+    "ETC", # rust_bindgen
+"""
+
+from collections import defaultdict
+
+import json, os, argparse
+
+ANDROID_PRODUCT_OUT = os.environ.get("ANDROID_PRODUCT_OUT")
+# If a shared library is used less than MAX_SHARED_INCLUSIONS times in a target,
+# then it will likely save memory by changing it to a static library
+# This move will also use less storage
+MAX_SHARED_INCLUSIONS = 2
+# If a static library is used more than MIN_STATIC_INCLUSIONS times in a target,
+# then it will likely save memory by changing it to a shared library
+# This move will also likely use less storage
+MIN_STATIC_INCLUSIONS = 3
+
+
+def parse_args():
+  parser = argparse.ArgumentParser(
+      description=(
+          "Parse module-info.jso and display information about static and"
+          " shared library dependencies."
+      )
+  )
+  parser.add_argument(
+      "--module", dest="module", help="Print the info for the module."
+  )
+  parser.add_argument(
+      "--shared",
+      dest="print_shared",
+      action=argparse.BooleanOptionalAction,
+      help=(
+          "Print the list of libraries that are shared_libs for fewer than {}"
+          " modules.".format(MAX_SHARED_INCLUSIONS)
+      ),
+  )
+  parser.add_argument(
+      "--static",
+      dest="print_static",
+      action=argparse.BooleanOptionalAction,
+      help=(
+          "Print the list of libraries that are static_libs for more than {}"
+          " modules.".format(MIN_STATIC_INCLUSIONS)
+      ),
+  )
+  parser.add_argument(
+      "--recursive",
+      dest="recursive",
+      action=argparse.BooleanOptionalAction,
+      default=True,
+      help=(
+          "Gather all dependencies of EXECUTABLES recursvily before calculating"
+          " the stats. This eliminates duplicates from multiple libraries"
+          " including the same dependencies in a single binary."
+      ),
+  )
+  parser.add_argument(
+      "--both",
+      dest="both",
+      action=argparse.BooleanOptionalAction,
+      default=False,
+      help=(
+          "Print a list of libraries that are including libraries as both"
+          " static and shared"
+      ),
+  )
+  return parser.parse_args()
+
+
+class TransitiveHelper:
+
+  def __init__(self):
+    # keep a list of already expanded libraries so we don't end up in a cycle
+    self.visited = defaultdict(lambda: defaultdict(set))
+
+  # module is an object from the module-info dictionary
+  # module_info is the dictionary from module-info.json
+  # modify the module's shared_libs and static_libs with all of the transitive
+  # dependencies required from all of the explicit dependencies
+  def flattenDeps(self, module, module_info):
+    libs_snapshot = dict(shared_libs = set(module["shared_libs"]), static_libs = set(module["static_libs"]))
+
+    for lib_class in ["shared_libs", "static_libs"]:
+      for lib in libs_snapshot[lib_class]:
+        if not lib or lib not in module_info:
+          continue
+        if lib in self.visited:
+          module[lib_class].update(self.visited[lib][lib_class])
+        else:
+          res = self.flattenDeps(module_info[lib], module_info)
+          module[lib_class].update(res[lib_class])
+          self.visited[lib][lib_class].update(res[lib_class])
+
+    return module
+
+def main():
+  module_info = json.load(open(ANDROID_PRODUCT_OUT + "/module-info.json"))
+  # turn all of the static_libs and shared_libs lists into sets to make them
+  # easier to update
+  for _, module in module_info.items():
+    module["shared_libs"] = set(module["shared_libs"])
+    module["static_libs"] = set(module["static_libs"])
+
+  args = parse_args()
+
+  if args.module:
+    if args.module not in module_info:
+      print("Module {} does not exist".format(args.module))
+      exit(1)
+
+  includedStatically = defaultdict(set)
+  includedSharedly = defaultdict(set)
+  includedBothly = defaultdict(set)
+  transitive = TransitiveHelper()
+  for name, module in module_info.items():
+    if args.recursive:
+      # in this recursive mode we only want to see what is included by the executables
+      if "EXECUTABLES" not in module["class"]:
+        continue
+      module = transitive.flattenDeps(module, module_info)
+      # filter out fuzzers by their dependency on clang
+      if "libclang_rt.fuzzer" in module["static_libs"]:
+        continue
+    else:
+      if "NATIVE_TESTS" in module["class"]:
+        # We don't care about how tests are including libraries
+        continue
+
+    # count all of the shared and static libs included in this module
+    for lib in module["shared_libs"]:
+      includedSharedly[lib].add(name)
+    for lib in module["static_libs"]:
+      includedStatically[lib].add(name)
+
+    intersection = set(module["shared_libs"]).intersection(
+        module["static_libs"]
+    )
+    if intersection:
+      includedBothly[name] = intersection
+
+  if args.print_shared:
+    print(
+        "Shared libraries that are included by fewer than {} modules on a"
+        " device:".format(MAX_SHARED_INCLUSIONS)
+    )
+    for name, libs in includedSharedly.items():
+      if len(libs) < MAX_SHARED_INCLUSIONS:
+        print("{}: {} included by: {}".format(name, len(libs), libs))
+
+  if args.print_static:
+    print(
+        "Libraries that are included statically by more than {} modules on a"
+        " device:".format(MIN_STATIC_INCLUSIONS)
+    )
+    for name, libs in includedStatically.items():
+      if len(libs) > MIN_STATIC_INCLUSIONS:
+        print("{}: {} included by: {}".format(name, len(libs), libs))
+
+  if args.both:
+    allIncludedBothly = set()
+    for name, libs in includedBothly.items():
+      allIncludedBothly.update(libs)
+
+    print(
+        "List of libraries used both statically and shared in the same"
+        " processes:\n {}\n\n".format("\n".join(sorted(allIncludedBothly)))
+    )
+    print(
+        "List of libraries used both statically and shared in any processes:\n {}".format("\n".join(sorted(includedStatically.keys() & includedSharedly.keys()))))
+
+  if args.module:
+    print(json.dumps(module_info[args.module], default=list, indent=2))
+    print(
+        "{} is included in shared_libs {} times by these modules: {}".format(
+            args.module, len(includedSharedly[args.module]),
+            includedSharedly[args.module]
+        )
+    )
+    print(
+        "{} is included in static_libs {} times by these modules: {}".format(
+            args.module, len(includedStatically[args.module]),
+            includedStatically[args.module]
+        )
+    )
+    print("Shared libs included by this module that are used in fewer than {} processes:\n{}".format(
+        MAX_SHARED_INCLUSIONS, [x for x in module_info[args.module]["shared_libs"] if len(includedSharedly[x]) < MAX_SHARED_INCLUSIONS]))
+
+
+
+if __name__ == "__main__":
+  main()
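
find_static_candidates.py counts, per library, how many modules pull it in as shared_libs or static_libs and flags outliers against the MAX_SHARED_INCLUSIONS / MIN_STATIC_INCLUSIONS thresholds explained above. A toy version of that counting, with made-up module data:

    from collections import defaultdict

    MAX_SHARED_INCLUSIONS = 2
    MIN_STATIC_INCLUSIONS = 3

    # Hypothetical, much-simplified module-info entries.
    modules = {
        "bin_a": {"shared_libs": {"libonlyhere"}, "static_libs": {"libutil"}},
        "bin_b": {"shared_libs": set(), "static_libs": {"libutil"}},
        "bin_c": {"shared_libs": set(), "static_libs": {"libutil"}},
        "bin_d": {"shared_libs": set(), "static_libs": {"libutil"}},
    }

    shared_users, static_users = defaultdict(set), defaultdict(set)
    for name, mod in modules.items():
        for lib in mod["shared_libs"]:
            shared_users[lib].add(name)
        for lib in mod["static_libs"]:
            static_users[lib].add(name)

    # "libonlyhere" has fewer than 2 shared users: candidate to become static.
    print([l for l, u in shared_users.items() if len(u) < MAX_SHARED_INCLUSIONS])
    # "libutil" has more than 3 static users: candidate to become shared.
    print([l for l, u in static_users.items() if len(u) > MIN_STATIC_INCLUSIONS])
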
diff --git a/tools/list_files.py b/tools/list_files.py
index 3afa81f..4f666aa 100644
--- a/tools/list_files.py
+++ b/tools/list_files.py
@@ -18,6 +18,7 @@
 from glob import glob
 from pathlib import Path
 from os.path import join, relpath
+from itertools import chain
+from typing import List
 import argparse
 
 class FileLister:
@@ -27,7 +28,8 @@
         self.folder_dir = args.dir
         self.extensions = [e if e.startswith(".") else "." + e for e in args.extensions]
         self.root = args.root
-        self.files_list = list()
+        self.files_list : List[str] = list()
+        self.classes = args.classes
 
     def get_files(self) -> None:
         """Get all files directory in the input directory including the files in the subdirectories
@@ -61,6 +63,26 @@
     def list(self) -> None:
         self.get_files()
         self.files_list = [f for f in self.files_list if not self.extensions or Path(f).suffix in self.extensions]
+
+        # If files_list is as below:
+        # A/B/C.java
+        # A/B/D.java
+        # A/B/E.txt
+        # --classes flag converts files_list in the following format:
+        # A/B/C.class
+        # A/B/C$*.class
+        # A/B/D.class
+        # A/B/D$*.class
+        # An additional `$*`-suffixed line is appended after each line to
+        # account for multiple top-level classes in a single java file.
+        # Note that non-java files in files_list are filtered out.
+        if self.classes:
+            self.files_list = list(chain.from_iterable([
+                (class_files := str(Path(ff).with_suffix(".class")),
+                 class_files.replace(".class", "$*.class"))
+                 for ff in self.files_list if ff.endswith(".java")
+            ]))
+
         self.write()
 
     def write(self) -> None:
@@ -95,6 +117,10 @@
                         help="optional directory to replace the root directories of output.")
     parser.add_argument('--extensions', nargs='*', default=list(), dest='extensions',
                         help="Extensions to include in the output. If not set, all files are included")
+    parser.add_argument('--classes', dest='classes', action=argparse.BooleanOptionalAction,
+                        help="Optional flag. If passed, outputs a list of pattern of class files \
+                                that will be produced by compiling java files in the input dir. \
+                                Non-java files in the input directory will be ignored.")
 
     args = parser.parse_args()
 
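
The --classes comment above maps each .java path to a pair of class patterns; the same transformation can be seen in isolation on sample paths:

    from itertools import chain
    from pathlib import Path

    files_list = ["A/B/C.java", "A/B/D.java", "A/B/E.txt"]

    # Each .java file yields Foo.class plus Foo$*.class; non-java entries
    # are dropped, matching the comment in FileLister.list().
    class_patterns = list(chain.from_iterable(
        (str(Path(f).with_suffix(".class")),
         str(Path(f).with_suffix("")) + "$*.class")
        for f in files_list if f.endswith(".java")))

    print(class_patterns)
    # ['A/B/C.class', 'A/B/C$*.class', 'A/B/D.class', 'A/B/D$*.class']
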
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
index a0fb9e1..1d68d43 100644
--- a/tools/rbcrun/host.go
+++ b/tools/rbcrun/host.go
@@ -34,9 +34,10 @@
 	ExecutionModeMake ExecutionMode = iota
 )
 
+const allowExternalEntrypointKey = "allowExternalEntrypoint"
 const callerDirKey = "callerDir"
-const shellKey = "shell"
 const executionModeKey = "executionMode"
+const shellKey = "shell"
 
 type modentry struct {
 	globals starlark.StringDict
@@ -64,7 +65,7 @@
 
 // Takes a module name (the first argument to the load() function) and returns the path
 // it's trying to load, stripping out leading //, and handling leading :s.
-func cleanModuleName(moduleName string, callerDir string) (string, error) {
+func cleanModuleName(moduleName string, callerDir string, allowExternalPaths bool) (string, error) {
 	if strings.Count(moduleName, ":") > 1 {
 		return "", fmt.Errorf("at most 1 colon must be present in starlark path: %s", moduleName)
 	}
@@ -82,7 +83,7 @@
 	} else if strings.HasPrefix(moduleName, ":") {
 		moduleName = moduleName[1:]
 		localLoad = true
-	} else {
+	} else if !allowExternalPaths {
 		return "", fmt.Errorf("load path must start with // or :")
 	}
 
@@ -93,11 +94,13 @@
 	if filepath.Clean(moduleName) != moduleName {
 		return "", fmt.Errorf("load path must be clean, found: %s, expected: %s", moduleName, filepath.Clean(moduleName))
 	}
-	if strings.HasPrefix(moduleName, "../") {
-		return "", fmt.Errorf("load path must not start with ../: %s", moduleName)
-	}
-	if strings.HasPrefix(moduleName, "/") {
-		return "", fmt.Errorf("load path starts with /, use // for a absolute path: %s", moduleName)
+	if !allowExternalPaths {
+		if strings.HasPrefix(moduleName, "../") {
+			return "", fmt.Errorf("load path must not start with ../: %s", moduleName)
+		}
+		if strings.HasPrefix(moduleName, "/") {
+			return "", fmt.Errorf("load path starts with /, use // for a absolute path: %s", moduleName)
+		}
 	}
 
 	if localLoad {
@@ -114,17 +117,18 @@
 // bound to None if file is missing.
 func loader(thread *starlark.Thread, module string) (starlark.StringDict, error) {
 	mode := thread.Local(executionModeKey).(ExecutionMode)
+	allowExternalEntrypoint := thread.Local(allowExternalEntrypointKey).(bool)
 	var defaultSymbol string
 	mustLoad := true
 	if mode == ExecutionModeRbc {
 		pipePos := strings.LastIndex(module, "|")
-		mustLoad = pipePos < 0
-		if !mustLoad {
+		if pipePos >= 0 {
+			mustLoad = false
 			defaultSymbol = module[pipePos+1:]
 			module = module[:pipePos]
 		}
 	}
-	modulePath, err := cleanModuleName(module, thread.Local(callerDirKey).(string))
+	modulePath, err := cleanModuleName(module, thread.Local(callerDirKey).(string), allowExternalEntrypoint)
 	if err != nil {
 		return nil, err
 	}
@@ -155,9 +159,11 @@
 				childThread.SetLocal(testReporterKey, v)
 			}
 
+			// Only the entrypoint starlark file allows external loads.
+			childThread.SetLocal(allowExternalEntrypointKey, false)
 			childThread.SetLocal(callerDirKey, filepath.Dir(modulePath))
-			childThread.SetLocal(shellKey, thread.Local(shellKey))
 			childThread.SetLocal(executionModeKey, mode)
+			childThread.SetLocal(shellKey, thread.Local(shellKey))
 			if mode == ExecutionModeRbc {
 				globals, err := starlark.ExecFile(childThread, modulePath, nil, rbcBuiltins)
 				e = &modentry{globals, err}
@@ -318,7 +324,7 @@
 // * src is an optional source of bytes to use instead of filename
 //   (it can be a string, or a byte array, or an io.Reader instance)
 // Returns the top-level starlark variables, the list of starlark files loaded, and an error
-func Run(filename string, src interface{}, mode ExecutionMode) (starlark.StringDict, []string, error) {
+func Run(filename string, src interface{}, mode ExecutionMode, allowExternalEntrypoint bool) (starlark.StringDict, []string, error) {
 	// NOTE(asmundak): OS-specific. Behave similar to Linux `system` call,
 	// which always uses /bin/sh to run the command
 	shellPath := "/bin/sh"
@@ -347,7 +353,7 @@
 		if err != nil {
 			return nil, nil, err
 		}
-		if strings.HasPrefix(filename, "../") {
+		if !allowExternalEntrypoint && strings.HasPrefix(filename, "../") {
 			return nil, nil, fmt.Errorf("path could not be made relative to workspace root: %s", filename)
 		}
 	} else {
@@ -358,9 +364,10 @@
 	moduleCache[filename] = nil
 
 	var results starlark.StringDict
+	mainThread.SetLocal(allowExternalEntrypointKey, allowExternalEntrypoint)
 	mainThread.SetLocal(callerDirKey, filepath.Dir(filename))
-	mainThread.SetLocal(shellKey, shellPath)
 	mainThread.SetLocal(executionModeKey, mode)
+	mainThread.SetLocal(shellKey, shellPath)
 	if mode == ExecutionModeRbc {
 		results, err = starlark.ExecFile(mainThread, filename, src, rbcBuiltins)
 	} else if mode == ExecutionModeMake {
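
The host.go change gates the '../' and absolute-path restrictions on the new allowExternalEntrypoint thread-local, which only the entrypoint thread sets to true (child threads reset it to false). A short Python sketch of just that gating, not the Go implementation:

    def load_path_allowed(module_name, allow_external_paths):
        """Reject '../' and absolute load paths unless external paths are
        explicitly allowed for this (entrypoint) thread."""
        if allow_external_paths:
            return True
        return not (module_name.startswith("../") or module_name.startswith("/"))

    assert load_path_allowed("../outside/tree.star", allow_external_paths=True)
    assert not load_path_allowed("../outside/tree.star", allow_external_paths=False)
    assert load_path_allowed("device/vendor/config.star", allow_external_paths=False)
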
diff --git a/tools/rbcrun/host_test.go b/tools/rbcrun/host_test.go
index 10cac62..10ce55e 100644
--- a/tools/rbcrun/host_test.go
+++ b/tools/rbcrun/host_test.go
@@ -125,6 +125,7 @@
 	if err := os.Chdir(filepath.Dir(dir)); err != nil {
 		t.Fatal(err)
 	}
+	thread.SetLocal(allowExternalEntrypointKey, false)
 	thread.SetLocal(callerDirKey, dir)
 	thread.SetLocal(executionModeKey, ExecutionModeRbc)
 	if _, err := starlark.ExecFile(thread, "testdata/load.star", nil, rbcBuiltins); err != nil {
diff --git a/tools/rbcrun/rbcrun/rbcrun.go b/tools/rbcrun/rbcrun/rbcrun.go
index b5182f0..a15b867 100644
--- a/tools/rbcrun/rbcrun/rbcrun.go
+++ b/tools/rbcrun/rbcrun/rbcrun.go
@@ -26,6 +26,7 @@
 )
 
 var (
+	allowExternalEntrypoint = flag.Bool("allow_external_entrypoint", false, "allow the entrypoint starlark file to be outside of the source tree")
 	modeFlag  = flag.String("mode", "", "the general behavior of rbcrun. Can be \"rbc\" or \"make\". Required.")
 	rootdir  = flag.String("d", ".", "the value of // for load paths")
 	perfFile = flag.String("perf", "", "save performance data")
@@ -159,7 +160,7 @@
 			quit("%s\n", err)
 		}
 	}
-	variables, loadedStarlarkFiles, err := rbcrun.Run(filename, nil, mode)
+	variables, loadedStarlarkFiles, err := rbcrun.Run(filename, nil, mode, *allowExternalEntrypoint)
 	rc := 0
 	if *perfFile != "" {
 		if err2 := starlark.StopProfile(); err2 != nil {
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index a76dc8a..7a2dcb7 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -99,9 +99,8 @@
         "releasetools_common",
     ],
     required: [
+        "apexd_host",
         "checkvintf",
-        "deapexer",
-        "dump_apex_info",
     ],
 }
 
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 59c712e..bfc87b8 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -65,8 +65,6 @@
         OPTIONS.search_path, "bin", "debugfs_static")
     self.fsckerofs_path = os.path.join(
         OPTIONS.search_path, "bin", "fsck.erofs")
-    self.blkid_path = os.path.join(
-        OPTIONS.search_path, "bin", "blkid_static")
     self.avbtool = avbtool if avbtool else "avbtool"
     self.sign_tool = sign_tool
 
@@ -129,15 +127,10 @@
           "Couldn't find location of fsck.erofs: " +
           "Path {} does not exist. ".format(self.fsckerofs_path) +
           "Make sure bin/fsck.erofs can be found in -p <path>")
-    if not os.path.exists(self.blkid_path):
-      raise ApexSigningError(
-          "Couldn't find location of blkid: " +
-          "Path {} does not exist. ".format(self.blkid_path) +
-          "Make sure bin/blkid can be found in -p <path>")
     payload_dir = common.MakeTempDir()
     extract_cmd = ['deapexer', '--debugfs_path', self.debugfs_path,
                    '--fsckerofs_path', self.fsckerofs_path,
-                   '--blkid_path', self.blkid_path, 'extract',
+                   'extract',
                    self.apex_path, payload_dir]
     common.RunAndCheckOutput(extract_cmd)
     assert os.path.exists(self.apex_path)
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index d935607..a7b3523 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -241,7 +241,8 @@
     # Signer (minSdkVersion=24, maxSdkVersion=32) certificate SHA-1 digest: 19da94896ce4078c38ca695701f1dec741ec6d67
     # ...
     certs_info = {}
-    certificate_regex = re.compile(r"(Signer (?:#[0-9]+|\(.*\))) (certificate .*):(.*)")
+    certificate_regex = re.compile(
+        r"(Signer (?:#[0-9]+|\(.*\))) (certificate .*):(.*)")
     for line in output.splitlines():
       m = certificate_regex.match(line)
       if not m:
@@ -312,7 +313,7 @@
     # This is the list of wildcards of files we extract from |filename|.
     apk_extensions = ['*.apk', '*.apex']
 
-    with zipfile.ZipFile(filename) as input_zip:
+    with zipfile.ZipFile(filename, "r") as input_zip:
       self.certmap, compressed_extension = common.ReadApkCerts(input_zip)
     if compressed_extension:
       apk_extensions.append('*.apk' + compressed_extension)
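
The reformatted signer regex can be exercised directly against the apksigner sample quoted in the comment above it:

    import re

    certificate_regex = re.compile(
        r"(Signer (?:#[0-9]+|\(.*\))) (certificate .*):(.*)")
    line = ("Signer (minSdkVersion=24, maxSdkVersion=32) "
            "certificate SHA-1 digest: 19da94896ce4078c38ca695701f1dec741ec6d67")
    print(certificate_regex.match(line).groups())
    # ('Signer (minSdkVersion=24, maxSdkVersion=32)',
    #  'certificate SHA-1 digest',
    #  ' 19da94896ce4078c38ca695701f1dec741ec6d67')
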
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 5b71c72..33624f5 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -129,8 +129,9 @@
 
   dirmap = GetDirmap(input_tmp)
 
-  # Simulate apexd from target-files.
-  dirmap['/apex'] = PrepareApexDirectory(input_tmp)
+  # Simulate apexd with target-files.
+  # add a mapping('/apex' => ${input_tmp}/APEX) to dirmap
+  PrepareApexDirectory(input_tmp, dirmap)
 
   args_for_skus = GetArgsForSkus(info_dict)
   shipping_api_level_args = GetArgsForShippingApiLevel(info_dict)
@@ -204,7 +205,8 @@
 
   return patterns
 
-def PrepareApexDirectory(inp):
+
+def PrepareApexDirectory(inp, dirmap):
   """ Prepare /apex directory before running checkvintf
 
   Apex binaries do not support dirmaps, in order to use these binaries we
@@ -212,96 +214,25 @@
   expected device locations.
 
   This simulates how apexd activates APEXes.
-  1. create {inp}/APEX which is treated as a "/" on device.
-  2. copy apexes from target-files to {root}/{partition}/apex.
-  3. mount apexes under {root}/{partition}/apex at {root}/apex.
-  4. generate info files with dump_apex_info.
-
-  We'll get the following layout
-       {inp}/APEX/apex             # Activated APEXes + some info files
-       {inp}/APEX/system/apex      # System APEXes
-       {inp}/APEX/vendor/apex      # Vendor APEXes
-       ...
-
-  Args:
-    inp: path to the directory that contains the extracted target files archive.
-
-  Returns:
-    directory representing /apex on device
+  1. create {inp}/APEX which is treated as a "/apex" on device.
+  2. invoke apexd_host with vendor APEXes.
   """
 
-  deapexer = 'deapexer'
-  debugfs_path = 'debugfs'
-  blkid_path = 'blkid'
-  fsckerofs_path = 'fsck.erofs'
-  if OPTIONS.search_path:
-    debugfs_path = os.path.join(OPTIONS.search_path, 'bin', 'debugfs_static')
-    deapexer_path = os.path.join(OPTIONS.search_path, 'bin', 'deapexer')
-    blkid_path = os.path.join(OPTIONS.search_path, 'bin', 'blkid_static')
-    fsckerofs_path = os.path.join(OPTIONS.search_path, 'bin', 'fsck.erofs')
-    if os.path.isfile(deapexer_path):
-      deapexer = deapexer_path
-
-  def ExtractApexes(path, outp):
-    # Extract all APEXes found in input path.
-    logger.info('Extracting APEXs in %s', path)
-    for f in os.listdir(path):
-      logger.info('  adding APEX %s', os.path.basename(f))
-      apex = os.path.join(path, f)
-      if os.path.isdir(apex) and os.path.isfile(os.path.join(apex, 'apex_manifest.pb')):
-        info = ParseApexManifest(os.path.join(apex, 'apex_manifest.pb'))
-        # Flattened APEXes may have symlinks for libs (linked to /system/lib)
-        # We need to blindly copy them all.
-        shutil.copytree(apex, os.path.join(outp, info.name), symlinks=True)
-      elif os.path.isfile(apex) and apex.endswith(('.apex', '.capex')):
-        cmd = [deapexer,
-               '--debugfs_path', debugfs_path,
-               'info',
-               apex]
-        info = json.loads(common.RunAndCheckOutput(cmd))
-
-        cmd = [deapexer,
-               '--debugfs_path', debugfs_path,
-               '--fsckerofs_path', fsckerofs_path,
-               '--blkid_path', blkid_path,
-               'extract',
-               apex,
-               os.path.join(outp, info['name'])]
-        common.RunAndCheckOutput(cmd)
-      else:
-        logger.info('  .. skipping %s (is it APEX?)', path)
-
-  root_dir_name = 'APEX'
-  root_dir = os.path.join(inp, root_dir_name)
-  extracted_root = os.path.join(root_dir, 'apex')
+  apex_dir = os.path.join(inp, 'APEX')
+  # checkvintf needs /apex dirmap
+  dirmap['/apex'] = apex_dir
 
   # Always create /apex directory for dirmap
-  os.makedirs(extracted_root)
+  os.makedirs(apex_dir)
 
-  create_info_file = False
+  # Invoke apexd_host to activate vendor APEXes for checkvintf
+  apex_host = os.path.join(OPTIONS.search_path, 'bin', 'apexd_host')
+  cmd = [apex_host, '--tool_path', OPTIONS.search_path]
+  cmd += ['--apex_path', dirmap['/apex']]
+  if '/vendor' in dirmap:
+      cmd += ['--vendor_path', dirmap['/vendor']]
+  common.RunAndCheckOutput(cmd)
 
-  # Loop through search path looking for and processing apex/ directories.
-  for device_path, target_files_rel_paths in DIR_SEARCH_PATHS.items():
-    # checkvintf only needs vendor apexes. skip other partitions for efficiency
-    if device_path not in ['/vendor', '/odm']:
-      continue
-    # First, copy VENDOR/apex/foo.apex to APEX/vendor/apex/foo.apex
-    # Then, extract the contents to APEX/apex/foo/
-    for target_files_rel_path in target_files_rel_paths:
-      inp_partition = os.path.join(inp, target_files_rel_path,"apex")
-      if os.path.exists(inp_partition):
-        apex_dir = root_dir + os.path.join(device_path + "/apex");
-        os.makedirs(root_dir + device_path)
-        shutil.copytree(inp_partition, apex_dir, symlinks=True)
-        ExtractApexes(apex_dir, extracted_root)
-        create_info_file = True
-
-  if create_info_file:
-    ### Dump apex info files
-    dump_cmd = ['dump_apex_info', '--root_dir', root_dir]
-    common.RunAndCheckOutput(dump_cmd)
-
-  return extracted_root
 
 def CheckVintfFromTargetFiles(inp, info_dict=None):
   """
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index f92d67c..091121f 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -35,6 +35,7 @@
 import shutil
 import subprocess
 import sys
+import stat
 import tempfile
 import threading
 import time
@@ -1423,9 +1424,10 @@
   def ResolveBinaryPath(path):
     if os.path.exists(path):
       return path
-    new_path = os.path.join(OPTIONS.search_path, path)
-    if os.path.exists(new_path):
-      return new_path
+    if OPTIONS.search_path:
+      new_path = os.path.join(OPTIONS.search_path, path)
+      if os.path.exists(new_path):
+        return new_path
     raise ExternalError(
         "Failed to find {}".format(new_path))
 
@@ -2114,6 +2116,26 @@
     shutil.copyfileobj(in_file, out_file)
 
 
+def UnzipSingleFile(input_zip: zipfile.ZipFile, info: zipfile.ZipInfo, dirname: str):
+  # According to https://stackoverflow.com/questions/434641/how-do-i-set-permissions-attributes-on-a-file-in-a-zip-file-using-pythons-zip/6297838#6297838
+  # higher bits of |external_attr| are unix file permission and types
+  unix_filetype = info.external_attr >> 16
+
+  def CheckMask(a, mask):
+    return (a & mask) == mask
+
+  def IsSymlink(a):
+    return CheckMask(a, stat.S_IFLNK)
+  # python3.11's zipfile implementation doesn't handle symlinks correctly
+  if not IsSymlink(unix_filetype):
+    return input_zip.extract(info, dirname)
+  if dirname is None:
+    dirname = os.getcwd()
+  target = os.path.join(dirname, info.filename)
+  os.makedirs(os.path.dirname(target), exist_ok=True)
+  os.symlink(input_zip.read(info).decode(), target)
+
+
 def UnzipToDir(filename, dirname, patterns=None):
   """Unzips the archive to the given directory.
 
@@ -2126,17 +2148,44 @@
   """
   with zipfile.ZipFile(filename, allowZip64=True, mode="r") as input_zip:
     # Filter out non-matching patterns. unzip will complain otherwise.
+    entries = input_zip.infolist()
+    # b/283033491
+    # Per https://en.wikipedia.org/wiki/ZIP_(file_format)#Central_directory_file_header
+    # In zip64 mode, central directory record's header_offset field might be
+    # set to 0xFFFFFFFF if header offset is > 2^32. In this case, the extra
+    # fields will contain an 8 byte little endian integer at offset 20
+    # to indicate the actual local header offset.
+    # As of python3.11, python does not handle zip64 central directories
+    # correctly, so we will manually do the parsing here.
+
+    # ZIP64 central directory extra field has two required fields:
+    # 2 bytes header ID and 2 bytes size field. These two required fields have
+    # a total size of 4 bytes. Then it has three other 8-byte fields, followed
+    # by a 4-byte disk number field. The last disk number field is not required
+    # to be present, but if it is present, the total size of the extra field will
+    # be divisible by 8 (because 2+2+4+8*n is always a multiple of 8).
+    # Most extra fields are optional, but when they appear, they must appear
+    # in the order defined by the zip64 spec. Since the file header offset is the
+    # 2nd-to-last field in the zip64 spec, it will be either in the last 8 bytes
+    # or in bytes [-12:-4], depending on whether the disk number is present.
+    for entry in entries:
+      if entry.header_offset == 0xFFFFFFFF:
+        if len(entry.extra) % 8 == 0:
+          entry.header_offset = int.from_bytes(entry.extra[-12:-4], "little")
+        else:
+          entry.header_offset = int.from_bytes(entry.extra[-8:], "little")
     if patterns is not None:
-      names = input_zip.namelist()
-      filtered = [name for name in names if any(
-          [fnmatch.fnmatch(name, p) for p in patterns])]
+      filtered = [info for info in entries if any(
+          [fnmatch.fnmatch(info.filename, p) for p in patterns])]
 
       # There isn't any matching files. Don't unzip anything.
       if not filtered:
         return
-      input_zip.extractall(dirname, filtered)
+      for info in filtered:
+        UnzipSingleFile(input_zip, info, dirname)
     else:
-      input_zip.extractall(dirname)
+      for info in entries:
+        UnzipSingleFile(input_zip, info, dirname)
 
 
 def UnzipTemp(filename, patterns=None):
@@ -2402,12 +2451,22 @@
   try:
     return int(version)
   except ValueError:
-    # Not a decimal number. Codename?
-    if version in codename_to_api_level_map:
-      return codename_to_api_level_map[version]
+    # Not a decimal number.
+    #
+    # It could be either a straight codename, e.g.
+    #     UpsideDownCake
+    #
+    # Or a codename with API fingerprint SHA, e.g.
+    #     UpsideDownCake.e7d3947f14eb9dc4fec25ff6c5f8563e
+    #
+    # Extract the codename and try and map it to a version number.
+    split = version.split(".")
+    codename = split[0]
+    if codename in codename_to_api_level_map:
+      return codename_to_api_level_map[codename]
     raise ExternalError(
-        "Unknown minSdkVersion: '{}'. Known codenames: {}".format(
-            version, codename_to_api_level_map))
+        "Unknown codename: '{}' from minSdkVersion: '{}'. Known codenames: {}".format(
+            codename, version, codename_to_api_level_map))
 
 
 def SignFile(input_name, output_name, key, password, min_api_level=None,
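
The zip64 comment in UnzipToDir explains where the real header offset hides when the 32-bit field is saturated. The same decision can be checked in isolation against a synthetic extra field; this mirrors the workaround above and is not part of the zipfile API:

    import struct

    def real_header_offset(header_offset, extra):
        """Mirror the zip64 workaround above for central directory entries."""
        if header_offset != 0xFFFFFFFF:
            return header_offset
        if len(extra) % 8 == 0:  # a 4-byte disk number field is present
            return int.from_bytes(extra[-12:-4], "little")
        return int.from_bytes(extra[-8:], "little")

    # Synthetic zip64 extra field: header ID 0x0001, size 8, then a single
    # 8-byte little-endian header offset and no disk number field.
    extra = struct.pack("<HHQ", 0x0001, 8, 5_000_000_000)
    print(real_header_offset(0xFFFFFFFF, extra))  # 5000000000
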
diff --git a/tools/releasetools/merge/Android.bp b/tools/releasetools/merge/Android.bp
index 219acf8..96ec73e 100644
--- a/tools/releasetools/merge/Android.bp
+++ b/tools/releasetools/merge/Android.bp
@@ -50,6 +50,7 @@
         "releasetools_ota_from_target_files",
     ],
     required: [
+        "apexd_host",
         "checkvintf",
         "host_init_verifier",
         "secilc",
diff --git a/tools/releasetools/merge/merge_dexopt.py b/tools/releasetools/merge/merge_dexopt.py
index 16182b5..1c0c743 100644
--- a/tools/releasetools/merge/merge_dexopt.py
+++ b/tools/releasetools/merge/merge_dexopt.py
@@ -72,7 +72,6 @@
   #         <contents of vendor dexpreopt_config.zip>
   #     system -> output/SYSTEM
   #     vendor -> output/VENDOR
-  #     apex -> output/SYSTEM/apex (only for flattened APEX builds)
   #     apex/ (extracted updatable APEX)
   #         <apex 1>/
   #             ...
@@ -114,70 +113,20 @@
       os.path.join(output_target_files_dir, 'VENDOR'),
       os.path.join(temp_dir, 'vendor'))
 
-  # The directory structure for flatteded APEXes is:
-  #
-  # SYSTEM
-  #     apex
-  #         <APEX name, e.g., com.android.wifi>
-  #             apex_manifest.pb
-  #             apex_pubkey
-  #             etc/
-  #             javalib/
-  #             lib/
-  #             lib64/
-  #             priv-app/
-  #
-  # The directory structure for updatable APEXes is:
-  #
-  # SYSTEM
-  #     apex
-  #         com.android.adbd.apex
-  #         com.android.appsearch.apex
-  #         com.android.art.apex
-  #         ...
-  apex_root = os.path.join(output_target_files_dir, 'SYSTEM', 'apex')
+  # Extract APEX.
+  logging.info('extracting APEX')
+  apex_extract_root_dir = os.path.join(temp_dir, 'apex')
+  os.makedirs(apex_extract_root_dir)
 
-  # Check for flattended versus updatable APEX.
-  if OPTIONS.framework_misc_info.get('target_flatten_apex') == 'false':
-    # Extract APEX.
-    logging.info('extracting APEX')
-
-    apex_extract_root_dir = os.path.join(temp_dir, 'apex')
-    os.makedirs(apex_extract_root_dir)
-
-    for apex in (glob.glob(os.path.join(apex_root, '*.apex')) +
-                 glob.glob(os.path.join(apex_root, '*.capex'))):
-      logging.info('  apex: %s', apex)
-      # deapexer is in the same directory as the merge_target_files binary extracted
-      # from otatools.zip.
-      apex_json_info = subprocess.check_output(['deapexer', 'info', apex])
-      logging.info('    info: %s', apex_json_info)
-      apex_info = json.loads(apex_json_info)
-      apex_name = apex_info['name']
-      logging.info('    name: %s', apex_name)
-
-      apex_extract_dir = os.path.join(apex_extract_root_dir, apex_name)
-      os.makedirs(apex_extract_dir)
-
-      # deapexer uses debugfs_static, which is part of otatools.zip.
-      command = [
-          'deapexer',
-          '--debugfs_path',
-          'debugfs_static',
-          '--blkid_path',
-          'blkid',
-          '--fsckerofs_path',
-          'fsck.erofs',
-          'extract',
-          apex,
-          apex_extract_dir,
-      ]
-      logging.info('    running %s', command)
-      subprocess.check_call(command)
-  else:
-    # Flattened APEXes don't need to be extracted since they have the necessary
-    # directory structure.
-    os.symlink(os.path.join(apex_root), os.path.join(temp_dir, 'apex'))
+  command = [
+      'apexd_host',
+      '--system_path',
+      os.path.join(temp_dir, 'system'),
+      '--apex_path',
+      apex_extract_root_dir,
+  ]
+  logging.info('    running %s', command)
+  subprocess.check_call(command)
 
   # Modify system config to point to the tools that have been extracted.
   # Absolute or .. paths are not allowed  by the dexpreopt_gen tool in
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index afbe81a..f3e6f1e 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -633,14 +633,17 @@
         return True
     return False
 
-  postinstall_config = common.ReadFromInputFile(input_file, POSTINSTALL_CONFIG)
-  postinstall_config = [
-      line for line in postinstall_config.splitlines() if IsInPartialList(line)]
-  if postinstall_config:
-    postinstall_config = "\n".join(postinstall_config)
-    common.WriteToInputFile(input_file, POSTINSTALL_CONFIG, postinstall_config)
-  else:
-    os.unlink(os.path.join(input_file, POSTINSTALL_CONFIG))
+  if common.DoesInputFileContain(input_file, POSTINSTALL_CONFIG):
+    postinstall_config = common.ReadFromInputFile(
+        input_file, POSTINSTALL_CONFIG)
+    postinstall_config = [
+        line for line in postinstall_config.splitlines() if IsInPartialList(line)]
+    if postinstall_config:
+      postinstall_config = "\n".join(postinstall_config)
+      common.WriteToInputFile(
+          input_file, POSTINSTALL_CONFIG, postinstall_config)
+    else:
+      os.unlink(os.path.join(input_file, POSTINSTALL_CONFIG))
 
   return input_file
 
@@ -1063,6 +1066,8 @@
       # ZIP_STORED.
       common.ZipWriteStr(output_zip, care_map_name, care_map_data,
                          compress_type=zipfile.ZIP_STORED)
+      # break here to avoid falling through to the for-else branch once the care map has been handled
+      break
     else:
       logger.warning("Cannot find care map file in target_file package")
 
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 63a863e..68c6887 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -762,6 +762,9 @@
   path = os.path.join(target_files_dir, "RADIO", partition + ".map")
   if os.path.exists(path):
     return path
+  path = os.path.join(target_files_dir, "IMAGES", partition + ".map")
+  if os.path.exists(path):
+    return path
   return ""
 
 
@@ -846,6 +849,11 @@
     if os.path.exists(dynamic_partition_info):
       cmd.extend(["--dynamic_partition_info_file", dynamic_partition_info])
 
+    apex_info = os.path.join(
+      target_dir, "META", "apex_info.pb")
+    if os.path.exists(apex_info):
+      cmd.extend(["--apex_info_file", apex_info])
+
     major_version, minor_version = ParseUpdateEngineConfig(
         os.path.join(target_dir, "META", "update_engine_config.txt"))
     if source_file:
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 2dfd8c7..86fb480 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1642,6 +1642,7 @@
         'gki_signing_algorithm': 'SHA256_RSA4096',
         'gki_signing_signature_args': '--prop foo:bar',
     }
+    common.OPTIONS.search_path = None
     test_file = tempfile.NamedTemporaryFile()
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                       test_file.name, 'generic_kernel')
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
index 2415f7e..b19be87 100755
--- a/tools/sbom/generate-sbom.py
+++ b/tools/sbom/generate-sbom.py
@@ -332,14 +332,6 @@
   return external_doc_ref, packages, relationships
 
 
-def generate_package_verification_code(files):
-  checksums = [file.checksum for file in files]
-  checksums.sort()
-  h = hashlib.sha1()
-  h.update(''.join(checksums).encode(encoding='utf-8'))
-  return h.hexdigest()
-
-
 def save_report(report_file_path, report):
   with open(report_file_path, 'w', encoding='utf-8') as report_file:
     for type, issues in report.items():
@@ -487,20 +479,32 @@
       product_copy_files = installed_file_metadata['product_copy_files']
       kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
       build_output_path = installed_file_metadata['build_output_path']
+      is_static_lib = installed_file_metadata['is_static_lib']
 
       if not installed_file_has_metadata(installed_file_metadata, report):
         continue
-      if not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+      if not is_static_lib and not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+        # Ignore non-existing static library files for now since they are not shipped on devices.
         report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
         continue
 
       file_id = new_file_id(installed_file)
-      doc.files.append(
-        sbom_data.File(id=file_id, name=installed_file, checksum=checksum(build_output_path)))
-      if not args.unbundled_apex:
-        product_package.file_ids.append(file_id)
-      elif len(doc.files) > 1:
-          doc.add_relationship(sbom_data.Relationship(doc.files[0].id, sbom_data.RelationshipType.CONTAINS, file_id))
+      # TODO(b/285453664): Soong should report the information of statically linked libraries to Make.
+      # A .a file can be missing when a different sanitized variant of the static library was used in linking.
+      # As a workaround, use the SHA1 checksum of the empty string for static libraries created by Soong
+      # whose .a files cannot be located, since Soong doesn't report that information to Make.
+      sha1 = 'SHA1: da39a3ee5e6b4b0d3255bfef95601890afd80709'  # SHA1 of empty string
+      if os.path.islink(build_output_path) or os.path.isfile(build_output_path):
+        sha1 = checksum(build_output_path)
+      doc.files.append(sbom_data.File(id=file_id,
+                                      name=installed_file,
+                                      checksum=sha1))
+
+      if not is_static_lib:
+        if not args.unbundled_apex:
+          product_package.file_ids.append(file_id)
+        elif len(doc.files) > 1:
+            doc.add_relationship(sbom_data.Relationship(doc.files[0].id, sbom_data.RelationshipType.CONTAINS, file_id))
 
       if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
         metadata_file_path = get_metadata_file_path(installed_file_metadata)
@@ -544,13 +548,21 @@
                                                     relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                     id2=sbom_data.SPDXID_PLATFORM))
 
-  if not args.unbundled_apex:
-    product_package.verification_code = generate_package_verification_code(doc.files)
+      # Process static libraries and whole static libraries the installed file links to
+      static_libs = installed_file_metadata['static_libraries']
+      whole_static_libs = installed_file_metadata['whole_static_libraries']
+      all_static_libs = (static_libs + ' ' + whole_static_libs).strip()
+      if all_static_libs:
+        for lib in all_static_libs.split(' '):
+          doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                      relationship=sbom_data.RelationshipType.STATIC_LINK,
+                                                      id2=new_file_id(lib + '.a')))
 
   if args.unbundled_apex:
     doc.describes = doc.files[0].id
 
   # Save SBOM records to output file
+  doc.generate_packages_verification_code()
   doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
   prefix = args.output_file
   if prefix.endswith('.spdx'):
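For readability, here is the static-library handling added above, pulled out of the main loop as a standalone sketch. sbom_data is the module patched below; new_file_id() and checksum() are simplified stand-ins for helpers that already exist in generate-sbom.py, and the metadata dict mirrors the installed_file_metadata fields used in the patch:

```python
import hashlib
import os

import sbom_data

EMPTY_FILE_SHA1 = 'SHA1: da39a3ee5e6b4b0d3255bfef95601890afd80709'  # sha1 of ''


def new_file_id(file_path):
  # Stand-in: the real helper in generate-sbom.py sanitizes the path into a
  # valid SPDX ID.
  return 'SPDXRef-' + file_path.replace('/', '-').replace('.', '-')


def checksum(file_path):
  # Stand-in: SHA1 over the file contents, formatted like the rest of the tool.
  with open(file_path, 'rb') as f:
    return 'SHA1: ' + hashlib.sha1(f.read()).hexdigest()


def add_installed_file(doc, installed_file, build_output_path, metadata):
  file_id = new_file_id(installed_file)

  # Fall back to the empty-string SHA1 when a static library's .a file cannot
  # be located in the build output.
  sha1 = EMPTY_FILE_SHA1
  if os.path.islink(build_output_path) or os.path.isfile(build_output_path):
    sha1 = checksum(build_output_path)
  doc.files.append(sbom_data.File(id=file_id, name=installed_file, checksum=sha1))

  # One STATIC_LINK relationship per (whole) static library the file links to.
  all_static_libs = (metadata['static_libraries'] + ' ' +
                     metadata['whole_static_libraries']).strip()
  if all_static_libs:
    for lib in all_static_libs.split(' '):
      doc.add_relationship(sbom_data.Relationship(
          id1=file_id,
          relationship=sbom_data.RelationshipType.STATIC_LINK,
          id2=new_file_id(lib + '.a')))
```

Static library files themselves are recorded only as files and are never added to product_package.file_ids, which is why the writer changes below handle files outside any package separately.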
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
index 14c4eb2..ea38e36 100644
--- a/tools/sbom/sbom_data.py
+++ b/tools/sbom/sbom_data.py
@@ -25,6 +25,7 @@
 
 from dataclasses import dataclass, field
 from typing import List
+import hashlib
 
 SPDXID_DOC = 'SPDXRef-DOCUMENT'
 SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
@@ -81,6 +82,7 @@
   VARIANT_OF = 'VARIANT_OF'
   GENERATED_FROM = 'GENERATED_FROM'
   CONTAINS = 'CONTAINS'
+  STATIC_LINK = 'STATIC_LINK'
 
 
 @dataclass
@@ -122,3 +124,17 @@
     if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
                for r in self.relationships):
       self.relationships.append(rel)
+
+  def generate_packages_verification_code(self):
+    for package in self.packages:
+      if not package.file_ids:
+        continue
+
+      checksums = []
+      for file in self.files:
+        if file.id in package.file_ids:
+          checksums.append(file.checksum)
+      checksums.sort()
+      h = hashlib.sha1()
+      h.update(''.join(checksums).encode(encoding='utf-8'))
+      package.verification_code = h.hexdigest()
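The verification-code computation removed from generate-sbom.py now lives here and runs per package. For reference, a hand computation of the stored value, with hypothetical checksum strings:

```python
import hashlib

# Checksum strings of the files listed in one package's file_ids.
checksums = ['SHA1: 22222', 'SHA1: 11111']
checksums.sort()
verification_code = hashlib.sha1(''.join(checksums).encode('utf-8')).hexdigest()
```

Packages with an empty file_ids list are skipped, so only packages that actually own files (for example the PRODUCT package) receive a verification code.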
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
index 85dee9d..1cb864d 100644
--- a/tools/sbom/sbom_writers.py
+++ b/tools/sbom/sbom_writers.py
@@ -85,7 +85,7 @@
     return headers
 
   @staticmethod
-  def marshal_package(package):
+  def marshal_package(sbom_doc, package, fragment):
     download_location = sbom_data.VALUE_NOASSERTION
     if package.download_location:
       download_location = package.download_location
@@ -107,50 +107,32 @@
           f'{Tags.PACKAGE_EXTERNAL_REF}: {external_ref.category} {external_ref.type} {external_ref.locator}')
 
     tagvalues.append('')
+
+    if package.id == sbom_doc.describes and not fragment:
+      tagvalues.append(
+          f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+      tagvalues.append('')
+
+    for file in sbom_doc.files:
+      if file.id in package.file_ids:
+        tagvalues += TagValueWriter.marshal_file(file)
+
     return tagvalues
 
   @staticmethod
-  def marshal_described_element(sbom_doc, fragment):
-    if not sbom_doc.describes:
-      return None
-
-    product_package = [p for p in sbom_doc.packages if p.id == sbom_doc.describes]
-    if product_package:
-      tagvalues = TagValueWriter.marshal_package(product_package[0])
-      if not fragment:
-        tagvalues.append(
-            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
-
-      tagvalues.append('')
-      return tagvalues
-
-    file = [f for f in sbom_doc.files if f.id == sbom_doc.describes]
-    if file:
-      tagvalues = TagValueWriter.marshal_file(file[0])
-      if not fragment:
-        tagvalues.append(
-            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
-
-      return tagvalues
-
-    return None
-
-  @staticmethod
-  def marshal_packages(sbom_doc):
+  def marshal_packages(sbom_doc, fragment):
     tagvalues = []
     marshaled_relationships = []
     i = 0
     packages = sbom_doc.packages
     while i < len(packages):
-      if packages[i].id == sbom_doc.describes:
-        i += 1
-        continue
-
-      if i + 1 < len(packages) \
-          and packages[i].id.startswith('SPDXRef-SOURCE-') \
-          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-'):
-        tagvalues += TagValueWriter.marshal_package(packages[i])
-        tagvalues += TagValueWriter.marshal_package(packages[i + 1])
+      if (i + 1 < len(packages)
+          and packages[i].id.startswith('SPDXRef-SOURCE-')
+          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-')):
+        # Output the SOURCE and UPSTREAM packages and their VARIANT_OF relationship together, so that
+        # they appear next to each other in SBOMs in tag-value format.
+        tagvalues += TagValueWriter.marshal_package(sbom_doc, packages[i], fragment)
+        tagvalues += TagValueWriter.marshal_package(sbom_doc, packages[i + 1], fragment)
         rel = next((r for r in sbom_doc.relationships if
                     r.id1 == packages[i].id and
                     r.id2 == packages[i + 1].id and
@@ -162,7 +144,7 @@
 
         i += 2
       else:
-        tagvalues += TagValueWriter.marshal_package(packages[i])
+        tagvalues += TagValueWriter.marshal_package(sbom_doc, packages[i], fragment)
         i += 1
 
     return tagvalues, marshaled_relationships
@@ -179,12 +161,20 @@
     return tagvalues
 
   @staticmethod
-  def marshal_files(sbom_doc):
+  def marshal_files(sbom_doc, fragment):
     tagvalues = []
+    files_in_packages = []
+    for package in sbom_doc.packages:
+      files_in_packages += package.file_ids
     for file in sbom_doc.files:
-      if file.id == sbom_doc.describes:
+      if file.id in files_in_packages:
         continue
       tagvalues += TagValueWriter.marshal_file(file)
+      if file.id == sbom_doc.describes and not fragment:
+        # A fragment is not a full SBOM document, so the DESCRIBES relationship is not applicable.
+        tagvalues.append(
+            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+        tagvalues.append('')
     return tagvalues
 
   @staticmethod
@@ -208,11 +198,8 @@
     content = []
     if not fragment:
       content += TagValueWriter.marshal_doc_headers(sbom_doc)
-    described_element = TagValueWriter.marshal_described_element(sbom_doc, fragment)
-    if described_element:
-      content += described_element
-    content += TagValueWriter.marshal_files(sbom_doc)
-    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc)
+    content += TagValueWriter.marshal_files(sbom_doc, fragment)
+    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc, fragment)
     content += tagvalues
     content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
     file.write('\n'.join(content))
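The net effect of the writer rework: each file is printed under the package whose file_ids lists it, files owned by no package are printed on their own first, and the DESCRIBES relationship is attached to whichever element the document describes (unless writing a fragment). A simplified illustration of that grouping, using plain dicts rather than the real sbom_data classes:

```python
def group_files(packages, files):
  # Files referenced by some package are nested under it; the rest stand alone.
  owned = {fid for p in packages for fid in p['file_ids']}
  standalone = [f for f in files if f['id'] not in owned]
  nested = {p['id']: [f for f in files if f['id'] in p['file_ids']]
            for p in packages}
  return standalone, nested


packages = [{'id': 'SPDXRef-PRODUCT', 'file_ids': ['SPDXRef-file1', 'SPDXRef-file2']}]
files = [{'id': 'SPDXRef-file1'}, {'id': 'SPDXRef-file2'}, {'id': 'SPDXRef-file4'}]

standalone, nested = group_files(packages, files)
# standalone -> the SPDXRef-file4 entry, emitted first on its own
# nested     -> SPDXRef-file1/file2, emitted right after the PRODUCT package
```

This matches the expected test data below, where file4.a appears before the PRODUCT package and the other files follow it.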
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
index 361dae6..cf85e01 100644
--- a/tools/sbom/sbom_writers_test.py
+++ b/tools/sbom/sbom_writers_test.py
@@ -31,6 +31,7 @@
 SPDXID_FILE1 = 'SPDXRef-file1'
 SPDXID_FILE2 = 'SPDXRef-file2'
 SPDXID_FILE3 = 'SPDXRef-file3'
+SPDXID_FILE4 = 'SPDXRef-file4'
 
 
 class SBOMWritersTest(unittest.TestCase):
@@ -101,6 +102,8 @@
       sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
     self.sbom_doc.files.append(
       sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE4, name='file4.a', checksum='SHA1: 44444'))
 
     self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
                                                           relationship=sbom_data.RelationshipType.GENERATED_FROM,
@@ -112,6 +115,10 @@
                                                           relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                           id2=SPDXID_SOURCE_PACKAGE1
                                                           ))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                          relationship=sbom_data.RelationshipType.STATIC_LINK,
+                                                          id2=SPDXID_FILE4
+                                                          ))
 
     # SBOM fragment of an APK
     self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
@@ -139,6 +146,14 @@
       self.maxDiff = None
       self.assertEqual(expected_output, output.getvalue())
 
+  def test_tagvalue_writer_doc_describes_file(self):
+    with io.StringIO() as output:
+      self.sbom_doc.describes = SPDXID_FILE4
+      sbom_writers.TagValueWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom_doc_describes_file.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
   def test_tagvalue_writer_unbundled(self):
     with io.StringIO() as output:
       sbom_writers.TagValueWriter.write(self.unbundled_sbom_doc, output, fragment=True)
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
index 32715a5..53936c5 100644
--- a/tools/sbom/testdata/expected_json_sbom.spdx.json
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -110,6 +110,16 @@
                     "checksumValue": "33333"
                 }
             ]
+        },
+        {
+            "fileName": "file4.a",
+            "SPDXID": "SPDXRef-file4",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "44444"
+                }
+            ]
         }
     ],
     "relationships": [
@@ -129,6 +139,11 @@
             "relationshipType": "GENERATED_FROM"
         },
         {
+            "spdxElementId": "SPDXRef-file1",
+            "relatedSpdxElement": "SPDXRef-file4",
+            "relationshipType": "STATIC_LINK"
+        },
+        {
             "spdxElementId": "SPDXRef-SOURCE-package1",
             "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
             "relationshipType": "VARIANT_OF"
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
index ee39e82..e6fd17e 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -7,6 +7,10 @@
 Created: 2023-03-31T22:17:58Z
 ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
 
+FileName: file4.a
+SPDXID: SPDXRef-file4
+FileChecksum: SHA1: 44444
+
 PackageName: PRODUCT
 SPDXID: SPDXRef-PRODUCT
 PackageDownloadLocation: NONE
@@ -63,3 +67,4 @@
 Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
 Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
 Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
+Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
new file mode 100644
index 0000000..428d7e3
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
@@ -0,0 +1,70 @@
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: test doc
+DocumentNamespace: http://www.google.com/sbom/spdx/android
+Creator: Organization: Google
+Created: 2023-03-31T22:17:58Z
+ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
+
+FileName: file4.a
+SPDXID: SPDXRef-file4
+FileChecksum: SHA1: 44444
+
+Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-file4
+
+PackageName: PRODUCT
+SPDXID: SPDXRef-PRODUCT
+PackageDownloadLocation: NONE
+FilesAnalyzed: true
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+PackageVerificationCode: 123456
+
+FileName: /bin/file1
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+FileName: /bin/file2
+SPDXID: SPDXRef-file2
+FileChecksum: SHA1: 22222
+
+FileName: /bin/file3
+SPDXID: SPDXRef-file3
+FileChecksum: SHA1: 33333
+
+PackageName: PLATFORM
+SPDXID: SPDXRef-PLATFORM
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Prebuilt package1
+SPDXID: SPDXRef-PREBUILT-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Source package1
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
+
+PackageName: Upstream package1
+SPDXID: SPDXRef-UPSTREAM-package1
+PackageDownloadLocation: NOASSERTION
+FilesAnalyzed: false
+PackageVersion: 1.1
+PackageSupplier: Organization: upstream
+
+Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
+Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
+Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
+Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4