Merge "Adding fastboot-info to target-files"
diff --git a/core/Makefile b/core/Makefile
index 1d5bb66..622174f 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -3288,8 +3288,8 @@
 
 endif # BUILDING_SYSTEM_IMAGE
 
-.PHONY: sync syncsys
-sync syncsys: $(INTERNAL_SYSTEMIMAGE_FILES)
+.PHONY: sync syncsys sync_system
+sync syncsys sync_system: $(INTERNAL_SYSTEMIMAGE_FILES)
 
 # -----------------------------------------------------------------
 # Old PDK fusion targets
@@ -3617,7 +3617,8 @@
 vendorimage-nodeps vnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-vendorimage-target)
 
-sync: $(INTERNAL_VENDORIMAGE_FILES)
+.PHONY: sync_vendor
+sync sync_vendor: $(INTERNAL_VENDORIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_VENDORIMAGE
 INSTALLED_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
@@ -3681,7 +3682,8 @@
 productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-productimage-target)
 
-sync: $(INTERNAL_PRODUCTIMAGE_FILES)
+.PHONY: sync_product
+sync sync_product: $(INTERNAL_PRODUCTIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_PRODUCTIMAGE
 INSTALLED_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
@@ -3743,7 +3745,8 @@
 systemextimage-nodeps senod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-system_extimage-target)
 
-sync: $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+.PHONY: sync_system_ext
+sync sync_system_ext: $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
 INSTALLED_SYSTEM_EXTIMAGE_TARGET := $(PRODUCT_OUT)/system_ext.img
@@ -3824,7 +3827,8 @@
 odmimage-nodeps onod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-odmimage-target)
 
-sync: $(INTERNAL_ODMIMAGE_FILES)
+.PHONY: sync_odm
+sync sync_odm: $(INTERNAL_ODMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_ODMIMAGE
 INSTALLED_ODMIMAGE_TARGET := $(PRODUCT_OUT)/odm.img
@@ -3885,7 +3889,8 @@
 vendor_dlkmimage-nodeps vdnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-vendor_dlkmimage-target)
 
-sync: $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+.PHONY: sync_vendor_dlkm
+sync sync_vendor_dlkm: $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
 INSTALLED_VENDOR_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/vendor_dlkm.img
@@ -3946,7 +3951,8 @@
 odm_dlkmimage-nodeps odnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-odm_dlkmimage-target)
 
-sync: $(INTERNAL_ODM_DLKMIMAGE_FILES)
+.PHONY: sync_odm_dlkm
+sync sync_odm_dlkm: $(INTERNAL_ODM_DLKMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
 INSTALLED_ODM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/odm_dlkm.img
@@ -4009,7 +4015,8 @@
 system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-system_dlkmimage-target)
 
-sync: $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+.PHONY: sync_system_dlkm
+sync sync_system_dlkm: $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
 INSTALLED_SYSTEM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/system_dlkm.img
@@ -5273,7 +5280,7 @@
 
 # -----------------------------------------------------------------
 #  fastboot-info.txt
-FASTBOOT_INFO_VERSION = 1.0
+FASTBOOT_INFO_VERSION = 1
 
 INSTALLED_FASTBOOT_INFO_TARGET := $(PRODUCT_OUT)/fastboot-info.txt
 
@@ -5765,6 +5772,8 @@
     echo "virtual_ab_compression_method=$(PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD)" >> $(1))
   $(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT)), \
     echo "virtual_ab_retrofit=true" >> $(1))
+  $(if $(PRODUCT_VIRTUAL_AB_COW_VERSION), \
+    echo "virtual_ab_cow_version=$(PRODUCT_VIRTUAL_AB_COW_VERSION)" >> $(1))
 endef
 
 # By conditionally including the dependency of the target files package on the
@@ -6393,11 +6402,10 @@
 PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$(dir $(ZIP2ZIP)):$$PATH \
     $(OTA_FROM_TARGET_FILES) \
         --verbose \
-        --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
         --path $(HOST_OUT) \
         $(if $(OEM_OTA_CONFIG), --oem_settings $(OEM_OTA_CONFIG)) \
         $(2) \
-        $(BUILT_TARGET_FILES_PACKAGE) $(1)
+        $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) $(1)
 endef
 
 product_name := $(TARGET_PRODUCT)
@@ -6413,7 +6421,7 @@
 
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 $(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
-$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_DIR) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
 	@echo "Package OTA: $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
 
@@ -6449,7 +6457,7 @@
 
 INTERNAL_OTA_PARTIAL_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
-$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
+$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): $(BUILT_TARGET_FILES_DIR) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
 	@echo "Package partial OTA: $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --partial "$(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST)")
 
@@ -6772,7 +6780,7 @@
 # For real devices and for dist builds, build super image from target files to an intermediate directory.
 INTERNAL_SUPERIMAGE_DIST_TARGET := $(call intermediates-dir-for,PACKAGING,super.img)/super.img
 $(INTERNAL_SUPERIMAGE_DIST_TARGET): extracted_input_target_files := $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE))
-$(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_PACKAGE) $(BUILD_SUPER_IMAGE)
+$(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_DIR) $(BUILD_SUPER_IMAGE)
 	$(call pretty,"Target super fs image from target files: $@")
 	PATH=$(dir $(LPMAKE)):$$PATH \
 	    $(BUILD_SUPER_IMAGE) -v $(extracted_input_target_files) $@
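
For context, a sketch of how the new per-partition sync targets are meant to be used (assuming a userdebug build where adb remount is available; the partition choice is illustrative):

    m sync_vendor              # build just the files that feed vendor.img
    adb root && adb remount    # make the partition writable on the device
    adb sync vendor            # push the changed files without reflashing
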
diff --git a/core/board_config.mk b/core/board_config.mk
index fae7aaa..7969b25 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -256,7 +256,7 @@
   endif
 
   $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_board_config_results.mk \
-    $(OUT_DIR)/rbcrun RBC_OUT="make" $(OUT_DIR)/rbc/boardlauncher.rbc)
+    $(OUT_DIR)/rbcrun $(OUT_DIR)/rbc/boardlauncher.rbc)
   ifneq ($(.SHELLSTATUS),0)
     $(error board configuration runner failed: $(.SHELLSTATUS))
   endif
diff --git a/core/config.mk b/core/config.mk
index 4300800..396aad0 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -546,8 +546,10 @@
 
 TARGET_BUILD_USE_PREBUILT_SDKS :=
 DISABLE_PREOPT :=
+DISABLE_PREOPT_BOOT_IMAGES :=
 ifneq (,$(TARGET_BUILD_APPS)$(TARGET_BUILD_UNBUNDLED_IMAGE))
   DISABLE_PREOPT := true
+  DISABLE_PREOPT_BOOT_IMAGES := true
 endif
 ifeq (true,$(TARGET_BUILD_UNBUNDLED))
   ifneq (true,$(UNBUNDLED_BUILD_SDKS_FROM_SOURCE))
@@ -558,6 +560,7 @@
 .KATI_READONLY := \
   TARGET_BUILD_USE_PREBUILT_SDKS \
   DISABLE_PREOPT \
+  DISABLE_PREOPT_BOOT_IMAGES \
 
 prebuilt_sdk_tools := prebuilts/sdk/tools
 prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index e36e2eb..7b9c4db 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -12,9 +12,15 @@
   # would result in passing bad arguments to dex2oat and failing the build.
   ENABLE_PREOPT :=
   ENABLE_PREOPT_BOOT_IMAGES :=
-else ifeq (true,$(DISABLE_PREOPT))
-  # Disable dexpreopt for libraries/apps, but do compile boot images.
-  ENABLE_PREOPT :=
+else
+  ifeq (true,$(DISABLE_PREOPT))
+    # Disable dexpreopt for libraries/apps; boot images may still be compiled.
+    ENABLE_PREOPT :=
+  endif
+  ifeq (true,$(DISABLE_PREOPT_BOOT_IMAGES))
+    # Disable dexpreopt for boot images; libraries/apps may still be compiled.
+    ENABLE_PREOPT_BOOT_IMAGES :=
+  endif
 endif
 
 # The default value for LOCAL_DEX_PREOPT
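
Combined with the config.mk hunk above, the preopt switches behave roughly as follows (a sketch, not literal build output):

    # DISABLE_PREOPT=true only                                -> skip dexpreopt of libraries/apps, still build boot images
    # DISABLE_PREOPT=true + DISABLE_PREOPT_BOOT_IMAGES=true   -> skip both (set automatically for TARGET_BUILD_APPS /
    #                                                            TARGET_BUILD_UNBUNDLED_IMAGE builds)
    # neither set                                             -> dexpreopt apps and boot images as before
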
diff --git a/core/product.mk b/core/product.mk
index e90e27b..818aac2 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -404,6 +404,10 @@
 #   supports it
 _product_single_value_vars += PRODUCT_ENABLE_UFFD_GC
 
+# Specifies the COW version to be used by update_engine and libsnapshot. If this value is
+# not specified, update_engine defaults to COW version 2 for backwards compatibility.
+_product_single_value_vars += PRODUCT_VIRTUAL_AB_COW_VERSION
+
 _product_list_vars += PRODUCT_AFDO_PROFILES
 
 .KATI_READONLY := _product_single_value_vars _product_list_vars
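
For illustration, a product that opts into a specific COW version carries a single assignment (android_t_baseline.mk later in this patch does exactly this), which the build then records next to the other virtual_ab_* entries per the core/Makefile hunk above:

    PRODUCT_VIRTUAL_AB_COW_VERSION := 2
    # -> emitted as: virtual_ab_cow_version=2
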
diff --git a/core/product_config.mk b/core/product_config.mk
index 9db881f..6a27613 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -223,7 +223,7 @@
 endif
 
 ifeq (,$(current_product_makefile))
-  $(error Can not locate config makefile for product "$(TARGET_PRODUCT)")
+  $(error Cannot locate config makefile for product "$(TARGET_PRODUCT)")
 endif
 
 ifneq (,$(filter $(TARGET_PRODUCT),$(products_using_starlark_config)))
@@ -236,14 +236,22 @@
   $(shell mkdir -p $(OUT_DIR)/rbc)
   $(call dump-variables-rbc, $(OUT_DIR)/rbc/make_vars_pre_product_config.mk)
 
-  $(shell build/soong/scripts/update_out \
-    $(OUT_DIR)/rbc/rbc_product_config_results.mk \
-    build/soong/scripts/rbc-run \
-    $(current_product_makefile) \
-    $(OUT_DIR)/rbc/make_vars_pre_product_config.mk)
+  $(shell $(OUT_DIR)/mk2rbc \
+    --mode=write -r --outdir $(OUT_DIR)/rbc \
+    --launcher=$(OUT_DIR)/rbc/launcher.rbc \
+    --input_variables=$(OUT_DIR)/rbc/make_vars_pre_product_config.mk \
+    --makefile_list=$(OUT_DIR)/.module_paths/configuration.list \
+    $(current_product_makefile))
   ifneq ($(.SHELLSTATUS),0)
     $(error product configuration converter failed: $(.SHELLSTATUS))
   endif
+
+  $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_product_config_results.mk \
+    $(OUT_DIR)/rbcrun $(OUT_DIR)/rbc/launcher.rbc)
+  ifneq ($(.SHELLSTATUS),0)
+    $(error product configuration runner failed: $(.SHELLSTATUS))
+  endif
+
   include $(OUT_DIR)/rbc/rbc_product_config_results.mk
 endif
 
diff --git a/core/product_config.rbc b/core/product_config.rbc
index a5e5721..e594894 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -54,25 +54,16 @@
     if value == None:
         return
     if type(value) == "list":
-        if _options.rearrange:
-            value = __printvars_rearrange_list(value)
-        if _options.format == "pretty":
-            print(attr, "=", repr(value))
-        elif _options.format == "make":
-            value = list(value)
-            for i, x in enumerate(value):
-                if type(x) == "tuple" and len(x) == 1:
-                    value[i] = "@inherit:" + x[0] + ".mk"
-                elif type(x) != "string":
-                    fail("Wasn't a list of strings:", attr, " value:", value)
-            print(attr, ":=", " ".join(value))
-    elif _options.format == "pretty":
-        print(attr, "=", repr(value))
-    elif _options.format == "make":
+        value = list(value)
+        for i, x in enumerate(value):
+            if type(x) == "tuple" and len(x) == 1:
+                value[i] = "@inherit:" + x[0] + ".mk"
+            elif type(x) != "string":
+                fail("Wasn't a list of strings:", attr, " value:", value)
+        print(attr, ":=", " ".join(value))
+    else:
         # Trim all spacing to a single space
         print(attr, ":=", _mkstrip(value))
-    else:
-        fail("bad output format", _options.format)
 
 def _printvars(state):
     """Prints configuration and global variables."""
@@ -83,8 +74,7 @@
             for nsname, nsvars in sorted(val.items()):
                 # Define SOONG_CONFIG_<ns> for Make, othewise
                 # it cannot be added to .KATI_READONLY list
-                if _options.format == "make":
-                    print("SOONG_CONFIG_" + nsname, ":=", " ".join(nsvars.keys()))
+                print("SOONG_CONFIG_" + nsname, ":=", " ".join(nsvars.keys()))
                 for var, val in sorted(nsvars.items()):
                     if val:
                         __print_attr("SOONG_CONFIG_%s_%s" % (nsname, var), val)
@@ -105,11 +95,6 @@
         elif attr not in globals_base or globals_base[attr] != val:
             __print_attr(attr, val)
 
-def __printvars_rearrange_list(value_list):
-    """Rearrange value list: return only distinct elements, maybe sorted."""
-    seen = {item: 0 for item in value_list}
-    return sorted(seen.keys()) if _options.rearrange == "sort" else seen.keys()
-
 def __sort_pcm_names(pcm_names):
     # We have to add an extension back onto the pcm names when sorting,
     # or else the sort order could be wrong when one is a prefix of another.
@@ -695,16 +680,8 @@
     rblf_log(file, "warning", message, sep = ':')
 
 def _mk2rbc_error(loc, message):
-    """Prints a message about conversion error and stops.
-
-    If RBC_MK2RBC_CONTINUE environment variable is set,
-    the execution will continue after the message is printed.
-    """
-    if _options.mk2rbc_continue:
-        rblf_log(loc, message, sep = ':')
-    else:
-        _mkerror(loc, message)
-
+    """Prints a message about conversion error and stops."""
+    _mkerror(loc, message)
 
 def _mkinfo(file, message = ""):
     """Prints info."""
@@ -877,39 +854,12 @@
             # Cause the variable to appear set like the make version does
             g[v] = ""
 
-
-def __get_options():
-    """Returns struct containing runtime global settings."""
-    settings = dict(
-        format = "pretty",
-        rearrange = "",
-        trace_modules = False,
-        trace_variables = [],
-        mk2rbc_continue = False,
-    )
-    for x in getattr(rblf_cli, "RBC_OUT", "").split(","):
-        if x == "sort" or x == "unique":
-            if settings["rearrange"]:
-                fail("RBC_OUT: either sort or unique is allowed (and sort implies unique)")
-            settings["rearrange"] = x
-        elif x == "pretty" or x == "make":
-            settings["format"] = x
-        elif x == "global":
-            # TODO: Remove this, kept for backwards compatibility
-            pass
-        elif x != "":
-            fail("RBC_OUT: got %s, should be one of: [pretty|make] [sort|unique]" % x)
-    for x in getattr(rblf_cli, "RBC_DEBUG", "").split(","):
-        if x == "!trace":
-            settings["trace_modules"] = True
-        elif x != "":
-            settings["trace_variables"].append(x)
-    if getattr(rblf_cli, "RBC_MK2RBC_CONTINUE", ""):
-        settings["mk2rbc_continue"] = True
-    return struct(**settings)
-
 # Settings used during debugging.
-_options = __get_options()
+_options = struct(
+    trace_modules = False,
+    trace_variables = [],
+)
+
 rblf = struct(
     soong_config_namespace = _soong_config_namespace,
     soong_config_append = _soong_config_append,
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 90a2f75..034d044 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -315,6 +315,10 @@
 
 $(call add_json_list, AfdoProfiles,                $(ALL_AFDO_PROFILES))
 
+$(call add_json_str,  ProductManufacturer, $(PRODUCT_MANUFACTURER))
+$(call add_json_str,  ProductBrand,        $(PRODUCT_BRAND))
+$(call add_json_list, BuildVersionTags,    $(BUILD_VERSION_TAGS))
+
 $(call json_end)
 
 $(file >$(SOONG_VARIABLES).tmp,$(json_contents))
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 9e9e74b..f9175e45 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -104,7 +104,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2023-04-05
+    PLATFORM_SECURITY_PATCH := 2023-05-05
 endif
 
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/envsetup.sh b/envsetup.sh
index 17d8a5d..916344c 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -804,13 +804,19 @@
 
     export TARGET_BUILD_APPS=
 
-    local product variant_and_version variant version
+    # Support either <product>-<variant> or <product>-<release>-<variant>
+    local product release_and_variant release variant
     product=${selection%%-*} # Trim everything after first dash
-    variant_and_version=${selection#*-} # Trim everything up to first dash
-    if [ "$variant_and_version" != "$selection" ]; then
-        variant=${variant_and_version%%-*}
-        if [ "$variant" != "$variant_and_version" ]; then
-            version=${variant_and_version#*-}
+    release_and_variant=${selection#*-} # Trim everything up to first dash
+    if [ "$release_and_variant" != "$selection" ]; then
+        local first=${release_and_variant%%-*} # Trim everything after first dash
+        if [ "$first" != "$release_and_variant" ]; then
+            # There is a 2nd dash, split into release-variant
+            release=$first # Everything up to the dash
+            variant=${release_and_variant#*-} # Trim everything up to dash
+        else
+            # There is no 2nd dash, so the second component is the variant
+            variant=$first
         fi
     fi
 
@@ -823,7 +829,7 @@
 
     TARGET_PRODUCT=$product \
     TARGET_BUILD_VARIANT=$variant \
-    TARGET_PLATFORM_VERSION=$version \
+    TARGET_RELEASE=$release \
     build_build_var_cache
     if [ $? -ne 0 ]
     then
@@ -835,10 +841,10 @@
     fi
     export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
     export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
-    if [ -n "$version" ]; then
-      export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+    if [ -n "$release" ]; then
+      export TARGET_RELEASE=$(get_build_var TARGET_RELEASE)
     else
-      unset TARGET_PLATFORM_VERSION
+      unset TARGET_RELEASE
     fi
     export TARGET_BUILD_TYPE=release
 
diff --git a/target/board/generic/BoardConfig.mk b/target/board/generic/BoardConfig.mk
index 6720ddb..87c16da 100644
--- a/target/board/generic/BoardConfig.mk
+++ b/target/board/generic/BoardConfig.mk
@@ -30,3 +30,20 @@
 TARGET_CPU_ABI2 := armeabi
 
 include build/make/target/board/BoardConfigGsiCommon.mk
+
+ifndef BUILDING_GSI
+include build/make/target/board/BoardConfigEmuCommon.mk
+
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
+
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
+endif
diff --git a/target/board/generic/device.mk b/target/board/generic/device.mk
index 76edf6b..76242c9 100644
--- a/target/board/generic/device.mk
+++ b/target/board/generic/device.mk
@@ -14,3 +14,5 @@
 # limitations under the License.
 #
 
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
diff --git a/target/board/generic_64bitonly_x86_64/BoardConfig.mk b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
index a129ea0..a240eab 100644
--- a/target/board/generic_64bitonly_x86_64/BoardConfig.mk
+++ b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
@@ -28,3 +28,23 @@
 TARGET_PRELINK_MODULE := false
 
 include build/make/target/board/BoardConfigGsiCommon.mk
+
+ifndef BUILDING_GSI
+include build/make/target/board/BoardConfigEmuCommon.mk
+
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
+
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
+
+endif # !BUILDING_GSI
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index 26bede8..47fd384 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -19,3 +19,23 @@
 TARGET_ARCH_VARIANT := x86
 
 include build/make/target/board/BoardConfigGsiCommon.mk
+
+ifndef BUILDING_GSI
+include build/make/target/board/BoardConfigEmuCommon.mk
+
+# Resize to 4G to accommodate ASAN and CTS
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
+
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
+endif
diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk
index 60f0cc3..5ad008f 100644
--- a/target/board/generic_x86/device.mk
+++ b/target/board/generic_x86/device.mk
@@ -14,6 +14,9 @@
 # limitations under the License.
 #
 
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
+
 ifdef NET_ETH0_STARTONBOOT
   PRODUCT_VENDOR_PROPERTIES += net.eth0.startonboot=1
 endif
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 2385579..e7f2ae0 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -26,3 +26,23 @@
 TARGET_DYNAMIC_64_32_MEDIASERVER := true
 
 include build/make/target/board/BoardConfigGsiCommon.mk
+
+ifndef BUILDING_GSI
+include build/make/target/board/BoardConfigEmuCommon.mk
+
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
+
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
+
+endif # !BUILDING_GSI
diff --git a/target/board/generic_x86_64_arm64/BoardConfig.mk b/target/board/generic_x86_64_arm64/BoardConfig.mk
index 818ec44..f528294 100755
--- a/target/board/generic_x86_64_arm64/BoardConfig.mk
+++ b/target/board/generic_x86_64_arm64/BoardConfig.mk
@@ -13,6 +13,7 @@
 # limitations under the License.
 #
 
+# x86_64 emulator-specific definitions
 TARGET_CPU_ABI := x86_64
 TARGET_ARCH := x86_64
 TARGET_ARCH_VARIANT := x86_64
@@ -36,9 +37,23 @@
 TARGET_PRELINK_MODULE := false
 
 include build/make/target/board/BoardConfigMainlineCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
 
 # the settings differ from BoardConfigMainlineCommon.mk
 BOARD_USES_SYSTEM_OTHER_ODEX :=
 
 # Resize to 4G to accommodate ASAN and CTS
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
+
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
diff --git a/target/board/generic_x86_64_arm64/device.mk b/target/board/generic_x86_64_arm64/device.mk
index 76edf6b..76242c9 100755
--- a/target/board/generic_x86_64_arm64/device.mk
+++ b/target/board/generic_x86_64_arm64/device.mk
@@ -14,3 +14,5 @@
 # limitations under the License.
 #
 
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
diff --git a/target/board/generic_x86_arm/BoardConfig.mk b/target/board/generic_x86_arm/BoardConfig.mk
index 62bb5eb..f6589b0 100644
--- a/target/board/generic_x86_arm/BoardConfig.mk
+++ b/target/board/generic_x86_arm/BoardConfig.mk
@@ -13,6 +13,7 @@
 # limitations under the License.
 #
 
+# x86 emulator-specific definitions
 TARGET_CPU_ABI := x86
 TARGET_ARCH := x86
 TARGET_ARCH_VARIANT := x86
@@ -29,9 +30,23 @@
 # The settings in latter makefiles overwrite those in the former.
 #
 include build/make/target/board/BoardConfigMainlineCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
 
 # the settings differ from BoardConfigMainlineCommon.mk
 BOARD_USES_SYSTEM_OTHER_ODEX :=
 
 # Resize to 4G to accomodate ASAN and CTS
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
+
+# Wifi.
+BOARD_WLAN_DEVICE           := emulator
+BOARD_HOSTAPD_DRIVER        := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB   := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION      := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
diff --git a/target/board/generic_x86_arm/device.mk b/target/board/generic_x86_arm/device.mk
index 76edf6b..76242c9 100644
--- a/target/board/generic_x86_arm/device.mk
+++ b/target/board/generic_x86_arm/device.mk
@@ -14,3 +14,5 @@
 # limitations under the License.
 #
 
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
diff --git a/target/product/angle.mk b/target/product/angle_default.mk
similarity index 63%
copy from target/product/angle.mk
copy to target/product/angle_default.mk
index 0d7f8cb..bea0be6 100644
--- a/target/product/angle.mk
+++ b/target/product/angle_default.mk
@@ -14,13 +14,10 @@
 # limitations under the License.
 #
 
-# To include ANGLE drivers into the build, add
-# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle.mk) to the Makefile.
+# To enable ANGLE as the default system GLES driver, add
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_default.mk) to the Makefile.
 
-PRODUCT_PACKAGES := \
-    libEGL_angle \
-    libGLESv1_CM_angle \
-    libGLESv2_angle
+$(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk)
 
-# Set ro.gfx.angle.supported based on if ANGLE is installed in vendor partition
-PRODUCT_VENDOR_PROPERTIES := ro.gfx.angle.supported=true
+PRODUCT_VENDOR_PROPERTIES += \
+    persist.graphics.egl=angle
diff --git a/target/product/angle.mk b/target/product/angle_supported.mk
similarity index 71%
rename from target/product/angle.mk
rename to target/product/angle_supported.mk
index 0d7f8cb..c83ff5f 100644
--- a/target/product/angle.mk
+++ b/target/product/angle_supported.mk
@@ -14,13 +14,14 @@
 # limitations under the License.
 #
 
-# To include ANGLE drivers into the build, add
-# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle.mk) to the Makefile.
+# To include ANGLE in the image build, add
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk) to the Makefile.
+# By default, this will allow ANGLE binaries to coexist with native GLES drivers.
 
-PRODUCT_PACKAGES := \
+PRODUCT_PACKAGES += \
     libEGL_angle \
     libGLESv1_CM_angle \
     libGLESv2_angle
 
 # Set ro.gfx.angle.supported based on if ANGLE is installed in vendor partition
-PRODUCT_VENDOR_PROPERTIES := ro.gfx.angle.supported=true
+PRODUCT_VENDOR_PROPERTIES += ro.gfx.angle.supported=true
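
A product makefile would pick one of the two (sketch, using the paths introduced by this patch):

    # ship ANGLE next to the native GLES driver, selectable at runtime:
    $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk)
    # or make ANGLE the default GLES driver via persist.graphics.egl:
    $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_default.mk)
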
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
index cf812a2..75fd3c8 100644
--- a/target/product/aosp_64bitonly_x86_64.mk
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -51,6 +51,7 @@
 #
 # All components inherited here go to vendor image
 #
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 
 #
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index d9c362e..61c1316 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -50,6 +50,7 @@
 # All components inherited here go to vendor image
 #
 $(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
 #
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index d3514a5..6c907db 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -54,6 +54,7 @@
 #
 # All components inherited here go to vendor or vendor_boot image
 #
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_arm64/device.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
 
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
index fa503ff..270a989 100644
--- a/target/product/aosp_riscv64.mk
+++ b/target/product/aosp_riscv64.mk
@@ -46,6 +46,7 @@
 #
 # All components inherited here go to vendor image
 #
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_riscv64/device.mk)
 
 #
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index c26a8bf..a2f0390 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -47,6 +47,7 @@
 #
 # All components inherited here go to vendor image
 #
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
 
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 3040dd3..535ee3f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -56,6 +56,7 @@
 #
 # All components inherited here go to vendor image
 #
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
 
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index a103b1a..39ad0d8 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -45,6 +45,7 @@
 #
 # All components inherited here go to vendor image
 #
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_arm/device.mk)
 
 
diff --git a/target/product/full.mk b/target/product/full.mk
index f1e4a16..945957f 100644
--- a/target/product/full.mk
+++ b/target/product/full.mk
@@ -20,6 +20,7 @@
 # entirely appropriate to inherit from for on-device configurations.
 
 $(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic/device.mk)
 
diff --git a/target/product/full_x86.mk b/target/product/full_x86.mk
index 72e170d..0f3be91 100644
--- a/target/product/full_x86.mk
+++ b/target/product/full_x86.mk
@@ -23,6 +23,7 @@
 # that isn't a wifi connection. This will instruct init.rc to enable the
 # network connection so that you can use it with ADB
 
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
diff --git a/target/product/generic.mk b/target/product/generic.mk
index fd3b3fb..fb5b727 100644
--- a/target/product/generic.mk
+++ b/target/product/generic.mk
@@ -14,6 +14,9 @@
 # limitations under the License.
 #
 
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
+PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
+
 # This is a generic phone product that isn't specialized for a specific device.
 # It includes the base Android platform.
 
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 418aaa4..f862485 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -20,3 +20,5 @@
 #
 # All U+ launching devices should instead use vabc_features.mk.
 $(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/vabc_features.mk)
+
+PRODUCT_VIRTUAL_AB_COW_VERSION := 2
diff --git a/tools/aconfig/.gitignore b/tools/aconfig/.gitignore
new file mode 100644
index 0000000..1b72444
--- /dev/null
+++ b/tools/aconfig/.gitignore
@@ -0,0 +1,2 @@
+/Cargo.lock
+/target
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
index b3813bf..9617e0e 100644
--- a/tools/aconfig/Android.bp
+++ b/tools/aconfig/Android.bp
@@ -18,7 +18,12 @@
     srcs: ["src/main.rs"],
     rustlibs: [
         "libaconfig_protos",
+        "libanyhow",
+        "libclap",
         "libprotobuf",
+        "libserde",
+        "libserde_json",
+        "libtinytemplate",
     ],
 }
 
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
new file mode 100644
index 0000000..8517dd2
--- /dev/null
+++ b/tools/aconfig/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "aconfig"
+version = "0.1.0"
+edition = "2021"
+build = "build.rs"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
+anyhow = "1.0.69"
+clap = { version = "4.1.8", features = ["derive"] }
+protobuf = "3.2.0"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
+tinytemplate = "1.2.1"
+
+[build-dependencies]
+protobuf-codegen = "3.2.0"
diff --git a/tools/aconfig/build.rs b/tools/aconfig/build.rs
new file mode 100644
index 0000000..5ef5b60
--- /dev/null
+++ b/tools/aconfig/build.rs
@@ -0,0 +1,17 @@
+use protobuf_codegen::Codegen;
+
+fn main() {
+    let proto_files = vec!["protos/aconfig.proto"];
+
+    // tell cargo to only re-run the build script if any of the proto files has changed
+    for path in &proto_files {
+        println!("cargo:rerun-if-changed={}", path);
+    }
+
+    Codegen::new()
+        .pure()
+        .include("protos")
+        .inputs(proto_files)
+        .cargo_out_dir("aconfig_proto")
+        .run_from_script();
+}
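
Because Codegen::new().pure() selects the pure-Rust protobuf code generator, the Cargo build should need no system protoc; a plausible standalone invocation is simply:

    cd tools/aconfig && cargo build    # build.rs regenerates the proto bindings whenever protos/aconfig.proto changes
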
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/protos/aconfig.proto
index 989c398..a3b1fec 100644
--- a/tools/aconfig/protos/aconfig.proto
+++ b/tools/aconfig/protos/aconfig.proto
@@ -12,12 +12,74 @@
 // See the License for the specific language governing permissions and
 // limitations under the License
 
-// Placeholder proto file. Will be replaced by actual contents.
+// This is the schema definition for Aconfig files. Modifications need to be
+// either backwards compatible, or include updates to all Aconfig files in the
+// Android tree.
 
-syntax = "proto3";
+syntax = "proto2";
 
 package android.aconfig;
 
-message Placeholder {
-  string name = 1;
+// messages used in both aconfig input and output
+
+enum flag_state {
+  ENABLED = 1;
+  DISABLED = 2;
+}
+
+enum flag_permission {
+  READ_ONLY = 1;
+  READ_WRITE = 2;
+}
+
+// aconfig input messages: configuration and override data
+
+message flag_value {
+  required flag_state state = 1;
+  required flag_permission permission = 2;
+  optional uint32 since = 3;
+}
+
+message flag_definition {
+  required string name = 1;
+  required string description = 2;
+  repeated flag_value value = 3;
+};
+
+message namespace {
+  required string namespace = 1;
+  repeated flag_definition flag = 2;
+};
+
+message flag_override {
+  required string namespace = 1;
+  required string name = 2;
+  required flag_state state = 3;
+  required flag_permission permission = 4;
+};
+
+message flag_overrides {
+  repeated flag_override flag_override = 1;
+};
+
+// aconfig output messages: parsed and verified configuration and override data
+
+message tracepoint {
+  // path to config or override file relative to $TOP
+  required string source = 1;
+  required flag_state state = 2;
+  required flag_permission permission = 3;
+}
+
+message parsed_flag {
+  required string namespace = 1;
+  required string name = 2;
+  required string description = 3;
+  required flag_state state = 4;
+  required flag_permission permission = 5;
+  repeated tracepoint trace = 6;
+}
+
+message parsed_flags {
+  repeated parsed_flag parsed_flag = 1;
 }
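
For reference, a namespace declaration that satisfies this schema looks like the text protos used in the unit tests further down (names and the since value are illustrative):

    namespace: "ns"
    flag {
        name: "my_flag"
        description: "Description of the flag"
        value {
            state: DISABLED
            permission: READ_ONLY
        }
        value {
            state: ENABLED
            permission: READ_WRITE
            since: 8
        }
    }
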
diff --git a/tools/aconfig/src/aconfig.rs b/tools/aconfig/src/aconfig.rs
new file mode 100644
index 0000000..8fe82b6
--- /dev/null
+++ b/tools/aconfig/src/aconfig.rs
@@ -0,0 +1,432 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{anyhow, Context, Error, Result};
+use protobuf::{Enum, EnumOrUnknown};
+use serde::{Deserialize, Serialize};
+
+use crate::cache::{Cache, Item, Tracepoint};
+use crate::protos::{
+    ProtoFlagDefinition, ProtoFlagDefinitionValue, ProtoFlagOverride, ProtoFlagOverrides,
+    ProtoFlagPermission, ProtoFlagState, ProtoNamespace, ProtoParsedFlag, ProtoParsedFlags,
+    ProtoTracepoint,
+};
+
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
+pub enum FlagState {
+    Enabled,
+    Disabled,
+}
+
+impl TryFrom<EnumOrUnknown<ProtoFlagState>> for FlagState {
+    type Error = Error;
+
+    fn try_from(proto: EnumOrUnknown<ProtoFlagState>) -> Result<Self, Self::Error> {
+        match ProtoFlagState::from_i32(proto.value()) {
+            Some(ProtoFlagState::ENABLED) => Ok(FlagState::Enabled),
+            Some(ProtoFlagState::DISABLED) => Ok(FlagState::Disabled),
+            None => Err(anyhow!("unknown flag state enum value {}", proto.value())),
+        }
+    }
+}
+
+impl From<FlagState> for ProtoFlagState {
+    fn from(state: FlagState) -> Self {
+        match state {
+            FlagState::Enabled => ProtoFlagState::ENABLED,
+            FlagState::Disabled => ProtoFlagState::DISABLED,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
+pub enum Permission {
+    ReadOnly,
+    ReadWrite,
+}
+
+impl TryFrom<EnumOrUnknown<ProtoFlagPermission>> for Permission {
+    type Error = Error;
+
+    fn try_from(proto: EnumOrUnknown<ProtoFlagPermission>) -> Result<Self, Self::Error> {
+        match ProtoFlagPermission::from_i32(proto.value()) {
+            Some(ProtoFlagPermission::READ_ONLY) => Ok(Permission::ReadOnly),
+            Some(ProtoFlagPermission::READ_WRITE) => Ok(Permission::ReadWrite),
+            None => Err(anyhow!("unknown permission enum value {}", proto.value())),
+        }
+    }
+}
+
+impl From<Permission> for ProtoFlagPermission {
+    fn from(permission: Permission) -> Self {
+        match permission {
+            Permission::ReadOnly => ProtoFlagPermission::READ_ONLY,
+            Permission::ReadWrite => ProtoFlagPermission::READ_WRITE,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Value {
+    state: FlagState,
+    permission: Permission,
+    since: Option<u32>,
+}
+
+#[allow(dead_code)] // only used in unit tests
+impl Value {
+    pub fn new(state: FlagState, permission: Permission, since: u32) -> Value {
+        Value { state, permission, since: Some(since) }
+    }
+
+    pub fn default(state: FlagState, permission: Permission) -> Value {
+        Value { state, permission, since: None }
+    }
+}
+
+impl TryFrom<ProtoFlagDefinitionValue> for Value {
+    type Error = Error;
+
+    fn try_from(proto: ProtoFlagDefinitionValue) -> Result<Self, Self::Error> {
+        let Some(proto_state) = proto.state else {
+            return Err(anyhow!("missing 'state' field"));
+        };
+        let state = proto_state.try_into()?;
+        let Some(proto_permission) = proto.permission else {
+            return Err(anyhow!("missing 'permission' field"));
+        };
+        let permission = proto_permission.try_into()?;
+        Ok(Value { state, permission, since: proto.since })
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Flag {
+    pub name: String,
+    pub description: String,
+
+    // ordered by Value.since; guaranteed to contain at least one item (the default value, with
+    // since == None)
+    pub values: Vec<Value>,
+}
+
+impl Flag {
+    #[allow(dead_code)] // only used in unit tests
+    pub fn try_from_text_proto(text_proto: &str) -> Result<Flag> {
+        let proto: ProtoFlagDefinition = crate::protos::try_from_text_proto(text_proto)
+            .with_context(|| text_proto.to_owned())?;
+        proto.try_into()
+    }
+
+    pub fn resolve(&self, build_id: u32) -> (FlagState, Permission) {
+        let mut state = self.values[0].state;
+        let mut permission = self.values[0].permission;
+        for candidate in self.values.iter().skip(1) {
+            let since = candidate.since.expect("invariant: non-default values have Some(since)");
+            if since <= build_id {
+                state = candidate.state;
+                permission = candidate.permission;
+            }
+        }
+        (state, permission)
+    }
+}
+
+impl TryFrom<ProtoFlagDefinition> for Flag {
+    type Error = Error;
+
+    fn try_from(proto: ProtoFlagDefinition) -> Result<Self, Self::Error> {
+        let Some(name) = proto.name else {
+            return Err(anyhow!("missing 'name' field"));
+        };
+        let Some(description) = proto.description else {
+            return Err(anyhow!("missing 'description' field"));
+        };
+        if proto.value.is_empty() {
+            return Err(anyhow!("missing 'value' field"));
+        }
+
+        let mut values: Vec<Value> = vec![];
+        for proto_value in proto.value.into_iter() {
+            let v: Value = proto_value.try_into()?;
+            if values.iter().any(|w| v.since == w.since) {
+                let msg = match v.since {
+                    None => format!("flag {}: multiple default values", name),
+                    Some(x) => format!("flag {}: multiple values for since={}", name, x),
+                };
+                return Err(anyhow!(msg));
+            }
+            values.push(v);
+        }
+        values.sort_by_key(|v| v.since);
+
+        Ok(Flag { name, description, values })
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Namespace {
+    pub namespace: String,
+    pub flags: Vec<Flag>,
+}
+
+impl Namespace {
+    pub fn try_from_text_proto(text_proto: &str) -> Result<Namespace> {
+        let proto: ProtoNamespace = crate::protos::try_from_text_proto(text_proto)
+            .with_context(|| text_proto.to_owned())?;
+        let Some(namespace) = proto.namespace else {
+            return Err(anyhow!("missing 'namespace' field"));
+        };
+        let mut flags = vec![];
+        for proto_flag in proto.flag.into_iter() {
+            flags.push(proto_flag.try_into()?);
+        }
+        Ok(Namespace { namespace, flags })
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Override {
+    pub namespace: String,
+    pub name: String,
+    pub state: FlagState,
+    pub permission: Permission,
+}
+
+impl Override {
+    #[allow(dead_code)] // only used in unit tests
+    pub fn try_from_text_proto(text_proto: &str) -> Result<Override> {
+        let proto: ProtoFlagOverride = crate::protos::try_from_text_proto(text_proto)?;
+        proto.try_into()
+    }
+
+    pub fn try_from_text_proto_list(text_proto: &str) -> Result<Vec<Override>> {
+        let proto: ProtoFlagOverrides = crate::protos::try_from_text_proto(text_proto)?;
+        proto.flag_override.into_iter().map(|proto_flag| proto_flag.try_into()).collect()
+    }
+}
+
+impl TryFrom<ProtoFlagOverride> for Override {
+    type Error = Error;
+
+    fn try_from(proto: ProtoFlagOverride) -> Result<Self, Self::Error> {
+        let Some(namespace) = proto.namespace else {
+            return Err(anyhow!("missing 'namespace' field"));
+        };
+        let Some(name) = proto.name else {
+            return Err(anyhow!("missing 'name' field"));
+        };
+        let Some(proto_state) = proto.state else {
+            return Err(anyhow!("missing 'state' field"));
+        };
+        let state = proto_state.try_into()?;
+        let Some(proto_permission) = proto.permission else {
+            return Err(anyhow!("missing 'permission' field"));
+        };
+        let permission = proto_permission.try_into()?;
+        Ok(Override { namespace, name, state, permission })
+    }
+}
+
+impl From<Cache> for ProtoParsedFlags {
+    fn from(cache: Cache) -> Self {
+        let mut proto = ProtoParsedFlags::new();
+        for item in cache.into_iter() {
+            proto.parsed_flag.push(item.into());
+        }
+        proto
+    }
+}
+
+impl From<Item> for ProtoParsedFlag {
+    fn from(item: Item) -> Self {
+        let mut proto = crate::protos::ProtoParsedFlag::new();
+        proto.set_namespace(item.namespace.to_owned());
+        proto.set_name(item.name.clone());
+        proto.set_description(item.description.clone());
+        proto.set_state(item.state.into());
+        proto.set_permission(item.permission.into());
+        for trace in item.trace.into_iter() {
+            proto.trace.push(trace.into());
+        }
+        proto
+    }
+}
+
+impl From<Tracepoint> for ProtoTracepoint {
+    fn from(tracepoint: Tracepoint) -> Self {
+        let mut proto = ProtoTracepoint::new();
+        proto.set_source(format!("{}", tracepoint.source));
+        proto.set_state(tracepoint.state.into());
+        proto.set_permission(tracepoint.permission.into());
+        proto
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_flag_try_from_text_proto() {
+        let expected = Flag {
+            name: "1234".to_owned(),
+            description: "Description of the flag".to_owned(),
+            values: vec![
+                Value::default(FlagState::Disabled, Permission::ReadOnly),
+                Value::new(FlagState::Enabled, Permission::ReadWrite, 8),
+            ],
+        };
+
+        let s = r#"
+        name: "1234"
+        description: "Description of the flag"
+        value {
+            state: DISABLED
+            permission: READ_ONLY
+        }
+        value {
+            state: ENABLED
+            permission: READ_WRITE
+            since: 8
+        }
+        "#;
+        let actual = Flag::try_from_text_proto(s).unwrap();
+
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn test_flag_try_from_text_proto_bad_input() {
+        let s = r#"
+        name: "a"
+        description: "Description of the flag"
+        "#;
+        let error = Flag::try_from_text_proto(s).unwrap_err();
+        assert_eq!(format!("{:?}", error), "missing 'value' field");
+
+        let s = r#"
+        description: "Description of the flag"
+        value {
+            state: ENABLED
+            permission: READ_ONLY
+        }
+        "#;
+        let error = Flag::try_from_text_proto(s).unwrap_err();
+        assert!(format!("{:?}", error).contains("Message not initialized"));
+
+        let s = r#"
+        name: "a"
+        description: "Description of the flag"
+        value {
+            state: ENABLED
+            permission: READ_ONLY
+        }
+        value {
+            state: ENABLED
+            permission: READ_ONLY
+        }
+        "#;
+        let error = Flag::try_from_text_proto(s).unwrap_err();
+        assert_eq!(format!("{:?}", error), "flag a: multiple default values");
+    }
+
+    #[test]
+    fn test_namespace_try_from_text_proto() {
+        let expected = Namespace {
+            namespace: "ns".to_owned(),
+            flags: vec![
+                Flag {
+                    name: "a".to_owned(),
+                    description: "A".to_owned(),
+                    values: vec![Value::default(FlagState::Enabled, Permission::ReadOnly)],
+                },
+                Flag {
+                    name: "b".to_owned(),
+                    description: "B".to_owned(),
+                    values: vec![Value::default(FlagState::Disabled, Permission::ReadWrite)],
+                },
+            ],
+        };
+
+        let s = r#"
+        namespace: "ns"
+        flag {
+            name: "a"
+            description: "A"
+            value {
+                state: ENABLED
+                permission: READ_ONLY
+            }
+        }
+        flag {
+            name: "b"
+            description: "B"
+            value {
+                state: DISABLED
+                permission: READ_WRITE
+            }
+        }
+        "#;
+        let actual = Namespace::try_from_text_proto(s).unwrap();
+
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn test_override_try_from_text_proto_list() {
+        let expected = Override {
+            namespace: "ns".to_owned(),
+            name: "1234".to_owned(),
+            state: FlagState::Enabled,
+            permission: Permission::ReadOnly,
+        };
+
+        let s = r#"
+        namespace: "ns"
+        name: "1234"
+        state: ENABLED
+        permission: READ_ONLY
+        "#;
+        let actual = Override::try_from_text_proto(s).unwrap();
+
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn test_flag_resolve() {
+        let flag = Flag {
+            name: "a".to_owned(),
+            description: "A".to_owned(),
+            values: vec![
+                Value::default(FlagState::Disabled, Permission::ReadOnly),
+                Value::new(FlagState::Disabled, Permission::ReadWrite, 10),
+                Value::new(FlagState::Enabled, Permission::ReadOnly, 20),
+                Value::new(FlagState::Enabled, Permission::ReadWrite, 30),
+            ],
+        };
+        assert_eq!((FlagState::Disabled, Permission::ReadOnly), flag.resolve(0));
+        assert_eq!((FlagState::Disabled, Permission::ReadOnly), flag.resolve(9));
+        assert_eq!((FlagState::Disabled, Permission::ReadWrite), flag.resolve(10));
+        assert_eq!((FlagState::Disabled, Permission::ReadWrite), flag.resolve(11));
+        assert_eq!((FlagState::Disabled, Permission::ReadWrite), flag.resolve(19));
+        assert_eq!((FlagState::Enabled, Permission::ReadOnly), flag.resolve(20));
+        assert_eq!((FlagState::Enabled, Permission::ReadOnly), flag.resolve(21));
+        assert_eq!((FlagState::Enabled, Permission::ReadOnly), flag.resolve(29));
+        assert_eq!((FlagState::Enabled, Permission::ReadWrite), flag.resolve(30));
+        assert_eq!((FlagState::Enabled, Permission::ReadWrite), flag.resolve(10_000));
+    }
+}
diff --git a/tools/aconfig/src/cache.rs b/tools/aconfig/src/cache.rs
new file mode 100644
index 0000000..4b46c42
--- /dev/null
+++ b/tools/aconfig/src/cache.rs
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{anyhow, Result};
+use serde::{Deserialize, Serialize};
+use std::io::{Read, Write};
+
+use crate::aconfig::{Flag, FlagState, Override, Permission};
+use crate::commands::Source;
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Tracepoint {
+    pub source: Source,
+    pub state: FlagState,
+    pub permission: Permission,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Item {
+    // TODO: duplicating the Cache.namespace as Item.namespace makes the internal representation
+    // closer to the proto message `parsed_flag`; hopefully this will enable us to replace the Item
+    // struct and use a newtype instead once aconfig has matured. Until then, namespace should
+    // really be a Cow<String>.
+    pub namespace: String,
+    pub name: String,
+    pub description: String,
+    pub state: FlagState,
+    pub permission: Permission,
+    pub trace: Vec<Tracepoint>,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Cache {
+    build_id: u32,
+    namespace: String,
+    items: Vec<Item>,
+}
+
+impl Cache {
+    pub fn new(build_id: u32, namespace: String) -> Cache {
+        Cache { build_id, namespace, items: vec![] }
+    }
+
+    pub fn read_from_reader(reader: impl Read) -> Result<Cache> {
+        serde_json::from_reader(reader).map_err(|e| e.into())
+    }
+
+    pub fn write_to_writer(&self, writer: impl Write) -> Result<()> {
+        serde_json::to_writer(writer, self).map_err(|e| e.into())
+    }
+
+    pub fn add_flag(&mut self, source: Source, flag: Flag) -> Result<()> {
+        if self.items.iter().any(|item| item.name == flag.name) {
+            return Err(anyhow!(
+                "failed to add flag {} from {}: flag already defined",
+                flag.name,
+                source,
+            ));
+        }
+        let (state, permission) = flag.resolve(self.build_id);
+        self.items.push(Item {
+            namespace: self.namespace.clone(),
+            name: flag.name.clone(),
+            description: flag.description,
+            state,
+            permission,
+            trace: vec![Tracepoint { source, state, permission }],
+        });
+        Ok(())
+    }
+
+    pub fn add_override(&mut self, source: Source, override_: Override) -> Result<()> {
+        if override_.namespace != self.namespace {
+            // TODO: print warning?
+            return Ok(());
+        }
+        let Some(existing_item) = self.items.iter_mut().find(|item| item.name == override_.name) else {
+            return Err(anyhow!("failed to override flag {}: unknown flag", override_.name));
+        };
+        existing_item.state = override_.state;
+        existing_item.permission = override_.permission;
+        existing_item.trace.push(Tracepoint {
+            source,
+            state: override_.state,
+            permission: override_.permission,
+        });
+        Ok(())
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &Item> {
+        self.items.iter()
+    }
+
+    pub fn into_iter(self) -> impl Iterator<Item = Item> {
+        self.items.into_iter()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::aconfig::{FlagState, Permission, Value};
+
+    #[test]
+    fn test_add_flag() {
+        let mut cache = Cache::new(1, "ns".to_string());
+        cache
+            .add_flag(
+                Source::File("first.txt".to_string()),
+                Flag {
+                    name: "foo".to_string(),
+                    description: "desc".to_string(),
+                    values: vec![Value::default(FlagState::Enabled, Permission::ReadOnly)],
+                },
+            )
+            .unwrap();
+        let error = cache
+            .add_flag(
+                Source::File("second.txt".to_string()),
+                Flag {
+                    name: "foo".to_string(),
+                    description: "desc".to_string(),
+                    values: vec![Value::default(FlagState::Disabled, Permission::ReadOnly)],
+                },
+            )
+            .unwrap_err();
+        assert_eq!(
+            &format!("{:?}", error),
+            "failed to add flag foo from second.txt: flag already defined"
+        );
+    }
+
+    #[test]
+    fn test_add_override() {
+        fn check(cache: &Cache, name: &str, expected: (FlagState, Permission)) -> bool {
+            let item = cache.iter().find(|&item| item.name == name).unwrap();
+            item.state == expected.0 && item.permission == expected.1
+        }
+
+        let mut cache = Cache::new(1, "ns".to_string());
+        let error = cache
+            .add_override(
+                Source::Memory,
+                Override {
+                    namespace: "ns".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap_err();
+        assert_eq!(&format!("{:?}", error), "failed to override flag foo: unknown flag");
+
+        cache
+            .add_flag(
+                Source::File("first.txt".to_string()),
+                Flag {
+                    name: "foo".to_string(),
+                    description: "desc".to_string(),
+                    values: vec![Value::default(FlagState::Enabled, Permission::ReadOnly)],
+                },
+            )
+            .unwrap();
+        dbg!(&cache);
+        assert!(check(&cache, "foo", (FlagState::Enabled, Permission::ReadOnly)));
+
+        cache
+            .add_override(
+                Source::Memory,
+                Override {
+                    namespace: "ns".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Disabled,
+                    permission: Permission::ReadWrite,
+                },
+            )
+            .unwrap();
+        dbg!(&cache);
+        assert!(check(&cache, "foo", (FlagState::Disabled, Permission::ReadWrite)));
+
+        cache
+            .add_override(
+                Source::Memory,
+                Override {
+                    namespace: "ns".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadWrite,
+                },
+            )
+            .unwrap();
+        assert!(check(&cache, "foo", (FlagState::Enabled, Permission::ReadWrite)));
+
+        // different namespace -> no-op
+        cache
+            .add_override(
+                Source::Memory,
+                Override {
+                    namespace: "some-other-namespace".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap();
+        assert!(check(&cache, "foo", (FlagState::Enabled, Permission::ReadWrite)));
+    }
+}
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen_java.rs
new file mode 100644
index 0000000..9d52cce
--- /dev/null
+++ b/tools/aconfig/src/codegen_java.rs
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::Result;
+use serde::Serialize;
+use tinytemplate::TinyTemplate;
+
+use crate::aconfig::{FlagState, Permission};
+use crate::cache::{Cache, Item};
+
+pub struct GeneratedFile {
+    pub file_content: String,
+    pub file_name: String,
+}
+
+pub fn generate_java_code(cache: &Cache) -> Result<GeneratedFile> {
+    let class_elements: Vec<ClassElement> = cache.iter().map(create_class_element).collect();
+    let readwrite = class_elements.iter().any(|item| item.readwrite);
+    let namespace = uppercase_first_letter(
+        cache.iter().find(|item| !item.namespace.is_empty()).unwrap().namespace.as_str(),
+    );
+    let context = Context { namespace: namespace.clone(), readwrite, class_elements };
+    let mut template = TinyTemplate::new();
+    template.add_template("java_code_gen", include_str!("../templates/java.template"))?;
+    let file_content = template.render("java_code_gen", &context)?;
+    Ok(GeneratedFile { file_content, file_name: format!("{}.java", namespace) })
+}
+
+#[derive(Serialize)]
+struct Context {
+    pub namespace: String,
+    pub readwrite: bool,
+    pub class_elements: Vec<ClassElement>,
+}
+
+#[derive(Serialize)]
+struct ClassElement {
+    pub method_name: String,
+    pub readwrite: bool,
+    pub default_value: String,
+    pub feature_name: String,
+    pub flag_name: String,
+}
+
+fn create_class_element(item: &Item) -> ClassElement {
+    ClassElement {
+        method_name: item.name.clone(),
+        readwrite: item.permission == Permission::ReadWrite,
+        default_value: if item.state == FlagState::Enabled {
+            "true".to_string()
+        } else {
+            "false".to_string()
+        },
+        feature_name: item.name.clone(),
+        flag_name: item.name.clone(),
+    }
+}
+
+fn uppercase_first_letter(s: &str) -> String {
+    s.chars()
+        .enumerate()
+        .map(
+            |(index, ch)| {
+                if index == 0 {
+                    ch.to_ascii_uppercase()
+                } else {
+                    ch.to_ascii_lowercase()
+                }
+            },
+        )
+        .collect()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::aconfig::{Flag, Value};
+    use crate::commands::Source;
+
+    #[test]
+    fn test_generate_java_code() {
+        let namespace = "TeSTFlaG";
+        let mut cache = Cache::new(1, namespace.to_string());
+        cache
+            .add_flag(
+                Source::File("test.txt".to_string()),
+                Flag {
+                    name: "test".to_string(),
+                    description: "buildtime enable".to_string(),
+                    values: vec![Value::default(FlagState::Enabled, Permission::ReadOnly)],
+                },
+            )
+            .unwrap();
+        cache
+            .add_flag(
+                Source::File("test2.txt".to_string()),
+                Flag {
+                    name: "test2".to_string(),
+                    description: "runtime disable".to_string(),
+                    values: vec![Value::default(FlagState::Disabled, Permission::ReadWrite)],
+                },
+            )
+            .unwrap();
+        let expect_content = "package com.android.aconfig;
+
+        import android.provider.DeviceConfig;
+
+        public final class Testflag {
+
+            public static boolean test() {
+                return true;
+            }
+
+            public static boolean test2() {
+                return DeviceConfig.getBoolean(
+                    \"Testflag\",
+                    \"test2__test2\",
+                    false
+                );
+            }
+
+        }
+        ";
+        let expected_file_name = format!("{}.java", uppercase_first_letter(namespace));
+        let generated_file = generate_java_code(&cache).unwrap();
+        assert_eq!(expected_file_name, generated_file.file_name);
+        assert_eq!(expect_content.replace(' ', ""), generated_file.file_content.replace(' ', ""));
+    }
+}
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
new file mode 100644
index 0000000..1487e72
--- /dev/null
+++ b/tools/aconfig/src/commands.rs
@@ -0,0 +1,205 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{ensure, Context, Result};
+use clap::ValueEnum;
+use protobuf::Message;
+use serde::{Deserialize, Serialize};
+use std::fmt;
+use std::io::Read;
+
+use crate::aconfig::{Namespace, Override};
+use crate::cache::Cache;
+use crate::codegen_java::{generate_java_code, GeneratedFile};
+use crate::protos::ProtoParsedFlags;
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum Source {
+    #[allow(dead_code)] // only used in unit tests
+    Memory,
+    File(String),
+}
+
+impl fmt::Display for Source {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Self::Memory => write!(f, "<memory>"),
+            Self::File(path) => write!(f, "{}", path),
+        }
+    }
+}
+
+pub struct Input {
+    pub source: Source,
+    pub reader: Box<dyn Read>,
+}
+
+pub fn create_cache(
+    build_id: u32,
+    namespace: &str,
+    aconfigs: Vec<Input>,
+    overrides: Vec<Input>,
+) -> Result<Cache> {
+    let mut cache = Cache::new(build_id, namespace.to_owned());
+
+    for mut input in aconfigs {
+        let mut contents = String::new();
+        input.reader.read_to_string(&mut contents)?;
+        let ns = Namespace::try_from_text_proto(&contents)
+            .with_context(|| format!("Failed to parse {}", input.source))?;
+        ensure!(
+            namespace == ns.namespace,
+            "Failed to parse {}: expected namespace {}, got {}",
+            input.source,
+            namespace,
+            ns.namespace
+        );
+        for flag in ns.flags.into_iter() {
+            cache.add_flag(input.source.clone(), flag)?;
+        }
+    }
+
+    for mut input in overrides {
+        let mut contents = String::new();
+        input.reader.read_to_string(&mut contents)?;
+        let overrides = Override::try_from_text_proto_list(&contents)
+            .with_context(|| format!("Failed to parse {}", input.source))?;
+        for override_ in overrides {
+            cache.add_override(input.source.clone(), override_)?;
+        }
+    }
+
+    Ok(cache)
+}
+
+pub fn generate_code(cache: &Cache) -> Result<GeneratedFile> {
+    generate_java_code(cache)
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
+pub enum Format {
+    Text,
+    Debug,
+    Protobuf,
+}
+
+pub fn dump_cache(cache: Cache, format: Format) -> Result<Vec<u8>> {
+    match format {
+        Format::Text => {
+            let mut lines = vec![];
+            for item in cache.iter() {
+                lines.push(format!("{}: {:?}\n", item.name, item.state));
+            }
+            Ok(lines.concat().into())
+        }
+        Format::Debug => {
+            let mut lines = vec![];
+            for item in cache.iter() {
+                lines.push(format!("{:?}\n", item));
+            }
+            Ok(lines.concat().into())
+        }
+        Format::Protobuf => {
+            let parsed_flags: ProtoParsedFlags = cache.into();
+            let mut output = vec![];
+            parsed_flags.write_to_vec(&mut output)?;
+            Ok(output)
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::aconfig::{FlagState, Permission};
+
+    fn create_test_cache() -> Cache {
+        let s = r#"
+        namespace: "ns"
+        flag {
+            name: "a"
+            description: "Description of a"
+            value {
+                state: ENABLED
+                permission: READ_WRITE
+            }
+        }
+        flag {
+            name: "b"
+            description: "Description of b"
+            value {
+                state: ENABLED
+                permission: READ_ONLY
+            }
+        }
+        "#;
+        let aconfigs = vec![Input { source: Source::Memory, reader: Box::new(s.as_bytes()) }];
+        let o = r#"
+        flag_override {
+            namespace: "ns"
+            name: "a"
+            state: DISABLED
+            permission: READ_ONLY
+        }
+        "#;
+        let overrides = vec![Input { source: Source::Memory, reader: Box::new(o.as_bytes()) }];
+        create_cache(1, "ns", aconfigs, overrides).unwrap()
+    }
+
+    #[test]
+    fn test_create_cache() {
+        let cache = create_test_cache(); // calls create_cache
+        let item = cache.iter().find(|&item| item.name == "a").unwrap();
+        assert_eq!(FlagState::Disabled, item.state);
+        assert_eq!(Permission::ReadOnly, item.permission);
+    }
+
+    #[test]
+    fn test_dump_text_format() {
+        let cache = create_test_cache();
+        let bytes = dump_cache(cache, Format::Text).unwrap();
+        let text = std::str::from_utf8(&bytes).unwrap();
+        assert!(text.contains("a: Disabled"));
+    }
+
+    #[test]
+    fn test_dump_protobuf_format() {
+        use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoTracepoint};
+        use protobuf::Message;
+
+        let cache = create_test_cache();
+        let bytes = dump_cache(cache, Format::Protobuf).unwrap();
+        let actual = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
+
+        assert_eq!(
+            vec!["a".to_string(), "b".to_string()],
+            actual.parsed_flag.iter().map(|item| item.name.clone().unwrap()).collect::<Vec<_>>()
+        );
+
+        let item =
+            actual.parsed_flag.iter().find(|item| item.name == Some("b".to_string())).unwrap();
+        assert_eq!(item.namespace(), "ns");
+        assert_eq!(item.name(), "b");
+        assert_eq!(item.description(), "Description of b");
+        assert_eq!(item.state(), ProtoFlagState::ENABLED);
+        assert_eq!(item.permission(), ProtoFlagPermission::READ_ONLY);
+        let mut tp = ProtoTracepoint::new();
+        tp.set_source("<memory>".to_string());
+        tp.set_state(ProtoFlagState::ENABLED);
+        tp.set_permission(ProtoFlagPermission::READ_ONLY);
+        assert_eq!(item.trace, vec![tp]);
+    }
+}
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
index 2f7255e..f29186a 100644
--- a/tools/aconfig/src/main.rs
+++ b/tools/aconfig/src/main.rs
@@ -16,38 +16,103 @@
 
 //! `aconfig` is a build time tool to manage build time configurations, such as feature flags.
 
-use aconfig_protos::aconfig::Placeholder;
-use protobuf::text_format::{parse_from_str, ParseError};
+use anyhow::Result;
+use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command};
+use std::fs;
+use std::io;
+use std::io::Write;
 
-fn foo() -> Result<String, ParseError> {
-    let placeholder = parse_from_str::<Placeholder>(r#"name: "aconfig""#)?;
-    Ok(placeholder.name)
+mod aconfig;
+mod cache;
+mod codegen_java;
+mod commands;
+mod protos;
+
+use crate::cache::Cache;
+use commands::{Input, Source};
+
+fn cli() -> Command {
+    Command::new("aconfig")
+        .subcommand_required(true)
+        .subcommand(
+            Command::new("create-cache")
+                .arg(
+                    Arg::new("build-id")
+                        .long("build-id")
+                        .value_parser(clap::value_parser!(u32))
+                        .required(true),
+                )
+                .arg(Arg::new("namespace").long("namespace").required(true))
+                .arg(Arg::new("aconfig").long("aconfig").action(ArgAction::Append))
+                .arg(Arg::new("override").long("override").action(ArgAction::Append))
+                .arg(Arg::new("cache").long("cache").required(true)),
+        )
+        .subcommand(
+            Command::new("create-java-lib")
+                .arg(Arg::new("cache").long("cache").required(true))
+                .arg(Arg::new("out").long("out").required(true)),
+        )
+        .subcommand(
+            Command::new("dump")
+                .arg(Arg::new("cache").long("cache").required(true))
+                .arg(
+                    Arg::new("format")
+                        .long("format")
+                        .value_parser(EnumValueParser::<commands::Format>::new())
+                        .default_value("text"),
+                )
+                .arg(Arg::new("out").long("out").default_value("-")),
+        )
 }
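+
+// Example invocations (a sketch only; the file and directory names below are
+// hypothetical and not part of this change):
+//
+//   aconfig create-cache --build-id 1 --namespace ns \
+//       --aconfig flags.textproto --override overrides.textproto --cache ns.cache
+//   aconfig create-java-lib --cache ns.cache --out generated
+//   aconfig dump --cache ns.cache --format text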
 
-fn main() {
-    println!("{:?}", foo());
+fn open_zero_or_more_files(matches: &ArgMatches, arg_name: &str) -> Result<Vec<Input>> {
+    let mut opened_files = vec![];
+    for path in matches.get_many::<String>(arg_name).unwrap_or_default() {
+        let file = Box::new(fs::File::open(path)?);
+        opened_files.push(Input { source: Source::File(path.to_string()), reader: file });
+    }
+    Ok(opened_files)
 }
 
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_foo() {
-        assert_eq!("aconfig", foo().unwrap());
+fn main() -> Result<()> {
+    let matches = cli().get_matches();
+    match matches.subcommand() {
+        Some(("create-cache", sub_matches)) => {
+            let build_id = *sub_matches.get_one::<u32>("build-id").unwrap();
+            let namespace = sub_matches.get_one::<String>("namespace").unwrap();
+            let aconfigs = open_zero_or_more_files(sub_matches, "aconfig")?;
+            let overrides = open_zero_or_more_files(sub_matches, "override")?;
+            let cache = commands::create_cache(build_id, namespace, aconfigs, overrides)?;
+            let path = sub_matches.get_one::<String>("cache").unwrap();
+            let file = fs::File::create(path)?;
+            cache.write_to_writer(file)?;
+        }
+        Some(("create-java-lib", sub_matches)) => {
+            let path = sub_matches.get_one::<String>("cache").unwrap();
+            let file = fs::File::open(path)?;
+            let cache = Cache::read_from_reader(file)?;
+            let out = sub_matches.get_one::<String>("out").unwrap();
+            let generated_file = commands::generate_code(&cache).unwrap();
+            fs::write(
+                format!("{}/{}", out, generated_file.file_name),
+                generated_file.file_content,
+            )?;
+        }
+        Some(("dump", sub_matches)) => {
+            let path = sub_matches.get_one::<String>("cache").unwrap();
+            let file = fs::File::open(path)?;
+            let cache = Cache::read_from_reader(file)?;
+            let format = sub_matches.get_one("format").unwrap();
+            let output = commands::dump_cache(cache, *format)?;
+            let path = sub_matches.get_one::<String>("out").unwrap();
+            let mut file: Box<dyn Write> = if path == "-" {
+                Box::new(io::stdout())
+            } else {
+                Box::new(fs::File::create(path)?)
+            };
+            file.write_all(&output)?;
+        }
+        _ => unreachable!(),
     }
-
-    #[test]
-    fn test_binary_protobuf() {
-        use protobuf::Message;
-        let mut buffer = Vec::new();
-
-        let mut original = Placeholder::new();
-        original.name = "test".to_owned();
-        original.write_to_writer(&mut buffer).unwrap();
-
-        let copy = Placeholder::parse_from_reader(&mut buffer.as_slice()).unwrap();
-
-        assert_eq!(original, copy);
-    }
+    Ok(())
 }
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/src/protos.rs
new file mode 100644
index 0000000..5965a09
--- /dev/null
+++ b/tools/aconfig/src/protos.rs
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// When building with the Android tool-chain
+//
+//   - an external crate `aconfig_protos` will be generated
+//   - the feature "cargo" will be disabled
+//
+// When building with cargo
+//
+//   - a local sub-module will be generated in OUT_DIR and included in this file
+//   - the feature "cargo" will be enabled
+//
+// This module hides these differences from the rest of aconfig.
+
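+// A sketch of the intended usage from the rest of the crate (the example
+// function below is illustrative only; the aliases and try_from_text_proto
+// are defined in this module):
+//
+//     use crate::protos::{try_from_text_proto, ProtoNamespace};
+//
+//     fn parse(text: &str) -> anyhow::Result<ProtoNamespace> {
+//         try_from_text_proto::<ProtoNamespace>(text)
+//     }
+//
+// Callers never name aconfig_protos or the OUT_DIR sub-module directly.
+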
+// ---- When building with the Android tool-chain ----
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Namespace as ProtoNamespace;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_value as ProtoFlagDefinitionValue;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_definition as ProtoFlagDefinition;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_overrides as ProtoFlagOverrides;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_override as ProtoFlagOverride;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Parsed_flag as ProtoParsedFlag;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Tracepoint as ProtoTracepoint;
+
+// ---- When building with cargo ----
+#[cfg(feature = "cargo")]
+include!(concat!(env!("OUT_DIR"), "/aconfig_proto/mod.rs"));
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Namespace as ProtoNamespace;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_value as ProtoFlagDefinitionValue;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_definition as ProtoFlagDefinition;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_overrides as ProtoFlagOverrides;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_override as ProtoFlagOverride;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_permission as ProtoFlagPermission;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_state as ProtoFlagState;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Parsed_flags as ProtoParsedFlags;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Parsed_flag as ProtoParsedFlag;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Tracepoint as ProtoTracepoint;
+
+// ---- Common for both the Android tool-chain and cargo ----
+use anyhow::Result;
+
+pub fn try_from_text_proto<T>(s: &str) -> Result<T>
+where
+    T: protobuf::MessageFull,
+{
+    // warning: parse_from_str does not check if required fields are set
+    protobuf::text_format::parse_from_str(s).map_err(|e| e.into())
+}
diff --git a/tools/aconfig/templates/java.template b/tools/aconfig/templates/java.template
new file mode 100644
index 0000000..3854579
--- /dev/null
+++ b/tools/aconfig/templates/java.template
@@ -0,0 +1,19 @@
+package com.android.aconfig;
+{{ if readwrite }}
+import android.provider.DeviceConfig;
+{{ endif }}
+public final class {namespace} \{
+    {{ for item in class_elements}}
+    public static boolean {item.method_name}() \{
+        {{ if item.readwrite- }}
+        return DeviceConfig.getBoolean(
+            "{namespace}",
+            "{item.feature_name}__{item.flag_name}",
+            {item.default_value}
+        ); 
+        {{ -else- }}
+        return {item.default_value};
+        {{ -endif }}
+    }
+    {{ endfor }}
+}
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
index b0ed645..9714ac4 100755
--- a/tools/finalization/environment.sh
+++ b/tools/finalization/environment.sh
@@ -7,7 +7,6 @@
 export FINAL_PLATFORM_CODENAME='VanillaIceCream'
 export CURRENT_PLATFORM_CODENAME='VanillaIceCream'
 export FINAL_PLATFORM_CODENAME_JAVA='VANILLA_ICE_CREAM'
-export FINAL_BUILD_PREFIX='VP1A'
 export FINAL_PLATFORM_VERSION='15'
 
 # Set arbitrary large values for CI.
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
index d977a65..fa33986 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -4,13 +4,15 @@
 
 function apply_droidstubs_hack() {
     if ! grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
-        git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+        local build_soong_git_root="$(readlink -f $top/build/soong)"
+        git -C "$build_soong_git_root" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
     fi
 }
 
 function apply_resources_sdk_int_fix() {
     if ! grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then
-        git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
+        local base_git_root="$(readlink -f $top/frameworks/base)"
+        git -C "$base_git_root" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
     fi
 }
 
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
index 84ad2a7..62e5ee5 100755
--- a/tools/finalization/finalize-sdk-rel.sh
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -34,7 +34,8 @@
     revert_resources_sdk_int_fix
 
     # build/make/core/version_defaults.mk
-    sed -i -e "s/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := .*/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := REL/g" "$top/build/make/core/version_defaults.mk"
+    # Mark all versions "released".
+    sed -i 's/\(PLATFORM_VERSION_CODENAME\.[^[:space:]]*\) := [^[:space:]]*/\1 := REL/g' "$top/build/make/core/version_defaults.mk"
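+    # For example, a (hypothetical) entry such as
+    #   PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
+    # becomes
+    #   PLATFORM_VERSION_CODENAME.UP1A := REL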
 
     # cts
     echo "$FINAL_PLATFORM_VERSION" > "$top/cts/tests/tests/os/assets/platform_versions.txt"
diff --git a/tools/finalization/localonly-steps.sh b/tools/finalization/localonly-steps.sh
index 6107b3e..7318ca1 100755
--- a/tools/finalization/localonly-steps.sh
+++ b/tools/finalization/localonly-steps.sh
@@ -17,7 +17,7 @@
     $top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=sdk TARGET_BUILD_VARIANT=userdebug sdk dist sdk_repo DIST_DIR=out/dist
 
     # Build Modules SDKs.
-    TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh"
+    TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh" --build-release=latest
 
     # Update prebuilts.
     "$top/prebuilts/build-tools/path/linux-x86/python3" -W ignore::DeprecationWarning "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
index 32afa45..f2fda4e 100644
--- a/tools/rbcrun/host.go
+++ b/tools/rbcrun/host.go
@@ -223,16 +223,6 @@
 	return starlark.NewList(elems)
 }
 
-// propsetFromEnv constructs a propset from the array of KEY=value strings
-func structFromEnv(env []string) *starlarkstruct.Struct {
-	sd := make(map[string]starlark.Value, len(env))
-	for _, x := range env {
-		kv := strings.SplitN(x, "=", 2)
-		sd[kv[0]] = starlark.String(kv[1])
-	}
-	return starlarkstruct.FromStringDict(starlarkstruct.Default, sd)
-}
-
 func log(thread *starlark.Thread, fn *starlark.Builtin, args starlark.Tuple, kwargs []starlark.Tuple) (starlark.Value, error) {
 	sep := " "
 	if err := starlark.UnpackArgs("print", nil, kwargs, "sep?", &sep); err != nil {
@@ -255,12 +245,10 @@
 	return starlark.None, nil
 }
 
-func setup(env []string) {
+func setup() {
 	// Create the symbols that aid makefile conversion. See README.md
 	builtins = starlark.StringDict{
 		"struct":   starlark.NewBuiltin("struct", starlarkstruct.Make),
-		"rblf_cli": structFromEnv(env),
-		"rblf_env": structFromEnv(os.Environ()),
 		// To convert find-copy-subdir and product-copy-files-by pattern
 		"rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
 		// To convert makefile's $(shell cmd)
@@ -285,11 +273,8 @@
 //   and the name that appears in error messages;
 // * src is an optional source of bytes to use instead of filename
 //   (it can be a string, or a byte array, or an io.Reader instance)
-// * commandVars is an array of "VAR=value" items. They are accessible from
-//   the starlark script as members of the `rblf_cli` propset.
-func Run(filename string, src interface{}, commandVars []string) error {
-	setup(commandVars)
-
+func Run(filename string, src interface{}) error {
+	setup()
 	mainThread := &starlark.Thread{
 		Name:  "main",
 		Print: func(_ *starlark.Thread, msg string) { fmt.Println(msg) },
diff --git a/tools/rbcrun/host_test.go b/tools/rbcrun/host_test.go
index 97f6ce9..e109c02 100644
--- a/tools/rbcrun/host_test.go
+++ b/tools/rbcrun/host_test.go
@@ -53,8 +53,8 @@
 }
 
 // Common setup for the tests: create thread, change to the test directory
-func testSetup(t *testing.T, env []string) *starlark.Thread {
-	setup(env)
+func testSetup(t *testing.T) *starlark.Thread {
+	setup()
 	thread := &starlark.Thread{
 		Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
 			if module == "assert.star" {
@@ -72,14 +72,16 @@
 func dataDir() string {
 	_, thisSrcFile, _, _ := runtime.Caller(0)
 	return filepath.Join(filepath.Dir(thisSrcFile), "testdata")
-
 }
 
 func exerciseStarlarkTestFile(t *testing.T, starFile string) {
 	// In order to use "assert.star" from go/starlark.net/starlarktest in the tests, provide:
 	//  * load function that handles "assert.star"
 	//  * starlarktest.DataFile function that finds its location
-	setup(nil)
+	setup()
+	if err := os.Chdir(dataDir()); err != nil {
+		t.Fatal(err)
+	}
 	thread := &starlark.Thread{
 		Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
 			if module == "assert.star" {
@@ -98,26 +100,9 @@
 	}
 }
 
-func TestCliAndEnv(t *testing.T) {
-	// TODO(asmundak): convert this to use exerciseStarlarkTestFile
-	if err := os.Setenv("TEST_ENVIRONMENT_FOO", "test_environment_foo"); err != nil {
-		t.Fatal(err)
-	}
-	thread := testSetup(t, []string{"CLI_FOO=foo"})
-	if _, err := starlark.ExecFile(thread, "cli_and_env.star", nil, builtins); err != nil {
-		if err, ok := err.(*starlark.EvalError); ok {
-			t.Fatal(err.Backtrace())
-		}
-		t.Fatal(err)
-	}
-}
-
 func TestFileOps(t *testing.T) {
 	// TODO(asmundak): convert this to use exerciseStarlarkTestFile
-	if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
-		t.Fatal(err)
-	}
-	thread := testSetup(t, nil)
+	thread := testSetup(t)
 	if _, err := starlark.ExecFile(thread, "file_ops.star", nil, builtins); err != nil {
 		if err, ok := err.(*starlark.EvalError); ok {
 			t.Fatal(err.Backtrace())
@@ -128,7 +113,7 @@
 
 func TestLoad(t *testing.T) {
 	// TODO(asmundak): convert this to use exerciseStarlarkTestFile
-	thread := testSetup(t, nil)
+	thread := testSetup(t)
 	thread.Load = func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
 		if module == "assert.star" {
 			return starlarktest.LoadAssertModule()
@@ -148,8 +133,5 @@
 }
 
 func TestShell(t *testing.T) {
-	if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
-		t.Fatal(err)
-	}
 	exerciseStarlarkTestFile(t, "testdata/shell.star")
 }
diff --git a/tools/rbcrun/rbcrun/rbcrun.go b/tools/rbcrun/rbcrun/rbcrun.go
index 4db6a0b..8dd0f46 100644
--- a/tools/rbcrun/rbcrun/rbcrun.go
+++ b/tools/rbcrun/rbcrun/rbcrun.go
@@ -20,44 +20,25 @@
 	"go.starlark.net/starlark"
 	"os"
 	"rbcrun"
-	"strings"
 )
 
 var (
-	execprog = flag.String("c", "", "execute program `prog`")
 	rootdir  = flag.String("d", ".", "the value of // for load paths")
-	file     = flag.String("f", "", "file to execute")
 	perfFile = flag.String("perf", "", "save performance data")
 )
 
 func main() {
 	flag.Parse()
-	filename := *file
-	var src interface{}
-	var env []string
+	filename := ""
 
-	rc := 0
 	for _, arg := range flag.Args() {
-		if strings.Contains(arg, "=") {
-			env = append(env, arg)
-		} else if filename == "" {
+		if filename == "" {
 			filename = arg
 		} else {
 			quit("only one file can be executed\n")
 		}
 	}
-	if *execprog != "" {
-		if filename != "" {
-			quit("either -c or file name should be present\n")
-		}
-		filename = "<cmdline>"
-		src = *execprog
-	}
 	if filename == "" {
-		if len(env) > 0 {
-			fmt.Fprintln(os.Stderr,
-				"no file to run -- if your file's name contains '=', use -f to specify it")
-		}
 		flag.Usage()
 		os.Exit(1)
 	}
@@ -75,7 +56,8 @@
 		}
 	}
 	rbcrun.LoadPathRoot = *rootdir
-	err := rbcrun.Run(filename, src, env)
+	err := rbcrun.Run(filename, nil)
+	rc := 0
 	if *perfFile != "" {
 		if err2 := starlark.StopProfile(); err2 != nil {
 			fmt.Fprintln(os.Stderr, err2)
diff --git a/tools/rbcrun/testdata/cli_and_env.star b/tools/rbcrun/testdata/cli_and_env.star
deleted file mode 100644
index d6f464a..0000000
--- a/tools/rbcrun/testdata/cli_and_env.star
+++ /dev/null
@@ -1,11 +0,0 @@
-# Tests rblf_env access
-load("assert.star", "assert")
-
-
-def test():
-    assert.eq(rblf_env.TEST_ENVIRONMENT_FOO, "test_environment_foo")
-    assert.fails(lambda: rblf_env.FOO_BAR_BAZ, ".*struct has no .FOO_BAR_BAZ attribute$")
-    assert.eq(rblf_cli.CLI_FOO, "foo")
-
-
-test()
diff --git a/tools/rbcrun/testdata/file_ops.star b/tools/rbcrun/testdata/file_ops.star
index 2ee78fc..b2b907c 100644
--- a/tools/rbcrun/testdata/file_ops.star
+++ b/tools/rbcrun/testdata/file_ops.star
@@ -1,22 +1,21 @@
 # Tests file ops builtins
 load("assert.star", "assert")
 
-
 def test():
     myname = "file_ops.star"
     files = rblf_wildcard("*.star")
     assert.true(myname in files, "expected %s in  %s" % (myname, files))
-    files = rblf_wildcard("*.star", rblf_env.TEST_DATA_DIR)
+    files = rblf_wildcard("*.star")
     assert.true(myname in files, "expected %s in %s" % (myname, files))
     files = rblf_wildcard("*.xxx")
     assert.true(len(files) == 0, "expansion should be empty but contains %s" % files)
     mydir = "testdata"
     myrelname = "%s/%s" % (mydir, myname)
-    files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*")
+    files = rblf_find_files("../", "*")
     assert.true(mydir in files and myrelname in files, "expected %s and %s in %s" % (mydir, myrelname, files))
-    files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*", only_files=1)
+    files = rblf_find_files("../", "*", only_files=1)
     assert.true(mydir not in files, "did not expect %s in %s" % (mydir, files))
     assert.true(myrelname in files, "expected %s  in %s" % (myrelname, files))
-    files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*.star")
+    files = rblf_find_files("../", "*.star")
     assert.true(myrelname in files, "expected %s in %s" % (myrelname, files))
 test()
diff --git a/tools/rbcrun/testdata/shell.star b/tools/rbcrun/testdata/shell.star
index ad10697..dd17375 100644
--- a/tools/rbcrun/testdata/shell.star
+++ b/tools/rbcrun/testdata/shell.star
@@ -1,5 +1,5 @@
 # Tests "queue" data type
 load("assert.star", "assert")
 
-assert.eq("load.star shell.star", rblf_shell("cd %s && ls -1 shell.star load.star 2>&1" % rblf_env.TEST_DATA_DIR))
-assert.eq("shell.star", rblf_shell("cd %s && echo shell.sta*" % rblf_env.TEST_DATA_DIR))
+assert.eq("load.star shell.star", rblf_shell("ls -1 shell.star load.star 2>&1"))
+assert.eq("shell.star", rblf_shell("echo shell.sta*"))
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 940ac1d..06de622 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -754,6 +754,33 @@
   return ReadBytesFromInputFile(input_file, fn).decode()
 
 
+def WriteBytesToInputFile(input_file, fn, data):
+  """Write bytes |data| contents to fn of input zipfile or directory."""
+  if isinstance(input_file, zipfile.ZipFile):
+    with input_file.open(fn, "w") as entry_fp:
+      return entry_fp.write(data)
+  elif zipfile.is_zipfile(input_file):
+    with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+      with zfp.open(fn, "w") as entry_fp:
+        return entry_fp.write(data)
+  else:
+    if not os.path.isdir(input_file):
+      raise ValueError(
+          "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
+    path = os.path.join(input_file, *fn.split("/"))
+    try:
+      with open(path, "wb") as f:
+        return f.write(data)
+    except IOError as e:
+      if e.errno == errno.ENOENT:
+        raise KeyError(fn)
+
+
+def WriteToInputFile(input_file, fn, str: str):
+  """Write str content to fn of input file or directory"""
+  return WriteBytesToInputFile(input_file, fn, str.encode())
+
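+# A sketch of expected usage (the directory path here is hypothetical): like
+# ReadFromInputFile, these helpers accept a ZipFile object, a path to a
+# target-files .zip, or an extracted directory, e.g.
+#
+#   common.WriteToInputFile("/tmp/extracted_target_files",
+#                           "META/ab_partitions.txt", "system\nvendor\n")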
+
 def ExtractFromInputFile(input_file, fn):
   """Extracts the contents of fn from input zipfile or directory into a file."""
   if isinstance(input_file, zipfile.ZipFile):
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index c4fd809..667891c 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -23,6 +23,7 @@
 from check_target_files_vintf import CheckVintfIfTrebleEnabled, HasPartition
 from common import OPTIONS
 from ota_utils import UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata, PropertyFiles
+import subprocess
 
 logger = logging.getLogger(__name__)
 
@@ -277,7 +278,8 @@
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
+  FinalizeMetadata(metadata, staging_file, output_file,
+                   needed_property_files, package_key=OPTIONS.package_key)
 
 
 def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
@@ -532,7 +534,8 @@
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
+  FinalizeMetadata(metadata, staging_file, output_file,
+                   needed_property_files, package_key=OPTIONS.package_key)
 
 
 def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
@@ -555,8 +558,18 @@
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
   else:
-    logger.info("unzipping target target-files...")
-    OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
+    if not os.path.isdir(target_file):
+      logger.info("unzipping target target-files...")
+      OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
+    else:
+      OPTIONS.input_tmp = target_file
+      tmpfile = common.MakeTempFile(suffix=".zip")
+      os.unlink(tmpfile)
+      common.RunAndCheckOutput(
+          ["zip", tmpfile, "-r", ".", "-0"], cwd=target_file)
+      assert zipfile.is_zipfile(tmpfile)
+      target_file = tmpfile
+
   OPTIONS.target_tmp = OPTIONS.input_tmp
 
   # If the caller explicitly specified the device-specific extensions path via
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index e40256c..04ef5ef 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -270,10 +270,9 @@
 import common
 import ota_utils
 from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
-                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, CopyTargetFilesDir)
+                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir)
 from common import DoesInputFileContain, IsSparseImage
 import target_files_diff
-from check_target_files_vintf import CheckVintfIfTrebleEnabled
 from non_ab_ota import GenerateNonAbOtaPackage
 from payload_signer import PayloadSigner
 
@@ -519,15 +518,10 @@
   Returns:
     The filename of target-files.zip that doesn't contain postinstall config.
   """
-  # We should only make a copy if postinstall_config entry exists.
-  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
-    if POSTINSTALL_CONFIG not in input_zip.namelist():
-      return input_file
-
-  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
-  shutil.copyfile(input_file, target_file)
-  common.ZipDelete(target_file, POSTINSTALL_CONFIG)
-  return target_file
+  config_path = os.path.join(input_file, POSTINSTALL_CONFIG)
+  if os.path.exists(config_path):
+    os.unlink(config_path)
+  return input_file
 
 
 def ParseInfoDict(target_file_path):
@@ -544,6 +538,17 @@
   Returns:
     The path to modified target-files.zip
   """
+  if os.path.isdir(input_file):
+    dynamic_partition_info_path = os.path.join(
+        input_file, "META", "dynamic_partitions_info.txt")
+    with open(dynamic_partition_info_path, "r") as fp:
+      dynamic_partition_info = fp.read()
+    dynamic_partition_info = ModifyVABCCompressionParam(
+        dynamic_partition_info, vabc_compression_param)
+    with open(dynamic_partition_info_path, "w") as fp:
+      fp.write(dynamic_partition_info)
+    return input_file
+
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   shutil.copyfile(input_file, target_file)
   common.ZipDelete(target_file, DYNAMIC_PARTITION_INFO)
@@ -571,23 +576,7 @@
     The filename of target-files.zip used for partial ota update.
   """
 
-  def AddImageForPartition(partition_name):
-    """Add the archive name for a given partition to the copy list."""
-    for prefix in ['IMAGES', 'RADIO']:
-      image_path = '{}/{}.img'.format(prefix, partition_name)
-      if image_path in namelist:
-        copy_entries.append(image_path)
-        map_path = '{}/{}.map'.format(prefix, partition_name)
-        if map_path in namelist:
-          copy_entries.append(map_path)
-        return
-
-    raise ValueError("Cannot find {} in input zipfile".format(partition_name))
-
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    original_ab_partitions = input_zip.read(
-        AB_PARTITIONS).decode().splitlines()
-    namelist = input_zip.namelist()
+  original_ab_partitions = common.ReadFromInputFile(input_file, AB_PARTITIONS)
 
   unrecognized_partitions = [partition for partition in ab_partitions if
                              partition not in original_ab_partitions]
@@ -596,50 +585,65 @@
                      unrecognized_partitions)
 
   logger.info("Generating partial updates for %s", ab_partitions)
+  for subdir in ["IMAGES", "RADIO", "PREBUILT_IMAGES"]:
+    image_dir = os.path.join(input_file, subdir)
+    if not os.path.exists(image_dir):
+      continue
+    for filename in os.listdir(image_dir):
+      filepath = os.path.join(image_dir, filename)
+      if filename.endswith(".img"):
+        partition_name = filename.removesuffix(".img")
+        if partition_name not in ab_partitions:
+          os.unlink(filepath)
 
-  copy_entries = ['META/update_engine_config.txt']
-  for partition_name in ab_partitions:
-    AddImageForPartition(partition_name)
+  common.WriteToInputFile(input_file, 'META/ab_partitions.txt',
+                          '\n'.join(ab_partitions))
+  CARE_MAP_ENTRY = "META/care_map.pb"
+  if DoesInputFileContain(input_file, CARE_MAP_ENTRY):
+    caremap = care_map_pb2.CareMap()
+    caremap.ParseFromString(
+        common.ReadBytesFromInputFile(input_file, CARE_MAP_ENTRY))
+    filtered = [
+        part for part in caremap.partitions if part.name in ab_partitions]
+    del caremap.partitions[:]
+    caremap.partitions.extend(filtered)
+    common.WriteBytesToInputFile(input_file, CARE_MAP_ENTRY,
+                                 caremap.SerializeToString())
 
-  # Use zip2zip to avoid extracting the zipfile.
-  partial_target_file = common.MakeTempFile(suffix='.zip')
-  cmd = ['zip2zip', '-i', input_file, '-o', partial_target_file]
-  cmd.extend(['{}:{}'.format(name, name) for name in copy_entries])
-  common.RunAndCheckOutput(cmd)
+  for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
+    if not DoesInputFileContain(input_file, info_file):
+      logger.warning('Cannot find %s in input zipfile', info_file)
+      continue
 
-  partial_target_zip = zipfile.ZipFile(partial_target_file, 'a',
-                                       allowZip64=True)
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    common.ZipWriteStr(partial_target_zip, 'META/ab_partitions.txt',
-                       '\n'.join(ab_partitions))
-    CARE_MAP_ENTRY = "META/care_map.pb"
-    if CARE_MAP_ENTRY in input_zip.namelist():
-      caremap = care_map_pb2.CareMap()
-      caremap.ParseFromString(input_zip.read(CARE_MAP_ENTRY))
-      filtered = [
-          part for part in caremap.partitions if part.name in ab_partitions]
-      del caremap.partitions[:]
-      caremap.partitions.extend(filtered)
-      common.ZipWriteStr(partial_target_zip, CARE_MAP_ENTRY,
-                         caremap.SerializeToString())
+    content = common.ReadFromInputFile(input_file, info_file)
+    modified_info = UpdatesInfoForSpecialUpdates(
+        content, lambda p: p in ab_partitions)
+    if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
+      modified_info = ModifyVABCCompressionParam(
+          modified_info, OPTIONS.vabc_compression_param)
+    common.WriteToInputFile(input_file, info_file, modified_info)
 
-    for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
-      if info_file not in input_zip.namelist():
-        logger.warning('Cannot find %s in input zipfile', info_file)
-        continue
-      content = input_zip.read(info_file).decode()
-      modified_info = UpdatesInfoForSpecialUpdates(
-          content, lambda p: p in ab_partitions)
-      if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
-        modified_info = ModifyVABCCompressionParam(
-            modified_info, OPTIONS.vabc_compression_param)
-      common.ZipWriteStr(partial_target_zip, info_file, modified_info)
+  def IsInPartialList(postinstall_line: str):
+    idx = postinstall_line.find("=")
+    if idx < 0:
+      return False
+    key = postinstall_line[:idx]
+    logger.info("%s %s", key, ab_partitions)
+    for part in ab_partitions:
+      if key.endswith("_" + part):
+        return True
+    return False
 
-    # TODO(xunchang) handle META/postinstall_config.txt'
+  postinstall_config = common.ReadFromInputFile(input_file, POSTINSTALL_CONFIG)
+  postinstall_config = [
+      line for line in postinstall_config.splitlines() if IsInPartialList(line)]
+  if postinstall_config:
+    postinstall_config = "\n".join(postinstall_config)
+    common.WriteToInputFile(input_file, POSTINSTALL_CONFIG, postinstall_config)
+  else:
+    os.unlink(os.path.join(input_file, POSTINSTALL_CONFIG))
 
-  common.ZipClose(partial_target_zip)
-
-  return partial_target_file
+  return input_file
 
 
 def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
@@ -664,21 +668,12 @@
   replace = {'OTA/super_{}.img'.format(dev): 'IMAGES/{}.img'.format(dev)
              for dev in super_block_devices}
 
-  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
-  shutil.copyfile(input_file, target_file)
-
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    namelist = input_zip.namelist()
-
-  input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
-
   # Remove partitions from META/ab_partitions.txt that is in
   # dynamic_partition_list but not in super_block_devices so that
   # brillo_update_payload won't generate update for those logical partitions.
-  ab_partitions_file = os.path.join(input_tmp, *AB_PARTITIONS.split('/'))
-  with open(ab_partitions_file) as f:
-    ab_partitions_lines = f.readlines()
-    ab_partitions = [line.strip() for line in ab_partitions_lines]
+  ab_partitions_lines = common.ReadFromInputFile(
+      input_file, AB_PARTITIONS).split("\n")
+  ab_partitions = [line.strip() for line in ab_partitions_lines]
   # Assert that all super_block_devices are in ab_partitions
   super_device_not_updated = [partition for partition in super_block_devices
                               if partition not in ab_partitions]
@@ -686,15 +681,6 @@
       "{} is in super_block_devices but not in {}".format(
           super_device_not_updated, AB_PARTITIONS)
   # ab_partitions -= (dynamic_partition_list - super_block_devices)
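+  # For example (hypothetical values): with dynamic_partition_list
+  # ["system", "vendor", "product"] and super_block_devices ["system",
+  # "vendor"], the rewritten ab_partitions.txt below keeps "system" and
+  # "vendor" (and any non-dynamic entries such as "boot") but drops "product".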
-  new_ab_partitions = common.MakeTempFile(
-      prefix="ab_partitions", suffix=".txt")
-  with open(new_ab_partitions, 'w') as f:
-    for partition in ab_partitions:
-      if (partition in dynamic_partition_list and
-              partition not in super_block_devices):
-        logger.info("Dropping %s from ab_partitions.txt", partition)
-        continue
-      f.write(partition + "\n")
   to_delete = [AB_PARTITIONS]
 
   # Always skip postinstall for a retrofit update.
@@ -707,24 +693,28 @@
   # Remove the existing partition images as well as the map files.
   to_delete += list(replace.values())
   to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
-
-  common.ZipDelete(target_file, to_delete)
-
-  target_zip = zipfile.ZipFile(target_file, 'a', allowZip64=True)
+  for item in to_delete:
+    os.unlink(os.path.join(input_file, item))
 
   # Write super_{foo}.img as {foo}.img.
   for src, dst in replace.items():
-    assert src in namelist, \
+    assert DoesInputFileContain(input_file, src), \
         'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
-    unzipped_file = os.path.join(input_tmp, *src.split('/'))
-    common.ZipWrite(target_zip, unzipped_file, arcname=dst)
+    source_path = os.path.join(input_file, *src.split("/"))
+    target_path = os.path.join(input_file, *dst.split("/"))
+    os.rename(source_path, target_path)
 
   # Write new ab_partitions.txt file
-  common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
+  new_ab_partitions = os.path.join(input_file, AB_PARTITIONS)
+  with open(new_ab_partitions, 'w') as f:
+    for partition in ab_partitions:
+      if (partition in dynamic_partition_list and
+              partition not in super_block_devices):
+        logger.info("Dropping %s from ab_partitions.txt", partition)
+        continue
+      f.write(partition + "\n")
 
-  common.ZipClose(target_zip)
-
-  return target_file
+  return input_file
 
 
 def GetTargetFilesZipForCustomImagesUpdates(input_file, custom_images):
@@ -833,14 +823,20 @@
   return pattern.search(output) is not None
 
 
+def ExtractOrCopyTargetFiles(target_file):
+  if os.path.isdir(target_file):
+    return CopyTargetFilesDir(target_file)
+  else:
+    return ExtractTargetFiles(target_file)
+
+
 def GenerateAbOtaPackage(target_file, output_file, source_file=None):
   """Generates an Android OTA package that has A/B update payload."""
   # If input target_files are directories, create a copy so that we can modify
   # them directly
-  if os.path.isdir(target_file):
-    target_file = CopyTargetFilesDir(target_file)
-  if source_file is not None and os.path.isdir(source_file):
-    source_file = CopyTargetFilesDir(source_file)
+  target_file = ExtractOrCopyTargetFiles(target_file)
+  if source_file is not None:
+    source_file = ExtractOrCopyTargetFiles(source_file)
   # Stage the output zip package for package signing.
   if not OPTIONS.no_signing:
     staging_file = common.MakeTempFile(suffix='.zip')
@@ -851,7 +847,7 @@
                                allowZip64=True)
 
   if source_file is not None:
-    source_file = ota_utils.ExtractTargetFiles(source_file)
+    source_file = ExtractTargetFiles(source_file)
     assert "ab_partitions" in OPTIONS.source_info_dict, \
         "META/ab_partitions.txt is required for ab_update."
     assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -948,15 +944,16 @@
   elif OPTIONS.partial:
     target_file = GetTargetFilesZipForPartialUpdates(target_file,
                                                      OPTIONS.partial)
-  elif OPTIONS.vabc_compression_param:
+  if OPTIONS.vabc_compression_param:
     target_file = GetTargetFilesZipForCustomVABCCompression(
         target_file, OPTIONS.vabc_compression_param)
-  elif OPTIONS.skip_postinstall:
+  if OPTIONS.skip_postinstall:
     target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
   # Target_file may have been modified, reparse ab_partitions
   target_info.info_dict['ab_partitions'] = common.ReadFromInputFile(target_file,
                                                                     AB_PARTITIONS).strip().split("\n")
 
+  from check_target_files_vintf import CheckVintfIfTrebleEnabled
   CheckVintfIfTrebleEnabled(target_file, target_info)
 
   # Metadata to comply with Android OTA package format.
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 3291d56..9067e78 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -1047,10 +1047,15 @@
 
 def CopyTargetFilesDir(input_dir):
   output_dir = common.MakeTempDir("target_files")
-  shutil.copytree(os.path.join(input_dir, "IMAGES"), os.path.join(
-      output_dir, "IMAGES"), dirs_exist_ok=True)
+  IMAGES_DIR = ["IMAGES", "PREBUILT_IMAGES", "RADIO"]
+  for subdir in IMAGES_DIR:
+    if not os.path.exists(os.path.join(input_dir, subdir)):
+      continue
+    shutil.copytree(os.path.join(input_dir, subdir), os.path.join(
+        output_dir, subdir), dirs_exist_ok=True, copy_function=os.link)
   shutil.copytree(os.path.join(input_dir, "META"), os.path.join(
       output_dir, "META"), dirs_exist_ok=True)
+
   for (dirpath, _, filenames) in os.walk(input_dir):
     for filename in filenames:
       path = os.path.join(dirpath, filename)