Merge "Revert^3 "Use -target-feature for MTE"" into main
diff --git a/ci/build_metadata b/ci/build_metadata
index 8136702..cd011c8 100755
--- a/ci/build_metadata
+++ b/ci/build_metadata
@@ -20,6 +20,9 @@
 export TARGET_RELEASE=trunk_staging
 export TARGET_BUILD_VARIANT=eng
 
-build/soong/bin/m dist \
-    code_metadata
+TARGETS=(
+    all_teams
+    release_config_metadata
+)
 
+build/soong/bin/m dist "${TARGETS[@]}"
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 402880c..b8c4a38 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -20,7 +20,6 @@
 import logging
 import os
 import pathlib
-import re
 import subprocess
 import sys
 from typing import Callable
@@ -70,7 +69,7 @@
       return BuildPlan(set(self.args.extra_targets), set())
 
     build_targets = set()
-    packaging_functions = set()
+    packaging_commands_getters = []
     for target in self.args.extra_targets:
       if self._unused_target_exclusion_enabled(
           target
@@ -86,9 +85,11 @@
           target, self.build_context, self.args
       )
       build_targets.update(target_optimizer.get_build_targets())
-      packaging_functions.add(target_optimizer.package_outputs)
+      packaging_commands_getters.append(
+          target_optimizer.get_package_outputs_commands
+      )
 
-    return BuildPlan(build_targets, packaging_functions)
+    return BuildPlan(build_targets, packaging_commands_getters)
 
   def _unused_target_exclusion_enabled(self, target: str) -> bool:
     return (
@@ -100,7 +101,7 @@
 @dataclass(frozen=True)
 class BuildPlan:
   build_targets: set[str]
-  packaging_functions: set[Callable[..., None]]
+  packaging_commands_getters: list[Callable[[], list[list[str]]]]
 
 
 def build_test_suites(argv: list[str]) -> int:
@@ -182,8 +183,12 @@
   except subprocess.CalledProcessError as e:
     raise BuildFailureError(e.returncode) from e
 
-  for packaging_function in build_plan.packaging_functions:
-    packaging_function()
+  for packaging_commands_getter in build_plan.packaging_commands_getters:
+    try:
+      for packaging_command in packaging_commands_getter():
+        run_command(packaging_command)
+    except subprocess.CalledProcessError as e:
+      raise BuildFailureError(e.returncode) from e
 
 
 def get_top() -> pathlib.Path:
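Note on the change above: the opaque packaging callables are replaced with getters that return plain argv lists, so the caller can run each command with run_command and surface failures as BuildFailureError. A minimal sketch of that contract, using a hypothetical getter that is not part of this change:

import subprocess

def example_packaging_commands_getter() -> list[list[str]]:
  # Each entry is one argv list; nothing is interpreted by a shell.
  return [['echo', 'packaging general-tests outputs']]

packaging_commands_getters = [example_packaging_commands_getter]

for getter in packaging_commands_getters:
  for command in getter():
    subprocess.run(command, check=True)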
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
index f3ff6f4..2afaab7 100644
--- a/ci/build_test_suites_test.py
+++ b/ci/build_test_suites_test.py
@@ -241,17 +241,17 @@
   class TestOptimizedBuildTarget(optimized_targets.OptimizedBuildTarget):
 
     def __init__(
-        self, target, build_context, args, output_targets, packaging_outputs
+        self, target, build_context, args, output_targets, packaging_commands
     ):
       super().__init__(target, build_context, args)
       self.output_targets = output_targets
-      self.packaging_outputs = packaging_outputs
+      self.packaging_commands = packaging_commands
 
     def get_build_targets_impl(self):
       return self.output_targets
 
-    def package_outputs_impl(self):
-      self.packaging_outputs.add(f'packaging {" ".join(self.output_targets)}')
+    def get_package_outputs_commands_impl(self):
+      return self.packaging_commands
 
     def get_enabled_flag(self):
       return f'{self.target}_enabled'
@@ -276,7 +276,8 @@
 
     build_plan = build_planner.create_build_plan()
 
-    self.assertEqual(len(build_plan.packaging_functions), 0)
+    for packaging_command in self.run_packaging_commands(build_plan):
+      self.assertEqual(len(packaging_command), 0)
 
   def test_build_optimization_on_optimizes_target(self):
     build_targets = {'target_1', 'target_2'}
@@ -294,20 +295,19 @@
 
   def test_build_optimization_on_packages_target(self):
     build_targets = {'target_1', 'target_2'}
-    packaging_outputs = set()
+    optimized_target_name = self.get_optimized_target_name('target_1')
+    packaging_commands = [[f'packaging {optimized_target_name}']]
     build_planner = self.create_build_planner(
         build_targets=build_targets,
         build_context=self.create_build_context(
             enabled_build_features=[{'name': self.get_target_flag('target_1')}]
         ),
-        packaging_outputs=packaging_outputs,
+        packaging_commands=packaging_commands,
     )
 
     build_plan = build_planner.create_build_plan()
-    self.run_packaging_functions(build_plan)
 
-    optimized_target_name = self.get_optimized_target_name('target_1')
-    self.assertIn(f'packaging {optimized_target_name}', packaging_outputs)
+    self.assertIn(packaging_commands, self.run_packaging_commands(build_plan))
 
   def test_individual_build_optimization_off_doesnt_optimize(self):
     build_targets = {'target_1', 'target_2'}
@@ -321,16 +321,16 @@
 
   def test_individual_build_optimization_off_doesnt_package(self):
     build_targets = {'target_1', 'target_2'}
-    packaging_outputs = set()
+    packaging_commands = [['packaging command']]
     build_planner = self.create_build_planner(
         build_targets=build_targets,
-        packaging_outputs=packaging_outputs,
+        packaging_commands=packaging_commands,
     )
 
     build_plan = build_planner.create_build_plan()
-    self.run_packaging_functions(build_plan)
 
-    self.assertFalse(packaging_outputs)
+    for packaging_command in self.run_packaging_commands(build_plan):
+      self.assertEqual(len(packaging_command), 0)
 
   def test_target_output_used_target_built(self):
     build_target = 'test_target'
@@ -408,7 +408,7 @@
       target_optimizations: dict[
           str, optimized_targets.OptimizedBuildTarget
       ] = None,
-      packaging_outputs: set[str] = set(),
+      packaging_commands: list[list[str]] = [],
   ) -> build_test_suites.BuildPlanner:
     if not build_context:
       build_context = self.create_build_context()
@@ -418,7 +418,7 @@
       target_optimizations = self.create_target_optimizations(
           build_context,
           build_targets,
-          packaging_outputs,
+          packaging_commands,
       )
     return build_test_suites.BuildPlanner(
         build_context, args, target_optimizations
@@ -450,14 +450,14 @@
       self,
       build_context: BuildContext,
       build_targets: set[str],
-      packaging_outputs: set[str] = set(),
+      packaging_commands: list[list[str]] = [],
   ):
     target_optimizations = dict()
     for target in build_targets:
       target_optimizations[target] = functools.partial(
           self.TestOptimizedBuildTarget,
           output_targets={self.get_optimized_target_name(target)},
-          packaging_outputs=packaging_outputs,
+          packaging_commands=packaging_commands,
       )
 
     return target_optimizations
@@ -468,10 +468,6 @@
   def get_optimized_target_name(self, target: str):
     return f'{target}_optimized'
 
-  def run_packaging_functions(self, build_plan: build_test_suites.BuildPlan):
-    for packaging_function in build_plan.packaging_functions:
-      packaging_function()
-
   def get_test_context(self, target: str):
     return {
         'testInfos': [
@@ -491,6 +487,12 @@
         ],
     }
 
+  def run_packaging_commands(self, build_plan: build_test_suites.BuildPlan):
+    return [
+        packaging_command_getter()
+        for packaging_command_getter in build_plan.packaging_commands_getters
+    ]
+
 
 def wait_until(
     condition_function: Callable[[], bool],
diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py
index fddde17..688bdd8 100644
--- a/ci/optimized_targets.py
+++ b/ci/optimized_targets.py
@@ -16,12 +16,13 @@
 from abc import ABC
 import argparse
 import functools
-from build_context import BuildContext
 import json
 import logging
 import os
-from typing import Self
+import pathlib
+import subprocess
 
+from build_context import BuildContext
 import test_mapping_module_retriever
 
 
@@ -33,6 +34,9 @@
   build.
   """
 
+  _SOONG_UI_BASH_PATH = 'build/soong/soong_ui.bash'
+  _PREBUILT_SOONG_ZIP_PATH = 'prebuilts/build-tools/linux-x86/bin/soong_zip'
+
   def __init__(
       self,
       target: str,
@@ -52,14 +56,17 @@
     self.modules_to_build = {self.target}
     return {self.target}
 
-  def package_outputs(self):
+  def get_package_outputs_commands(self) -> list[list[str]]:
     features = self.build_context.enabled_build_features
     if self.get_enabled_flag() in features:
-      return self.package_outputs_impl()
+      return self.get_package_outputs_commands_impl()
 
-  def package_outputs_impl(self):
+    return []
+
+  def get_package_outputs_commands_impl(self) -> list[list[str]]:
     raise NotImplementedError(
-        f'package_outputs_impl not implemented in {type(self).__name__}'
+        'get_package_outputs_commands_impl not implemented in'
+        f' {type(self).__name__}'
     )
 
   def get_enabled_flag(self):
@@ -72,6 +79,88 @@
         f'get_build_targets_impl not implemented in {type(self).__name__}'
     )
 
+  def _generate_zip_options_for_items(
+      self,
+      prefix: str = '',
+      relative_root: str = '',
+      list_files: list[str] | None = None,
+      files: list[str] | None = None,
+      directories: list[str] | None = None,
+  ) -> list[str]:
+    if not list_files and not files and not directories:
+      raise RuntimeError(
+          f'No items specified to be added to zip! Prefix: {prefix}, Relative'
+          f' root: {relative_root}'
+      )
+    command_segment = []
+    # These are all soong_zip options so consult soong_zip --help for specifics.
+    if prefix:
+      command_segment.append('-P')
+      command_segment.append(prefix)
+    if relative_root:
+      command_segment.append('-C')
+      command_segment.append(relative_root)
+    if list_files:
+      for list_file in list_files:
+        command_segment.append('-l')
+        command_segment.append(list_file)
+    if files:
+      for file in files:
+        command_segment.append('-f')
+        command_segment.append(file)
+    if directories:
+      for directory in directories:
+        command_segment.append('-D')
+        command_segment.append(directory)
+
+    return command_segment
+
+  def _query_soong_vars(
+      self, src_top: pathlib.Path, soong_vars: list[str]
+  ) -> dict[str, str]:
+    process_result = subprocess.run(
+        args=[
+            f'{src_top / self._SOONG_UI_BASH_PATH}',
+            '--dumpvars-mode',
+            f'--abs-vars={" ".join(soong_vars)}',
+        ],
+        env=os.environ,
+        check=False,
+        capture_output=True,
+        text=True,
+    )
+    if process_result.returncode != 0:
+      logging.error('soong dumpvars command failed! stderr:')
+      logging.error(process_result.stderr)
+      raise RuntimeError('Soong dumpvars failed! See log for stderr.')
+
+    if not process_result.stdout:
+      raise RuntimeError(
+          f'Necessary soong variables {" ".join(soong_vars)} not found.'
+      )
+
+    try:
+      return {
+          line.split('=')[0]: line.split('=')[1].strip("'")
+          for line in process_result.stdout.strip().split('\n')
+      }
+    except IndexError as e:
+      raise RuntimeError(
+          'Error parsing soong dumpvars output! See output here:'
+          f' {process_result.stdout}',
+          e,
+      )
+
+  def _base_zip_command(
+      self, src_top: pathlib.Path, dist_dir: pathlib.Path, name: str
+  ) -> list[str]:
+    return [
+        f'{src_top / self._PREBUILT_SOONG_ZIP_PATH}',
+        '-d',
+        '-o',
+        f'{dist_dir / name}',
+    ]
+
 
 class NullOptimizer(OptimizedBuildTarget):
   """No-op target optimizer.
@@ -86,8 +175,8 @@
   def get_build_targets(self):
     return {self.target}
 
-  def package_outputs(self):
-    pass
+  def get_package_outputs_commands(self):
+    return []
 
 
 class ChangeInfo:
@@ -114,11 +203,10 @@
 
     return changed_files
 
+
 class GeneralTestsOptimizer(OptimizedBuildTarget):
   """general-tests optimizer
 
-  TODO(b/358215235): Implement
-
   This optimizer reads in the list of changed files from the file located in
   env[CHANGE_INFO] and uses this list alongside the normal TEST MAPPING logic to
   determine what test mapping modules will run for the given changes. It then
@@ -126,10 +214,13 @@
   normally built.
   """
 
-  # List of modules that are always required to be in general-tests.zip.
-  _REQUIRED_MODULES = frozenset(
-      ['cts-tradefed', 'vts-tradefed', 'compatibility-host-util']
-  )
+  # List of modules that are built alongside general-tests as dependencies.
+  _REQUIRED_MODULES = frozenset([
+      'cts-tradefed',
+      'vts-tradefed',
+      'compatibility-host-util',
+      'general-tests-shared-libs',
+  ])
 
   def get_build_targets_impl(self) -> set[str]:
     change_info_file_path = os.environ.get('CHANGE_INFO')
@@ -173,6 +264,212 @@
 
     return modules_to_build
 
+  def get_package_outputs_commands_impl(self):
+    src_top = pathlib.Path(os.environ.get('TOP', os.getcwd()))
+    dist_dir = pathlib.Path(os.environ.get('DIST_DIR'))
+
+    soong_vars = self._query_soong_vars(
+        src_top,
+        [
+            'HOST_OUT_TESTCASES',
+            'TARGET_OUT_TESTCASES',
+            'PRODUCT_OUT',
+            'SOONG_HOST_OUT',
+            'HOST_OUT',
+        ],
+    )
+    host_out_testcases = pathlib.Path(soong_vars.get('HOST_OUT_TESTCASES'))
+    target_out_testcases = pathlib.Path(soong_vars.get('TARGET_OUT_TESTCASES'))
+    product_out = pathlib.Path(soong_vars.get('PRODUCT_OUT'))
+    soong_host_out = pathlib.Path(soong_vars.get('SOONG_HOST_OUT'))
+    host_out = pathlib.Path(soong_vars.get('HOST_OUT'))
+
+    host_paths = []
+    target_paths = []
+    host_config_files = []
+    target_config_files = []
+    for module in self.modules_to_build:
+      # The required modules are handled separately, no need to package.
+      if module in self._REQUIRED_MODULES:
+        continue
+
+      host_path = host_out_testcases / module
+      if os.path.exists(host_path):
+        host_paths.append(host_path)
+        self._collect_config_files(src_top, host_path, host_config_files)
+
+      target_path = target_out_testcases / module
+      if os.path.exists(target_path):
+        target_paths.append(target_path)
+        self._collect_config_files(src_top, target_path, target_config_files)
+
+      if not os.path.exists(host_path) and not os.path.exists(target_path):
+        logging.info(f'No host or target build outputs found for {module}.')
+
+    zip_commands = []
+
+    zip_commands.extend(
+        self._get_zip_test_configs_zips_commands(
+            src_top,
+            dist_dir,
+            host_out,
+            product_out,
+            host_config_files,
+            target_config_files,
+        )
+    )
+
+    zip_command = self._base_zip_command(src_top, dist_dir, 'general-tests.zip')
+
+    # Add host testcases.
+    if host_paths:
+      zip_command.extend(
+          self._generate_zip_options_for_items(
+              prefix='host',
+              relative_root=f'{src_top / soong_host_out}',
+              directories=host_paths,
+          )
+      )
+
+    # Add target testcases.
+    if target_paths:
+      zip_command.extend(
+          self._generate_zip_options_for_items(
+              prefix='target',
+              relative_root=f'{src_top / product_out}',
+              directories=target_paths,
+          )
+      )
+
+    # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
+    # Add necessary tools. These are also hardcoded in general-tests.mk.
+    framework_path = soong_host_out / 'framework'
+
+    zip_command.extend(
+        self._generate_zip_options_for_items(
+            prefix='host/tools',
+            relative_root=str(framework_path),
+            files=[
+                f"{framework_path / 'cts-tradefed.jar'}",
+                f"{framework_path / 'compatibility-host-util.jar'}",
+                f"{framework_path / 'vts-tradefed.jar'}",
+            ],
+        )
+    )
+
+    zip_commands.append(zip_command)
+    return zip_commands
+
+  def _collect_config_files(
+      self,
+      src_top: pathlib.Path,
+      root_dir: pathlib.Path,
+      config_files: list[str],
+  ):
+    for root, dirs, files in os.walk(src_top / root_dir):
+      for file in files:
+        if file.endswith('.config'):
+          config_files.append(root_dir / file)
+
+  def _get_zip_test_configs_zips_commands(
+      self,
+      src_top: pathlib.Path,
+      dist_dir: pathlib.Path,
+      host_out: pathlib.Path,
+      product_out: pathlib.Path,
+      host_config_files: list[str],
+      target_config_files: list[str],
+  ) -> list[list[str]]:
+    """Generate general-tests_configs.zip and general-tests_list.zip.
+
+    general-tests_configs.zip contains all of the .config files that were
+    built and general-tests_list.zip contains a text file which lists
+    all of the .config files that are in general-tests_configs.zip.
+
+    general-tests_configs.zip is organized as follows:
+    /
+      host/
+        testcases/
+          test_1.config
+          test_2.config
+          ...
+      target/
+        testcases/
+          test_1.config
+          test_2.config
+          ...
+
+    To build these, the paths to all host config files are written to one list
+    file and the paths to all target config files to another. The paths to all
+    config files are also written to a third file that is used for
+    general-tests_list.zip.
+
+    Args:
+      src_top: root of the source tree.
+      dist_dir: dist directory.
+      host_out: host out directory.
+      product_out: product out directory.
+      host_config_files: list of all host config files.
+      target_config_files: list of all target config files.
+
+    Returns:
+      The commands to generate general-tests_configs.zip and
+      general-tests_list.zip
+    """
+    with open(
+        f"{host_out / 'host_general-tests_list'}", 'w'
+    ) as host_list_file, open(
+        f"{product_out / 'target_general-tests_list'}", 'w'
+    ) as target_list_file, open(
+        f"{host_out / 'general-tests_list'}", 'w'
+    ) as list_file:
+
+      for config_file in host_config_files:
+        host_list_file.write(f'{config_file}' + '\n')
+        list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n')
+
+      for config_file in target_config_files:
+        target_list_file.write(f'{config_file}' + '\n')
+        list_file.write(
+            'target/' + os.path.relpath(config_file, product_out) + '\n'
+        )
+
+    zip_commands = []
+
+    tests_config_zip_command = self._base_zip_command(
+        src_top, dist_dir, 'general-tests_configs.zip'
+    )
+    tests_config_zip_command.extend(
+        self._generate_zip_options_for_items(
+            prefix='host',
+            relative_root=str(host_out),
+            list_files=[f"{host_out / 'host_general-tests_list'}"],
+        )
+    )
+
+    tests_config_zip_command.extend(
+        self._generate_zip_options_for_items(
+            prefix='target',
+            relative_root=str(product_out),
+            list_files=[f"{product_out / 'target_general-tests_list'}"],
+        ),
+    )
+
+    zip_commands.append(tests_config_zip_command)
+
+    tests_list_zip_command = self._base_zip_command(
+        src_top, dist_dir, 'general-tests_list.zip'
+    )
+    tests_list_zip_command.extend(
+        self._generate_zip_options_for_items(
+            relative_root=str(host_out),
+            files=[f"{host_out / 'general-tests_list'}"],
+        )
+    )
+    zip_commands.append(tests_list_zip_command)
+
+    return zip_commands
+
   def get_enabled_flag(self):
     return 'general_tests_optimized'
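For illustration, a general-tests.zip command assembled by _base_zip_command plus _generate_zip_options_for_items takes roughly the shape below; every path is a made-up placeholder, since the real values come from soong dumpvars and DIST_DIR at build time.

# Hypothetical argv list; -P sets the prefix inside the zip, -C the relative
# root on disk, -D adds a directory, -f adds a single file (see soong_zip --help).
general_tests_zip_command = [
    '/src/prebuilts/build-tools/linux-x86/bin/soong_zip',
    '-d',
    '-o', '/src/out/dist/general-tests.zip',
    '-P', 'host', '-C', '/src/out/soong/host/linux-x86',
    '-D', '/src/out/host/linux-x86/testcases/some_module',
    '-P', 'target', '-C', '/src/out/target/product/device',
    '-D', '/src/out/target/product/device/testcases/some_module',
    '-P', 'host/tools', '-C', '/src/out/soong/host/linux-x86/framework',
    '-f', '/src/out/soong/host/linux-x86/framework/cts-tradefed.jar',
]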
 
diff --git a/ci/optimized_targets_test.py b/ci/optimized_targets_test.py
index 919c193..0b0c0ec 100644
--- a/ci/optimized_targets_test.py
+++ b/ci/optimized_targets_test.py
@@ -19,10 +19,12 @@
 import os
 import pathlib
 import re
+import subprocess
+import textwrap
 import unittest
 from unittest import mock
-import optimized_targets
 from build_context import BuildContext
+import optimized_targets
 from pyfakefs import fake_filesystem_unittest
 
 
@@ -43,11 +45,68 @@
 
   def _setup_working_build_env(self):
     self.change_info_file = pathlib.Path('/tmp/change_info')
+    self._write_soong_ui_file()
+    self._host_out_testcases = pathlib.Path('/tmp/top/host_out_testcases')
+    self._host_out_testcases.mkdir(parents=True)
+    self._target_out_testcases = pathlib.Path('/tmp/top/target_out_testcases')
+    self._target_out_testcases.mkdir(parents=True)
+    self._product_out = pathlib.Path('/tmp/top/product_out')
+    self._product_out.mkdir(parents=True)
+    self._soong_host_out = pathlib.Path('/tmp/top/soong_host_out')
+    self._soong_host_out.mkdir(parents=True)
+    self._host_out = pathlib.Path('/tmp/top/host_out')
+    self._host_out.mkdir(parents=True)
+
+    self._dist_dir = pathlib.Path('/tmp/top/out/dist')
+    self._dist_dir.mkdir(parents=True)
 
     self.mock_os_environ.update({
         'CHANGE_INFO': str(self.change_info_file),
+        'TOP': '/tmp/top',
+        'DIST_DIR': '/tmp/top/out/dist',
     })
 
+  def _write_soong_ui_file(self):
+    soong_path = pathlib.Path('/tmp/top/build/soong')
+    soong_path.mkdir(parents=True)
+    with open(os.path.join(soong_path, 'soong_ui.bash'), 'w') as f:
+      f.write("""
+              #!/bin/bash
+              echo HOST_OUT_TESTCASES='/tmp/top/host_out_testcases'
+              echo TARGET_OUT_TESTCASES='/tmp/top/target_out_testcases'
+              echo PRODUCT_OUT='/tmp/top/product_out'
+              echo SOONG_HOST_OUT='/tmp/top/soong_host_out'
+              echo HOST_OUT='/tmp/top/host_out'
+              """)
+    os.chmod(os.path.join(soong_path, 'soong_ui.bash'), 0o666)
+
+  def _write_change_info_file(self):
+    change_info_contents = {
+        'changes': [{
+            'projectPath': '/project/path',
+            'revisions': [{
+                'fileInfos': [{
+                    'path': 'file/path/file_name',
+                }],
+            }],
+        }]
+    }
+
+    with open(self.change_info_file, 'w') as f:
+      json.dump(change_info_contents, f)
+
+  def _write_test_mapping_file(self):
+    test_mapping_contents = {
+        'test-mapping-group': [
+            {
+                'name': 'test_mapping_module',
+            },
+        ],
+    }
+
+    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+      json.dump(test_mapping_contents, f)
+
   def test_general_tests_optimized(self):
     optimizer = self._create_general_tests_optimizer()
 
@@ -124,36 +183,44 @@
     with self.assertRaises(json.decoder.JSONDecodeError):
       build_targets = optimizer.get_build_targets()
 
-  def _write_change_info_file(self):
-    change_info_contents = {
-        'changes': [{
-            'projectPath': '/project/path',
-            'revisions': [{
-                'fileInfos': [{
-                    'path': 'file/path/file_name',
-                }],
-            }],
-        }]
-    }
+  @mock.patch('subprocess.run')
+  def test_packaging_outputs_success(self, subprocess_run):
+    subprocess_run.return_value = self._get_soong_vars_output()
+    optimizer = self._create_general_tests_optimizer()
+    self._set_up_build_outputs(['test_mapping_module'])
 
-    with open(self.change_info_file, 'w') as f:
-      json.dump(change_info_contents, f)
+    targets = optimizer.get_build_targets()
+    package_commands = optimizer.get_package_outputs_commands()
 
-  def _write_test_mapping_file(self):
-    test_mapping_contents = {
-        'test-mapping-group': [
-            {
-                'name': 'test_mapping_module',
-            },
-        ],
-    }
+    self._verify_soong_zip_commands(package_commands, ['test_mapping_module'])
 
-    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
-      json.dump(test_mapping_contents, f)
+  @mock.patch('subprocess.run')
+  def test_get_soong_dumpvars_fails_raises(self, subprocess_run):
+    subprocess_run.return_value = self._get_soong_vars_output(return_code=-1)
+    optimizer = self._create_general_tests_optimizer()
+    self._set_up_build_outputs(['test_mapping_module'])
 
-  def _create_general_tests_optimizer(
-      self, build_context: BuildContext = None
-  ):
+    targets = optimizer.get_build_targets()
+
+    with self.assertRaisesRegex(RuntimeError, 'Soong dumpvars failed!'):
+      package_commands = optimizer.get_package_outputs_commands()
+
+  @mock.patch('subprocess.run')
+  def test_get_soong_dumpvars_bad_output_raises(self, subprocess_run):
+    subprocess_run.return_value = self._get_soong_vars_output(
+        stdout='This output is bad'
+    )
+    optimizer = self._create_general_tests_optimizer()
+    self._set_up_build_outputs(['test_mapping_module'])
+
+    targets = optimizer.get_build_targets()
+
+    with self.assertRaisesRegex(
+        RuntimeError, 'Error parsing soong dumpvars output'
+    ):
+      package_commands = optimizer.get_package_outputs_commands()
+
+  def _create_general_tests_optimizer(self, build_context: BuildContext = None):
     if not build_context:
       build_context = self._create_build_context()
     return optimized_targets.GeneralTestsOptimizer(
@@ -170,7 +237,9 @@
     build_context_dict = {}
     build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}]
     if general_tests_optimized:
-      build_context_dict['enabledBuildFeatures'].append({'name': 'general_tests_optimized'})
+      build_context_dict['enabledBuildFeatures'].append(
+          {'name': 'general_tests_optimized'}
+      )
     build_context_dict['testContext'] = test_context
     return BuildContext(build_context_dict)
 
@@ -199,6 +268,81 @@
         ],
     }
 
+  def _get_soong_vars_output(
+      self, return_code: int = 0, stdout: str = ''
+  ) -> subprocess.CompletedProcess:
+    return_value = subprocess.CompletedProcess(args=[], returncode=return_code)
+    if not stdout:
+      stdout = textwrap.dedent(f"""\
+                               HOST_OUT_TESTCASES='{self._host_out_testcases}'
+                               TARGET_OUT_TESTCASES='{self._target_out_testcases}'
+                               PRODUCT_OUT='{self._product_out}'
+                               SOONG_HOST_OUT='{self._soong_host_out}'
+                               HOST_OUT='{self._host_out}'""")
+
+    return_value.stdout = stdout
+    return return_value
+
+  def _set_up_build_outputs(self, targets: list[str]):
+    for target in targets:
+      host_dir = self._host_out_testcases / target
+      host_dir.mkdir()
+      (host_dir / f'{target}.config').touch()
+      (host_dir / f'test_file').touch()
+
+      target_dir = self._target_out_testcases / target
+      target_dir.mkdir()
+      (target_dir / f'{target}.config').touch()
+      (target_dir / f'test_file').touch()
+
+  def _verify_soong_zip_commands(self, commands: list[str], targets: list[str]):
+    """Verify the structure of the zip commands.
+
+    Zip commands must start with the soong_zip binary path, followed by a few
+    options and the name of the file being zipped. Depending on which zip is
+    being created, check that a few essential items are included in it.
+
+    Args:
+      commands: list of command lists
+      targets: list of targets expected to be in general-tests.zip
+    """
+    for command in commands:
+      self.assertEqual(
+          '/tmp/top/prebuilts/build-tools/linux-x86/bin/soong_zip',
+          command[0],
+      )
+      self.assertEqual('-d', command[1])
+      self.assertEqual('-o', command[2])
+      match (command[3]):
+        case '/tmp/top/out/dist/general-tests_configs.zip':
+          self.assertIn(f'{self._host_out}/host_general-tests_list', command)
+          self.assertIn(
+              f'{self._product_out}/target_general-tests_list', command
+          )
+          continue
+        case '/tmp/top/out/dist/general-tests_list.zip':
+          self.assertIn('-f', command)
+          self.assertIn(f'{self._host_out}/general-tests_list', command)
+          continue
+        case '/tmp/top/out/dist/general-tests.zip':
+          for target in targets:
+            self.assertIn(f'{self._host_out_testcases}/{target}', command)
+            self.assertIn(f'{self._target_out_testcases}/{target}', command)
+          self.assertIn(
+              f'{self._soong_host_out}/framework/cts-tradefed.jar', command
+          )
+          self.assertIn(
+              f'{self._soong_host_out}/framework/compatibility-host-util.jar',
+              command,
+          )
+          self.assertIn(
+              f'{self._soong_host_out}/framework/vts-tradefed.jar', command
+          )
+          continue
+        case _:
+          self.fail(f'malformed command: {command}')
+
 
 if __name__ == '__main__':
   # Setup logging to be silent so unit tests can pass through TF.
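The fake soong_ui.bash above mirrors the real dumpvars output that _query_soong_vars parses: one KEY='value' pair per line. A small self-contained sketch of that parsing, with invented values:

# Invented sample of `soong_ui.bash --dumpvars-mode --abs-vars=...` output.
sample_stdout = """HOST_OUT_TESTCASES='/src/out/host/linux-x86/testcases'
PRODUCT_OUT='/src/out/target/product/device'"""

soong_vars = {
    line.split('=')[0]: line.split('=')[1].strip("'")
    for line in sample_stdout.strip().split('\n')
}
assert soong_vars['PRODUCT_OUT'] == '/src/out/target/product/device'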
diff --git a/cogsetup.sh b/cogsetup.sh
deleted file mode 100644
index 5c64a06..0000000
--- a/cogsetup.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This file is executed by build/envsetup.sh, and can use anything
-# defined in envsetup.sh.
-function _create_out_symlink_for_cog() {
-  if [[ "${OUT_DIR}" == "" ]]; then
-    OUT_DIR="out"
-  fi
-
-  # getoutdir ensures paths are absolute. envsetup could be called from a
-  # directory other than the root of the source tree
-  local outdir=$(getoutdir)
-  if [[ -L "${outdir}" ]]; then
-    return
-  fi
-  if [ -d "${outdir}" ]; then
-    echo -e "\tOutput directory ${outdir} cannot be present in a Cog workspace."
-    echo -e "\tDelete \"${outdir}\" or create a symlink from \"${outdir}\" to a directory outside your workspace."
-    return 1
-  fi
-
-  DEFAULT_OUTPUT_DIR="${HOME}/.cog/android-build-out"
-  mkdir -p ${DEFAULT_OUTPUT_DIR}
-  ln -s ${DEFAULT_OUTPUT_DIR} ${outdir}
-}
-
-# This function sets up the build environment to be appropriate for Cog.
-function _setup_cog_env() {
-  _create_out_symlink_for_cog
-  if [ "$?" -eq "1" ]; then
-    echo -e "\e[0;33mWARNING:\e[00m Cog environment setup failed!"
-    return 1
-  fi
-
-  export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog"
-
-  # Running repo command within Cog workspaces is not supported, so override
-  # it with this function. If the user is running repo within a Cog workspace,
-  # we'll fail with an error, otherwise, we run the original repo command with
-  # the given args.
-  if ! ORIG_REPO_PATH=`which repo`; then
-    return 0
-  fi
-  function repo {
-    if [[ "${PWD}" == /google/cog/* ]]; then
-      echo -e "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces."
-      return 1
-    fi
-    ${ORIG_REPO_PATH} "$@"
-  }
-}
-
-if [[ "${PWD}" != /google/cog/* ]]; then
-  echo -e "\e[01;31mERROR:\e[0m This script must be run from a Cog workspace."
-fi
-
-_setup_cog_env
diff --git a/core/Makefile b/core/Makefile
index bf2d48a..81ae6f7 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1964,7 +1964,7 @@
 installed_system_dlkm_notice_xml_gz := $(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz
 
 ALL_INSTALLED_NOTICE_FILES := \
-  $(installed_notice_html_or_xml_gz) \
+  $(if $(USE_SOONG_DEFINED_SYSTEM_IMAGE),,$(installed_notice_html_or_xml_gz)) \
   $(installed_vendor_notice_xml_gz) \
   $(installed_product_notice_xml_gz) \
   $(installed_system_ext_notice_xml_gz) \
@@ -2051,7 +2051,9 @@
 
 endif # PRODUCT_NOTICE_SPLIT
 
+ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true)
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
+endif
 
 need_vendor_notice:=false
 ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
@@ -3511,6 +3513,8 @@
 		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key requireLibs \
 		--value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)"
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key provideLibs \
+		--value "$(foreach lib,$(PRODUCT_EXTRA_STUB_LIBRARIES), $(lib).so)"
 
 $(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),)
 $(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE))
@@ -3563,14 +3567,24 @@
 file_list_diff := $(HOST_OUT_EXECUTABLES)/file_list_diff$(HOST_EXECUTABLE_SUFFIX)
 system_file_diff_timestamp := $(systemimage_intermediates)/file_diff.timestamp
 
+# The build configuration to build the REL version may have more files to allow.
+# Use allowlist_next in addition to the allowlist in this case.
+system_file_diff_allowlist_next :=
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+system_file_diff_allowlist_next := $(ALL_MODULES.system_image_diff_allowlist_next.INSTALLED)
+$(system_file_diff_timestamp): PRIVATE_ALLOWLIST_NEXT := $(system_file_diff_allowlist_next)
+endif
 $(system_file_diff_timestamp): \
 	    $(systemimage_intermediates)/file_list.txt \
 	    $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \
 	    $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \
+	    $(system_file_diff_allowlist_next) \
 	    $(file_list_diff)
 	$(file_list_diff) $(systemimage_intermediates)/file_list.txt \
 	  $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \
-	  $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
+	  $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) \
+	  --allowlists $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \
+	  $(PRIVATE_ALLOWLIST_NEXT)
 	touch $@
 
 $(BUILT_SYSTEMIMAGE): $(system_file_diff_timestamp)
@@ -3588,10 +3602,10 @@
 ifeq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),)
 $(error PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE must be set if USE_SOONG_DEFINED_SYSTEM_IMAGE is true)
 endif
-soong_defined_system_image := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
-$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(soong_defined_system_image)
-$(eval $(call copy-one-file, $(soong_defined_system_image), $(BUILT_SYSTEMIMAGE)))
-soong_defined_system_image :=
+SOONG_DEFINED_SYSTEM_IMAGE_PATH := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
+SOONG_DEFINED_SYSTEM_IMAGE_BASE := $(dir $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST))
+$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(SOONG_DEFINED_SYSTEM_IMAGE_PATH)
+$(eval $(call copy-one-file, $(SOONG_DEFINED_SYSTEM_IMAGE_PATH), $(BUILT_SYSTEMIMAGE)))
 else
 $(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt
 	$(call build-systemimage-target,$@)
@@ -3996,6 +4010,21 @@
     $(filter $(TARGET_OUT_PRODUCT)/%,\
       $(ALL_DEFAULT_INSTALLED_MODULES))
 
+# Install product/etc/linker.config.pb with PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS
+product_linker_config_file := $(TARGET_OUT_PRODUCT)/etc/linker.config.pb
+$(product_linker_config_file): private_linker_config_fragments := $(PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS)
+$(product_linker_config_file): $(INTERNAL_PRODUCTIMAGE_FILES) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config proto \
+		--source $(call normalize-path-list,$(private_linker_config_fragments)) \
+		--output $@
+$(call declare-1p-target,$(product_linker_config_file),)
+INTERNAL_PRODUCTIMAGE_FILES += $(product_linker_config_file)
+ALL_DEFAULT_INSTALLED_MODULES += $(product_linker_config_file)
+
+
 INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
 INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json)
 $(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT)
@@ -6134,6 +6163,9 @@
 $(BUILT_TARGET_FILES_DIR): zip_root := $(intermediates)/$(name)
 $(BUILT_TARGET_FILES_DIR): intermediates := $(intermediates)
 
+ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),)
+  $(BUILT_TARGET_FILES_DIR): $(SOONG_DEFINED_SYSTEM_IMAGE_PATH)
+endif
 
 # $(1): Directory to copy
 # $(2): Location to copy it to
@@ -6462,8 +6494,11 @@
 	    $(INSTALLED_RAMDISK_TARGET) \
 	    $(INSTALLED_DTBIMAGE_TARGET) \
 	    $(INSTALLED_2NDBOOTLOADER_TARGET) \
+	    $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \
 	    $(BUILT_RAMDISK_16K_TARGET) \
 	    $(BUILT_KERNEL_16K_TARGET) \
+	    $(BUILT_BOOTIMAGE_16K_TARGET) \
+	    $(INSTALLED_DTBOIMAGE_16KB_TARGET) \
 	    $(BOARD_PREBUILT_DTBOIMAGE) \
 	    $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \
 	    $(BOARD_RECOVERY_ACPIO) \
@@ -6617,8 +6652,13 @@
 endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET
 ifdef BUILDING_SYSTEM_IMAGE
 	@# Contents of the system image
+ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),)
+	$(hide) $(call package_files-copy-root, \
+	    $(SOONG_DEFINED_SYSTEM_IMAGE_BASE)/root/system,$(zip_root)/SYSTEM)
+else
 	$(hide) $(call package_files-copy-root, \
 	    $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM)
+endif
 else ifdef INSTALLED_BUILD_PROP_TARGET
 	@# Copy the system build.prop even if not building a system image
 	@# because add_img_to_target_files may need it to build other partition
@@ -7935,9 +7975,14 @@
 # Desktop pack recovery image hook.
 ifneq (,$(strip $(PACK_DESKTOP_RECOVERY_IMAGE)))
 PACK_RECOVERY_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_recovery_image.bin
+PACK_RECOVERY_IMAGE_ARGS := --noarchive --recovery
+
+ifneq (,$(strip $(PACK_RECOVERY_IMAGE_EXPERIMENTAL)))
+PACK_RECOVERY_IMAGE_ARGS += --experimental
+endif # PACK_RECOVERY_IMAGE_EXPERIMENTAL
 
 $(PACK_RECOVERY_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
-	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive --recovery
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_RECOVERY_IMAGE_ARGS)
 
 PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET := $(PACK_RECOVERY_IMAGE_TARGET).gz
 
@@ -7955,9 +8000,14 @@
 # Desktop pack update image hook.
 ifneq (,$(strip $(PACK_DESKTOP_UPDATE_IMAGE)))
 PACK_UPDATE_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_update_image.bin
+PACK_UPDATE_IMAGE_ARGS := --noarchive --update
+
+ifneq (,$(strip $(PACK_UPDATE_IMAGE_EXPERIMENTAL)))
+PACK_UPDATE_IMAGE_ARGS += --experimental
+endif # PACK_UPDATE_IMAGE_EXPERIMENTAL
 
 $(PACK_UPDATE_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
-	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive --update
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_UPDATE_IMAGE_ARGS)
 
 PACKED_UPDATE_IMAGE_ARCHIVE_TARGET := $(PACK_UPDATE_IMAGE_TARGET).gz
 
@@ -7971,6 +8021,28 @@
 
 endif # PACK_DESKTOP_UPDATE_IMAGE
 
+PACK_MIGRATION_IMAGE_SCRIPT := $(HOST_OUT_EXECUTABLES)/pack_migration_image
+
+# -----------------------------------------------------------------
+# Desktop pack migration image hook.
+ifeq ($(ANDROID_DESKTOP_MIGRATION_IMAGE),true)
+PACK_MIGRATION_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_migration_image.bin
+
+$(PACK_MIGRATION_IMAGE_TARGET): $(IMAGES) $(PACK_MIGRATION_IMAGE_SCRIPT)
+	$(PACK_MIGRATION_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive
+
+PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET := $(PACK_MIGRATION_IMAGE_TARGET).gz
+
+$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET): $(PACK_MIGRATION_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_MIGRATION_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-migration-image
+pack-migration-image: $(PACK_MIGRATION_IMAGE_TARGET)
+
+endif # ANDROID_DESKTOP_MIGRATION_IMAGE
+
 # -----------------------------------------------------------------
 # OS Licensing
 
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 5fc8fd4..06dc54c 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -47,6 +47,8 @@
 $(call soong_config_set_bool,ANDROID,CLANG_COVERAGE,$(CLANG_COVERAGE))
 $(call soong_config_set,ANDROID,SCUDO_ALLOCATION_RING_BUFFER_SIZE,$(PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE))
 
+$(call soong_config_set_bool,ANDROID,EMMA_INSTRUMENT,$(if $(filter true,$(EMMA_INSTRUMENT)),true,false))
+
 # PRODUCT_PRECOMPILED_SEPOLICY defaults to true. Explicitly check if it's "false" or not.
 $(call soong_config_set_bool,ANDROID,PRODUCT_PRECOMPILED_SEPOLICY,$(if $(filter false,$(PRODUCT_PRECOMPILED_SEPOLICY)),false,true))
 
@@ -182,3 +184,27 @@
 
 # Add target_use_pan_display flag for hardware/libhardware:gralloc.default
 $(call soong_config_set_bool,gralloc,target_use_pan_display,$(if $(filter true,$(TARGET_USE_PAN_DISPLAY)),true,false))
+
+# Add use_camera_v4l2_hal flag for hardware/libhardware/modules/camera/3_4:camera.v4l2
+$(call soong_config_set_bool,camera,use_camera_v4l2_hal,$(if $(filter true,$(USE_CAMERA_V4L2_HAL)),true,false))
+
+# Add audioserver_multilib flag for hardware/interfaces/soundtrigger/2.0/default:android.hardware.soundtrigger@2.0-impl
+ifneq ($(strip $(AUDIOSERVER_MULTILIB)),)
+  $(call soong_config_set,soundtrigger,audioserver_multilib,$(AUDIOSERVER_MULTILIB))
+endif
+
+# Add sim_count, disable_rild_oem_hook, and use_aosp_rild flag for ril related modules
+$(call soong_config_set,ril,sim_count,$(SIM_COUNT))
+ifneq ($(DISABLE_RILD_OEM_HOOK), false)
+  $(call soong_config_set_bool,ril,disable_rild_oem_hook,true)
+endif
+ifneq ($(ENABLE_VENDOR_RIL_SERVICE), true)
+  $(call soong_config_set_bool,ril,use_aosp_rild,true)
+endif
+
+# Export target_board_platform to soong for hardware/google/graphics/common/libmemtrack:memtrack.$(TARGET_BOARD_PLATFORM)
+$(call soong_config_set,ANDROID,target_board_platform,$(TARGET_BOARD_PLATFORM))
+
+# Export board_uses_scaler_m2m1shot and board_uses_align_restriction to soong for hardware/google/graphics/common/libscaler:libexynosscaler
+$(call soong_config_set_bool,google_graphics,board_uses_scaler_m2m1shot,$(if $(filter true,$(BOARD_USES_SCALER_M2M1SHOT)),true,false))
+$(call soong_config_set_bool,google_graphics,board_uses_align_restriction,$(if $(filter true,$(BOARD_USES_ALIGN_RESTRICTION)),true,false))
diff --git a/core/binary.mk b/core/binary.mk
index 1e98bc0..3481144 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -330,18 +330,20 @@
   ifneq ($(LOCAL_IN_VENDOR),)
     # Vendor modules have LOCAL_IN_VENDOR
     my_cflags += -D__ANDROID_VENDOR__
-
-    ifeq ($(BOARD_API_LEVEL),)
-      # TODO(b/314036847): This is a fallback for UDC targets.
-      # This must be a build failure when UDC is no longer built from this source tree.
-      my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION)
-    else
-      my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL)
-    endif
   else ifneq ($(LOCAL_IN_PRODUCT),)
     # Product modules have LOCAL_IN_PRODUCT
     my_cflags += -D__ANDROID_PRODUCT__
   endif
+
+  # Define __ANDROID_VENDOR_API__ for both product and vendor variants because
+  # they both use the same LLNDK libraries.
+  ifeq ($(BOARD_API_LEVEL),)
+    # TODO(b/314036847): This is a fallback for UDC targets.
+    # This must be a build failure when UDC is no longer built from this source tree.
+    my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION)
+  else
+    my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL)
+  endif
 endif
 
 ifndef LOCAL_IS_HOST_MODULE
diff --git a/core/combo/arch/arm64/armv9-2a.mk b/core/combo/arch/arm64/armv9-2a.mk
new file mode 100644
index 0000000..69ffde0
--- /dev/null
+++ b/core/combo/arch/arm64/armv9-2a.mk
@@ -0,0 +1,18 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# .mk file required to support build for the ARMv9.2-A arch variant.
+# The file just needs to be present; it does not need to contain anything.
diff --git a/core/combo/arch/x86/alderlake.mk b/core/combo/arch/x86/alderlake.mk
new file mode 100644
index 0000000..a7ae6ed
--- /dev/null
+++ b/core/combo/arch/x86/alderlake.mk
@@ -0,0 +1,6 @@
+# Configuration for Linux on x86.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86_64/alderlake.mk b/core/combo/arch/x86_64/alderlake.mk
new file mode 100644
index 0000000..a7ae6ed
--- /dev/null
+++ b/core/combo/arch/x86_64/alderlake.mk
@@ -0,0 +1,6 @@
+# Configuration for Linux on x86_64.
+# Generating binaries for processors
+# that have AVX2 feature flag
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 906d7f0..88e0cc7 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -13,28 +13,6 @@
 install-on-system-other = $(filter-out $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1)))))
 endif
 
-# Install boot images for testing on host. We exclude framework image as it is not part of art manifest.
-my_boot_image_arch := HOST_ARCH
-my_boot_image_out := $(HOST_OUT)
-my_boot_image_syms := $(HOST_OUT)/symbols
-HOST_BOOT_IMAGE_MODULE := \
-  $(foreach my_boot_image_name,art_host,$(strip \
-    $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-    $(my_boot_image_module)))
-HOST_BOOT_IMAGE := $(call module-installed-files,$(HOST_BOOT_IMAGE_MODULE))
-ifdef HOST_2ND_ARCH
-  my_boot_image_arch := HOST_2ND_ARCH
-  2ND_HOST_BOOT_IMAGE_MODULE := \
-    $(foreach my_boot_image_name,art_host,$(strip \
-      $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-      $(my_boot_image_module)))
-  2ND_HOST_BOOT_IMAGE := $(call module-installed-files,$(2ND_HOST_BOOT_IMAGE_MODULE))
-endif
-my_boot_image_arch :=
-my_boot_image_out :=
-my_boot_image_syms :=
-my_boot_image_module :=
-
 # Build the boot.zip which contains the boot jars and their compilation output
 # We can do this only if preopt is enabled and if the product uses libart config (which sets the
 # default properties for preopting).
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
deleted file mode 100644
index a2c9942..0000000
--- a/core/dex_preopt_libart.mk
+++ /dev/null
@@ -1,109 +0,0 @@
-####################################
-# ART boot image installation
-# Input variables:
-#   my_boot_image_name: the boot image to install
-#   my_boot_image_arch: the architecture to install (e.g. TARGET_ARCH, not expanded)
-#   my_boot_image_out:  the install directory (e.g. $(PRODUCT_OUT))
-#   my_boot_image_syms: the symbols director (e.g. $(TARGET_OUT_UNSTRIPPED))
-#
-# Output variables:
-#   my_boot_image_module: the created module name. Empty if no module is created.
-#
-# Install the boot images compiled by Soong.
-# Create a module named dexpreopt_bootjar.$(my_boot_image_name)_$($(my_boot_image_arch))
-# that installs all of boot image files.
-# If there is no file to install for $(my_boot_image_name), for example when
-# building an unbundled build, then no module is created.
-#
-####################################
-
-# Takes a list of src:dest install pairs and returns a new list with a path
-# prefixed to each dest value.
-# $(1): list of src:dest install pairs
-# $(2): path to prefix to each dest value
-define prefix-copy-many-files-dest
-$(foreach v,$(1),$(call word-colon,1,$(v)):$(2)$(call word-colon,2,$(v)))
-endef
-
-# Converts an architecture-specific vdex path into a location that can be shared
-# between architectures.
-define vdex-shared-install-path
-$(dir $(patsubst %/,%,$(dir $(1))))$(notdir $(1))
-endef
-
-# Takes a list of src:dest install pairs of vdex files and returns a new list
-# where each dest has been rewritten to the shared location for vdex files.
-define vdex-copy-many-files-shared-dest
-$(foreach v,$(1),$(call word-colon,1,$(v)):$(call vdex-shared-install-path,$(call word-colon,2,$(v))))
-endef
-
-# Creates a rule to symlink an architecture specific vdex file to the shared
-# location for that vdex file.
-define symlink-vdex-file
-$(strip \
-  $(call symlink-file,\
-    $(call vdex-shared-install-path,$(1)),\
-    ../$(notdir $(1)),\
-    $(1))\
-  $(1))
-endef
-
-# Takes a list of src:dest install pairs of vdex files and creates rules to
-# symlink each dest to the shared location for that vdex file.
-define symlink-vdex-files
-$(foreach v,$(1),$(call symlink-vdex-file,$(call word-colon,2,$(v))))
-endef
-
-my_boot_image_module :=
-
-my_suffix := $(my_boot_image_name)_$($(my_boot_image_arch))
-my_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out))
-my_vdex_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_VDEX_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out))
-my_vdex_copy_shared_pairs := $(call vdex-copy-many-files-shared-dest,$(my_vdex_copy_pairs))
-ifeq (,$(filter %_2ND_ARCH,$(my_boot_image_arch)))
-  # Only install the vdex to the shared location for the primary architecture.
-  my_copy_pairs += $(my_vdex_copy_shared_pairs)
-endif
-
-my_unstripped_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_syms))
-
-# Generate the boot image module only if there is any file to install.
-ifneq (,$(strip $(my_copy_pairs)))
-  my_first_pair := $(firstword $(my_copy_pairs))
-  my_rest_pairs := $(wordlist 2,$(words $(my_copy_pairs)),$(my_copy_pairs))
-
-  my_first_src := $(call word-colon,1,$(my_first_pair))
-  my_first_dest := $(call word-colon,2,$(my_first_pair))
-
-  my_installed := $(call copy-many-files,$(my_copy_pairs))
-  my_unstripped_installed := $(call copy-many-files,$(my_unstripped_copy_pairs))
-
-  my_symlinks := $(call symlink-vdex-files,$(my_vdex_copy_pairs))
-
-  # We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM).
-  LOCAL_PATH := $(BUILD_SYSTEM)
-  # Hack to let these pseudo-modules wrapped around Soong modules use LOCAL_SOONG_INSTALLED_MODULE.
-  LOCAL_MODULE_MAKEFILE := $(SOONG_ANDROID_MK)
-
-  include $(CLEAR_VARS)
-  LOCAL_MODULE := dexpreopt_bootjar.$(my_suffix)
-  LOCAL_PREBUILT_MODULE_FILE := $(my_first_src)
-  LOCAL_MODULE_PATH := $(dir $(my_first_dest))
-  LOCAL_MODULE_STEM := $(notdir $(my_first_dest))
-  LOCAL_SOONG_INSTALL_PAIRS := $(my_copy_pairs)
-  LOCAL_SOONG_INSTALL_SYMLINKS := $(my_symlinks)
-  LOCAL_SOONG_INSTALLED_MODULE := $(my_first_dest)
-  LOCAL_SOONG_LICENSE_METADATA := $(DEXPREOPT_IMAGE_LICENSE_METADATA_$(my_suffix))
-  ifneq (,$(strip $(filter HOST_%,$(my_boot_image_arch))))
-    LOCAL_IS_HOST_MODULE := true
-  endif
-  LOCAL_MODULE_CLASS := ETC
-  include $(BUILD_PREBUILT)
-  $(LOCAL_BUILT_MODULE): | $(my_unstripped_installed)
-  # Installing boot.art causes all boot image bits to be installed.
-  # Keep this old behavior in case anyone still needs it.
-  $(LOCAL_INSTALLED_MODULE): $(wordlist 2,$(words $(my_installed)),$(my_installed)) $(my_symlinks)
-  $(my_all_targets): $(my_installed) $(my_symlinks)
-
-  my_boot_image_module := $(LOCAL_MODULE)
-endif  # my_copy_pairs != empty
diff --git a/core/envsetup.mk b/core/envsetup.mk
index c063f60..f82e861 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -417,6 +417,7 @@
 HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest64
 HOST_OUT_COVERAGE := $(HOST_OUT)/coverage
 HOST_OUT_TESTCASES := $(HOST_OUT)/testcases
+HOST_OUT_ETC := $(HOST_OUT)/etc
 .KATI_READONLY := \
   HOST_OUT_EXECUTABLES \
   HOST_OUT_SHARED_LIBRARIES \
@@ -425,7 +426,8 @@
   HOST_OUT_SDK_ADDON \
   HOST_OUT_NATIVE_TESTS \
   HOST_OUT_COVERAGE \
-  HOST_OUT_TESTCASES
+  HOST_OUT_TESTCASES \
+  HOST_OUT_ETC
 
 HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT)/bin
 HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib
diff --git a/core/main.mk b/core/main.mk
index 8d0b465..5bbe1b1 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -289,6 +289,9 @@
 
 $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
 
+# Build bootloader.img/radio.img, and unpack the partitions.
+include $(BUILD_SYSTEM)/tasks/tools/update_bootloader_radio_image.mk
+
 # For an unbundled image, we can skip blueprint_tools because unbundled image
 # aims to remove a large number framework projects from the manifest, the
 # sources or dependencies for these tools may be missing from the tree.
@@ -297,6 +300,9 @@
 checkbuild: blueprint_tests
 endif
 
+# Create necessary directories and symlinks in the root filesystem
+include system/core/rootdir/create_root_structure.mk
+
 endif # dont_bother
 
 ifndef subdir_makefiles_total
@@ -305,9 +311,6 @@
 
 $(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] finishing legacy Make module parsing ...)
 
-# Create necessary directories and symlinks in the root filesystem
-include system/core/rootdir/create_root_structure.mk
-
 # -------------------------------------------------------------------
 # All module makefiles have been included at this point.
 # -------------------------------------------------------------------
@@ -684,12 +687,12 @@
 # Scan all modules in general-tests, device-tests and other selected suites and
 # flatten the shared library dependencies.
 define update-host-shared-libs-deps-for-suites
-$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests,\
+$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests camera-hal-tests,\
   $(foreach m,$(COMPATIBILITY.$(suite).MODULES),\
     $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
     $(foreach dep,$(my_deps),\
       $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\
-        $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests),\
+        $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests camera-hal-tests),\
           $(eval my_testcases := $(HOST_OUT_TESTCASES)),\
           $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
         $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
@@ -1896,7 +1899,6 @@
 	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
 	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
 	  $(eval _is_notice_file := $(call is-notice-file,$f)) \
-	  $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \
 	  $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \
 	  $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \
 	  $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \
@@ -1906,7 +1908,7 @@
 	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
 	  $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \
 	  $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \
-	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
 	  $(eval _static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES))) \
 	  $(eval _whole_static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES))) \
 	  $(eval _license_text := $(if $(filter $(_build_output_path),$(ALL_NON_MODULES)),$(ALL_NON_MODULES.$(_build_output_path).NOTICES))) \
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index 1e1b7df..d15a3d0 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -17,13 +17,17 @@
 
 $(eval $(call text-notice-rule,$(target_notice_file_txt),"System image",$(system_notice_file_message),$(SYSTEM_NOTICE_DEPS),$(SYSTEM_NOTICE_DEPS)))
 
+ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true)
 $(installed_notice_html_or_xml_gz): $(target_notice_file_xml_gz)
 	$(copy-file-to-target)
 endif
+endif
 
 $(call declare-1p-target,$(target_notice_file_xml_gz))
+ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true)
 $(call declare-1p-target,$(installed_notice_html_or_xml_gz))
 endif
+endif
 
 .PHONY: vendorlicense
 vendorlicense: $(call corresponding-license-metadata, $(VENDOR_NOTICE_DEPS)) reportmissinglicenses
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index a77956b..ccb502c 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -18,7 +18,7 @@
 #
 
 # TODO: Should we do all of the images in $(IMAGES_TO_BUILD)?
-_FLAG_PARTITIONS := product system system_ext vendor
+_FLAG_PARTITIONS := product system vendor
 
 
 # -----------------------------------------------------------------
@@ -28,7 +28,6 @@
 # $(1): built aconfig flags file (out)
 # $(2): installed aconfig flags file (out)
 # $(3): the partition (in)
-# $(4): input aconfig files for the partition (in)
 define generate-partition-aconfig-flag-file
 $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
 $(eval $(strip $(1)): PRIVATE_IN := $(strip $(4)))
@@ -36,12 +35,14 @@
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
 		$$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
-			--filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \
+			--filter container:$(strip $(3)) \
+			$$(addprefix --cache ,$$(PRIVATE_IN)), \
 		echo -n > $$(PRIVATE_OUT) \
 	)
 $(call copy-one-file, $(1), $(2))
 endef
 
+
 # Create a summary file of build flags for each partition
 # $(1): built aconfig flags file (out)
 # $(2): installed aconfig flags file (out)
@@ -59,16 +60,22 @@
 $(call copy-one-file, $(1), $(2))
 endef
 
-
 $(foreach partition, $(_FLAG_PARTITIONS), \
 	$(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \
 	$(eval $(call generate-partition-aconfig-flag-file, \
-				$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
-				$(aconfig_flag_summaries_protobuf.$(partition)), \
-				$(partition), \
-				$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
+			$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
+			$(aconfig_flag_summaries_protobuf.$(partition)), \
+			$(partition), \
+			$(sort \
+				$(foreach m, $(call register-names-for-partition, $(partition)), \
 					$(ALL_MODULES.$(m).ACONFIG_FILES) \
-				)), \
+				) \
+				$(if $(filter system, $(partition)), \
+					$(foreach m, $(call register-names-for-partition, system_ext), \
+						$(ALL_MODULES.$(m).ACONFIG_FILES) \
+					) \
+				) \
+			) \
 	)) \
 )
 
@@ -90,42 +97,54 @@
 # $(1): built aconfig flags storage package map file (out)
 # $(2): built aconfig flags storage flag map file (out)
 # $(3): built aconfig flags storage flag val file (out)
-# $(4): installed aconfig flags storage package map file (out)
-# $(5): installed aconfig flags storage flag map file (out)
-# $(6): installed aconfig flags storage flag value file (out)
-# $(7): input aconfig files for the partition (in)
-# $(8): partition name
+# $(4): built aconfig flags storage flag info file (out)
+# $(5): installed aconfig flags storage package map file (out)
+# $(6): installed aconfig flags storage flag map file (out)
+# $(7): installed aconfig flags storage flag value file (out)
+# $(8): installed aconfig flags storage flag info file (out)
+# $(9): input aconfig files for the partition (in)
+# $(10): partition name
 define generate-partition-aconfig-storage-file
 $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
-$(eval $(strip $(1)): PRIVATE_IN := $(strip $(7)))
-$(strip $(1)): $(ACONFIG) $(strip $(7))
+$(eval $(strip $(1)): PRIVATE_IN := $(strip $(9)))
+$(strip $(1)): $(ACONFIG) $(strip $(9))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
-		$$(ACONFIG) create-storage --container $(8) --file package_map --out $$(PRIVATE_OUT) \
+		$$(ACONFIG) create-storage --container $(10) --file package_map --out $$(PRIVATE_OUT) \
 			$$(addprefix --cache ,$$(PRIVATE_IN)), \
 	)
 	touch $$(PRIVATE_OUT)
 $(eval $(strip $(2)): PRIVATE_OUT := $(strip $(2)))
-$(eval $(strip $(2)): PRIVATE_IN := $(strip $(7)))
-$(strip $(2)): $(ACONFIG) $(strip $(7))
+$(eval $(strip $(2)): PRIVATE_IN := $(strip $(9)))
+$(strip $(2)): $(ACONFIG) $(strip $(9))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
-		$$(ACONFIG) create-storage --container $(8) --file flag_map --out $$(PRIVATE_OUT) \
+		$$(ACONFIG) create-storage --container $(10) --file flag_map --out $$(PRIVATE_OUT) \
 			$$(addprefix --cache ,$$(PRIVATE_IN)), \
 	)
 	touch $$(PRIVATE_OUT)
 $(eval $(strip $(3)): PRIVATE_OUT := $(strip $(3)))
-$(eval $(strip $(3)): PRIVATE_IN := $(strip $(7)))
-$(strip $(3)): $(ACONFIG) $(strip $(7))
+$(eval $(strip $(3)): PRIVATE_IN := $(strip $(9)))
+$(strip $(3)): $(ACONFIG) $(strip $(9))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
-		$$(ACONFIG) create-storage --container $(8) --file flag_val --out $$(PRIVATE_OUT) \
+		$$(ACONFIG) create-storage --container $(10) --file flag_val --out $$(PRIVATE_OUT) \
 		$$(addprefix --cache ,$$(PRIVATE_IN)), \
 	)
 	touch $$(PRIVATE_OUT)
-$(call copy-one-file, $(strip $(1)), $(4))
-$(call copy-one-file, $(strip $(2)), $(5))
-$(call copy-one-file, $(strip $(3)), $(6))
+$(eval $(strip $(4)): PRIVATE_OUT := $(strip $(4)))
+$(eval $(strip $(4)): PRIVATE_IN := $(strip $(9)))
+$(strip $(4)): $(ACONFIG) $(strip $(9))
+	mkdir -p $$(dir $$(PRIVATE_OUT))
+	$$(if $$(PRIVATE_IN), \
+		$$(ACONFIG) create-storage --container $(10) --file flag_info --out $$(PRIVATE_OUT) \
+		$$(addprefix --cache ,$$(PRIVATE_IN)), \
+	)
+	touch $$(PRIVATE_OUT)
+$(call copy-one-file, $(strip $(1)), $(5))
+$(call copy-one-file, $(strip $(2)), $(6))
+$(call copy-one-file, $(strip $(3)), $(7))
+$(call copy-one-file, $(strip $(4)), $(8))
 endef
 
 ifeq ($(RELEASE_CREATE_ACONFIG_STORAGE_FILE),true)
@@ -133,13 +152,16 @@
 	$(eval aconfig_storage_package_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/package.map) \
 	$(eval aconfig_storage_flag_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.map) \
 	$(eval aconfig_storage_flag_val.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.val) \
+	$(eval aconfig_storage_flag_info.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.info) \
 	$(eval $(call generate-partition-aconfig-storage-file, \
 				$(TARGET_OUT_FLAGS)/$(partition)/package.map, \
 				$(TARGET_OUT_FLAGS)/$(partition)/flag.map, \
 				$(TARGET_OUT_FLAGS)/$(partition)/flag.val, \
+				$(TARGET_OUT_FLAGS)/$(partition)/flag.info, \
 				$(aconfig_storage_package_map.$(partition)), \
 				$(aconfig_storage_flag_map.$(partition)), \
 				$(aconfig_storage_flag_val.$(partition)), \
+				$(aconfig_storage_flag_info.$(partition)), \
 				$(aconfig_flag_summaries_protobuf.$(partition)), \
 				$(partition), \
 	)) \
@@ -155,6 +177,7 @@
 			$(aconfig_storage_package_map.$(partition)) \
 			$(aconfig_storage_flag_map.$(partition)) \
 			$(aconfig_storage_flag_val.$(partition)) \
+			$(aconfig_storage_flag_info.$(partition)) \
 		))
 
 ALL_DEFAULT_INSTALLED_MODULES += $(required_flags_files)
@@ -174,5 +197,5 @@
 	$(eval aconfig_storage_package_map.$(partition):=) \
 	$(eval aconfig_storage_flag_map.$(partition):=) \
 	$(eval aconfig_storage_flag_val.$(partition):=) \
+	$(eval aconfig_storage_flag_info.$(partition):=) \
 )
-
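
A minimal sketch of what the generated rules now run for the system partition, with illustrative cache and output paths (the real ones come from $(TARGET_OUT_FLAGS)/$(partition) and the per-module ACONFIG_FILES); only the aconfig subcommands and options mirror the rules above. Note that system_ext module caches are now folded into the system container, and that create-storage gains a fourth flag_info output:

    # "aconfig" stands for the $(ACONFIG) tool from the build; all paths are illustrative.
    aconfig dump --dedup --format protobuf \
        --out "$OUT/obj/FLAGS/system/aconfig_flags.pb" \
        --filter container:system \
        --cache system_module.pb --cache system_ext_module.pb

    # With RELEASE_CREATE_ACONFIG_STORAGE_FILE=true, flag.info is now generated
    # alongside package.map, flag.map and flag.val:
    aconfig create-storage --container system --file flag_info \
        --out "$OUT/obj/FLAGS/system/flag.info" \
        --cache system_module.pb
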
diff --git a/core/product.mk b/core/product.mk
index 4c23e5d..8fc40f8 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -390,20 +390,6 @@
 # If set, Java module in product partition cannot use hidden APIs.
 _product_single_value_vars += PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE
 
-# If set, only java_sdk_library can be used at inter-partition dependency.
-# Note: Build error if BOARD_VNDK_VERSION is not set while
-#       PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true, because
-#       PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY has no meaning if
-#       BOARD_VNDK_VERSION is not set.
-# Note: When PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE is not set, there are
-#       no restrictions at dependency between system and product partition.
-_product_single_value_vars += PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY
-
-# Allowlist for PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY option.
-# Listed modules are allowed at inter-partition dependency even if it isn't
-# a java_sdk_library module.
-_product_list_vars += PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST
-
 # Install a copy of the debug policy to the system_ext partition, and allow
 # init-second-stage to load debug policy from system_ext.
 # This option is only meant to be set by compliance GSI targets.
@@ -436,8 +422,9 @@
 # If true, the cgroup v2 hierarchy will be split into apps/system subtrees
 _product_single_value_vars += PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED
 
-# List of .json files to be merged/compiled into vendor/etc/linker.config.pb
+# List of .json files to be merged/compiled into vendor/etc/linker.config.pb and product/etc/linker.config.pb
 _product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS
+_product_list_vars += PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS
 
 # Whether to use userfaultfd GC.
 # Possible values are:
@@ -499,6 +486,10 @@
 # If set, build would generate system image from Soong-defined module.
 _product_single_value_vars += PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE
 
+# List of stub libraries specific to the product that are already present in the system image and
+# should be included in the system_linker_config.
+_product_list_vars += PRODUCT_EXTRA_STUB_LIBRARIES
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
diff --git a/core/proguard.flags b/core/proguard.flags
index aa406b9..5148e56 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -38,6 +38,17 @@
   @com.android.internal.annotations.KeepForWeakReference <fields>;
 }
 
+# Needed to ensure callback field references are kept in their respective
+# owning classes when the downstream callback registrars only store weak refs.
+-if @com.android.internal.annotations.WeaklyReferencedCallback class *
+-keepclassmembers,allowaccessmodification class * {
+  <1> *;
+}
+-if class * extends @com.android.internal.annotations.WeaklyReferencedCallback **
+-keepclassmembers,allowaccessmodification class * {
+  <1> *;
+}
+
 # Understand the common @Keep annotation from various Android packages:
 #  * android.support.annotation
 #  * androidx.annotation
diff --git a/core/project_definitions.mk b/core/project_definitions.mk
index 5728b67..184b03e 100644
--- a/core/project_definitions.mk
+++ b/core/project_definitions.mk
@@ -22,3 +22,6 @@
 # Include definitions for prebuilt SDK, if present.
 #
 -include prebuilts/sdk/current/definitions.mk
+
+# SDV-specific config.
+-include system/software_defined_vehicle/platform/config.mk
diff --git a/core/soong_config.mk b/core/soong_config.mk
index fddb500..aaeb70f 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -236,6 +236,12 @@
 
 $(call add_json_list, TargetFSConfigGen,                 $(TARGET_FS_CONFIG_GEN))
 
+# USE_SOONG_DEFINED_SYSTEM_IMAGE controls whether the system image specified by
+# PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE is actually used; PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE is
+# still passed either way so that installed files can be compared between make and soong.
+$(call add_json_bool, UseSoongSystemImage,               $(filter true,$(USE_SOONG_DEFINED_SYSTEM_IMAGE)))
+$(call add_json_str,  ProductSoongDefinedSystemImage,    $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))
+
 $(call add_json_map, VendorVars)
 $(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\
   $(call add_json_map, $(namespace))\
@@ -259,9 +265,6 @@
 $(call add_json_bool, EnforceProductPartitionInterface,  $(filter true,$(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE)))
 $(call add_json_str,  DeviceCurrentApiLevelForVendorModules,  $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
 
-$(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(filter true,$(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY)))
-$(call add_json_list, InterPartitionJavaLibraryAllowList, $(PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST))
-
 $(call add_json_bool, CompressedApex, $(filter true,$(PRODUCT_COMPRESSED_APEX)))
 
 ifndef APEX_BUILD_FOR_PRE_S_DEVICES
@@ -345,6 +348,8 @@
 $(call add_json_list, ProductPropFiles, $(TARGET_PRODUCT_PROP))
 $(call add_json_list, OdmPropFiles, $(TARGET_ODM_PROP))
 
+$(call add_json_str, ExtraAllowedDepsTxt, $(EXTRA_ALLOWED_DEPS_TXT))
+
 # Do not set ArtTargetIncludeDebugBuild into any value if PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD is not set,
 # to have the same behavior from runtime_libart.mk.
 ifneq ($(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD),)
@@ -361,6 +366,64 @@
 $(call add_json_list, BoardAvbSystemAddHashtreeFooterArgs, $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS))
 $(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE)))
 
+$(call add_json_str, AdbKeys, $(PRODUCT_ADB_KEYS))
+
+$(call add_json_map, PartitionVarsForSoongMigrationOnlyDoNotUse)
+  $(call add_json_str,  ProductDirectory,    $(dir $(INTERNAL_PRODUCT)))
+
+  $(call add_json_map,PartitionQualifiedVariables)
+  $(foreach image_type,SYSTEM VENDOR CACHE USERDATA PRODUCT SYSTEM_EXT OEM ODM VENDOR_DLKM ODM_DLKM SYSTEM_DLKM, \
+    $(call add_json_map,$(call to-lower,$(image_type))) \
+    $(call add_json_bool, BuildingImage, $(filter true,$(BUILDING_$(image_type)_IMAGE))) \
+    $(call add_json_str, BoardErofsCompressor, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESSOR)) \
+    $(call add_json_str, BoardErofsCompressHints, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESS_HINTS)) \
+    $(call add_json_str, BoardErofsPclusterSize, $(BOARD_$(image_type)IMAGE_EROFS_PCLUSTER_SIZE)) \
+    $(call add_json_str, BoardExtfsInodeCount, $(BOARD_$(image_type)IMAGE_EXTFS_INODE_COUNT)) \
+    $(call add_json_str, BoardExtfsRsvPct, $(BOARD_$(image_type)IMAGE_EXTFS_RSV_PCT)) \
+    $(call add_json_str, BoardF2fsSloadCompressFlags, $(BOARD_$(image_type)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)) \
+    $(call add_json_str, BoardFileSystemCompress, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_COMPRESS)) \
+    $(call add_json_str, BoardFileSystemType, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_TYPE)) \
+    $(call add_json_str, BoardJournalSize, $(BOARD_$(image_type)IMAGE_JOURNAL_SIZE)) \
+    $(call add_json_str, BoardPartitionReservedSize, $(BOARD_$(image_type)IMAGE_PARTITION_RESERVED_SIZE)) \
+    $(call add_json_str, BoardPartitionSize, $(BOARD_$(image_type)IMAGE_PARTITION_SIZE)) \
+    $(call add_json_str, BoardSquashfsBlockSize, $(BOARD_$(image_type)IMAGE_SQUASHFS_BLOCK_SIZE)) \
+    $(call add_json_str, BoardSquashfsCompressor, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR)) \
+    $(call add_json_str, BoardSquashfsCompressorOpt, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR_OPT)) \
+    $(call add_json_str, BoardSquashfsDisable4kAlign, $(BOARD_$(image_type)IMAGE_SQUASHFS_DISABLE_4K_ALIGN)) \
+    $(call add_json_str, ProductBaseFsPath, $(PRODUCT_$(image_type)_BASE_FS_PATH)) \
+    $(call add_json_str, ProductHeadroom, $(PRODUCT_$(image_type)_HEADROOM)) \
+    $(call add_json_str, ProductVerityPartition, $(PRODUCT_$(image_type)_VERITY_PARTITION)) \
+    $(call end_json_map) \
+  )
+  $(call end_json_map)
+
+  $(call add_json_bool, TargetUserimagesUseExt2, $(filter true,$(TARGET_USERIMAGES_USE_EXT2)))
+  $(call add_json_bool, TargetUserimagesUseExt3, $(filter true,$(TARGET_USERIMAGES_USE_EXT3)))
+  $(call add_json_bool, TargetUserimagesUseExt4, $(filter true,$(TARGET_USERIMAGES_USE_EXT4)))
+
+  $(call add_json_bool, TargetUserimagesSparseExtDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseErofsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseSquashfsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseF2fsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)))
+
+  $(call add_json_str, BoardErofsCompressor, $(BOARD_EROFS_COMPRESSOR))
+  $(call add_json_str, BoardErofsCompressorHints, $(BOARD_EROFS_COMPRESS_HINTS))
+  $(call add_json_str, BoardErofsPclusterSize, $(BOARD_EROFS_PCLUSTER_SIZE))
+  $(call add_json_str, BoardErofsShareDupBlocks, $(BOARD_EROFS_SHARE_DUP_BLOCKS))
+  $(call add_json_str, BoardErofsUseLegacyCompression, $(BOARD_EROFS_USE_LEGACY_COMPRESSION))
+  $(call add_json_str, BoardExt4ShareDupBlocks, $(BOARD_EXT4_SHARE_DUP_BLOCKS))
+  $(call add_json_str, BoardFlashLogicalBlockSize, $(BOARD_FLASH_LOGICAL_BLOCK_SIZE))
+  $(call add_json_str, BoardFlashEraseBlockSize, $(BOARD_FLASH_ERASE_BLOCK_SIZE))
+
+  $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
+  $(call add_json_bool, ProductUseDynamicPartitionSize, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)))
+  $(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP)))
+
+  $(call add_json_list, ProductPackages, $(PRODUCT_PACKAGES))
+  $(call add_json_list, ProductPackagesDebug, $(PRODUCT_PACKAGES_DEBUG))
+
+$(call end_json_map)
+
 $(call json_end)
 
 $(file >$(SOONG_VARIABLES).tmp,$(json_contents))
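
The new keys end up in the Soong variables JSON. A quick, hedged way to eyeball them after a build, assuming the conventional out/soong/soong.variables location (the exact path is whatever $(SOONG_VARIABLES) resolves to):

    # Migration-only partition variables for the system image.
    jq '.PartitionVarsForSoongMigrationOnlyDoNotUse.PartitionQualifiedVariables.system' \
        out/soong/soong.variables
    # The Soong-defined system image switches added above.
    jq '{UseSoongSystemImage, ProductSoongDefinedSystemImage}' out/soong/soong.variables
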
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 5850c4e..6164c2e 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -14,6 +14,7 @@
 
 
 .PHONY: device-tests
+.PHONY: device-tests-host-shared-libs
 
 device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
 # Create an artifact to include a list of test config files in device-tests.
@@ -23,37 +24,45 @@
 my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES))
 device_tests_host_shared_libs_zip := $(PRODUCT_OUT)/device-tests_host-shared-libs.zip
 
-$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip)
+$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip)
 $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
 $(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
-$(device-tests-zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip)
 $(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
-	rm -f $@-shared-libs.list
 	echo $(sort $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $@-host.list; \
-	  echo $$shared_lib >> $@-shared-libs.list; \
 	done
-	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \
 	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
-	$(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \
-	  -P host -C $(HOST_OUT) -l $@-host-shared-libs.list
 	rm -f $(PRIVATE_device_tests_list)
 	$(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list)
 	$(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list)
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-list-zip) -C $(dir $@) -f $(PRIVATE_device_tests_list)
 	rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
-	  $@-shared-libs.list $@-host-shared-libs.list $(PRIVATE_device_tests_list)
+		$(PRIVATE_device_tests_list)
+
+$(device_tests_host_shared_libs_zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip)
+$(device_tests_host_shared_libs_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
+$(device_tests_host_shared_libs_zip) : $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
+	rm -f $@-shared-libs.list
+	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+	  echo $$shared_lib >> $@-shared-libs.list; \
+	done
+	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
+	$(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-shared-libs.list
 
 device-tests: $(device-tests-zip)
+device-tests-host-shared-libs: $(device_tests_host_shared_libs_zip)
+
 $(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip))
+$(call dist-for-goals, device-tests-host-shared-libs, $(device_tests_host_shared_libs_zip))
 
 $(call declare-1p-container,$(device-tests-zip),)
 $(call declare-container-license-deps,$(device-tests-zip),$(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests),$(PRODUCT_OUT)/:/)
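
With the host shared-library zip split out of the main rule, it can be built and dist'ed on its own; a usage sketch using the same "m dist <goal>" form as the rest of this change (a lunch target is assumed to be configured already):

    # Build only the host shared-library bundle for device-tests.
    build/soong/bin/m dist device-tests-host-shared-libs
    # The full suite goal still dists device-tests_host-shared-libs.zip as before.
    build/soong/bin/m dist device-tests
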
diff --git a/core/tasks/mke2fs-dist.mk b/core/tasks/mke2fs-dist.mk
new file mode 100644
index 0000000..3540c1f
--- /dev/null
+++ b/core/tasks/mke2fs-dist.mk
@@ -0,0 +1,22 @@
+# Copyright (C) 2024 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: Once Soong's recovery partition variation can be made selectable
+#       and the meta_lic file duplication issue is resolved, move this to
+#       the dist section of the corresponding module's Android.bp.
+my_dist_files := $(HOST_OUT_EXECUTABLES)/mke2fs
+my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs
+my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs_casefold
+$(call dist-for-goals,dist_files sdk,$(my_dist_files))
+my_dist_files :=
diff --git a/core/tasks/sts-sdk.mk b/core/tasks/sts-sdk.mk
index b8ce5bf..4abbc29 100644
--- a/core/tasks/sts-sdk.mk
+++ b/core/tasks/sts-sdk.mk
@@ -28,8 +28,7 @@
 	rm -f $@ $(STS_SDK_ZIP)_filtered
 	$(ZIP2ZIP) -i $(STS_SDK_ZIP) -o $(STS_SDK_ZIP)_filtered \
 		-x android-sts-sdk/tools/sts-tradefed-tests.jar \
-		'android-sts-sdk/tools/*:plugin/src/main/resources/sts-tradefed-tools/' \
-		'android-sts-sdk/jdk/**/*:plugin/src/main/resources/jdk/'
+		'android-sts-sdk/tools/*:sts-sdk/src/main/resources/sts-tradefed-tools/'
 	$(MERGE_ZIPS) $@ $(STS_SDK_ZIP)_filtered $(STS_SDK_PLUGIN_SKEL)
 	rm -f $(STS_SDK_ZIP)_filtered
 
diff --git a/core/tasks/tools/update_bootloader_radio_image.mk b/core/tasks/tools/update_bootloader_radio_image.mk
new file mode 100644
index 0000000..0ebf247
--- /dev/null
+++ b/core/tasks/tools/update_bootloader_radio_image.mk
@@ -0,0 +1,17 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifeq ($(USES_DEVICE_GOOGLE_ZUMA),true)
+    -include vendor/google_devices/zuma/prebuilts/misc_bins/update_bootloader_radio_image.mk
+endif
diff --git a/envsetup.sh b/envsetup.sh
index 06dadd3..3fed5ae 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -442,6 +442,7 @@
 function lunch()
 {
     local answer
+    setup_cog_env_if_needed
 
     if [[ $# -gt 1 ]]; then
         echo "usage: lunch [target]" >&2
@@ -1079,10 +1080,7 @@
         done
     done
 
-    if [[ "${PWD}" == /google/cog/* ]]; then
-        f="build/make/cogsetup.sh"
-        echo "including $f"; . "$T/$f"
-    fi
+    setup_cog_env_if_needed
 }
 
 function showcommands() {
diff --git a/shell_utils.sh b/shell_utils.sh
index 86f3f49..c4a6756 100644
--- a/shell_utils.sh
+++ b/shell_utils.sh
@@ -63,6 +63,70 @@
 }
 fi
 
+# This function sets up the build environment to be appropriate for Cog.
+function setup_cog_env_if_needed() {
+  local top=$(gettop)
+
+  # return early if not in a cog workspace
+  if [[ ! "$top" =~ ^/google/cog ]]; then
+    return 0
+  fi
+
+  setup_cog_symlink
+
+  export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog"
+
+  # Running repo command within Cog workspaces is not supported, so override
+  # it with this function. If the user is running repo within a Cog workspace,
+  # we'll fail with an error; otherwise, we run the original repo command with
+  # the given args.
+  if ! ORIG_REPO_PATH=`which repo`; then
+    return 0
+  fi
+  function repo {
+    if [[ "${PWD}" == /google/cog/* ]]; then
+      echo -e "\e[01;31mERROR:\e[0m repo command is disallowed within Cog workspaces."
+      kill -INT $$ # exits the script without exiting the user's shell
+    fi
+    ${ORIG_REPO_PATH} "$@"
+  }
+}
+
+# Creates a symlink for the out/ dir when inside a Cog workspace.
+function setup_cog_symlink() {
+  local out_dir=$(getoutdir)
+  local top=$(gettop)
+
+  # return early if out dir is already a symlink
+  if [[ -L "$out_dir" ]]; then
+    return 0
+  fi
+
+  # return early if out dir is not in the workspace
+  if [[ ! "$out_dir" =~ ^$top/ ]]; then
+    return 0
+  fi
+
+  local link_destination="${HOME}/.cog/android-build-out"
+
+  # remove existing out/ dir if it exists
+  if [[ -d "$out_dir" ]]; then
+    echo "Detected existing out/ directory in the Cog workspace which is not supported. Repairing workspace by removing it and creating the symlink to ~/.cog/android-build-out"
+    if ! rm -rf "$out_dir"; then
+      echo "Failed to remove existing out/ directory: $out_dir" >&2
+      kill -INT $$ # exits the script without exiting the user's shell
+    fi
+  fi
+
+  # create symlink
+  echo "Creating symlink: $out_dir -> $link_destination"
+  mkdir -p ${link_destination}
+  if ! ln -s "$link_destination" "$out_dir"; then
+    echo "Failed to create cog symlink: $out_dir -> $link_destination" >&2
+    kill -INT $$ # exits the script without exiting the user's shell
+  fi
+}
+
 function getoutdir
 {
     local top=$(gettop)
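
For reference, a rough sketch of the resulting Cog flow from a user's shell: lunch now calls setup_cog_env_if_needed, which repoints out/ at ~/.cog/android-build-out and shadows repo inside the workspace. The workspace path and lunch target below are purely illustrative:

    cd /google/cog/...          # illustrative Cog workspace path
    source build/envsetup.sh
    lunch <target>              # runs setup_cog_env_if_needed
    readlink out                # -> ~/.cog/android-build-out
    repo sync                   # now rejected inside a Cog workspace
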
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index d806c06..74ed82d 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -345,6 +345,11 @@
         com.android.webview.bootstrap
 endif
 
+ifneq (,$(RELEASE_RANGING_STACK))
+    PRODUCT_PACKAGES += \
+        com.android.ranging
+endif
+
 # VINTF data for system image
 PRODUCT_PACKAGES += \
     system_manifest.xml \
@@ -499,6 +504,10 @@
     unwind_reg_info \
     unwind_symbols \
 
+# For Remotely Provisioned Certificate Processor
+PRODUCT_SYSTEM_PROPERTIES += \
+    remote_provisioning.use_cert_processor=false
+
 # The set of packages whose code can be loaded by the system server.
 PRODUCT_SYSTEM_SERVER_APPS += \
     SettingsProvider \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 1a3f2cf..668f054 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -76,6 +76,7 @@
     com.android.mediaprovider:framework-mediaprovider \
     com.android.mediaprovider:framework-pdf \
     com.android.mediaprovider:framework-pdf-v \
+    com.android.mediaprovider:framework-photopicker \
     com.android.ondevicepersonalization:framework-ondevicepersonalization \
     com.android.os.statsd:framework-statsd \
     com.android.permission:framework-permission \
@@ -113,6 +114,12 @@
 
 endif
 
+ifneq (,$(RELEASE_RANGING_STACK))
+    PRODUCT_APEX_BOOT_JARS += \
+        com.android.uwb:framework-ranging
+    $(call soong_config_set,bootclasspath,release_ranging_stack,true)
+endif
+
 # List of system_server classpath jars delivered via apex.
 # Keep the list sorted by module names and then library names.
 # Note: For modules available in Q, DO NOT add new entries here.
@@ -168,6 +175,11 @@
 
 endif
 
+ifneq (,$(RELEASE_RANGING_STACK))
+    PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS += \
+        com.android.uwb:service-ranging
+endif
+
 # Overrides the (apex, jar) pairs above when determining the on-device location. The format is:
 # <old_apex>:<old_jar>:<new_apex>:<new_jar>
 PRODUCT_CONFIGURED_JAR_LOCATION_OVERRIDES := \
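
A hedged way to check that the ranging jars are picked up when RELEASE_RANGING_STACK is set, assuming the get_build_var helper from envsetup.sh:

    # Should print com.android.uwb:framework-ranging / service-ranging when enabled.
    get_build_var PRODUCT_APEX_BOOT_JARS | tr ' ' '\n' | grep ranging
    get_build_var PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS | tr ' ' '\n' | grep ranging
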
diff --git a/target/product/generic/Android.bp b/target/product/generic/Android.bp
new file mode 100644
index 0000000..c14fa17
--- /dev/null
+++ b/target/product/generic/Android.bp
@@ -0,0 +1,826 @@
+android_rootdirs = [
+    "acct",
+    "apex",
+    "bootstrap-apex",
+    "config",
+    "data",
+    "data_mirror",
+    "debug_ramdisk",
+    "dev",
+    "linkerconfig",
+    "metadata",
+    "mnt",
+    "odm",
+    "odm_dlkm",
+    "oem",
+    "postinstall",
+    "proc",
+    "product",
+    "second_stage_resources",
+    "storage",
+    "sys",
+    "system",
+    "system_dlkm",
+    "system_ext",
+    "tmp",
+    "vendor",
+    "vendor_dlkm",
+]
+
+android_symlinks = [
+    {
+        target: "/system/bin/init",
+        name: "init",
+    },
+    {
+        target: "/system/etc",
+        name: "etc",
+    },
+    {
+        target: "/system/bin",
+        name: "bin",
+    },
+    {
+        target: "/product",
+        name: "system/product",
+    },
+    {
+        target: "/vendor",
+        name: "system/vendor",
+    },
+    {
+        target: "/system_ext",
+        name: "system/system_ext",
+    },
+    {
+        target: "/system_dlkm/lib/modules",
+        name: "system/lib/modules",
+    },
+    {
+        target: "/data/user_de/0/com.android.shell/files/bugreports",
+        name: "bugreports",
+    },
+    {
+        target: "/data/cache",
+        name: "cache",
+    },
+    {
+        target: "/sys/kernel/debug",
+        name: "d",
+    },
+    {
+        target: "/storage/self/primary",
+        name: "sdcard",
+    },
+    {
+        target: "/product/etc/security/adb_keys",
+        name: "adb_keys",
+    },
+]
+
+filegroup {
+    name: "generic_system_sign_key",
+    srcs: [":avb_testkey_rsa4096"],
+}
+
+phony {
+    name: "generic_system_fonts",
+    required: [
+        "AndroidClock.ttf",
+        "CarroisGothicSC-Regular.ttf",
+        "ComingSoon.ttf",
+        "CutiveMono.ttf",
+        "DancingScript-Regular.ttf",
+        "DroidSansMono.ttf",
+        "NotoColorEmoji.ttf",
+        "NotoColorEmojiFlags.ttf",
+        "NotoNaskhArabic-Bold.ttf",
+        "NotoNaskhArabic-Regular.ttf",
+        "NotoNaskhArabicUI-Bold.ttf",
+        "NotoNaskhArabicUI-Regular.ttf",
+        "NotoSansAdlam-VF.ttf",
+        "NotoSansAhom-Regular.otf",
+        "NotoSansAnatolianHieroglyphs-Regular.otf",
+        "NotoSansArmenian-VF.ttf",
+        "NotoSansAvestan-Regular.ttf",
+        "NotoSansBalinese-Regular.ttf",
+        "NotoSansBamum-Regular.ttf",
+        "NotoSansBassaVah-Regular.otf",
+        "NotoSansBatak-Regular.ttf",
+        "NotoSansBengali-VF.ttf",
+        "NotoSansBengaliUI-VF.ttf",
+        "NotoSansBhaiksuki-Regular.otf",
+        "NotoSansBrahmi-Regular.ttf",
+        "NotoSansBuginese-Regular.ttf",
+        "NotoSansBuhid-Regular.ttf",
+        "NotoSansCJK-Regular.ttc",
+        "NotoSansCanadianAboriginal-Regular.ttf",
+        "NotoSansCarian-Regular.ttf",
+        "NotoSansChakma-Regular.otf",
+        "NotoSansCham-Bold.ttf",
+        "NotoSansCham-Regular.ttf",
+        "NotoSansCherokee-Regular.ttf",
+        "NotoSansCoptic-Regular.ttf",
+        "NotoSansCuneiform-Regular.ttf",
+        "NotoSansCypriot-Regular.ttf",
+        "NotoSansDeseret-Regular.ttf",
+        "NotoSansDevanagari-VF.ttf",
+        "NotoSansDevanagariUI-VF.ttf",
+        "NotoSansEgyptianHieroglyphs-Regular.ttf",
+        "NotoSansElbasan-Regular.otf",
+        "NotoSansEthiopic-VF.ttf",
+        "NotoSansGeorgian-VF.ttf",
+        "NotoSansGlagolitic-Regular.ttf",
+        "NotoSansGothic-Regular.ttf",
+        "NotoSansGrantha-Regular.ttf",
+        "NotoSansGujarati-Bold.ttf",
+        "NotoSansGujarati-Regular.ttf",
+        "NotoSansGujaratiUI-Bold.ttf",
+        "NotoSansGujaratiUI-Regular.ttf",
+        "NotoSansGunjalaGondi-Regular.otf",
+        "NotoSansGurmukhi-VF.ttf",
+        "NotoSansGurmukhiUI-VF.ttf",
+        "NotoSansHanifiRohingya-Regular.otf",
+        "NotoSansHanunoo-Regular.ttf",
+        "NotoSansHatran-Regular.otf",
+        "NotoSansHebrew-Bold.ttf",
+        "NotoSansHebrew-Regular.ttf",
+        "NotoSansImperialAramaic-Regular.ttf",
+        "NotoSansInscriptionalPahlavi-Regular.ttf",
+        "NotoSansInscriptionalParthian-Regular.ttf",
+        "NotoSansJavanese-Regular.otf",
+        "NotoSansKaithi-Regular.ttf",
+        "NotoSansKannada-VF.ttf",
+        "NotoSansKannadaUI-VF.ttf",
+        "NotoSansKayahLi-Regular.ttf",
+        "NotoSansKharoshthi-Regular.ttf",
+        "NotoSansKhmer-VF.ttf",
+        "NotoSansKhmerUI-Bold.ttf",
+        "NotoSansKhmerUI-Regular.ttf",
+        "NotoSansKhojki-Regular.otf",
+        "NotoSansLao-Bold.ttf",
+        "NotoSansLao-Regular.ttf",
+        "NotoSansLaoUI-Bold.ttf",
+        "NotoSansLaoUI-Regular.ttf",
+        "NotoSansLepcha-Regular.ttf",
+        "NotoSansLimbu-Regular.ttf",
+        "NotoSansLinearA-Regular.otf",
+        "NotoSansLinearB-Regular.ttf",
+        "NotoSansLisu-Regular.ttf",
+        "NotoSansLycian-Regular.ttf",
+        "NotoSansLydian-Regular.ttf",
+        "NotoSansMalayalam-VF.ttf",
+        "NotoSansMalayalamUI-VF.ttf",
+        "NotoSansMandaic-Regular.ttf",
+        "NotoSansManichaean-Regular.otf",
+        "NotoSansMarchen-Regular.otf",
+        "NotoSansMasaramGondi-Regular.otf",
+        "NotoSansMedefaidrin-VF.ttf",
+        "NotoSansMeeteiMayek-Regular.ttf",
+        "NotoSansMeroitic-Regular.otf",
+        "NotoSansMiao-Regular.otf",
+        "NotoSansModi-Regular.ttf",
+        "NotoSansMongolian-Regular.ttf",
+        "NotoSansMro-Regular.otf",
+        "NotoSansMultani-Regular.otf",
+        "NotoSansMyanmar-Bold.otf",
+        "NotoSansMyanmar-Medium.otf",
+        "NotoSansMyanmar-Regular.otf",
+        "NotoSansMyanmarUI-Bold.otf",
+        "NotoSansMyanmarUI-Medium.otf",
+        "NotoSansMyanmarUI-Regular.otf",
+        "NotoSansNKo-Regular.ttf",
+        "NotoSansNabataean-Regular.otf",
+        "NotoSansNewTaiLue-Regular.ttf",
+        "NotoSansNewa-Regular.otf",
+        "NotoSansOgham-Regular.ttf",
+        "NotoSansOlChiki-Regular.ttf",
+        "NotoSansOldItalic-Regular.ttf",
+        "NotoSansOldNorthArabian-Regular.otf",
+        "NotoSansOldPermic-Regular.otf",
+        "NotoSansOldPersian-Regular.ttf",
+        "NotoSansOldSouthArabian-Regular.ttf",
+        "NotoSansOldTurkic-Regular.ttf",
+        "NotoSansOriya-Bold.ttf",
+        "NotoSansOriya-Regular.ttf",
+        "NotoSansOriyaUI-Bold.ttf",
+        "NotoSansOriyaUI-Regular.ttf",
+        "NotoSansOsage-Regular.ttf",
+        "NotoSansOsmanya-Regular.ttf",
+        "NotoSansPahawhHmong-Regular.otf",
+        "NotoSansPalmyrene-Regular.otf",
+        "NotoSansPauCinHau-Regular.otf",
+        "NotoSansPhagsPa-Regular.ttf",
+        "NotoSansPhoenician-Regular.ttf",
+        "NotoSansRejang-Regular.ttf",
+        "NotoSansRunic-Regular.ttf",
+        "NotoSansSamaritan-Regular.ttf",
+        "NotoSansSaurashtra-Regular.ttf",
+        "NotoSansSharada-Regular.otf",
+        "NotoSansShavian-Regular.ttf",
+        "NotoSansSinhala-VF.ttf",
+        "NotoSansSinhalaUI-VF.ttf",
+        "NotoSansSoraSompeng-Regular.otf",
+        "NotoSansSoyombo-VF.ttf",
+        "NotoSansSundanese-Regular.ttf",
+        "NotoSansSylotiNagri-Regular.ttf",
+        "NotoSansSymbols-Regular-Subsetted.ttf",
+        "NotoSansSymbols-Regular-Subsetted2.ttf",
+        "NotoSansSyriacEastern-Regular.ttf",
+        "NotoSansSyriacEstrangela-Regular.ttf",
+        "NotoSansSyriacWestern-Regular.ttf",
+        "NotoSansTagalog-Regular.ttf",
+        "NotoSansTagbanwa-Regular.ttf",
+        "NotoSansTaiLe-Regular.ttf",
+        "NotoSansTaiTham-Regular.ttf",
+        "NotoSansTaiViet-Regular.ttf",
+        "NotoSansTakri-VF.ttf",
+        "NotoSansTamil-VF.ttf",
+        "NotoSansTamilUI-VF.ttf",
+        "NotoSansTelugu-VF.ttf",
+        "NotoSansTeluguUI-VF.ttf",
+        "NotoSansThaana-Bold.ttf",
+        "NotoSansThaana-Regular.ttf",
+        "NotoSansThai-Bold.ttf",
+        "NotoSansThai-Regular.ttf",
+        "NotoSansThaiUI-Bold.ttf",
+        "NotoSansThaiUI-Regular.ttf",
+        "NotoSansTifinagh-Regular.otf",
+        "NotoSansUgaritic-Regular.ttf",
+        "NotoSansVai-Regular.ttf",
+        "NotoSansWancho-Regular.otf",
+        "NotoSansWarangCiti-Regular.otf",
+        "NotoSansYi-Regular.ttf",
+        "NotoSerif-Bold.ttf",
+        "NotoSerif-BoldItalic.ttf",
+        "NotoSerif-Italic.ttf",
+        "NotoSerif-Regular.ttf",
+        "NotoSerifArmenian-VF.ttf",
+        "NotoSerifBengali-VF.ttf",
+        "NotoSerifCJK-Regular.ttc",
+        "NotoSerifDevanagari-VF.ttf",
+        "NotoSerifDogra-Regular.ttf",
+        "NotoSerifEthiopic-VF.ttf",
+        "NotoSerifGeorgian-VF.ttf",
+        "NotoSerifGujarati-VF.ttf",
+        "NotoSerifGurmukhi-VF.ttf",
+        "NotoSerifHebrew-Bold.ttf",
+        "NotoSerifHebrew-Regular.ttf",
+        "NotoSerifHentaigana.ttf",
+        "NotoSerifKannada-VF.ttf",
+        "NotoSerifKhmer-Bold.otf",
+        "NotoSerifKhmer-Regular.otf",
+        "NotoSerifLao-Bold.ttf",
+        "NotoSerifLao-Regular.ttf",
+        "NotoSerifMalayalam-VF.ttf",
+        "NotoSerifMyanmar-Bold.otf",
+        "NotoSerifMyanmar-Regular.otf",
+        "NotoSerifNyiakengPuachueHmong-VF.ttf",
+        "NotoSerifSinhala-VF.ttf",
+        "NotoSerifTamil-VF.ttf",
+        "NotoSerifTelugu-VF.ttf",
+        "NotoSerifThai-Bold.ttf",
+        "NotoSerifThai-Regular.ttf",
+        "NotoSerifTibetan-VF.ttf",
+        "NotoSerifYezidi-VF.ttf",
+        "Roboto-Regular.ttf",
+        "RobotoFlex-Regular.ttf",
+        "RobotoStatic-Regular.ttf",
+        "SourceSansPro-Bold.ttf",
+        "SourceSansPro-BoldItalic.ttf",
+        "SourceSansPro-Italic.ttf",
+        "SourceSansPro-Regular.ttf",
+        "SourceSansPro-SemiBold.ttf",
+        "SourceSansPro-SemiBoldItalic.ttf",
+        "font_fallback.xml",
+        "fonts.xml",
+    ],
+}
+
+android_system_image {
+    name: "generic_system_image",
+
+    partition_name: "system",
+    base_dir: "system",
+    dirs: android_rootdirs,
+    symlinks: android_symlinks,
+    file_contexts: ":plat_file_contexts",
+    linker_config_src: ":system_linker_config_json_file",
+    fsverity: {
+        inputs: [
+            "etc/boot-image.prof",
+            "etc/classpaths/*.pb",
+            "etc/dirty-image-objects",
+            "etc/preloaded-classes",
+            "framework/*",
+            "framework/*/*", // framework/{arch}
+            "framework/oat/*/*", // framework/oat/{arch}
+        ],
+        libs: [":framework-res{.export-package.apk}"],
+    },
+    build_logtags: true,
+    gen_aconfig_flags_pb: true,
+
+    compile_multilib: "both",
+
+    use_avb: true,
+    avb_private_key: ":generic_system_sign_key",
+    avb_algorithm: "SHA256_RSA4096",
+    avb_hash_algorithm: "sha256",
+
+    deps: [
+        "abx",
+        "aconfigd",
+        "aflags",
+        "am",
+        "android.software.credentials.prebuilt.xml", // generic_system
+        "android.software.webview.prebuilt.xml", // media_system
+        "android.software.window_magnification.prebuilt.xml", // handheld_system
+        "android.system.suspend-service",
+        "prebuilt_vintf_manifest",
+        "apexd",
+        "appops",
+        "approved-ogki-builds.xml", // base_system
+        "appwidget",
+        "atrace",
+        "audioserver",
+        "bcc",
+        "blank_screen",
+        "blkid",
+        "bmgr",
+        "bootanimation",
+        "bootstat",
+        "bpfloader",
+        "bu",
+        "bugreport",
+        "bugreportz",
+        "cameraserver",
+        "cgroups.json",
+        "cmd",
+        "content",
+        "cppreopts.sh", // generic_system
+        "credstore",
+        "debuggerd",
+        "device_config",
+        "dirty-image-objects",
+        "dmctl",
+        "dmesgd",
+        "dnsmasq",
+        "dpm",
+        "dump.erofs",
+        "dumpstate",
+        "dumpsys",
+        "e2fsck",
+        "enhanced-confirmation.xml", // base_system
+        "etc_hosts",
+        "flags_health_check",
+        "framework-audio_effects.xml", // for handheld // handheld_system
+        "framework-sysconfig.xml",
+        "fs_config_dirs_system",
+        "fs_config_files_system",
+        "fsck.erofs",
+        "fsck.f2fs", // for media_system
+        "fsck_msdos",
+        "fsverity-release-cert-der",
+        "gatekeeperd",
+        "gpu_counter_producer",
+        "gpuservice",
+        "group_system",
+        "gsi_tool",
+        "gsid",
+        "heapprofd",
+        "hid",
+        "hiddenapi-package-whitelist.xml", // from runtime_libart
+        "idc_data",
+        "idmap2",
+        "idmap2d",
+        "ime",
+        "incident",
+        "incident-helper-cmd",
+        "incident_helper",
+        "incidentd",
+        "init.environ.rc-soong",
+        "init.usb.configfs.rc",
+        "init.usb.rc",
+        "init.zygote32.rc",
+        "init.zygote64.rc",
+        "init.zygote64_32.rc",
+        "init_first_stage", // for boot partition
+        "initial-package-stopped-states.xml",
+        "input",
+        "installd",
+        "ip", // base_system
+        "iptables",
+        "kcmdlinectrl",
+        "kernel-lifetimes.xml", // base_system
+        "keychars_data",
+        "keylayout_data",
+        "keystore2",
+        "ld.mc",
+        "llkd", // base_system
+        "lmkd", // base_system
+        "local_time.default", // handheld_vendo
+        "locksettings", // base_system
+        "logcat", // base_system
+        "logd", // base_system
+        "logpersist.start",
+        "lpdump", // base_system
+        "lshal", // base_system
+        "make_f2fs", // media_system
+        "mdnsd", // base_system
+        "media_profiles_V1_0.dtd", // base_system
+        "mediacodec.policy", // base_system
+        "mediaextractor", // base_system
+        "mediametrics", // base_system
+        "misctrl", // from base_system
+        "mke2fs", // base_system
+        "mkfs.erofs", // base_system
+        "monkey", // base_system
+        "mtectrl", // base_system
+        "ndc", // base_system
+        "netd", // base_system
+        "netutils-wrapper-1.0", // full_base
+        "notice_xml_system",
+        "odsign", // base_system
+        "otapreopt_script", // generic_system
+        "package-shareduid-allowlist.xml", // base_system
+        "passwd_system", // base_system
+        "perfetto", // base_system
+        "ping", // base_system
+        "ping6", // base_system
+        "pintool", // base_system
+        "platform.xml", // base_system
+        "pm", // base_system
+        "preinstalled-packages-asl-files.xml", // base_system
+        "preinstalled-packages-platform-generic-system.xml", // generic_system
+        "preinstalled-packages-platform-handheld-system.xml", // handheld_system
+        "preinstalled-packages-platform.xml", // base_system
+        "preinstalled-packages-strict-signature.xml", // base_system
+        "preloaded-classes", // ok
+        "printflags", // base_system
+        "privapp-permissions-platform.xml", // base_system
+        "prng_seeder", // base_system
+        "public.libraries.android.txt",
+        "recovery-persist", // base_system
+        "recovery-refresh", // generic_system
+        "requestsync", // media_system
+        "resize2fs", // base_system
+        "rss_hwm_reset", // base_system
+        "run-as", // base_system
+        "schedtest", // base_system
+        "screencap", // base_system
+        "screenrecord", // handheld_system
+        "sdcard", // base_system
+        "secdiscard", // base_system
+        "sensorservice", // base_system
+        "service", // base_system
+        "servicemanager", // base_system
+        "settings", // base_system
+        "sfdo", // base_system
+        "sgdisk", // base_system
+        "sm", // base_system
+        "snapshotctl", // base_system
+        "snapuserd", // base_system
+        "snapuserd_ramdisk", // ramdisk
+        "storaged", // base_system
+        "surfaceflinger", // base_system
+        "svc", // base_system
+        "task_profiles.json", // base_system
+        "tc", // base_system
+        "telecom", // base_system
+        "tombstoned", // base_system
+        "traced", // base_system
+        "traced_probes", // base_system
+        "tune2fs", // base_system
+        "uiautomator", // base_system
+        "uinput", // base_system
+        "uncrypt", // base_system
+        "update_engine", // generic_system
+        "update_engine_sideload", // recovery
+        "update_verifier", // generic_system
+        "usbd", // base_system
+        "vdc", // base_system
+        "virtual_camera", // handheld_system // release_package_virtual_camera
+        "vold", // base_system
+        "vr", // handheld_system
+        "watchdogd", // base_system
+        "wifi.rc", // base_system
+        "wificond", // base_system
+        "wm", // base_system
+    ] + select(release_flag("RELEASE_PLATFORM_VERSION_CODENAME"), {
+        "REL": [],
+        default: [
+            "android.software.preview_sdk.prebuilt.xml", // media_system
+        ],
+    }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), {
+        "true": [
+            "trace_redactor", // base_system (RELEASE_PACKAGE_PROFILING_MODULE)
+        ],
+        default: [],
+    }) + select(product_variable("debuggable"), {
+        true: [
+            "adevice_fingerprint",
+            "arping",
+            "avbctl",
+            "bootctl",
+            "dmuserd",
+            "evemu-record",
+            "idlcli",
+            "init-debug.rc",
+            "iotop",
+            "iperf3",
+            "iw",
+            "layertracegenerator",
+            "logtagd.rc",
+            "ot-cli-ftd",
+            "ot-ctl",
+            "procrank",
+            "profcollectctl",
+            "profcollectd",
+            "record_binder",
+            "sanitizer-status",
+            "servicedispatcher",
+            "showmap",
+            "sqlite3",
+            "ss",
+            "start_with_lockagent",
+            "strace",
+            "su",
+            "tinycap",
+            "tinyhostless",
+            "tinymix",
+            "tinypcminfo",
+            "tinyplay", // host
+            "tracepath",
+            "tracepath6",
+            "traceroute6",
+            "unwind_info",
+            "unwind_reg_info",
+            "unwind_symbols",
+            "update_engine_client",
+        ],
+        default: [],
+    }),
+    multilib: {
+        common: {
+            deps: [
+                "BackupRestoreConfirmation", // base_system
+                "BasicDreams", // handheld_system
+                "BlockedNumberProvider", // handheld_system
+                "BluetoothMidiService", // handheld_system
+                "BookmarkProvider", // handheld_system
+                "BuiltInPrintService", // handheld_system
+                "CalendarProvider", // handheld_system
+                "CallLogBackup", // telephony_system
+                "CameraExtensionsProxy", // handheld_system
+                "CaptivePortalLogin", // handheld_system
+                "CarrierDefaultApp", // telephony_system
+                "CellBroadcastLegacyApp", // telephony_system
+                "CertInstaller", // handheld_system
+                "CompanionDeviceManager", // media_system
+                "ContactsProvider", // base_system
+                "CredentialManager", // handheld_system
+                "DeviceAsWebcam", // handheld_system
+                "DocumentsUI", // handheld_system
+                "DownloadProvider", // base_system
+                "DownloadProviderUi", // handheld_system
+                "DynamicSystemInstallationService", // base_system
+                "E2eeContactKeysProvider", // base_system
+                "EasterEgg", // handheld_system
+                "ExtShared", // base_system
+                "ExternalStorageProvider", // handheld_system
+                "FusedLocation", // handheld_system
+                "HTMLViewer", // media_system
+                "InputDevices", // handheld_system
+                "IntentResolver", // base_system
+                "KeyChain", // handheld_system
+                "LiveWallpapersPicker", // generic_system, full_base
+                "LocalTransport", // base_system
+                "ManagedProvisioning", // handheld_system
+                "MediaProviderLegacy", // base_system
+                "MmsService", // handheld_system
+                "MtpService", // handheld_system
+                "MusicFX", // handheld_system
+                "NetworkStack", // base_system
+                "ONS", // telephony_system
+                "PacProcessor", // handheld_system
+                "PackageInstaller", // base_system
+                "PartnerBookmarksProvider", // generic_system
+                "PhotoTable", // full_base
+                "PrintRecommendationService", // handheld_system
+                "PrintSpooler", // handheld_system
+                "ProxyHandler", // handheld_system
+                "SecureElement", // handheld_system
+                "SettingsProvider", // base_system
+                "SharedStorageBackup", // handheld_system
+                "Shell", // base_system
+                "SimAppDialog", // handheld_system
+                "SoundPicker", // not installed by anyone
+                "StatementService", // media_system
+                "Stk", // generic_system
+                "Tag", // generic_system
+                "TeleService", // handheld_system
+                "Telecom", // handheld_system
+                "TelephonyProvider", // handheld_system
+                "Traceur", // handheld_system
+                "UserDictionaryProvider", // handheld_system
+                "VpnDialogs", // handheld_system
+                "WallpaperBackup", // base_system
+                "adbd_system_api", // base_system
+                "android.hidl.base-V1.0-java", // base_system
+                "android.hidl.manager-V1.0-java", // base_system
+                "android.test.base", // from runtime_libart
+                "android.test.mock", // base_system
+                "android.test.runner", // base_system
+                "aosp_mainline_modules", // ok
+                "build_flag_system", // base_system
+                "charger_res_images", // generic_system
+                "com.android.apex.cts.shim.v1_prebuilt", // ok
+                "com.android.cellbroadcast", // telephony_system
+                "com.android.future.usb.accessory", // media_system
+                "com.android.location.provider", // base_system
+                "com.android.media.remotedisplay", // media_system
+                "com.android.media.remotedisplay.xml", // media_system
+                "com.android.mediadrm.signer", // media_system
+                "com.android.nfc_extras", // ok
+                "com.android.nfcservices", // base_system (RELEASE_PACKAGE_NFC_STACK != NfcNci)
+                "com.android.runtime", // ok
+                "dex_bootjars",
+                "ext", // from runtime_libart
+                "framework-graphics", // base_system
+                "framework-location", // base_system
+                "framework-minus-apex-install-dependencies", // base_system
+                "framework_compatibility_matrix.device.xml",
+                "generic_system_fonts", // ok
+                "hwservicemanager_compat_symlink_module", // base_system
+                "hyph-data",
+                "ims-common", // base_system
+                "init_system", // base_system
+                "javax.obex", // base_system
+                "llndk.libraries.txt", //ok
+                "org.apache.http.legacy", // base_system
+                "perfetto-extras", // system
+                "sanitizer.libraries.txt", // base_system
+                "selinux_policy_system_soong", // ok
+                "services", // base_system
+                "shell_and_utilities_system", // ok
+                "system-build.prop",
+                "system_compatibility_matrix.xml", //base_system
+                "telephony-common", // libs from TeleService
+                "voip-common", // base_system
+            ] + select(soong_config_variable("ANDROID", "release_crashrecovery_module"), {
+                "true": [
+                    "com.android.crashrecovery", // base_system (RELEASE_CRASHRECOVERY_MODULE)
+                ],
+                default: [],
+            }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), {
+                "true": [
+                    "com.android.profiling", // base_system (RELEASE_PACKAGE_PROFILING_MODULE)
+                ],
+                default: [],
+            }) + select(release_flag("RELEASE_AVATAR_PICKER_APP"), {
+                true: [
+                    "AvatarPicker", // generic_system (RELEASE_AVATAR_PICKER_APP)
+                ],
+                default: [],
+            }),
+        },
+        prefer32: {
+            deps: [
+                "drmserver", // media_system
+                "mediaserver", // base_system
+            ],
+        },
+        lib64: {
+            deps: [
+                "android.system.virtualizationcommon-ndk",
+                "android.system.virtualizationservice-ndk",
+                "libgsi",
+                "servicemanager",
+            ],
+        },
+        both: {
+            deps: [
+                "android.hardware.biometrics.fingerprint@2.1", // generic_system
+                "android.hardware.radio.config@1.0", // generic_system
+                "android.hardware.radio.deprecated@1.0", // generic_system
+                "android.hardware.radio@1.0", // generic_system
+                "android.hardware.radio@1.1", // generic_system
+                "android.hardware.radio@1.2", // generic_system
+                "android.hardware.radio@1.3", // generic_system
+                "android.hardware.radio@1.4", // generic_system
+                "android.hardware.secure_element@1.0", // generic_system
+                "app_process", // base_system
+                "boringssl_self_test", // base_system
+                "heapprofd_client", // base_system
+                "libEGL", // base_system
+                "libEGL_angle", // base_system
+                "libETC1", // base_system
+                "libFFTEm", // base_system
+                "libGLESv1_CM", // base_system
+                "libGLESv1_CM_angle", // base_system
+                "libGLESv2", // base_system
+                "libGLESv2_angle", // base_system
+                "libGLESv3", // base_system
+                "libOpenMAXAL", // base_system
+                "libOpenSLES", // base_system
+                "libaaudio", // base_system
+                "libalarm_jni", // base_system
+                "libamidi", // base_system
+                "libandroid",
+                "libandroid_runtime",
+                "libandroid_servers",
+                "libandroidfw",
+                "libartpalette-system",
+                "libaudio-resampler", // generic-system
+                "libaudioeffect_jni",
+                "libaudiohal", // generic-system
+                "libaudiopolicyengineconfigurable", // generic-system
+                "libbinder",
+                "libbinder_ndk",
+                "libbinder_rpc_unstable",
+                "libcamera2ndk",
+                "libclang_rt.asan",
+                "libcompiler_rt",
+                "libcutils", // used by many libs
+                "libdmabufheap", // used by many libs
+                "libdrm", // used by many libs // generic_system
+                "libdrmframework", // base_system
+                "libdrmframework_jni", // base_system
+                "libfdtrack", // base_system
+                "libfilterfw", // base_system
+                "libfilterpack_imageproc", // media_system
+                "libfwdlockengine", // generic_system
+                "libgatekeeper", // base_system
+                "libgui", // base_system
+                "libhardware", // base_system
+                "libhardware_legacy", // base_system
+                "libhidltransport", // generic_system
+                "libhwbinder", // generic_system
+                "libinput", // base_system
+                "libinputflinger", // base_system
+                "libiprouteutil", // base_system
+                "libjnigraphics", // base_system
+                "libjpeg", // base_system
+                "liblog", // base_system
+                "liblogwrap", // generic_system
+                "liblz4", // generic_system
+                "libmedia", // base_system
+                "libmedia_jni", // base_system
+                "libmediandk", // base_system
+                "libminui", // generic_system
+                "libmtp", // base_system
+                "libnetd_client", // base_system
+                "libnetlink", // base_system
+                "libnetutils", // base_system
+                "libneuralnetworks_packageinfo", // base_system
+                "libnl", // generic_system
+                "libpdfium", // base_system
+                "libpolicy-subsystem", // generic_system
+                "libpower", // base_system
+                "libpowermanager", // base_system
+                "libprotobuf-cpp-full", // generic_system
+                "libradio_metadata", // base_system
+                "librs_jni", // handheld_system
+                "librtp_jni", // base_system
+                "libsensorservice", // base_system
+                "libsfplugin_ccodec", // base_system
+                "libskia", // base_system
+                "libsonic", // base_system
+                "libsonivox", // base_system
+                "libsoundpool", // base_system
+                "libspeexresampler", // base_system
+                "libsqlite", // base_system
+                "libstagefright", // base_system
+                "libstagefright_foundation", // base_system
+                "libstagefright_omx", // base_system
+                "libstdc++", // base_system
+                "libsysutils", // base_system
+                "libui", // base_system
+                "libusbhost", // base_system
+                "libutils", // base_system
+                "libvendorsupport", // llndk library
+                "libvintf_jni", // base_system
+                "libvulkan", // base_system
+                "libwebviewchromium_loader", // media_system
+                "libwebviewchromium_plat_support", // media_system
+                "libwilhelm", // base_system
+                "linker", // base_system
+            ] + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_DRMSERVER"), {
+                "true": ["drmserver"],
+                default: [],
+            }) + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_MEDIASERVER"), {
+                "true": ["mediaserver"],
+                default: [],
+            }),
+        },
+    },
+}
+
+prebuilt_etc {
+    name: "prebuilt_vintf_manifest",
+    src: "manifest.xml",
+    filename: "manifest.xml",
+    relative_install_path: "vintf",
+    no_full_install: true,
+}
diff --git a/target/product/generic/OWNERS b/target/product/generic/OWNERS
new file mode 100644
index 0000000..6d1446f
--- /dev/null
+++ b/target/product/generic/OWNERS
@@ -0,0 +1,6 @@
+# Bug component: 1322713
+inseob@google.com
+jeongik@google.com
+jiyong@google.com
+justinyun@google.com
+kiyoungkim@google.com
diff --git a/target/product/generic/manifest.xml b/target/product/generic/manifest.xml
new file mode 100644
index 0000000..1df2c0d
--- /dev/null
+++ b/target/product/generic/manifest.xml
@@ -0,0 +1,54 @@
+<!--
+    Input:
+        system/libhidl/vintfdata/manifest.xml
+-->
+<manifest version="8.0" type="framework">
+    <hal format="hidl" max-level="6">
+        <name>android.frameworks.displayservice</name>
+        <transport>hwbinder</transport>
+        <fqname>@1.0::IDisplayService/default</fqname>
+    </hal>
+    <hal format="hidl" max-level="5">
+        <name>android.frameworks.schedulerservice</name>
+        <transport>hwbinder</transport>
+        <fqname>@1.0::ISchedulingPolicyService/default</fqname>
+    </hal>
+    <hal format="aidl">
+        <name>android.frameworks.sensorservice</name>
+        <fqname>ISensorManager/default</fqname>
+    </hal>
+    <hal format="hidl" max-level="8">
+        <name>android.frameworks.sensorservice</name>
+        <transport>hwbinder</transport>
+        <fqname>@1.0::ISensorManager/default</fqname>
+    </hal>
+    <hal format="hidl" max-level="8">
+        <name>android.hidl.memory</name>
+        <transport arch="32+64">passthrough</transport>
+        <fqname>@1.0::IMapper/ashmem</fqname>
+    </hal>
+    <hal format="hidl" max-level="7">
+        <name>android.system.net.netd</name>
+        <transport>hwbinder</transport>
+        <fqname>@1.1::INetd/default</fqname>
+    </hal>
+    <hal format="hidl" max-level="7">
+        <name>android.system.wifi.keystore</name>
+        <transport>hwbinder</transport>
+        <fqname>@1.0::IKeystore/default</fqname>
+    </hal>
+    <hal format="native">
+        <name>netutils-wrapper</name>
+        <version>1.0</version>
+    </hal>
+    <system-sdk>
+        <version>29</version>
+        <version>30</version>
+        <version>31</version>
+        <version>32</version>
+        <version>33</version>
+        <version>34</version>
+        <version>35</version>
+        <version>VanillaIceCream</version>
+    </system-sdk>
+</manifest>
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 0a09eb1..b9a623d 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -152,4 +152,5 @@
 $(call require-artifacts-in-path, $(_my_paths), $(_my_allowed_list))
 
 # Product config map to toggle between sources and prebuilts of required mainline modules
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline/required/release_config_map.textproto)
 PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline/required/release_config_map.textproto)
diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk
index c928530..ccc4f36 100644
--- a/target/product/go_defaults.mk
+++ b/target/product/go_defaults.mk
@@ -18,6 +18,7 @@
 $(call inherit-product, build/make/target/product/go_defaults_common.mk)
 
 # Product config map to toggle between sources and prebuilts of required mainline modules
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline_go/required/release_config_map.textproto)
 PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline_go/required/release_config_map.textproto)
 
 # Add the system properties.
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index fd4047a..0fcf16b 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -24,11 +24,6 @@
 # Speed profile services and wifi-service to reduce RAM and storage.
 PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile
 
-# Use a profile based boot image for this device. Note that this is currently a
-# generic profile and not Android Go optimized.
-PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := true
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := frameworks/base/config/boot-image-profile.txt
-
 # Do not generate libartd.
 PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false
 
diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp
index 45ba143..f18f35a 100644
--- a/target/product/gsi/Android.bp
+++ b/target/product/gsi/Android.bp
@@ -46,3 +46,18 @@
     installed_location: "etc/init/config",
     symlink_target: "/system/system_ext/etc/init/config",
 }
+
+// init.gsi.rc, GSI-specific init script.
+prebuilt_etc {
+    name: "init.gsi.rc",
+    src: "init.gsi.rc",
+    system_ext_specific: true,
+    relative_install_path: "init",
+}
+
+prebuilt_etc {
+    name: "init.vndk-nodef.rc",
+    src: "init.vndk-nodef.rc",
+    system_ext_specific: true,
+    relative_install_path: "gsi",
+}
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 36897fe..7291059 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -138,31 +138,3 @@
 
 
 include $(BUILD_PHONY_PACKAGE)
-
-#####################################################################
-# init.gsi.rc, GSI-specific init script.
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := init.gsi.rc
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_SRC_FILES := $(LOCAL_MODULE)
-LOCAL_MODULE_CLASS := ETC
-LOCAL_SYSTEM_EXT_MODULE := true
-LOCAL_MODULE_RELATIVE_PATH := init
-
-include $(BUILD_PREBUILT)
-
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := init.vndk-nodef.rc
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_SRC_FILES := $(LOCAL_MODULE)
-LOCAL_MODULE_CLASS := ETC
-LOCAL_SYSTEM_EXT_MODULE := true
-LOCAL_MODULE_RELATIVE_PATH := gsi
-
-include $(BUILD_PREBUILT)
diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp
index 0d7b35e..69d19a3 100644
--- a/target/product/security/Android.bp
+++ b/target/product/security/Android.bp
@@ -37,3 +37,7 @@
     relative_install_path: "security",
     filename: "otacerts.zip",
 }
+
+adb_keys {
+    name: "adb_keys",
+}
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
deleted file mode 100644
index 138e5bb..0000000
--- a/target/product/security/Android.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-#######################################
-# adb key, if configured via PRODUCT_ADB_KEYS
-ifdef PRODUCT_ADB_KEYS
-  ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
-    include $(CLEAR_VARS)
-    LOCAL_MODULE := adb_keys
-    LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-    LOCAL_LICENSE_CONDITIONS := notice
-    LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-    LOCAL_MODULE_CLASS := ETC
-    LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT_ETC)/security
-    LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS)
-    include $(BUILD_PREBUILT)
-  endif
-endif
diff --git a/teams/Android.bp b/teams/Android.bp
index 94585fc..96d241b 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -4440,3 +4440,17 @@
     // go/trendy/manage/engineers/5097003746426880
     trendy_team_id: "5097003746426880",
 }
+
+team {
+    name: "trendy_team_desktop_firmware",
+
+    // go/trendy/manage/engineers/5787938454863872
+    trendy_team_id: "5787938454863872",
+}
+
+team {
+    name: "trendy_team_art_cloud",
+
+    // go/trendy/manage/engineers/5121440647577600
+    trendy_team_id: "5121440647577600",
+}
diff --git a/tools/aconfig/aconfig/Android.bp b/tools/aconfig/aconfig/Android.bp
index 68521af..f4dd103 100644
--- a/tools/aconfig/aconfig/Android.bp
+++ b/tools/aconfig/aconfig/Android.bp
@@ -234,6 +234,7 @@
     name: "libaconfig_test_rust_library",
     crate_name: "aconfig_test_rust_library",
     aconfig_declarations: "aconfig.test.flags",
+    host_supported: true,
 }
 
 rust_test {
diff --git a/tools/aconfig/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs
index 2c569da..7a9c382 100644
--- a/tools/aconfig/aconfig/src/codegen/cpp.rs
+++ b/tools/aconfig/aconfig/src/codegen/cpp.rs
@@ -283,39 +283,23 @@
     virtual ~flag_provider_interface() = default;
 
     virtual bool disabled_ro() = 0;
-
-    virtual void disabled_ro(bool val) = 0;
-
     virtual bool disabled_rw() = 0;
-
-    virtual void disabled_rw(bool val) = 0;
-
     virtual bool disabled_rw_exported() = 0;
-
-    virtual void disabled_rw_exported(bool val) = 0;
-
     virtual bool disabled_rw_in_other_namespace() = 0;
-
-    virtual void disabled_rw_in_other_namespace(bool val) = 0;
-
     virtual bool enabled_fixed_ro() = 0;
-
-    virtual void enabled_fixed_ro(bool val) = 0;
-
     virtual bool enabled_fixed_ro_exported() = 0;
-
-    virtual void enabled_fixed_ro_exported(bool val) = 0;
-
     virtual bool enabled_ro() = 0;
-
-    virtual void enabled_ro(bool val) = 0;
-
     virtual bool enabled_ro_exported() = 0;
-
-    virtual void enabled_ro_exported(bool val) = 0;
-
     virtual bool enabled_rw() = 0;
 
+    virtual void disabled_ro(bool val) = 0;
+    virtual void disabled_rw(bool val) = 0;
+    virtual void disabled_rw_exported(bool val) = 0;
+    virtual void disabled_rw_in_other_namespace(bool val) = 0;
+    virtual void enabled_fixed_ro(bool val) = 0;
+    virtual void enabled_fixed_ro_exported(bool val) = 0;
+    virtual void enabled_ro(bool val) = 0;
+    virtual void enabled_ro_exported(bool val) = 0;
     virtual void enabled_rw(bool val) = 0;
 
     virtual void reset_flags() {}
diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
index dbc4ab5..a34166d 100644
--- a/tools/aconfig/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -137,6 +137,7 @@
     pub default_value: bool,
     pub device_config_namespace: String,
     pub device_config_flag: String,
+    pub flag_name: String,
     pub flag_name_constant_suffix: String,
     pub flag_offset: u16,
     pub is_read_write: bool,
@@ -156,6 +157,7 @@
         default_value: pf.state() == ProtoFlagState::ENABLED,
         device_config_namespace: pf.namespace().to_string(),
         device_config_flag,
+        flag_name: pf.name().to_string(),
         flag_name_constant_suffix: pf.name().to_ascii_uppercase(),
         flag_offset: *flag_offsets.get(pf.name()).expect("didnt find package offset :("),
         is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
@@ -507,97 +509,39 @@
             private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
         }"#;
 
-        let expected_featureflagsmpl_content_0 = r#"
+        let expected_featureflagsmpl_content = r#"
         package com.android.aconfig.test;
         // TODO(b/303773055): Remove the annotation after access issue is resolved.
         import android.compat.annotation.UnsupportedAppUsage;
         import android.provider.DeviceConfig;
         import android.provider.DeviceConfig.Properties;
-        "#;
+        import android.aconfig.storage.StorageInternalReader;
+        import java.nio.file.Files;
+        import java.nio.file.Paths;
 
-        let expected_featureflagsmpl_content_1 = r#"
         /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
+            private static final boolean isReadFromNew = Files.exists(Paths.get("/metadata/aconfig/boot/enable_only_new_storage"));
+            private static volatile boolean isCached = false;
             private static volatile boolean aconfig_test_is_cached = false;
             private static volatile boolean other_namespace_is_cached = false;
             private static boolean disabledRw = false;
             private static boolean disabledRwExported = false;
             private static boolean disabledRwInOtherNamespace = false;
             private static boolean enabledRw = true;
-        "#;
-        let expected_featureflagsmpl_content_2 = r#"
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean disabledRo() {
-                return false;
-            }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean disabledRw() {
-                if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+            private void init() {
+                StorageInternalReader reader = null;
+                try {
+                    reader = new StorageInternalReader("system", "com.android.aconfig.test");
+                    disabledRw = reader.getBooleanFlagValue(1);
+                    disabledRwExported = reader.getBooleanFlagValue(2);
+                    enabledRw = reader.getBooleanFlagValue(8);
+                    disabledRwInOtherNamespace = reader.getBooleanFlagValue(3);
+                } catch (Exception e) {
+                    throw new RuntimeException("Cannot read flag in codegen", e);
                 }
-                return disabledRw;
+                isCached = true;
             }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean disabledRwExported() {
-                if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
-                }
-                return disabledRwExported;
-            }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean disabledRwInOtherNamespace() {
-                if (!other_namespace_is_cached) {
-                    load_overrides_other_namespace();
-                }
-                return disabledRwInOtherNamespace;
-            }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean enabledFixedRo() {
-                return true;
-            }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean enabledFixedRoExported() {
-                return true;
-            }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean enabledRo() {
-                return true;
-            }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean enabledRoExported() {
-                return true;
-            }
-            @Override
-            @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
-            public boolean enabledRw() {
-                if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
-                }
-                return enabledRw;
-            }
-        }
-        "#;
-
-        let expect_featureflagsimpl_content_old = expected_featureflagsmpl_content_0.to_owned()
-            + expected_featureflagsmpl_content_1
-            + r#"
             private void load_overrides_aconfig_test() {
                 try {
                     Properties properties = DeviceConfig.getProperties("aconfig_test");
@@ -636,196 +580,104 @@
                     );
                 }
                 other_namespace_is_cached = true;
-            }"#
-            + expected_featureflagsmpl_content_2;
+            }
+
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean disabledRo() {
+                return false;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean disabledRw() {
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!aconfig_test_is_cached) {
+                        load_overrides_aconfig_test();
+                    }
+                }
+                return disabledRw;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean disabledRwExported() {
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!aconfig_test_is_cached) {
+                        load_overrides_aconfig_test();
+                    }
+                }
+                return disabledRwExported;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean disabledRwInOtherNamespace() {
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!other_namespace_is_cached) {
+                        load_overrides_other_namespace();
+                    }
+                }
+                return disabledRwInOtherNamespace;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean enabledFixedRo() {
+                return true;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean enabledFixedRoExported() {
+                return true;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean enabledRo() {
+                return true;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean enabledRoExported() {
+                return true;
+            }
+            @Override
+            @com.android.aconfig.annotations.AconfigFlagAccessor
+            @UnsupportedAppUsage
+            public boolean enabledRw() {
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!aconfig_test_is_cached) {
+                        load_overrides_aconfig_test();
+                    }
+                }
+                return enabledRw;
+            }
+        }
+        "#;
 
         let mut file_set = HashMap::from([
             ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
-            (
-                "com/android/aconfig/test/FeatureFlagsImpl.java",
-                &expect_featureflagsimpl_content_old,
-            ),
-            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
-            (
-                "com/android/aconfig/test/CustomFeatureFlags.java",
-                EXPECTED_CUSTOMFEATUREFLAGS_CONTENT,
-            ),
-            (
-                "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
-                EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
-            ),
-        ]);
-
-        for file in generated_files {
-            let file_path = file.path.to_str().unwrap();
-            assert!(file_set.contains_key(file_path), "Cannot find {}", file_path);
-            assert_eq!(
-                None,
-                crate::test::first_significant_code_diff(
-                    file_set.get(file_path).unwrap(),
-                    &String::from_utf8(file.contents).unwrap()
-                ),
-                "File {} content is not correct",
-                file_path
-            );
-            file_set.remove(file_path);
-        }
-
-        assert!(file_set.is_empty());
-
-        let parsed_flags = crate::test::parse_test_flags();
-        let mode = CodegenMode::Production;
-        let modified_parsed_flags =
-            crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
-        let flag_ids =
-            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
-        let generated_files = generate_java_code(
-            crate::test::TEST_PACKAGE,
-            modified_parsed_flags.into_iter(),
-            mode,
-            flag_ids,
-            true,
-        )
-        .unwrap();
-
-        let expect_featureflagsimpl_content_new = expected_featureflagsmpl_content_0.to_owned()
-            + r#"
-            import android.aconfig.storage.StorageInternalReader;
-            import android.util.Log;
-            "#
-            + expected_featureflagsmpl_content_1
-            + r#"
-        StorageInternalReader reader;
-        boolean readFromNewStorage;
-
-        boolean useNewStorageValueAndDiscardOld = false;
-
-        private final static String TAG = "AconfigJavaCodegen";
-        private final static String SUCCESS_LOG = "success: %s value matches";
-        private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s";
-        private final static String ERROR_LOG = "error: failed to read flag value";
-
-        private void init() {
-            if (reader != null) return;
-            if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.storage_test_mission_1", false)) {
-                readFromNewStorage = true;
-                try {
-                    reader = new StorageInternalReader("system", "com.android.aconfig.test");
-                } catch (Exception e) {
-                    reader = null;
-                }
-            }
-
-            useNewStorageValueAndDiscardOld =
-                DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false);
-        }
-
-        private void load_overrides_aconfig_test() {
-            try {
-                Properties properties = DeviceConfig.getProperties("aconfig_test");
-                disabledRw =
-                    properties.getBoolean(Flags.FLAG_DISABLED_RW, false);
-                disabledRwExported =
-                    properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, false);
-                enabledRw =
-                    properties.getBoolean(Flags.FLAG_ENABLED_RW, true);
-            } catch (NullPointerException e) {
-                throw new RuntimeException(
-                    "Cannot read value from namespace aconfig_test "
-                    + "from DeviceConfig. It could be that the code using flag "
-                    + "executed before SettingsProvider initialization. Please use "
-                    + "fixed read-only flag by adding is_fixed_read_only: true in "
-                    + "flag declaration.",
-                    e
-                );
-            }
-            aconfig_test_is_cached = true;
-            init();
-            if (readFromNewStorage && reader != null) {
-                boolean val;
-                try {
-                    val = reader.getBooleanFlagValue(1);
-                    if (val == disabledRw) {
-                        Log.i(TAG, String.format(SUCCESS_LOG, "disabledRw"));
-                    } else {
-                        Log.i(TAG, String.format(MISMATCH_LOG, "disabledRw", val, disabledRw));
-                    }
-
-                    if (useNewStorageValueAndDiscardOld) {
-                        disabledRw = val;
-                    }
-
-                    val = reader.getBooleanFlagValue(2);
-                    if (val == disabledRwExported) {
-                        Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwExported"));
-                    } else {
-                        Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwExported", val, disabledRwExported));
-                    }
-
-                    if (useNewStorageValueAndDiscardOld) {
-                        disabledRwExported = val;
-                    }
-
-                    val = reader.getBooleanFlagValue(8);
-                    if (val == enabledRw) {
-                        Log.i(TAG, String.format(SUCCESS_LOG, "enabledRw"));
-                    } else {
-                        Log.i(TAG, String.format(MISMATCH_LOG, "enabledRw", val, enabledRw));
-                    }
-
-                    if (useNewStorageValueAndDiscardOld) {
-                        enabledRw = val;
-                    }
-
-                } catch (Exception e) {
-                    Log.e(TAG, ERROR_LOG, e);
-                }
-            }
-        }
-
-        private void load_overrides_other_namespace() {
-            try {
-                Properties properties = DeviceConfig.getProperties("other_namespace");
-                disabledRwInOtherNamespace =
-                    properties.getBoolean(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false);
-            } catch (NullPointerException e) {
-                throw new RuntimeException(
-                    "Cannot read value from namespace other_namespace "
-                    + "from DeviceConfig. It could be that the code using flag "
-                    + "executed before SettingsProvider initialization. Please use "
-                    + "fixed read-only flag by adding is_fixed_read_only: true in "
-                    + "flag declaration.",
-                    e
-                );
-            }
-            other_namespace_is_cached = true;
-            init();
-            if (readFromNewStorage && reader != null) {
-                boolean val;
-                try {
-                    val = reader.getBooleanFlagValue(3);
-                    if (val == disabledRwInOtherNamespace) {
-                        Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwInOtherNamespace"));
-                    } else {
-                        Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwInOtherNamespace", val, disabledRwInOtherNamespace));
-                    }
-
-                    if (useNewStorageValueAndDiscardOld) {
-                        disabledRwInOtherNamespace = val;
-                    }
-
-                } catch (Exception e) {
-                    Log.e(TAG, ERROR_LOG, e);
-                }
-            }
-        }"# + expected_featureflagsmpl_content_2;
-
-        let mut file_set = HashMap::from([
-            ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
-            (
-                "com/android/aconfig/test/FeatureFlagsImpl.java",
-                &expect_featureflagsimpl_content_new,
-            ),
+            ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsmpl_content),
             ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
             (
                 "com/android/aconfig/test/CustomFeatureFlags.java",
@@ -916,7 +768,6 @@
             private static boolean enabledFixedRoExported = false;
             private static boolean enabledRoExported = false;
 
-
             private void load_overrides_aconfig_test() {
                 try {
                     Properties properties = DeviceConfig.getProperties("aconfig_test");
@@ -941,21 +792,21 @@
             @Override
             public boolean disabledRwExported() {
                 if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                        load_overrides_aconfig_test();
                 }
                 return disabledRwExported;
             }
             @Override
             public boolean enabledFixedRoExported() {
                 if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                        load_overrides_aconfig_test();
                 }
                 return enabledFixedRoExported;
             }
             @Override
             public boolean enabledRoExported() {
                 if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                        load_overrides_aconfig_test();
                 }
                 return enabledRoExported;
             }
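
The hard-coded indices in the expected Java output above (getBooleanFlagValue(1), (2), (3), (8)) are the per-package flag offsets that the test computes with assign_flag_ids before invoking the codegen. A minimal sketch of that kind of offset assignment, assuming offsets simply follow sorted flag-name order, which is consistent with the indices seen here; the helper below is illustrative, not the aconfig implementation:

    use std::collections::HashMap;

    // Assign a stable, zero-based offset to each flag in a package by sorting
    // flag names lexicographically. Illustrative only; the real assign_flag_ids
    // may compute offsets differently.
    fn assign_offsets(flag_names: &[&str]) -> HashMap<String, u16> {
        let mut sorted: Vec<&str> = flag_names.to_vec();
        sorted.sort_unstable();
        sorted
            .into_iter()
            .enumerate()
            .map(|(i, name)| (name.to_string(), i as u16))
            .collect()
    }

    fn main() {
        let offsets = assign_offsets(&[
            "disabled_ro",
            "disabled_rw",
            "disabled_rw_exported",
            "disabled_rw_in_other_namespace",
            "enabled_fixed_ro",
            "enabled_fixed_ro_exported",
            "enabled_ro",
            "enabled_ro_exported",
            "enabled_rw",
        ]);
        // Sorted-order assignment reproduces the indices used in the generated
        // reader: disabled_rw -> 1, disabled_rw_exported -> 2, enabled_rw -> 8.
        assert_eq!(offsets["disabled_rw"], 1);
        assert_eq!(offsets["disabled_rw_exported"], 2);
        assert_eq!(offsets["enabled_rw"], 8);
    }

The generated readers index directly into the boolean flag value file with these offsets, so the storage files and the generated code have to be produced from the same flag set to stay in sync.
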
diff --git a/tools/aconfig/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs
index 6f3f7bf..7bc34d6 100644
--- a/tools/aconfig/aconfig/src/codegen/rust.rs
+++ b/tools/aconfig/aconfig/src/codegen/rust.rs
@@ -116,10 +116,6 @@
 use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
@@ -260,13 +256,13 @@
 use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
+static READ_FROM_NEW_STORAGE: LazyLock<bool> = LazyLock::new(|| unsafe {
+    Path::new("/metadata/aconfig/boot/enable_only_new_storage").exists()
+});
+
 static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe {
     get_mapped_storage_file("system", StorageFileType::PackageMap)
     .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test"))
@@ -279,24 +275,14 @@
 
 /// flag value cache for disabled_rw
 static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| {
-    let result = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.aconfig_test",
-        "com.android.aconfig.test.disabled_rw",
-        "false") == "true";
-
-    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.core_experiments_team_internal",
-        "com.android.providers.settings.use_new_storage_value",
-        "false") == "true";
-
-    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+    if *READ_FROM_NEW_STORAGE {
         // This will be called multiple times. Subsequent calls after the first are noops.
         logger::init(
             logger::Config::default()
-                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_tag_on_device("aconfig_rust_codegen")
                 .with_max_level(LevelFilter::Info));
 
-        let aconfig_storage_result = FLAG_VAL_MAP
+        let flag_value_result = FLAG_VAL_MAP
             .as_ref()
             .map_err(|err| format!("failed to get flag val map: {err}"))
             .and_then(|flag_val_map| {
@@ -314,54 +300,33 @@
                     })
                 });
 
-        match aconfig_storage_result {
-            Ok(storage_result) if storage_result == result => {
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
-            },
-            Ok(storage_result) => {
-                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
+        match flag_value_result {
+            Ok(flag_value) => {
+                 return flag_value;
             },
             Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: {err}");
-                if use_new_storage_value {
-                    panic!("failed to read flag value: {err}");
-                }
+                log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+                panic!("failed to read flag value: {err}");
             }
         }
+    } else {
+        flags_rust::GetServerConfigurableFlag(
+            "aconfig_flags.aconfig_test",
+            "com.android.aconfig.test.disabled_rw",
+            "false") == "true"
     }
-
-    result
 });
 
 /// flag value cache for disabled_rw_exported
 static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| {
-    let result = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.aconfig_test",
-        "com.android.aconfig.test.disabled_rw_exported",
-        "false") == "true";
-
-    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.core_experiments_team_internal",
-        "com.android.providers.settings.use_new_storage_value",
-        "false") == "true";
-
-    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+    if *READ_FROM_NEW_STORAGE {
         // This will be called multiple times. Subsequent calls after the first are noops.
         logger::init(
             logger::Config::default()
-                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_tag_on_device("aconfig_rust_codegen")
                 .with_max_level(LevelFilter::Info));
 
-        let aconfig_storage_result = FLAG_VAL_MAP
+        let flag_value_result = FLAG_VAL_MAP
             .as_ref()
             .map_err(|err| format!("failed to get flag val map: {err}"))
             .and_then(|flag_val_map| {
@@ -379,54 +344,33 @@
                     })
                 });
 
-        match aconfig_storage_result {
-            Ok(storage_result) if storage_result == result => {
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
-            },
-            Ok(storage_result) => {
-                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_exported'. Legacy storage was {result}, new storage was {storage_result}");
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
+        match flag_value_result {
+            Ok(flag_value) => {
+                 return flag_value;
             },
             Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: {err}");
-                if use_new_storage_value {
-                    panic!("failed to read flag value: {err}");
-                }
+                log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+                panic!("failed to read flag value: {err}");
             }
         }
+    } else {
+        flags_rust::GetServerConfigurableFlag(
+            "aconfig_flags.aconfig_test",
+            "com.android.aconfig.test.disabled_rw_exported",
+            "false") == "true"
     }
-
-    result
 });
 
 /// flag value cache for disabled_rw_in_other_namespace
 static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| {
-    let result = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.other_namespace",
-        "com.android.aconfig.test.disabled_rw_in_other_namespace",
-        "false") == "true";
-
-    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.core_experiments_team_internal",
-        "com.android.providers.settings.use_new_storage_value",
-        "false") == "true";
-
-    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+    if *READ_FROM_NEW_STORAGE {
         // This will be called multiple times. Subsequent calls after the first are noops.
         logger::init(
             logger::Config::default()
-                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_tag_on_device("aconfig_rust_codegen")
                 .with_max_level(LevelFilter::Info));
 
-        let aconfig_storage_result = FLAG_VAL_MAP
+        let flag_value_result = FLAG_VAL_MAP
             .as_ref()
             .map_err(|err| format!("failed to get flag val map: {err}"))
             .and_then(|flag_val_map| {
@@ -444,55 +388,34 @@
                     })
                 });
 
-        match aconfig_storage_result {
-            Ok(storage_result) if storage_result == result => {
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
-            },
-            Ok(storage_result) => {
-                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_in_other_namespace'. Legacy storage was {result}, new storage was {storage_result}");
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
+        match flag_value_result {
+            Ok(flag_value) => {
+                 return flag_value;
             },
             Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: {err}");
-                if use_new_storage_value {
-                    panic!("failed to read flag value: {err}");
-                }
+                log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+                panic!("failed to read flag value: {err}");
             }
         }
+    } else {
+        flags_rust::GetServerConfigurableFlag(
+            "aconfig_flags.other_namespace",
+            "com.android.aconfig.test.disabled_rw_in_other_namespace",
+            "false") == "true"
     }
-
-    result
 });
 
 
 /// flag value cache for enabled_rw
 static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| {
-    let result = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.aconfig_test",
-        "com.android.aconfig.test.enabled_rw",
-        "true") == "true";
-
-    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.core_experiments_team_internal",
-        "com.android.providers.settings.use_new_storage_value",
-        "false") == "true";
-
-    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+    if *READ_FROM_NEW_STORAGE {
         // This will be called multiple times. Subsequent calls after the first are noops.
         logger::init(
             logger::Config::default()
-                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_tag_on_device("aconfig_rust_codegen")
                 .with_max_level(LevelFilter::Info));
 
-        let aconfig_storage_result = FLAG_VAL_MAP
+        let flag_value_result = FLAG_VAL_MAP
             .as_ref()
             .map_err(|err| format!("failed to get flag val map: {err}"))
             .and_then(|flag_val_map| {
@@ -510,32 +433,21 @@
                     })
                 });
 
-        match aconfig_storage_result {
-            Ok(storage_result) if storage_result == result => {
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
-            },
-            Ok(storage_result) => {
-                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'enabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
-                if use_new_storage_value {
-                    return storage_result;
-                } else {
-                    return result;
-                }
+        match flag_value_result {
+            Ok(flag_value) => {
+                 return flag_value;
             },
             Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: {err}");
-                if use_new_storage_value {
-                    panic!("failed to read flag value: {err}");
-                }
+                log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+                panic!("failed to read flag value: {err}");
             }
         }
+    } else {
+        flags_rust::GetServerConfigurableFlag(
+            "aconfig_flags.aconfig_test",
+            "com.android.aconfig.test.enabled_rw",
+            "true") == "true"
     }
-
-    result
 });
 
 impl FlagProvider {
@@ -596,65 +508,7 @@
 /// query flag disabled_ro
 #[inline(always)]
 pub fn disabled_ro() -> bool {
-
-
-    let result = false;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 0 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'disabled_ro'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = false;
-        }
-    }
-
-    result
-
+    false
 }
 
 /// query flag disabled_rw
@@ -678,257 +532,25 @@
 /// query flag enabled_fixed_ro
 #[inline(always)]
 pub fn enabled_fixed_ro() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 4 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_fixed_ro_exported
 #[inline(always)]
 pub fn enabled_fixed_ro_exported() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 5 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro_exported'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_ro
 #[inline(always)]
 pub fn enabled_ro() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 6 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_ro_exported
 #[inline(always)]
 pub fn enabled_ro_exported() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 7 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro_exported'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_rw
@@ -1203,10 +825,6 @@
 use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
@@ -1275,10 +893,6 @@
 use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
index 1a14f64..496876e 100644
--- a/tools/aconfig/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -79,8 +79,18 @@
             .read_to_string(&mut contents)
             .with_context(|| format!("failed to read {}", input.source))?;
 
-        let flag_declarations = aconfig_protos::flag_declarations::try_from_text_proto(&contents)
-            .with_context(|| input.error_context())?;
+        let mut flag_declarations =
+            aconfig_protos::flag_declarations::try_from_text_proto(&contents)
+                .with_context(|| input.error_context())?;
+
+        // system_ext flags should be treated as system flags because /system_ext and
+        // /system are combined into a single container.
+        // TODO: remove this logic once we enforce that system_ext cannot be set as the
+        // container in aconfig declaration files.
+        if flag_declarations.container() == "system_ext" {
+            flag_declarations.set_container(String::from("system"));
+        }
+
         ensure!(
             package == flag_declarations.package(),
             "failed to parse {}: expected package {}, got {}",
@@ -270,10 +280,11 @@
     caches: Vec<Input>,
     container: &str,
     file: &StorageFileType,
+    version: u32,
 ) -> Result<Vec<u8>> {
     let parsed_flags_vec: Vec<ProtoParsedFlags> =
         caches.into_iter().map(|mut input| input.try_parse_flags()).collect::<Result<Vec<_>>>()?;
-    generate_storage_file(container, parsed_flags_vec.iter(), file)
+    generate_storage_file(container, parsed_flags_vec.iter(), file, version)
 }
 
 pub fn create_device_config_defaults(mut input: Input) -> Result<Vec<u8>> {
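Note on the system_ext remapping in the first hunk of this file: the container is rewritten at parse time, so everything downstream (storage file generation, codegen) only ever sees "system". A minimal standalone sketch of that normalization, using a hypothetical helper rather than the proto setter used above:

    // Hypothetical helper mirroring the remapping above; the real change mutates the
    // parsed proto in place via flag_declarations.set_container(...).
    fn normalize_container(container: &str) -> &str {
        // /system_ext and /system are treated as a single "system" container.
        if container == "system_ext" {
            "system"
        } else {
            container
        }
    }

    fn main() {
        assert_eq!(normalize_container("system_ext"), "system");
        assert_eq!(normalize_container("vendor"), "vendor");
    }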
diff --git a/tools/aconfig/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs
index 1fb64f9..edb4fd3 100644
--- a/tools/aconfig/aconfig/src/main.rs
+++ b/tools/aconfig/aconfig/src/main.rs
@@ -16,6 +16,8 @@
 
 //! `aconfig` is a build time tool to manage build time configurations, such as feature flags.
 
+use aconfig_storage_file::DEFAULT_FILE_VERSION;
+use aconfig_storage_file::MAX_SUPPORTED_FILE_VERSION;
 use anyhow::{anyhow, bail, Context, Result};
 use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command};
 use core::any::Any;
@@ -159,7 +161,13 @@
                         .value_parser(|s: &str| StorageFileType::try_from(s)),
                 )
                 .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
-                .arg(Arg::new("out").long("out").required(true)),
+                .arg(Arg::new("out").long("out").required(true))
+                .arg(
+                    Arg::new("version")
+                        .long("version")
+                        .required(false)
+                        .value_parser(|s: &str| s.parse::<u32>()),
+                ),
         )
 }
 
@@ -309,12 +317,18 @@
             write_output_to_file_or_stdout(path, &output)?;
         }
         Some(("create-storage", sub_matches)) => {
+            let version =
+                get_optional_arg::<u32>(sub_matches, "version").unwrap_or(&DEFAULT_FILE_VERSION);
+            if *version > MAX_SUPPORTED_FILE_VERSION {
+                bail!("Invalid version selected ({})", version);
+            }
             let file = get_required_arg::<StorageFileType>(sub_matches, "file")
                 .context("Invalid storage file selection")?;
             let cache = open_zero_or_more_files(sub_matches, "cache")?;
             let container = get_required_arg::<String>(sub_matches, "container")?;
             let path = get_required_arg::<String>(sub_matches, "out")?;
-            let output = commands::create_storage(cache, container, file)
+
+            let output = commands::create_storage(cache, container, file, *version)
                 .context("failed to create storage files")?;
             write_output_to_file_or_stdout(path, &output)?;
         }
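The version handling added to create-storage above boils down to: default to DEFAULT_FILE_VERSION when --version is omitted, and reject anything above MAX_SUPPORTED_FILE_VERSION. A hedged sketch of that check in isolation (resolve_version is a made-up name, not part of the change):

    use aconfig_storage_file::{DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION};
    use anyhow::{ensure, Result};

    // Hypothetical helper: pick the storage file version from an optional CLI argument.
    fn resolve_version(requested: Option<u32>) -> Result<u32> {
        let version = requested.unwrap_or(DEFAULT_FILE_VERSION);
        ensure!(version <= MAX_SUPPORTED_FILE_VERSION, "Invalid version selected ({})", version);
        Ok(version)
    }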
diff --git a/tools/aconfig/aconfig/src/storage/flag_info.rs b/tools/aconfig/aconfig/src/storage/flag_info.rs
new file mode 100644
index 0000000..2532609
--- /dev/null
+++ b/tools/aconfig/aconfig/src/storage/flag_info.rs
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::commands::assign_flag_ids;
+use crate::storage::FlagPackage;
+use aconfig_protos::ProtoFlagPermission;
+use aconfig_storage_file::{FlagInfoHeader, FlagInfoList, FlagInfoNode, StorageFileType};
+use anyhow::{anyhow, Result};
+
+fn new_header(container: &str, num_flags: u32, version: u32) -> FlagInfoHeader {
+    FlagInfoHeader {
+        version,
+        container: String::from(container),
+        file_type: StorageFileType::FlagInfo as u8,
+        file_size: 0,
+        num_flags,
+        boolean_flag_offset: 0,
+    }
+}
+
+pub fn create_flag_info(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<FlagInfoList> {
+    // create list
+    let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
+
+    let mut is_flag_rw = vec![false; num_flags as usize];
+    for pkg in packages.iter() {
+        let start_index = pkg.boolean_start_index as usize;
+        let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?;
+        for pf in pkg.boolean_flags.iter() {
+            let fid = flag_ids
+                .get(pf.name())
+                .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?;
+            is_flag_rw[start_index + (*fid as usize)] =
+                pf.permission() == ProtoFlagPermission::READ_WRITE;
+        }
+    }
+
+    let mut list = FlagInfoList {
+        header: new_header(container, num_flags, version),
+        nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(),
+    };
+
+    // initialize all header fields
+    list.header.boolean_flag_offset = list.header.into_bytes().len() as u32;
+    let bytes_per_node = FlagInfoNode::create(false).into_bytes().len() as u32;
+    list.header.file_size = list.header.boolean_flag_offset + num_flags * bytes_per_node;
+
+    Ok(list)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
+    pub fn create_test_flag_info_list_from_source() -> Result<FlagInfoList> {
+        let caches = parse_all_test_flags();
+        let packages = group_flags_by_package(caches.iter());
+        create_flag_info("mockup", &packages, DEFAULT_FILE_VERSION)
+    }
+
+    #[test]
+    // this test point locks down the flag info creation and each field
+    fn test_list_contents() {
+        let flag_info_list = create_test_flag_info_list_from_source();
+        assert!(flag_info_list.is_ok());
+        let expected_flag_info_list =
+            aconfig_storage_file::test_utils::create_test_flag_info_list();
+        assert_eq!(flag_info_list.unwrap(), expected_flag_info_list);
+    }
+}
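For context on the header bookkeeping in create_flag_info: the file is just the header followed by one FlagInfoNode per boolean flag, so the derived fields are plain sums. An arithmetic-only sketch, assuming (as the current node layout suggests) that each FlagInfoNode serializes to a single attribute byte; the real code measures serialized sizes with into_bytes() rather than assuming them:

    // Hypothetical helper showing how the two derived header fields relate.
    fn flag_info_layout(header_len: u32, num_flags: u32) -> (u32, u32) {
        let boolean_flag_offset = header_len;            // nodes start right after the header
        let file_size = boolean_flag_offset + num_flags; // assumed one byte per FlagInfoNode
        (boolean_flag_offset, file_size)
    }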
diff --git a/tools/aconfig/aconfig/src/storage/flag_table.rs b/tools/aconfig/aconfig/src/storage/flag_table.rs
index a971211..6046d7e 100644
--- a/tools/aconfig/aconfig/src/storage/flag_table.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_table.rs
@@ -19,13 +19,12 @@
 use aconfig_protos::ProtoFlagPermission;
 use aconfig_storage_file::{
     get_table_size, FlagTable, FlagTableHeader, FlagTableNode, StorageFileType, StoredFlagType,
-    FILE_VERSION,
 };
 use anyhow::{anyhow, Result};
 
-fn new_header(container: &str, num_flags: u32) -> FlagTableHeader {
+fn new_header(container: &str, num_flags: u32, version: u32) -> FlagTableHeader {
     FlagTableHeader {
-        version: FILE_VERSION,
+        version,
         container: String::from(container),
         file_type: StorageFileType::FlagMap as u8,
         file_size: 0,
@@ -86,12 +85,16 @@
     }
 }
 
-pub fn create_flag_table(container: &str, packages: &[FlagPackage]) -> Result<FlagTable> {
+pub fn create_flag_table(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<FlagTable> {
     // create table
     let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
     let num_buckets = get_table_size(num_flags)?;
 
-    let mut header = new_header(container, num_flags);
+    let mut header = new_header(container, num_flags, version);
     let mut buckets = vec![None; num_buckets as usize];
     let mut node_wrappers = packages
         .iter()
@@ -138,13 +141,15 @@
 
 #[cfg(test)]
 mod tests {
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
     use super::*;
     use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
 
     fn create_test_flag_table_from_source() -> Result<FlagTable> {
         let caches = parse_all_test_flags();
         let packages = group_flags_by_package(caches.iter());
-        create_flag_table("mockup", &packages)
+        create_flag_table("mockup", &packages, DEFAULT_FILE_VERSION)
     }
 
     #[test]
diff --git a/tools/aconfig/aconfig/src/storage/flag_value.rs b/tools/aconfig/aconfig/src/storage/flag_value.rs
index c15ba54..6a655b9 100644
--- a/tools/aconfig/aconfig/src/storage/flag_value.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_value.rs
@@ -17,12 +17,12 @@
 use crate::commands::assign_flag_ids;
 use crate::storage::FlagPackage;
 use aconfig_protos::ProtoFlagState;
-use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType, FILE_VERSION};
+use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType};
 use anyhow::{anyhow, Result};
 
-fn new_header(container: &str, num_flags: u32) -> FlagValueHeader {
+fn new_header(container: &str, num_flags: u32, version: u32) -> FlagValueHeader {
     FlagValueHeader {
-        version: FILE_VERSION,
+        version,
         container: String::from(container),
         file_type: StorageFileType::FlagVal as u8,
         file_size: 0,
@@ -31,12 +31,16 @@
     }
 }
 
-pub fn create_flag_value(container: &str, packages: &[FlagPackage]) -> Result<FlagValueList> {
+pub fn create_flag_value(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<FlagValueList> {
     // create list
     let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
 
     let mut list = FlagValueList {
-        header: new_header(container, num_flags),
+        header: new_header(container, num_flags, version),
         booleans: vec![false; num_flags as usize],
     };
 
@@ -61,13 +65,15 @@
 
 #[cfg(test)]
 mod tests {
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
     use super::*;
     use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
 
     pub fn create_test_flag_value_list_from_source() -> Result<FlagValueList> {
         let caches = parse_all_test_flags();
         let packages = group_flags_by_package(caches.iter());
-        create_flag_value("mockup", &packages)
+        create_flag_value("mockup", &packages, DEFAULT_FILE_VERSION)
     }
 
     #[test]
diff --git a/tools/aconfig/aconfig/src/storage/mod.rs b/tools/aconfig/aconfig/src/storage/mod.rs
index 73339f2..9e5dad5 100644
--- a/tools/aconfig/aconfig/src/storage/mod.rs
+++ b/tools/aconfig/aconfig/src/storage/mod.rs
@@ -14,15 +14,16 @@
  * limitations under the License.
  */
 
+pub mod flag_info;
 pub mod flag_table;
 pub mod flag_value;
 pub mod package_table;
 
-use anyhow::{anyhow, Result};
+use anyhow::Result;
 use std::collections::{HashMap, HashSet};
 
 use crate::storage::{
-    flag_table::create_flag_table, flag_value::create_flag_value,
+    flag_info::create_flag_info, flag_table::create_flag_table, flag_value::create_flag_value,
     package_table::create_package_table,
 };
 use aconfig_protos::{ProtoParsedFlag, ProtoParsedFlags};
@@ -87,6 +88,7 @@
     container: &str,
     parsed_flags_vec_iter: I,
     file: &StorageFileType,
+    version: u32,
 ) -> Result<Vec<u8>>
 where
     I: Iterator<Item = &'a ProtoParsedFlags>,
@@ -95,18 +97,21 @@
 
     match file {
         StorageFileType::PackageMap => {
-            let package_table = create_package_table(container, &packages)?;
+            let package_table = create_package_table(container, &packages, version)?;
             Ok(package_table.into_bytes())
         }
         StorageFileType::FlagMap => {
-            let flag_table = create_flag_table(container, &packages)?;
+            let flag_table = create_flag_table(container, &packages, version)?;
             Ok(flag_table.into_bytes())
         }
         StorageFileType::FlagVal => {
-            let flag_value = create_flag_value(container, &packages)?;
+            let flag_value = create_flag_value(container, &packages, version)?;
             Ok(flag_value.into_bytes())
         }
-        _ => Err(anyhow!("aconfig does not support the creation of this storage file type")),
+        StorageFileType::FlagInfo => {
+            let flag_info = create_flag_info(container, &packages, version)?;
+            Ok(flag_info.into_bytes())
+        }
     }
 }
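With the FlagInfo arm added above, generate_storage_file now covers every StorageFileType, and each branch threads the caller-supplied version through to the header constructors. A hedged usage sketch, assuming generate_storage_file is in scope (it lives in this crate's storage module):

    use aconfig_protos::ProtoParsedFlags;
    use aconfig_storage_file::{StorageFileType, DEFAULT_FILE_VERSION};
    use anyhow::Result;

    // Hypothetical caller: serialize the boolean flag values for a container at the
    // default file version.
    fn flag_value_bytes(container: &str, parsed: &[ProtoParsedFlags]) -> Result<Vec<u8>> {
        generate_storage_file(container, parsed.iter(), &StorageFileType::FlagVal, DEFAULT_FILE_VERSION)
    }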
 
diff --git a/tools/aconfig/aconfig/src/storage/package_table.rs b/tools/aconfig/aconfig/src/storage/package_table.rs
index c53602f..56559f8 100644
--- a/tools/aconfig/aconfig/src/storage/package_table.rs
+++ b/tools/aconfig/aconfig/src/storage/package_table.rs
@@ -18,14 +18,13 @@
 
 use aconfig_storage_file::{
     get_table_size, PackageTable, PackageTableHeader, PackageTableNode, StorageFileType,
-    FILE_VERSION,
 };
 
 use crate::storage::FlagPackage;
 
-fn new_header(container: &str, num_packages: u32) -> PackageTableHeader {
+fn new_header(container: &str, num_packages: u32, version: u32) -> PackageTableHeader {
     PackageTableHeader {
-        version: FILE_VERSION,
+        version,
         container: String::from(container),
         file_type: StorageFileType::PackageMap as u8,
         file_size: 0,
@@ -56,20 +55,26 @@
     }
 }
 
-pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result<PackageTable> {
+pub fn create_package_table(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<PackageTable> {
     // create table
     let num_packages = packages.len() as u32;
     let num_buckets = get_table_size(num_packages)?;
-    let mut header = new_header(container, num_packages);
+    let mut header = new_header(container, num_packages, version);
     let mut buckets = vec![None; num_buckets as usize];
-    let mut node_wrappers: Vec<_> =
-        packages.iter().map(|pkg| PackageTableNodeWrapper::new(pkg, num_buckets)).collect();
+    let mut node_wrappers: Vec<_> = packages
+        .iter()
+        .map(|pkg: &FlagPackage<'_>| PackageTableNodeWrapper::new(pkg, num_buckets))
+        .collect();
 
     // initialize all header fields
     header.bucket_offset = header.into_bytes().len() as u32;
     header.node_offset = header.bucket_offset + num_buckets * 4;
     header.file_size = header.node_offset
-        + node_wrappers.iter().map(|x| x.node.into_bytes().len()).sum::<usize>() as u32;
+        + node_wrappers.iter().map(|x| x.node.into_bytes(version).len()).sum::<usize>() as u32;
 
     // sort node_wrappers by bucket index for efficiency
     node_wrappers.sort_by(|a, b| a.bucket_index.cmp(&b.bucket_index));
@@ -87,7 +92,7 @@
         if buckets[node_bucket_idx as usize].is_none() {
             buckets[node_bucket_idx as usize] = Some(offset);
         }
-        offset += node_wrappers[i].node.into_bytes().len() as u32;
+        offset += node_wrappers[i].node.into_bytes(version).len() as u32;
 
         if let Some(index) = next_node_bucket_idx {
             if index == node_bucket_idx {
@@ -106,13 +111,15 @@
 
 #[cfg(test)]
 mod tests {
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
     use super::*;
     use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
 
     pub fn create_test_package_table_from_source() -> Result<PackageTable> {
         let caches = parse_all_test_flags();
         let packages = group_flags_by_package(caches.iter());
-        create_package_table("mockup", &packages)
+        create_package_table("mockup", &packages, DEFAULT_FILE_VERSION)
     }
 
     #[test]
diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
index 9970b1f..d1cf191 100644
--- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
@@ -9,56 +9,50 @@
 import android.provider.DeviceConfig;
 import android.provider.DeviceConfig.Properties;
 
-
 {{ -if not library_exported }}
-{{ -if allow_instrumentation }}
 import android.aconfig.storage.StorageInternalReader;
-import android.util.Log;
-{{ -endif }}
+import java.nio.file.Files;
+import java.nio.file.Paths;
 {{ -endif }}
 
 {{ -endif }}
 /** @hide */
 public final class FeatureFlagsImpl implements FeatureFlags \{
 {{ -if runtime_lookup_required }}
+{{ -if not library_exported }}
+    private static final boolean isReadFromNew = Files.exists(Paths.get("/metadata/aconfig/boot/enable_only_new_storage"));
+    private static volatile boolean isCached = false;
+{{ -endif }}
 {{ -for namespace_with_flags in namespace_flags }}
     private static volatile boolean {namespace_with_flags.namespace}_is_cached = false;
 {{ -endfor- }}
 
 {{ for flag in flag_elements }}
-{{- if flag.is_read_write }}
+{{ -if flag.is_read_write }}
     private static boolean {flag.method_name} = {flag.default_value};
 {{ -endif }}
 {{ -endfor }}
-{{ -if not library_exported }}
-{{ -if allow_instrumentation }}
-    StorageInternalReader reader;
-    boolean readFromNewStorage;
 
-    boolean useNewStorageValueAndDiscardOld = false;
-
-    private final static String TAG = "AconfigJavaCodegen";
-    private final static String SUCCESS_LOG = "success: %s value matches";
-    private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s";
-    private final static String ERROR_LOG = "error: failed to read flag value";
-
+{{ if not library_exported }}
     private void init() \{
-        if (reader != null) return;
-        if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.storage_test_mission_1", false)) \{
-            readFromNewStorage = true;
-            try \{
-                reader = new StorageInternalReader("{container}", "{package_name}");
-            } catch (Exception e) \{
-                reader = null;
-            }
+        StorageInternalReader reader = null;
+        try \{
+            reader = new StorageInternalReader("{container}", "{package_name}");
+{{ for namespace_with_flags in namespace_flags }}
+{{ -for flag in namespace_with_flags.flags }}
+{{ if flag.is_read_write }}
+            {flag.method_name} = reader.getBooleanFlagValue({flag.flag_offset});
+{{ endif }}
+{{ -endfor }}
+{{ -endfor }}
+        } catch (Exception e) \{
+            throw new RuntimeException("Cannot read flag in codegen", e);
         }
-
-        useNewStorageValueAndDiscardOld =
-            DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false);
+        isCached = true;
     }
+{{ endif }}
 
-{{ -endif }}
-{{ -endif }}
+
 {{ for namespace_with_flags in namespace_flags }}
     private void load_overrides_{namespace_with_flags.namespace}() \{
         try \{
@@ -80,36 +74,9 @@
             );
         }
         {namespace_with_flags.namespace}_is_cached = true;
-{{ -if not library_exported }}
-{{ -if allow_instrumentation }}
-        init();
-        if (readFromNewStorage && reader != null) \{
-            boolean val;
-            try \{
-{{ -for flag in namespace_with_flags.flags }}
-{{ -if flag.is_read_write }}
-
-                val = reader.getBooleanFlagValue({flag.flag_offset});
-                if (val == {flag.method_name}) \{
-                    Log.i(TAG, String.format(SUCCESS_LOG, "{flag.method_name}"));
-                } else \{
-                    Log.i(TAG, String.format(MISMATCH_LOG, "{flag.method_name}", val, {flag.method_name}));
-                }
-
-                if (useNewStorageValueAndDiscardOld) \{
-                    {flag.method_name} = val;
-                }
-
-{{ -endif }}
-{{ -endfor }}
-            } catch (Exception e) \{
-                    Log.e(TAG, ERROR_LOG, e);
-            }
-        }
-{{ -endif }}
-{{ -endif }}
     }
 {{ endfor- }}
+
 {{ -endif }}{#- end of runtime_lookup_required #}
 {{ -for flag in flag_elements }}
     @Override
@@ -118,19 +85,31 @@
     @UnsupportedAppUsage
 {{ -endif }}
     public boolean {flag.method_name}() \{
+{{ -if not library_exported }}
 {{ -if flag.is_read_write }}
-        if (!{flag.device_config_namespace}_is_cached) \{
-            load_overrides_{flag.device_config_namespace}();
+        if (isReadFromNew) \{
+            if (!isCached) \{
+                init();
+            }
+        } else \{
+            if (!{flag.device_config_namespace}_is_cached) \{
+                load_overrides_{flag.device_config_namespace}();
+            }
         }
         return {flag.method_name};
 {{ -else }}
         return {flag.default_value};
 {{ -endif }}
+{{ else }}
+        if (!{flag.device_config_namespace}_is_cached) \{
+            load_overrides_{flag.device_config_namespace}();
+        }
+        return {flag.method_name};
+{{ -endif }}
     }
 {{ endfor }}
 }
-{{ else }}
-{#- Generate only stub if in test mode #}
+{{ else }} {#- Generate only stub if in test mode #}
 /** @hide */
 public final class FeatureFlagsImpl implements FeatureFlags \{
 {{ for flag in flag_elements }}
diff --git a/tools/aconfig/aconfig/templates/cpp_exported_header.template b/tools/aconfig/aconfig/templates/cpp_exported_header.template
index 0f7853e..4643c97 100644
--- a/tools/aconfig/aconfig/templates/cpp_exported_header.template
+++ b/tools/aconfig/aconfig/templates/cpp_exported_header.template
@@ -27,12 +27,13 @@
     {{ -for item in class_elements}}
     virtual bool {item.flag_name}() = 0;
 
-    {{ -if is_test_mode }}
-    virtual void {item.flag_name}(bool val) = 0;
-    {{ -endif }}
     {{ -endfor }}
 
     {{ -if is_test_mode }}
+    {{ -for item in class_elements}}
+    virtual void {item.flag_name}(bool val) = 0;
+    {{ -endfor }}
+
     virtual void reset_flags() \{}
     {{ -endif }}
 };
diff --git a/tools/aconfig/aconfig/templates/cpp_source_file.template b/tools/aconfig/aconfig/templates/cpp_source_file.template
index b6012e7..852b905 100644
--- a/tools/aconfig/aconfig/templates/cpp_source_file.template
+++ b/tools/aconfig/aconfig/templates/cpp_source_file.template
@@ -2,11 +2,11 @@
 
 {{ if allow_instrumentation }}
 {{ if readwrite- }}
-#include <sys/stat.h>
+#include <unistd.h>
 #include "aconfig_storage/aconfig_storage_read_api.hpp"
 #include <android/log.h>
 #define LOG_TAG "aconfig_cpp_codegen"
-#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
 {{ -endif }}
 {{ endif }}
 
@@ -76,13 +76,13 @@
             : boolean_start_index_()
             {{ -endif }}
             , flag_value_file_(nullptr)
-            , read_from_new_storage_(false)
-            , use_new_storage_value(false) \{
+            , read_from_new_storage_(false) \{
 
-            struct stat buffer;
-            if (stat("/metadata/aconfig_test_missions/mission_1", &buffer) == 0) \{
+            if (access("/metadata/aconfig/boot/enable_only_new_storage", F_OK) == 0) \{
                read_from_new_storage_ = true;
-            } else \{
+            }
+
+            if (!read_from_new_storage_) \{
                return;
             }
 
@@ -90,15 +90,13 @@
                  "{container}",
                  aconfig_storage::StorageFileType::package_map);
             if (!package_map_file.ok()) \{
-                ALOGI("error: failed to get package map file: %s", package_map_file.error().c_str());
-                return;
+                ALOGE("error: failed to get package map file: %s", package_map_file.error().c_str());
             }
 
             auto context = aconfig_storage::get_package_read_context(
                 **package_map_file, "{package}");
             if (!context.ok()) \{
-                ALOGI("error: failed to get package read context: %s", context.error().c_str());
-                return;
+                ALOGE("error: failed to get package read context: %s", context.error().c_str());
             }
 
             // cache package boolean flag start index
@@ -111,18 +109,13 @@
                 "{container}",
                 aconfig_storage::StorageFileType::flag_val);
             if (!flag_value_file.ok()) \{
-                ALOGI("error: failed to get flag value file: %s", flag_value_file.error().c_str());
-                return;
+                ALOGE("error: failed to get flag value file: %s", flag_value_file.error().c_str());
             }
 
             // cache flag value file
             flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>(
                 *flag_value_file);
 
-            use_new_storage_value = server_configurable_flags::GetServerConfigurableFlag(
-                "aconfig_flags.core_experiments_team_internal",
-                "com.android.providers.settings.use_new_storage_value",
-                "false") == "true";
         }
         {{ -endif }}
         {{ -endif }}
@@ -131,44 +124,30 @@
         virtual bool {item.flag_name}() override \{
             {{ -if item.readwrite }}
             if (cache_[{item.readwrite_idx}] == -1) \{
+            {{ if allow_instrumentation- }}
+                if (read_from_new_storage_) \{
+                    auto value = aconfig_storage::get_boolean_flag_value(
+                        *flag_value_file_,
+                        boolean_start_index_ + {item.flag_offset});
+
+                    if (!value.ok()) \{
+                        ALOGE("error: failed to read flag value: %s", value.error().c_str());
+                    }
+
+                    cache_[{item.readwrite_idx}] = *value;
+                } else \{
+                    cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag(
+                        "aconfig_flags.{item.device_config_namespace}",
+                        "{item.device_config_flag}",
+                        "{item.default_value}") == "true";
+                }
+            {{ -else- }}
                 cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag(
                     "aconfig_flags.{item.device_config_namespace}",
                     "{item.device_config_flag}",
                     "{item.default_value}") == "true";
-            }
-
-
-            {{ if allow_instrumentation- }}
-            if (read_from_new_storage_) \{
-                if (!flag_value_file_) \{
-                    ALOGI("error: failed to get flag {item.flag_name}: flag value file is null");
-                    return cache_[{item.readwrite_idx}];
-                }
-
-                auto value = aconfig_storage::get_boolean_flag_value(
-                    *flag_value_file_,
-                    boolean_start_index_ + {item.flag_offset});
-
-                if (!value.ok()) \{
-                    ALOGI("error: failed to read flag value: %s", value.error().c_str());
-                    return cache_[{item.readwrite_idx}];
-                }
-
-                bool expected_value = cache_[{item.readwrite_idx}];
-                if (*value != expected_value) \{
-                    ALOGI("error: {item.flag_name} value mismatch, new storage value is %s, old storage value is %s",
-                    *value ? "true" : "false", expected_value ? "true" : "false");
-                }
-
-                if (use_new_storage_value) \{
-                    return *value;
-                } else \{
-                    return expected_value;
-                }
-            }
             {{ -endif }}
-
-
+            }
             return cache_[{item.readwrite_idx}];
             {{ -else }}
             {{ -if item.is_fixed_read_only }}
@@ -189,7 +168,6 @@
         std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_;
 
         bool read_from_new_storage_;
-        bool use_new_storage_value;
     {{ -endif }}
     {{ -endif }}
 
diff --git a/tools/aconfig/aconfig/templates/rust.template b/tools/aconfig/aconfig/templates/rust.template
index ea1c600..c2f162f 100644
--- a/tools/aconfig/aconfig/templates/rust.template
+++ b/tools/aconfig/aconfig/templates/rust.template
@@ -5,15 +5,15 @@
 use std::sync::LazyLock;
 use log::\{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
 {{ if has_readwrite- }}
 {{ if allow_instrumentation }}
+static READ_FROM_NEW_STORAGE: LazyLock<bool> = LazyLock::new(|| unsafe \{
+    Path::new("/metadata/aconfig/boot/enable_only_new_storage").exists()
+});
+
 static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe \{
     get_mapped_storage_file("{container}", StorageFileType::PackageMap)
     .and_then(|package_map| get_package_read_context(&package_map, "{package}"))
@@ -30,24 +30,15 @@
 /// flag value cache for {flag.name}
 {{ if allow_instrumentation }}
 static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| \{
-    let result = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.{flag.device_config_namespace}",
-        "{flag.device_config_flag}",
-        "{flag.default_value}") == "true";
 
-    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.core_experiments_team_internal",
-        "com.android.providers.settings.use_new_storage_value",
-        "false") == "true";
-
-    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
+    if *READ_FROM_NEW_STORAGE \{
         // This will be called multiple times. Subsequent calls after the first are noops.
         logger::init(
             logger::Config::default()
-                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_tag_on_device("aconfig_rust_codegen")
                 .with_max_level(LevelFilter::Info));
 
-        let aconfig_storage_result = FLAG_VAL_MAP
+        let flag_value_result = FLAG_VAL_MAP
             .as_ref()
             .map_err(|err| format!("failed to get flag val map: \{err}"))
             .and_then(|flag_val_map| \{
@@ -65,33 +56,23 @@
                     })
                 });
 
-        match aconfig_storage_result \{
-            Ok(storage_result) if storage_result == result => \{
-                if use_new_storage_value \{
-                    return storage_result;
-                } else \{
-                    return result;
-                }
-            },
-            Ok(storage_result) => \{
-                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag '{flag.name}'. Legacy storage was \{result}, new storage was \{storage_result}");
-                if use_new_storage_value \{
-                    return storage_result;
-                } else \{
-                    return result;
-                }
+        match flag_value_result \{
+            Ok(flag_value) => \{
+                return flag_value;
             },
             Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: \{err}");
-                if use_new_storage_value \{
-                    panic!("failed to read flag value: \{err}");
-                }
+                log!(Level::Error, "aconfig_rust_codegen: error: \{err}");
+                panic!("failed to read flag value: \{err}");
             }
         }
+    } else \{
+        flags_rust::GetServerConfigurableFlag(
+            "aconfig_flags.{flag.device_config_namespace}",
+            "{flag.device_config_flag}",
+            "{flag.default_value}") == "true"
     }
 
-    result
-    });
+});
 {{ else }}
 static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
     "aconfig_flags.{flag.device_config_namespace}",
@@ -123,72 +104,11 @@
 {{ for flag in template_flags }}
 /// query flag {flag.name}
 #[inline(always)]
-{{ -if flag.readwrite }}
 pub fn {flag.name}() -> bool \{
+{{ -if flag.readwrite }}
     PROVIDER.{flag.name}()
 {{ -else }}
-pub fn {flag.name}() -> bool \{
-    {{ if not allow_instrumentation }}
     {flag.default_value}
-    {{ else }}
-
-    let result = {flag.default_value};
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe \{
-        let package_map = match get_mapped_storage_file("{flag.container}", StorageFileType::PackageMap) \{
-            Ok(file) => file,
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "{package}") \{
-            Ok(Some(context)) => context,
-            Ok(None) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': did not get context");
-                return result;
-            },
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("{flag.container}", StorageFileType::FlagVal) \{
-            Ok(val_map) => val_map,
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, {flag.flag_offset} + package_read_context.boolean_start_index) \{
-            Ok(val) => val,
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-
-        if result != value \{
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for '{flag.name}'. Legacy storage was \{result}, new storage was \{value}");
-        } else \{
-            let default_value = {flag.default_value};
-        }
-    }
-
-    result
-    {{ endif }}
 {{ -endif }}
 }
 {{ endfor }}
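The net effect of this template change: read-only flags collapse to their default value, and read-write flags route through a single process-wide check of the new-storage boot marker instead of the old per-flag mismatch logging. A simplified sketch of that gating pattern (not the generated code itself):

    use std::path::Path;
    use std::sync::LazyLock;

    // One check per process: does the "only new storage" boot marker exist?
    static READ_FROM_NEW_STORAGE: LazyLock<bool> =
        LazyLock::new(|| Path::new("/metadata/aconfig/boot/enable_only_new_storage").exists());

    // Route a read-write flag lookup to new storage or the legacy server-config path.
    fn lookup(new_storage: impl Fn() -> bool, legacy: impl Fn() -> bool) -> bool {
        if *READ_FROM_NEW_STORAGE {
            new_storage()
        } else {
            legacy()
        }
    }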
diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp
index 95cecf4..bdf96ed 100644
--- a/tools/aconfig/aconfig_device_paths/Android.bp
+++ b/tools/aconfig/aconfig_device_paths/Android.bp
@@ -51,4 +51,25 @@
     static_libs: [
         "libaconfig_java_proto_nano",
     ],
+    sdk_version: "core_platform",
+    apex_available: [
+        "//apex_available:platform",
+    ],
+}
+
+genrule {
+    name: "libaconfig_java_host_device_paths_src",
+    srcs: ["src/HostDeviceProtosTemplate.java"],
+    out: ["HostDeviceProtos.java"],
+    tool_files: [
+        "partition_aconfig_flags_paths.txt",
+        "mainline_aconfig_flags_paths.txt",
+    ],
+    cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out).tmp && " +
+    "sed -e '/MAINLINE_T/{r$(location mainline_aconfig_flags_paths.txt)' -e 'd}' $(out).tmp > $(out)",
+}
+
+java_library_host {
+    name: "aconfig_host_device_paths_java",
+    srcs: [":libaconfig_java_host_device_paths_src"],
 }
diff --git a/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt
new file mode 100644
index 0000000..af73a84
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt
@@ -0,0 +1,20 @@
+"/apex/com.android.adservices/etc/aconfig_flags.pb",
+"/apex/com.android.appsearch/etc/aconfig_flags.pb",
+"/apex/com.android.art/etc/aconfig_flags.pb",
+"/apex/com.android.btservices/etc/aconfig_flags.pb",
+"/apex/com.android.cellbroadcast/etc/aconfig_flags.pb",
+"/apex/com.android.configinfrastructure/etc/aconfig_flags.pb",
+"/apex/com.android.conscrypt/etc/aconfig_flags.pb",
+"/apex/com.android.devicelock/etc/aconfig_flags.pb",
+"/apex/com.android.healthfitness/etc/aconfig_flags.pb",
+"/apex/com.android.ipsec/etc/aconfig_flags.pb",
+"/apex/com.android.media/etc/aconfig_flags.pb",
+"/apex/com.android.mediaprovider/etc/aconfig_flags.pb",
+"/apex/com.android.ondevicepersonalization/etc/aconfig_flags.pb",
+"/apex/com.android.os.statsd/etc/aconfig_flags.pb",
+"/apex/com.android.permission/etc/aconfig_flags.pb",
+"/apex/com.android.profiling/etc/aconfig_flags.pb",
+"/apex/com.android.tethering/etc/aconfig_flags.pb",
+"/apex/com.android.uwb/etc/aconfig_flags.pb",
+"/apex/com.android.virt/etc/aconfig_flags.pb",
+"/apex/com.android.wifi/etc/aconfig_flags.pb",
diff --git a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
index 140cd21..e997e3d 100644
--- a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
+++ b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
@@ -1,4 +1,3 @@
 "/system/etc/aconfig_flags.pb",
-"/system_ext/etc/aconfig_flags.pb",
 "/product/etc/aconfig_flags.pb",
 "/vendor/etc/aconfig_flags.pb",
diff --git a/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
index 58c58de..4d41199 100644
--- a/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
+++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
@@ -29,7 +29,7 @@
  * @hide
  */
 public class DeviceProtos {
-    static final String[] PATHS = {
+    public static final String[] PATHS = {
         TEMPLATE
     };
 
@@ -50,10 +50,11 @@
         ArrayList<parsed_flag> result = new ArrayList();
 
         for (String path : parsedFlagsProtoPaths()) {
-            FileInputStream inputStream = new FileInputStream(path);
-            parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
-            for (parsed_flag flag : parsedFlags.parsedFlag) {
-                result.add(flag);
+            try (FileInputStream inputStream = new FileInputStream(path)) {
+                parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
+                for (parsed_flag flag : parsedFlags.parsedFlag) {
+                    result.add(flag);
+                }
             }
         }
 
@@ -64,7 +65,7 @@
      * Returns the list of all on-device aconfig protos paths.
      * @hide
      */
-    private static List<String> parsedFlagsProtoPaths() {
+    public static List<String> parsedFlagsProtoPaths() {
         ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS));
 
         File apexDirectory = new File(APEX_DIR);
diff --git a/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java
new file mode 100644
index 0000000..e7d0a76
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.aconfig;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * A host lib that can read all aconfig proto file paths on a given device.
+ * This lib is only usable on devices with root access (userdebug/eng builds).
+ */
+public class HostDeviceProtos {
+    /**
+     * An interface that executes ADB command and return the result.
+     */
+    public static interface AdbCommandExecutor {
+        /** Executes the ADB command. */
+        String executeAdbCommand(String command);
+    }
+
+    static final String[] PATHS = {
+        TEMPLATE
+    };
+
+    static final String[] MAINLINE_PATHS = {
+        MAINLINE_T
+    };
+
+    private static final String APEX_DIR = "/apex";
+    private static final String RECURSIVELY_LIST_APEX_DIR_COMMAND =
+        "shell su 0 find /apex | grep aconfig_flags";
+    private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb";
+
+
+    /**
+     * Returns the list of all on-device aconfig proto paths from host side.
+     */
+    public static List<String> parsedFlagsProtoPaths(AdbCommandExecutor adbCommandExecutor) {
+        ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS));
+
+        String adbCommandOutput = adbCommandExecutor.executeAdbCommand(
+            RECURSIVELY_LIST_APEX_DIR_COMMAND);
+
+        if (adbCommandOutput == null || adbCommandOutput.isEmpty()) {
+            paths.addAll(Arrays.asList(MAINLINE_PATHS));
+            return paths;
+        }
+
+        Set<String> allFiles = new HashSet<>(Arrays.asList(adbCommandOutput.split("\n")));
+
+        Set<String> subdirs = allFiles.stream().map(file -> {
+            String[] filePaths = file.split("/");
+            // The first element is "", the second element is "apex".
+            return filePaths.length > 2 ? filePaths[2] : "";
+        }).collect(Collectors.toSet());
+
+        for (String prefix : subdirs) {
+            // Each mainline module has two directories: <modulepackage>/ and
+            // <modulepackage>@<versioncode>/. Only read the former.
+            if (prefix.contains("@")) {
+                continue;
+            }
+
+            String protoPath = APEX_DIR + "/" + prefix + APEX_ACONFIG_PATH_SUFFIX;
+            if (allFiles.contains(protoPath)) {
+                paths.add(protoPath);
+            }
+        }
+        return paths;
+    }
+}
diff --git a/tools/aconfig/aconfig_device_paths/src/lib.rs b/tools/aconfig/aconfig_device_paths/src/lib.rs
index 9ab9cea..8871b4f 100644
--- a/tools/aconfig/aconfig_device_paths/src/lib.rs
+++ b/tools/aconfig/aconfig_device_paths/src/lib.rs
@@ -62,13 +62,12 @@
 
     #[test]
     fn test_read_partition_paths() {
-        assert_eq!(read_partition_paths().len(), 4);
+        assert_eq!(read_partition_paths().len(), 3);
 
         assert_eq!(
             read_partition_paths(),
             vec![
                 PathBuf::from("/system/etc/aconfig_flags.pb"),
-                PathBuf::from("/system_ext/etc/aconfig_flags.pb"),
                 PathBuf::from("/product/etc/aconfig_flags.pb"),
                 PathBuf::from("/vendor/etc/aconfig_flags.pb")
             ]
diff --git a/tools/aconfig/aconfig_flags/Android.bp b/tools/aconfig/aconfig_flags/Android.bp
new file mode 100644
index 0000000..4c1fd4e
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/Android.bp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+rust_library {
+    name: "libaconfig_flags",
+    crate_name: "aconfig_flags",
+    srcs: [
+        "src/lib.rs",
+    ],
+    rustlibs: [
+        "libaconfig_flags_rust",
+    ],
+    host_supported: true,
+}
+
+aconfig_declarations {
+    name: "aconfig_flags",
+    package: "com.android.aconfig.flags",
+    container: "system",
+    srcs: ["flags.aconfig"],
+}
+
+rust_aconfig_library {
+    name: "libaconfig_flags_rust",
+    crate_name: "aconfig_flags_rust",
+    aconfig_declarations: "aconfig_flags",
+    host_supported: true,
+}
+
+cc_aconfig_library {
+    name: "libaconfig_flags_cc",
+    aconfig_declarations: "aconfig_flags",
+}
+
+java_aconfig_library {
+    name: "aconfig_flags_java",
+    aconfig_declarations: "aconfig_flags",
+}
diff --git a/tools/aconfig/aconfig_flags/Cargo.toml b/tools/aconfig/aconfig_flags/Cargo.toml
new file mode 100644
index 0000000..6eb9f14
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "aconfig_flags"
+version = "0.1.0"
+edition = "2021"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
\ No newline at end of file
diff --git a/tools/aconfig/aconfig_flags/flags.aconfig b/tools/aconfig/aconfig_flags/flags.aconfig
new file mode 100644
index 0000000..0a004ca
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/flags.aconfig
@@ -0,0 +1,16 @@
+package: "com.android.aconfig.flags"
+container: "system"
+
+flag {
+  name: "enable_only_new_storage"
+  namespace: "core_experiments_team_internal"
+  bug: "312235596"
+  description: "When enabled, aconfig flags are read from the new aconfig storage only."
+}
+
+flag {
+  name: "enable_aconfigd_from_mainline"
+  namespace: "core_experiments_team_internal"
+  bug: "369808805"
+  description: "When enabled, launch aconfigd from config infra module."
+}
diff --git a/tools/aconfig/aconfig_flags/src/lib.rs b/tools/aconfig/aconfig_flags/src/lib.rs
new file mode 100644
index 0000000..2e89127
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/src/lib.rs
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! `aconfig_flags` is a crate for reading aconfig flags from Rust
+// When building with the Android tool-chain
+//
+//   - the flag functions will read from aconfig_flags_inner
+//   - the feature "cargo" will be disabled
+//
+// When building with cargo
+//
+//   - the flag functions will all return some trivial value, like true
+//   - the feature "cargo" will be enabled
+//
+// This module hides these differences from the rest of aconfig.
+
+/// Module used when building with the Android tool-chain
+#[cfg(not(feature = "cargo"))]
+pub mod auto_generated {
+    /// Returns the value for the enable_only_new_storage flag.
+    pub fn enable_only_new_storage() -> bool {
+        aconfig_flags_rust::enable_only_new_storage()
+    }
+
+    /// Returns the value for the enable_aconfigd_from_mainline flag.
+    pub fn enable_aconfigd_from_mainline() -> bool {
+        aconfig_flags_rust::enable_aconfigd_from_mainline()
+    }
+}
+
+/// Module used when building with cargo
+#[cfg(feature = "cargo")]
+pub mod auto_generated {
+    /// Returns a placeholder value for the enable_only_new_storage flag.
+    pub fn enable_only_new_storage() -> bool {
+        // Used only to enable typechecking and testing with cargo
+        true
+    }
+
+    /// Returns a placeholder value for the enable_aconfigd_from_mainline flag.
+    pub fn enable_aconfigd_from_mainline() -> bool {
+        // Used only to enable typechecking and testing with cargo
+        true
+    }
+}
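A short usage sketch for the wrapper crate above: callers go through auto_generated and never need to know whether the value came from the generated library (Android build) or the cargo stub (the function below is hypothetical):

    // Hypothetical caller of the aconfig_flags wrapper crate.
    fn pick_storage_backend() -> &'static str {
        if aconfig_flags::auto_generated::enable_only_new_storage() {
            "aconfig storage"
        } else {
            "legacy DeviceConfig"
        }
    }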
diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp
index 40b4464..e875c7b 100644
--- a/tools/aconfig/aconfig_storage_file/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/Android.bp
@@ -14,6 +14,7 @@
         "libclap",
         "libcxx",
         "libaconfig_storage_protos",
+        "libserde",
     ],
 }
 
@@ -36,7 +37,10 @@
     name: "aconfig-storage",
     defaults: ["aconfig_storage_file.defaults"],
     srcs: ["src/main.rs"],
-    rustlibs: ["libaconfig_storage_file"],
+    rustlibs: [
+        "libaconfig_storage_file",
+        "libserde_json",
+    ],
 }
 
 rust_test_host {
diff --git a/tools/aconfig/aconfig_storage_file/Cargo.toml b/tools/aconfig/aconfig_storage_file/Cargo.toml
index 192dfad..a405578 100644
--- a/tools/aconfig/aconfig_storage_file/Cargo.toml
+++ b/tools/aconfig/aconfig_storage_file/Cargo.toml
@@ -14,6 +14,8 @@
 thiserror = "1.0.56"
 clap = { version = "4.1.8", features = ["derive"] }
 cxx = "1.0"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
 
 [[bin]]
 name = "aconfig-storage"
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_info.rs b/tools/aconfig/aconfig_storage_file/src/flag_info.rs
index beac38d..f090396 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_info.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_info.rs
@@ -20,10 +20,11 @@
 use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes};
 use crate::{AconfigStorageError, StorageFileType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Flag info header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagInfoHeader {
     pub version: u32,
     pub container: String,
@@ -89,7 +90,7 @@
 }
 
 /// bit field for flag info
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub enum FlagInfoBit {
     HasServerOverride = 1 << 0,
     IsReadWrite = 1 << 1,
@@ -97,7 +98,7 @@
 }
 
 /// Flag info node struct
-#[derive(PartialEq, Clone)]
+#[derive(PartialEq, Clone, Serialize, Deserialize)]
 pub struct FlagInfoNode {
     pub attributes: u8,
 }
@@ -138,7 +139,7 @@
 }
 
 /// Flag info list struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagInfoList {
     pub header: FlagInfoHeader,
     pub nodes: Vec<FlagInfoNode>,
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
index 660edac..0588fe5 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
@@ -23,10 +23,11 @@
 };
 use crate::{AconfigStorageError, StorageFileType, StoredFlagType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Flag table header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagTableHeader {
     pub version: u32,
     pub container: String,
@@ -95,7 +96,7 @@
 }
 
 /// Flag table node struct
-#[derive(PartialEq, Clone)]
+#[derive(PartialEq, Clone, Serialize, Deserialize)]
 pub struct FlagTableNode {
     pub package_id: u32,
     pub flag_name: String,
@@ -154,7 +155,7 @@
     }
 }
 
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagTable {
     pub header: FlagTableHeader,
     pub buckets: Vec<Option<u32>>,
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_value.rs b/tools/aconfig/aconfig_storage_file/src/flag_value.rs
index 506924b..b64c10e 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_value.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_value.rs
@@ -20,10 +20,11 @@
 use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes};
 use crate::{AconfigStorageError, StorageFileType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Flag value header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagValueHeader {
     pub version: u32,
     pub container: String,
@@ -89,7 +90,7 @@
 }
 
 /// Flag value list struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagValueList {
     pub header: FlagValueHeader,
     pub booleans: Vec<bool>,
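The Serialize/Deserialize derives added across these structs are what enable the JSON print and write-bytes flows in the CLI changes further down. A hedged sketch of dumping a parsed flag value file as JSON with those derives (helper name is made up):

    use aconfig_storage_file::{read_file_to_bytes, FlagValueList};
    use anyhow::Result;

    // Hypothetical helper: parse a flag.val file and re-emit it as JSON.
    fn flag_values_as_json(path: &str) -> Result<String> {
        let bytes = read_file_to_bytes(path)?;
        let list = FlagValueList::from_bytes(&bytes)?;
        Ok(serde_json::to_string(&list)?)
    }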
diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs
index b6367ff..1d92ba4 100644
--- a/tools/aconfig/aconfig_storage_file/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_file/src/lib.rs
@@ -41,6 +41,7 @@
 pub mod test_utils;
 
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::cmp::Ordering;
 use std::fs::File;
 use std::hash::Hasher;
@@ -56,8 +57,13 @@
     BytesParseFail, HashTableSizeLimit, InvalidFlagValueType, InvalidStoredFlagType,
 };
 
-/// Storage file version
-pub const FILE_VERSION: u32 = 1;
+/// The max storage file version from which we can safely read/write. May be
+/// experimental.
+pub const MAX_SUPPORTED_FILE_VERSION: u32 = 2;
+
+/// The newest fully-released version. Unless otherwise specified, this is the
+/// version we will write.
+pub const DEFAULT_FILE_VERSION: u32 = 1;
 
 /// Good hash table prime number
 pub(crate) const HASH_PRIMES: [u32; 29] = [
@@ -107,7 +113,7 @@
 
 /// Flag type enum as stored by storage file
 /// ONLY APPEND, NEVER REMOVE FOR BACKWARD COMPATIBILITY. THE MAX IS U16.
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub enum StoredFlagType {
     ReadWriteBoolean = 0,
     ReadOnlyBoolean = 1,
@@ -243,6 +249,11 @@
     Ok(val)
 }
 
+/// Read and parse the first 4 bytes of buf as u32.
+pub fn read_u32_from_start_of_bytes(buf: &[u8]) -> Result<u32, AconfigStorageError> {
+    read_u32_from_bytes(buf, &mut 0)
+}
+
 /// Read and parse bytes as u32
 pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32, AconfigStorageError> {
     let val =
@@ -253,6 +264,16 @@
     Ok(val)
 }
 
+/// Read and parse bytes as u64
+pub fn read_u64_from_bytes(buf: &[u8], head: &mut usize) -> Result<u64, AconfigStorageError> {
+    let val =
+        u64::from_le_bytes(buf[*head..*head + 8].try_into().map_err(|errmsg| {
+            BytesParseFail(anyhow!("fail to parse u64 from bytes: {}", errmsg))
+        })?);
+    *head += 8;
+    Ok(val)
+}
+
 /// Read and parse bytes as string
 pub(crate) fn read_str_from_bytes(
     buf: &[u8],
diff --git a/tools/aconfig/aconfig_storage_file/src/main.rs b/tools/aconfig/aconfig_storage_file/src/main.rs
index 8b9e38d..a9cfd19 100644
--- a/tools/aconfig/aconfig_storage_file/src/main.rs
+++ b/tools/aconfig/aconfig_storage_file/src/main.rs
@@ -20,9 +20,29 @@
     list_flags, list_flags_with_info, read_file_to_bytes, AconfigStorageError, FlagInfoList,
     FlagTable, FlagValueList, PackageTable, StorageFileType,
 };
-
 use clap::{builder::ArgAction, Arg, Command};
+use serde::Serialize;
+use serde_json;
+use std::fmt;
+use std::fs;
+use std::fs::File;
+use std::io::Write;
 
+/**
+ * Usage Examples
+ *
+ * Print file:
+ * $ aconfig-storage print --file=path/to/flag.map --type=flag_map
+ *
+ * List flags:
+ * $ aconfig-storage list --flag-map=path/to/flag.map \
+ * --flag-val=path/to/flag.val --package-map=path/to/package.map
+ *
+ * Write binary file for testing:
+ * $ aconfig-storage print --file=path/to/flag.map --type=flag_map --format=json > flag_map.json
+ * $ vim flag_map.json // Manually make updates
+ * $ aconfig-storage write-bytes --input-file=flag_map.json --output-file=path/to/flag.map --type=flag_map
+ */
 fn cli() -> Command {
     Command::new("aconfig-storage")
         .subcommand_required(true)
@@ -34,7 +54,8 @@
                         .long("type")
                         .required(true)
                         .value_parser(|s: &str| StorageFileType::try_from(s)),
-                ),
+                )
+                .arg(Arg::new("format").long("format").required(false).action(ArgAction::Set)),
         )
         .subcommand(
             Command::new("list")
@@ -50,41 +71,75 @@
                     Arg::new("flag-info").long("flag-info").required(false).action(ArgAction::Set),
                 ),
         )
+        .subcommand(
+            Command::new("write-bytes")
+                // Where to write the output bytes. We suggest using the StorageFileType names (e.g. flag.map).
+                .arg(
+                    Arg::new("output-file")
+                        .long("output-file")
+                        .required(true)
+                        .action(ArgAction::Set),
+                )
+                // Input file should be json.
+                .arg(
+                    Arg::new("input-file").long("input-file").required(true).action(ArgAction::Set),
+                )
+                .arg(
+                    Arg::new("type")
+                        .long("type")
+                        .required(true)
+                        .value_parser(|s: &str| StorageFileType::try_from(s)),
+                ),
+        )
 }
 
 fn print_storage_file(
     file_path: &str,
     file_type: &StorageFileType,
+    as_json: bool,
 ) -> Result<(), AconfigStorageError> {
     let bytes = read_file_to_bytes(file_path)?;
     match file_type {
         StorageFileType::PackageMap => {
             let package_table = PackageTable::from_bytes(&bytes)?;
-            println!("{:?}", package_table);
+            println!("{}", to_print_format(package_table, as_json));
         }
         StorageFileType::FlagMap => {
             let flag_table = FlagTable::from_bytes(&bytes)?;
-            println!("{:?}", flag_table);
+            println!("{}", to_print_format(flag_table, as_json));
         }
         StorageFileType::FlagVal => {
             let flag_value = FlagValueList::from_bytes(&bytes)?;
-            println!("{:?}", flag_value);
+            println!("{}", to_print_format(flag_value, as_json));
         }
         StorageFileType::FlagInfo => {
             let flag_info = FlagInfoList::from_bytes(&bytes)?;
-            println!("{:?}", flag_info);
+            println!("{}", to_print_format(flag_info, as_json));
         }
     }
     Ok(())
 }
 
+fn to_print_format<T>(file_contents: T, as_json: bool) -> String
+where
+    T: Serialize + fmt::Debug,
+{
+    if as_json {
+        serde_json::to_string(&file_contents).unwrap()
+    } else {
+        format!("{:?}", file_contents)
+    }
+}
+
 fn main() -> Result<(), AconfigStorageError> {
     let matches = cli().get_matches();
     match matches.subcommand() {
         Some(("print", sub_matches)) => {
             let file_path = sub_matches.get_one::<String>("file").unwrap();
             let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap();
-            print_storage_file(file_path, file_type)?
+            let format = sub_matches.get_one::<String>("format");
+            let as_json: bool = format == Some(&"json".to_string());
+            print_storage_file(file_path, file_type, as_json)?
         }
         Some(("list", sub_matches)) => {
             let package_map = sub_matches.get_one::<String>("package-map").unwrap();
@@ -96,10 +151,10 @@
                     let flags = list_flags_with_info(package_map, flag_map, flag_val, info_file)?;
                     for flag in flags.iter() {
                         println!(
-                            "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}",
-                            flag.package_name, flag.flag_name, flag.flag_value, flag.value_type,
-                            flag.is_readwrite, flag.has_server_override, flag.has_local_override,
-                        );
+                          "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}",
+                          flag.package_name, flag.flag_name, flag.flag_value, flag.value_type,
+                          flag.is_readwrite, flag.has_server_override, flag.has_local_override,
+                      );
                     }
                 }
                 None => {
@@ -113,6 +168,40 @@
                 }
             }
         }
+        // Converts the JSON form of a file into raw bytes (as used on-device).
+        // Intended to make it easy to generate or update these files for testing.
+        Some(("write-bytes", sub_matches)) => {
+            let input_file_path = sub_matches.get_one::<String>("input-file").unwrap();
+            let input_json = fs::read_to_string(input_file_path).unwrap();
+
+            let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap();
+            let output_bytes: Vec<u8>;
+            match file_type {
+                StorageFileType::FlagVal => {
+                    let list: FlagValueList = serde_json::from_str(&input_json).unwrap();
+                    output_bytes = list.into_bytes();
+                }
+                StorageFileType::FlagInfo => {
+                    let list: FlagInfoList = serde_json::from_str(&input_json).unwrap();
+                    output_bytes = list.into_bytes();
+                }
+                StorageFileType::FlagMap => {
+                    let table: FlagTable = serde_json::from_str(&input_json).unwrap();
+                    output_bytes = table.into_bytes();
+                }
+                StorageFileType::PackageMap => {
+                    let table: PackageTable = serde_json::from_str(&input_json).unwrap();
+                    output_bytes = table.into_bytes();
+                }
+            }
+
+            let output_file_path = sub_matches.get_one::<String>("output-file").unwrap();
+            // This is a test/debug tool, so failing loudly with context is fine.
+            let mut file = File::create(output_file_path)
+                .unwrap_or_else(|err| panic!("can't create file {output_file_path}: {err}"));
+            file.write_all(&output_bytes)
+                .unwrap_or_else(|err| panic!("can't write to {output_file_path}: {err}"));
+        }
         _ => unreachable!(),
     }
     Ok(())
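Because the storage structs now derive Serialize and Deserialize, the JSON/bytes round trip that print --format=json and write-bytes expose can also be done programmatically. A rough sketch for the flag-value file (the helper name is illustrative, not part of this change):

    fn roundtrip_flag_val(bytes: &[u8]) -> Result<Vec<u8>, AconfigStorageError> {
        let list = FlagValueList::from_bytes(bytes)?;
        // Dump to JSON, edit it (by hand or in code), then re-encode to the
        // on-device byte format.
        let json = serde_json::to_string(&list).expect("serialize to JSON");
        let edited: FlagValueList = serde_json::from_str(&json).expect("parse edited JSON");
        Ok(edited.into_bytes())
    }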
diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs
index 007f86e..af39fbc 100644
--- a/tools/aconfig/aconfig_storage_file/src/package_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs
@@ -17,13 +17,17 @@
 //! package table module defines the package table file format and methods for serialization
 //! and deserialization
 
-use crate::{get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes};
+use crate::{
+    get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u64_from_bytes,
+    read_u8_from_bytes,
+};
 use crate::{AconfigStorageError, StorageFileType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Package table header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct PackageTableHeader {
     pub version: u32,
     pub container: String,
@@ -92,7 +96,7 @@
 }
 
 /// Package table node struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct PackageTableNode {
     pub package_name: String,
     pub package_id: u32,
@@ -116,7 +120,16 @@
 
 impl PackageTableNode {
     /// Serialize to bytes
-    pub fn into_bytes(&self) -> Vec<u8> {
+    pub fn into_bytes(&self, version: u32) -> Vec<u8> {
+        match version {
+            1 => Self::into_bytes_v1(self),
+            2 => Self::into_bytes_v2(self),
+            // TODO(b/316357686): into_bytes should return a Result.
+            _ => Self::into_bytes_v2(self),
+        }
+    }
+
+    fn into_bytes_v1(&self) -> Vec<u8> {
         let mut result = Vec::new();
         let name_bytes = self.package_name.as_bytes();
         result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes());
@@ -127,18 +140,64 @@
         result
     }
 
-    /// Deserialize from bytes
-    pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
+    fn into_bytes_v2(&self) -> Vec<u8> {
+        let mut result = Vec::new();
+        let name_bytes = self.package_name.as_bytes();
+        result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes());
+        result.extend_from_slice(name_bytes);
+        result.extend_from_slice(&self.package_id.to_le_bytes());
+        // V2 storage files have a fingerprint. Current struct (v1) does not, so
+        // we write 0.
+        result.extend_from_slice(&0u64.to_le_bytes());
+        result.extend_from_slice(&self.boolean_start_index.to_le_bytes());
+        result.extend_from_slice(&self.next_offset.unwrap_or(0).to_le_bytes());
+        result
+    }
+
+    /// Deserialize from bytes based on file version.
+    pub fn from_bytes(bytes: &[u8], version: u32) -> Result<Self, AconfigStorageError> {
+        match version {
+            1 => Self::from_bytes_v1(bytes),
+            2 => Self::from_bytes_v2(bytes),
+            _ => {
+                return Err(AconfigStorageError::BytesParseFail(anyhow!(
+                    "Binary file is an unsupported version: {}",
+                    version
+                )))
+            }
+        }
+    }
+
+    fn from_bytes_v1(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
         let mut head = 0;
-        let node = Self {
-            package_name: read_str_from_bytes(bytes, &mut head)?,
-            package_id: read_u32_from_bytes(bytes, &mut head)?,
-            boolean_start_index: read_u32_from_bytes(bytes, &mut head)?,
-            next_offset: match read_u32_from_bytes(bytes, &mut head)? {
-                0 => None,
-                val => Some(val),
-            },
+        let package_name = read_str_from_bytes(bytes, &mut head)?;
+        let package_id = read_u32_from_bytes(bytes, &mut head)?;
+        let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?;
+        let next_offset = match read_u32_from_bytes(bytes, &mut head)? {
+            0 => None,
+            val => Some(val),
         };
+
+        let node = Self { package_name, package_id, boolean_start_index, next_offset };
+        Ok(node)
+    }
+
+    fn from_bytes_v2(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
+        let mut head = 0;
+        let package_name = read_str_from_bytes(bytes, &mut head)?;
+        let package_id = read_u32_from_bytes(bytes, &mut head)?;
+
+        // The fingerprint is unused by the current (v1) struct, but we still
+        // need to consume its bytes for v2 files, otherwise the read head would
+        // be misaligned and the subsequent fields would parse incorrectly.
+        let _fingerprint = read_u64_from_bytes(bytes, &mut head)?;
+        let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?;
+        let next_offset = match read_u32_from_bytes(bytes, &mut head)? {
+            0 => None,
+            val => Some(val),
+        };
+
+        let node = Self { package_name, package_id, boolean_start_index, next_offset };
         Ok(node)
     }
 
@@ -151,7 +210,7 @@
 }
 
 /// Package table struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct PackageTable {
     pub header: PackageTableHeader,
     pub buckets: Vec<Option<u32>>,
@@ -179,7 +238,11 @@
         [
             self.header.into_bytes(),
             self.buckets.iter().map(|v| v.unwrap_or(0).to_le_bytes()).collect::<Vec<_>>().concat(),
-            self.nodes.iter().map(|v| v.into_bytes()).collect::<Vec<_>>().concat(),
+            self.nodes
+                .iter()
+                .map(|v| v.into_bytes(self.header.version))
+                .collect::<Vec<_>>()
+                .concat(),
         ]
         .concat()
     }
@@ -198,8 +261,8 @@
             .collect();
         let nodes = (0..num_packages)
             .map(|_| {
-                let node = PackageTableNode::from_bytes(&bytes[head..])?;
-                head += node.into_bytes().len();
+                let node = PackageTableNode::from_bytes(&bytes[head..], header.version)?;
+                head += node.into_bytes(header.version).len();
                 Ok(node)
             })
             .collect::<Result<Vec<_>, AconfigStorageError>>()
@@ -218,7 +281,8 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::test_utils::create_test_package_table;
+    use crate::read_u32_from_start_of_bytes;
+    use crate::{test_utils::create_test_package_table, DEFAULT_FILE_VERSION};
 
     #[test]
     // this test point locks down the table serialization
@@ -231,7 +295,9 @@
 
         let nodes: &Vec<PackageTableNode> = &package_table.nodes;
         for node in nodes.iter() {
-            let reinterpreted_node = PackageTableNode::from_bytes(&node.into_bytes()).unwrap();
+            let reinterpreted_node =
+                PackageTableNode::from_bytes(&node.into_bytes(header.version), header.version)
+                    .unwrap();
             assert_eq!(node, &reinterpreted_node);
         }
 
@@ -248,9 +314,36 @@
     fn test_version_number() {
         let package_table = create_test_package_table();
         let bytes = &package_table.into_bytes();
-        let mut head = 0;
-        let version = read_u32_from_bytes(bytes, &mut head).unwrap();
-        assert_eq!(version, 1);
+        let version = read_u32_from_start_of_bytes(bytes).unwrap();
+        assert_eq!(version, DEFAULT_FILE_VERSION);
+    }
+
+    #[test]
+    fn test_round_trip_v1() {
+        let table_v1: PackageTable = create_test_package_table();
+        let table_bytes_v1 = table_v1.into_bytes();
+
+        // The version code is encoded into the bytes, so from_bytes will
+        // automatically read this as a v1 table.
+        let reinterpreted_table = PackageTable::from_bytes(&table_bytes_v1).unwrap();
+
+        assert_eq!(table_v1, reinterpreted_table);
+    }
+
+    #[test]
+    fn test_round_trip_v2() {
+        // Fake a v2 table: we can set the version to v2 and write the bytes
+        // as v2, but we don't yet have the ability to actually set the
+        // fingerprint.
+        let mut fake_table_v2 = create_test_package_table();
+        fake_table_v2.header.version = 2;
+        let table_bytes_v2 = fake_table_v2.into_bytes();
+
+        // The version code is encoded into the bytes, so from_bytes will
+        // automatically read this as a v2 table.
+        let reinterpreted_table = PackageTable::from_bytes(&table_bytes_v2).unwrap();
+
+        assert_eq!(fake_table_v2, reinterpreted_table);
     }
 
     #[test]
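The only layout difference between the v1 and v2 node encodings above is the 8-byte fingerprint written between package_id and boolean_start_index, so the serialized sizes differ by exactly eight bytes. A sketch (function name illustrative):

    fn node_size_delta(node: &PackageTableNode) -> usize {
        // v2 adds a u64 fingerprint (currently written as 0), nothing else.
        node.into_bytes(2).len() - node.into_bytes(1).len() // == 8
    }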
diff --git a/tools/aconfig/aconfig_storage_file/src/test_utils.rs b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
index 106666c..5c364f6 100644
--- a/tools/aconfig/aconfig_storage_file/src/test_utils.rs
+++ b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
@@ -18,7 +18,7 @@
 use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode};
 use crate::flag_value::{FlagValueHeader, FlagValueList};
 use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode};
-use crate::{AconfigStorageError, StorageFileType, StoredFlagType};
+use crate::{AconfigStorageError, StorageFileType, StoredFlagType, DEFAULT_FILE_VERSION};
 
 use anyhow::anyhow;
 use std::io::Write;
@@ -26,7 +26,7 @@
 
 pub fn create_test_package_table() -> PackageTable {
     let header = PackageTableHeader {
-        version: 1,
+        version: DEFAULT_FILE_VERSION,
         container: String::from("mockup"),
         file_type: StorageFileType::PackageMap as u8,
         file_size: 209,
@@ -78,7 +78,7 @@
 
 pub fn create_test_flag_table() -> FlagTable {
     let header = FlagTableHeader {
-        version: 1,
+        version: DEFAULT_FILE_VERSION,
         container: String::from("mockup"),
         file_type: StorageFileType::FlagMap as u8,
         file_size: 321,
@@ -120,7 +120,7 @@
 
 pub fn create_test_flag_value_list() -> FlagValueList {
     let header = FlagValueHeader {
-        version: 1,
+        version: DEFAULT_FILE_VERSION,
         container: String::from("mockup"),
         file_type: StorageFileType::FlagVal as u8,
         file_size: 35,
@@ -133,7 +133,7 @@
 
 pub fn create_test_flag_info_list() -> FlagInfoList {
     let header = FlagInfoHeader {
-        version: 1,
+        version: DEFAULT_FILE_VERSION,
         container: String::from("mockup"),
         file_type: StorageFileType::FlagInfo as u8,
         file_size: 35,
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
index 9838a7c..757844a 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
@@ -37,9 +37,16 @@
     public Node get(int packageId, String flagName) {
         int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
         int bucketIndex = TableUtils.getBucketIndex(makeKey(packageId, flagName), numBuckets);
+        int newPosition = mHeader.mBucketOffset + bucketIndex * 4;
+        if (newPosition >= mHeader.mNodeOffset) {
+            return null;
+        }
 
-        mReader.position(mHeader.mBucketOffset + bucketIndex * 4);
+        mReader.position(newPosition);
         int nodeIndex = mReader.readInt();
+        if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) {
+            return null;
+        }
 
         while (nodeIndex != -1) {
             mReader.position(nodeIndex);
@@ -50,7 +57,7 @@
             nodeIndex = node.mNextOffset;
         }
 
-        throw new AconfigStorageException("get cannot find flag: " + flagName);
+        return null;
     }
 
     public Header getHeader() {
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
index 773b882..39b7e59 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
@@ -35,23 +35,29 @@
     }
 
     public Node get(String packageName) {
-
         int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
         int bucketIndex = TableUtils.getBucketIndex(packageName.getBytes(UTF_8), numBuckets);
-
-        mReader.position(mHeader.mBucketOffset + bucketIndex * 4);
+        int newPosition = mHeader.mBucketOffset + bucketIndex * 4;
+        if (newPosition >= mHeader.mNodeOffset) {
+            return null;
+        }
+        mReader.position(newPosition);
         int nodeIndex = mReader.readInt();
 
+        if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) {
+            return null;
+        }
+
         while (nodeIndex != -1) {
             mReader.position(nodeIndex);
-            Node node = Node.fromBytes(mReader);
+            Node node = Node.fromBytes(mReader, mHeader.mVersion);
             if (Objects.equals(packageName, node.mPackageName)) {
                 return node;
             }
             nodeIndex = node.mNextOffset;
         }
 
-        throw new AconfigStorageException("get cannot find package: " + packageName);
+        return null;
     }
 
     public Header getHeader() {
@@ -68,7 +74,7 @@
         private int mBucketOffset;
         private int mNodeOffset;
 
-        public static Header fromBytes(ByteBufferReader reader) {
+        private static Header fromBytes(ByteBufferReader reader) {
             Header header = new Header();
             header.mVersion = reader.readInt();
             header.mContainer = reader.readString();
@@ -121,7 +127,29 @@
         private int mBooleanStartIndex;
         private int mNextOffset;
 
-        public static Node fromBytes(ByteBufferReader reader) {
+        private static Node fromBytes(ByteBufferReader reader, int version) {
+            switch (version) {
+                case 1:
+                    return fromBytesV1(reader);
+                case 2:
+                    return fromBytesV2(reader);
+                default:
+                    // TODO: decide whether unsupported versions should throw instead of returning an empty Node.
+                    return new Node();
+            }
+        }
+
+        private static Node fromBytesV1(ByteBufferReader reader) {
+            Node node = new Node();
+            node.mPackageName = reader.readString();
+            node.mPackageId = reader.readInt();
+            node.mBooleanStartIndex = reader.readInt();
+            node.mNextOffset = reader.readInt();
+            node.mNextOffset = node.mNextOffset == 0 ? -1 : node.mNextOffset;
+            return node;
+        }
+
+        private static Node fromBytesV2(ByteBufferReader reader) {
             Node node = new Node();
             node.mPackageName = reader.readString();
             node.mPackageId = reader.readInt();
diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp
index 12e4aca..13d3214 100644
--- a/tools/aconfig/aconfig_storage_file/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp
@@ -10,10 +10,10 @@
         "libbase",
     ],
     data: [
-        "package.map",
-        "flag.map",
-        "flag.val",
-        "flag.info",
+        "data/v1/package.map",
+        "data/v1/flag.map",
+        "data/v1/flag.val",
+        "data/v1/flag.info",
     ],
     test_suites: [
         "device-tests",
@@ -35,10 +35,10 @@
     test_config: "AndroidStorageJaveTest.xml",
     sdk_version: "test_current",
     data: [
-        "package.map",
-        "flag.map",
-        "flag.val",
-        "flag.info",
+        "data/v1/package.map",
+        "data/v1/flag.map",
+        "data/v1/flag.val",
+        "data/v1/flag.info",
     ],
     test_suites: [
         "general-tests",
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.info b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.info
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/flag.info
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/flag.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/flag.map
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/flag.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.val b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.val
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/flag.val
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/flag.val
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/package.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/package.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/package.map
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/package.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
index ebd1dd8..3626f72 100644
--- a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
+++ b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
@@ -53,7 +53,8 @@
 }
 
 TEST(AconfigStorageFileTest, test_list_flag) {
-  auto const test_dir = GetExecutableDirectory();
+  auto const test_base_dir = GetExecutableDirectory();
+  auto const test_dir = test_base_dir + "/data/v1";
   auto const package_map = test_dir + "/package.map";
   auto const flag_map = test_dir + "/flag.map";
   auto const flag_val = test_dir + "/flag.val";
@@ -82,7 +83,8 @@
 }
 
 TEST(AconfigStorageFileTest, test_list_flag_with_info) {
-  auto const test_dir = GetExecutableDirectory();
+  auto const base_test_dir = GetExecutableDirectory();
+  auto const test_dir = base_test_dir + "/data/v1";
   auto const package_map = test_dir + "/package.map";
   auto const flag_map = test_dir + "/flag.map";
   auto const flag_val = test_dir + "/flag.val";
diff --git a/tools/aconfig/aconfig_storage_read_api/Android.bp b/tools/aconfig/aconfig_storage_read_api/Android.bp
index f96b223..80b8ece 100644
--- a/tools/aconfig/aconfig_storage_read_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/Android.bp
@@ -36,10 +36,10 @@
         "librand",
     ],
     data: [
-        "tests/package.map",
-        "tests/flag.map",
-        "tests/flag.val",
-        "tests/flag.info",
+        "tests/data/v1/package.map",
+        "tests/data/v1/flag.map",
+        "tests/data/v1/flag.val",
+        "tests/data/v1/flag.info",
     ],
 }
 
@@ -107,31 +107,12 @@
     afdo: true,
 }
 
-soong_config_module_type {
-    name: "aconfig_lib_cc_shared_link_defaults",
-    module_type: "cc_defaults",
-    config_namespace: "Aconfig",
-    bool_variables: [
-        "read_from_new_storage",
-    ],
-    properties: [
-        "shared_libs",
-    ],
-}
-
-soong_config_bool_variable {
-    name: "read_from_new_storage",
-}
-
-aconfig_lib_cc_shared_link_defaults {
+cc_defaults {
     name: "aconfig_lib_cc_shared_link.defaults",
-    soong_config_variables: {
-        read_from_new_storage: {
-            shared_libs: [
-                "libaconfig_storage_read_api_cc",
-            ],
-        },
-    },
+    shared_libs: select(release_flag("RELEASE_READ_FROM_NEW_STORAGE"), {
+        true: ["libaconfig_storage_read_api_cc"],
+        default: [],
+    }),
 }
 
 cc_defaults {
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
index 6d03377..fe57a6d 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
@@ -16,8 +16,10 @@
 
 //! flag value query module defines the flag value file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
-use aconfig_storage_file::{flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType};
+use crate::AconfigStorageError;
+use aconfig_storage_file::{
+    flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType, MAX_SUPPORTED_FILE_VERSION,
+};
 use anyhow::anyhow;
 
 /// Get flag attribute bitfield
@@ -27,11 +29,11 @@
     flag_index: u32,
 ) -> Result<u8, AconfigStorageError> {
     let interpreted_header = FlagInfoHeader::from_bytes(buf)?;
-    if interpreted_header.version > crate::FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -108,15 +110,15 @@
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
         let mut info_list = create_test_flag_info_list();
-        info_list.header.version = crate::FILE_VERSION + 1;
+        info_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let flag_info = info_list.into_bytes();
         let error = find_flag_attribute(&flag_info[..], FlagValueType::Boolean, 4).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
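Each query module repeats the same gate against MAX_SUPPORTED_FILE_VERSION before reading. This change keeps the check inline; hoisted into a shared helper it would look roughly like this (sketch only):

    fn check_version(file_version: u32) -> Result<(), AconfigStorageError> {
        if file_version > MAX_SUPPORTED_FILE_VERSION {
            return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
                "Cannot read storage file with a higher version of {} with lib version {}",
                file_version,
                MAX_SUPPORTED_FILE_VERSION
            )));
        }
        Ok(())
    }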
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
index a1a4793..e9bc604 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
@@ -16,9 +16,10 @@
 
 //! flag table query module defines the flag table file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
+use crate::AconfigStorageError;
 use aconfig_storage_file::{
     flag_table::FlagTableHeader, flag_table::FlagTableNode, read_u32_from_bytes, StoredFlagType,
+    MAX_SUPPORTED_FILE_VERSION,
 };
 use anyhow::anyhow;
 
@@ -36,11 +37,11 @@
     flag: &str,
 ) -> Result<Option<FlagReadContext>, AconfigStorageError> {
     let interpreted_header = FlagTableHeader::from_bytes(buf)?;
-    if interpreted_header.version > crate::FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -111,15 +112,15 @@
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
         let mut table = create_test_flag_table();
-        table.header.version = crate::FILE_VERSION + 1;
+        table.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let flag_table = table.into_bytes();
         let error = find_flag_read_context(&flag_table[..], 0, "enabled_ro").unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
index 9d32a16..12c1e83 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
@@ -16,18 +16,20 @@
 
 //! flag value query module defines the flag value file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
-use aconfig_storage_file::{flag_value::FlagValueHeader, read_u8_from_bytes};
+use crate::AconfigStorageError;
+use aconfig_storage_file::{
+    flag_value::FlagValueHeader, read_u8_from_bytes, MAX_SUPPORTED_FILE_VERSION,
+};
 use anyhow::anyhow;
 
 /// Query flag value
 pub fn find_boolean_flag_value(buf: &[u8], flag_index: u32) -> Result<bool, AconfigStorageError> {
     let interpreted_header = FlagValueHeader::from_bytes(buf)?;
-    if interpreted_header.version > crate::FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -74,15 +76,15 @@
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
         let mut value_list = create_test_flag_value_list();
-        value_list.header.version = crate::FILE_VERSION + 1;
+        value_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let flag_value = value_list.into_bytes();
         let error = find_boolean_flag_value(&flag_value[..], 4).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/lib.rs b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
index d76cf3f..884f148 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
@@ -46,7 +46,7 @@
 pub use flag_table_query::FlagReadContext;
 pub use package_table_query::PackageReadContext;
 
-use aconfig_storage_file::{read_u32_from_bytes, FILE_VERSION};
+use aconfig_storage_file::read_u32_from_bytes;
 use flag_info_query::find_flag_attribute;
 use flag_table_query::find_flag_read_context;
 use flag_value_query::find_boolean_flag_value;
@@ -412,10 +412,10 @@
         let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
         let flag_val = storage_dir.clone() + "/boot/mockup.val";
         let flag_info = storage_dir.clone() + "/boot/mockup.info";
-        fs::copy("./tests/package.map", &package_map).unwrap();
-        fs::copy("./tests/flag.map", &flag_map).unwrap();
-        fs::copy("./tests/flag.val", &flag_val).unwrap();
-        fs::copy("./tests/flag.info", &flag_info).unwrap();
+        fs::copy("./tests/data/v1/package.map", &package_map).unwrap();
+        fs::copy("./tests/data/v1/flag.map", &flag_map).unwrap();
+        fs::copy("./tests/data/v1/flag.val", &flag_val).unwrap();
+        fs::copy("./tests/data/v1/flag.info", &flag_info).unwrap();
 
         return storage_dir;
     }
@@ -507,9 +507,9 @@
     #[test]
     // this test point locks down flag storage file version number query api
     fn test_storage_version_query() {
-        assert_eq!(get_storage_file_version("./tests/package.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./tests/flag.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./tests/flag.val").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./tests/flag.info").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/package.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/flag.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/flag.val").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/flag.info").unwrap(), 1);
     }
 }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
index 5a16645..32dbed8 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
@@ -97,10 +97,10 @@
         let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
         let flag_val = storage_dir.clone() + "/boot/mockup.val";
         let flag_info = storage_dir.clone() + "/boot/mockup.info";
-        fs::copy("./tests/package.map", &package_map).unwrap();
-        fs::copy("./tests/flag.map", &flag_map).unwrap();
-        fs::copy("./tests/flag.val", &flag_val).unwrap();
-        fs::copy("./tests/flag.info", &flag_info).unwrap();
+        fs::copy("./tests/data/v1/package.map", &package_map).unwrap();
+        fs::copy("./tests/data/v1/flag.map", &flag_map).unwrap();
+        fs::copy("./tests/data/v1/flag.val", &flag_val).unwrap();
+        fs::copy("./tests/data/v1/flag.info", &flag_info).unwrap();
 
         return storage_dir;
     }
@@ -108,9 +108,9 @@
     #[test]
     fn test_mapped_file_contents() {
         let storage_dir = create_test_storage_files();
-        map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/package.map");
-        map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/flag.map");
-        map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/flag.val");
-        map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/flag.info");
+        map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/data/v1/package.map");
+        map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/data/v1/flag.map");
+        map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/data/v1/flag.val");
+        map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/data/v1/flag.info");
     }
 }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
index 2cb854b..acb60f6 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
@@ -16,9 +16,10 @@
 
 //! package table query module defines the package table file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
+use crate::AconfigStorageError;
 use aconfig_storage_file::{
     package_table::PackageTableHeader, package_table::PackageTableNode, read_u32_from_bytes,
+    MAX_SUPPORTED_FILE_VERSION,
 };
 use anyhow::anyhow;
 
@@ -35,11 +36,11 @@
     package: &str,
 ) -> Result<Option<PackageReadContext>, AconfigStorageError> {
     let interpreted_header = PackageTableHeader::from_bytes(buf)?;
-    if interpreted_header.version > FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -55,7 +56,8 @@
     }
 
     loop {
-        let interpreted_node = PackageTableNode::from_bytes(&buf[package_node_offset..])?;
+        let interpreted_node =
+            PackageTableNode::from_bytes(&buf[package_node_offset..], interpreted_header.version)?;
         if interpreted_node.package_name == package {
             return Ok(Some(PackageReadContext {
                 package_id: interpreted_node.package_id,
@@ -118,7 +120,7 @@
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
         let mut table = create_test_package_table();
-        table.header.version = crate::FILE_VERSION + 1;
+        table.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let package_table = table.into_bytes();
         let error =
             find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1")
@@ -127,8 +129,8 @@
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
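After hashing into a bucket, the lookup above walks a chain of nodes linked through next_offset, now threading the header version into from_bytes. The walk reduces to roughly this sketch (function name and parameters are illustrative; offsets are assumed valid):

    fn walk_chain(
        buf: &[u8],
        version: u32,
        mut offset: Option<u32>,
        wanted_package: &str,
    ) -> Result<Option<PackageTableNode>, AconfigStorageError> {
        while let Some(off) = offset {
            let node = PackageTableNode::from_bytes(&buf[off as usize..], version)?;
            if node.package_name == wanted_package {
                return Ok(Some(node));
            }
            // next_offset is None at the end of the chain.
            offset = node.next_offset;
        }
        Ok(None)
    }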
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
index 29ebee5..6fbcdb3 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
@@ -53,9 +53,6 @@
     @UnsupportedAppUsage
     public boolean getBooleanFlagValue(int index) {
         index += mPackageBooleanStartOffset;
-        if (index >= mFlagValueList.size()) {
-            throw new AconfigStorageException("Fail to get boolean flag value");
-        }
         return mFlagValueList.getBoolean(index);
     }
 
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
index ed0c728..b8e510d 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
@@ -1,9 +1,10 @@
 filegroup {
     name: "read_api_test_storage_files",
-    srcs: ["package.map",
-        "flag.map",
-        "flag.val",
-        "flag.info"
+    srcs: [
+        "data/v1/package.map",
+        "data/v1/flag.map",
+        "data/v1/flag.val",
+        "data/v1/flag.info",
     ],
 }
 
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.info b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.info
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/flag.info
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/flag.map
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.val b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.val
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/flag.val
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.val
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/package.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/package.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/package.map
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/package.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
index 6d29045..7537643 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
@@ -45,7 +45,8 @@
   }
 
   void SetUp() override {
-    auto const test_dir = android::base::GetExecutableDirectory();
+    auto const test_base_dir = android::base::GetExecutableDirectory();
+    auto const test_dir = test_base_dir + "/data/v1";
     storage_dir = std::string(root_dir.path);
     auto maps_dir = storage_dir + "/maps";
     auto boot_dir = storage_dir + "/boot";
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
index afc44d4..0d943f8 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
+++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
@@ -26,10 +26,10 @@
         let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
         let flag_val = storage_dir.clone() + "/boot/mockup.val";
         let flag_info = storage_dir.clone() + "/boot/mockup.info";
-        fs::copy("./package.map", package_map).unwrap();
-        fs::copy("./flag.map", flag_map).unwrap();
-        fs::copy("./flag.val", flag_val).unwrap();
-        fs::copy("./flag.info", flag_info).unwrap();
+        fs::copy("./data/v1/package.map", package_map).unwrap();
+        fs::copy("./data/v1/flag.map", flag_map).unwrap();
+        fs::copy("./data/v1/flag.val", flag_val).unwrap();
+        fs::copy("./data/v1/flag.info", flag_info).unwrap();
 
         storage_dir
     }
@@ -200,9 +200,9 @@
 
     #[test]
     fn test_storage_version_query() {
-        assert_eq!(get_storage_file_version("./package.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./flag.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./flag.val").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./flag.info").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./data/v1/package.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./data/v1/flag.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./data/v1/flag.val").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./data/v1/flag.info").unwrap(), 1);
     }
 }
diff --git a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
index 7b43574..03a8fa2 100644
--- a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
+++ b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
@@ -100,18 +100,4 @@
   return {};
 }
 
-android::base::Result<void> create_flag_info(
-    std::string const& package_map,
-    std::string const& flag_map,
-    std::string const& flag_info_out) {
-  auto creation_cxx = create_flag_info_cxx(
-      rust::Str(package_map.c_str()),
-      rust::Str(flag_map.c_str()),
-      rust::Str(flag_info_out.c_str()));
-  if (creation_cxx.success) {
-    return {};
-  } else {
-    return android::base::Error() << creation_cxx.error_message;
-  }
-}
 } // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
index 0bba7ff..50a5188 100644
--- a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
+++ b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
@@ -36,13 +36,4 @@
     uint32_t offset,
     bool value);
 
-/// Create flag info file based on package and flag map
-/// \input package_map: package map file
-/// \input flag_map: flag map file
-/// \input flag_info_out: flag info file to be created
-android::base::Result<void> create_flag_info(
-    std::string const& package_map,
-    std::string const& flag_map,
-    std::string const& flag_info_out);
-
 } // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
index 7e60713..5640922 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
@@ -18,7 +18,7 @@
 
 use aconfig_storage_file::{
     read_u8_from_bytes, AconfigStorageError, FlagInfoBit, FlagInfoHeader, FlagValueType,
-    FILE_VERSION,
+    MAX_SUPPORTED_FILE_VERSION,
 };
 use anyhow::anyhow;
 
@@ -28,11 +28,11 @@
     flag_index: u32,
 ) -> Result<usize, AconfigStorageError> {
     let interpreted_header = FlagInfoHeader::from_bytes(buf)?;
-    if interpreted_header.version > FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot write to storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
index dd15c99..06a9b15 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
@@ -16,7 +16,7 @@
 
 //! flag value update module defines the flag value file write to mapped bytes
 
-use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, FILE_VERSION};
+use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, MAX_SUPPORTED_FILE_VERSION};
 use anyhow::anyhow;
 
 /// Set flag value
@@ -26,11 +26,11 @@
     flag_value: bool,
 ) -> Result<usize, AconfigStorageError> {
     let interpreted_header = FlagValueHeader::from_bytes(buf)?;
-    if interpreted_header.version > FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot write to storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -84,15 +84,15 @@
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
         let mut value_list = create_test_flag_value_list();
-        value_list.header.version = FILE_VERSION + 1;
+        value_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let mut flag_value = value_list.into_bytes();
         let error = update_boolean_flag_value(&mut flag_value[..], 4, true).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot write to storage file with a higher version of {} with lib version {})",
-                FILE_VERSION + 1,
-                FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
diff --git a/tools/aconfig/aconfig_storage_write_api/src/lib.rs b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
index 0396a63..09bb41f 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
@@ -24,15 +24,10 @@
 #[cfg(test)]
 mod test_utils;
 
-use aconfig_storage_file::{
-    AconfigStorageError, FlagInfoHeader, FlagInfoList, FlagInfoNode, FlagTable, FlagValueType,
-    PackageTable, StorageFileType, StoredFlagType, FILE_VERSION,
-};
+use aconfig_storage_file::{AconfigStorageError, FlagValueType};
 
 use anyhow::anyhow;
 use memmap2::MmapMut;
-use std::fs::File;
-use std::io::{Read, Write};
 
 /// Get read write mapped storage files.
 ///
@@ -104,86 +99,6 @@
     })
 }
 
-/// Read in storage file as bytes
-fn read_file_to_bytes(file_path: &str) -> Result<Vec<u8>, AconfigStorageError> {
-    let mut file = File::open(file_path).map_err(|errmsg| {
-        AconfigStorageError::FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg))
-    })?;
-    let mut buffer = Vec::new();
-    file.read_to_end(&mut buffer).map_err(|errmsg| {
-        AconfigStorageError::FileReadFail(anyhow!(
-            "Failed to read bytes from file {}: {}",
-            file_path,
-            errmsg
-        ))
-    })?;
-    Ok(buffer)
-}
-
-/// Create flag info file given package map file and flag map file
-/// \input package_map: package map file
-/// \input flag_map: flag map file
-/// \output flag_info_out: created flag info file
-pub fn create_flag_info(
-    package_map: &str,
-    flag_map: &str,
-    flag_info_out: &str,
-) -> Result<(), AconfigStorageError> {
-    let package_table = PackageTable::from_bytes(&read_file_to_bytes(package_map)?)?;
-    let flag_table = FlagTable::from_bytes(&read_file_to_bytes(flag_map)?)?;
-
-    if package_table.header.container != flag_table.header.container {
-        return Err(AconfigStorageError::FileCreationFail(anyhow!(
-            "container for package map {} and flag map {} does not match",
-            package_table.header.container,
-            flag_table.header.container,
-        )));
-    }
-
-    let mut package_start_index = vec![0; package_table.header.num_packages as usize];
-    for node in package_table.nodes.iter() {
-        package_start_index[node.package_id as usize] = node.boolean_start_index;
-    }
-
-    let mut is_flag_rw = vec![false; flag_table.header.num_flags as usize];
-    for node in flag_table.nodes.iter() {
-        let flag_index = package_start_index[node.package_id as usize] + node.flag_index as u32;
-        is_flag_rw[flag_index as usize] = node.flag_type == StoredFlagType::ReadWriteBoolean;
-    }
-
-    let mut list = FlagInfoList {
-        header: FlagInfoHeader {
-            version: FILE_VERSION,
-            container: flag_table.header.container,
-            file_type: StorageFileType::FlagInfo as u8,
-            file_size: 0,
-            num_flags: flag_table.header.num_flags,
-            boolean_flag_offset: 0,
-        },
-        nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(),
-    };
-
-    list.header.boolean_flag_offset = list.header.into_bytes().len() as u32;
-    list.header.file_size = list.into_bytes().len() as u32;
-
-    let mut file = File::create(flag_info_out).map_err(|errmsg| {
-        AconfigStorageError::FileCreationFail(anyhow!(
-            "fail to create file {}: {}",
-            flag_info_out,
-            errmsg
-        ))
-    })?;
-    file.write_all(&list.into_bytes()).map_err(|errmsg| {
-        AconfigStorageError::FileCreationFail(anyhow!(
-            "fail to write to file {}: {}",
-            flag_info_out,
-            errmsg
-        ))
-    })?;
-
-    Ok(())
-}
-
 // *************************************** //
 // CC INTERLOP
 // *************************************** //
@@ -212,12 +127,6 @@
         pub error_message: String,
     }
 
-    // Flag info file creation return for cc interlop
-    pub struct FlagInfoCreationCXX {
-        pub success: bool,
-        pub error_message: String,
-    }
-
     // Rust export to c++
     extern "Rust" {
         pub fn update_boolean_flag_value_cxx(
@@ -239,12 +148,6 @@
             offset: u32,
             value: bool,
         ) -> FlagHasLocalOverrideUpdateCXX;
-
-        pub fn create_flag_info_cxx(
-            package_map: &str,
-            flag_map: &str,
-            flag_info_out: &str,
-        ) -> FlagInfoCreationCXX;
     }
 }
 
@@ -329,34 +232,15 @@
     }
 }
 
-/// Create flag info file cc interlop
-pub(crate) fn create_flag_info_cxx(
-    package_map: &str,
-    flag_map: &str,
-    flag_info_out: &str,
-) -> ffi::FlagInfoCreationCXX {
-    match create_flag_info(package_map, flag_map, flag_info_out) {
-        Ok(()) => ffi::FlagInfoCreationCXX { success: true, error_message: String::from("") },
-        Err(errmsg) => {
-            ffi::FlagInfoCreationCXX { success: false, error_message: format!("{:?}", errmsg) }
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
     use crate::test_utils::copy_to_temp_file;
-    use aconfig_storage_file::test_utils::{
-        create_test_flag_info_list, create_test_flag_table, create_test_package_table,
-        write_bytes_to_temp_file,
-    };
     use aconfig_storage_file::FlagInfoBit;
     use aconfig_storage_read_api::flag_info_query::find_flag_attribute;
     use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
     use std::fs::File;
     use std::io::Read;
-    use tempfile::NamedTempFile;
 
     fn get_boolean_flag_value_at_offset(file: &str, offset: u32) -> bool {
         let mut f = File::open(&file).unwrap();
@@ -439,31 +323,4 @@
             }
         }
     }
-
-    fn create_empty_temp_file() -> Result<NamedTempFile, AconfigStorageError> {
-        let file = NamedTempFile::new().map_err(|_| {
-            AconfigStorageError::FileCreationFail(anyhow!("Failed to create temp file"))
-        })?;
-        Ok(file)
-    }
-
-    #[test]
-    // this test point locks down the flag info creation
-    fn test_create_flag_info() {
-        let package_table =
-            write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap();
-        let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap();
-        let flag_info = create_empty_temp_file().unwrap();
-
-        let package_table_path = package_table.path().display().to_string();
-        let flag_table_path = flag_table.path().display().to_string();
-        let flag_info_path = flag_info.path().display().to_string();
-
-        assert!(create_flag_info(&package_table_path, &flag_table_path, &flag_info_path).is_ok());
-
-        let flag_info =
-            FlagInfoList::from_bytes(&read_file_to_bytes(&flag_info_path).unwrap()).unwrap();
-        let expected_flag_info = create_test_flag_info_list();
-        assert_eq!(flag_info, expected_flag_info);
-    }
 }
diff --git a/tools/aconfig/aflags/Android.bp b/tools/aconfig/aflags/Android.bp
index c48585a..a7aceee 100644
--- a/tools/aconfig/aflags/Android.bp
+++ b/tools/aconfig/aflags/Android.bp
@@ -10,8 +10,9 @@
     srcs: ["src/main.rs"],
     rustlibs: [
         "libaconfig_device_paths",
+        "libaconfig_flags",
         "libaconfig_protos",
-        "libaconfigd_protos",
+        "libaconfigd_protos_rust",
         "libaconfig_storage_read_api",
         "libaconfig_storage_file",
         "libanyhow",
@@ -19,11 +20,16 @@
         "libnix",
         "libprotobuf",
         "libregex",
+        // TODO: b/371021174 remove this fake dependency once we find a proper strategy to
+        // deal with test aconfig libs that are not present in storage because they are never
+        // used by the actual build
+        "libaconfig_test_rust_library",
     ],
 }
 
 rust_binary {
     name: "aflags",
+    host_supported: true,
     defaults: ["aflags.defaults"],
 }
 
diff --git a/tools/aconfig/aflags/Cargo.toml b/tools/aconfig/aflags/Cargo.toml
index 7dc3436..7efce6d 100644
--- a/tools/aconfig/aflags/Cargo.toml
+++ b/tools/aconfig/aflags/Cargo.toml
@@ -15,3 +15,4 @@
 aconfig_storage_read_api = { version = "0.1.0", path = "../aconfig_storage_read_api" }
 clap = {version = "4.5.2" }
 aconfig_device_paths = { version = "0.1.0", path = "../aconfig_device_paths" }
+aconfig_flags = { version = "0.1.0", path = "../aconfig_flags" }
\ No newline at end of file
diff --git a/tools/aconfig/aflags/src/aconfig_storage_source.rs b/tools/aconfig/aflags/src/aconfig_storage_source.rs
index b2fd3c9..68edf7d 100644
--- a/tools/aconfig/aflags/src/aconfig_storage_source.rs
+++ b/tools/aconfig/aflags/src/aconfig_storage_source.rs
@@ -1,3 +1,4 @@
+use crate::load_protos;
 use crate::{Flag, FlagSource};
 use crate::{FlagPermission, FlagValue, ValuePickedFrom};
 use aconfigd_protos::{
@@ -9,13 +10,18 @@
 use anyhow::Result;
 use protobuf::Message;
 use protobuf::SpecialFields;
+use std::collections::HashMap;
 use std::io::{Read, Write};
 use std::net::Shutdown;
 use std::os::unix::net::UnixStream;
 
 pub struct AconfigStorageSource {}
 
-fn convert(msg: ProtoFlagQueryReturnMessage) -> Result<Flag> {
+fn load_flag_to_container() -> Result<HashMap<String, String>> {
+    Ok(load_protos::load()?.into_iter().map(|p| (p.qualified_name(), p.container)).collect())
+}
+
+fn convert(msg: ProtoFlagQueryReturnMessage, containers: &HashMap<String, String>) -> Result<Flag> {
     let (value, value_picked_from) = match (
         &msg.boot_flag_value,
         msg.default_flag_value,
@@ -55,15 +61,21 @@
         None => return Err(anyhow!("missing permission")),
     };
 
+    let name = msg.flag_name.ok_or(anyhow!("missing flag name"))?;
+    let package = msg.package_name.ok_or(anyhow!("missing package name"))?;
+    let qualified_name = format!("{package}.{name}");
     Ok(Flag {
-        name: msg.flag_name.ok_or(anyhow!("missing flag name"))?,
-        package: msg.package_name.ok_or(anyhow!("missing package name"))?,
+        name,
+        package,
         value,
         permission,
         value_picked_from,
         staged_value,
-        container: "-".to_string(),
-
+        container: containers
+            .get(&qualified_name)
+            .cloned()
+            .unwrap_or_else(|| "<no container>".to_string())
+            .to_string(),
         // TODO: remove once DeviceConfig is not in the CLI.
         namespace: "-".to_string(),
     })
@@ -114,9 +126,13 @@
 
 impl FlagSource for AconfigStorageSource {
     fn list_flags() -> Result<Vec<Flag>> {
+        let containers = load_flag_to_container()?;
         read_from_socket()
             .map(|query_messages| {
-                query_messages.iter().map(|message| convert(message.clone())).collect::<Vec<_>>()
+                query_messages
+                    .iter()
+                    .map(|message| convert(message.clone(), &containers))
+                    .collect::<Vec<_>>()
             })?
             .into_iter()
             .collect()
diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs
index d8912a9..07b7243 100644
--- a/tools/aconfig/aflags/src/main.rs
+++ b/tools/aconfig/aflags/src/main.rs
@@ -116,9 +116,10 @@
     }
 
     fn display_staged_value(&self) -> String {
-        match self.staged_value {
-            Some(v) => format!("(->{})", v),
-            None => "-".to_string(),
+        match (&self.permission, self.staged_value) {
+            (FlagPermission::ReadOnly, _) => "-".to_string(),
+            (FlagPermission::ReadWrite, None) => "-".to_string(),
+            (FlagPermission::ReadWrite, Some(v)) => format!("(->{})", v),
         }
     }
 }
@@ -164,10 +165,6 @@
 enum Command {
     /// List all aconfig flags on this device.
     List {
-        /// Read from the new flag storage.
-        #[clap(long)]
-        use_new_storage: bool,
-
         /// Optionally filter by container name.
         #[clap(short = 'c', long = "container")]
         container: Option<String>,
@@ -184,6 +181,9 @@
         /// <package>.<flag_name>
         qualified_name: String,
     },
+
+    /// Display which flag storage backs aconfig flags.
+    WhichBacking,
 }
 
 struct PaddingInfo {
@@ -282,21 +282,31 @@
     Ok(result)
 }
 
+fn display_which_backing() -> String {
+    if aconfig_flags::auto_generated::enable_only_new_storage() {
+        "aconfig_storage".to_string()
+    } else {
+        "device_config".to_string()
+    }
+}
+
 fn main() -> Result<()> {
     ensure!(nix::unistd::Uid::current().is_root(), "must be root");
 
     let cli = Cli::parse();
     let output = match cli.command {
-        Command::List { use_new_storage: true, container } => {
-            list(FlagSourceType::AconfigStorage, container)
-                .map_err(|err| anyhow!("storage may not be enabled: {err}"))
-                .map(Some)
-        }
-        Command::List { use_new_storage: false, container } => {
-            list(FlagSourceType::DeviceConfig, container).map(Some)
+        Command::List { container } => {
+            if aconfig_flags::auto_generated::enable_only_new_storage() {
+                list(FlagSourceType::AconfigStorage, container)
+                    .map_err(|err| anyhow!("storage may not be enabled: {err}"))
+                    .map(Some)
+            } else {
+                list(FlagSourceType::DeviceConfig, container).map(Some)
+            }
         }
         Command::Enable { qualified_name } => set_flag(&qualified_name, "true").map(|_| None),
         Command::Disable { qualified_name } => set_flag(&qualified_name, "false").map(|_| None),
+        Command::WhichBacking => Ok(Some(display_which_backing())),
     };
     match output {
         Ok(Some(text)) => println!("{text}"),
diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp
index 7704742..1f17e6b 100644
--- a/tools/aconfig/fake_device_config/Android.bp
+++ b/tools/aconfig/fake_device_config/Android.bp
@@ -15,9 +15,7 @@
 java_library {
     name: "fake_device_config",
     srcs: [
-        "src/android/util/Log.java",
-        "src/android/provider/DeviceConfig.java",
-        "src/android/os/StrictMode.java",
+        "src/**/*.java",
     ],
     sdk_version: "none",
     system_modules: "core-all-system-modules",
diff --git a/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java b/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java
new file mode 100644
index 0000000..2f01b8c
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.provider;
+
+/*
+ * This class allows generated aconfig code to compile independently of the framework.
+ */
+public class AconfigPackage {
+
+    /** Flag value is true */
+    public static final int FLAG_BOOLEAN_VALUE_TRUE = 1;
+
+    /** Flag value is false */
+    public static final int FLAG_BOOLEAN_VALUE_FALSE = 0;
+
+    /** Flag value doesn't exist */
+    public static final int FLAG_BOOLEAN_VALUE_NOT_EXIST = 2;
+
+    public static int getBooleanFlagValue(String packageName, String flagName) {
+        return 0;
+    }
+
+    public AconfigPackage(String packageName) {}
+
+    public int getBooleanFlagValue(String flagName) {
+        return 0;
+    }
+}
\ No newline at end of file
diff --git a/tools/aconfig/fake_device_config/src/android/util/Log.java b/tools/aconfig/fake_device_config/src/android/util/Log.java
index 3e7fd0f..79de680 100644
--- a/tools/aconfig/fake_device_config/src/android/util/Log.java
+++ b/tools/aconfig/fake_device_config/src/android/util/Log.java
@@ -5,6 +5,10 @@
         return 0;
     }
 
+    public static int w(String tag, String msg) {
+        return 0;
+    }
+
     public static int e(String tag, String msg) {
         return 0;
     }
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
index 8ee599a..d54c412 100755
--- a/tools/auto_gen_test_config.py
+++ b/tools/auto_gen_test_config.py
@@ -34,6 +34,7 @@
 PLACEHOLDER_PACKAGE = '{PACKAGE}'
 PLACEHOLDER_RUNNER = '{RUNNER}'
 PLACEHOLDER_TEST_TYPE = '{TEST_TYPE}'
+PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS = '{EXTRA_TEST_RUNNER_CONFIGS}'
 
 
 def main(argv):
@@ -59,6 +60,7 @@
       "instrumentation_test_config_template",
       help="Path to the instrumentation test config template.")
   parser.add_argument("--extra-configs", default="")
+  parser.add_argument("--extra-test-runner-configs", default="")
   args = parser.parse_args(argv)
 
   target_config = args.target_config
@@ -66,6 +68,7 @@
   empty_config = args.empty_config
   instrumentation_test_config_template = args.instrumentation_test_config_template
   extra_configs = '\n'.join(args.extra_configs.split('\\n'))
+  extra_test_runner_configs = '\n'.join(args.extra_test_runner_configs.split('\\n'))
 
   module = os.path.splitext(os.path.basename(target_config))[0]
 
@@ -131,6 +134,7 @@
     config = config.replace(PLACEHOLDER_PACKAGE, package)
     config = config.replace(PLACEHOLDER_TEST_TYPE, test_type)
     config = config.replace(PLACEHOLDER_EXTRA_CONFIGS, extra_configs)
+    config = config.replace(PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS, extra_test_runner_configs)
     config = config.replace(PLACEHOLDER_RUNNER, runner)
     with open(target_config, 'w') as config_file:
       config_file.write(config)
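
As a side note on the substitution above, the new {EXTRA_TEST_RUNNER_CONFIGS} placeholder is expanded the same way as the existing ones; a minimal Python sketch follows (the template and option strings are made up, only the placeholder name comes from the script):

    # Hedged sketch: how the placeholder expansion behaves, with made-up values.
    PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS = '{EXTRA_TEST_RUNNER_CONFIGS}'

    template = '<test class="{RUNNER}">\n{EXTRA_TEST_RUNNER_CONFIGS}</test>'

    # The build passes literal "\n" separators, hence the split('\\n') + join step.
    raw_arg = '<option name="a" value="1" />\\n<option name="b" value="2" />'
    extra_test_runner_configs = '\n'.join(raw_arg.split('\\n'))

    config = template.replace('{RUNNER}', 'SomeTestRunner')
    config = config.replace(PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS, extra_test_runner_configs)
    print(config)
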
diff --git a/tools/edit_monitor/Android.bp b/tools/edit_monitor/Android.bp
new file mode 100644
index 0000000..3497821
--- /dev/null
+++ b/tools/edit_monitor/Android.bp
@@ -0,0 +1,66 @@
+// Copyright 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Set of error prone rules to ensure code quality.
+// The PackageLocation check requires androidCompatible=false, otherwise it does not do anything.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+    default_team: "trendy_team_adte",
+}
+
+python_library_host {
+    name: "edit_event_proto",
+    srcs: [
+        "proto/edit_event.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
+
+python_library_host {
+    name: "edit_monitor_lib",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "daemon_manager.py",
+    ],
+}
+
+python_test_host {
+    name: "daemon_manager_test",
+    main: "daemon_manager_test.py",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "daemon_manager_test.py",
+    ],
+    libs: [
+        "edit_monitor_lib",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+}
+
+python_binary_host {
+    name: "edit_monitor",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "main.py",
+    ],
+    libs: [
+        "edit_monitor_lib",
+    ],
+    main: "main.py",
+}
diff --git a/tools/edit_monitor/OWNERS b/tools/edit_monitor/OWNERS
new file mode 100644
index 0000000..8f0f364
--- /dev/null
+++ b/tools/edit_monitor/OWNERS
@@ -0,0 +1 @@
+include platform/tools/asuite:/OWNERS_ADTE_TEAM
\ No newline at end of file
diff --git a/tools/edit_monitor/daemon_manager.py b/tools/edit_monitor/daemon_manager.py
new file mode 100644
index 0000000..445d849
--- /dev/null
+++ b/tools/edit_monitor/daemon_manager.py
@@ -0,0 +1,349 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import hashlib
+import logging
+import multiprocessing
+import os
+import pathlib
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+
+
+DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS = 1
+DEFAULT_MONITOR_INTERVAL_SECONDS = 5
+DEFAULT_MEMORY_USAGE_THRESHOLD = 2000
+DEFAULT_CPU_USAGE_THRESHOLD = 200
+DEFAULT_REBOOT_TIMEOUT_SECONDS = 60 * 60 * 24
+BLOCK_SIGN_FILE = "edit_monitor_block_sign"
+
+
+def default_daemon_target():
+  """Place holder for the default daemon target."""
+  print("default daemon target")
+
+
+class DaemonManager:
+  """Class to manage and monitor the daemon run as a subprocess."""
+
+  def __init__(
+      self,
+      binary_path: str,
+      daemon_target: callable = default_daemon_target,
+      daemon_args: tuple = (),
+  ):
+    self.binary_path = binary_path
+    self.daemon_target = daemon_target
+    self.daemon_args = daemon_args
+
+    self.pid = os.getpid()
+    self.daemon_process = None
+
+    self.max_memory_usage = 0
+    self.max_cpu_usage = 0
+
+    pid_file_dir = pathlib.Path(tempfile.gettempdir()).joinpath("edit_monitor")
+    pid_file_dir.mkdir(parents=True, exist_ok=True)
+    self.pid_file_path = self._get_pid_file_path(pid_file_dir)
+    self.block_sign = pathlib.Path(tempfile.gettempdir()).joinpath(
+        BLOCK_SIGN_FILE
+    )
+
+  def start(self):
+    """Writes the pidfile and starts the daemon proces."""
+    if self.block_sign.exists():
+      logging.warning("Block sign found, exiting...")
+      return
+
+    self._stop_any_existing_instance()
+    self._write_pid_to_pidfile()
+    self._start_daemon_process()
+
+  def monitor_daemon(
+      self,
+      interval: int = DEFAULT_MONITOR_INTERVAL_SECONDS,
+      memory_threshold: float = DEFAULT_MEMORY_USAGE_THRESHOLD,
+      cpu_threshold: float = DEFAULT_CPU_USAGE_THRESHOLD,
+      reboot_timeout: int = DEFAULT_REBOOT_TIMEOUT_SECONDS,
+  ):
+    """Monits the daemon process status.
+
+    Periodically check the CPU/Memory usage of the daemon process as long as the
+    process is still running and kill the process if the resource usage is above
+    given thresholds.
+    """
+    if not self.daemon_process:
+      return
+
+    logging.info("start monitoring daemon process %d.", self.daemon_process.pid)
+    reboot_time = time.time() + reboot_timeout
+    while self.daemon_process.is_alive():
+      if time.time() > reboot_time:
+        self.reboot()
+      try:
+        memory_usage = self._get_process_memory_percent(self.daemon_process.pid)
+        self.max_memory_usage = max(self.max_memory_usage, memory_usage)
+
+        cpu_usage = self._get_process_cpu_percent(self.daemon_process.pid)
+        self.max_cpu_usage = max(self.max_cpu_usage, cpu_usage)
+
+        time.sleep(interval)
+      except Exception as e:
+        # Log the error and continue.
+        logging.warning("Failed to monitor daemon process with error: %s", e)
+
+      if (
+          self.max_memory_usage >= memory_threshold
+          or self.max_cpu_usage >= cpu_threshold
+      ):
+        logging.error(
+            "Daemon process is consuming too many resources, killing..."
+        )
+        self._terminate_process(self.daemon_process.pid)
+
+    logging.info(
+        "Daemon process %d terminated. Max memory usage: %f, Max cpu"
+        " usage: %f.",
+        self.daemon_process.pid,
+        self.max_memory_usage,
+        self.max_cpu_usage,
+    )
+
+  def stop(self):
+    """Stops the daemon process and removes the pidfile."""
+
+    logging.debug("in daemon manager cleanup.")
+    try:
+      if self.daemon_process and self.daemon_process.is_alive():
+        self._terminate_process(self.daemon_process.pid)
+      self._remove_pidfile()
+      logging.debug("Successfully stopped daemon manager.")
+    except Exception as e:
+      logging.exception("Failed to stop daemon manager with error %s", e)
+
+  def reboot(self):
+    """Reboots the current process.
+
+    Stops the current daemon manager and reboots the entire process based on
+    the binary file. Exits directly if the binary file no longer exists.
+    """
+    logging.debug("Rebooting process based on binary %s.", self.binary_path)
+
+    # Stop the current daemon manager first.
+    self.stop()
+
+    # If the binary no longer exists, exit directly.
+    if not os.path.exists(self.binary_path):
+      logging.info("binary %s no longer exists, exiting.", self.binary_path)
+      sys.exit(0)
+
+    try:
+      os.execv(self.binary_path, sys.argv)
+    except OSError as e:
+      logging.exception("Failed to reboot process with error: %s.", e)
+      sys.exit(1)  # Indicate an error occurred
+
+  def cleanup(self):
+    """Wipes out all edit monitor instances in the system.
+
+    Stops all the existing edit monitor instances and places a block sign
+    to prevent any edit monitor process from starting. This method is only
+    used in emergencies when something goes wrong with the edit monitor and
+    immediate cleanup is required to prevent damage to the system.
+    """
+    logging.debug("Start cleaning up all existing instances.")
+
+    try:
+      # First places a block sign to prevent any edit monitor process from starting.
+      self.block_sign.touch()
+    except (FileNotFoundError, PermissionError, OSError):
+      logging.exception("Failed to place the block sign")
+
+    # Finds and kills all the existing instances of edit monitor.
+    existing_instances_pids = self._find_all_instances_pids()
+    for pid in existing_instances_pids:
+      logging.info(
+          "Found existing edit monitor instance with pid %d, killing...", pid
+      )
+      try:
+        self._terminate_process(pid)
+      except Exception:
+        logging.exception("Failed to terminate process %d", pid)
+
+  def _stop_any_existing_instance(self):
+    if not self.pid_file_path.exists():
+      logging.debug("No existing instances.")
+      return
+
+    ex_pid = self._read_pid_from_pidfile()
+
+    if ex_pid:
+      logging.info("Found another instance with pid %d.", ex_pid)
+      self._terminate_process(ex_pid)
+      self._remove_pidfile()
+
+  def _read_pid_from_pidfile(self):
+    with open(self.pid_file_path, "r") as f:
+      return int(f.read().strip())
+
+  def _write_pid_to_pidfile(self):
+    """Creates a pidfile and writes the current pid to the file.
+
+    Raises FileExistsError if the pidfile already exists.
+    """
+    try:
+      # Use the 'x' mode to open the file for exclusive creation
+      with open(self.pid_file_path, "x") as f:
+        f.write(f"{self.pid}")
+    except FileExistsError as e:
+      # This could be caused by a race condition where a user tries to
+      # start two edit monitors at the same time, or because an existing
+      # edit monitor is already running and we cannot kill it for some
+      # reason.
+      logging.exception("pidfile %s already exists.", self.pid_file_path)
+      raise e
+
+  def _start_daemon_process(self):
+    """Starts a subprocess to run the daemon."""
+    p = multiprocessing.Process(
+        target=self.daemon_target, args=self.daemon_args
+    )
+    p.start()
+
+    logging.info("Start subprocess with PID %d", p.pid)
+    self.daemon_process = p
+
+  def _terminate_process(
+      self, pid: int, timeout: int = DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS
+  ):
+    """Terminates a process with given pid.
+
+    It first sends a SIGTERM to the process to allow it to terminate
+    properly within a timeout. If the process has not terminated within
+    the timeout, it is killed forcefully.
+    """
+    try:
+      os.kill(pid, signal.SIGTERM)
+      if not self._wait_for_process_terminate(pid, timeout):
+        logging.warning(
+            "Process %d not terminated within timeout, try force kill", pid
+        )
+        os.kill(pid, signal.SIGKILL)
+    except ProcessLookupError:
+      logging.info("Process with PID %d not found (already terminated)", pid)
+
+  def _wait_for_process_terminate(self, pid: int, timeout: int) -> bool:
+    start_time = time.time()
+
+    while time.time() < start_time + timeout:
+      if not self._is_process_alive(pid):
+        return True
+      time.sleep(1)
+
+    logging.error("Process %d not terminated within %d seconds.", pid, timeout)
+    return False
+
+  def _is_process_alive(self, pid: int) -> bool:
+    try:
+      output = subprocess.check_output(
+          ["ps", "-p", str(pid), "-o", "state="], text=True
+      ).strip()
+      state = output.split()[0]
+      return state != "Z"  # Check if the state is not 'Z' (zombie)
+    except subprocess.CalledProcessError:
+      # Process not found (already dead).
+      return False
+    except (FileNotFoundError, OSError, ValueError) as e:
+      logging.warning(
+          "Unable to check the status for process %d with error: %s.", pid, e
+      )
+      return True
+
+  def _remove_pidfile(self):
+    try:
+      os.remove(self.pid_file_path)
+    except FileNotFoundError:
+      logging.info("pid file %s already removed.", self.pid_file_path)
+
+  def _get_pid_file_path(self, pid_file_dir: pathlib.Path) -> pathlib.Path:
+    """Generates the path to store the pidfile.
+
+    The file path should have the format of "/tmp/edit_monitor/xxxx.lock"
+    where xxxx is a hashed value based on the binary path that starts the
+    process.
+    """
+    hash_object = hashlib.sha256()
+    hash_object.update(self.binary_path.encode("utf-8"))
+    pid_file_path = pid_file_dir.joinpath(hash_object.hexdigest() + ".lock")
+    logging.info("pid_file_path: %s", pid_file_path)
+
+    return pid_file_path
+
+  def _get_process_memory_percent(self, pid: int) -> float:
+    try:
+      with open(f"/proc/{pid}/stat", "r") as f:
+        stat_data = f.readline().split()
+        # RSS is the 24th field in /proc/[pid]/stat
+        rss_pages = int(stat_data[23])
+        return rss_pages * 4 / 1024  # Convert pages to MB (assumes 4 KB pages)
+    except (FileNotFoundError, IndexError, ValueError, IOError) as e:
+      logging.exception("Failed to get memory usage.")
+      raise e
+
+  def _get_process_cpu_percent(self, pid: int, interval: int = 1) -> float:
+    try:
+      total_start_time = self._get_total_cpu_time(pid)
+      with open("/proc/uptime", "r") as f:
+        uptime_start = float(f.readline().split()[0])
+
+      time.sleep(interval)
+
+      total_end_time = self._get_total_cpu_time(pid)
+      with open("/proc/uptime", "r") as f:
+        uptime_end = float(f.readline().split()[0])
+
+      return (
+          (total_end_time - total_start_time)
+          / (uptime_end - uptime_start)
+          * 100
+      )
+    except (FileNotFoundError, IndexError, ValueError, IOError) as e:
+      logging.exception("Failed to get CPU usage.")
+      raise e
+
+  def _get_total_cpu_time(self, pid: int) -> float:
+    with open(f"/proc/{str(pid)}/stat", "r") as f:
+      stats = f.readline().split()
+      # utime is the 14th field in /proc/[pid]/stat measured in clock ticks.
+      utime = int(stats[13])
+      # stime is the 15th field in /proc/[pid]/stat measured in clock ticks.
+      stime = int(stats[14])
+      return (utime + stime) / os.sysconf(os.sysconf_names["SC_CLK_TCK"])
+
+  def _find_all_instances_pids(self) -> list[int]:
+    pids = []
+
+    for file in os.listdir(self.pid_file_path.parent):
+      if file.endswith(".lock"):
+        try:
+          with open(self.pid_file_path.parent.joinpath(file), "r") as f:
+            pids.append(int(f.read().strip()))
+        except (FileNotFoundError, IOError, ValueError, TypeError):
+          logging.exception("Failed to get pid from file path: %s", file)
+
+    return pids
\ No newline at end of file
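
A minimal usage sketch of the DaemonManager defined above, assuming it is importable as edit_monitor.daemon_manager (as the tests below do); the binary path and daemon target here are hypothetical:

    import time
    from edit_monitor import daemon_manager

    def my_daemon_target():
      # Hypothetical daemon body; a real target would watch for edit events.
      time.sleep(2)

    dm = daemon_manager.DaemonManager(
        binary_path='/path/to/edit_monitor_binary',  # hashed into the pidfile name
        daemon_target=my_daemon_target,
    )
    dm.start()                     # stops any previous instance, writes the pidfile, forks the daemon
    dm.monitor_daemon(interval=1)  # returns once the daemon exits or is killed for resource usage
    dm.stop()                      # terminates the daemon if still alive and removes the pidfile
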
diff --git a/tools/edit_monitor/daemon_manager_test.py b/tools/edit_monitor/daemon_manager_test.py
new file mode 100644
index 0000000..d62eade
--- /dev/null
+++ b/tools/edit_monitor/daemon_manager_test.py
@@ -0,0 +1,372 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for DaemonManager."""
+
+import logging
+import multiprocessing
+import os
+import pathlib
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import unittest
+from unittest import mock
+from edit_monitor import daemon_manager
+
+
+TEST_BINARY_FILE = '/path/to/test_binary'
+TEST_PID_FILE_PATH = (
+    '587239c2d1050afdf54512e2d799f3b929f86b43575eb3c7b4bab105dd9bd25e.lock'
+)
+
+
+def simple_daemon(output_file):
+  with open(output_file, 'w') as f:
+    f.write('running daemon target')
+
+
+def long_running_daemon():
+  while True:
+    time.sleep(1)
+
+
+def memory_consume_daemon_target(size_mb):
+  try:
+    size_bytes = size_mb * 1024 * 1024
+    dummy_data = bytearray(size_bytes)
+    time.sleep(10)
+  except MemoryError:
+    print(f'Process failed to allocate {size_mb} MB of memory.')
+
+
+def cpu_consume_daemon_target(target_usage_percent):
+  while True:
+    start_time = time.time()
+    while time.time() - start_time < target_usage_percent / 100:
+      pass  # Busy loop to consume CPU
+
+    # Sleep to reduce CPU usage
+    time.sleep(1 - target_usage_percent / 100)
+
+
+class DaemonManagerTest(unittest.TestCase):
+
+  @classmethod
+  def setUpClass(cls):
+    super().setUpClass()
+    # Configure to print logging to stdout.
+    logging.basicConfig(filename=None, level=logging.DEBUG)
+    console = logging.StreamHandler(sys.stdout)
+    logging.getLogger('').addHandler(console)
+
+  def setUp(self):
+    super().setUp()
+    self.original_tempdir = tempfile.tempdir
+    self.working_dir = tempfile.TemporaryDirectory()
+    # Sets the tempdir under the working dir so any temp files created during
+    # tests will be cleaned.
+    tempfile.tempdir = self.working_dir.name
+
+  def tearDown(self):
+    # Cleans up any child processes left by the tests.
+    self._cleanup_child_processes()
+    self.working_dir.cleanup()
+    # Restores tempdir.
+    tempfile.tempdir = self.original_tempdir
+    super().tearDown()
+
+  def test_start_success_with_no_existing_instance(self):
+    self.assert_run_simple_daemon_success()
+
+  def test_start_success_with_existing_instance_running(self):
+    # Create a running daemon subprocess
+    p = self._create_fake_daemon_process()
+
+    self.assert_run_simple_daemon_success()
+
+  def test_start_success_with_existing_instance_already_dead(self):
+    # Create a pidfile with pid that does not exist.
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
+      f.write('123456')
+
+    self.assert_run_simple_daemon_success()
+
+  def test_start_success_with_existing_instance_from_different_binary(self):
+    # First start an instance based on "some_binary_path"
+    existing_dm = daemon_manager.DaemonManager(
+        'some_binary_path',
+        daemon_target=long_running_daemon,
+    )
+    existing_dm.start()
+
+    self.assert_run_simple_daemon_success()
+    existing_dm.stop()
+
+  def test_start_return_directly_if_block_sign_exists(self):
+    # Creates the block sign.
+    pathlib.Path(self.working_dir.name).joinpath(
+        daemon_manager.BLOCK_SIGN_FILE
+    ).touch()
+
+    dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+    dm.start()
+    # Verify no daemon process is started.
+    self.assertIsNone(dm.daemon_process)
+
+  @mock.patch('os.kill')
+  def test_start_failed_to_kill_existing_instance(self, mock_kill):
+    mock_kill.side_effect = OSError('Unknown OSError')
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
+      f.write('123456')
+
+    with self.assertRaises(OSError) as error:
+      dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+      dm.start()
+
+  def test_start_failed_to_write_pidfile(self):
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    # Makes the directory read-only so writing the pidfile will fail.
+    os.chmod(pid_file_path_dir, 0o555)
+
+    with self.assertRaises(PermissionError) as error:
+      dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+      dm.start()
+
+  def test_start_failed_to_start_daemon_process(self):
+    with self.assertRaises(TypeError) as error:
+      dm = daemon_manager.DaemonManager(
+          TEST_BINARY_FILE, daemon_target='wrong_target', daemon_args=(1)
+      )
+      dm.start()
+
+  def test_monitor_daemon_subprocess_killed_high_memory_usage(self):
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE,
+        daemon_target=memory_consume_daemon_target,
+        daemon_args=(2,),
+    )
+    dm.start()
+    dm.monitor_daemon(interval=1, memory_threshold=2)
+
+    self.assertTrue(dm.max_memory_usage >= 2)
+    self.assert_no_subprocess_running()
+
+  def test_monitor_daemon_subprocess_killed_high_cpu_usage(self):
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE,
+        daemon_target=cpu_consume_daemon_target,
+        daemon_args=(20,),
+    )
+    dm.start()
+    dm.monitor_daemon(interval=1, cpu_threshold=20)
+
+    self.assertTrue(dm.max_cpu_usage >= 20)
+    self.assert_no_subprocess_running()
+
+  @mock.patch('subprocess.check_output')
+  def test_monitor_daemon_failed_does_not_matter(self, mock_output):
+    mock_output.side_effect = OSError('Unknown OSError')
+    self.assert_run_simple_daemon_success()
+
+  @mock.patch('os.execv')
+  def test_monitor_daemon_reboot_triggered(self, mock_execv):
+    binary_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+
+    dm = daemon_manager.DaemonManager(
+        binary_file.name, daemon_target=long_running_daemon
+    )
+    dm.start()
+    dm.monitor_daemon(reboot_timeout=0.5)
+    mock_execv.assert_called_once()
+
+  def test_stop_success(self):
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE, daemon_target=long_running_daemon
+    )
+    dm.start()
+    dm.stop()
+
+    self.assert_no_subprocess_running()
+    self.assertFalse(dm.pid_file_path.exists())
+
+  @mock.patch('os.kill')
+  def test_stop_failed_to_kill_daemon_process(self, mock_kill):
+    mock_kill.side_effect = OSError('Unknown OSError')
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE, daemon_target=long_running_daemon
+    )
+    dm.start()
+    dm.stop()
+
+    self.assertTrue(dm.daemon_process.is_alive())
+    self.assertTrue(dm.pid_file_path.exists())
+
+  @mock.patch('os.remove')
+  def test_stop_failed_to_remove_pidfile(self, mock_remove):
+    mock_remove.side_effect = OSError('Unknown OSError')
+
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE, daemon_target=long_running_daemon
+    )
+    dm.start()
+    dm.stop()
+
+    self.assert_no_subprocess_running()
+    self.assertTrue(dm.pid_file_path.exists())
+
+  @mock.patch('os.execv')
+  def test_reboot_success(self, mock_execv):
+    binary_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+
+    dm = daemon_manager.DaemonManager(
+        binary_file.name, daemon_target=long_running_daemon
+    )
+    dm.start()
+    dm.reboot()
+
+    # Verifies the old process is stopped
+    self.assert_no_subprocess_running()
+    self.assertFalse(dm.pid_file_path.exists())
+
+    mock_execv.assert_called_once()
+
+  @mock.patch('os.execv')
+  def test_reboot_binary_no_longer_exists(self, mock_execv):
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE, daemon_target=long_running_daemon
+    )
+    dm.start()
+
+    with self.assertRaises(SystemExit) as cm:
+      dm.reboot()
+    mock_execv.assert_not_called()
+    self.assertEqual(cm.exception.code, 0)
+
+  @mock.patch('os.execv')
+  def test_reboot_failed(self, mock_execv):
+    mock_execv.side_effect = OSError('Unknown OSError')
+    binary_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+
+    dm = daemon_manager.DaemonManager(
+        binary_file.name, daemon_target=long_running_daemon
+    )
+    dm.start()
+
+    with self.assertRaises(SystemExit) as cm:
+      dm.reboot()
+    self.assertEqual(cm.exception.code, 1)
+
+  def assert_run_simple_daemon_success(self):
+    daemon_output_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE,
+        daemon_target=simple_daemon,
+        daemon_args=(daemon_output_file.name,),
+    )
+    dm.start()
+    dm.monitor_daemon(interval=1)
+
+    # Verifies the expected pid file is created.
+    expected_pid_file_path = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor', TEST_PID_FILE_PATH
+    )
+    self.assertTrue(expected_pid_file_path.exists())
+
+    # Verify the daemon process is executed successfully.
+    with open(daemon_output_file.name, 'r') as f:
+      contents = f.read()
+      self.assertEqual(contents, 'running daemon target')
+
+  def assert_no_subprocess_running(self):
+    child_pids = self._get_child_processes(os.getpid())
+    for child_pid in child_pids:
+      self.assertFalse(
+          self._is_process_alive(child_pid), f'process {child_pid} still alive'
+      )
+
+  def _get_child_processes(self, parent_pid: int) -> list[int]:
+    try:
+      output = subprocess.check_output(
+          ['ps', '-o', 'pid,ppid', '--no-headers'], text=True
+      )
+
+      child_processes = []
+      for line in output.splitlines():
+        pid, ppid = line.split()
+        if int(ppid) == parent_pid:
+          child_processes.append(int(pid))
+      return child_processes
+    except subprocess.CalledProcessError as e:
+      self.fail(f'failed to get child process, error: {e}')
+
+  def _is_process_alive(self, pid: int) -> bool:
+    try:
+      output = subprocess.check_output(
+          ['ps', '-p', str(pid), '-o', 'state='], text=True
+      ).strip()
+      state = output.split()[0]
+      return state != 'Z'  # Check if the state is not 'Z' (zombie)
+    except subprocess.CalledProcessError:
+      return False
+
+  def _cleanup_child_processes(self):
+    child_pids = self._get_child_processes(os.getpid())
+    for child_pid in child_pids:
+      try:
+        os.kill(child_pid, signal.SIGKILL)
+      except ProcessLookupError:
+        # process already terminated
+        pass
+
+  def _create_fake_daemon_process(
+      self, name: str = ''
+  ) -> multiprocessing.Process:
+    # Create a long running subprocess
+    p = multiprocessing.Process(target=long_running_daemon)
+    p.start()
+
+    # Create the pidfile with the subprocess pid
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    with open(pid_file_path_dir.joinpath(name + 'pid.lock'), 'w') as f:
+      f.write(str(p.pid))
+    return p
+
+
+if __name__ == '__main__':
+  unittest.main()
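
The TEST_PID_FILE_PATH constant above is just the SHA-256 hex digest of TEST_BINARY_FILE plus a ".lock" suffix, mirroring DaemonManager._get_pid_file_path; a quick sketch to recompute it:

    import hashlib

    TEST_BINARY_FILE = '/path/to/test_binary'

    h = hashlib.sha256()
    h.update(TEST_BINARY_FILE.encode('utf-8'))
    print(h.hexdigest() + '.lock')  # should match TEST_PID_FILE_PATH in the tests above
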
diff --git a/tools/edit_monitor/main.py b/tools/edit_monitor/main.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/edit_monitor/main.py
diff --git a/tools/edit_monitor/proto/edit_event.proto b/tools/edit_monitor/proto/edit_event.proto
new file mode 100644
index 0000000..b3630bc
--- /dev/null
+++ b/tools/edit_monitor/proto/edit_event.proto
@@ -0,0 +1,58 @@
+syntax = "proto3";
+
+package tools.asuite.edit_monitor;
+
+message EditEvent {
+  enum EditType {
+    UNSUPPORTED_TYPE = 0;
+    CREATE = 1;
+    MODIFY = 2;
+    DELETE = 3;
+    MOVE = 4;
+  }
+
+  enum ErrorType {
+    UNKNOWN_ERROR = 0;
+    FAILED_TO_START_EDIT_MONITOR = 1;
+    FAILED_TO_STOP_EDIT_MONITOR = 2;
+    FAILED_TO_REBOOT_EDIT_MONITOR = 3;
+    KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE = 4;
+    FORCE_CLEANUP = 5;
+  }
+
+  // Event that logs a single edit
+  message SingleEditEvent {
+    // Full path of the file that was edited.
+    string file_path = 1;
+    // Type of the edit.
+    EditType edit_type = 2;
+  }
+
+  // Event that logs aggregated info for a set of edits.
+  message AggregatedEditEvent {
+    int32 num_edits = 1;
+  }
+
+  // Event that logs errors happened in the edit monitor.
+  message EditMonitorErrorEvent {
+    ErrorType error_type = 1;
+    string error_msg = 2;
+    string stack_trace = 3;
+  }
+
+  // ------------------------
+  // FIELDS FOR EditEvent
+  // ------------------------
+  // Internal user name.
+  string user_name = 1;
+  // The root of Android source.
+  string source_root = 2;
+  // Name of the host workstation.
+  string host_name = 3;
+
+  oneof event {
+    SingleEditEvent single_edit_event = 4;
+    AggregatedEditEvent aggregated_edit_event = 5;
+    EditMonitorErrorEvent edit_monitor_error_event = 6;
+  }
+}
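
For illustration, populating the EditEvent message above from Python might look like the sketch below; the generated module name (edit_event_pb2) is an assumption based on the .proto file name:

    from edit_event_pb2 import EditEvent  # assumed generated module name

    event = EditEvent(
        user_name='someone',
        source_root='/src/android',
        host_name='workstation-1',
    )
    # Setting a field of single_edit_event selects that branch of the oneof.
    event.single_edit_event.file_path = '/src/android/foo/bar.cc'
    event.single_edit_event.edit_type = EditEvent.MODIFY
    payload = event.SerializeToString()
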
diff --git a/tools/filelistdiff/Android.bp b/tools/filelistdiff/Android.bp
index ab766d6..3826e50 100644
--- a/tools/filelistdiff/Android.bp
+++ b/tools/filelistdiff/Android.bp
@@ -24,4 +24,9 @@
 prebuilt_etc_host {
     name: "system_image_diff_allowlist",
     src: "allowlist",
-}
\ No newline at end of file
+}
+
+prebuilt_etc_host {
+    name: "system_image_diff_allowlist_next",
+    src: "allowlist_next",
+}
diff --git a/tools/filelistdiff/OWNERS b/tools/filelistdiff/OWNERS
new file mode 100644
index 0000000..690fb17
--- /dev/null
+++ b/tools/filelistdiff/OWNERS
@@ -0,0 +1 @@
+per-file allowlist = justinyun@google.com, jeongik@google.com, kiyoungkim@google.com, inseob@google.com
diff --git a/tools/filelistdiff/allowlist b/tools/filelistdiff/allowlist
index c4a464d..eb78587 100644
--- a/tools/filelistdiff/allowlist
+++ b/tools/filelistdiff/allowlist
@@ -1,49 +1,5 @@
-# Known diffs only in the KATI system image
-etc/NOTICE.xml.gz
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex.fsv_meta
-lib/aaudio-aidl-cpp.so
-lib/android.hardware.biometrics.fingerprint@2.1.so
-lib/android.hardware.radio.config@1.0.so
-lib/android.hardware.radio.deprecated@1.0.so
-lib/android.hardware.radio@1.0.so
-lib/android.hardware.radio@1.1.so
-lib/android.hardware.radio@1.2.so
-lib/android.hardware.radio@1.3.so
-lib/android.hardware.radio@1.4.so
-lib/android.hardware.secure_element@1.0.so
-lib/com.android.media.aaudio-aconfig-cc.so
-lib/heapprofd_client.so
-lib/heapprofd_client_api.so
-lib/libaaudio.so
-lib/libaaudio_internal.so
-lib/libalarm_jni.so
-lib/libamidi.so
-lib/libcups.so
-lib/libjni_deviceAsWebcam.so
-lib/libprintspooler_jni.so
-lib/libvendorsupport.so
-lib/libwfds.so
-lib/libyuv.so
-
-# b/351258461
-adb_keys
+# Known diffs that are installed in either system image depending on the configuration
+# b/353429422
 init.environ.rc
-
-# Known diffs only in the Soong system image
-lib/libhidcommand_jni.so
-lib/libuinputcommand_jni.so
-
-# Known diffs in internal source
-bin/uprobestats
-etc/aconfig/flag.map
-etc/aconfig/flag.val
-etc/aconfig/package.map
-etc/bpf/uprobestats/BitmapAllocation.o
-etc/bpf/uprobestats/GenericInstrumentation.o
-etc/init/UprobeStats.rc
-lib/libuprobestats_client.so
-lib64/libuprobestats_client.so
-priv-app/DeviceDiagnostics/DeviceDiagnostics.apk
\ No newline at end of file
+# b/338342381
+etc/NOTICE.xml.gz
diff --git a/tools/filelistdiff/allowlist_next b/tools/filelistdiff/allowlist_next
new file mode 100644
index 0000000..8f91c9f
--- /dev/null
+++ b/tools/filelistdiff/allowlist_next
@@ -0,0 +1,9 @@
+# Allowlist only for the next release configuration.
+# TODO(b/369678122): The list will be cleared when the trunk configurations are
+# available to the next.
+
+# KATI only installed files
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex.fsv_meta
diff --git a/tools/filelistdiff/file_list_diff.py b/tools/filelistdiff/file_list_diff.py
index cdc5b2e..951325f 100644
--- a/tools/filelistdiff/file_list_diff.py
+++ b/tools/filelistdiff/file_list_diff.py
@@ -19,13 +19,16 @@
 COLOR_ERROR = '\033[91m'
 COLOR_NORMAL = '\033[0m'
 
-def find_unique_items(kati_installed_files, soong_installed_files, allowlist, system_module_name):
+def find_unique_items(kati_installed_files, soong_installed_files, system_module_name, allowlists):
     with open(kati_installed_files, 'r') as kati_list_file, \
-            open(soong_installed_files, 'r') as soong_list_file, \
-            open(allowlist, 'r') as allowlist_file:
+            open(soong_installed_files, 'r') as soong_list_file:
         kati_files = set(kati_list_file.read().split())
         soong_files = set(soong_list_file.read().split())
-        allowed_files = set(filter(lambda x: len(x), map(lambda x: x.lstrip().split('#',1)[0].rstrip() , allowlist_file.read().split('\n'))))
+
+    allowed_files = set()
+    for allowlist in allowlists:
+        with open(allowlist, 'r') as allowlist_file:
+            allowed_files.update(set(filter(lambda x: len(x), map(lambda x: x.lstrip().split('#', 1)[0].rstrip(), allowlist_file.read().split('\n')))))
 
     def is_unknown_diff(filepath):
         return not filepath in allowed_files
@@ -34,23 +37,24 @@
     unique_in_soong = set(filter(is_unknown_diff, soong_files - kati_files))
 
     if unique_in_kati:
-        print(f'{COLOR_ERROR}Please add following modules into system image module {system_module_name}.{COLOR_NORMAL}')
-        print(f'{COLOR_WARNING}KATI only module(s):{COLOR_NORMAL}')
+        print('')
+        print(f'{COLOR_ERROR}Missing required modules in {system_module_name} module.{COLOR_NORMAL}')
+        print(f'To resolve this issue, please add the modules to the Android.bp file for the {system_module_name} to install the following KATI only installed files.')
+        print(f'You can find the correct Android.bp file using the command "gomod {system_module_name}".')
+        print(f'{COLOR_WARNING}KATI only installed file(s):{COLOR_NORMAL}')
         for item in sorted(unique_in_kati):
-            print(item)
+            print('  '+item)
 
     if unique_in_soong:
-        if unique_in_kati:
-            print('')
-
-        print(f'{COLOR_ERROR}Please add following modules into build/make/target/product/base_system.mk.{COLOR_NORMAL}')
-        print(f'{COLOR_WARNING}Soong only module(s):{COLOR_NORMAL}')
+        print('')
+        print(f'{COLOR_ERROR}Missing packages in base_system.mk.{COLOR_NORMAL}')
+        print('Please add packages into build/make/target/product/base_system.mk or build/make/tools/filelistdiff/allowlist to install or skip the following Soong only installed files.')
+        print(f'{COLOR_WARNING}Soong only installed file(s):{COLOR_NORMAL}')
         for item in sorted(unique_in_soong):
-            print(item)
+            print('  '+item)
 
     if unique_in_kati or unique_in_soong:
         print('')
-        print(f'{COLOR_ERROR}FAILED: System image from KATI and SOONG differs from installed file list.{COLOR_NORMAL}')
         sys.exit(1)
 
 
@@ -59,8 +63,8 @@
 
     parser.add_argument('kati_installed_file_list')
     parser.add_argument('soong_installed_file_list')
-    parser.add_argument('allowlist')
     parser.add_argument('system_module_name')
+    parser.add_argument('--allowlists', nargs='+')
     args = parser.parse_args()
 
-    find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.allowlist, args.system_module_name)
\ No newline at end of file
+    find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.system_module_name, args.allowlists)
\ No newline at end of file
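
The filter/map chain above that parses the allowlists is dense; it is equivalent to the following more explicit sketch (the allowlist file names are placeholders):

    # Equivalent form of the allowlist parsing: drop '#' comments,
    # strip whitespace, and skip empty lines.
    allowed_files = set()
    for allowlist in ['allowlist', 'allowlist_next']:  # placeholder paths
        with open(allowlist, 'r') as allowlist_file:
            for line in allowlist_file.read().split('\n'):
                entry = line.lstrip().split('#', 1)[0].rstrip()
                if entry:
                    allowed_files.add(entry)
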
diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go
index 23c7abd..89ac78f 100644
--- a/tools/ide_query/ide_query.go
+++ b/tools/ide_query/ide_query.go
@@ -363,6 +363,7 @@
 				Id:              name,
 				SourceFilePaths: mod.Srcs,
 				GeneratedFiles:  genFiles(env, paths),
+				DependencyIds:   mod.Deps,
 			}
 
 			for _, d := range mod.Deps {
diff --git a/tools/ide_query/ide_query.sh b/tools/ide_query/ide_query.sh
index 6f9b0c4..8dfffc1 100755
--- a/tools/ide_query/ide_query.sh
+++ b/tools/ide_query/ide_query.sh
@@ -19,7 +19,7 @@
 require_top
 
 # Ensure cogsetup (out/ will be symlink outside the repo)
-. ${TOP}/build/make/cogsetup.sh
+setup_cog_env_if_needed
 
 case $(uname -s) in
     Linux)
diff --git a/tools/releasetools/ota_from_raw_img.py b/tools/releasetools/ota_from_raw_img.py
index 03b44f1..3b9374a 100644
--- a/tools/releasetools/ota_from_raw_img.py
+++ b/tools/releasetools/ota_from_raw_img.py
@@ -105,9 +105,6 @@
 
     if args.package_key:
       logger.info("Signing payload...")
-      # TODO: remove OPTIONS when no longer used as fallback in payload_signer
-      common.OPTIONS.payload_signer_args = None
-      common.OPTIONS.payload_signer_maximum_signature_size = None
       signer = PayloadSigner(args.package_key, args.private_key_suffix,
                              key_passwords[args.package_key],
                              payload_signer=args.payload_signer,
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 985cd56..6446e1f 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -264,6 +264,10 @@
 
   --compression_factor
       Specify the maximum block size to be compressed at once during OTA. supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k
+
+  --full_ota_partitions
+      Specify the list of partitions that should be updated in full OTA fashion,
+      even if an incremental OTA is about to be generated
 """
 
 from __future__ import print_function
@@ -283,7 +287,7 @@
 import ota_utils
 import payload_signer
 from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata,
-                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir)
+                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir, TARGET_FILES_IMAGES_SUBDIR)
 from common import DoesInputFileContain, IsSparseImage
 import target_files_diff
 from non_ab_ota import GenerateNonAbOtaPackage
@@ -337,6 +341,7 @@
 OPTIONS.max_threads = None
 OPTIONS.vabc_cow_version = None
 OPTIONS.compression_factor = None
+OPTIONS.full_ota_partitions = None
 
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
@@ -892,6 +897,14 @@
 
   if source_file is not None:
     source_file = ExtractTargetFiles(source_file)
+    if OPTIONS.full_ota_partitions:
+      for partition in OPTIONS.full_ota_partitions:
+        for subdir in TARGET_FILES_IMAGES_SUBDIR:
+          image_path = os.path.join(source_file, subdir, partition + ".img")
+          if os.path.exists(image_path):
+            logger.info(
+                "Ignoring source image %s for partition %s because it is configured to use full OTA", image_path, partition)
+            os.remove(image_path)
     assert "ab_partitions" in OPTIONS.source_info_dict, \
         "META/ab_partitions.txt is required for ab_update."
     assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -1193,7 +1206,7 @@
 
 def main(argv):
 
-  def option_handler(o, a):
+  def option_handler(o, a: str):
     if o in ("-i", "--incremental_from"):
       OPTIONS.incremental_source = a
     elif o == "--full_radio":
@@ -1320,6 +1333,9 @@
       else:
         raise ValueError("Cannot parse value %r for option %r - only "
                          "integers are allowed." % (a, o))
+    elif o == "--full_ota_partitions":
+      OPTIONS.full_ota_partitions = set(
+          a.strip().strip("\"").strip("'").split(","))
     else:
       return False
     return True
@@ -1370,6 +1386,7 @@
                                  "max_threads=",
                                  "vabc_cow_version=",
                                  "compression_factor=",
+                                 "full_ota_partitions=",
                              ], extra_option_handler=[option_handler, payload_signer.signer_options])
   common.InitLogging()
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index a72342f..4ad97e0 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -184,6 +184,7 @@
 import shutil
 import stat
 import sys
+import shlex
 import tempfile
 import zipfile
 from xml.etree import ElementTree
@@ -582,6 +583,24 @@
         filename.endswith("/prop.default")
 
 
+def GetOtaSigningArgs():
+  args = []
+  if OPTIONS.package_key:
+    args.extend(["--package_key", OPTIONS.package_key])
+  if OPTIONS.payload_signer:
+    args.extend(["--payload_signer=" + OPTIONS.payload_signer])
+  if OPTIONS.payload_signer_args:
+    args.extend(["--payload_signer_args=" + shlex.join(OPTIONS.payload_signer_args)])
+  if OPTIONS.search_path:
+    args.extend(["--search_path", OPTIONS.search_path])
+  if OPTIONS.payload_signer_maximum_signature_size:
+    args.extend(["--payload_signer_maximum_signature_size",
+                OPTIONS.payload_signer_maximum_signature_size])
+  if OPTIONS.private_key_suffix:
+    args.extend(["--private_key_suffix", OPTIONS.private_key_suffix])
+  return args
+
+
 def RegenerateKernelPartitions(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info):
   """Re-generate boot and dtbo partitions using new signing configuration"""
   files_to_unzip = [
@@ -648,9 +667,9 @@
       if os.path.exists(signed_16k_dtbo_image):
         signed_dtbo_image += ":" + signed_16k_dtbo_image
 
-
-  args = ["ota_from_raw_img", "--package_key", OPTIONS.package_key,
+  args = ["ota_from_raw_img",
           "--max_timestamp", timestamp, "--output", input_ota.name]
+  args.extend(GetOtaSigningArgs())
   if "dtbo" in partitions:
     args.extend(["--partition_name", "boot,dtbo",
                 signed_boot_image, signed_dtbo_image])
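
GetOtaSigningArgs above uses shlex.join so that a payload_signer_args list containing spaces survives being forwarded as a single --payload_signer_args= value; a minimal round-trip sketch (the signer arguments are hypothetical):

    import shlex

    payload_signer_args = ['--keystore', '/path/with spaces/release.keystore']

    flag = '--payload_signer_args=' + shlex.join(payload_signer_args)
    print(flag)
    # The consumer can recover the original list:
    print(shlex.split(flag.split('=', 1)[1]))
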
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
index 6901b06..4f6d3b7 100644
--- a/tools/sbom/Android.bp
+++ b/tools/sbom/Android.bp
@@ -33,6 +33,13 @@
     ],
 }
 
+python_library_host {
+    name: "compliance_metadata",
+    srcs: [
+        "compliance_metadata.py",
+    ],
+}
+
 python_binary_host {
     name: "gen_sbom",
     srcs: [
@@ -44,6 +51,7 @@
         },
     },
     libs: [
+        "compliance_metadata",
         "metadata_file_proto_py",
         "libprotobuf-python",
         "sbom_lib",
@@ -109,3 +117,17 @@
         "sbom_lib",
     ],
 }
+
+python_binary_host {
+    name: "gen_notice_xml",
+    srcs: [
+        "gen_notice_xml.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+    ],
+}
diff --git a/tools/sbom/compliance_metadata.py b/tools/sbom/compliance_metadata.py
new file mode 100644
index 0000000..9910217
--- /dev/null
+++ b/tools/sbom/compliance_metadata.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sqlite3
+
+class MetadataDb:
+  def __init__(self, db):
+    self.conn = sqlite3.connect(':memory:')
+    self.conn.row_factory = sqlite3.Row
+    with sqlite3.connect(db) as c:
+      c.backup(self.conn)
+    self.reorg()
+
+  def reorg(self):
+    # package_license table
+    self.conn.execute("create table package_license as "
+                      "select name as package, pkg_default_applicable_licenses as license "
+                      "from modules "
+                      "where module_type = 'package' ")
+    cursor = self.conn.execute("select package,license from package_license where license like '% %'")
+    multi_licenses_packages = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for p in multi_licenses_packages:
+      licenses = p['license'].strip().split(' ')
+      for lic in licenses:
+        rows.append((p['package'], lic))
+    self.conn.executemany('insert into package_license values (?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from package_license where license like '% %'")
+    self.conn.commit()
+
+    # module_license table
+    self.conn.execute("create table module_license as "
+                      "select distinct name as module, package, licenses as license "
+                      "from modules "
+                      "where licenses != '' ")
+    cursor = self.conn.execute("select module,package,license from module_license where license like '% %'")
+    multi_licenses_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_licenses_modules:
+      licenses = m['license'].strip().split(' ')
+      for lic in licenses:
+        rows.append((m['module'], m['package'],lic))
+    self.conn.executemany('insert into module_license values (?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_license where license like '% %'")
+    self.conn.commit()
+
+    # module_installed_file table
+    self.conn.execute("create table module_installed_file as "
+                      "select id as module_id, name as module_name, package, installed_files as installed_file "
+                      "from modules "
+                      "where installed_files != '' ")
+    cursor = self.conn.execute("select module_id, module_name, package, installed_file "
+                               "from module_installed_file where installed_file like '% %'")
+    multi_installed_file_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_installed_file_modules:
+      installed_files = m['installed_file'].strip().split(' ')
+      for f in installed_files:
+        rows.append((m['module_id'], m['module_name'], m['package'], f))
+    self.conn.executemany('insert into module_installed_file values (?, ?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_installed_file where installed_file like '% %'")
+    self.conn.commit()
+
+    # module_built_file table
+    self.conn.execute("create table module_built_file as "
+                      "select id as module_id, name as module_name, package, built_files as built_file "
+                      "from modules "
+                      "where built_files != '' ")
+    cursor = self.conn.execute("select module_id, module_name, package, built_file "
+                               "from module_built_file where built_file like '% %'")
+    multi_built_file_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_built_file_modules:
+      built_files = m['built_file'].strip().split(' ')
+      for f in built_files:
+        rows.append((m['module_id'], m['module_name'], m['package'], f))
+    self.conn.executemany('insert into module_built_file values (?, ?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_built_file where built_file like '% %'")
+    self.conn.commit()
+
+
+    # Indexes
+    self.conn.execute('create index idx_modules_id on modules (id)')
+    self.conn.execute('create index idx_modules_name on modules (name)')
+    self.conn.execute('create index idx_package_license_package on package_license (package)')
+    self.conn.execute('create index idx_package_license_license on package_license (license)')
+    self.conn.execute('create index idx_module_license_module on module_license (module)')
+    self.conn.execute('create index idx_module_license_license on module_license (license)')
+    self.conn.execute('create index idx_module_installed_file_module_id on module_installed_file (module_id)')
+    self.conn.execute('create index idx_module_installed_file_installed_file on module_installed_file (installed_file)')
+    self.conn.execute('create index idx_module_built_file_module_id on module_built_file (module_id)')
+    self.conn.execute('create index idx_module_built_file_built_file on module_built_file (built_file)')
+    self.conn.commit()
+
+  def dump_debug_db(self, debug_db):
+    with sqlite3.connect(debug_db) as c:
+      self.conn.backup(c)
+
+  def get_installed_files(self):
+    # Get all records from table make_metadata, which contains all installed files and corresponding make modules' metadata
+    cursor = self.conn.execute('select installed_file, module_path, is_prebuilt_make_module, product_copy_files, kernel_module_copy_files, is_platform_generated, license_text from make_metadata')
+    rows = cursor.fetchall()
+    cursor.close()
+    installed_files_metadata = []
+    for row in rows:
+      metadata = dict(zip(row.keys(), row))
+      installed_files_metadata.append(metadata)
+    return installed_files_metadata
+
+  def get_soong_modules(self):
+    # Get all records from table modules, which contains metadata of all soong modules
+    cursor = self.conn.execute('select name, package, package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files from modules')
+    rows = cursor.fetchall()
+    cursor.close()
+    soong_modules = []
+    for row in rows:
+      soong_module = dict(zip(row.keys(), row))
+      soong_modules.append(soong_module)
+    return soong_modules
+
+  def get_package_licenses(self, package):
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from package_license pl join modules m on pl.license = m.name '
+                               'where pl.package = ?',
+                               ('//' + package,))
+    rows = cursor.fetchall()
+    licenses = {}
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    return licenses
+
+  def get_module_licenses(self, module_name, package):
+    licenses = {}
+    # If property "licenses" is defined on module
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from module_license ml join modules m on ml.license = m.name '
+                               'where ml.module = ? and ml.package = ?',
+                               (module_name, package))
+    rows = cursor.fetchall()
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    if len(licenses) > 0:
+      return licenses
+
+    # Fall back to the package's default applicable licenses.
+    return self.get_package_licenses(package)
+
+  def get_soong_module_of_installed_file(self, installed_file):
+    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
+                               'from modules m join module_installed_file mif on m.id = mif.module_id '
+                               'where mif.installed_file = ?',
+                               (installed_file,))
+    rows = cursor.fetchall()
+    cursor.close()
+    if rows:
+      soong_module = dict(zip(rows[0].keys(), rows[0]))
+      return soong_module
+
+    return None
+
+  def get_soong_module_of_built_file(self, built_file):
+    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
+                               'from modules m join module_built_file mbf on m.id = mbf.module_id '
+                               'where mbf.built_file = ?',
+                               (built_file,))
+    rows = cursor.fetchall()
+    cursor.close()
+    if rows:
+      soong_module = dict(zip(rows[0].keys(), rows[0]))
+      return soong_module
+
+    return None
\ No newline at end of file
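
For reference, a minimal standalone sketch of how the extracted MetadataDb helper can be driven outside gen_sbom.py; the database path is copied from the gen_notice_xml.py usage example below, and the installed-file path and /tmp destination are purely illustrative assumptions:

    import compliance_metadata

    db = compliance_metadata.MetadataDb(
        'out/soong/compliance-metadata/aosp_cf_x86_64_phone/compliance-metadata.db')

    # Optional: persist the reorganized tables for inspection (path is arbitrary).
    db.dump_debug_db('/tmp/compliance-metadata-debug.db')

    # Map an installed file back to the Soong module that produced it.
    module = db.get_soong_module_of_installed_file(
        'out/target/product/vsoc_x86_64/system/bin/logcat')
    if module:
      print(module['name'], module['module_path'])
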
diff --git a/tools/sbom/gen_notice_xml.py b/tools/sbom/gen_notice_xml.py
new file mode 100644
index 0000000..eaa6e5a
--- /dev/null
+++ b/tools/sbom/gen_notice_xml.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate NOTICE.xml.gz of a partition.
+Usage example:
+  gen_notice_xml.py --output_file out/soong/.intermediate/.../NOTICE.xml.gz \
+              --metadata out/soong/compliance-metadata/aosp_cf_x86_64_phone/compliance-metadata.db \
+              --partition system \
+              --product_out out/target/vsoc_x86_64 \
+              --soong_out out/soong
+"""
+
+import argparse
+
+
+FILE_HEADER = '''\
+<?xml version="1.0" encoding="utf-8"?>
+<licenses>
+'''
+FILE_FOOTER = '''\
+</licenses>
+'''
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('-d', '--debug', action='store_true', default=True, help='Debug mode')
+  parser.add_argument('--output_file', required=True, help='The path of the generated NOTICE.xml.gz file.')
+  parser.add_argument('--partition', required=True, help='The name of the partition for which NOTICE.xml.gz is generated.')
+  parser.add_argument('--metadata', required=True, help='The path of compliance metadata DB file.')
+  parser.add_argument('--product_out', required=True, help='The path of PRODUCT_OUT, e.g. out/target/product/vsoc_x86_64.')
+  parser.add_argument('--soong_out', required=True, help='The path of Soong output directory, e.g. out/soong')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def new_file_name_tag(file_metadata, package_name):
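+  # One <file-name> entry per installed file; the path is reported relative to
+  # PRODUCT_OUT, and the lib attribute falls back to 'Android' when the file has
+  # no owning package.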
+  file_path = file_metadata['installed_file'].removeprefix(args.product_out)
+  lib = 'Android'
+  if package_name:
+    lib = package_name
+  return f'<file-name contentId="" lib="{lib}">{file_path}</file-name>\n'
+
+
+def new_file_content_tag():
+  pass
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  with open(args.output_file, 'w', encoding="utf-8") as notice_xml_file:
+    notice_xml_file.write(FILE_HEADER)
+    notice_xml_file.write(FILE_FOOTER)
+
+
+if __name__ == '__main__':
+  main()
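
The entry point above only writes the XML skeleton so far. As a hedged sketch of where this is headed, assuming the helper lives inside gen_notice_xml.py next to FILE_HEADER and new_file_name_tag (the gzip output, the name write_notice_xml, and the wiring to compliance_metadata are assumptions, not code this change contains):

    import gzip

    import compliance_metadata

    def write_notice_xml(args):
      # Enumerate installed files from the compliance metadata DB and emit one
      # <file-name> tag per file; <file-content> emission is not implemented yet.
      db = compliance_metadata.MetadataDb(args.metadata)
      with gzip.open(args.output_file, 'wt', encoding='utf-8') as notice_xml_file:
        notice_xml_file.write(FILE_HEADER)
        for file_metadata in db.get_installed_files():
          module = db.get_soong_module_of_installed_file(file_metadata['installed_file'])
          package_name = module['name'] if module else None
          notice_xml_file.write(new_file_name_tag(file_metadata, package_name))
        notice_xml_file.write(FILE_FOOTER)
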
diff --git a/tools/sbom/gen_sbom.py b/tools/sbom/gen_sbom.py
index a203258..9c3a8be 100644
--- a/tools/sbom/gen_sbom.py
+++ b/tools/sbom/gen_sbom.py
@@ -26,6 +26,7 @@
 """
 
 import argparse
+import compliance_metadata
 import datetime
 import google.protobuf.text_format as text_format
 import hashlib
@@ -35,7 +36,6 @@
 import metadata_file_pb2
 import sbom_data
 import sbom_writers
-import sqlite3
 
 # Package type
 PKG_SOURCE = 'SOURCE'
@@ -568,202 +568,16 @@
   return sorted(all_static_dep_files.keys())
 
 
-class MetadataDb:
-  def __init__(self, db):
-    self.conn = sqlite3.connect(':memory')
-    self.conn.row_factory = sqlite3.Row
-    with sqlite3.connect(db) as c:
-      c.backup(self.conn)
-    self.reorg()
-
-  def reorg(self):
-    # package_license table
-    self.conn.execute("create table package_license as "
-                      "select name as package, pkg_default_applicable_licenses as license "
-                      "from modules "
-                      "where module_type = 'package' ")
-    cursor = self.conn.execute("select package,license from package_license where license like '% %'")
-    multi_licenses_packages = cursor.fetchall()
-    cursor.close()
-    rows = []
-    for p in multi_licenses_packages:
-      licenses = p['license'].strip().split(' ')
-      for lic in licenses:
-        rows.append((p['package'], lic))
-    self.conn.executemany('insert into package_license values (?, ?)', rows)
-    self.conn.commit()
-
-    self.conn.execute("delete from package_license where license like '% %'")
-    self.conn.commit()
-
-    # module_license table
-    self.conn.execute("create table module_license as "
-                      "select distinct name as module, package, licenses as license "
-                      "from modules "
-                      "where licenses != '' ")
-    cursor = self.conn.execute("select module,package,license from module_license where license like '% %'")
-    multi_licenses_modules = cursor.fetchall()
-    cursor.close()
-    rows = []
-    for m in multi_licenses_modules:
-      licenses = m['license'].strip().split(' ')
-      for lic in licenses:
-        rows.append((m['module'], m['package'],lic))
-    self.conn.executemany('insert into module_license values (?, ?, ?)', rows)
-    self.conn.commit()
-
-    self.conn.execute("delete from module_license where license like '% %'")
-    self.conn.commit()
-
-    # module_installed_file table
-    self.conn.execute("create table module_installed_file as "
-                      "select id as module_id, name as module_name, package, installed_files as installed_file "
-                      "from modules "
-                      "where installed_files != '' ")
-    cursor = self.conn.execute("select module_id, module_name, package, installed_file "
-                               "from module_installed_file where installed_file like '% %'")
-    multi_installed_file_modules = cursor.fetchall()
-    cursor.close()
-    rows = []
-    for m in multi_installed_file_modules:
-      installed_files = m['installed_file'].strip().split(' ')
-      for f in installed_files:
-        rows.append((m['module_id'], m['module_name'], m['package'], f))
-    self.conn.executemany('insert into module_installed_file values (?, ?, ?, ?)', rows)
-    self.conn.commit()
-
-    self.conn.execute("delete from module_installed_file where installed_file like '% %'")
-    self.conn.commit()
-
-    # module_built_file table
-    self.conn.execute("create table module_built_file as "
-                      "select id as module_id, name as module_name, package, built_files as built_file "
-                      "from modules "
-                      "where built_files != '' ")
-    cursor = self.conn.execute("select module_id, module_name, package, built_file "
-                               "from module_built_file where built_file like '% %'")
-    multi_built_file_modules = cursor.fetchall()
-    cursor.close()
-    rows = []
-    for m in multi_built_file_modules:
-      built_files = m['installed_file'].strip().split(' ')
-      for f in built_files:
-        rows.append((m['module_id'], m['module_name'], m['package'], f))
-    self.conn.executemany('insert into module_built_file values (?, ?, ?, ?)', rows)
-    self.conn.commit()
-
-    self.conn.execute("delete from module_built_file where built_file like '% %'")
-    self.conn.commit()
-
-
-    # Indexes
-    self.conn.execute('create index idx_modules_id on modules (id)')
-    self.conn.execute('create index idx_modules_name on modules (name)')
-    self.conn.execute('create index idx_package_licnese_package on package_license (package)')
-    self.conn.execute('create index idx_package_licnese_license on package_license (license)')
-    self.conn.execute('create index idx_module_licnese_module on module_license (module)')
-    self.conn.execute('create index idx_module_licnese_license on module_license (license)')
-    self.conn.execute('create index idx_module_installed_file_module_id on module_installed_file (module_id)')
-    self.conn.execute('create index idx_module_installed_file_installed_file on module_installed_file (installed_file)')
-    self.conn.execute('create index idx_module_built_file_module_id on module_built_file (module_id)')
-    self.conn.execute('create index idx_module_built_file_built_file on module_built_file (built_file)')
-    self.conn.commit()
-
-    if args.debug:
-      with sqlite3.connect(os.path.dirname(args.metadata) + '/compliance-metadata-debug.db') as c:
-        self.conn.backup(c)
-
-
-  def get_installed_files(self):
-    # Get all records from table make_metadata, which contains all installed files and corresponding make modules' metadata
-    cursor = self.conn.execute('select installed_file, module_path, is_prebuilt_make_module, product_copy_files, kernel_module_copy_files, is_platform_generated, license_text from make_metadata')
-    rows = cursor.fetchall()
-    cursor.close()
-    installed_files_metadata = []
-    for row in rows:
-      metadata = dict(zip(row.keys(), row))
-      installed_files_metadata.append(metadata)
-    return installed_files_metadata
-
-  def get_soong_modules(self):
-    # Get all records from table modules, which contains metadata of all soong modules
-    cursor = self.conn.execute('select name, package, package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files from modules')
-    rows = cursor.fetchall()
-    cursor.close()
-    soong_modules = []
-    for row in rows:
-      soong_module = dict(zip(row.keys(), row))
-      soong_modules.append(soong_module)
-    return soong_modules
-
-  def get_package_licenses(self, package):
-    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
-                               'from package_license pl join modules m on pl.license = m.name '
-                               'where pl.package = ?',
-                               ('//' + package,))
-    rows = cursor.fetchall()
-    licenses = {}
-    for r in rows:
-      licenses[r['name']] = r['license_text']
-    return licenses
-
-  def get_module_licenses(self, module_name, package):
-    licenses = {}
-    # If property "licenses" is defined on module
-    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
-                               'from module_license ml join modules m on ml.license = m.name '
-                               'where ml.module = ? and ml.package = ?',
-                               (module_name, package))
-    rows = cursor.fetchall()
-    for r in rows:
-      licenses[r['name']] = r['license_text']
-    if len(licenses) > 0:
-      return licenses
-
-    # Use default package license
-    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
-                               'from package_license pl join modules m on pl.license = m.name '
-                               'where pl.package = ?',
-                               ('//' + package,))
-    rows = cursor.fetchall()
-    for r in rows:
-      licenses[r['name']] = r['license_text']
-    return licenses
-
-  def get_soong_module_of_installed_file(self, installed_file):
-    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
-                               'from modules m join module_installed_file mif on m.id = mif.module_id '
-                               'where mif.installed_file = ?',
-                               (installed_file,))
-    rows = cursor.fetchall()
-    cursor.close()
-    if rows:
-      soong_module = dict(zip(rows[0].keys(), rows[0]))
-      return soong_module
-
-    return None
-
-  def get_soong_module_of_built_file(self, built_file):
-    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
-                               'from modules m join module_built_file mbf on m.id = mbf.module_id '
-                               'where mbf.built_file = ?',
-                               (built_file,))
-    rows = cursor.fetchall()
-    cursor.close()
-    if rows:
-      soong_module = dict(zip(rows[0].keys(), rows[0]))
-      return soong_module
-
-    return None
-
-
 def main():
   global args
   args = get_args()
   log('Args:', vars(args))
 
   global db
-  db = MetadataDb(args.metadata)
+  db = compliance_metadata.MetadataDb(args.metadata)
+  if args.debug:
+    db.dump_debug_db(os.path.dirname(args.output_file) + '/compliance-metadata-debug.db')
+
   global metadata_file_protos
   metadata_file_protos = {}
   global licenses_text
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index 6b2341b..654e196 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -302,7 +302,6 @@
             final KeyStore keyStore, final String keyName)
             throws CertificateException, KeyStoreException, NoSuchAlgorithmException,
                     UnrecoverableKeyException, UnrecoverableEntryException {
-        final Key key = keyStore.getKey(keyName, readPassword(keyName));
         final PrivateKeyEntry privateKeyEntry = (PrivateKeyEntry) keyStore.getEntry(keyName, null);
         if (privateKeyEntry == null) {
         throw new Error(