Merge "Revert "Remove non existent snapuserd.recovery package"" into main
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..cd5c426
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,36 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+// Package the minimal files required to run envsetup.sh in the test
+// environment.
+genrule {
+    name: "envsetup_minimum.zip",
+    visibility: [
+        "//build/make/tests:__subpackages__",
+    ],
+    tools: [
+        "soong_zip",
+    ],
+    srcs: [
+        "envsetup.sh",
+        "shell_utils.sh",
+        "core/envsetup.mk",
+    ],
+    out: ["envsetup.zip"],
+    cmd: "$(location soong_zip) -o $(out) -D build/make",
+}
diff --git a/CleanSpec.mk b/CleanSpec.mk
index dfc0cd0..8c30883 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -779,6 +779,21 @@
 # Clear out rustc compiler intermediates after reverting rust compiler/linker split.
 $(call add-clean-step, find $(OUT_DIR) -name "*.rsp.whole.a" -print0 | xargs -0 /bin/bash -c 'rm -f $$$${@}; rm -f $$$${@/.rsp.whole.a/.rsp.a}; rm -f $$$${@/.rsp.whole.a/.rsp}')
 
+# Remove obsolete java compilation artifacts
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/)
+$(call add-clean-step, find $(OUT_DIR) -type f -name "*.jar" -print0 | xargs -0 rm -f)
+
+# Remove obsolete java compilation artifacts
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/)
+$(call add-clean-step, find $(OUT_DIR) -type f -name "*.jar" -print0 | xargs -0 rm -f)
+
+# Remove obsolete dexpreopt_config artifacts
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/dexpreopt_config/dexpreopt.config)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/dexpreopt_config/dexpreopt_soong.config)
+
+# Clear out Soong .intermediates directory regarding removal of hashed subdir
+$(call add-clean-step, rm -rf $(OUT_DIR)/soong/.intermediates)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/METADATA b/METADATA
deleted file mode 100644
index 44781a7..0000000
--- a/METADATA
+++ /dev/null
@@ -1,8 +0,0 @@
-third_party {
-  license_note: "would be NOTICE save for GPL in:\n"
-  "   core/LINUX_KERNEL_COPYING\n"
-  "   tools/droiddoc/templates-pdk/assets/jquery-1.6.2.min.js\n"
-  "   tools/droiddoc/templates-pdk/assets/jquery-history.js\n"
-  "   tools/droiddoc/templates-pdk/assets/jquery-resizable.min.js"
-  license_type: RESTRICTED
-}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index ce75150..97ecd33 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,2 +1,5 @@
 [Hook Scripts]
 do_not_use_DO_NOT_MERGE = ${REPO_ROOT}/build/soong/scripts/check_do_not_merge.sh ${PREUPLOAD_COMMIT}
+
+[Builtin Hooks]
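+# Run the ktfmt formatting check on Kotlin files before upload.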
+ktfmt = true
diff --git a/ci/Android.bp b/ci/Android.bp
new file mode 100644
index 0000000..104f517
--- /dev/null
+++ b/ci/Android.bp
@@ -0,0 +1,87 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+    default_team: "trendy_team_adte",
+}
+
+python_test_host {
+    name: "build_test_suites_test",
+    main: "build_test_suites_test.py",
+    pkg_path: "testdata",
+    srcs: [
+        "build_test_suites_test.py",
+    ],
+    libs: [
+        "build_test_suites",
+        "pyfakefs",
+        "ci_test_lib",
+    ],
+    test_options: {
+        unit_test: true,
+    },
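+    // Bundle the hermetic Python launcher so the test can spawn it as a real
+    // subprocess (see RunCommandIntegrationTest in build_test_suites_test.py).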
+    data: [
+        ":py3-cmd",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
+// This test is only intended to be run locally since it's slow, not hermetic,
+// and requires a lot of system state. It is therefore not marked as `unit_test`
+// and is not part of any test suite. Note that we also don't want to run this
+// test with Bazel since that would require disabling sandboxing and explicitly
+// passing in all the env vars we depend on via the command line. The test
+// target could be configured to do so, but it's not worth doing given that
+// we're moving away from Bazel.
+python_test_host {
+    name: "build_test_suites_local_test",
+    main: "build_test_suites_local_test.py",
+    srcs: [
+        "build_test_suites_local_test.py",
+    ],
+    libs: [
+        "build_test_suites",
+        "pyfakefs",
+        "ci_test_lib",
+    ],
+    test_config_template: "AndroidTest.xml.template",
+    test_options: {
+        unit_test: false,
+    },
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
+python_library_host {
+    name: "build_test_suites",
+    srcs: [
+        "build_test_suites.py",
+        "optimized_targets.py",
+    ],
+}
+
+python_library_host {
+    name: "ci_test_lib",
+    srcs: [
+        "ci_test_lib.py",
+    ],
+}
diff --git a/ci/AndroidTest.xml.template b/ci/AndroidTest.xml.template
new file mode 100644
index 0000000..81a3435
--- /dev/null
+++ b/ci/AndroidTest.xml.template
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration>
+  <test class="com.android.tradefed.testtype.python.PythonBinaryHostTest">
+    <option name="par-file-name" value="{MODULE}"/>
+    <option name="use-test-output-file" value="false"/>
+    <option name="test-timeout" value="5m"/>
+  </test>
+</configuration>
diff --git a/ci/build_test_suites b/ci/build_test_suites
index 03f6731..5aaf2f4 100755
--- a/ci/build_test_suites
+++ b/ci/build_test_suites
@@ -1,4 +1,5 @@
 #!prebuilts/build-tools/linux-x86/bin/py3-cmd -B
+#
 # Copyright 2024, The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
 import build_test_suites
+import sys
 
-build_test_suites.main(sys.argv)
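+# Pass sys.argv[1:] so that argparse only sees the actual arguments, not the
+# path of this script.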
+build_test_suites.main(sys.argv[1:])
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 1d5b377..6e1f88c 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -12,404 +12,173 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Script to build only the necessary modules for general-tests along
-
-with whatever other targets are passed in.
-"""
+"""Build script for the CI `test_suites` target."""
 
 import argparse
-from collections.abc import Sequence
+from dataclasses import dataclass
 import json
+import logging
 import os
 import pathlib
-import re
 import subprocess
 import sys
-from typing import Any
-
-import test_mapping_module_retriever
+from typing import Callable
+import optimized_targets
 
 
-# List of modules that are always required to be in general-tests.zip
-REQUIRED_MODULES = frozenset(
-    ['cts-tradefed', 'vts-tradefed', 'compatibility-host-util', 'soong_zip']
-)
+REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
+SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
 
 
-def build_test_suites(argv):
+class Error(Exception):
+
+  def __init__(self, message):
+    super().__init__(message)
+
+
+class BuildFailureError(Error):
+
+  def __init__(self, return_code):
+    super().__init__(f'Build command failed with return code: {return_code}')
+    self.return_code = return_code
+
+
+class BuildPlanner:
+  """Class in charge of determining how to optimize build targets.
+
+  Given the build context and the targets to build, it determines the final
+  list of targets to build along with a set of packaging functions used to
+  package up any output zip files needed by the build.
+  """
+
+  def __init__(
+      self,
+      build_context: dict[str, any],
+      args: argparse.Namespace,
+      target_optimizations: dict[str, optimized_targets.OptimizedBuildTarget],
+  ):
+    self.build_context = build_context
+    self.args = args
+    self.target_optimizations = target_optimizations
+
+  def create_build_plan(self):
+
+    if 'optimized_build' not in self.build_context['enabled_build_features']:
+      return BuildPlan(set(self.args.extra_targets), set())
+
+    build_targets = set()
+    packaging_functions = set()
+    for target in self.args.extra_targets:
+      target_optimizer_getter = self.target_optimizations.get(target, None)
+      if not target_optimizer_getter:
+        build_targets.add(target)
+        continue
+
+      target_optimizer = target_optimizer_getter(
+          target, self.build_context, self.args
+      )
+      build_targets.update(target_optimizer.get_build_targets())
+      packaging_functions.add(target_optimizer.package_outputs)
+
+    return BuildPlan(build_targets, packaging_functions)
+
+
+@dataclass(frozen=True)
+class BuildPlan:
+  build_targets: set[str]
+  packaging_functions: set[Callable[..., None]]
+
+
+def build_test_suites(argv: list[str]) -> int:
+  """Builds all test suites passed in, optimizing based on the build_context content.
+
+  Args:
+    argv: The command line arguments passed in.
+
+  Returns:
+    The exit code of the build.
+  """
   args = parse_args(argv)
+  check_required_env()
+  build_context = load_build_context()
+  build_planner = BuildPlanner(
+      build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS
+  )
+  build_plan = build_planner.create_build_plan()
 
-  if is_optimization_enabled():
-    # Call the class to map changed files to modules to build.
-    # TODO(lucafarsi): Move this into a replaceable class.
-    build_affected_modules(args)
-  else:
-    build_everything(args)
+  try:
+    execute_build_plan(build_plan)
+  except BuildFailureError as e:
+    logging.error('Build command failed! Check build_log for details.')
+    return e.return_code
+
+  return 0
 
 
-def parse_args(argv):
+def parse_args(argv: list[str]) -> argparse.Namespace:
   argparser = argparse.ArgumentParser()
+
   argparser.add_argument(
       'extra_targets', nargs='*', help='Extra test suites to build.'
   )
-  argparser.add_argument('--target_product')
-  argparser.add_argument('--target_release')
-  argparser.add_argument(
-      '--with_dexpreopt_boot_img_and_system_server_only', action='store_true'
-  )
-  argparser.add_argument('--change_info', nargs='?')
 
-  return argparser.parse_args()
+  return argparser.parse_args(argv)
 
 
-def is_optimization_enabled() -> bool:
-  # TODO(lucafarsi): switch back to building only affected general-tests modules
-  # in presubmit once ready.
-  # if os.environ.get('BUILD_NUMBER')[0] == 'P':
-  #   return True
-  return False
+def check_required_env():
+  """Check for required env vars.
+
+  Raises:
+    Error: If any required env vars are not found.
+  """
+  missing_env_vars = sorted(v for v in REQUIRED_ENV_VARS if v not in os.environ)
+
+  if not missing_env_vars:
+    return
+
+  t = ','.join(missing_env_vars)
+  raise Error(f'Missing required environment variables: {t}')
 
 
-def build_everything(args: argparse.Namespace):
-  build_command = base_build_command(args, args.extra_targets)
-  build_command.append('general-tests')
+def load_build_context():
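+  """Loads the optional BUILD_CONTEXT JSON file pointed to by $BUILD_CONTEXT.
+
+  For illustration only, a minimal (hypothetical) file that enables the
+  optimized path could contain: {"enabled_build_features": ["optimized_build"]}.
+  Only the 'enabled_build_features' key is consulted in this file.
+  """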
+  build_context_path = pathlib.Path(os.environ.get('BUILD_CONTEXT', ''))
+  if build_context_path.is_file():
+    try:
+      with open(build_context_path, 'r') as f:
+        return json.load(f)
+    except json.decoder.JSONDecodeError as e:
+      raise Error(f'Failed to load JSON file: {build_context_path}') from e
 
-  run_command(build_command, print_output=True)
+  logging.info('No BUILD_CONTEXT found, skipping optimizations.')
+  return empty_build_context()
 
 
-def build_affected_modules(args: argparse.Namespace):
-  modules_to_build = find_modules_to_build(
-      pathlib.Path(args.change_info), args.extra_required_modules
-  )
-
-  # Call the build command with everything.
-  build_command = base_build_command(args, args.extra_targets)
-  build_command.extend(modules_to_build)
-  # When not building general-tests we also have to build the general tests
-  # shared libs.
-  build_command.append('general-tests-shared-libs')
-
-  run_command(build_command, print_output=True)
-
-  zip_build_outputs(modules_to_build, args.target_release)
+def empty_build_context():
+  return {'enabled_build_features': []}
 
 
-def base_build_command(
-    args: argparse.Namespace, extra_targets: set[str]
-) -> list:
+def execute_build_plan(build_plan: BuildPlan):
   build_command = []
-  build_command.append('time')
-  build_command.append('./build/soong/soong_ui.bash')
+  build_command.append(get_top().joinpath(SOONG_UI_EXE_REL_PATH))
   build_command.append('--make-mode')
-  build_command.append('dist')
-  build_command.append('TARGET_PRODUCT=' + args.target_product)
-  build_command.append('TARGET_RELEASE=' + args.target_release)
-  if args.with_dexpreopt_boot_img_and_system_server_only:
-    build_command.append('WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY=true')
-  build_command.extend(extra_targets)
+  build_command.extend(build_plan.build_targets)
 
-  return build_command
+  try:
+    run_command(build_command)
+  except subprocess.CalledProcessError as e:
+    raise BuildFailureError(e.returncode) from e
+
+  for packaging_function in build_plan.packaging_functions:
+    packaging_function()
 
 
-def run_command(
-    args: list[str],
-    env: dict[str, str] = os.environ,
-    print_output: bool = False,
-) -> str:
-  result = subprocess.run(
-      args=args,
-      text=True,
-      capture_output=True,
-      check=False,
-      env=env,
-  )
-  # If the process failed, print its stdout and propagate the exception.
-  if not result.returncode == 0:
-    print('Build command failed! output:')
-    print('stdout: ' + result.stdout)
-    print('stderr: ' + result.stderr)
-
-  result.check_returncode()
-
-  if print_output:
-    print(result.stdout)
-
-  return result.stdout
+def get_top() -> pathlib.Path:
+  return pathlib.Path(os.environ['TOP'])
 
 
-def find_modules_to_build(
-    change_info: pathlib.Path, extra_required_modules: list[str]
-) -> set[str]:
-  changed_files = find_changed_files(change_info)
-
-  test_mappings = test_mapping_module_retriever.GetTestMappings(
-      changed_files, set()
-  )
-
-  # Soong_zip is required to generate the output zip so always build it.
-  modules_to_build = set(REQUIRED_MODULES)
-  if extra_required_modules:
-    modules_to_build.update(extra_required_modules)
-
-  modules_to_build.update(find_affected_modules(test_mappings, changed_files))
-
-  return modules_to_build
-
-
-def find_changed_files(change_info: pathlib.Path) -> set[str]:
-  with open(change_info) as change_info_file:
-    change_info_contents = json.load(change_info_file)
-
-  changed_files = set()
-
-  for change in change_info_contents['changes']:
-    project_path = change.get('projectPath') + '/'
-
-    for revision in change.get('revisions'):
-      for file_info in revision.get('fileInfos'):
-        changed_files.add(project_path + file_info.get('path'))
-
-  return changed_files
-
-
-def find_affected_modules(
-    test_mappings: dict[str, Any], changed_files: set[str]
-) -> set[str]:
-  modules = set()
-
-  # The test_mappings object returned by GetTestMappings is organized as
-  # follows:
-  # {
-  #   'test_mapping_file_path': {
-  #     'group_name' : [
-  #       'name': 'module_name',
-  #     ],
-  #   }
-  # }
-  for test_mapping in test_mappings.values():
-    for group in test_mapping.values():
-      for entry in group:
-        module_name = entry.get('name', None)
-
-        if not module_name:
-          continue
-
-        file_patterns = entry.get('file_patterns')
-        if not file_patterns:
-          modules.add(module_name)
-          continue
-
-        if matches_file_patterns(file_patterns, changed_files):
-          modules.add(module_name)
-          continue
-
-  return modules
-
-
-# TODO(lucafarsi): Share this logic with the original logic in
-# test_mapping_test_retriever.py
-def matches_file_patterns(
-    file_patterns: list[set], changed_files: set[str]
-) -> bool:
-  for changed_file in changed_files:
-    for pattern in file_patterns:
-      if re.search(pattern, changed_file):
-        return True
-
-  return False
-
-
-def zip_build_outputs(
-    modules_to_build: set[str], target_release: str
-):
-  src_top = os.environ.get('TOP', os.getcwd())
-
-  # Call dumpvars to get the necessary things.
-  # TODO(lucafarsi): Don't call soong_ui 4 times for this, --dumpvars-mode can
-  # do it but it requires parsing.
-  host_out_testcases = pathlib.Path(
-      get_soong_var('HOST_OUT_TESTCASES', target_release)
-  )
-  target_out_testcases = pathlib.Path(
-      get_soong_var('TARGET_OUT_TESTCASES', target_release)
-  )
-  product_out = pathlib.Path(get_soong_var('PRODUCT_OUT', target_release))
-  soong_host_out = pathlib.Path(get_soong_var('SOONG_HOST_OUT', target_release))
-  host_out = pathlib.Path(get_soong_var('HOST_OUT', target_release))
-  dist_dir = pathlib.Path(get_soong_var('DIST_DIR', target_release))
-
-  # Call the class to package the outputs.
-  # TODO(lucafarsi): Move this code into a replaceable class.
-  host_paths = []
-  target_paths = []
-  host_config_files = []
-  target_config_files = []
-  for module in modules_to_build:
-    host_path = os.path.join(host_out_testcases, module)
-    if os.path.exists(host_path):
-      host_paths.append(host_path)
-      collect_config_files(src_top, host_path, host_config_files)
-
-    target_path = os.path.join(target_out_testcases, module)
-    if os.path.exists(target_path):
-      target_paths.append(target_path)
-      collect_config_files(src_top, target_path, target_config_files)
-
-  zip_test_configs_zips(
-      dist_dir, host_out, product_out, host_config_files, target_config_files
-  )
-
-  zip_command = base_zip_command(host_out, dist_dir, 'general-tests.zip')
-
-  # Add host testcases.
-  zip_command.append('-C')
-  zip_command.append(os.path.join(src_top, soong_host_out))
-  zip_command.append('-P')
-  zip_command.append('host/')
-  for path in host_paths:
-    zip_command.append('-D')
-    zip_command.append(path)
-
-  # Add target testcases.
-  zip_command.append('-C')
-  zip_command.append(os.path.join(src_top, product_out))
-  zip_command.append('-P')
-  zip_command.append('target')
-  for path in target_paths:
-    zip_command.append('-D')
-    zip_command.append(path)
-
-  # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
-  # Add necessary tools. These are also hardcoded in general-tests.mk.
-  framework_path = os.path.join(soong_host_out, 'framework')
-
-  zip_command.append('-C')
-  zip_command.append(framework_path)
-  zip_command.append('-P')
-  zip_command.append('host/tools')
-  zip_command.append('-f')
-  zip_command.append(os.path.join(framework_path, 'cts-tradefed.jar'))
-  zip_command.append('-f')
-  zip_command.append(
-      os.path.join(framework_path, 'compatibility-host-util.jar')
-  )
-  zip_command.append('-f')
-  zip_command.append(os.path.join(framework_path, 'vts-tradefed.jar'))
-
-  run_command(zip_command, print_output=True)
-
-
-def collect_config_files(
-    src_top: pathlib.Path, root_dir: pathlib.Path, config_files: list[str]
-):
-  for root, dirs, files in os.walk(os.path.join(src_top, root_dir)):
-    for file in files:
-      if file.endswith('.config'):
-        config_files.append(os.path.join(root_dir, file))
-
-
-def base_zip_command(
-    host_out: pathlib.Path, dist_dir: pathlib.Path, name: str
-) -> list[str]:
-  return [
-      'time',
-      os.path.join(host_out, 'bin', 'soong_zip'),
-      '-d',
-      '-o',
-      os.path.join(dist_dir, name),
-  ]
-
-
-# generate general-tests_configs.zip which contains all of the .config files
-# that were built and general-tests_list.zip which contains a text file which
-# lists all of the .config files that are in general-tests_configs.zip.
-#
-# general-tests_comfigs.zip is organized as follows:
-# /
-#   host/
-#     testcases/
-#       test_1.config
-#       test_2.config
-#       ...
-#   target/
-#     testcases/
-#       test_1.config
-#       test_2.config
-#       ...
-#
-# So the process is we write out the paths to all the host config files into one
-# file and all the paths to the target config files in another. We also write
-# the paths to all the config files into a third file to use for
-# general-tests_list.zip.
-def zip_test_configs_zips(
-    dist_dir: pathlib.Path,
-    host_out: pathlib.Path,
-    product_out: pathlib.Path,
-    host_config_files: list[str],
-    target_config_files: list[str],
-):
-  with open(
-      os.path.join(host_out, 'host_general-tests_list'), 'w'
-  ) as host_list_file, open(
-      os.path.join(product_out, 'target_general-tests_list'), 'w'
-  ) as target_list_file, open(
-      os.path.join(host_out, 'general-tests_list'), 'w'
-  ) as list_file:
-
-    for config_file in host_config_files:
-      host_list_file.write(config_file + '\n')
-      list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n')
-
-    for config_file in target_config_files:
-      target_list_file.write(config_file + '\n')
-      list_file.write(
-          'target/' + os.path.relpath(config_file, product_out) + '\n'
-      )
-
-  tests_config_zip_command = base_zip_command(
-      host_out, dist_dir, 'general-tests_configs.zip'
-  )
-  tests_config_zip_command.append('-P')
-  tests_config_zip_command.append('host')
-  tests_config_zip_command.append('-C')
-  tests_config_zip_command.append(host_out)
-  tests_config_zip_command.append('-l')
-  tests_config_zip_command.append(
-      os.path.join(host_out, 'host_general-tests_list')
-  )
-  tests_config_zip_command.append('-P')
-  tests_config_zip_command.append('target')
-  tests_config_zip_command.append('-C')
-  tests_config_zip_command.append(product_out)
-  tests_config_zip_command.append('-l')
-  tests_config_zip_command.append(
-      os.path.join(product_out, 'target_general-tests_list')
-  )
-  run_command(tests_config_zip_command, print_output=True)
-
-  tests_list_zip_command = base_zip_command(
-      host_out, dist_dir, 'general-tests_list.zip'
-  )
-  tests_list_zip_command.append('-C')
-  tests_list_zip_command.append(host_out)
-  tests_list_zip_command.append('-f')
-  tests_list_zip_command.append(os.path.join(host_out, 'general-tests_list'))
-  run_command(tests_list_zip_command, print_output=True)
-
-
-def get_soong_var(var: str, target_release: str) -> str:
-  new_env = os.environ.copy()
-  new_env['TARGET_RELEASE'] = target_release
-
-  value = run_command(
-      ['./build/soong/soong_ui.bash', '--dumpvar-mode', '--abs', var],
-      env=new_env,
-  ).strip()
-  if not value:
-    raise RuntimeError('Necessary soong variable ' + var + ' not found.')
-
-  return value
+def run_command(args: list[str], stdout=None):
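+  """Runs the given command; a non-zero exit raises subprocess.CalledProcessError."""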
+  subprocess.run(args=args, check=True, stdout=stdout)
 
 
 def main(argv):
-  build_test_suites(argv)
+  sys.exit(build_test_suites(argv))
diff --git a/ci/build_test_suites_local_test.py b/ci/build_test_suites_local_test.py
new file mode 100644
index 0000000..78e52d3
--- /dev/null
+++ b/ci/build_test_suites_local_test.py
@@ -0,0 +1,123 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Integration tests for build_test_suites that require a local build env."""
+
+import os
+import pathlib
+import shutil
+import signal
+import subprocess
+import tempfile
+import time
+import ci_test_lib
+
+
+class BuildTestSuitesLocalTest(ci_test_lib.TestCase):
+
+  def setUp(self):
+    self.top_dir = pathlib.Path(os.environ['ANDROID_BUILD_TOP']).resolve()
+    self.executable = self.top_dir.joinpath('build/make/ci/build_test_suites')
+    self.process_session = ci_test_lib.TemporaryProcessSession(self)
+    self.temp_dir = ci_test_lib.TestTemporaryDirectory.create(self)
+
+  def build_subprocess_args(self, build_args: list[str]):
+    env = os.environ.copy()
+    env['TOP'] = str(self.top_dir)
+    env['OUT_DIR'] = self.temp_dir
+
+    args = ([self.executable] + build_args,)
+    kwargs = {
+        'cwd': self.top_dir,
+        'env': env,
+        'text': True,
+    }
+
+    return (args, kwargs)
+
+  def run_build(self, build_args: list[str]) -> subprocess.CompletedProcess:
+    args, kwargs = self.build_subprocess_args(build_args)
+
+    return subprocess.run(
+        *args,
+        **kwargs,
+        check=True,
+        capture_output=True,
+        timeout=5 * 60,
+    )
+
+  def assert_children_alive(self, children: list[int]):
+    for c in children:
+      self.assertTrue(ci_test_lib.process_alive(c))
+
+  def assert_children_dead(self, children: list[int]):
+    for c in children:
+      self.assertFalse(ci_test_lib.process_alive(c))
+
+  def test_fails_for_invalid_arg(self):
+    invalid_arg = '--invalid-arg'
+
+    with self.assertRaises(subprocess.CalledProcessError) as cm:
+      self.run_build([invalid_arg])
+
+    self.assertIn(invalid_arg, cm.exception.stderr)
+
+  def test_builds_successfully(self):
+    self.run_build(['nothing'])
+
+  def test_can_interrupt_build(self):
+    args, kwargs = self.build_subprocess_args(['general-tests'])
+    p = self.process_session.create(args, kwargs)
+
+    # TODO(lucafarsi): Replace this (and other instances) with a condition.
+    time.sleep(5)  # Wait for the build to get going.
+    self.assertIsNone(p.poll())  # Check that the process is still alive.
+    children = query_child_pids(p.pid)
+    self.assert_children_alive(children)
+
+    p.send_signal(signal.SIGINT)
+    p.wait()
+
+    time.sleep(5)  # Wait for things to die out.
+    self.assert_children_dead(children)
+
+  def test_can_kill_build_process_group(self):
+    args, kwargs = self.build_subprocess_args(['general-tests'])
+    p = self.process_session.create(args, kwargs)
+
+    time.sleep(5)  # Wait for the build to get going.
+    self.assertIsNone(p.poll())  # Check that the process is still alive.
+    children = query_child_pids(p.pid)
+    self.assert_children_alive(children)
+
+    os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+    p.wait()
+
+    time.sleep(5)  # Wait for things to die out.
+    self.assert_children_dead(children)
+
+
+# TODO(hzalek): Replace this with `psutils` once available in the tree.
+def query_child_pids(parent_pid: int) -> set[int]:
+  p = subprocess.run(
+      ['pgrep', '-P', str(parent_pid)],
+      check=True,
+      capture_output=True,
+      text=True,
+  )
+  return {int(pid) for pid in p.stdout.splitlines()}
+
+
+if __name__ == '__main__':
+  ci_test_lib.main()
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
new file mode 100644
index 0000000..a9ff3fb
--- /dev/null
+++ b/ci/build_test_suites_test.py
@@ -0,0 +1,431 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for build_test_suites.py"""
+
+import argparse
+from importlib import resources
+import json
+import multiprocessing
+import os
+import pathlib
+import shutil
+import signal
+import stat
+import subprocess
+import sys
+import tempfile
+import textwrap
+import time
+from typing import Callable
+import unittest
+from unittest import mock
+import build_test_suites
+import ci_test_lib
+import optimized_targets
+from pyfakefs import fake_filesystem_unittest
+
+
+class BuildTestSuitesTest(fake_filesystem_unittest.TestCase):
+
+  def setUp(self):
+    self.setUpPyfakefs()
+
+    os_environ_patcher = mock.patch.dict('os.environ', {})
+    self.addCleanup(os_environ_patcher.stop)
+    self.mock_os_environ = os_environ_patcher.start()
+
+    subprocess_run_patcher = mock.patch('subprocess.run')
+    self.addCleanup(subprocess_run_patcher.stop)
+    self.mock_subprocess_run = subprocess_run_patcher.start()
+
+    self._setup_working_build_env()
+
+  def test_missing_target_release_env_var_raises(self):
+    del os.environ['TARGET_RELEASE']
+
+    with self.assert_raises_word(build_test_suites.Error, 'TARGET_RELEASE'):
+      build_test_suites.main([])
+
+  def test_missing_target_product_env_var_raises(self):
+    del os.environ['TARGET_PRODUCT']
+
+    with self.assert_raises_word(build_test_suites.Error, 'TARGET_PRODUCT'):
+      build_test_suites.main([])
+
+  def test_missing_top_env_var_raises(self):
+    del os.environ['TOP']
+
+    with self.assert_raises_word(build_test_suites.Error, 'TOP'):
+      build_test_suites.main([])
+
+  def test_invalid_arg_raises(self):
+    invalid_args = ['--invalid_arg']
+
+    with self.assertRaisesRegex(SystemExit, '2'):
+      build_test_suites.main(invalid_args)
+
+  def test_build_failure_returns(self):
+    self.mock_subprocess_run.side_effect = subprocess.CalledProcessError(
+        42, None
+    )
+
+    with self.assertRaisesRegex(SystemExit, '42'):
+      build_test_suites.main([])
+
+  def test_incorrectly_formatted_build_context_raises(self):
+    build_context = self.fake_top.joinpath('build_context')
+    build_context.touch()
+    os.environ['BUILD_CONTEXT'] = str(build_context)
+
+    with self.assert_raises_word(build_test_suites.Error, 'JSON'):
+      build_test_suites.main([])
+
+  def test_build_success_returns(self):
+    with self.assertRaisesRegex(SystemExit, '0'):
+      build_test_suites.main([])
+
+  def assert_raises_word(self, cls, word):
+    return self.assertRaisesRegex(cls, rf'\b{word}\b')
+
+  def _setup_working_build_env(self):
+    self.fake_top = pathlib.Path('/fake/top')
+    self.fake_top.mkdir(parents=True)
+
+    self.soong_ui_dir = self.fake_top.joinpath('build/soong')
+    self.soong_ui_dir.mkdir(parents=True, exist_ok=True)
+
+    self.soong_ui = self.soong_ui_dir.joinpath('soong_ui.bash')
+    self.soong_ui.touch()
+
+    self.mock_os_environ.update({
+        'TARGET_RELEASE': 'release',
+        'TARGET_PRODUCT': 'product',
+        'TOP': str(self.fake_top),
+    })
+
+    self.mock_subprocess_run.return_value = 0
+
+
+class RunCommandIntegrationTest(ci_test_lib.TestCase):
+
+  def setUp(self):
+    self.temp_dir = ci_test_lib.TestTemporaryDirectory.create(self)
+
+    # Copy the Python executable from 'non-code' resources and make it
+    # executable for use by tests that launch a subprocess. Note that we don't
+    # use Python's native `sys.executable` property since that is not set when
+    # running via the embedded launcher.
+    base_name = 'py3-cmd'
+    dest_file = self.temp_dir.joinpath(base_name)
+    with resources.as_file(
+        resources.files('testdata').joinpath(base_name)
+    ) as p:
+      shutil.copy(p, dest_file)
+    dest_file.chmod(dest_file.stat().st_mode | stat.S_IEXEC)
+    self.python_executable = dest_file
+
+    self._managed_processes = []
+
+  def tearDown(self):
+    self._terminate_managed_processes()
+
+  def test_raises_on_nonzero_exit(self):
+    with self.assertRaises(Exception):
+      build_test_suites.run_command([
+          self.python_executable,
+          '-c',
+          textwrap.dedent(f"""\
+              import sys
+              sys.exit(1)
+              """),
+      ])
+
+  def test_streams_stdout(self):
+
+    def run_slow_command(stdout_file, marker):
+      with open(stdout_file, 'w') as f:
+        build_test_suites.run_command(
+            [
+                self.python_executable,
+                '-c',
+                textwrap.dedent(f"""\
+                  import time
+
+                  print('{marker}', end='', flush=True)
+
+                  # Keep process alive until we check stdout.
+                  time.sleep(10)
+                  """),
+            ],
+            stdout=f,
+        )
+
+    marker = 'Spinach'
+    stdout_file = self.temp_dir.joinpath('stdout.txt')
+
+    p = self.start_process(target=run_slow_command, args=[stdout_file, marker])
+
+    self.assert_file_eventually_contains(stdout_file, marker)
+
+  def test_propagates_interruptions(self):
+
+    def run(pid_file):
+      build_test_suites.run_command([
+          self.python_executable,
+          '-c',
+          textwrap.dedent(f"""\
+              import os
+              import pathlib
+              import time
+
+              pathlib.Path('{pid_file}').write_text(str(os.getpid()))
+
+              # Keep the process alive for us to explicitly interrupt it.
+              time.sleep(10)
+              """),
+      ])
+
+    pid_file = self.temp_dir.joinpath('pid.txt')
+    p = self.start_process(target=run, args=[pid_file])
+    subprocess_pid = int(read_eventual_file_contents(pid_file))
+
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    self.assert_process_eventually_dies(p.pid)
+    self.assert_process_eventually_dies(subprocess_pid)
+
+  def start_process(self, *args, **kwargs) -> multiprocessing.Process:
+    p = multiprocessing.Process(*args, **kwargs)
+    self._managed_processes.append(p)
+    p.start()
+    return p
+
+  def assert_process_eventually_dies(self, pid: int):
+    try:
+      wait_until(lambda: not ci_test_lib.process_alive(pid))
+    except TimeoutError as e:
+      self.fail(f'Process {pid} did not die after a while: {e}')
+
+  def assert_file_eventually_contains(self, file: pathlib.Path, substring: str):
+    wait_until(lambda: file.is_file() and file.stat().st_size > 0)
+    self.assertIn(substring, read_file_contents(file))
+
+  def _terminate_managed_processes(self):
+    for p in self._managed_processes:
+      if not p.is_alive():
+        continue
+
+      # We terminate the process with `SIGINT` since using `terminate` or
+      # `SIGKILL` doesn't kill any grandchild processes and we don't have
+      # `psutil` available to easily query all children.
+      os.kill(p.pid, signal.SIGINT)
+
+
+class BuildPlannerTest(unittest.TestCase):
+
+  class TestOptimizedBuildTarget(optimized_targets.OptimizedBuildTarget):
+
+    def __init__(self, output_targets):
+      self.output_targets = output_targets
+
+    def get_build_targets(self):
+      return self.output_targets
+
+    def package_outputs(self):
+      return f'packaging {" ".join(self.output_targets)}'
+
+  def test_build_optimization_off_builds_everything(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_context=self.create_build_context(optimized_build_enabled=False),
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_targets, build_plan.build_targets)
+
+  def test_build_optimization_off_doesnt_package(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_context=self.create_build_context(optimized_build_enabled=False),
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertEqual(len(build_plan.packaging_functions), 0)
+
+  def test_build_optimization_on_optimizes_target(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+        build_context=self.create_build_context(
+            enabled_build_features={self.get_target_flag('target_1')}
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    expected_targets = {self.get_optimized_target_name('target_1'), 'target_2'}
+    self.assertSetEqual(expected_targets, build_plan.build_targets)
+
+  def test_build_optimization_on_packages_target(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+        build_context=self.create_build_context(
+            enabled_build_features={self.get_target_flag('target_1')}
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    optimized_target_name = self.get_optimized_target_name('target_1')
+    self.assertIn(
+        f'packaging {optimized_target_name}',
+        self.run_packaging_functions(build_plan),
+    )
+
+  def test_individual_build_optimization_off_doesnt_optimize(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_targets, build_plan.build_targets)
+
+  def test_individual_build_optimization_off_doesnt_package(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    expected_packaging_function_outputs = {None, None}
+    self.assertSetEqual(
+        expected_packaging_function_outputs,
+        self.run_packaging_functions(build_plan),
+    )
+
+  def create_build_planner(
+      self,
+      build_targets: set[str],
+      build_context: dict[str, any] = None,
+      args: argparse.Namespace = None,
+      target_optimizations: dict[
+          str, optimized_targets.OptimizedBuildTarget
+      ] = None,
+  ) -> build_test_suites.BuildPlanner:
+    if not build_context:
+      build_context = self.create_build_context()
+    if not args:
+      args = self.create_args(extra_build_targets=build_targets)
+    if not target_optimizations:
+      target_optimizations = self.create_target_optimizations(
+          build_context, build_targets
+      )
+    return build_test_suites.BuildPlanner(
+        build_context, args, target_optimizations
+    )
+
+  def create_build_context(
+      self,
+      optimized_build_enabled: bool = True,
+      enabled_build_features: set[str] = set(),
+      test_context: dict[str, any] = {},
+  ) -> dict[str, any]:
+    build_context = {}
+    build_context['enabled_build_features'] = enabled_build_features
+    if optimized_build_enabled:
+      build_context['enabled_build_features'].add('optimized_build')
+    build_context['test_context'] = test_context
+    return build_context
+
+  def create_args(
+      self, extra_build_targets: set[str] = set()
+  ) -> argparse.Namespace:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('extra_targets', nargs='*')
+    return parser.parse_args(extra_build_targets)
+
+  def create_target_optimizations(
+      self, build_context: dict[str, any], build_targets: set[str]
+  ):
+    target_optimizations = dict()
+    for target in build_targets:
+      target_optimizations[target] = (
+          lambda target, build_context, args: optimized_targets.get_target_optimizer(
+              target,
+              self.get_target_flag(target),
+              build_context,
+              self.TestOptimizedBuildTarget(
+                  {self.get_optimized_target_name(target)}
+              ),
+          )
+      )
+
+    return target_optimizations
+
+  def get_target_flag(self, target: str):
+    return f'{target}_enabled'
+
+  def get_optimized_target_name(self, target: str):
+    return f'{target}_optimized'
+
+  def run_packaging_functions(
+      self, build_plan: build_test_suites.BuildPlan
+  ) -> set[str]:
+    output = set()
+    for packaging_function in build_plan.packaging_functions:
+      output.add(packaging_function())
+
+    return output
+
+
+def wait_until(
+    condition_function: Callable[[], bool],
+    timeout_secs: float = 3.0,
+    polling_interval_secs: float = 0.1,
+):
+  """Waits until a condition function returns True."""
+
+  start_time_secs = time.time()
+
+  while not condition_function():
+    if time.time() - start_time_secs > timeout_secs:
+      raise TimeoutError(
+          f'Condition not met within timeout: {timeout_secs} seconds'
+      )
+
+    time.sleep(polling_interval_secs)
+
+
+def read_file_contents(file: pathlib.Path) -> str:
+  with open(file, 'r') as f:
+    return f.read()
+
+
+def read_eventual_file_contents(file: pathlib.Path) -> str:
+  wait_until(lambda: file.is_file() and file.stat().st_size > 0)
+  return read_file_contents(file)
+
+
+if __name__ == '__main__':
+  ci_test_lib.main()
diff --git a/ci/ci_test_lib.py b/ci/ci_test_lib.py
new file mode 100644
index 0000000..2d70d3f
--- /dev/null
+++ b/ci/ci_test_lib.py
@@ -0,0 +1,86 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Testing utilities for tests in the CI package."""
+
+import logging
+import os
+import pathlib
+import shutil
+import signal
+import subprocess
+import tempfile
+import unittest
+
+
+# Export the TestCase class to reduce the number of imports tests have to list.
+TestCase = unittest.TestCase
+
+
+def process_alive(pid):
+  """Check For the existence of a pid."""
+
+  try:
+    os.kill(pid, 0)
+  except OSError:
+    return False
+
+  return True
+
+
+class TemporaryProcessSession:
+
+  def __init__(self, test_case: TestCase):
+    self._created_processes = []
+    test_case.addCleanup(self.cleanup)
+
+  def create(self, args, kwargs):
+    p = subprocess.Popen(*args, **kwargs, start_new_session=True)
+    self._created_processes.append(p)
+    return p
+
+  def cleanup(self):
+    for p in self._created_processes:
+      if not process_alive(p.pid):
+        continue
+      os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+
+
+class TestTemporaryDirectory:
+
+  def __init__(self, delete: bool):
+    self._delete = delete
+
+  @classmethod
+  def create(cls, test_case: TestCase, delete: bool = True):
+    temp_dir = TestTemporaryDirectory(delete)
+    temp_dir._dir = pathlib.Path(tempfile.mkdtemp())
+    test_case.addCleanup(temp_dir.cleanup)
+    return temp_dir._dir
+
+  def get_dir(self):
+    return self._dir
+
+  def cleanup(self):
+    if not self._delete:
+      return
+    shutil.rmtree(self._dir, ignore_errors=True)
+
+
+def main():
+
+  # Disable logging since it breaks the TF Python test output parser.
+  # TODO(hzalek): Use TF's `test-output-file` option to re-enable logging.
+  logging.getLogger().disabled = True
+
+  unittest.main()
diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py
new file mode 100644
index 0000000..224c8c0
--- /dev/null
+++ b/ci/optimized_targets.py
@@ -0,0 +1,69 @@
+#
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC
+
+
+class OptimizedBuildTarget(ABC):
+  """A representation of an optimized build target.
+
+  This class determines what targets to build for a given build_context and
+  provides a packaging function to generate any necessary output zips for the
+  build.
+  """
+
+  def __init__(self, build_context, args):
+    self.build_context = build_context
+    self.args = args
+
+  def get_build_targets(self):
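+    """Returns the targets to build for this optimization; subclasses override."""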
+    pass
+
+  def package_outputs(self):
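+    """Packages any outputs produced by the optimized build; subclasses override."""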
+    pass
+
+
+class NullOptimizer(OptimizedBuildTarget):
+  """No-op target optimizer.
+
+  This will simply build the same target it was given and do nothing for the
+  packaging step.
+  """
+
+  def __init__(self, target):
+    self.target = target
+
+  def get_build_targets(self):
+    return {self.target}
+
+  def package_outputs(self):
+    pass
+
+
+def get_target_optimizer(target, enabled_flag, build_context, optimizer):
+  if enabled_flag in build_context['enabled_build_features']:
+    return optimizer
+
+  return NullOptimizer(target)
+
+
+# To be written as:
+#    'target': lambda target, build_context, args: get_target_optimizer(
+#        target,
+#        'target_enabled_flag',
+#        build_context,
+#        TargetOptimizer(build_context, args),
+#    )
+OPTIMIZED_BUILD_TARGETS = dict()
diff --git a/cogsetup.sh b/cogsetup.sh
index 44538f2..ef1485d 100644
--- a/cogsetup.sh
+++ b/cogsetup.sh
@@ -52,7 +52,9 @@
   # it with this function. If the user is running repo within a Cog workspace,
   # we'll fail with an error, otherwise, we run the original repo command with
   # the given args.
-  ORIG_REPO_PATH=`which repo`
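+  # If repo is not found on the PATH there is nothing to wrap, so skip
+  # defining the wrapper function.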
+  if ! ORIG_REPO_PATH=`which repo`; then
+    return 0
+  fi
   function repo {
     if [[ "${PWD}" == /google/cog/* ]]; then
       echo "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces."
diff --git a/common/math.mk b/common/math.mk
index ecee474..829ceb5 100644
--- a/common/math.mk
+++ b/common/math.mk
@@ -315,8 +315,9 @@
 $(call math-expect,(call numbers_greater_or_equal_to,0,0 2 1 3),0 2 1 3)
 $(call math-expect,(call numbers_greater_or_equal_to,1,0 2 1 3 2),2 1 3 2)
 
-_INT_LIMIT_WORDS := $(foreach a,x x,$(foreach b,x x x x x x x x x x x x x x x x,\
-  $(foreach c,x x x x x x x x x x x x x x x x,x x x x x x x x x x x x x x x x)))
+# Contains 10,001 (= 10 ** 4 + 1) x's, i.e. one more than the 10,000 (future) API level.
+_INT_LIMIT_WORDS := x $(foreach a,0 1 2 3 4 5 6 7 8 9,$(foreach b,0 1 2 3 4 5 6 7 8 9,\
+  $(foreach c,0 1 2 3 4 5 6 7 8 9,x x x x x x x x x x)))
 
 define _int_encode
 $(if $(filter $(words x $(_INT_LIMIT_WORDS)),$(words $(wordlist 1,$(1),x $(_INT_LIMIT_WORDS)))),\
diff --git a/core/Makefile b/core/Makefile
index 9d77ec1..b5a7e74 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1128,10 +1128,15 @@
 
 BOARD_VENDOR_RAMDISK_FRAGMENT.16K.PREBUILT := $(BUILT_RAMDISK_16K_TARGET)
 
+ifndef BOARD_KERNEL_MODULES_LOAD_16K
+  BOARD_KERNEL_MODULES_LOAD_16K := $(BOARD_KERNEL_MODULES_16K)
+endif
+
 $(BUILT_RAMDISK_16K_TARGET): $(DEPMOD) $(MKBOOTFS) $(EXTRACT_KERNEL) $(COMPRESSION_COMMAND_DEPS)
 $(BUILT_RAMDISK_16K_TARGET): $(foreach file,$(BOARD_KERNEL_MODULES_16K),$(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/$(notdir $(file)))
 	$(DEPMOD) -b $(RAMDISK_16K_STAGING_DIR) 0.0
-	for MODULE in $(BOARD_KERNEL_MODULES_16K); do \
+	rm -f $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/modules.load
+	for MODULE in $(BOARD_KERNEL_MODULES_LOAD_16K); do \
 		basename $$MODULE >> $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/modules.load ; \
 	done;
 	rm -rf $(TARGET_OUT_RAMDISK_16K)/lib/modules
@@ -1148,6 +1153,61 @@
 
 endif
 
+# -----------------------------------------------------------------
+# dtbo image
+ifdef BOARD_PREBUILT_DTBOIMAGE
+INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+	chmod +w $@
+	$(AVBTOOL) add_hash_footer \
+	    --image $@ \
+	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
+	    --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
+	    $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
+
+$(call declare-1p-container,$(INSTALLED_DTBOIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_TARGET),$(BOARD_PREBUILT_DTBOIMAGE),$(PRODUCT_OUT)/:/)
+
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DTBOIMAGE_TARGET)
+else
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+endif
+
+endif # BOARD_PREBUILT_DTBOIMAGE
+
+# -----------------------------------------------------------------
+
+# -----------------------------------------------------------------
+# 16KB dtbo image
+ifdef BOARD_PREBUILT_DTBOIMAGE_16KB
+INSTALLED_DTBOIMAGE_16KB_TARGET := $(PRODUCT_OUT)/dtbo_16k.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_DTBOIMAGE_16KB_TARGET): $(BOARD_PREBUILT_DTBOIMAGE_16KB) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
+	cp $(BOARD_PREBUILT_DTBOIMAGE_16KB) $@
+	chmod +w $@
+	$(AVBTOOL) add_hash_footer \
+	    --image $@ \
+	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
+	    --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
+	    $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
+
+$(call declare-1p-container,$(INSTALLED_DTBOIMAGE_16KB_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_16KB_TARGET),$(BOARD_PREBUILT_DTBOIMAGE_16KB),$(PRODUCT_OUT)/:/)
+
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DTBOIMAGE_16KB_TARGET)
+else
+$(INSTALLED_DTBOIMAGE_16KB_TARGET): $(BOARD_PREBUILT_DTBOIMAGE_16KB)
+	cp $(BOARD_PREBUILT_DTBOIMAGE_16KB) $@
+endif
+
+endif # BOARD_PREBUILT_DTBOIMAGE_16KB
+
+
 ifneq ($(BOARD_KERNEL_PATH_16K),)
 BUILT_KERNEL_16K_TARGET := $(PRODUCT_OUT)/kernel_16k
 
@@ -1170,16 +1230,26 @@
 .PHONY: bootimage_16k
 
 BUILT_BOOT_OTA_PACKAGE_16K := $(PRODUCT_OUT)/boot_ota_16k.zip
-$(BUILT_BOOT_OTA_PACKAGE_16K): $(OTA_FROM_RAW_IMG) $(BUILT_BOOTIMAGE_16K_TARGET) $(INSTALLED_BOOTIMAGE_TARGET) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8
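+# When both the 4 KB and 16 KB dtbo images are installed, the OTA package
+# covers the boot and dtbo partitions; otherwise it only covers boot.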
+$(BUILT_BOOT_OTA_PACKAGE_16K):  $(OTA_FROM_RAW_IMG) \
+                                $(BUILT_BOOTIMAGE_16K_TARGET) \
+                                $(INSTALLED_BOOTIMAGE_TARGET) \
+                                $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8 \
+                                $(INSTALLED_DTBOIMAGE_16KB_TARGET) \
+                                $(INSTALLED_DTBOIMAGE_TARGET)
 	$(OTA_FROM_RAW_IMG) --package_key $(DEFAULT_SYSTEM_DEV_CERTIFICATE) \
                       --max_timestamp `cat $(BUILD_DATETIME_FILE)` \
                       --path $(HOST_OUT) \
-                      --partition_name boot \
+                      --partition_name $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),\
+                          $(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        boot$(comma)dtbo,\
+                        boot) \
                       --output $@ \
                       $(if $(BOARD_16K_OTA_USE_INCREMENTAL),\
                         $(INSTALLED_BOOTIMAGE_TARGET):$(BUILT_BOOTIMAGE_16K_TARGET),\
                         $(BUILT_BOOTIMAGE_16K_TARGET)\
-                      )
+                      )\
+                      $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),$(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        $(INSTALLED_DTBOIMAGE_16KB_TARGET))
 
 boototapackage_16k: $(BUILT_BOOT_OTA_PACKAGE_16K)
 .PHONY: boototapackage_16k
@@ -1370,30 +1440,7 @@
 	@echo "make $@: ignoring dependencies"
 	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
 
-else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # BOARD_AVB_ENABLE != true
-
-# $1: boot image target
-define build_boot_supports_vboot
-  $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned
-  $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1)
-  $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot))
-endef
-
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY)
-	$(call pretty,"Target boot image: $@")
-	$(call build_boot_supports_vboot,$@)
-
-$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
-$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
-
-UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
-
-.PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(FUTILITY)
-	@echo "make $@: ignoring dependencies"
-	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_vboot,$(b)))
-
-else # PRODUCT_SUPPORTS_VBOOT != true
+else # BOARD_AVB_ENABLE != true
 
 # $1: boot image target
 define build_boot_novboot
@@ -1455,25 +1502,41 @@
 
 ifneq ($(BOARD_KERNEL_PATH_16K),)
 BUILT_BOOT_OTA_PACKAGE_4K := $(PRODUCT_OUT)/boot_ota_4k.zip
-$(BUILT_BOOT_OTA_PACKAGE_4K): $(OTA_FROM_RAW_IMG) $(INSTALLED_BOOTIMAGE_TARGET) $(BUILT_BOOTIMAGE_16K_TARGET) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8
+$(BUILT_BOOT_OTA_PACKAGE_4K): $(OTA_FROM_RAW_IMG) \
+                              $(INSTALLED_BOOTIMAGE_TARGET) \
+                              $(BUILT_BOOTIMAGE_16K_TARGET) \
+                              $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8 \
+                              $(INSTALLED_DTBOIMAGE_TARGET) \
+                              $(INSTALLED_DTBOIMAGE_16KB_TARGET)
 	$(OTA_FROM_RAW_IMG) --package_key $(DEFAULT_SYSTEM_DEV_CERTIFICATE) \
                       --max_timestamp `cat $(BUILD_DATETIME_FILE)` \
                       --path $(HOST_OUT) \
-                      --partition_name boot \
+                      --partition_name $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),\
+                          $(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        boot$(comma)dtbo,\
+                        boot) \
                       --output $@ \
                       $(if $(BOARD_16K_OTA_USE_INCREMENTAL),\
                         $(BUILT_BOOTIMAGE_16K_TARGET):$(INSTALLED_BOOTIMAGE_TARGET),\
                         $(INSTALLED_BOOTIMAGE_TARGET)\
-                      )
+                      )\
+                      $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),$(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        $(INSTALLED_DTBOIMAGE_TARGET))
 
 boototapackage_4k: $(BUILT_BOOT_OTA_PACKAGE_4K)
 .PHONY: boototapackage_4k
 
+ifeq ($(BOARD_16K_OTA_MOVE_VENDOR),true)
+$(eval $(call copy-one-file,$(BUILT_BOOT_OTA_PACKAGE_4K),$(TARGET_OUT_VENDOR)/boot_otas/boot_ota_4k.zip))
+$(eval $(call copy-one-file,$(BUILT_BOOT_OTA_PACKAGE_16K),$(TARGET_OUT_VENDOR)/boot_otas/boot_ota_16k.zip))
+ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_VENDOR)/boot_otas/boot_ota_4k.zip
+ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_VENDOR)/boot_otas/boot_ota_16k.zip
+else
 $(eval $(call copy-one-file,$(BUILT_BOOT_OTA_PACKAGE_4K),$(TARGET_OUT)/boot_otas/boot_ota_4k.zip))
 $(eval $(call copy-one-file,$(BUILT_BOOT_OTA_PACKAGE_16K),$(TARGET_OUT)/boot_otas/boot_ota_16k.zip))
-
 ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT)/boot_otas/boot_ota_4k.zip
 ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT)/boot_otas/boot_ota_16k.zip
+endif # BOARD_16K_OTA_MOVE_VENDOR == true
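As a hedged illustration, a board that wants the 4K/16K boot OTA zips staged under /vendor rather than /system would set the flag in its board config (the device path is hypothetical):

    # device/acme/board/BoardConfig.mk (illustrative)
    BOARD_16K_OTA_MOVE_VENDOR := true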
 
 
 endif
@@ -1686,6 +1749,30 @@
 $(call declare-1p-container,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),$(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DTB_IMAGE_TARGET) $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS) $(INTERNAL_VENDOR_BOOTCONDIG_TARGET),$(PRODUCT_OUT)/:/)
 VENDOR_NOTICE_DEPS += $(INSTALLED_VENDOR_BOOTIMAGE_TARGET)
+
+else # BUILDING_VENDOR_BOOT_IMAGE not defined, use prebuilt image
+
+ifdef BOARD_PREBUILT_VENDOR_BOOTIMAGE
+INTERNAL_PREBUILT_VENDOR_BOOTIMAGE := $(BOARD_PREBUILT_VENDOR_BOOTIMAGE)
+INSTALLED_VENDOR_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE) $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOT_KEY_PATH)
+	cp $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE) $@
+	chmod +w $@
+	$(AVBTOOL) add_hash_footer \
+	    --image $@ \
+	    $(call get-partition-size-argument,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)) \
+	    --partition_name vendor_boot $(INTERNAL_AVB_VENDOR_BOOT_SIGNING_ARGS) \
+	    $(BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS)
+else
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE)
+	cp $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE) $@
+
+endif # BOARD_AVB_ENABLE
+$(call declare-1p-container,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),$(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE),$(PRODUCT_OUT)/:/)
+endif # BOARD_PREBUILT_VENDOR_BOOTIMAGE
 endif # BUILDING_VENDOR_BOOT_IMAGE
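For reference, a minimal sketch of the board configuration the new prebuilt branch expects when AVB is enabled; the image path, partition size, and key below are placeholders, not values taken from this change:

    # BoardConfig.mk (illustrative)
    BOARD_PREBUILT_VENDOR_BOOTIMAGE := device/acme/board/prebuilts/vendor_boot.img
    BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE := 67108864
    BOARD_AVB_ENABLE := true
    BOARD_AVB_VENDOR_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem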
 
 # -----------------------------------------------------------------
@@ -2222,11 +2309,6 @@
 $(if $(PRODUCT_VENDOR_DLKM_VERITY_PARTITION),$(hide) echo "vendor_dlkm_verity_block_device=$(PRODUCT_VENDOR_DLKM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_ODM_DLKM_VERITY_PARTITION),$(hide) echo "odm_dlkm_verity_block_device=$(PRODUCT_ODM_DLKM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_SYSTEM_DLKM_VERITY_PARTITION),$(hide) echo "system_dlkm_verity_block_device=$(PRODUCT_SYSTEM_DLKM_VERITY_PARTITION)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCT_SUPPORTS_VBOOT)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(notdir $(FUTILITY))" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
 $(if $(BOARD_AVB_ENABLE), \
   $(hide) echo "avb_avbtool=$(notdir $(AVBTOOL))" >> $(1)$(newline) \
   $(if $(filter $(2),system), \
@@ -2736,15 +2818,9 @@
 # $(1): output file
 # $(2): optional kernel file
 define build-recoveryimage-target
-  $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
-    $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
-                 $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \
-                 --output $(1).unsigned, \
-    $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
-                 $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
-                 $(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1))
-  $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
-    $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
+  $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
+               $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
+               $(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1)
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
     $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot))), \
     $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))))
@@ -2755,9 +2831,6 @@
 endef
 
 recoveryimage-deps := $(MKBOOTIMG) $(recovery_ramdisk) $(recovery_kernel)
-ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-  recoveryimage-deps += $(VBOOT_SIGNER)
-endif
 ifeq (true,$(BOARD_AVB_ENABLE))
   recoveryimage-deps += $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
 endif
@@ -3395,12 +3468,29 @@
 
 FULL_SYSTEMIMAGE_DEPS += $(INTERNAL_ROOT_FILES) $(INSTALLED_FILES_FILE_ROOT)
 
-define write-file-lines
-$(1):
+# Returns a list of EXTRA_INSTALL_ZIPS trios whose primary file is contained within $(1)
+# The trios will contain the primary installed file : the directory to unzip the zip to : the zip
+define relevant-extra-install-zips
+$(strip $(foreach p,$(EXTRA_INSTALL_ZIPS), \
+  $(if $(filter $(call word-colon,1,$(p)),$(1)), \
+    $(p))))
+endef
+
+# Writes a text file that contains all of the files that will be inside a partition.
+# All the file paths will be relative to the partition's staging directory.
+# It will also take into account files inside zips listed in EXTRA_INSTALL_ZIPS.
+#
+# Arguments:
+#   $(1): Output file
+#   $(2): The partition's staging directory
+#   $(3): Files to include in the partition
+define write-partition-file-list
+$(1): $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(foreach p,$(call relevant-extra-install-zips,$(filter $(2)/%,$(3))),$(call word-colon,3,$(p)))
 	@echo Writing $$@
 	rm -f $$@
 	echo -n > $$@
-	$$(foreach f,$(2),echo "$$(f)" >> $$@$$(newline))
+	$$(foreach f,$(subst $(2)/,,$(filter $(2)/%,$(3))),echo "$$(f)" >> $$@$$(newline))
+	$$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(2) $(call relevant-extra-install-zips,$(filter $(2)/%,$(3))) >> $$@
 endef
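A small sketch of how these macros fit together: the trio below is invented purely to show the installed-file:unzip-dir:zip shape described above, while the final call mirrors the system image usage later in this file.

    # Illustrative trio (not a real entry):
    # EXTRA_INSTALL_ZIPS += $(TARGET_OUT)/etc/acme/placeholder.txt:$(TARGET_OUT)/etc/acme:$(OUT_DIR)/acme_files.zip
    $(eval $(call write-partition-file-list,$(systemimage_intermediates)/file_list.txt,$(TARGET_OUT),$(FULL_SYSTEMIMAGE_DEPS)))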
 
 # -----------------------------------------------------------------
@@ -3466,7 +3556,7 @@
                exit 1 )
 endef
 
-$(eval $(call write-file-lines,$(systemimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT)/,,$(filter $(TARGET_OUT)/%,$(FULL_SYSTEMIMAGE_DEPS)))))
+$(eval $(call write-partition-file-list,$(systemimage_intermediates)/file_list.txt,$(TARGET_OUT),$(FULL_SYSTEMIMAGE_DEPS)))
 # Used by soong sandwich to request the staging dir be built
 $(systemimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT)/%,$(FULL_SYSTEMIMAGE_DEPS))
 	touch $@
@@ -3583,7 +3673,7 @@
     $(INTERNAL_USERIMAGES_DEPS) \
     $(INTERNAL_USERDATAIMAGE_FILES)
 
-$(eval $(call write-file-lines,$(userdataimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_DATA)/,,$(filter $(TARGET_OUT_DATA)/%,$(INSTALLED_USERDATAIMAGE_TARGET_DEPS)))))
+$(eval $(call write-partition-file-list,$(userdataimage_intermediates)/file_list.txt,$(TARGET_OUT_DATA),$(INSTALLED_USERDATAIMAGE_TARGET_DEPS)))
 # Used by soong sandwich to request the staging dir be built
 $(userdataimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_DATA)/%,$(INSTALLED_USERDATAIMAGE_TARGET_DEPS))
 	touch $@
@@ -3639,7 +3729,7 @@
   $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(cacheimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_CACHE)/,,$(filter $(TARGET_OUT_CACHE)/%,$(INTERNAL_CACHEIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(cacheimage_intermediates)/file_list.txt,$(TARGET_OUT_CACHE),$(INTERNAL_CACHEIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(cacheimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_CACHE)/%,$(INTERNAL_CACHEIMAGE_FILES))
 	touch $@
@@ -3726,7 +3816,7 @@
   $(call assert-max-image-size,$(INSTALLED_SYSTEMOTHERIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(systemotherimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_SYSTEM_OTHER)/,,$(filter $(TARGET_OUT_SYSTEM_OTHER)/%,$(INTERNAL_SYSTEMOTHERIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(systemotherimage_intermediates)/file_list.txt,$(TARGET_OUT_SYSTEM_OTHER),$(INTERNAL_SYSTEMOTHERIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(systemotherimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_SYSTEM_OTHER)/%,$(INTERNAL_SYSTEMOTHERIMAGE_FILES))
 	touch $@
@@ -3832,7 +3922,7 @@
   $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET) $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_VENDORIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(vendorimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_VENDOR)/,,$(filter $(TARGET_OUT_VENDOR)/%,$(INTERNAL_VENDORIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(vendorimage_intermediates)/file_list.txt,$(TARGET_OUT_VENDOR),$(INTERNAL_VENDORIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(vendorimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_VENDOR)/%,$(INTERNAL_VENDORIMAGE_FILES))
 	touch $@
@@ -3905,7 +3995,7 @@
   $(call assert-max-image-size,$(INSTALLED_PRODUCTIMAGE_TARGET),$(BOARD_PRODUCTIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(productimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_PRODUCT)/,,$(filter $(TARGET_OUT_PRODUCT)/%,$(INTERNAL_PRODUCTIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(productimage_intermediates)/file_list.txt,$(TARGET_OUT_PRODUCT),$(INTERNAL_PRODUCTIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(productimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_PRODUCT)/%,$(INTERNAL_PRODUCTIMAGE_FILES))
 	touch $@
@@ -3975,7 +4065,7 @@
   $(call assert-max-image-size,$(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET),$(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(system_extimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_SYSTEM_EXT)/,,$(filter $(TARGET_OUT_SYSTEM_EXT)/%,$(INTERNAL_SYSTEM_EXTIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(system_extimage_intermediates)/file_list.txt,$(TARGET_OUT_SYSTEM_EXT),$(INTERNAL_SYSTEM_EXTIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(system_extimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_SYSTEM_EXT)/%,$(INTERNAL_SYSTEM_EXTIMAGE_FILES))
 	touch $@
@@ -4064,7 +4154,7 @@
   $(call assert-max-image-size,$(INSTALLED_ODMIMAGE_TARGET),$(BOARD_ODMIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(odmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_ODM)/,,$(filter $(TARGET_OUT_ODM)/%,$(INTERNAL_ODMIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(odmimage_intermediates)/file_list.txt,$(TARGET_OUT_ODM),$(INTERNAL_ODMIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(odmimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_ODM)/%,$(INTERNAL_ODMIMAGE_FILES))
 	touch $@
@@ -4133,7 +4223,7 @@
   $(call assert-max-image-size,$(INSTALLED_VENDOR_DLKMIMAGE_TARGET),$(BOARD_VENDOR_DLKMIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(vendor_dlkmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_VENDOR_DLKM)/,,$(filter $(TARGET_OUT_VENDOR_DLKM)/%,$(INTERNAL_VENDOR_DLKMIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(vendor_dlkmimage_intermediates)/file_list.txt,$(TARGET_OUT_VENDOR_DLKM),$(INTERNAL_VENDOR_DLKMIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(vendor_dlkmimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_VENDOR_DLKM)/%,$(INTERNAL_VENDOR_DLKMIMAGE_FILES))
 	touch $@
@@ -4202,7 +4292,7 @@
   $(call assert-max-image-size,$(INSTALLED_ODM_DLKMIMAGE_TARGET),$(BOARD_ODM_DLKMIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(odm_dlkmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_ODM_DLKM)/,,$(filter $(TARGET_OUT_ODM_DLKM)/%,$(INTERNAL_ODM_DLKMIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(odm_dlkmimage_intermediates)/file_list.txt,$(TARGET_OUT_ODM_DLKM),$(INTERNAL_ODM_DLKMIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(odm_dlkmimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_ODM_DLKM)/%,$(INTERNAL_ODM_DLKMIMAGE_FILES))
 	touch $@
@@ -4273,7 +4363,7 @@
   $(call assert-max-image-size,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(BOARD_SYSTEM_DLKMIMAGE_PARTITION_SIZE))
 endef
 
-$(eval $(call write-file-lines,$(system_dlkmimage_intermediates)/file_list.txt,$(subst $(TARGET_OUT_SYSTEM_DLKM)/,,$(filter $(TARGET_OUT_SYSTEM_DLKM)/%,$(INTERNAL_SYSTEM_DLKMIMAGE_FILES)))))
+$(eval $(call write-partition-file-list,$(system_dlkmimage_intermediates)/file_list.txt,$(TARGET_OUT_SYSTEM_DLKM),$(INTERNAL_SYSTEM_DLKMIMAGE_FILES)))
 # Used by soong sandwich to request the staging dir be built
 $(system_dlkmimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT_SYSTEM_DLKM)/%,$(INTERNAL_SYSTEM_DLKMIMAGE_FILES))
 	touch $@
@@ -4304,33 +4394,6 @@
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_SYSTEM_DLKMIMAGE),$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)))
 endif
 
-# -----------------------------------------------------------------
-# dtbo image
-ifdef BOARD_PREBUILT_DTBOIMAGE
-INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
-
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
-	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
-	chmod +w $@
-	$(AVBTOOL) add_hash_footer \
-	    --image $@ \
-	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
-	    --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
-	    $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
-
-$(call declare-1p-container,$(INSTALLED_DTBOIMAGE_TARGET),)
-$(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_TARGET),$(BOARD_PREBUILT_DTBOIMAGE),$(PRODUCT_OUT)/:/)
-
-UNMOUNTED_NOTICE_VENDOR_DEPS+= $(INSTALLED_DTBOIMAGE_TARGET)
-else
-$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
-	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
-endif
-
-endif # BOARD_PREBUILT_DTBOIMAGE
-
-# -----------------------------------------------------------------
 # Protected VM firmware image
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 
@@ -4341,7 +4404,7 @@
 INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET := $(PRODUCT_OUT)/pvmfw_embedded.avbpubkey
 INSTALLED_PVMFW_BINARY_TARGET := $(call module-target-built-files,pvmfw_bin)
 INTERNAL_PVMFWIMAGE_FILES := $(call module-target-built-files,pvmfw_img)
-INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_embedded_key)
+INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_embedded_key_pub_bin)
 INTERNAL_PVMFW_SYMBOL := $(TARGET_OUT_EXECUTABLES_UNSTRIPPED)/pvmfw
 
 $(call declare-1p-container,$(INSTALLED_PVMFWIMAGE_TARGET),)
@@ -4891,7 +4954,7 @@
   $(if $(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST),\
     $(hide) $(foreach partition,$(BOARD_AVB_CUSTOMIMAGES_PARTITION_LIST), \
         $(AVBTOOL) extract_public_key --key $(BOARD_AVB_$(call to-upper,$(partition))_KEY_PATH) \
-            --output $(1)/$(partition).avbpubkey;)) \
+            --output $(1)/$(partition).avbpubkey;))
   $(if $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),\
     $(hide) $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS), \
         $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_KEY_PATH) \
@@ -5623,12 +5686,6 @@
   resize2fs \
   soong_zip \
 
-ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-INTERNAL_OTATOOLS_MODULES += \
-  futility-host \
-  vboot_signer
-endif
-
 INTERNAL_OTATOOLS_FILES := \
   $(filter $(HOST_OUT)/%,$(call module-installed-files,$(INTERNAL_OTATOOLS_MODULES)))
 
@@ -5664,10 +5721,6 @@
   $(sort $(shell find external/avb/test/data -type f -name "testkey_*.pem" -o \
       -name "atx_metadata.bin"))
 endif
-ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-INTERNAL_OTATOOLS_PACKAGE_FILES += \
-  $(sort $(shell find external/vboot_reference/tests/devkeys -type f))
-endif
 
 INTERNAL_OTATOOLS_RELEASETOOLS := \
   $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o \
@@ -6104,12 +6157,6 @@
     $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
     $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
   endif
-
-  # Not checking in board_config.mk, since AB_OTA_PARTITIONS may be updated in Android.mk (e.g. to
-  # additionally include radio or bootloader partitions).
-  ifeq ($(AB_OTA_PARTITIONS),)
-    $(error AB_OTA_PARTITIONS must be defined when using AB_OTA_UPDATER)
-  endif
 endif
 
 ifneq ($(AB_OTA_PARTITIONS),)
@@ -6206,6 +6253,8 @@
     echo "virtual_ab_retrofit=true" >> $(1))
   $(if $(PRODUCT_VIRTUAL_AB_COW_VERSION), \
     echo "virtual_ab_cow_version=$(PRODUCT_VIRTUAL_AB_COW_VERSION)" >> $(1))
+  $(if $(PRODUCT_VIRTUAL_AB_COMPRESSION_FACTOR), \
+    echo "virtual_ab_compression_factor=$(PRODUCT_VIRTUAL_AB_COMPRESSION_FACTOR)" >> $(1))
 endef
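As a hedged illustration of the new entry: a product that sets the compression factor (the value below is an example, not a default introduced by this change) ends up with the matching key written by the macro above.

    # Product makefile (illustrative)
    PRODUCT_VIRTUAL_AB_COMPRESSION_FACTOR := 65536
    # -> "virtual_ab_compression_factor=65536" is appended to the misc info file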
 
 # Copy an image file to a directory and generate a block list map file from the image,
@@ -6678,6 +6727,10 @@
 	@# If breakpad symbols have been generated, add them to the zip.
 	$(hide) cp -R $(TARGET_OUT_BREAKPAD) $(zip_root)/BREAKPAD
 endif
+ifdef BOARD_PREBUILT_VENDOR_BOOTIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 ifdef BOARD_PREBUILT_VENDORIMAGE
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
@@ -6907,6 +6960,7 @@
         --verbose \
         --path $(HOST_OUT) \
         $(if $(OEM_OTA_CONFIG), --oem_settings $(OEM_OTA_CONFIG)) \
+        $(if $(BOOT_VAR_OTA_CONFIG), --boot_variable_file $(BOOT_VAR_OTA_CONFIG)) \
         $(2) \
         $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) $(1)
 endef
@@ -7024,18 +7078,19 @@
 
 endif
 
-$(DEXPREOPT_CONFIG_ZIP): $(SOONG_ZIP)
-	$(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/dexpreopt_config
-
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS :=
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 ifneq (,$(DEX_PREOPT_CONFIG_FOR_MAKE))
-	$(hide) cp $(DEX_PREOPT_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS += -e $(notdir $(DEX_PREOPT_CONFIG_FOR_MAKE)) -f $(DEX_PREOPT_CONFIG_FOR_MAKE)
 endif
 ifneq (,$(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE))
-	$(hide) cp $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS += -e $(notdir $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)) -f $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
 endif
 endif #!TARGET_BUILD_UNBUNDLED
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/dexpreopt_config -D $(PRODUCT_OUT)/dexpreopt_config
+
+$(DEXPREOPT_CONFIG_ZIP): $(SOONG_ZIP)
+	$(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/dexpreopt_config
+	$(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/dexpreopt_config -D $(PRODUCT_OUT)/dexpreopt_config $(PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS)
 
 .PHONY: dexpreopt_config_zip
 dexpreopt_config_zip: $(DEXPREOPT_CONFIG_ZIP)
@@ -7530,6 +7585,10 @@
 droidcore-unbundled: $(QEMU_VERIFIED_BOOT_PARAMS)
 
 endif
+
+# Preprocess files for emulator and sdk.
+-include development/build/tools/sdk-preprocess-files.mk
+
 # -----------------------------------------------------------------
 # The emulator package
 ifeq ($(BUILD_EMULATOR),true)
diff --git a/core/OWNERS b/core/OWNERS
index 1c3d017..35ea83d 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -11,5 +11,3 @@
 # For Ravenwood test configs
 per-file ravenwood_test_config_template.xml = jsharkey@google.com,omakoto@google.com
 
-# For binary_translation
-per-file berberis_test.mk = levarum@google.com,khim@google.com,dimitry@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 4e42a21..f2ff286 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -26,84 +26,19 @@
 
 # Add variables to the namespace below:
 
-$(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
+$(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
+$(call soong_config_set_bool,ANDROID,BOARD_USES_RECOVERY_AS_BOOT,$(BOARD_USES_RECOVERY_AS_BOOT))
+$(call add_soong_config_var,ANDROID,CHECK_DEV_TYPE_VIOLATIONS)
+$(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_VERSION)
+$(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_COMPAT_VERSIONS)
+$(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_DRMSERVER)
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
-$(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
-$(call add_soong_config_var,ANDROID,BOARD_USES_RECOVERY_AS_BOOT)
-$(call add_soong_config_var,ANDROID,CHECK_DEV_TYPE_VIOLATIONS)
-$(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
+$(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
 
-# Default behavior for the tree wrt building modules or using prebuilts. This
-# can always be overridden by setting the environment variable
-# MODULE_BUILD_FROM_SOURCE.
-BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := $(RELEASE_DEFAULT_MODULE_BUILD_FROM_SOURCE)
-# TODO(b/301454934): The value from build flag is set to empty when use `False`
-# The condition below can be removed after the issue get sorted.
-ifeq (,$(BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE))
-  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := false
-endif
+# PRODUCT_PRECOMPILED_SEPOLICY defaults to true. Explicitly check if it's "false" or not.
+$(call soong_config_set_bool,ANDROID,PRODUCT_PRECOMPILED_SEPOLICY,$(if $(filter false,$(PRODUCT_PRECOMPILED_SEPOLICY)),false,true))
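The $(if $(filter false,...),false,true) idiom above normalizes the tri-state input to a bool; a quick sketch of its behavior:

    #   PRODUCT_PRECOMPILED_SEPOLICY unset      -> passes "true"
    #   PRODUCT_PRECOMPILED_SEPOLICY := true    -> passes "true"
    #   PRODUCT_PRECOMPILED_SEPOLICY := false   -> passes "false"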
 
-ifneq ($(SANITIZE_TARGET)$(EMMA_INSTRUMENT_FRAMEWORK),)
-  # Always use sources when building the framework with Java coverage or
-  # sanitized builds as they both require purpose built prebuilts which we do
-  # not provide.
-  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
-endif
-
-ifneq ($(CLANG_COVERAGE)$(NATIVE_COVERAGE_PATHS),)
-  # Always use sources when building with clang coverage and native coverage.
-  # It is possible that there are certain situations when building with coverage
-  # would work with prebuilts, e.g. when the coverage is not being applied to
-  # modules for which we provide prebuilts. Unfortunately, determining that
-  # would require embedding knowledge of which coverage paths affect which
-  # modules here. That would duplicate a lot of information, add yet another
-  # location  module authors have to update and complicate the logic here.
-  # For nowe we will just always build from sources when doing coverage builds.
-  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
-endif
-
-# ART does not provide linux_bionic variants needed for products that
-# set HOST_CROSS_OS=linux_bionic.
-ifeq (linux_bionic,${HOST_CROSS_OS})
-  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
-endif
-
-# ART does not provide host side arm64 variants needed for products that
-# set HOST_CROSS_ARCH=arm64.
-ifeq (arm64,${HOST_CROSS_ARCH})
-  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
-endif
-
-ifneq (,$(MODULE_BUILD_FROM_SOURCE))
-  # Keep an explicit setting.
-else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES))$(findstring com.google.android.go.conscrypt,$(PRODUCT_PACKAGES)))
-  # Prebuilt module SDKs require prebuilt modules to work, and currently
-  # prebuilt modules are only provided for com.google.android(.go)?.xxx. If we can't
-  # find one of them in PRODUCT_PACKAGES then assume com.android.xxx are in use,
-  # and disable prebuilt SDKs. In particular this applies to AOSP builds.
-  #
-  # However, docs/sdk/win_sdk/sdk_addon builds might not include com.google.android.xxx
-  # packages, so for those we respect the default behavior.
-  MODULE_BUILD_FROM_SOURCE := true
-else ifneq (,$(PRODUCT_MODULE_BUILD_FROM_SOURCE))
-  # Let products override the branch default.
-  MODULE_BUILD_FROM_SOURCE := $(PRODUCT_MODULE_BUILD_FROM_SOURCE)
-else
-  MODULE_BUILD_FROM_SOURCE := $(BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE)
-endif
-
-ifneq (,$(ART_MODULE_BUILD_FROM_SOURCE))
-  # Keep an explicit setting.
-else ifneq (,$(findstring .android.art,$(TARGET_BUILD_APPS)))
-  # Build ART modules from source if they are listed in TARGET_BUILD_APPS.
-  ART_MODULE_BUILD_FROM_SOURCE := true
-else
-  # Do the same as other modules by default.
-  ART_MODULE_BUILD_FROM_SOURCE := $(MODULE_BUILD_FROM_SOURCE)
-endif
-
-$(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
 ifdef ART_DEBUG_OPT_FLAG
 $(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG))
 endif
@@ -112,34 +47,6 @@
   $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
 endif
 
-# Ensure that those mainline modules who have individually toggleable prebuilts
-# are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
-# default.
-INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
-  adservices \
-  appsearch \
-  btservices \
-  devicelock \
-  configinfrastructure \
-  conscrypt \
-  healthfitness \
-  ipsec \
-  media \
-  mediaprovider \
-  ondevicepersonalization \
-  permission \
-  rkpd \
-  scheduling \
-  sdkext \
-  statsd \
-  tethering \
-  uwb \
-  wifi \
-
-$(foreach m, $(INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES),\
-  $(if $(call soong_config_get,$(m)_module,source_build),,\
-    $(call soong_config_set,$(m)_module,source_build,$(MODULE_BUILD_FROM_SOURCE))))
-
 # Apex build mode variables
 ifdef APEX_BUILD_FOR_PRE_S_DEVICES
 $(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static)
@@ -149,40 +56,62 @@
 endif
 endif
 
-ifeq (true,$(MODULE_BUILD_FROM_SOURCE))
+# TODO(b/308187800): some internal modules set `prefer` to true on the prebuilt apex module,
+# and set that to false when `ANDROID.module_build_from_source` is true.
+# Set this soong config variable to true for now, and clean up `prefer` as part of b/308187800
 $(call add_soong_config_var_value,ANDROID,module_build_from_source,true)
-endif
-
-# Messaging app vars
-ifeq (eng,$(TARGET_BUILD_VARIANT))
-$(call soong_config_set,messaging,build_variant_eng,true)
-endif
 
 # Enable SystemUI optimizations by default unless explicitly set.
 SYSTEMUI_OPTIMIZE_JAVA ?= true
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
 
-# Enable Compose in SystemUI by default.
-SYSTEMUI_USE_COMPOSE ?= true
-$(call add_soong_config_var,ANDROID,SYSTEMUI_USE_COMPOSE)
-
 ifdef PRODUCT_AVF_ENABLED
 $(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
 endif
 
+# Enable AVF remote attestation according to the flag value unless
+# PRODUCT_AVF_REMOTE_ATTESTATION_DISABLED is explicitly set to true.
+ifneq (true,$(PRODUCT_AVF_REMOTE_ATTESTATION_DISABLED))
+  $(call add_soong_config_var_value,ANDROID,avf_remote_attestation_enabled,$(RELEASE_AVF_ENABLE_REMOTE_ATTESTATION))
+endif
+
+ifdef PRODUCT_AVF_MICRODROID_GUEST_GKI_VERSION
+$(call add_soong_config_var_value,ANDROID,avf_microdroid_guest_gki_version,$(PRODUCT_AVF_MICRODROID_GUEST_GKI_VERSION))
+endif
+
+ifdef PRODUCT_MEMCG_V2_FORCE_ENABLED
+$(call add_soong_config_var_value,ANDROID,memcg_v2_force_enabled,$(PRODUCT_MEMCG_V2_FORCE_ENABLED))
+endif
+
+ifdef PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED
+$(call add_soong_config_var_value,ANDROID,cgroup_v2_sys_app_isolation,$(PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED))
+endif
+
 $(call add_soong_config_var_value,ANDROID,release_avf_allow_preinstalled_apps,$(RELEASE_AVF_ALLOW_PREINSTALLED_APPS))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_device_assignment,$(RELEASE_AVF_ENABLE_DEVICE_ASSIGNMENT))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_dice_changes,$(RELEASE_AVF_ENABLE_DICE_CHANGES))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_llpvm_changes,$(RELEASE_AVF_ENABLE_LLPVM_CHANGES))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_multi_tenant_microdroid_vm,$(RELEASE_AVF_ENABLE_MULTI_TENANT_MICRODROID_VM))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_network,$(RELEASE_AVF_ENABLE_NETWORK))
+# TODO(b/341292601): This flag is needed until the V release. We will clean it up after V, together
+# with most of the release_avf_ flags here.
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_remote_attestation,$(RELEASE_AVF_ENABLE_REMOTE_ATTESTATION))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_vendor_modules,$(RELEASE_AVF_ENABLE_VENDOR_MODULES))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_virt_cpufreq,$(RELEASE_AVF_ENABLE_VIRT_CPUFREQ))
 $(call add_soong_config_var_value,ANDROID,release_avf_microdroid_kernel_version,$(RELEASE_AVF_MICRODROID_KERNEL_VERSION))
+$(call add_soong_config_var_value,ANDROID,release_avf_support_custom_vm_with_paravirtualized_devices,$(RELEASE_AVF_SUPPORT_CUSTOM_VM_WITH_PARAVIRTUALIZED_DEVICES))
 
 $(call add_soong_config_var_value,ANDROID,release_binder_death_recipient_weak_from_jni,$(RELEASE_BINDER_DEATH_RECIPIENT_WEAK_FROM_JNI))
 
+$(call add_soong_config_var_value,ANDROID,release_libpower_no_lock_binder_txn,$(RELEASE_LIBPOWER_NO_LOCK_BINDER_TXN))
+
+$(call add_soong_config_var_value,ANDROID,release_package_libandroid_runtime_punch_holes,$(RELEASE_PACKAGE_LIBANDROID_RUNTIME_PUNCH_HOLES))
+
 $(call add_soong_config_var_value,ANDROID,release_selinux_data_data_ignore,$(RELEASE_SELINUX_DATA_DATA_IGNORE))
+ifneq (,$(filter eng userdebug,$(TARGET_BUILD_VARIANT)))
+    # write appcompat system properties on userdebug and eng builds
+    $(call add_soong_config_var_value,ANDROID,release_write_appcompat_override_system_properties,true)
+endif
 
 # Enable system_server optimizations by default unless explicitly set or if
 # there may be dependent runtime jars.
@@ -219,6 +148,7 @@
 
 # Add crashrecovery build flag to soong
 $(call soong_config_set,ANDROID,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+# Add crashrecovery file move flags to soong, for both platform and module
 ifeq (true,$(RELEASE_CRASHRECOVERY_FILE_MOVE))
   $(call soong_config_set,ANDROID,crashrecovery_files_in_module,true)
   $(call soong_config_set,ANDROID,crashrecovery_files_in_platform,false)
@@ -226,5 +156,9 @@
   $(call soong_config_set,ANDROID,crashrecovery_files_in_module,false)
   $(call soong_config_set,ANDROID,crashrecovery_files_in_platform,true)
 endif
-# Weirdly required because platform_bootclasspath is using AUTO namespace
-$(call soong_config_set,AUTO,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+# Required as platform_bootclasspath is using this namespace
+$(call soong_config_set,bootclasspath,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+
+# Enable Profiling module. Also used by platform_bootclasspath.
+$(call soong_config_set,ANDROID,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE))
+$(call soong_config_set,bootclasspath,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE))
diff --git a/core/art_config.mk b/core/art_config.mk
index 47b4bcf..9e87a7b 100644
--- a/core/art_config.mk
+++ b/core/art_config.mk
@@ -19,22 +19,19 @@
 endif
 
 ENABLE_UFFD_GC := $(config_enable_uffd_gc)
-# If the value is "default", it will be mangled by post_process_props.py.
-ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(config_enable_uffd_gc)
 
 # Create APEX_BOOT_JARS_EXCLUDED which is a list of jars to be removed from
 # ApexBootJars when built from mainline prebuilts.
-# soong variables indicate whether the prebuilt is enabled:
-# - $(m)_module/source_build for art and TOGGLEABLE_PREBUILT_MODULES
-# - ANDROID/module_build_from_source for other mainline modules
 # Note that RELEASE_APEX_BOOT_JARS_PREBUILT_EXCLUDED_LIST is the list of module names
 # and library names of jars that need to be removed. We have to keep a separate list per
 # release config due to the possibility of different prebuilt content.
-APEX_BOOT_JARS_EXCLUDED :=
-$(foreach pair, $(RELEASE_APEX_BOOT_JARS_PREBUILT_EXCLUDED_LIST),\
-  $(eval m := $(subst com.android.,,$(call word-colon,1,$(pair)))) \
-  $(if $(call soong_config_get,$(m)_module,source_build), \
-    $(if $(filter true,$(call soong_config_get,$(m)_module,source_build)),, \
-      $(eval APEX_BOOT_JARS_EXCLUDED += $(pair))), \
-    $(if $(filter true,$(call soong_config_get,ANDROID,module_build_from_source)),, \
-      $(eval APEX_BOOT_JARS_EXCLUDED += $(pair)))))
+#
+# If a device has opted not to use Google prebuilts (determined using
+# PRODUCT_BUILD_IGNORE_APEX_CONTRIBUTION_CONTENTS), then no jars need to be removed.
+# Examples of products where PRODUCT_BUILD_IGNORE_APEX_CONTRIBUTION_CONTENTS is true:
+# 1. aosp devices (they do not use google apexes)
+# 2. hwasan devices (apex prebuilts are not compatible with these devices)
+# 3. coverage builds
+ifneq (true, $(PRODUCT_BUILD_IGNORE_APEX_CONTRIBUTION_CONTENTS))
+  APEX_BOOT_JARS_EXCLUDED += $(RELEASE_APEX_BOOT_JARS_PREBUILT_EXCLUDED_LIST)
+endif
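For context, entries in RELEASE_APEX_BOOT_JARS_PREBUILT_EXCLUDED_LIST use the same apex:jar pair format that the removed parsing code split on; the module name below is hypothetical:

    # release config (illustrative)
    RELEASE_APEX_BOOT_JARS_PREBUILT_EXCLUDED_LIST := com.android.acme:framework-acme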
diff --git a/core/base_rules.mk b/core/base_rules.mk
index b8aa5fe..a8cf67e 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -393,8 +393,8 @@
 
 logtags_sources := $(filter %.logtags,$(LOCAL_SRC_FILES)) $(LOCAL_LOGTAGS_FILES)
 
-ifneq ($(strip $(logtags_sources)),)
-event_log_tags := $(foreach f,$(addprefix $(LOCAL_PATH)/,$(logtags_sources)),$(call clean-path,$(f)))
+ifneq ($(strip $(logtags_sources) $(LOCAL_SOONG_LOGTAGS_FILES)),)
+event_log_tags := $(foreach f,$(LOCAL_SOONG_LOGTAGS_FILES) $(addprefix $(LOCAL_PATH)/,$(logtags_sources)),$(call clean-path,$(f)))
 else
 event_log_tags :=
 endif
@@ -694,6 +694,16 @@
 endif
 
 ###########################################################
+## SOONG INSTALL PAIRS
+###########################################################
+# Make this module's targets depend on the files installed by Soong via LOCAL_SOONG_INSTALL_PAIRS.
+ifneq (,$(LOCAL_SOONG_INSTALLED_MODULE))
+$(my_all_targets): \
+    $(foreach f, $(LOCAL_SOONG_INSTALL_PAIRS),\
+      $(word 2,$(subst :,$(space),$(f))))
+endif
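Each entry in LOCAL_SOONG_INSTALL_PAIRS is a built-file:installed-file pair, and $(word 2,$(subst :,$(space),...)) picks the installed side; a hypothetical pair to illustrate:

    #   out/soong/.intermediates/acme/libacme/libacme.so:$(TARGET_OUT)/lib64/libacme.so
    #   -> $(my_all_targets) gains a dependency on $(TARGET_OUT)/lib64/libacme.so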
+
+###########################################################
 ## Compatibility suite files.
 ###########################################################
 ifdef LOCAL_COMPATIBILITY_SUITE
@@ -706,6 +716,16 @@
   test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
 endif
 
+ifeq ($(EXCLUDE_MCTS),true)
+ifeq (,$(filter $(LOCAL_MODULE),$(mcts_whitelist)))
+  ifneq (,$(test_config))
+    ifneq (,$(filter mcts-%,$(LOCAL_COMPATIBILITY_SUITE)))
+      LOCAL_COMPATIBILITY_SUITE := $(filter-out cts,$(LOCAL_COMPATIBILITY_SUITE))
+    endif
+  endif
+endif
+endif
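A hedged example of the effect: with EXCLUDE_MCTS set to true, a test module that is not on the whitelist, has a test config, and declares both suites (names made up) keeps its mcts suite but loses cts:

    # LOCAL_COMPATIBILITY_SUITE := cts mcts-acme   ->   mcts-acme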
+
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
 
 # If we are building a native test or benchmark and its stem variants are not defined,
@@ -750,6 +770,10 @@
       $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem)))) \
   $(eval my_compat_dist_config_$(suite) := ))
 
+ifneq (,$(LOCAL_SOONG_CLASSES_JAR))
+    $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+      $(eval my_compat_api_map_$(suite) += $(LOCAL_SOONG_CLASSES_JAR)))
+endif
 
 # Auto-generate build config.
 ifeq (,$(test_config))
@@ -803,6 +827,12 @@
       $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
         $(s):$(dir)/$(n)))))
 
+  $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+     $(eval my_compat_api_map_$(suite) += $(foreach f, $(LOCAL_COMPATIBILITY_SUPPORT_FILES), \
+       $(eval p := $(subst :,$(space),$(f))) \
+       $(eval s := $(word 1,$(p))) \
+       $(if $(filter %.apk,$(s)) $(filter %.jar,$(s)),$(s),))))
+
   ifneq (,$(test_config))
     $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
       $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
@@ -845,7 +875,9 @@
       $(call filter-copy-pair,$(src_path),$(call append-path,$(dir),$(file)),$(my_installed_test_data)))) \
     $(eval my_compat_dist_test_data_$(suite) += \
       $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
-        $(filter $(my_installed_test_data),$(call append-path,$(dir),$(file)))))))
+        $(filter $(my_installed_test_data),$(call append-path,$(dir),$(file))))) \
+    $(eval my_compat_api_map_$(suite) += \
+      $(if $(filter %.apk,$(src_path)) $(filter %.jar,$(src_path)),$(src_path),))))
 endif
 else
 ifneq ($(my_test_data_file_pairs),)
@@ -855,7 +887,9 @@
   $(eval file := $(word 2,$(parts))) \
   $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
     $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
-      $(src_path):$(call append-path,$(dir),$(file))))))
+      $(src_path):$(call append-path,$(dir),$(file)))) \
+    $(eval my_compat_api_map_$(suite) += \
+      $(if $(filter %.apk,$(src_path)) $(filter %.jar,$(src_path)),$(src_path),))))
 endif
 endif
 
@@ -867,7 +901,8 @@
 $(call create-suite-dependencies)
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval my_compat_dist_config_$(suite) := ) \
-  $(eval my_compat_dist_test_data_$(suite) := ))
+  $(eval my_compat_dist_test_data_$(suite) := ) \
+  $(eval my_compat_api_map_$(suite) := ))
 
 endif  # LOCAL_UNINSTALLABLE_MODULE
 
@@ -932,6 +967,8 @@
     $(ALL_MODULES.$(my_register_name).BUILT) $(LOCAL_BUILT_MODULE)
 ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE := \
     $(ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE) $(LOCAL_SOONG_MODULE_TYPE)
+ALL_MODULES.$(my_register_name).IS_SOONG_MODULE := \
+    $(if $(filter $(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK)),true)
 ifndef LOCAL_IS_HOST_MODULE
 ALL_MODULES.$(my_register_name).TARGET_BUILT := \
     $(ALL_MODULES.$(my_register_name).TARGET_BUILT) $(LOCAL_BUILT_MODULE)
@@ -1090,6 +1127,7 @@
     $(LOCAL_JNI_SHARED_LIBRARIES)
 
 endif
+ALL_MODULES.$(my_register_name).TEST_MODULE_CONFIG_BASE := $(LOCAL_TEST_MODULE_CONFIG_BASE)
 
 ##########################################################################
 ## When compiling against API imported module, use API import stub
@@ -1232,6 +1270,8 @@
 endif
 endif
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=base_rules))
+
 ###########################################################
 # Ensure privileged applications always have LOCAL_PRIVILEGED_MODULE
 ###########################################################
diff --git a/core/binary.mk b/core/binary.mk
index b17ab00..0bc9469 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -258,13 +258,6 @@
       my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
     endif
 
-    ifneq ($(my_ndk_api),current)
-      ifeq ($(call math_lt,$(my_ndk_api),21),true)
-        my_ndk_stl_include_path += $(my_ndk_source_root)/android/support/include
-        my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
-      endif
-    endif
-
     my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
     my_ldlibs += -ldl
   else # LOCAL_NDK_STL_VARIANT must be none
@@ -1196,6 +1189,17 @@
 endif
 
 ###################################################################
+## When compiling a memtag_stack enabled target, use the .memtag_stack variant
+## of any static dependencies (where they exist).
+##################################################################
+ifneq ($(filter memtag_stack,$(my_sanitize)),)
+  my_whole_static_libraries := $(call use_soong_sanitized_static_libraries,\
+    $(my_whole_static_libraries),memtag_stack)
+  my_static_libraries := $(call use_soong_sanitized_static_libraries,\
+    $(my_static_libraries),memtag_stack)
+endif
+
+###################################################################
 ## When compiling against API imported module, use API import stub
 ## libraries.
 ##################################################################
@@ -1341,6 +1345,8 @@
 my_allowed_types := $(my_allowed_ndk_types) native:platform native:platform_vndk
 endif
 
+ALL_MODULES.$(my_register_name).WHOLE_STATIC_LIBS := $(my_whole_static_libraries)
+
 my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
 ifneq ($(filter-out STATIC_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
 my_link_deps += $(addprefix SHARED_LIBRARIES:,$(my_shared_libraries))
diff --git a/core/board_config.mk b/core/board_config.mk
index 633303f..d3f0493 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -274,7 +274,7 @@
 
 ifneq ($(MALLOC_IMPL),)
   $(warning *** Unsupported option MALLOC_IMPL defined by board config: $(board_config_mk).)
-  $(error Use `MALLOC_SVELTE := true` to configure jemalloc for low-memory)
+  $(error Use `MALLOC_LOW_MEMORY := true` to use low-memory allocator config)
 endif
 board_config_mk :=
 
@@ -923,7 +923,9 @@
 ###########################################
 # Ensure consistency among TARGET_RECOVERY_UPDATER_LIBS, AB_OTA_UPDATER, and PRODUCT_OTA_FORCE_NON_AB_PACKAGE.
 TARGET_RECOVERY_UPDATER_LIBS ?=
-AB_OTA_UPDATER ?=
+ifeq ($(AB_OTA_UPDATER),)
+AB_OTA_UPDATER := true
+endif
 .KATI_READONLY := TARGET_RECOVERY_UPDATER_LIBS AB_OTA_UPDATER
 
 # Ensure that if PRODUCT_OTA_FORCE_NON_AB_PACKAGE == true, then AB_OTA_UPDATER must be true
diff --git a/core/check_elf_file.mk b/core/check_elf_file.mk
index b5be81f..ec3c4b0 100644
--- a/core/check_elf_file.mk
+++ b/core/check_elf_file.mk
@@ -7,9 +7,12 @@
 #
 # Inputs:
 # - LOCAL_ALLOW_UNDEFINED_SYMBOLS
+# - LOCAL_IGNORE_MAX_PAGE_SIZE
 # - LOCAL_BUILT_MODULE
 # - LOCAL_IS_HOST_MODULE
 # - LOCAL_MODULE_CLASS
+# - TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE
+# - TARGET_MAX_PAGE_SIZE_SUPPORTED
 # - intermediates
 # - my_installed_module_stem
 # - my_prebuilt_src_file
@@ -26,6 +29,21 @@
 # In addition to $(my_check_elf_file_shared_lib_files), some file paths are
 # added by `resolve-shared-libs-for-elf-file-check` from `core/main.mk`.
 $(check_elf_files_stamp): PRIVATE_SHARED_LIBRARY_FILES := $(my_check_elf_file_shared_lib_files)
+
+# For different page sizes to work, we must support a larger max page size
+# and properly reflect the page size at runtime. Limit this check, since many
+# devices set a larger max page size for future proofing without actually
+# using the larger page size yet.
+ifeq ($(strip $(TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE)),true)
+ifeq ($(strip $(LOCAL_IGNORE_MAX_PAGE_SIZE)),true)
+$(check_elf_files_stamp): PRIVATE_MAX_PAGE_SIZE :=
+else
+$(check_elf_files_stamp): PRIVATE_MAX_PAGE_SIZE := $(TARGET_MAX_PAGE_SIZE_SUPPORTED)
+endif
+else
+$(check_elf_files_stamp): PRIVATE_MAX_PAGE_SIZE :=
+endif
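A sketch of the two knobs involved, based only on the variables introduced in this change (values are illustrative): the product opts the check in, and an individual prebuilt can still opt out.

    # Product makefile (illustrative)
    PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE := true
    # Android.mk of a prebuilt known not to honor the larger max page size
    LOCAL_IGNORE_MAX_PAGE_SIZE := true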
+
 $(check_elf_files_stamp): $(my_prebuilt_src_file) $(my_check_elf_file_shared_lib_files) $(CHECK_ELF_FILE) $(LLVM_READOBJ)
 	@echo Check prebuilt ELF binary: $<
 	$(hide) mkdir -p $(dir $@)
@@ -33,6 +51,7 @@
 	$(hide) $(CHECK_ELF_FILE) \
 	    --skip-bad-elf-magic \
 	    --skip-unknown-elf-machine \
+	    $(if $(PRIVATE_MAX_PAGE_SIZE),--max-page-size=$(PRIVATE_MAX_PAGE_SIZE)) \
 	    $(if $(PRIVATE_SONAME),--soname $(PRIVATE_SONAME)) \
 	    $(foreach l,$(PRIVATE_SHARED_LIBRARY_FILES),--shared-lib $(l)) \
 	    $(foreach l,$(PRIVATE_SYSTEM_SHARED_LIBRARIES),--system-shared-lib $(l)) \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 5481d50..6192690 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -106,6 +106,7 @@
 LOCAL_HEADER_LIBRARIES:=
 LOCAL_HOST_PREFIX:=
 LOCAL_HOST_REQUIRED_MODULES:=
+LOCAL_IGNORE_MAX_PAGE_SIZE:=
 LOCAL_INIT_RC:=
 LOCAL_INJECT_BSSL_HASH:=
 LOCAL_INSTALLED_MODULE:=
@@ -259,11 +260,13 @@
 LOCAL_SOONG_HEADER_JAR :=
 LOCAL_SOONG_INSTALL_PAIRS :=
 LOCAL_SOONG_INSTALL_SYMLINKS :=
+LOCAL_SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES:=
 LOCAL_SOONG_INSTALLED_MODULE :=
 LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR :=
 LOCAL_SOONG_LICENSE_METADATA :=
 LOCAL_SOONG_LINK_TYPE :=
 LOCAL_SOONG_LINT_REPORTS :=
+LOCAL_SOONG_LOGTAGS_FILES :=
 LOCAL_SOONG_MODULE_INFO_JSON :=
 LOCAL_SOONG_MODULE_TYPE :=
 LOCAL_SOONG_PROGUARD_DICT :=
@@ -296,6 +299,7 @@
 LOCAL_TEST_DATA_BINS:=
 LOCAL_TEST_MAINLINE_MODULES:=
 LOCAL_TEST_MODULE_TO_PROGUARD_WITH:=
+LOCAL_TEST_MODULE_CONFIG_BASE:=
 LOCAL_TIDY:=
 LOCAL_TIDY_CHECKS:=
 LOCAL_TIDY_FLAGS:=
diff --git a/core/config.mk b/core/config.mk
index 22ec292..ae65944 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -316,6 +316,19 @@
 $(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(strip $3))
 endef
 
+# soong_config_set_bool is the same as soong_config_set, but it will
+# also type the variable as a bool, so that when using select() expressions
+# in blueprint files they can use boolean values instead of strings.
+# It will only accept "true" for its value; any other value will be
+# treated as false.
+# $1 is the namespace. $2 is the variable name. $3 is the variable value.
+# Ex: $(call soong_config_set_bool,acme,COOL_FEATURE,true)
+define soong_config_set_bool
+$(call soong_config_define_internal,$1,$2) \
+$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(filter true,$3))
+$(eval SOONG_CONFIG_TYPE_$(strip $1)_$(strip $2):=bool)
+endef
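Because the value is stored through $(filter true,$3), anything other than the literal string "true" is stored as empty and therefore reads as false; a small sketch using the namespace from the comment above:

    $(call soong_config_set_bool,acme,COOL_FEATURE,enabled)  # stored as empty -> false
    $(call soong_config_set_bool,acme,COOL_FEATURE,true)     # stored as "true"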
+
 # soong_config_append appends to the value of the variable in the given Soong
 # config namespace. If the variable does not exist, it will be defined. If the
 # namespace does not  exist, it will be defined.
@@ -419,6 +432,13 @@
 endif
 .KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED
 
+ifdef PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE
+  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := $(PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE)
+else
+  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false
+endif
+.KATI_READONLY := TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE
+
 # Boolean variable determining if AOSP relies on bionic's PAGE_SIZE macro.
 ifdef PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
   TARGET_NO_BIONIC_PAGE_SIZE_MACRO := $(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)
@@ -509,7 +529,6 @@
 
 ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ccache.mk
-include $(BUILD_SYSTEM)/goma.mk
 include $(BUILD_SYSTEM)/rbe.mk
 endif
 
@@ -603,8 +622,6 @@
 prebuilt_build_tools_bin := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/asan/bin
 endif
 
-USE_PREBUILT_SDK_TOOLS_IN_PLACE := true
-
 # Work around for b/68406220
 # This should match the soong version.
 USE_D8 := true
@@ -714,8 +731,6 @@
 VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
 BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata
 BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
-FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
-VBOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/vboot_signer
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -824,9 +839,6 @@
   else
     min_systemsdk_version := $(PRODUCT_SHIPPING_API_LEVEL)
   endif
-  ifneq ($(call numbers_less_than,$(min_systemsdk_version),$(BOARD_SYSTEMSDK_VERSIONS)),)
-    $(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to BOARD_API_LEVEL, BOARD_SHIPPING_API_LEVEL or PRODUCT_SHIPPING_API_LEVEL ($(min_systemsdk_version)))
-  endif
   ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
     ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
       $(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
@@ -1234,29 +1246,35 @@
 # in the source tree.
 dont_bother_goals := out product-graph
 
+include $(BUILD_SYSTEM)/sysprop_config.mk
+
 # Make ANDROID Soong config variables visible to Android.mk files, for
 # consistency with those defined in BoardConfig.mk files.
 include $(BUILD_SYSTEM)/android_soong_config_vars.mk
 
+SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables
+SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).extra.variables
+
 ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ninja_config.mk
 include $(BUILD_SYSTEM)/soong_config.mk
 endif
 
+SOONG_VARIABLES :=
+SOONG_EXTRA_VARIABLES :=
+
 -include external/ltp/android/ltp_package_list.mk
 DEFAULT_DATA_OUT_MODULES := ltp $(ltp_packages)
 .KATI_READONLY := DEFAULT_DATA_OUT_MODULES
 
 include $(BUILD_SYSTEM)/dumpvar.mk
 
-ifneq ($(KEEP_VNDK),true)
 ifdef BOARD_VNDK_VERSION
 BOARD_VNDK_VERSION=
 endif
 ifdef PLATFORM_VNDK_VERSION
 PLATFORM_VNDK_VERSION=
 endif
-endif
 
 ifeq (true,$(FULL_SYSTEM_OPTIMIZE_JAVA))
 ifeq (false,$(SYSTEM_OPTIMIZE_JAVA))
diff --git a/core/copy_headers.mk b/core/copy_headers.mk
index 397ea62..2e82db7 100644
--- a/core/copy_headers.mk
+++ b/core/copy_headers.mk
@@ -50,4 +50,5 @@
 _chFrom :=
 _chTo :=
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=COPY_HEADERS))
 endif # LOCAL_COPY_HEADERS
diff --git a/core/definitions.mk b/core/definitions.mk
index 40b7980..b30b159 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2584,7 +2584,28 @@
         @$(call emit-line,$(wordlist 98001,98500,$(1)),$(2))
         @$(call emit-line,$(wordlist 98501,99000,$(1)),$(2))
         @$(call emit-line,$(wordlist 99001,99500,$(1)),$(2))
-        @$(if $(wordlist 99501,99502,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
+        @$(call emit-line,$(wordlist 99501,100000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 100001,100500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 100501,101000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 101001,101500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 101501,102000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 102001,102500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 102501,103000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 103001,103500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 103501,104000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 104001,104500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 104501,105000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 105001,105500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 105501,106000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 106001,106500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 106501,107000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 107001,107500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 107501,108000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 108001,108500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 108501,109000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 109001,109500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 109501,110000,$(1)),$(2))
+        @$(if $(wordlist 110001,110002,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
 endef
 # Return jar arguments to compress files in a given directory
 # $(1): directory
@@ -2925,19 +2946,15 @@
   echo "Install path: $(patsubst $(PRODUCT_OUT)/%,%,$(PRIVATE_INSTALLED_MODULE))" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
-ART_VERIDEX_APPCOMPAT_SCRIPT:=$(HOST_OUT)/bin/appcompat.sh
+ART_VERIDEX_APPCOMPAT:=$(HOST_OUT)/bin/appcompat
 define run-appcompat
 $(hide) \
-  echo "appcompat.sh output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
-  PACKAGING=$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING ANDROID_LOG_TAGS="*:e" $(ART_VERIDEX_APPCOMPAT_SCRIPT) --dex-file=$@ --api-flags=$(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
+  echo "appcompat output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+  ANDROID_LOG_TAGS="*:e" $(ART_VERIDEX_APPCOMPAT) --dex-file=$@ 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
 appcompat-files = \
   $(AAPT2) \
-  $(ART_VERIDEX_APPCOMPAT_SCRIPT) \
-  $(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) \
-  $(HOST_OUT_EXECUTABLES)/veridex \
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/core_dex_intermediates/classes.dex \
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/oahl_dex_intermediates/classes.dex
+  $(ART_VERIDEX_APPCOMPAT) \
 else
 appcompat-header =
 run-appcompat =
@@ -3589,11 +3606,14 @@
   $(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,\
     $(eval ALL_COMPATIBILITY_SUITES += $(suite)) \
     $(eval COMPATIBILITY.$(suite).FILES :=) \
-    $(eval COMPATIBILITY.$(suite).MODULES :=)) \
+    $(eval COMPATIBILITY.$(suite).MODULES :=) \
+    $(eval COMPATIBILITY.$(suite).API_MAP_FILES :=)) \
   $(eval COMPATIBILITY.$(suite).FILES += \
     $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(my_compat_dist_test_data_$(suite))) \
+  $(eval COMPATIBILITY.$(suite).API_MAP_FILES += $$(my_compat_api_map_$(suite))) \
+  $(eval COMPATIBILITY.$(suite).SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES += $(LOCAL_SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) \
   $(eval ALL_COMPATIBILITY_DIST_FILES += $$(my_compat_dist_$(suite))) \
   $(eval COMPATIBILITY.$(suite).MODULES += $$(my_register_name))) \
 $(eval $(my_all_targets) : \
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 08311ca..26b8b17 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -123,22 +123,28 @@
 
 $(call dist-for-goals, droidcore, $(boot_zip))
 
-ifneq (,$(filter true,$(ART_MODULE_BUILD_FROM_SOURCE) $(MODULE_BUILD_FROM_SOURCE)))
 # Build the system_server.zip which contains the Apex system server jars and standalone system server jars
+system_server_dex2oat_dir := $(SOONG_OUT_DIR)/system_server_dexjars
 system_server_zip := $(PRODUCT_OUT)/system_server.zip
+# non_updatable_system_server_jars contains jars in /system and /system_ext that are not part of an apex.
+non_updatable_system_server_jars := \
+  $(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),\
+    $(system_server_dex2oat_dir)/$(call word-colon,2,$(m)).jar)
+
 apex_system_server_jars := \
   $(foreach m,$(PRODUCT_APEX_SYSTEM_SERVER_JARS),\
-    $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+    $(system_server_dex2oat_dir)/$(call word-colon,2,$(m)).jar)
 
 apex_standalone_system_server_jars := \
   $(foreach m,$(PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS),\
-    $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+    $(system_server_dex2oat_dir)/$(call word-colon,2,$(m)).jar)
 
 standalone_system_server_jars := \
   $(foreach m,$(PRODUCT_STANDALONE_SYSTEM_SERVER_JARS),\
-    $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+    $(system_server_dex2oat_dir)/$(call word-colon,2,$(m)).jar)
 
-$(system_server_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
+$(system_server_zip): PRIVATE_SYSTEM_SERVER_DEX2OAT_DIR := $(system_server_dex2oat_dir)
+$(system_server_zip): PRIVATE_SYSTEM_SERVER_JARS := $(non_updatable_system_server_jars)
 $(system_server_zip): PRIVATE_APEX_SYSTEM_SERVER_JARS := $(apex_system_server_jars)
 $(system_server_zip): PRIVATE_APEX_STANDALONE_SYSTEM_SERVER_JARS := $(apex_standalone_system_server_jars)
 $(system_server_zip): PRIVATE_STANDALONE_SYSTEM_SERVER_JARS := $(standalone_system_server_jars)
@@ -146,14 +152,13 @@
 	@echo "Create system server package: $@"
 	rm -f $@
 	$(SOONG_ZIP) -o $@ \
-	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS)) \
-	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_APEX_SYSTEM_SERVER_JARS)) \
-          -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_APEX_STANDALONE_SYSTEM_SERVER_JARS)) \
-	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_STANDALONE_SYSTEM_SERVER_JARS))
+	  -C $(PRIVATE_SYSTEM_SERVER_DEX2OAT_DIR) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS)) \
+	  -C $(PRIVATE_SYSTEM_SERVER_DEX2OAT_DIR) $(addprefix -f ,$(PRIVATE_APEX_SYSTEM_SERVER_JARS)) \
+	  -C $(PRIVATE_SYSTEM_SERVER_DEX2OAT_DIR) $(addprefix -f ,$(PRIVATE_APEX_STANDALONE_SYSTEM_SERVER_JARS)) \
+	  -C $(PRIVATE_SYSTEM_SERVER_DEX2OAT_DIR) $(addprefix -f ,$(PRIVATE_STANDALONE_SYSTEM_SERVER_JARS))
 
 $(call dist-for-goals, droidcore, $(system_server_zip))
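To illustrate the new layout with a hypothetical entry: for an apex system server jar pair, word-colon,2 extracts the jar name, so the rule above now packages the copy staged under system_server_dexjars instead of the old per-apex javalib path.

    # PRODUCT_APEX_SYSTEM_SERVER_JARS := com.android.acme:service-acme   (hypothetical)
    # -> $(SOONG_OUT_DIR)/system_server_dexjars/service-acme.jar is zipped into system_server.zip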
 
-endif  #ART_MODULE_BUILD_FROM_SOURCE || MODULE_BUILD_FROM_SOURCE
 endif  #PRODUCT_USES_DEFAULT_ART_CONFIG
 endif  #WITH_DEXPREOPT_ART_BOOT_IMG_ONLY
 endif  #WITH_DEXPREOPT
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 151591e..e7086b7 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -202,6 +202,12 @@
 endif
 ifneq (,$(LOCAL_COMPATIBILITY_SUITE))
   LOCAL_ENFORCE_USES_LIBRARIES := false
+
+  # Enable the check for WTS
+  ifneq ($(filter wts,$(LOCAL_COMPATIBILITY_SUITE)),)
+    LOCAL_ENFORCE_USES_LIBRARIES := true
+  endif
+
 endif
 
 # Disable the check if the app contains no java code.
@@ -498,8 +504,8 @@
   _system_other := $(strip $(if $(strip $(BOARD_USES_SYSTEM_OTHER_ODEX)), \
     $(if $(strip $(SANITIZE_LITE)),, \
       $(if $(filter $(_dexname),$(PRODUCT_DEXPREOPT_SPEED_APPS))$(filter $(_dexname),$(PRODUCT_SYSTEM_SERVER_APPS)),, \
-        $(if $(strip $(foreach myfilter,$(SYSTEM_OTHER_ODEX_FILTER),$(filter system/$(myfilter),$(_dexlocation)))), \
-          system_other/)))))
+        $(if $(strip $(foreach myfilter,$(SYSTEM_OTHER_ODEX_FILTER),$(filter system/$(myfilter),$(_dexlocation))$(filter $(myfilter),$(_dexlocation)))), \
+            system_other/)))))
   # _dexdir has a trailing /
   _dexdir := $(_system_other)$(dir $(_dexlocation))
   my_dexpreopt_zip_contents := $(sort \
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 7f9cbad..c063f60 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -50,25 +50,6 @@
 # Release config
 include $(BUILD_SYSTEM)/release_config.mk
 
-# Set default value of KEEP_VNDK.
-ifeq ($(RELEASE_DEPRECATE_VNDK),true)
-  KEEP_VNDK ?= false
-else
-  KEEP_VNDK ?= true
-endif
-
-ifeq ($(KEEP_VNDK),true)
-  # Starting in Android U, non-VNDK devices not supported
-  # WARNING: DO NOT CHANGE: if you are downstream of AOSP, and you change this, without
-  # letting upstream know it's important to you, we may do cleanup which breaks this
-  # significantly. Please let us know if you are changing this.
-  ifndef BOARD_VNDK_VERSION
-  # READ WARNING - DO NOT CHANGE
-  BOARD_VNDK_VERSION := current
-  # READ WARNING - DO NOT CHANGE
-  endif
-endif
-
 # ---------------------------------------------------------------
 # Set up version information
 include $(BUILD_SYSTEM)/version_util.mk
@@ -94,7 +75,7 @@
 # ---------------------------------------------------------------
 # The product defaults to generic on hardware
 ifeq ($(TARGET_PRODUCT),)
-TARGET_PRODUCT := aosp_arm
+TARGET_PRODUCT := aosp_arm64
 endif
 
 
@@ -267,6 +248,7 @@
 HOST_PREBUILT_ARCH := x86
 # This is the standard way to name a directory containing prebuilt host
 # objects. E.g., prebuilt/$(HOST_PREBUILT_TAG)/cc
+# This must match the logic in get_host_prebuilt_prefix in envsetup.sh
 HOST_PREBUILT_TAG := $(BUILD_OS)-$(HOST_PREBUILT_ARCH)
 
 # TARGET_COPY_OUT_* are all relative to the staging directory, ie PRODUCT_OUT.
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index fecf4f6..2a76c9d 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -110,4 +110,6 @@
 $(LOCAL_BUILT_MODULE): $(my_coverage_path)/$(GCNO_ARCHIVE)
 endif
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=EXECUTABLE))
+
 endif  # skip_build_from_source
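
This is the first of a series of identical one-liners added to the legacy module-type makefiles in this change: each records which Make build-rule flavour produced a registered module, and the compliance-metadata rules added to main.mk below export that value as the module_type column of make-modules.csv. For a module whose register name happened to be libexample (a made-up name), the eval amounts to:

    # Guarded by $(if $(my_register_name),...) so includes that never register
    # a module are a no-op.
    ALL_MODULES.libexample.MAKE_MODULE_TYPE := EXECUTABLE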
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
index 8a4b8c3..1181c66 100644
--- a/core/fuzz_test.mk
+++ b/core/fuzz_test.mk
@@ -43,3 +43,5 @@
 endif
 
 include $(BUILD_EXECUTABLE)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=FUZZ_TEST))
\ No newline at end of file
diff --git a/core/goma.mk b/core/goma.mk
deleted file mode 100644
index 2b51d8b..0000000
--- a/core/goma.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# Copyright (C) 2015 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Notice: this works only with Google's Goma build infrastructure.
-ifneq ($(filter-out false,$(USE_GOMA)),)
-  ifdef GOMA_DIR
-    goma_dir := $(GOMA_DIR)
-  else
-    goma_dir := $(HOME)/goma
-  endif
-  GOMA_CC := $(goma_dir)/gomacc
-
-  # Append gomacc to existing *_WRAPPER variables so it's possible to
-  # use both ccache and gomacc.
-  CC_WRAPPER := $(strip $(CC_WRAPPER) $(GOMA_CC))
-  CXX_WRAPPER := $(strip $(CXX_WRAPPER) $(GOMA_CC))
-  # b/143658984: goma can't handle the --system argument to javac
-  #JAVAC_WRAPPER := $(strip $(JAVAC_WRAPPER) $(GOMA_CC))
-
-  goma_dir :=
-endif
diff --git a/core/header_library_internal.mk b/core/header_library_internal.mk
index 35ee1bc..a21c853 100644
--- a/core/header_library_internal.mk
+++ b/core/header_library_internal.mk
@@ -19,3 +19,5 @@
 
 $(LOCAL_BUILT_MODULE):
 	$(hide) touch $@
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HEADER_LIBRARY))
\ No newline at end of file
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index 2ff9ff2..7c79a1e 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -57,4 +57,6 @@
 $(LOCAL_BUILT_MODULE): $(all_objects) $(all_libraries) $(CLANG_CXX)
 	$(transform-host-o-to-executable)
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_EXECUTABLE))
+
 endif  # skip_build_from_source
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index d45da48..652eb0e 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -124,3 +124,5 @@
 ifeq ($(TURBINE_ENABLED),false)
 $(eval $(call copy-one-file,$(LOCAL_FULL_CLASSES_JACOCO_JAR),$(full_classes_header_jar)))
 endif
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_JAVA_LIBRARY))
\ No newline at end of file
diff --git a/core/host_prebuilt.mk b/core/host_prebuilt.mk
index 79f3ffa..7dc6704 100644
--- a/core/host_prebuilt.mk
+++ b/core/host_prebuilt.mk
@@ -17,3 +17,5 @@
 $(call record-module-type,HOST_PREBUILT)
 LOCAL_IS_HOST_MODULE := true
 include $(BUILD_MULTI_PREBUILT)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_PREBUILT))
\ No newline at end of file
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index ae8b798..22a02d4 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -53,4 +53,6 @@
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-host-o-to-shared-lib)
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_SHARED_LIBRARY))
+
 endif  # skip_build_from_source
diff --git a/core/host_static_library_internal.mk b/core/host_static_library_internal.mk
index 3946aa7..079c45e 100644
--- a/core/host_static_library_internal.mk
+++ b/core/host_static_library_internal.mk
@@ -23,3 +23,5 @@
 $(LOCAL_BUILT_MODULE): $(built_whole_libraries)
 $(LOCAL_BUILT_MODULE): $(all_objects)
 	$(transform-host-o-to-static-lib)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_STATIC_LIBRARY))
\ No newline at end of file
diff --git a/core/java_common.mk b/core/java_common.mk
index 65feb15..a21f062 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -32,6 +32,8 @@
     else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS))
       # TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules
       LOCAL_JAVA_LANGUAGE_VERSION := 1.8
+    else ifeq ($(EXPERIMENTAL_TARGET_JAVA_VERSION_21),true)
+      LOCAL_JAVA_LANGUAGE_VERSION := 21
     else
       LOCAL_JAVA_LANGUAGE_VERSION := 17
     endif
diff --git a/core/java_library.mk b/core/java_library.mk
index 3ac03dc..97ce92c 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -88,3 +88,5 @@
 $(eval $(call copy-one-file,$(common_javalib.jar),$(LOCAL_BUILT_MODULE)))
 
 endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=JAVA_LIBRARY))
\ No newline at end of file
diff --git a/core/main.mk b/core/main.mk
index bc8adde..f3980f1 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -113,37 +113,8 @@
   # $(error TARGET_ARCH_SUITE is not supported in kati/make builds)
 endif
 
-# ADDITIONAL_<partition>_PROPERTIES are properties that are determined by the
-# build system itself. Don't let it be defined from outside of the core build
-# system like Android.mk or <product>.mk files.
-_additional_prop_var_names := \
-    ADDITIONAL_SYSTEM_PROPERTIES \
-    ADDITIONAL_VENDOR_PROPERTIES \
-    ADDITIONAL_ODM_PROPERTIES \
-    ADDITIONAL_PRODUCT_PROPERTIES
-
-$(foreach name, $(_additional_prop_var_names),\
-  $(if $($(name)),\
-    $(error $(name) must not set before here. $($(name)))\
-  ,)\
-  $(eval $(name) :=)\
-)
-_additional_prop_var_names :=
-
 $(KATI_obsolete_var ADDITIONAL_BUILD_PROPERTIES, Please use ADDITIONAL_SYSTEM_PROPERTIES)
 
-#
-# -----------------------------------------------------------------
-# Add the product-defined properties to the build properties.
-ifneq ($(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED), true)
-  ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
-else
-  ifndef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
-    ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
-  endif
-endif
-
-
 # Bring in standard build system definitions.
 include $(BUILD_SYSTEM)/definitions.mk
 
@@ -175,174 +146,8 @@
 # PDK builds are no longer supported, this is always platform
 TARGET_BUILD_JAVA_SUPPORT_LEVEL :=$= platform
 
-# -----------------------------------------------------------------
-
-ADDITIONAL_SYSTEM_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
-
 $(KATI_obsolete_var PRODUCT_FULL_TREBLE,\
 	Code should be written to work regardless of a device being Treble)
-
-# Set ro.llndk.api_level to show the maximum vendor API level that the LLNDK in
-# the system partition supports.
-ifdef RELEASE_BOARD_API_LEVEL
-ADDITIONAL_SYSTEM_PROPERTIES += ro.llndk.api_level=$(RELEASE_BOARD_API_LEVEL)
-endif
-
-# Sets ro.actionable_compatible_property.enabled to know on runtime whether the
-# allowed list of actionable compatible properties is enabled or not.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=true
-
-# Add the system server compiler filter if they are specified for the product.
-ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
-ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
-endif
-
-# Add the 16K developer option if it is defined for the product.
-ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true
-else
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false
-endif
-
-# Enable core platform API violation warnings on userdebug and eng builds.
-ifneq ($(TARGET_BUILD_VARIANT),user)
-ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
-endif
-
-# Define ro.sanitize.<name> properties for all global sanitizers.
-ADDITIONAL_SYSTEM_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
-
-# Sets the default value of ro.postinstall.fstab.prefix to /system.
-# Device board config should override the value to /product when needed by:
-#
-#     PRODUCT_PRODUCT_PROPERTIES += ro.postinstall.fstab.prefix=/product
-#
-# It then uses ${ro.postinstall.fstab.prefix}/etc/fstab.postinstall to
-# mount system_other partition.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.postinstall.fstab.prefix=/system
-
-# Add cpu properties for bionic and ART.
-ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH)
-ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.cpu_variant=$(TARGET_CPU_VARIANT_RUNTIME)
-ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.2nd_arch=$(TARGET_2ND_ARCH)
-ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.2nd_cpu_variant=$(TARGET_2ND_CPU_VARIANT_RUNTIME)
-
-ADDITIONAL_VENDOR_PROPERTIES += persist.sys.dalvik.vm.lib.2=libart.so
-ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).variant=$(DEX2OAT_TARGET_CPU_VARIANT_RUNTIME)
-ifneq ($(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
-  ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).features=$(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
-endif
-
-ifdef TARGET_2ND_ARCH
-  ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).variant=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT_RUNTIME)
-  ifneq ($($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
-    ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).features=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
-  endif
-endif
-
-# Although these variables are prefixed with TARGET_RECOVERY_, they are also needed under charger
-# mode (via libminui).
-ifdef TARGET_RECOVERY_DEFAULT_ROTATION
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.minui.default_rotation=$(TARGET_RECOVERY_DEFAULT_ROTATION)
-endif
-ifdef TARGET_RECOVERY_OVERSCAN_PERCENT
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.minui.overscan_percent=$(TARGET_RECOVERY_OVERSCAN_PERCENT)
-endif
-ifdef TARGET_RECOVERY_PIXEL_FORMAT
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.minui.pixel_format=$(TARGET_RECOVERY_PIXEL_FORMAT)
-endif
-
-ifdef PRODUCT_USE_DYNAMIC_PARTITIONS
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.boot.dynamic_partitions=$(PRODUCT_USE_DYNAMIC_PARTITIONS)
-endif
-
-ifdef PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.boot.dynamic_partitions_retrofit=$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)
-endif
-
-ifdef PRODUCT_SHIPPING_API_LEVEL
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.product.first_api_level=$(PRODUCT_SHIPPING_API_LEVEL)
-endif
-
-ifdef PRODUCT_SHIPPING_VENDOR_API_LEVEL
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.vendor.api_level=$(PRODUCT_SHIPPING_VENDOR_API_LEVEL)
-endif
-
-ifneq ($(TARGET_BUILD_VARIANT),user)
-  ifdef PRODUCT_SET_DEBUGFS_RESTRICTIONS
-    ADDITIONAL_VENDOR_PROPERTIES += \
-      ro.product.debugfs_restrictions.enabled=$(PRODUCT_SET_DEBUGFS_RESTRICTIONS)
-  endif
-endif
-
-# Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
-# This must not be defined for the non-GRF devices.
-# The values of the GRF properties will be verified by post_process_props.py
-ifdef BOARD_SHIPPING_API_LEVEL
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.board.first_api_level=$(BOARD_SHIPPING_API_LEVEL)
-endif
-
-# Build system set BOARD_API_LEVEL to show the api level of the vendor API surface.
-# This must not be altered outside of build system.
-ifdef BOARD_API_LEVEL
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.board.api_level=$(BOARD_API_LEVEL)
-endif
-# RELEASE_BOARD_API_LEVEL_FROZEN is true when the vendor API surface is frozen.
-ifdef RELEASE_BOARD_API_LEVEL_FROZEN
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.board.api_frozen=$(RELEASE_BOARD_API_LEVEL_FROZEN)
-endif
-
-# Set build prop. This prop is read by ota_from_target_files when generating OTA,
-# to decide if VABC should be disabled.
-ifeq ($(BOARD_DONT_USE_VABC_OTA),true)
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.vendor.build.dont_use_vabc=true
-endif
-
-# Set the flag in vendor. So VTS would know if the new fingerprint format is in use when
-# the system images are replaced by GSI.
-ifeq ($(BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT),true)
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.vendor.build.fingerprint_has_digest=1
-endif
-
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.vendor.build.security_patch=$(VENDOR_SECURITY_PATCH) \
-    ro.product.board=$(TARGET_BOOTLOADER_BOARD_NAME) \
-    ro.board.platform=$(TARGET_BOARD_PLATFORM) \
-    ro.hwui.use_vulkan=$(TARGET_USES_VULKAN)
-
-ifdef TARGET_SCREEN_DENSITY
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.sf.lcd_density=$(TARGET_SCREEN_DENSITY)
-endif
-
-ifdef AB_OTA_UPDATER
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.build.ab_update=$(AB_OTA_UPDATER)
-endif
-
-ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)
-
-ifeq ($(AB_OTA_UPDATER),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
-ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
-endif
-
-# Set this property for VTS to skip large page size tests on unsupported devices.
-ADDITIONAL_PRODUCT_PROPERTIES += \
-    ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED)
-
 # -----------------------------------------------------------------
 ###
 ### In this section we set up the things that are different
@@ -355,66 +160,15 @@
 is_sdk_build := true
 endif
 
-## user/userdebug ##
-
-user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
-enable_target_debugging := true
 tags_to_install :=
-ifneq (,$(user_variant))
-  # Target is secure in user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=1
-  ADDITIONAL_SYSTEM_PROPERTIES += security.perf_harden=1
 
-  ifeq ($(user_variant),user)
-    ADDITIONAL_SYSTEM_PROPERTIES += ro.adb.secure=1
-  endif
-
-  ifeq ($(user_variant),userdebug)
-    # Pick up some extra useful tools
-    tags_to_install += debug
-  else
-    # Disable debugging in plain user builds.
-    enable_target_debugging :=
-  endif
-
-  # Disallow mock locations by default for user builds
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=0
-
-else # !user_variant
-  # Turn on checkjni for non-user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.kernel.android.checkjni=1
-  # Set device insecure for non-user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=0
-  # Allow mock locations by default for non user builds
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=1
-endif # !user_variant
-
-ifeq (true,$(strip $(enable_target_debugging)))
-  # Target is more debuggable and adbd is on by default
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=1
-  # Enable Dalvik lock contention logging.
-  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.lockprof.threshold=500
-else # !enable_target_debugging
-  # Target is less debuggable and adbd is off by default
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=0
-endif # !enable_target_debugging
-
-## eng ##
+ifeq ($(TARGET_BUILD_VARIANT),userdebug)
+# Pick up some extra useful tools
+tags_to_install := debug
+endif
 
 ifeq ($(TARGET_BUILD_VARIANT),eng)
 tags_to_install := debug eng
-ifneq ($(filter ro.setupwizard.mode=ENABLED, $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))),)
-  # Don't require the setup wizard on eng builds
-  ADDITIONAL_SYSTEM_PROPERTIES := $(filter-out ro.setupwizard.mode=%,\
-          $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))) \
-          ro.setupwizard.mode=OPTIONAL
-endif
-ifndef is_sdk_build
-  # To speedup startup of non-preopted builds, don't verify or compile the boot image.
-  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.image-dex2oat-filter=extract
-endif
-# b/323566535
-ADDITIONAL_SYSTEM_PROPERTIES += init.svc_debug.no_fatal.zygote=true
 endif
 
 ## asan ##
@@ -450,18 +204,11 @@
 # TODO: this should be eng I think.  Since the sdk is built from the eng
 # variant.
 tags_to_install := debug eng
-ADDITIONAL_SYSTEM_PROPERTIES += xmpp.auto-presence=true
-ADDITIONAL_SYSTEM_PROPERTIES += ro.config.nocheckin=yes
 else # !sdk
 endif
 
 BUILD_WITHOUT_PV := true
 
-ADDITIONAL_SYSTEM_PROPERTIES += net.bt.name=Android
-
-# This property is set by flashing debug boot image, so default to false.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.force.debuggable=0
-
 # ------------------------------------------------------------
 # Define a function that, given a list of module tags, returns
 # non-empty if that module should be installed in /system.
@@ -502,10 +249,6 @@
 
 # Strip and readonly a few more variables so they won't be modified.
 $(readonly-final-product-vars)
-ADDITIONAL_SYSTEM_PROPERTIES := $(strip $(ADDITIONAL_SYSTEM_PROPERTIES))
-.KATI_READONLY := ADDITIONAL_SYSTEM_PROPERTIES
-ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES))
-.KATI_READONLY := ADDITIONAL_PRODUCT_PROPERTIES
 
 ifneq ($(PRODUCT_ENFORCE_RRO_TARGETS),)
 ENFORCE_RRO_SOURCES :=
@@ -1222,8 +965,7 @@
 # Returns modules included automatically as a result of certain BoardConfig
 # variables being set.
 define auto-included-modules
-  $(if $(and $(BOARD_VNDK_VERSION),$(filter true,$(KEEP_VNDK))),vndk_package) \
-  $(if $(filter true,$(KEEP_VNDK)),,llndk_in_system) \
+  llndk_in_system \
   $(if $(DEVICE_MANIFEST_FILE),vendor_manifest.xml) \
   $(if $(DEVICE_MANIFEST_SKUS),$(foreach sku, $(DEVICE_MANIFEST_SKUS),vendor_manifest_$(sku).xml)) \
   $(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \
@@ -1909,7 +1651,7 @@
   $(api_xmls):
 	$(hide) echo "Converting API file to XML: $@"
 	$(hide) mkdir -p $(dir $@)
-	$(hide) $(APICHECK_COMMAND) --input-api-jar $< --api-xml $@
+	$(hide) $(APICHECK_COMMAND) jar-to-jdiff $< $@
 
   $(foreach xml,$(sort $(api_xmls)),$(call declare-1p-target,$(xml),))
 
@@ -2183,6 +1925,64 @@
 	  echo '$(_lib_stem).a,$(_module_path),$(_soong_module_type),,,,,$(_built_file),$(_static_libs),$(_whole_static_libs),$(_is_static_lib)' >> $@; \
 	)
 
+# Create metadata for compliance support in Soong
+.PHONY: make-compliance-metadata
+make-compliance-metadata: \
+    $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-metadata.csv \
+    $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-modules.csv
+
+$(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-metadata.csv:
+	rm -f $@
+	echo 'installed_file,module_path,is_soong_module,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,static_libs,whole_static_libs,license_text' >> $@
+	$(foreach f,$(installed_files),\
+	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
+	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
+	  $(eval _build_output_path := $(PRODUCT_OUT)/$(_path_on_device)) \
+	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+	  $(eval _is_soong_module := $(ALL_MODULES.$(_module_name).IS_SOONG_MODULE)) \
+	  $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
+	  $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \
+	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
+	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
+	  $(eval _is_notice_file := $(call is-notice-file,$f)) \
+	  $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \
+	  $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \
+	  $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \
+	  $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \
+	  $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \
+	  $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \
+	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
+	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
+	  $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \
+	  $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
+	  $(eval _static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES))) \
+	  $(eval _whole_static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES))) \
+	  $(eval _license_text := $(if $(filter $(_build_output_path),$(ALL_NON_MODULES)),$(ALL_NON_MODULES.$(_build_output_path).NOTICES))) \
+	  echo '$(_build_output_path),$(_module_path),$(_is_soong_module),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_static_libs),$(_whole_static_libs),$(_license_text)' >> $@; \
+	)
+
+$(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-modules.csv:
+	rm -f $@
+	echo 'name,module_path,module_class,module_type,static_libs,whole_static_libs,built_files,installed_files' >> $@
+	$(foreach m,$(ALL_MODULES), \
+	  $(eval _module_name := $m) \
+	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+	  $(eval _make_module_class := $(ALL_MODULES.$(_module_name).CLASS)) \
+	  $(eval _make_module_type := $(ALL_MODULES.$(_module_name).MAKE_MODULE_TYPE)) \
+	  $(eval _static_libs := $(strip $(sort $(ALL_MODULES.$(_module_name).STATIC_LIBS)))) \
+	  $(eval _whole_static_libs := $(strip $(sort $(ALL_MODULES.$(_module_name).WHOLE_STATIC_LIBS)))) \
+	  $(eval _built_files := $(strip $(sort $(ALL_MODULES.$(_module_name).BUILT)))) \
+	  $(eval _installed_files := $(strip $(sort $(ALL_MODULES.$(_module_name).INSTALLED)))) \
+	  $(eval _is_soong_module := $(ALL_MODULES.$(_module_name).IS_SOONG_MODULE)) \
+	  $(if $(_is_soong_module),, \
+		echo '$(_module_name),$(_module_path),$(_make_module_class),$(_make_module_type),$(_static_libs),$(_whole_static_libs),$(_built_files),$(_installed_files)' >> $@; \
+	  ) \
+	)
+
+$(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/installed_files.stamp: $(installed_files)
+	touch $@
+
 # (TODO: b/272358583 find another way of always rebuilding sbom.spdx)
 # Remove the always_dirty_file.txt whenever the makefile is evaluated
 $(shell rm -f $(PRODUCT_OUT)/always_dirty_file.txt)
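
Both CSV rules above follow the same shape: a header line, then one row per installed file or per Make-defined module, assembled from the ALL_INSTALLED_FILES.* and ALL_MODULES.* metadata (including the MAKE_MODULE_TYPE values introduced throughout this change); Soong-owned modules are skipped because Soong emits its own compliance metadata. Purely as an illustration, a make-modules.csv row for a hypothetical Make-built shared library could look like:

    name,module_path,module_class,module_type,static_libs,whole_static_libs,built_files,installed_files
    libexample,device/acme/libexample,SHARED_LIBRARIES,SHARED_LIBRARY,libbase,,out/target/.../libexample.so,out/target/.../system/lib64/libexample.so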
diff --git a/core/multi_prebuilt.mk b/core/multi_prebuilt.mk
index c97d481..415401b 100644
--- a/core/multi_prebuilt.mk
+++ b/core/multi_prebuilt.mk
@@ -132,3 +132,5 @@
 prebuilt_static_java_libraries :=
 prebuilt_is_host :=
 prebuilt_module_tags :=
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=MULTI_PREBUILT))
\ No newline at end of file
diff --git a/core/native_test.mk b/core/native_test.mk
index 8b49fbd..c12b211 100644
--- a/core/native_test.mk
+++ b/core/native_test.mk
@@ -21,3 +21,5 @@
 endif
 
 include $(BUILD_EXECUTABLE)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=NATIVE_TEST))
\ No newline at end of file
diff --git a/core/node_fns.mk b/core/node_fns.mk
index 144eb8b..d2cee9e 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -203,7 +203,7 @@
   $(call _expand-inherited-values,$(1),$(2),$(3),$(4))
 
   $(eval $(1).$(2).inherited :=)
-  $(eval _include_stack := $(wordlist 2,9999,$$(_include_stack)))
+  $(eval _include_stack := $(wordlist 2,9999,$(_include_stack)))
 endef
 
 #
diff --git a/core/package_internal.mk b/core/package_internal.mk
index a03a62b..a7eb572 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -731,3 +731,5 @@
     )
   endif
 endif
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=PACKAGE))
\ No newline at end of file
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index e715fd1..282e18e9 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -22,58 +22,32 @@
 
 
 # -----------------------------------------------------------------
-# Release Config Flags
-
-# Create a summary file of build flags for each partition
-# $(1): built build flags json file
-# $(2): installed build flags json file
-# $(3): flag names
-define generate-partition-build-flag-file
-$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
-$(eval $(strip $(1)): PRIVATE_FLAG_NAMES := $(strip $(3)))
-$(strip $(1)):
-	mkdir -p $$(dir $$(PRIVATE_OUT))
-	echo '{' > $$(PRIVATE_OUT)
-	echo '"flags": [' >> $$(PRIVATE_OUT)
-	$$(foreach flag, $$(PRIVATE_FLAG_NAMES), \
-		( \
-			printf '  { "name": "%s", "value": "%s", ' \
-					'$$(flag)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).VALUE)' \
-					; \
-			printf '"set": "%s", "default": "%s", "declared": "%s" }' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).SET_IN)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).DEFAULT)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).DECLARED_IN)' \
-					; \
-			printf '$$(if $$(filter $$(lastword $$(PRIVATE_FLAG_NAMES)),$$(flag)),,$$(comma))\n' ; \
-		) >> $$(PRIVATE_OUT) ; \
-	)
-	echo "]" >> $$(PRIVATE_OUT)
-	echo "}" >> $$(PRIVATE_OUT)
-$(call copy-one-file, $(1), $(2))
-endef
-
-$(foreach partition, $(_FLAG_PARTITIONS), \
-	$(eval build_flag_summaries.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/build_flags.json) \
-	$(eval $(call generate-partition-build-flag-file, \
-				$(TARGET_OUT_FLAGS)/$(partition)/build_flags.json, \
-				$(build_flag_summaries.$(partition)), \
-				$(_ALL_RELEASE_FLAGS.PARTITIONS.$(partition)) \
-			) \
-	) \
-)
-
-
-# -----------------------------------------------------------------
 # Aconfig Flags
 
 # Create a summary file of build flags for each partition
 # $(1): built aconfig flags file (out)
 # $(2): installed aconfig flags file (out)
-# $(3): input aconfig files for the partition (in)
+# $(3): the partition (in)
+# $(4): input aconfig files for the partition (in)
 define generate-partition-aconfig-flag-file
 $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
+$(eval $(strip $(1)): PRIVATE_IN := $(strip $(4)))
+$(strip $(1)): $(ACONFIG) $(strip $(4))
+	mkdir -p $$(dir $$(PRIVATE_OUT))
+	$$(if $$(PRIVATE_IN), \
+		$$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
+			--filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \
+		echo -n > $$(PRIVATE_OUT) \
+	)
+$(call copy-one-file, $(1), $(2))
+endef
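
Compared to the old rule, generate-partition-aconfig-flag-file now also receives the partition name and passes it to aconfig as a container filter, so each partition summary only keeps flags declared for that container; the generate-global-aconfig-flag-file variant defined just below preserves the old unfiltered behaviour for the device-wide declarations file. For the system partition the recipe expands to roughly the following (output path and caches illustrative):

    $(ACONFIG) dump --dedup --format protobuf \
        --out $(TARGET_OUT_FLAGS)/system/aconfig_flags.pb \
        --filter container:system \
        --cache <aconfig cache of module 1> --cache <aconfig cache of module 2>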
+
+# Create a device-wide summary file of build flags across all partitions
+# $(1): built aconfig flags file (out)
+# $(2): installed aconfig flags file (out)
+# $(3): input aconfig files (in)
+define generate-global-aconfig-flag-file
+$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
 $(eval $(strip $(1)): PRIVATE_IN := $(strip $(3)))
 $(strip $(1)): $(ACONFIG) $(strip $(3))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
@@ -91,6 +65,7 @@
 	$(eval $(call generate-partition-aconfig-flag-file, \
 				$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
 				$(aconfig_flag_summaries_protobuf.$(partition)), \
+				$(partition), \
 				$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
 					$(ALL_MODULES.$(m).ACONFIG_FILES) \
 				)), \
@@ -105,7 +80,7 @@
 
 .PHONY: device_aconfig_declarations
 device_aconfig_declarations: $(PRODUCT_OUT)/device_aconfig_declarations.pb
-$(eval $(call generate-partition-aconfig-flag-file, \
+$(eval $(call generate-global-aconfig-flag-file, \
 			$(TARGET_OUT_FLAGS)/device_aconfig_declarations.pb, \
 			$(PRODUCT_OUT)/device_aconfig_declarations.pb, \
 			$(sort $(required_aconfig_flags_files)) \
diff --git a/core/phony_package.mk b/core/phony_package.mk
index 578d629..c978793 100644
--- a/core/phony_package.mk
+++ b/core/phony_package.mk
@@ -12,3 +12,5 @@
 	$(hide) echo "Fake: $@"
 	$(hide) mkdir -p $(dir $@)
 	$(hide) touch $@
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=PHONY_PACKAGE))
\ No newline at end of file
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 9462640..d5261f4 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -63,3 +63,5 @@
 $(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 my_prebuilt_src_file :=
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=PREBUILT))
\ No newline at end of file
diff --git a/core/product.mk b/core/product.mk
index aa9a9a3..a567d8f 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -32,10 +32,14 @@
 # PRODUCT_MAX_PAGE_SIZE_SUPPORTED=65536, the possible values for PAGE_SIZE could be
 # 4096, 16384 and 65536.
 _product_single_value_vars += PRODUCT_MAX_PAGE_SIZE_SUPPORTED
+_product_single_value_vars += PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE
 
 # Boolean variable determining if AOSP relies on bionic's PAGE_SIZE macro.
 _product_single_value_vars += PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
 
+# Boolean variable determining if the device boots with a 16KB page size kernel.
+_product_single_value_vars += PRODUCT_BOOTS_16K
+
 # The resource configuration options to use for this product.
 _product_list_vars += PRODUCT_LOCALES
 _product_list_vars += PRODUCT_AAPT_CONFIG
@@ -159,7 +163,6 @@
 # List of jars to be included in the ART boot image for testing.
 _product_list_vars += PRODUCT_TEST_ONLY_ART_BOOT_IMAGE_JARS
 
-_product_single_value_vars += PRODUCT_SUPPORTS_VBOOT
 _product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
 # List of system_server classpath jars on the platform.
 _product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
@@ -303,9 +306,6 @@
 # This flag implies PRODUCT_USE_DYNAMIC_PARTITIONS.
 _product_single_value_vars += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
 
-# List of tags that will be used to gate blueprint modules from the build graph
-_product_list_vars += PRODUCT_INCLUDE_TAGS
-
 # List of directories that will be used to gate blueprint modules from the build graph
 _product_list_vars += PRODUCT_SOURCE_ROOT_DIRS
 
@@ -325,6 +325,13 @@
 # set this variable to prevent OTA failures.
 _product_list_vars += PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
 
+# If set to true, this product forces HIDL to be enabled by declaring android.hidl.manager
+# and android.hidl.token in the framework manifest. The product will also need to add the
+# 'hwservicemanager' service to PRODUCT_PACKAGES if its SHIPPING_API_LEVEL is greater than 34.
+# This should only be used during bringup for devices that are targeting FCM 202404 and still
+# have partner-owned HIDL interfaces that are being converted to AIDL.
+_product_single_value_vars += PRODUCT_HIDL_ENABLED
+
 # If set to true, this product builds a generic OTA package, which installs generic system images
 # onto matching devices. The product may only build a subset of system images (e.g. only
 # system.img), so devices need to install the package in a system-only OTA manner.
@@ -411,16 +418,26 @@
 # /system/etc/security/fsverity/BuildManifest.apk
 _product_single_value_vars += PRODUCT_FSVERITY_GENERATE_METADATA
 
-# If true, sets the default for MODULE_BUILD_FROM_SOURCE. This overrides
-# BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE but not an explicitly set value.
+# If true, this builds the mainline modules from source. This overrides any
+# prebuilts selected via RELEASE_APEX_CONTRIBUTIONS_* build flags for the
+# current release config.
 _product_single_value_vars += PRODUCT_MODULE_BUILD_FROM_SOURCE
 
 # If true, installs a full version of com.android.virt APEX.
 _product_single_value_vars += PRODUCT_AVF_ENABLED
 
+# If false, disables the AVF remote attestation feature.
+_product_single_value_vars += PRODUCT_AVF_REMOTE_ATTESTATION_DISABLED
+
 # If true, kernel with modules will be used for Microdroid VMs.
 _product_single_value_vars += PRODUCT_AVF_KERNEL_MODULES_ENABLED
 
+# If true, the memory controller will be force-enabled in the cgroup v2 hierarchy
+_product_single_value_vars += PRODUCT_MEMCG_V2_FORCE_ENABLED
+
+# If true, the cgroup v2 hierarchy will be split into apps/system subtrees
+_product_single_value_vars += PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED
+
 # List of .json files to be merged/compiled into vendor/etc/linker.config.pb
 _product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS
 
@@ -433,13 +450,17 @@
 #   device may have to re-compile everything on the first boot if the kernel doesn't support
 #   userfaultfd
 # - "false": disallows the build system and the runtime to use userfaultfd GC even if the device
-#   supports it
+#   supports it. This option is temporary - the plan is to remove it by Aug 2025, at which time
+#   Mainline updates of the ART module will ignore it as well.
 _product_single_value_vars += PRODUCT_ENABLE_UFFD_GC
 
 # Specifies COW version to be used by update_engine and libsnapshot. If this value is not
 # specified we default to COW version 2 in update_engine for backwards compatibility
 _product_single_value_vars += PRODUCT_VIRTUAL_AB_COW_VERSION
 
+# Specifies the maximum number of bytes compressed at once during an OTA. Options: 4096, 8192, 16384, 32768, 65536, 131072, 262144.
+_product_single_value_vars += PRODUCT_VIRTUAL_AB_COMPRESSION_FACTOR
+
 # If set, determines whether the build system checks vendor seapp contexts violations.
 _product_single_value_vars += PRODUCT_CHECK_VENDOR_SEAPP_VIOLATIONS
 
@@ -462,6 +483,18 @@
 
 _product_single_value_vars += PRODUCT_EXPORT_RUNTIME_APIS
 
+# If set, determines which version of the GKI is used as guest kernel for Microdroid VMs.
+# TODO(b/325991735): link to documentation once it is done.
+_product_single_value_vars += PRODUCT_AVF_MICRODROID_GUEST_GKI_VERSION
+
+# Enables 16KB developer option for device if set.
+_product_single_value_vars += PRODUCT_16K_DEVELOPER_OPTION
+
+# If set, adb root will be disabled (really ro.debuggable=0) in userdebug
+# builds. It's already disabled in user builds. Eng builds are unaffected
+# by this flag.
+_product_single_value_vars += PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
diff --git a/core/product_config.mk b/core/product_config.mk
index d16c38d..cc2fea9 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -280,27 +280,6 @@
 
 current_product_makefile :=
 
-#############################################################################
-# Check product include tag allowlist
-BLUEPRINT_INCLUDE_TAGS_ALLOWLIST := \
-  com.android.mainline_go \
-  com.android.mainline \
-  mainline_module_prebuilt_nightly \
-  mainline_module_prebuilt_monthly_release
-.KATI_READONLY := BLUEPRINT_INCLUDE_TAGS_ALLOWLIST
-$(foreach include_tag,$(PRODUCT_INCLUDE_TAGS), \
-	$(if $(filter $(include_tag),$(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST)),,\
-	$(call pretty-error, $(include_tag) is not in BLUEPRINT_INCLUDE_TAGS_ALLOWLIST: $(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST))))
-# Create default PRODUCT_INCLUDE_TAGS
-ifeq (, $(PRODUCT_INCLUDE_TAGS))
-# Soong analysis is global: even though a module might not be relevant to a specific product (e.g. build_tools for aosp_arm),
-# we still analyse it.
-# This means that in setups where we two have two prebuilts of module_sdk, we need a "default" to use in analysis
-# This should be a no-op in aosp and internal since no Android.bp file contains blueprint_package_includes
-# Use the big android one and main-based prebuilts by default
-PRODUCT_INCLUDE_TAGS += com.android.mainline mainline_module_prebuilt_nightly
-endif
-
 # AOSP and Google products currently share the same `apex_contributions` in next.
 # This causes issues when building <aosp_product>-next-userdebug in main.
 # Create a temporary allowlist to ignore the google apexes listed in `contents` of apex_contributions of `next`
@@ -314,6 +293,14 @@
 ifeq (true,$(PRODUCT_MODULE_BUILD_FROM_SOURCE))
   ignore_apex_contributions := true
 endif
+ifneq ($(EMMA_INSTRUMENT)$(EMMA_INSTRUMENT_STATIC)$(EMMA_INSTRUMENT_FRAMEWORK)$(CLANG_COVERAGE)$(NATIVE_COVERAGE_PATHS),)
+# Coverage builds for TARGET_RELEASE=foo should always build from source,
+# even if TARGET_RELEASE=foo uses prebuilt mainline modules.
+# This is necessary because the checked-in prebuilts were generated with
+# instrumentation turned off.
+  ignore_apex_contributions := true
+endif
+
 ifeq (true, $(ignore_apex_contributions))
 PRODUCT_BUILD_IGNORE_APEX_CONTRIBUTION_CONTENTS := true
 endif
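
In other words, any non-empty coverage-related setting now takes the same path as PRODUCT_MODULE_BUILD_FROM_SOURCE := true. As a sketch (the trigger shown is illustrative):

    # A coverage build that sets, for example,
    EMMA_INSTRUMENT := true
    # makes the guard above non-empty, which results in
    #   ignore_apex_contributions := true
    #   PRODUCT_BUILD_IGNORE_APEX_CONTRIBUTION_CONTENTS := true
    # so prebuilt apex_contributions are ignored and the modules build from source.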
@@ -418,6 +405,10 @@
   TARGET_AAPT_CHARACTERISTICS := $(PRODUCT_CHARACTERISTICS)
 endif
 
+ifndef PRODUCT_SHIPPING_API_LEVEL
+  PRODUCT_SHIPPING_API_LEVEL := 10000
+endif
+
 ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE
   ifneq (1,$(words $(PRODUCT_DEFAULT_DEV_CERTIFICATE)))
     $(error PRODUCT_DEFAULT_DEV_CERTIFICATE='$(PRODUCT_DEFAULT_DEV_CERTIFICATE)', \
@@ -570,11 +561,26 @@
 endif
 
 # This table maps sdk version 35 to vendor api level 202404 and assumes yearly
-# release for the same month.
+# release for the same month. If an API level of 10000 or higher is used, which
+# usually represents 'current' or 'future' API levels, zeros are appended to
+# preserve ordering. Specifically, API level 10,000 is converted to 10,000,000,
+# which importantly is greater than 202404 = 202,404. This convention only breaks
+# in the year 100,000 CE, when year-based vendor API levels would finally exceed
+# the converted value for API level 10,000.
 define sdk-to-vendor-api-level
-  $(if $(call math_lt_or_eq,$(1),34),$(1),20$(call int_subtract,$(1),11)04)
+$(if $(call math_lt_or_eq,$(1),34),$(1),$(if $(call math_lt,$(1),10000),20$(call int_subtract,$(1),11)04,$(1)000))
 endef
 
+ifneq ($(call sdk-to-vendor-api-level,34),34)
+$(error sdk-to-vendor-api-level is broken for pre-Trunk-Stable SDKs)
+endif
+ifneq ($(call sdk-to-vendor-api-level,35),202404)
+$(error sdk-to-vendor-api-level is broken for post-Trunk-Stable SDKs)
+endif
+ifneq ($(call sdk-to-vendor-api-level,10000),10000000)
+$(error sdk-to-vendor-api-level is broken for current $(call sdk-to-vendor-api-level,10000))
+endif
+
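
A quick worked expansion, matching the three self-checks above: 34 and below come back unchanged, 35 and up map to a year/month value (20, then the SDK level minus 11, then 04), and 10000 or more simply gets three zeros appended.

    $(info $(call sdk-to-vendor-api-level,34))      # 34
    $(info $(call sdk-to-vendor-api-level,36))      # 202504  (20 / 36-11=25 / 04)
    $(info $(call sdk-to-vendor-api-level,10000))   # 10000000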
 ifdef PRODUCT_SHIPPING_VENDOR_API_LEVEL
 # Follow the version that is set manually.
   VSR_VENDOR_API_LEVEL := $(PRODUCT_SHIPPING_VENDOR_API_LEVEL)
@@ -630,6 +636,15 @@
 endif
 endef
 
+ifndef PRODUCT_VIRTUAL_AB_COW_VERSION
+  PRODUCT_VIRTUAL_AB_COW_VERSION := 2
+  ifdef PRODUCT_SHIPPING_API_LEVEL
+    ifeq (true,$(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),34))
+      PRODUCT_VIRTUAL_AB_COW_VERSION := 3
+    endif
+  endif
+endif
+
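
The effect is that the COW format now defaults by launch level: a product that ships with API level 34 or newer and does not pin PRODUCT_VIRTUAL_AB_COW_VERSION gets version 3, while everything else keeps the backward-compatible version 2. For instance, with hypothetical product values:

    PRODUCT_SHIPPING_API_LEVEL := 34
    # PRODUCT_VIRTUAL_AB_COW_VERSION left unset by the product
    #   -> defaulted to 3 by the block above; an API-33 product would default to 2.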
 # Copy and check the value of each PRODUCT_BUILD_*_IMAGE variable
 $(foreach image, \
     PVMFW \
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 921f068..59e2c95 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -351,6 +351,7 @@
                 if cfg.get(attr, "") == "":
                     cfg[attr] = value
                     percolated_attrs[attr] = True
+                    child_cfg.pop(attr)
 
     for attr in _options.trace_variables:
         if attr in percolated_attrs:
@@ -360,7 +361,7 @@
     value = from_cfg.get(attr, [])
     if value:
         to_list.extend(value)
-        from_cfg[attr] = []
+        from_cfg.pop(attr)
 
 def _indirect(pcm_name):
     """Returns configuration item for the inherited module."""
diff --git a/core/proguard/kotlin.flags b/core/proguard/kotlin.flags
index 70dbaa7..ef6bf0e 100644
--- a/core/proguard/kotlin.flags
+++ b/core/proguard/kotlin.flags
@@ -10,7 +10,9 @@
 
 # Kotlin DebugMetadata has no value in release builds; these two rules will
 # allow AppReduce to strip out DebugMetadata.
--checkdiscard interface kotlin.coroutines.jvm.internal.DebugMetadata
+# TODO(b/302383328): Restore the below checkdiscard after resolving transitive
+# inclusion of kotlin-stdlib from androidx.annotation library deps.
+# -checkdiscard interface kotlin.coroutines.jvm.internal.DebugMetadata
 -assumenosideeffects class kotlin.coroutines.jvm.internal.DebugMetadataKt {
   *** getDebugMetadataAnnotation(...);
 }
diff --git a/core/release_config.mk b/core/release_config.mk
index 3e51af5..2898868 100644
--- a/core/release_config.mk
+++ b/core/release_config.mk
@@ -14,6 +14,16 @@
 
 
 # -----------------------------------------------------------------
+# Determine which pass this is.
+# -----------------------------------------------------------------
+# On the first pass, we are asked for only PRODUCT_RELEASE_CONFIG_MAPS;
+# on the second pass, we are asked for whatever else is wanted.
+_final_product_config_pass:=
+ifneq (PRODUCT_RELEASE_CONFIG_MAPS,$(DUMP_MANY_VARS))
+    _final_product_config_pass:=true
+endif
+
+# -----------------------------------------------------------------
 # Choose the flag files
 # -----------------------------------------------------------------
 # Release configs are defined in release_config_map files, which map
@@ -39,8 +49,6 @@
 
 # If this is a google source tree, restrict it to only the one file
 # which has OWNERS control.  If it isn't let others define their own.
-# TODO: Remove wildcard for build/release one when all branch manifests
-# have updated.
 config_map_files := $(wildcard build/release/release_config_map.mk) \
     $(wildcard vendor/google_shared/build/release/release_config_map.mk) \
     $(if $(wildcard vendor/google/release/release_config_map.mk), \
@@ -53,13 +61,118 @@
         ) \
     )
 
+protobuf_map_files := build/release/release_config_map.textproto \
+    $(wildcard vendor/google_shared/build/release/release_config_map.textproto) \
+    $(if $(wildcard vendor/google/release/release_config_map.textproto), \
+        vendor/google/release/release_config_map.textproto, \
+        $(sort \
+            $(wildcard device/*/release/release_config_map.textproto) \
+            $(wildcard device/*/*/release/release_config_map.textproto) \
+            $(wildcard vendor/*/release/release_config_map.textproto) \
+            $(wildcard vendor/*/*/release/release_config_map.textproto) \
+        ) \
+    )
+
+# Remove support for the legacy approach.
+_must_protobuf := true
+
 # PRODUCT_RELEASE_CONFIG_MAPS is set by Soong using an initial run of product
 # config to capture only the list of config maps needed by the build.
 # Keep them in the order provided, but remove duplicates.
+# Treat .mk and .textproto as equal for duplicate elimination, but force
+# protobuf if any entry in PRODUCT_RELEASE_CONFIG_MAPS specifies .textproto.
 $(foreach map,$(PRODUCT_RELEASE_CONFIG_MAPS), \
-    $(if $(filter $(map),$(config_map_files)),,$(eval config_map_files += $(map))) \
+    $(if $(filter $(basename $(map)),$(basename $(config_map_files))),, \
+        $(eval config_map_files += $(map))) \
+    $(if $(filter $(basename $(map)).textproto,$(map)),$(eval _must_protobuf := true)) \
 )
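
Because the comparison is done on $(basename ...), a map that Soong reported as .textproto is not re-added when its .mk twin is already in the list, but its suffix still forces the protobuf path. For example, with illustrative paths:

    # config_map_files            := build/release/release_config_map.mk vendor/acme/release/release_config_map.mk
    # PRODUCT_RELEASE_CONFIG_MAPS := vendor/acme/release/release_config_map.textproto
    #   -> the basenames match, so config_map_files is left unchanged,
    #      and the .textproto suffix sets _must_protobuf := true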
 
+
+# If we are missing the textproto version of any of $(config_map_files), we cannot use protobuf.
+_can_protobuf := true
+$(foreach map,$(config_map_files), \
+    $(if $(wildcard $(basename $(map)).textproto),,$(eval _can_protobuf :=)) \
+)
+# If we are missing the mk version of any of $(protobuf_map_files), we must use protobuf.
+$(foreach map,$(protobuf_map_files), \
+    $(if $(wildcard $(basename $(map)).mk),,$(eval _must_protobuf := true)) \
+)
+
+ifneq (,$(_must_protobuf))
+    ifeq (,$(_can_protobuf))
+        # We must use protobuf, but we cannot use protobuf.
+        $(error release config is a mixture of .scl and .textproto)
+    endif
+endif
+
+_use_protobuf :=
+ifneq (,$(_must_protobuf))
+    _use_protobuf := true
+else
+    ifneq ($(_can_protobuf),)
+        # Determine the default
+        $(foreach map,$(config_map_files), \
+            $(if $(wildcard $(dir $(map))/build_config/DEFAULT=proto),$(eval _use_protobuf := true)) \
+            $(if $(wildcard $(dir $(map))/build_config/DEFAULT=make),$(eval _use_protobuf := )) \
+        )
+        # Update for this specific release config only (no inheritance).
+        $(foreach map,$(config_map_files), \
+            $(if $(wildcard $(dir $(map))/build_config/$(TARGET_RELEASE)=proto),$(eval _use_protobuf := true)) \
+            $(if $(wildcard $(dir $(map))/build_config/$(TARGET_RELEASE)=make),$(eval _use_protobuf := )) \
+        )
+    endif
+endif
+
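
When both formats are available, the choice is driven by marker files in a build_config/ directory next to each release_config_map: DEFAULT=proto or DEFAULT=make sets the tree-wide default, and <release>=proto or <release>=make overrides it for a single release config, with no inheritance between configs. For example (hypothetical vendor paths):

    # vendor/acme/release/build_config/DEFAULT=make          -> keep the .mk pipeline by default
    # vendor/acme/release/build_config/trunk_staging=proto   -> but use release-config (protobuf) when TARGET_RELEASE=trunk_staging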
+ifneq (,$(_use_protobuf))
+    # The .textproto files are the canonical source of truth.
+    _args := $(foreach map,$(config_map_files), --map $(map) )
+    ifneq (,$(_must_protobuf))
+        # Disable the build flag in release-config.
+        _args += --guard=false
+    endif
+    _args += --allow-missing=true
+    _flags_dir:=$(OUT_DIR)/soong/release-config
+    _flags_file:=$(_flags_dir)/release_config-$(TARGET_PRODUCT)-$(TARGET_RELEASE).vars
+    # release-config generates $(_flags_varmk)
+    _flags_varmk:=$(_flags_file:.vars=.varmk)
+    $(shell $(OUT_DIR)/release-config $(_args) >$(OUT_DIR)/release-config.out && touch -t 200001010000 $(_flags_varmk))
+    $(if $(filter-out 0,$(.SHELLSTATUS)),$(error release-config failed to run))
+    ifneq (,$(_final_product_config_pass))
+        # Save the final version of the config.
+        $(shell if ! cmp --quiet $(_flags_varmk) $(_flags_file); then cp $(_flags_varmk) $(_flags_file); fi)
+        # This will also set ALL_RELEASE_CONFIGS_FOR_PRODUCT and _used_files for us.
+        $(eval include $(_flags_file))
+        $(KATI_extra_file_deps $(OUT_DIR)/release-config $(protobuf_map_files) $(_flags_file))
+    else
+        # This is the first pass of product config.
+        $(eval include $(_flags_varmk))
+    endif
+    _used_files :=
+    ifeq (,$(_must_protobuf)$(RELEASE_BUILD_FLAGS_IN_PROTOBUF))
+        _use_protobuf :=
+    else
+        _base_all_release := all_release_configs-$(TARGET_PRODUCT)
+        $(call dist-for-goals,droid,\
+            $(_flags_dir)/$(_base_all_release).pb:build_flags/all_release_configs.pb \
+            $(_flags_dir)/$(_base_all_release).textproto:build_flags/all_release_configs.textproto \
+            $(_flags_dir)/$(_base_all_release).json:build_flags/all_release_configs.json \
+            $(_flags_dir)/inheritance_graph-$(TARGET_PRODUCT).dot:build_flags/inheritance_graph-$(TARGET_PRODUCT).dot \
+        )
+# These are always created, add an empty rule for them to keep ninja happy.
+$(_flags_dir)/inheritance_graph-$(TARGET_PRODUCT).dot:
+	: created by $(OUT_DIR)/release-config
+$(_flags_dir)/$(_base_all_release).pb $(_flags_dir)/$(_base_all_release).textproto $(_flags_dir)/$(_base_all_release).json:
+	: created by $(OUT_DIR)/release-config
+        _base_all_release :=
+    endif
+    _flags_dir:=
+    _flags_file:=
+    _flags_varmk:=
+endif
+ifeq (,$(_use_protobuf))
+    # The .mk files are the canonical source of truth.
+
+
 # Declare an alias release-config
 #
 # This should be used to declare a release as an alias of another, meaning no
@@ -105,9 +218,9 @@
         $(error declare-release-config: config $(strip $(1)) must have release config files, override another release config, or both) \
     )
     $(if $(strip $(4)),$(eval _all_release_configs.$(strip $(1)).ALIAS := true))
-    $(eval _all_release_configs := $(sort $(_all_release_configs) $(strip $(1))))
+    $(eval ALL_RELEASE_CONFIGS_FOR_PRODUCT := $(sort $(ALL_RELEASE_CONFIGS_FOR_PRODUCT) $(strip $(1))))
     $(if $(strip $(3)), \
-      $(if $(filter $(_all_release_configs), $(strip $(3))),
+      $(if $(filter $(ALL_RELEASE_CONFIGS_FOR_PRODUCT), $(strip $(3))),
         $(if $(filter $(_all_release_configs.$(strip $(1)).OVERRIDES),$(strip $(3))),,
           $(eval _all_release_configs.$(strip $(1)).OVERRIDES := $(_all_release_configs.$(strip $(1)).OVERRIDES) $(strip $(3)))), \
         $(error No release config $(strip $(3))) \
@@ -133,17 +246,20 @@
 FLAG_DECLARATION_FILES :=
 
 # Verify that all inherited/overridden release configs are declared.
-$(foreach config,$(_all_release_configs),\
+$(foreach config,$(ALL_RELEASE_CONFIGS_FOR_PRODUCT),\
   $(foreach r,$(all_release_configs.$(r).OVERRIDES),\
     $(if $(strip $(_all_release_configs.$(r).FILES)$(_all_release_configs.$(r).OVERRIDES)),,\
     $(error Release config $(config) [declared in: $(_all_release_configs.$(r).DECLARED_IN)] inherits from non-existent $(r).)\
 )))
 # Verify that alias configs do not have config files.
-$(foreach r,$(_all_release_configs),\
+$(foreach r,$(ALL_RELEASE_CONFIGS_FOR_PRODUCT),\
   $(if $(_all_release_configs.$(r).ALIAS),$(if $(_all_release_configs.$(r).FILES),\
     $(error Alias release config "$(r)" may not specify release config files $(_all_release_configs.$(r).FILES))\
 )))
 
+# Use makefiles
+endif
+
 ifeq ($(TARGET_RELEASE),)
     # We allow some internal paths to explicitly set TARGET_RELEASE to the
     # empty string.  For the most part, 'make' treats unset and empty string as
@@ -151,7 +267,7 @@
     # if the variable was completely unset.
     TARGET_RELEASE ?= was_unset
     ifeq ($(TARGET_RELEASE),was_unset)
-        $(error No release config set for target; please set TARGET_RELEASE, or if building on the command line use 'lunch <target>-<release>-<build_type>', where release is one of: $(_all_release_configs))
+        $(error No release config set for target; please set TARGET_RELEASE, or if building on the command line use 'lunch <target>-<release>-<build_type>', where release is one of: $(ALL_RELEASE_CONFIGS_FOR_PRODUCT))
     endif
     # Instead of leaving this string empty, we want to default to a valid
     # setting.  Full builds coming through this path is a bug, but in case
@@ -161,12 +277,13 @@
 
 # During pass 1 of product config, using a non-existent release config is not an error.
 # We can safely assume that we are doing pass 1 if DUMP_MANY_VARS=="PRODUCT_RELEASE_CONFIG_MAPS".
-ifneq (PRODUCT_RELEASE_CONFIG_MAPS,$(DUMP_MANY_VARS))
-    ifeq ($(filter $(_all_release_configs), $(TARGET_RELEASE)),)
-        $(error No release config found for TARGET_RELEASE: $(TARGET_RELEASE). Available releases are: $(_all_release_configs))
+ifneq (,$(_final_product_config_pass))
+    ifeq ($(filter $(ALL_RELEASE_CONFIGS_FOR_PRODUCT), $(TARGET_RELEASE)),)
+        $(error No release config found for TARGET_RELEASE: $(TARGET_RELEASE). Available releases are: $(ALL_RELEASE_CONFIGS_FOR_PRODUCT))
     endif
 endif
 
+ifeq (,$(_use_protobuf))
 # Choose flag files
 # Don't sort this, use it in the order they gave us.
 # Do allow duplicate entries, retaining only the first usage.
@@ -196,6 +313,9 @@
 $(error invalid use of apply-release-config-overrides)
 endef
 
+# use makefiles
+endif
+
 # TODO: Remove this check after enough people have sourced lunch that we don't
 # need to worry about it trying to do get_build_vars TARGET_RELEASE. Maybe after ~9/2023
 ifneq ($(CALLED_FROM_SETUP),true)
@@ -207,15 +327,19 @@
 endif
 .KATI_READONLY := TARGET_RELEASE
 
-$(foreach config, $(_all_release_configs), \
+ifeq (,$(_use_protobuf))
+$(foreach config, $(ALL_RELEASE_CONFIGS_FOR_PRODUCT), \
     $(eval _all_release_configs.$(config).DECLARED_IN:= ) \
     $(eval _all_release_configs.$(config).FILES:= ) \
 )
-_all_release_configs:=
-config_map_files:=
 applied_releases:=
+# use makefiles
+endif
+config_map_files:=
+protobuf_map_files:=
 
 
+ifeq (,$(_use_protobuf))
 # -----------------------------------------------------------------
 # Flag declarations and values
 # -----------------------------------------------------------------
@@ -252,3 +376,9 @@
 # outside of the source tree.
 $(call run-starlark,$(OUT_DIR)/release_config_entrypoint.scl,$(OUT_DIR)/release_config_entrypoint.scl,--allow_external_entrypoint)
 
+# use makefiles
+endif
+_can_protobuf :=
+_must_protobuf :=
+_use_protobuf :=
+
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 2f510d9..ae34cb8 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -101,4 +101,6 @@
 $(LOCAL_BUILT_MODULE): $(my_coverage_path)/$(GCNO_ARCHIVE)
 endif
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=SHARED_LIBRARY))
+
 endif  # skip_build_from_source
diff --git a/core/soong_config.mk b/core/soong_config.mk
index e382407..068034b 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -1,5 +1,4 @@
 SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk
-SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables
 SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
 
 include $(BUILD_SYSTEM)/art_config.mk
@@ -30,6 +29,7 @@
 $(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT))
 
 $(call add_json_str,  BuildId,                           $(BUILD_ID))
+$(call add_json_str,  BuildFingerprintFile,              build_fingerprint.txt)
 $(call add_json_str,  BuildNumberFile,                   build_number.txt)
 $(call add_json_str,  BuildHostnameFile,                 build_hostname.txt)
 $(call add_json_str,  BuildThumbprintFile,               build_thumbprint.txt)
@@ -52,8 +52,6 @@
 
 $(call add_json_bool, Release_aidl_use_unfrozen,         $(RELEASE_AIDL_USE_UNFROZEN))
 
-$(call add_json_str,  Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
-
 $(call add_json_bool, Allow_missing_dependencies,        $(filter true,$(ALLOW_MISSING_DEPENDENCIES)))
 $(call add_json_bool, Unbundled_build,                   $(TARGET_BUILD_UNBUNDLED))
 $(call add_json_list, Unbundled_build_apps,              $(TARGET_BUILD_APPS))
@@ -146,21 +144,15 @@
 $(call add_json_list, NativeCoveragePaths,               $(NATIVE_COVERAGE_PATHS))
 $(call add_json_list, NativeCoverageExcludePaths,        $(NATIVE_COVERAGE_EXCLUDE_PATHS))
 
-$(call add_json_bool, SamplingPGO,                       $(filter true,$(SAMPLING_PGO)))
-
 $(call add_json_bool, ArtUseReadBarrier,                 $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
 $(call add_json_str,  BtConfigIncludeDir,                $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
 $(call add_json_list, DeviceKernelHeaders,               $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
 $(call add_json_str,  VendorApiLevel,                    $(BOARD_API_LEVEL))
-ifeq ($(KEEP_VNDK),true)
-$(call add_json_str,  DeviceVndkVersion,                 $(BOARD_VNDK_VERSION))
-$(call add_json_str,  Platform_vndk_version,             $(PLATFORM_VNDK_VERSION))
-endif
 $(call add_json_list, ExtraVndkVersions,                 $(PRODUCT_EXTRA_VNDK_VERSIONS))
 $(call add_json_list, DeviceSystemSdkVersions,           $(BOARD_SYSTEMSDK_VERSIONS))
 $(call add_json_str,  RecoverySnapshotVersion,           $(RECOVERY_SNAPSHOT_VERSION))
 $(call add_json_list, Platform_systemsdk_versions,       $(PLATFORM_SYSTEMSDK_VERSIONS))
-$(call add_json_bool, Malloc_not_svelte,                 $(call invert_bool,$(filter true,$(MALLOC_SVELTE))))
+$(call add_json_bool, Malloc_low_memory,                 $(findstring true,$(MALLOC_SVELTE) $(MALLOC_LOW_MEMORY)))
 $(call add_json_bool, Malloc_zero_contents,              $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))
 $(call add_json_bool, Malloc_pattern_fill_contents,      $(MALLOC_PATTERN_FILL_CONTENTS))
 $(call add_json_str,  Override_rs_driver,                $(OVERRIDE_RS_DRIVER))
@@ -173,13 +165,16 @@
 $(call add_json_list, BootJars,                          $(PRODUCT_BOOT_JARS))
 $(call add_json_list, ApexBootJars,                      $(filter-out $(APEX_BOOT_JARS_EXCLUDED), $(PRODUCT_APEX_BOOT_JARS)))
 
-$(call add_json_bool, VndkUseCoreVariant,                $(TARGET_VNDK_USE_CORE_VARIANT))
 $(call add_json_bool, VndkSnapshotBuildArtifacts,        $(VNDK_SNAPSHOT_BUILD_ARTIFACTS))
 
 $(call add_json_map,  BuildFlags)
 $(foreach flag,$(_ALL_RELEASE_FLAGS),\
   $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).VALUE)))
 $(call end_json_map)
+$(call add_json_map,  BuildFlagTypes)
+$(foreach flag,$(_ALL_RELEASE_FLAGS),\
+  $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).TYPE)))
+$(call end_json_map)
 
 $(call add_json_bool, DirectedVendorSnapshot,            $(DIRECTED_VENDOR_SNAPSHOT))
 $(call add_json_map,  VendorSnapshotModules)
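
Note: the new BuildFlagTypes map above mirrors the existing BuildFlags map and is keyed by the same _ALL_RELEASE_FLAGS list. A rough sketch of the per-flag variables the two foreach loops read; the flag name and values here are hypothetical, the real ones come from the release config maps:

    _ALL_RELEASE_FLAGS += RELEASE_EXAMPLE_FEATURE
    _ALL_RELEASE_FLAGS.RELEASE_EXAMPLE_FEATURE.VALUE := true
    _ALL_RELEASE_FLAGS.RELEASE_EXAMPLE_FEATURE.TYPE  := bool
    # The loops above would then emit, in soong.variables:
    #   "BuildFlags":     { "RELEASE_EXAMPLE_FEATURE": "true" }
    #   "BuildFlagTypes": { "RELEASE_EXAMPLE_FEATURE": "bool" }
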
@@ -207,9 +202,6 @@
 $(call add_json_bool, Uml,                               $(filter true,$(TARGET_USER_MODE_LINUX)))
 $(call add_json_str,  VendorPath,                        $(TARGET_COPY_OUT_VENDOR))
 $(call add_json_str,  OdmPath,                           $(TARGET_COPY_OUT_ODM))
-$(call add_json_str,  VendorDlkmPath,                    $(TARGET_COPY_OUT_VENDOR_DLKM))
-$(call add_json_str,  OdmDlkmPath,                       $(TARGET_COPY_OUT_ODM_DLKM))
-$(call add_json_str,  SystemDlkmPath,                    $(TARGET_COPY_OUT_SYSTEM_DLKM))
 $(call add_json_str,  ProductPath,                       $(TARGET_COPY_OUT_PRODUCT))
 $(call add_json_str,  SystemExtPath,                     $(TARGET_COPY_OUT_SYSTEM_EXT))
 $(call add_json_bool, MinimizeJavaDebugInfo,             $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
@@ -270,6 +262,18 @@
   $(call end_json_map))
 $(call end_json_map)
 
+# Add the types of the variables in VendorVars. Since this is much newer
+# than VendorVars, which has a history of just using string values for everything,
+# variables are assumed to be strings by default. For strings, SOONG_CONFIG_TYPE_*
+# will not be set, and they will not have an entry in the VendorVarTypes map.
+$(call add_json_map, VendorVarTypes)
+$(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\
+  $(call add_json_map, $(namespace))\
+  $(foreach key,$(sort $(SOONG_CONFIG_$(namespace))),\
+    $(if $(SOONG_CONFIG_TYPE_$(namespace)_$(key)),$(call add_json_str,$(key),$(subst ",\",$(SOONG_CONFIG_TYPE_$(namespace)_$(key))))))\
+  $(call end_json_map))
+$(call end_json_map)
+
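
Note: SOONG_CONFIG_TYPE_<namespace>_<key> is only consulted when it is set; plain string vars get no VendorVarTypes entry, as the comment above says. A hedged sketch of the variables this loop consumes (the namespace and key are made up; product config normally populates them via the soong_config_* helper macros rather than directly):

    SOONG_CONFIG_NAMESPACES += examplens
    SOONG_CONFIG_examplens += use_feature
    SOONG_CONFIG_examplens_use_feature := true
    SOONG_CONFIG_TYPE_examplens_use_feature := bool
    # Result: VendorVars.examplens.use_feature = "true",
    #         VendorVarTypes.examplens.use_feature = "bool"
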
 $(call add_json_bool, EnforceProductPartitionInterface,  $(filter true,$(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE)))
 $(call add_json_str,  DeviceCurrentApiLevelForVendorModules,  $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
 
@@ -290,7 +294,7 @@
 $(call add_json_bool, BoardMoveRecoveryResourcesToVendorBoot, $(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
 $(call add_json_str,  PrebuiltHiddenApiDir, $(BOARD_PREBUILT_HIDDENAPI_DIR))
 
-$(call add_json_str,  ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
+$(call add_json_str,  Shipping_api_level, $(PRODUCT_SHIPPING_API_LEVEL))
 
 $(call add_json_list, BuildBrokenPluginValidation,         $(BUILD_BROKEN_PLUGIN_VALIDATION))
 $(call add_json_bool, BuildBrokenClangProperty,            $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
@@ -305,6 +309,7 @@
 $(call add_json_bool, BuildBrokenIncorrectPartitionImages, $(filter true,$(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES)))
 $(call add_json_list, BuildBrokenInputDirModules,          $(BUILD_BROKEN_INPUT_DIR_MODULES))
 $(call add_json_bool, BuildBrokenDontCheckSystemSdk,       $(filter true,$(BUILD_BROKEN_DONT_CHECK_SYSTEMSDK)))
+$(call add_json_bool, BuildBrokenDupSysprop,               $(filter true,$(BUILD_BROKEN_DUP_SYSPROP)))
 
 $(call add_json_list, BuildWarningBadOptionalUsesLibsAllowlist,    $(BUILD_WARNING_BAD_OPTIONAL_USES_LIBS_ALLOWLIST))
 
@@ -321,7 +326,6 @@
 
 $(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
 
-$(call add_json_list, IncludeTags,                $(PRODUCT_INCLUDE_TAGS))
 $(call add_json_list, SourceRootDirs,             $(PRODUCT_SOURCE_ROOT_DIRS))
 
 $(call add_json_list, AfdoProfiles,                $(ALL_AFDO_PROFILES))
@@ -335,73 +339,10 @@
 
 $(call add_json_bool, ReleaseDefaultModuleBuildFromSource,   $(RELEASE_DEFAULT_MODULE_BUILD_FROM_SOURCE))
 
-$(call add_json_bool, KeepVndk, $(filter true,$(KEEP_VNDK)))
-
 $(call add_json_bool, CheckVendorSeappViolations, $(filter true,$(CHECK_VENDOR_SEAPP_VIOLATIONS)))
 
 $(call add_json_bool, BuildIgnoreApexContributionContents, $(PRODUCT_BUILD_IGNORE_APEX_CONTRIBUTION_CONTENTS))
 
-$(call add_json_map, PartitionVarsForBazelMigrationOnlyDoNotUse)
-  $(call add_json_str,  ProductDirectory,    $(dir $(INTERNAL_PRODUCT)))
-
-  $(call add_json_map,PartitionQualifiedVariables)
-  $(foreach image_type,SYSTEM VENDOR CACHE USERDATA PRODUCT SYSTEM_EXT OEM ODM VENDOR_DLKM ODM_DLKM SYSTEM_DLKM, \
-    $(call add_json_map,$(call to-lower,$(image_type))) \
-    $(call add_json_bool, BuildingImage, $(filter true,$(BUILDING_$(image_type)_IMAGE))) \
-    $(call add_json_str, BoardErofsCompressor, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESSOR)) \
-    $(call add_json_str, BoardErofsCompressHints, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESS_HINTS)) \
-    $(call add_json_str, BoardErofsPclusterSize, $(BOARD_$(image_type)IMAGE_EROFS_PCLUSTER_SIZE)) \
-    $(call add_json_str, BoardExtfsInodeCount, $(BOARD_$(image_type)IMAGE_EXTFS_INODE_COUNT)) \
-    $(call add_json_str, BoardExtfsRsvPct, $(BOARD_$(image_type)IMAGE_EXTFS_RSV_PCT)) \
-    $(call add_json_str, BoardF2fsSloadCompressFlags, $(BOARD_$(image_type)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)) \
-    $(call add_json_str, BoardFileSystemCompress, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_COMPRESS)) \
-    $(call add_json_str, BoardFileSystemType, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_TYPE)) \
-    $(call add_json_str, BoardJournalSize, $(BOARD_$(image_type)IMAGE_JOURNAL_SIZE)) \
-    $(call add_json_str, BoardPartitionReservedSize, $(BOARD_$(image_type)IMAGE_PARTITION_RESERVED_SIZE)) \
-    $(call add_json_str, BoardPartitionSize, $(BOARD_$(image_type)IMAGE_PARTITION_SIZE)) \
-    $(call add_json_str, BoardSquashfsBlockSize, $(BOARD_$(image_type)IMAGE_SQUASHFS_BLOCK_SIZE)) \
-    $(call add_json_str, BoardSquashfsCompressor, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR)) \
-    $(call add_json_str, BoardSquashfsCompressorOpt, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR_OPT)) \
-    $(call add_json_str, BoardSquashfsDisable4kAlign, $(BOARD_$(image_type)IMAGE_SQUASHFS_DISABLE_4K_ALIGN)) \
-    $(call add_json_str, ProductBaseFsPath, $(PRODUCT_$(image_type)_BASE_FS_PATH)) \
-    $(call add_json_str, ProductHeadroom, $(PRODUCT_$(image_type)_HEADROOM)) \
-    $(call add_json_str, ProductVerityPartition, $(PRODUCT_$(image_type)_VERITY_PARTITION)) \
-    $(call add_json_str, BoardAvbAddHashtreeFooterArgs, $(BOARD_AVB_$(image_type)_ADD_HASHTREE_FOOTER_ARGS)) \
-    $(call add_json_str, BoardAvbKeyPath, $(BOARD_AVB_$(image_type)_KEY_PATH)) \
-    $(call add_json_str, BoardAvbAlgorithm, $(BOARD_AVB_$(image_type)_ALGORITHM)) \
-    $(call add_json_str, BoardAvbRollbackIndex, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX)) \
-    $(call add_json_str, BoardAvbRollbackIndexLocation, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX_LOCATION)) \
-    $(call end_json_map) \
-  )
-  $(call end_json_map)
-
-  $(call add_json_bool, TargetUserimagesUseExt2, $(filter true,$(TARGET_USERIMAGES_USE_EXT2)))
-  $(call add_json_bool, TargetUserimagesUseExt3, $(filter true,$(TARGET_USERIMAGES_USE_EXT3)))
-  $(call add_json_bool, TargetUserimagesUseExt4, $(filter true,$(TARGET_USERIMAGES_USE_EXT4)))
-
-  $(call add_json_bool, TargetUserimagesSparseExtDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)))
-  $(call add_json_bool, TargetUserimagesSparseErofsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED)))
-  $(call add_json_bool, TargetUserimagesSparseSquashfsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED)))
-  $(call add_json_bool, TargetUserimagesSparseF2fsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)))
-
-  $(call add_json_str, BoardErofsCompressor, $(BOARD_EROFS_COMPRESSOR))
-  $(call add_json_str, BoardErofsCompressorHints, $(BOARD_EROFS_COMPRESS_HINTS))
-  $(call add_json_str, BoardErofsPclusterSize, $(BOARD_EROFS_PCLUSTER_SIZE))
-  $(call add_json_str, BoardErofsShareDupBlocks, $(BOARD_EROFS_SHARE_DUP_BLOCKS))
-  $(call add_json_str, BoardErofsUseLegacyCompression, $(BOARD_EROFS_USE_LEGACY_COMPRESSION))
-  $(call add_json_str, BoardExt4ShareDupBlocks, $(BOARD_EXT4_SHARE_DUP_BLOCKS))
-  $(call add_json_str, BoardFlashLogicalBlockSize, $(BOARD_FLASH_LOGICAL_BLOCK_SIZE))
-  $(call add_json_str, BoardFlashEraseBlockSize, $(BOARD_FLASH_ERASE_BLOCK_SIZE))
-
-  $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
-  $(call add_json_bool, ProductUseDynamicPartitionSize, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)))
-  $(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP)))
-
-  $(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE)))
-
-  $(call add_json_list, ProductPackages, $(sort $(PRODUCT_PACKAGES)))
-$(call end_json_map)
-
 $(call add_json_bool, BuildFromSourceStub, $(findstring true,$(PRODUCT_BUILD_FROM_SOURCE_STUB) $(BUILD_FROM_SOURCE_STUB)))
 
 $(call add_json_bool, HiddenapiExportableStubs, $(filter true,$(PRODUCT_HIDDEN_API_EXPORTABLE_STUBS)))
@@ -428,4 +369,6 @@
 	  rm $(SOONG_VARIABLES).tmp; \
 	fi)
 
+include $(BUILD_SYSTEM)/soong_extra_config.mk
+
 endif # CONFIGURE_SOONG
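
Note: the newly included soong_extra_config.mk (below) reuses the write-to-.tmp, cmp, then mv-or-rm pattern visible at the end of this hunk, so the generated variables file only gets a new timestamp when its contents actually change. A condensed sketch of the pattern, with OUT_FILE as a placeholder for $(SOONG_VARIABLES) or $(SOONG_EXTRA_VARIABLES):

    $(file >$(OUT_FILE).tmp,$(json_contents))
    $(shell if ! cmp -s $(OUT_FILE).tmp $(OUT_FILE); then \
              mv $(OUT_FILE).tmp $(OUT_FILE); \
            else \
              rm $(OUT_FILE).tmp; \
            fi)
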
diff --git a/core/soong_extra_config.mk b/core/soong_extra_config.mk
new file mode 100644
index 0000000..e4432d2
--- /dev/null
+++ b/core/soong_extra_config.mk
@@ -0,0 +1,105 @@
+$(call json_start)
+
+$(call add_json_str, DeviceCpuVariantRuntime,           $(TARGET_CPU_VARIANT_RUNTIME))
+$(call add_json_str, DeviceAbiList,                     $(TARGET_CPU_ABI_LIST))
+$(call add_json_str, DeviceAbiList32,                   $(TARGET_CPU_ABI_LIST_32_BIT))
+$(call add_json_str, DeviceAbiList64,                   $(TARGET_CPU_ABI_LIST_64_BIT))
+$(call add_json_str, DeviceSecondaryCpuVariantRuntime,  $(TARGET_2ND_CPU_VARIANT_RUNTIME))
+
+$(call add_json_str, Dex2oatTargetCpuVariantRuntime,         $(DEX2OAT_TARGET_CPU_VARIANT_RUNTIME))
+$(call add_json_str, Dex2oatTargetInstructionSetFeatures,    $(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES))
+$(call add_json_str, SecondaryDex2oatCpuVariantRuntime,      $($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT_RUNTIME))
+$(call add_json_str, SecondaryDex2oatInstructionSetFeatures, $($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES))
+
+$(call add_json_str, BoardPlatform,          $(TARGET_BOARD_PLATFORM))
+$(call add_json_str, BoardShippingApiLevel,  $(BOARD_SHIPPING_API_LEVEL))
+$(call add_json_str, ShippingApiLevel,       $(PRODUCT_SHIPPING_API_LEVEL))
+$(call add_json_str, ShippingVendorApiLevel, $(PRODUCT_SHIPPING_VENDOR_API_LEVEL))
+
+$(call add_json_str, ProductModel,                      $(PRODUCT_MODEL))
+$(call add_json_str, ProductModelForAttestation,        $(PRODUCT_MODEL_FOR_ATTESTATION))
+$(call add_json_str, ProductBrandForAttestation,        $(PRODUCT_BRAND_FOR_ATTESTATION))
+$(call add_json_str, ProductNameForAttestation,         $(PRODUCT_NAME_FOR_ATTESTATION))
+$(call add_json_str, ProductDeviceForAttestation,       $(PRODUCT_DEVICE_FOR_ATTESTATION))
+$(call add_json_str, ProductManufacturerForAttestation, $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))
+
+$(call add_json_str, SystemBrand, $(PRODUCT_SYSTEM_BRAND))
+$(call add_json_str, SystemDevice, $(PRODUCT_SYSTEM_DEVICE))
+$(call add_json_str, SystemManufacturer, $(PRODUCT_SYSTEM_MANUFACTURER))
+$(call add_json_str, SystemModel, $(PRODUCT_SYSTEM_MODEL))
+$(call add_json_str, SystemName, $(PRODUCT_SYSTEM_NAME))
+
+# Collapses ?= and = operators for system property variables. Also removes double quotes to prevent
+# malformed JSON. This change aligns with the existing behavior of sysprop.mk, which passes property
+# variables to the echo command, effectively discarding surrounding double quotes.
+define collapse-prop-pairs
+$(subst ",,$(call collapse-pairs,$(call collapse-pairs,$$($(1)),?=),=))
+endef
+
+$(call add_json_list, PRODUCT_SYSTEM_PROPERTIES,         $(call collapse-prop-pairs,PRODUCT_SYSTEM_PROPERTIES))
+$(call add_json_list, PRODUCT_SYSTEM_DEFAULT_PROPERTIES, $(call collapse-prop-pairs,PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
+$(call add_json_list, PRODUCT_SYSTEM_EXT_PROPERTIES,     $(call collapse-prop-pairs,PRODUCT_SYSTEM_EXT_PROPERTIES))
+$(call add_json_list, PRODUCT_VENDOR_PROPERTIES,         $(call collapse-prop-pairs,PRODUCT_VENDOR_PROPERTIES))
+$(call add_json_list, PRODUCT_PRODUCT_PROPERTIES,        $(call collapse-prop-pairs,PRODUCT_PRODUCT_PROPERTIES))
+$(call add_json_list, PRODUCT_ODM_PROPERTIES,            $(call collapse-prop-pairs,PRODUCT_ODM_PROPERTIES))
+$(call add_json_list, PRODUCT_PROPERTY_OVERRIDES,        $(call collapse-prop-pairs,PRODUCT_PROPERTY_OVERRIDES))
+
+$(call add_json_str, BootloaderBoardName, $(TARGET_BOOTLOADER_BOARD_NAME))
+
+$(call add_json_bool, SdkBuild, $(filter sdk sdk_addon,$(MAKECMDGOALS)))
+
+_config_enable_uffd_gc := \
+  $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
+$(call add_json_str, EnableUffdGc, $(_config_enable_uffd_gc))
+_config_enable_uffd_gc :=
+
+$(call add_json_str, SystemServerCompilerFilter, $(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
+
+$(call add_json_bool, Product16KDeveloperOption, $(filter true,$(PRODUCT_16K_DEVELOPER_OPTION)))
+
+$(call add_json_str, RecoveryDefaultRotation, $(TARGET_RECOVERY_DEFAULT_ROTATION))
+$(call add_json_str, RecoveryOverscanPercent, $(TARGET_RECOVERY_OVERSCAN_PERCENT))
+$(call add_json_str, RecoveryPixelFormat, $(TARGET_RECOVERY_PIXEL_FORMAT))
+
+ifdef AB_OTA_UPDATER
+$(call add_json_bool, AbOtaUpdater, $(filter true,$(AB_OTA_UPDATER)))
+$(call add_json_str, AbOtaPartitions, $(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS))))
+endif
+
+ifdef PRODUCT_USE_DYNAMIC_PARTITIONS
+$(call add_json_bool, UseDynamicPartitions, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITIONS)))
+endif
+
+ifdef PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+$(call add_json_bool, RetrofitDynamicPartitions, $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)))
+endif
+
+$(call add_json_bool, DontUseVabcOta, $(filter true,$(BOARD_DONT_USE_VABC_OTA)))
+
+$(call add_json_bool, FullTreble, $(filter true,$(PRODUCT_FULL_TREBLE)))
+
+$(call add_json_bool, NoBionicPageSizeMacro, $(filter true,$(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)))
+
+$(call add_json_bool, PropertySplitEnabled, $(filter true,$(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED)))
+
+$(call add_json_str, ScreenDensity, $(TARGET_SCREEN_DENSITY))
+
+$(call add_json_bool, UsesVulkan, $(filter true,$(TARGET_USES_VULKAN)))
+
+$(call add_json_bool, ZygoteForce64, $(filter true,$(ZYGOTE_FORCE_64)))
+
+$(call add_json_str, VendorSecurityPatch,       $(VENDOR_SECURITY_PATCH))
+$(call add_json_str, VendorImageFileSystemType, $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE))
+
+$(call add_json_list, BuildVersionTags, $(BUILD_VERSION_TAGS))
+
+$(call json_end)
+
+$(shell mkdir -p $(dir $(SOONG_EXTRA_VARIABLES)))
+$(file >$(SOONG_EXTRA_VARIABLES).tmp,$(json_contents))
+
+$(shell if ! cmp -s $(SOONG_EXTRA_VARIABLES).tmp $(SOONG_EXTRA_VARIABLES); then \
+	  mv $(SOONG_EXTRA_VARIABLES).tmp $(SOONG_EXTRA_VARIABLES); \
+	else \
+	  rm $(SOONG_EXTRA_VARIABLES).tmp; \
+	fi)
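
Note: a small worked example of collapse-prop-pairs, assuming the core collapse-pairs helper behaves as usual (it strips the spaces around the given operator); the property names and values are hypothetical:

    # Input list:
    #   PRODUCT_VENDOR_PROPERTIES := ro.example.one = 1 ro.example.two ?= "2"
    # After $(call collapse-prop-pairs,PRODUCT_VENDOR_PROPERTIES):
    #   ro.example.one=1 ro.example.two?=2
    # i.e. each name/operator/value triple is collapsed into a single word and the
    # surrounding double quotes are removed, so each word is safe to emit as a
    # JSON list element.
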
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 4a72a1f..dd1d8b5 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -229,3 +229,5 @@
 aar_classes_jar :=
 all_res_assets :=
 LOCAL_IS_STATIC_JAVA_LIBRARY :=
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=STATIC_JAVA_LIBRARY))
\ No newline at end of file
diff --git a/core/static_library_internal.mk b/core/static_library_internal.mk
index 0392460..844360e 100644
--- a/core/static_library_internal.mk
+++ b/core/static_library_internal.mk
@@ -41,3 +41,5 @@
 $(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries)
 	$(package-coverage-files)
 endif
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=STATIC_LIBRARY))
\ No newline at end of file
diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk
new file mode 100644
index 0000000..6e3da72
--- /dev/null
+++ b/core/sysprop_config.mk
@@ -0,0 +1,295 @@
+# ADDITIONAL_<partition>_PROPERTIES are properties that are determined by the
+# build system itself. Don't let it be defined from outside of the core build
+# system like Android.mk or <product>.mk files.
+_additional_prop_var_names := \
+    ADDITIONAL_SYSTEM_PROPERTIES \
+    ADDITIONAL_VENDOR_PROPERTIES \
+    ADDITIONAL_ODM_PROPERTIES \
+    ADDITIONAL_PRODUCT_PROPERTIES
+
+$(foreach name, $(_additional_prop_var_names),\
+  $(if $($(name)),\
+    $(error $(name) must not be set before here. $($(name)))\

+  ,)\
+  $(eval $(name) :=)\
+)
+_additional_prop_var_names :=
+
+#
+# -----------------------------------------------------------------
+# Add the product-defined properties to the build properties.
+ifneq ($(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED), true)
+  ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
+else
+  ifndef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+    ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
+  endif
+endif
+
+ADDITIONAL_SYSTEM_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
+
+# Set ro.llndk.api_level to show the maximum vendor API level that the LLNDK in
+# the system partition supports.
+ifdef RELEASE_BOARD_API_LEVEL
+ADDITIONAL_SYSTEM_PROPERTIES += ro.llndk.api_level=$(RELEASE_BOARD_API_LEVEL)
+endif
+
+# Sets ro.actionable_compatible_property.enabled so that it is known at runtime
+# whether the allowed list of actionable compatible properties is enabled.
+ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=true
+
+# Add the system server compiler filter if it is specified for the product.
+ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
+ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
+endif
+
+# Add the 16K developer option if it is defined for the product.
+ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true)
+ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true
+else
+ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false
+endif
+
+# Enable core platform API violation warnings on userdebug and eng builds.
+ifneq ($(TARGET_BUILD_VARIANT),user)
+ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
+endif
+
+# Define ro.sanitize.<name> properties for all global sanitizers.
+ADDITIONAL_SYSTEM_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
+
+# Sets the default value of ro.postinstall.fstab.prefix to /system.
+# Device board config should override the value to /product when needed by:
+#
+#     PRODUCT_PRODUCT_PROPERTIES += ro.postinstall.fstab.prefix=/product
+#
+# It then uses ${ro.postinstall.fstab.prefix}/etc/fstab.postinstall to
+# mount the system_other partition.
+ADDITIONAL_SYSTEM_PROPERTIES += ro.postinstall.fstab.prefix=/system
+
+# Add cpu properties for bionic and ART.
+ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH)
+ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.cpu_variant=$(TARGET_CPU_VARIANT_RUNTIME)
+ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.2nd_arch=$(TARGET_2ND_ARCH)
+ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.2nd_cpu_variant=$(TARGET_2ND_CPU_VARIANT_RUNTIME)
+
+ADDITIONAL_VENDOR_PROPERTIES += persist.sys.dalvik.vm.lib.2=libart.so
+ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).variant=$(DEX2OAT_TARGET_CPU_VARIANT_RUNTIME)
+ifneq ($(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
+  ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).features=$(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
+endif
+
+ifdef TARGET_2ND_ARCH
+  ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).variant=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT_RUNTIME)
+  ifneq ($($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
+    ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).features=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
+  endif
+endif
+
+# Although these variables are prefixed with TARGET_RECOVERY_, they are also needed under charger
+# mode (via libminui).
+ifdef TARGET_RECOVERY_DEFAULT_ROTATION
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.minui.default_rotation=$(TARGET_RECOVERY_DEFAULT_ROTATION)
+endif
+ifdef TARGET_RECOVERY_OVERSCAN_PERCENT
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.minui.overscan_percent=$(TARGET_RECOVERY_OVERSCAN_PERCENT)
+endif
+ifdef TARGET_RECOVERY_PIXEL_FORMAT
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.minui.pixel_format=$(TARGET_RECOVERY_PIXEL_FORMAT)
+endif
+
+ifdef PRODUCT_USE_DYNAMIC_PARTITIONS
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.boot.dynamic_partitions=$(PRODUCT_USE_DYNAMIC_PARTITIONS)
+endif
+
+ifdef PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.boot.dynamic_partitions_retrofit=$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)
+endif
+
+ifdef PRODUCT_SHIPPING_API_LEVEL
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.product.first_api_level=$(PRODUCT_SHIPPING_API_LEVEL)
+endif
+
+ifdef PRODUCT_SHIPPING_VENDOR_API_LEVEL
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.vendor.api_level=$(PRODUCT_SHIPPING_VENDOR_API_LEVEL)
+endif
+
+ifneq ($(TARGET_BUILD_VARIANT),user)
+  ifdef PRODUCT_SET_DEBUGFS_RESTRICTIONS
+    ADDITIONAL_VENDOR_PROPERTIES += \
+      ro.product.debugfs_restrictions.enabled=$(PRODUCT_SET_DEBUGFS_RESTRICTIONS)
+  endif
+endif
+
+# Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
+# This must not be defined for non-GRF devices.
+# The values of the GRF properties will be verified by post_process_props.py.
+ifdef BOARD_SHIPPING_API_LEVEL
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.board.first_api_level=$(BOARD_SHIPPING_API_LEVEL)
+endif
+
+# The build system sets BOARD_API_LEVEL to the API level of the vendor API surface.
+# This must not be altered outside of the build system.
+ifdef BOARD_API_LEVEL
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.board.api_level=$(BOARD_API_LEVEL)
+endif
+# RELEASE_BOARD_API_LEVEL_FROZEN is true when the vendor API surface is frozen.
+ifdef RELEASE_BOARD_API_LEVEL_FROZEN
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.board.api_frozen=$(RELEASE_BOARD_API_LEVEL_FROZEN)
+endif
+
+# Set a build prop that is read by ota_from_target_files when generating an OTA
+# package, to decide whether VABC should be disabled.
+ifeq ($(BOARD_DONT_USE_VABC_OTA),true)
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.vendor.build.dont_use_vabc=true
+endif
+
+# Set the flag in vendor so VTS knows whether the new fingerprint format is in use
+# when the system images are replaced by GSI.
+ifeq ($(BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT),true)
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.vendor.build.fingerprint_has_digest=1
+endif
+
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.vendor.build.security_patch=$(VENDOR_SECURITY_PATCH) \
+    ro.product.board=$(TARGET_BOOTLOADER_BOARD_NAME) \
+    ro.board.platform=$(TARGET_BOARD_PLATFORM) \
+    ro.hwui.use_vulkan=$(TARGET_USES_VULKAN)
+
+ifdef TARGET_SCREEN_DENSITY
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.sf.lcd_density=$(TARGET_SCREEN_DENSITY)
+endif
+
+ifdef AB_OTA_UPDATER
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.build.ab_update=$(AB_OTA_UPDATER)
+endif
+
+ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)
+
+ifeq ($(AB_OTA_UPDATER),true)
+ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
+ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
+endif
+
+# Set this property for VTS to skip large page size tests on unsupported devices.
+ADDITIONAL_PRODUCT_PROPERTIES += \
+    ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED)
+
+ifeq ($(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO),true)
+ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.no_bionic_page_size_macro=true
+endif
+
+user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
+enable_target_debugging := true
+enable_dalvik_lock_contention_logging := true
+ifneq (,$(user_variant))
+  # Target is secure in user builds.
+  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=1
+  ADDITIONAL_SYSTEM_PROPERTIES += security.perf_harden=1
+
+  ifeq ($(user_variant),user)
+    ADDITIONAL_SYSTEM_PROPERTIES += ro.adb.secure=1
+  endif
+
+  ifneq ($(user_variant),userdebug)
+    # Disable debugging in plain user builds.
+    enable_target_debugging :=
+    enable_dalvik_lock_contention_logging :=
+  else
+    # Disable debugging in userdebug builds if PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG
+    # is set.
+    ifneq (,$(strip $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG)))
+      enable_target_debugging :=
+    endif
+  endif
+
+  # Disallow mock locations by default for user builds
+  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=0
+
+else # !user_variant
+  # Turn on checkjni for non-user builds.
+  ADDITIONAL_SYSTEM_PROPERTIES += ro.kernel.android.checkjni=1
+  # Set device insecure for non-user builds.
+  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=0
+  # Allow mock locations by default for non user builds
+  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=1
+endif # !user_variant
+
+ifeq (true,$(strip $(enable_dalvik_lock_contention_logging)))
+  # Enable Dalvik lock contention logging.
+  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.lockprof.threshold=500
+endif # !enable_dalvik_lock_contention_logging
+
+ifeq (true,$(strip $(enable_target_debugging)))
+  # Target is more debuggable and adbd is on by default
+  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=1
+else # !enable_target_debugging
+  # Target is less debuggable and adbd is off by default
+  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=0
+endif # !enable_target_debugging
+
+enable_target_debugging:=
+enable_dalvik_lock_contention_logging:=
+
+ifneq ($(filter sdk sdk_addon,$(MAKECMDGOALS)),)
+_is_sdk_build := true
+endif
+
+ifeq ($(TARGET_BUILD_VARIANT),eng)
+ifneq ($(filter ro.setupwizard.mode=ENABLED, $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))),)
+  # Don't require the setup wizard on eng builds
+  ADDITIONAL_SYSTEM_PROPERTIES := $(filter-out ro.setupwizard.mode=%,\
+          $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))) \
+          ro.setupwizard.mode=OPTIONAL
+endif
+ifndef _is_sdk_build
+  # To speedup startup of non-preopted builds, don't verify or compile the boot image.
+  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.image-dex2oat-filter=extract
+endif
+# b/323566535
+ADDITIONAL_SYSTEM_PROPERTIES += init.svc_debug.no_fatal.zygote=true
+endif
+
+ifdef _is_sdk_build
+ADDITIONAL_SYSTEM_PROPERTIES += xmpp.auto-presence=true
+ADDITIONAL_SYSTEM_PROPERTIES += ro.config.nocheckin=yes
+endif
+
+_is_sdk_build :=
+
+ADDITIONAL_SYSTEM_PROPERTIES += net.bt.name=Android
+
+# This property is set by flashing a debug boot image, so it defaults to false.
+ADDITIONAL_SYSTEM_PROPERTIES += ro.force.debuggable=0
+
+config_enable_uffd_gc := \
+  $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
+
+# This is a temporary system property that controls the ART module. The plan is
+# to remove it by Aug 2025, at which time Mainline updates of the ART module
+# will ignore it as well.
+# If the value is "default", it will be mangled by post_process_props.py.
+ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(config_enable_uffd_gc)
+
+ADDITIONAL_SYSTEM_PROPERTIES := $(strip $(ADDITIONAL_SYSTEM_PROPERTIES))
+ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES))
+ADDITIONAL_VENDOR_PROPERTIES := $(strip $(ADDITIONAL_VENDOR_PROPERTIES))
+
+.KATI_READONLY += \
+    ADDITIONAL_SYSTEM_PROPERTIES \
+    ADDITIONAL_PRODUCT_PROPERTIES \
+    ADDITIONAL_VENDOR_PROPERTIES
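
Note: the ADDITIONAL_*_PROPERTIES lists are reserved for the build system itself; the guard at the top of this file errors out if they arrive already populated. Products contribute properties through the PRODUCT_*_PROPERTIES variables instead. A minimal, hypothetical product-side snippet for contrast (the property name and value are made up):

    # In a product makefile, not in the core build system:
    PRODUCT_VENDOR_PROPERTIES += \
        ro.example.vendor.feature=enabled
    # By contrast, ADDITIONAL_VENDOR_PROPERTIES may only be appended to from
    # within core build system files such as this sysprop_config.mk.
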
diff --git a/core/tasks/art.mk b/core/tasks/art.mk
new file mode 100644
index 0000000..ded6125
--- /dev/null
+++ b/core/tasks/art.mk
@@ -0,0 +1,26 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+########################################################################
+# clean-oat rules
+#
+
+.PHONY: clean-oat
+clean-oat: clean-oat-host clean-oat-target
+
+.PHONY: clean-oat-host
+clean-oat-host:
+	find $(OUT_DIR) '(' -name '*.oat' -o -name '*.odex' -o -name '*.art' -o -name '*.vdex' ')' -a -type f | xargs rm -f
+	rm -rf $(TMPDIR)/*/test-*/dalvik-cache/*
+	rm -rf $(TMPDIR)/android-data/dalvik-cache/*
diff --git a/core/tasks/automotive-sdv-tests.mk b/core/tasks/automotive-sdv-tests.mk
new file mode 100644
index 0000000..12706ce
--- /dev/null
+++ b/core/tasks/automotive-sdv-tests.mk
@@ -0,0 +1,61 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+.PHONY: automotive-sdv-tests
+
+automotive-sdv-tests-zip := $(PRODUCT_OUT)/automotive-sdv-tests.zip
+# Create an artifact to include a list of test config files in automotive-sdv-tests.
+automotive-sdv-tests-list-zip := $(PRODUCT_OUT)/automotive-sdv-tests_list.zip
+# Create an artifact to include all test config files in automotive-sdv-tests.
+automotive-sdv-tests-configs-zip := $(PRODUCT_OUT)/automotive-sdv-tests_configs.zip
+my_host_shared_lib_for_automotive_sdv_tests := $(call copy-many-files,$(COMPATIBILITY.automotive-sdv-tests.HOST_SHARED_LIBRARY.FILES))
+automotive_sdv_tests_host_shared_libs_zip := $(PRODUCT_OUT)/automotive-sdv-tests_host-shared-libs.zip
+
+$(automotive-sdv-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(automotive-sdv-tests-list-zip) $(automotive-sdv-tests-configs-zip) $(automotive_sdv_tests_host_shared_libs_zip)
+$(automotive-sdv-tests-zip) : PRIVATE_automotive_sdv_tests_list := $(PRODUCT_OUT)/automotive-sdv-tests_list
+$(automotive-sdv-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_automotive_sdv_tests)
+$(automotive-sdv-tests-zip) : PRIVATE_automotive_host_shared_libs_zip := $(automotive_sdv_tests_host_shared_libs_zip)
+$(automotive-sdv-tests-zip) : $(COMPATIBILITY.automotive-sdv-tests.FILES) $(my_host_shared_lib_for_automotive_sdv_tests) $(SOONG_ZIP)
+	rm -f $@-shared-libs.list
+	echo $(sort $(COMPATIBILITY.automotive-sdv-tests.FILES)) | tr " " "\n" > $@.list
+	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
+	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+	  echo $$shared_lib >> $@-host.list; \
+	  echo $$shared_lib >> $@-shared-libs.list; \
+	done
+	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
+	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
+	$(hide) $(SOONG_ZIP) -d -o $(automotive-sdv-tests-configs-zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
+	$(SOONG_ZIP) -d -o $(PRIVATE_automotive_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-shared-libs.list
+	rm -f $(PRIVATE_automotive_sdv_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_automotive_sdv_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_automotive_sdv_tests_list)
+	$(hide) $(SOONG_ZIP) -d -o $(automotive-sdv-tests-list-zip) -C $(dir $@) -f $(PRIVATE_automotive_sdv_tests_list)
+	rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
+	  $@-shared-libs.list $@-host-shared-libs.list $(PRIVATE_automotive_sdv_tests_list)
+
+automotive-sdv-tests: $(automotive-sdv-tests-zip)
+$(call dist-for-goals, automotive-sdv-tests, $(automotive-sdv-tests-zip) $(automotive-sdv-tests-list-zip) $(automotive-sdv-tests-configs-zip) $(automotive_sdv_tests_host_shared_libs_zip))
+
+$(call declare-1p-container,$(automotive-sdv-tests-zip),)
+$(call declare-container-license-deps,$(automotive-sdv-tests-zip),$(COMPATIBILITY.automotive-sdv-tests.FILES) $(my_host_shared_lib_for_automotive_sdv_tests),$(PRODUCT_OUT)/:/)
+
+tests: automotive-sdv-tests
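
Note: automotive-sdv-tests.mk follows the same suite-packaging recipe as device-tests.mk and the new device-platinum-tests.mk below: split COMPATIBILITY.<suite>.FILES into host and target lists, zip them with soong_zip, and publish companion _list/_configs/_host-shared-libs zips via dist-for-goals. A skeletal sketch of the shape, using a hypothetical suite name example-tests (details such as shared-lib handling omitted):

    example_tests_zip := $(PRODUCT_OUT)/example-tests.zip
    $(example_tests_zip): $(COMPATIBILITY.example-tests.FILES) $(SOONG_ZIP)
    	echo $(sort $(COMPATIBILITY.example-tests.FILES)) | tr " " "\n" > $@.list
    	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
    	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
    	$(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
    .PHONY: example-tests
    example-tests: $(example_tests_zip)
    $(call dist-for-goals,example-tests,$(example_tests_zip))
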
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index b9f0988..294cb57 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -16,6 +16,8 @@
 test_suite_tradefed := cts-tradefed
 test_suite_dynamic_config := cts/tools/cts-tradefed/DynamicConfig.xml
 test_suite_readme := cts/tools/cts-tradefed/README
+test_suite_tools := $(HOST_OUT_JAVA_LIBRARIES)/ats_console_deploy.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/ats_olc_server_local_mode_deploy.jar
 
 $(call declare-1p-target,$(test_suite_dynamic_config),cts)
 $(call declare-1p-target,$(test_suite_readme),cts)
@@ -90,8 +92,16 @@
 
 cts_api_coverage_exe := $(HOST_OUT_EXECUTABLES)/cts-api-coverage
 dexdeps_exe := $(HOST_OUT_EXECUTABLES)/dexdeps
+cts_api_map_exe := $(HOST_OUT_EXECUTABLES)/cts-api-map
 
 coverage_out := $(HOST_OUT)/cts-api-coverage
+api_map_out := $(HOST_OUT)/cts-api-map
+
+cts_jar_files := $(api_map_out)/api_map_files.txt
+$(cts_jar_files): PRIVATE_API_MAP_FILES := $(sort $(COMPATIBILITY.cts.API_MAP_FILES))
+$(cts_jar_files):
+	mkdir -p $(dir $@)
+	echo $(PRIVATE_API_MAP_FILES) > $@
 
 api_xml_description := $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml
 
@@ -114,6 +124,14 @@
 cts_api_coverage_dependencies := $(cts_api_coverage_exe) $(dexdeps_exe) $(api_xml_description) $(napi_xml_description)
 cts_system_api_coverage_dependencies := $(cts_api_coverage_exe) $(dexdeps_exe) $(system_api_xml_description)
 
+cts-api-xml-api-map-report := $(api_map_out)/api-map.xml
+cts-api-html-api-map-report := $(api_map_out)/api-map.html
+cts-system-api-xml-api-map-report := $(api_map_out)/system-api-map.xml
+cts-system-api-html-api-map-report := $(api_map_out)/system-api-map.html
+
+cts_system_api_map_dependencies := $(cts_api_map_exe) $(system_api_xml_description) $(cts_jar_files)
+cts_api_map_dependencies := $(cts_api_map_exe) $(api_xml_description) $(cts_jar_files)
+
 android_cts_zip := $(HOST_OUT)/cts/android-cts.zip
 cts_verifier_apk := $(call intermediates-dir-for,APPS,CtsVerifier)/package.apk
 
@@ -192,6 +210,48 @@
 .PHONY: cts-coverage-report-all cts-api-coverage
 cts-coverage-report-all: cts-test-coverage cts-verifier-coverage cts-combined-coverage cts-combined-xml-coverage
 
+$(cts-system-api-xml-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-system-api-xml-api-map-report): PRIVATE_API_XML_DESC := $(system_api_xml_description)
+$(cts-system-api-xml-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-system-api-xml-api-map-report) : $(android_cts_zip) $(cts_system_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS System API MAP Report - XML",\
+			$(PRIVATE_JAR_FILES),xml)
+
+$(cts-system-api-html-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-system-api-html-api-map-report): PRIVATE_API_XML_DESC := $(system_api_xml_description)
+$(cts-system-api-html-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-system-api-html-api-map-report) : $(android_cts_zip) $(cts_system_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS System API MAP Report - HTML",\
+			$(PRIVATE_JAR_FILES),html)
+
+$(cts-api-xml-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-api-xml-api-map-report): PRIVATE_API_XML_DESC := $(api_xml_description)
+$(cts-api-xml-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-api-xml-api-map-report) : $(android_cts_zip) $(cts_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS API MAP Report - XML",\
+			$(PRIVATE_JAR_FILES),xml)
+
+$(cts-api-html-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-api-html-api-map-report): PRIVATE_API_XML_DESC := $(api_xml_description)
+$(cts-api-html-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-api-html-api-map-report) : $(android_cts_zip) $(cts_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS API MAP Report - HTML",\
+			$(PRIVATE_JAR_FILES),html)
+
+.PHONY: cts-system-api-xml-api-map
+cts-system-api-xml-api-map : $(cts-system-api-xml-api-map-report)
+
+.PHONY: cts-system-api-html-api-map
+cts-system-api-html-api-map : $(cts-system-api-html-api-map-report)
+
+.PHONY: cts-api-xml-api-map
+cts-api-xml-api-map : $(cts-api-xml-api-map-report)
+
+.PHONY: cts-api-html-api-map
+cts-api-html-api-map : $(cts-api-html-api-map-report)
+
+.PHONY: cts-api-map-all
+
 # Put the test coverage report in the dist dir if "cts-api-coverage" is among the build goals.
 $(call dist-for-goals, cts-api-coverage, $(cts-test-coverage-report):cts-test-coverage-report.html)
 $(call dist-for-goals, cts-api-coverage, $(cts-system-api-coverage-report):cts-system-api-coverage-report.html)
@@ -207,6 +267,17 @@
 ALL_TARGETS.$(cts-combined-coverage-report).META_LIC:=$(module_license_metadata)
 ALL_TARGETS.$(cts-combined-xml-coverage-report).META_LIC:=$(module_license_metadata)
 
+# Put the test api map report in the dist dir if "cts-api-map-all" is among the build goals.
+$(call dist-for-goals, cts-api-map-all, $(cts-system-api-xml-api-map-report):cts-system-api-xml-api-map-report.xml)
+$(call dist-for-goals, cts-api-map-all, $(cts-system-api-html-api-map-report):cts-system-api-html-api-map-report.html)
+$(call dist-for-goals, cts-api-map-all, $(cts-api-xml-api-map-report):cts-api-xml-api-map-report.xml)
+$(call dist-for-goals, cts-api-map-all, $(cts-api-html-api-map-report):cts-api-html-api-map-report.html)
+
+ALL_TARGETS.$(cts-system-api-xml-api-map-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-system-api-html-api-map-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-api-xml-api-map-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-api-html-api-map-report).META_LIC:=$(module_license_metadata)
+
 # Arguments;
 #  1 - Name of the report printed out on the screen
 #  2 - List of apk files that will be scanned to generate the report
@@ -217,23 +288,42 @@
 	@ echo $(1): file://$$(cd $(dir $@); pwd)/$(notdir $@)
 endef
 
+# Arguments;
+#  1 - Name of the report printed out on the screen
+#  2 - A file containing the list of files to be analyzed
+#  3 - Format of the report
+define generate-api-map-report-cts
+	$(hide) mkdir -p $(dir $@)
+	$(hide) $(PRIVATE_CTS_API_MAP_EXE) -j 8 -a $(PRIVATE_API_XML_DESC) -i $(2) -f $(3) -o $@
+	@ echo $(1): file://$$(cd $(dir $@); pwd)/$(notdir $@)
+endef
+
 # Reset temp vars
 cts_api_coverage_dependencies :=
 cts_system_api_coverage_dependencies :=
+cts_api_map_dependencies :=
+cts_system_api_map_dependencies :=
 cts-combined-coverage-report :=
 cts-combined-xml-coverage-report :=
 cts-verifier-coverage-report :=
 cts-test-coverage-report :=
 cts-system-api-coverage-report :=
 cts-system-api-xml-coverage-report :=
+cts-api-xml-api-map-report :=
+cts-api-html-api-map-report :=
+cts-system-api-xml-api-map-report :=
+cts-system-api-html-api-map-report :=
 api_xml_description :=
 api_text_description :=
 system_api_xml_description :=
 napi_xml_description :=
 napi_text_description :=
 coverage_out :=
+api_map_out :=
+cts_jar_files :=
 dexdeps_exe :=
 cts_api_coverage_exe :=
+cts_api_map_exe :=
 cts_verifier_apk :=
 android_cts_zip :=
 cts-dir :=
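
Note: each of the four api-map report rules expands generate-api-map-report-cts with the three arguments documented in the macro's header, and building the cts-api-map-all goal with dist publishes the reports per the dist-for-goals entries above. For the XML system-API report, the recipe effectively runs (the API description path is abbreviated, since its definition is outside this hunk):

    mkdir -p $(HOST_OUT)/cts-api-map
    $(HOST_OUT_EXECUTABLES)/cts-api-map -j 8 -a <system-api-xml-description> \
        -i $(HOST_OUT)/cts-api-map/api_map_files.txt -f xml \
        -o $(HOST_OUT)/cts-api-map/system-api-map.xml
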
diff --git a/core/tasks/device-platinum-tests.mk b/core/tasks/device-platinum-tests.mk
new file mode 100644
index 0000000..75f4c4c
--- /dev/null
+++ b/core/tasks/device-platinum-tests.mk
@@ -0,0 +1,71 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+.PHONY: device-platinum-tests
+
+device_platinum_tests_zip := $(PRODUCT_OUT)/device-platinum-tests.zip
+# Create an artifact to include a list of test config files in device-platinum-tests.
+device_platinum_tests_list_zip := $(PRODUCT_OUT)/device-platinum-tests_list.zip
+# Create an artifact to include all test config files in device-platinum-tests.
+device_platinum_tests_configs_zip := $(PRODUCT_OUT)/device-platinum-tests_configs.zip
+my_host_shared_lib_for_device_platinum_tests := $(call copy-many-files,$(COMPATIBILITY.device-platinum-tests.HOST_SHARED_LIBRARY.FILES))
+device_platinum_tests_host_shared_libs_zip := $(PRODUCT_OUT)/device-platinum-tests_host-shared-libs.zip
+
+$(device_platinum_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(device_platinum_tests_list_zip) $(device_platinum_tests_configs_zip) $(device_platinum_tests_host_shared_libs_zip)
+$(device_platinum_tests_zip) : PRIVATE_device_platinum_tests_list_zip := $(device_platinum_tests_list_zip)
+$(device_platinum_tests_zip) : PRIVATE_device_platinum_tests_configs_zip := $(device_platinum_tests_configs_zip)
+$(device_platinum_tests_zip) : PRIVATE_device_platinum_tests_list := $(PRODUCT_OUT)/device-platinum-tests_list
+$(device_platinum_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_platinum_tests)
+$(device_platinum_tests_zip) : PRIVATE_device_host_shared_libs_zip := $(device_platinum_tests_host_shared_libs_zip)
+$(device_platinum_tests_zip) : $(COMPATIBILITY.device-platinum-tests.FILES) $(my_host_shared_lib_for_device_platinum_tests) $(SOONG_ZIP)
+	rm -f $@-shared-libs.list
+	rm -f $(PRIVATE_device_platinum_tests_list_zip)
+	echo $(sort $(COMPATIBILITY.device-platinum-tests.FILES)) | tr " " "\n" > $@.list
+	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
+	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+	  echo $$shared_lib >> $@-host.list; \
+	  echo $$shared_lib >> $@-shared-libs.list; \
+	done
+	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
+	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_device_platinum_tests_configs_zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
+	$(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-shared-libs.list
+	rm -f $(PRIVATE_device_platinum_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_platinum_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_platinum_tests_list)
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_device_platinum_tests_list_zip) -C $(dir $@) -f $(PRIVATE_device_platinum_tests_list)
+	rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
+	  $@-shared-libs.list $@-host-shared-libs.list $(PRIVATE_device_platinum_tests_list)
+
+device-platinum-tests: $(device_platinum_tests_zip)
+$(call dist-for-goals, device-platinum-tests, $(device_platinum_tests_zip) $(device_platinum_tests_list_zip) $(device_platinum_tests_configs_zip) $(device_platinum_tests_host_shared_libs_zip))
+
+$(call declare-1p-container,$(device_platinum_tests_zip),)
+$(call declare-container-license-deps,$(device_platinum_tests_zip),$(COMPATIBILITY.device-platinum-tests.FILES) $(my_host_shared_lib_for_device_platinum_tests),$(PRODUCT_OUT)/:/)
+
+tests: device-platinum-tests
+
+# Reset temp vars
+device_platinum_tests_zip :=
+device_platinum_tests_list_zip :=
+device_platinum_tests_configs_zip :=
+my_host_shared_lib_for_device_platinum_tests :=
+device_platinum_tests_host_shared_libs_zip :=
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 4167a7e..5850c4e 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -27,9 +27,9 @@
 $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
 $(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
 $(device-tests-zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip)
-$(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
+$(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
 	rm -f $@-shared-libs.list
-	echo $(sort $(COMPATIBILITY.device-tests.FILES)) | tr " " "\n" > $@.list
+	echo $(sort $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index cae71e4..d6fc072 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -47,11 +47,11 @@
 $(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
 $(general_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir)
 $(general_tests_zip) : PRIVATE_general_tests_configs_zip := $(general_tests_configs_zip)
-$(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(general_tests_tools) $(SOONG_ZIP)
+$(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(COMPATIBILITY.general-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(general_tests_tools) $(SOONG_ZIP)
 	rm -rf $(PRIVATE_INTERMEDIATES_DIR)
 	rm -f $@ $(PRIVATE_general_tests_list_zip)
 	mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools
-	echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
+	echo $(sort $(COMPATIBILITY.general-tests.FILES) $(COMPATIBILITY.general-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
 	find $(PRIVATE_KERNEL_LTP_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
 	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
 	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
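
Note: both the device-tests and general-tests recipes now fold COMPATIBILITY.<suite>.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES into the packaged file list alongside COMPATIBILITY.<suite>.FILES, so Soong-installed suite support files land in the zips as well. The change is the same in both files; condensed:

    # Before:
    #   echo $(sort $(COMPATIBILITY.<suite>.FILES)) | tr " " "\n" > <list>
    # After:
    #   echo $(sort $(COMPATIBILITY.<suite>.FILES) \
    #               $(COMPATIBILITY.<suite>.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) \
    #     | tr " " "\n" > <list>
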
diff --git a/core/tasks/mcts.mk b/core/tasks/mcts.mk
new file mode 100644
index 0000000..09a4191
--- /dev/null
+++ b/core/tasks/mcts.mk
@@ -0,0 +1,32 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifneq ($(wildcard test/mts/README.md),)
+
+mcts_test_suites :=
+mcts_test_suites += mcts
+
+$(foreach module, $(mts_modules), $(eval mcts_test_suites += mcts-$(module)))
+
+$(foreach suite, $(mcts_test_suites), \
+	$(eval test_suite_name := $(suite)) \
+	$(eval test_suite_tradefed := mts-tradefed) \
+	$(eval test_suite_readme := test/mts/README.md) \
+	$(eval include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk) \
+	$(eval .PHONY: $(suite)) \
+	$(eval $(suite): $(compatibility_zip)) \
+	$(eval $(call dist-for-goals, $(suite), $(compatibility_zip))) \
+)
+
+endif
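
Note: the foreach above stamps out one suite per entry in mts_modules (which is defined by the MTS suite's own makefiles, not here), each reusing compatibility.mk. For a hypothetical module name "foo", one iteration of the loop body is equivalent to:

    # "foo" is a made-up module name for illustration only.
    test_suite_name := mcts-foo
    test_suite_tradefed := mts-tradefed
    test_suite_readme := test/mts/README.md
    include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
    .PHONY: mcts-foo
    mcts-foo: $(compatibility_zip)
    $(call dist-for-goals, mcts-foo, $(compatibility_zip))
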
diff --git a/core/tasks/meta-lic.mk b/core/tasks/meta-lic.mk
index 0348844..85357eb 100644
--- a/core/tasks/meta-lic.mk
+++ b/core/tasks/meta-lic.mk
@@ -14,9 +14,133 @@
 
 # Declare license metadata for non-module files released with products.
 
+# Moved here from device/generic/car/Android.mk
+$(eval $(call declare-1p-copy-files,device/generic/car,))
+
+# Moved here from device/generic/trusty/Android.mk
+$(eval $(call declare-1p-copy-files,device/generic/trusty,))
+
+# Moved here from device/generic/uml/Android.mk
+$(eval $(call declare-1p-copy-files,device/generic/uml,))
+
+# Moved here from device/google_car/common/Android.mk
+$(eval $(call declare-1p-copy-files,device/google_car/common,))
+
+# Moved here from device/google/atv/Android.mk
+$(eval $(call declare-1p-copy-files,device/google/atv,atv-component-overrides.xml))
+$(eval $(call declare-1p-copy-files,device/google/atv,tv_core_hardware.xml))
+
+# Moved here from device/google/bramble/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,media_profiles_V1_0.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,media_codecs_performance.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,task_profiles.json,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-1p-copy-files,device/google/bramble,audio_policy_configuration.xml))
+
+# Moved here from device/google/barbet/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,media_profiles_V1_0.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,media_codecs_performance.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,task_profiles.json,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/barbet,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-1p-copy-files,device/google/barbet,audio_policy_configuration.xml))
+
+# Moved here from device/google/coral/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,media_profiles_V1_0.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,media_codecs_performance.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,task_profiles.json,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/coral,display_19261132550654593.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-1p-copy-files,device/google/coral,audio_policy_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/google/coral,display_19260504575090817.xml))
+
+# Moved here from device/google/gs101/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,media_profiles_V1_0.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,media_codecs_performance.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,task_profiles.json,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,p2p_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/gs101,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-1p-copy-files,device/google/gs101,audio_policy_configuration.xml))
+
+# Moved here from device/google/raviole/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,libnfc-nci-raven.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,media_profiles_V1_0.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,media_codecs_performance.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,task_profiles.json,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/raviole,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-1p-copy-files,device/google/raviole,audio_policy_configuration.xml))
+
+# Moved here from device/google/redfin/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,media_profiles_V1_0.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,media_codecs_performance.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,task_profiles.json,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/redfin,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-1p-copy-files,device/google/redfin,audio_policy_configuration.xml))
+
 # Moved here from device/sample/Android.mk
 $(eval $(call declare-1p-copy-files,device/sample,))
 
+# Moved here from device/google/trout/Android.mk
+$(eval $(call declare-1p-copy-files,device/google/trout,))
+
 # Moved here from frameworks/av/media/Android.mk
 $(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
 $(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
@@ -33,3 +157,37 @@
 $(eval $(call declare-1p-copy-files,frameworks/base,dirty-image-objects))
 $(eval $(call declare-1p-copy-files,frameworks/base/config,))
 $(eval $(call declare-1p-copy-files,frameworks/native/data,))
+
+# Moved here from hardware/google/camera/Android.mk
+$(eval $(call declare-1p-copy-files,hardware/google/camera,))
+
+# Moved here from hardware/interfaces/tv/Android.mk
+$(eval $(call declare-1p-copy-files,hardware/interfaces/tv,tuner_vts_config_1_0.xml))
+$(eval $(call declare-1p-copy-files,hardware/interfaces/tv,tuner_vts_config_1_1.xml))
+
+# Moved here from device/generic/goldfish/Android.mk
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/data,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/input,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/wifi,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/camera,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,hals.conf))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.qemu-adb-keys.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.system_ext.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,.json))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,ueventd.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,wpa_supplicant.conf))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,media_profiles_V1_0.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,fstab.ranchu))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,display_settings.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,display_settings_freeform.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,device_state_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu-core.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu-net.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,audio_policy_configuration.xml))
+
+# Moved here from packages/services/Car/Android.mk
+$(eval $(call declare-1p-copy-files,packages/services/Car,))
+
+# Moved here from hardware/libhardware_legacy/Android.mk
+$(eval $(call declare-1p-copy-files,hardware/libhardware_legacy,))
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 8546828..7593668 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -32,6 +32,7 @@
 			$(call write-optional-json-list, "auto_test_config", $(sort $(ALL_MODULES.$(m).auto_test_config))) \
 			$(call write-optional-json-list, "test_config", $(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS))) \
 			$(call write-optional-json-list, "dependencies", $(sort $(ALL_MODULES.$(m).ALL_DEPS))) \
+			$(call write-optional-json-list, "required", $(sort $(ALL_MODULES.$(m).REQUIRED_FROM_TARGET))) \
 			$(call write-optional-json-list, "shared_libs", $(sort $(ALL_MODULES.$(m).SHARED_LIBS))) \
 			$(call write-optional-json-list, "static_libs", $(sort $(ALL_MODULES.$(m).STATIC_LIBS))) \
 			$(call write-optional-json-list, "system_shared_libs", $(sort $(ALL_MODULES.$(m).SYSTEM_SHARED_LIBS))) \
@@ -39,7 +40,7 @@
 			$(call write-optional-json-list, "srcjars", $(sort $(ALL_MODULES.$(m).SRCJARS))) \
 			$(call write-optional-json-list, "classes_jar", $(sort $(ALL_MODULES.$(m).CLASSES_JAR))) \
 			$(call write-optional-json-list, "test_mainline_modules", $(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES))) \
-			$(call write-optional-json-bool, $(ALL_MODULES.$(m).IS_UNIT_TEST)) \
+			$(call write-optional-json-bool, "is_unit_test", $(ALL_MODULES.$(m).IS_UNIT_TEST)) \
 			$(call write-optional-json-list, "test_options_tags", $(sort $(ALL_MODULES.$(m).TEST_OPTIONS_TAGS))) \
 			$(call write-optional-json-list, "data", $(sort $(ALL_MODULES.$(m).TEST_DATA))) \
 			$(call write-optional-json-list, "runtime_dependencies", $(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES))) \
@@ -48,10 +49,13 @@
 			$(call write-optional-json-list, "supported_variants", $(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS))) \
 			$(call write-optional-json-list, "host_dependencies", $(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET))) \
 			$(call write-optional-json-list, "target_dependencies", $(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST))) \
+			$(call write-optional-json-bool, "test_module_config_base", $(ALL_MODULES.$(m).TEST_MODULE_CONFIG_BASE)) \
 		'}')'\n}\n' >> $@.tmp
 	$(PRIVATE_MERGE_JSON_OBJECTS) -o $@ $(PRIVATE_SOONG_MODULE_INFO) $@.tmp
 	rm $@.tmp
 
+.PHONY: module-info
+module-info: $(MODULE_INFO_JSON)
 
 droidcore-unbundled: $(MODULE_INFO_JSON)
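
The hunks above add a "required" field to each module-info entry and give module-info.json its own phony goal. A minimal usage sketch, assuming a lunched environment (the module name "adbd" is only an example, not part of this change):

    # regenerate just the module info database
    m module-info
    # inspect the new "required" list for some module
    python3 -c "import json, os; mi = json.load(open(os.environ['ANDROID_PRODUCT_OUT'] + '/module-info.json')); print(mi.get('adbd', {}).get('required', []))"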
 
diff --git a/core/tasks/performance-tests.mk b/core/tasks/performance-tests.mk
new file mode 100644
index 0000000..8702756
--- /dev/null
+++ b/core/tasks/performance-tests.mk
@@ -0,0 +1,56 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+.PHONY: performance-tests
+
+performance_tests_zip := $(PRODUCT_OUT)/performance-tests.zip
+# Create an artifact to include a list of test config files in performance-tests.
+performance_tests_list_zip := $(PRODUCT_OUT)/performance-tests_list.zip
+# Create an artifact to include all test config files in performance-tests.
+performance_tests_configs_zip := $(PRODUCT_OUT)/performance-tests_configs.zip
+
+$(performance_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(performance_tests_list_zip) $(performance_tests_configs_zip)
+$(performance_tests_zip) : PRIVATE_performance_tests_list_zip := $(performance_tests_list_zip)
+$(performance_tests_zip) : PRIVATE_performance_tests_configs_zip := $(performance_tests_configs_zip)
+$(performance_tests_zip) : PRIVATE_performance_tests_list := $(PRODUCT_OUT)/performance-tests_list
+$(performance_tests_zip) : $(COMPATIBILITY.performance-tests.FILES) $(SOONG_ZIP)
+	echo $(sort $(COMPATIBILITY.performance-tests.FILES)) | tr " " "\n" > $@.list
+	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
+	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_performance_tests_configs_zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
+	rm -f $(PRIVATE_performance_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_performance_tests_list)
+	$(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_performance_tests_list)
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_performance_tests_list_zip) -C $(dir $@) -f $(PRIVATE_performance_tests_list)
+	rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
+	  $(PRIVATE_performance_tests_list)
+
+performance-tests: $(performance_tests_zip)
+$(call dist-for-goals, performance-tests, $(performance_tests_zip) $(performance_tests_list_zip) $(performance_tests_configs_zip))
+
+$(call declare-1p-container,$(performance_tests_zip),)
+$(call declare-container-license-deps,$(performance_tests_zip),$(COMPATIBILITY.performance-tests.FILES),$(PRODUCT_OUT)/:/)
+
+tests: performance-tests
+
+# Reset temp vars
+performance_tests_zip :=
+performance_tests_list_zip :=
+performance_tests_configs_zip :=
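
A quick sketch of exercising the new goal, assuming a lunched tree (the three zip names come directly from the variables defined above; the actual contents depend on which modules join the performance-tests suite):

    m performance-tests
    ls "$ANDROID_PRODUCT_OUT"/performance-tests*.zip
    # expected names: performance-tests.zip  performance-tests_configs.zip  performance-tests_list.zip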
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index 7acac72..2fd4ce9 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -126,7 +126,7 @@
 $(full_target_img): $(full_target) $(addon_img_source_prop) | $(SOONG_ZIP)
 	@echo Packaging SDK Addon System-Image: $@
 	$(hide) mkdir -p $(dir $@)
-	cp -R $(PRODUCT_OUT)/data $(PRIVATE_STAGING_DIR)/data
+	cp -R $(PRODUCT_OUT)/data $(PRIVATE_STAGING_DIR)
 	$(hide) $(SOONG_ZIP) -o $@ -C $(dir $(PRIVATE_STAGING_DIR)) -D $(PRIVATE_STAGING_DIR)
 
 
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 4e78d89..86c23f8 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -26,7 +26,15 @@
 # Output variables:
 #   compatibility_zip: the path to the output zip file.
 
-test_suite_subdir := android-$(test_suite_name)
+special_mts_test_suites :=
+special_mts_test_suites += mcts
+special_mts_test_suites += $(mts_modules)
+ifneq ($(filter $(special_mts_test_suites),$(subst -, ,$(test_suite_name))),)
+	test_suite_subdir := android-mts
+else
+	test_suite_subdir := android-$(test_suite_name)
+endif
+
 out_dir := $(HOST_OUT)/$(test_suite_name)/$(test_suite_subdir)
 test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
 test_tools := $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
@@ -107,9 +115,9 @@
 compatibility_zip_deps += $(test_suite_notice_txt)
 compatibility_zip_resources += $(test_suite_notice_txt)
 
-compatibility_tests_list_zip := $(out_dir)-tests_list.zip
+compatibility_tests_list_zip := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)-tests_list.zip
 
-compatibility_zip := $(out_dir).zip
+compatibility_zip := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name).zip
 $(compatibility_zip) : .KATI_IMPLICIT_OUTPUTS := $(compatibility_tests_list_zip)
 $(compatibility_zip): PRIVATE_OUT_DIR := $(out_dir)
 $(compatibility_zip): PRIVATE_TOOLS := $(test_tools) $(test_suite_prebuilt_tools)
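
The added branch routes MCTS-flavored suites into the shared android-mts directory instead of android-<suite>. An illustration with hypothetical suite names (mts_modules is assumed to hold the per-module mts suite names defined elsewhere in the mts configuration):

    # test_suite_name      dash-split words        filter result                        test_suite_subdir
    # mcts-mediaprovider   "mcts mediaprovider"    "mcts" matches                       android-mts
    # mts-documentsui      "mts documentsui"       matches if listed in mts_modules     android-mts
    # cts                  "cts"                   no match                             android-cts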
diff --git a/core/version_util.mk b/core/version_util.mk
index 610cdaf..eb568be 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -221,10 +221,8 @@
 endif
 .KATI_READONLY := HAS_BUILD_NUMBER
 
-ifndef PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
-  # Used to set minimum supported target sdk version. Apps targeting sdk
-  # version lower than the set value will result in a warning being shown
-  # when any activity from the app is started.
-  PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION := 28
+ifdef PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
+  $(error Do not set PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION directly. Use RELEASE_PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION. value: $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
 endif
+PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION := $(RELEASE_PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION)
 .KATI_READONLY := PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
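
After this change the minimum supported target SDK flows only from the release configuration; a sketch of the intended flow (flag-file location illustrative, value taken from the removed default above):

    # somewhere in the active release configuration (location illustrative):
    #   RELEASE_PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION := 28
    # version_util.mk copies it into PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION,
    # and setting the platform variable directly now trips the $(error ...) above.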
diff --git a/envsetup.sh b/envsetup.sh
index fbe522d..06dadd3 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -48,81 +48,13 @@
 fi
 IMPORTING_ENVSETUP=true source $T/build/make/shell_utils.sh
 
-
-# Help
-function hmm() {
-cat <<EOF
-
-Run "m help" for help with the build system itself.
-
-Invoke ". build/envsetup.sh" from your shell to add the following functions to your environment:
-- lunch:      lunch <product_name>-<release_type>-<build_variant>
-              Selects <product_name> as the product to build, and <build_variant> as the variant to
-              build, and stores those selections in the environment to be read by subsequent
-              invocations of 'm' etc.
-- tapas:      tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
-              Sets up the build environment for building unbundled apps (APKs).
-- banchan:    banchan <module1> [<module2> ...] \\
-                      [arm|x86|arm64|riscv64|x86_64|arm64_only|x86_64only] [eng|userdebug|user]
-              Sets up the build environment for building unbundled modules (APEXes).
-- croot:      Changes directory to the top of the tree, or a subdirectory thereof.
-- m:          Makes from the top of the tree.
-- mm:         Builds and installs all of the modules in the current directory, and their
-              dependencies.
-- mmm:        Builds and installs all of the modules in the supplied directories, and their
-              dependencies.
-              To limit the modules being built use the syntax: mmm dir/:target1,target2.
-- mma:        Same as 'mm'
-- mmma:       Same as 'mmm'
-- provision:  Flash device with all required partitions. Options will be passed on to fastboot.
-- cgrep:      Greps on all local C/C++ files.
-- ggrep:      Greps on all local Gradle files.
-- gogrep:     Greps on all local Go files.
-- jgrep:      Greps on all local Java files.
-- jsongrep:   Greps on all local Json files.
-- ktgrep:     Greps on all local Kotlin files.
-- resgrep:    Greps on all local res/*.xml files.
-- mangrep:    Greps on all local AndroidManifest.xml files.
-- mgrep:      Greps on all local Makefiles and *.bp files.
-- owngrep:    Greps on all local OWNERS files.
-- rsgrep:     Greps on all local Rust files.
-- sepgrep:    Greps on all local sepolicy files.
-- sgrep:      Greps on all local source files.
-- tomlgrep:   Greps on all local Toml files.
-- pygrep:     Greps on all local Python files.
-- godir:      Go to the directory containing a file.
-- allmod:     List all modules.
-- gomod:      Go to the directory containing a module.
-- bmod:       Get the Bazel label of a Soong module if it is converted with bp2build.
-- pathmod:    Get the directory containing a module.
-- outmod:     Gets the location of a module's installed outputs with a certain extension.
-- dirmods:    Gets the modules defined in a given directory.
-- installmod: Adb installs a module's built APK.
-- refreshmod: Refresh list of modules for allmod/gomod/pathmod/outmod/installmod.
-- syswrite:   Remount partitions (e.g. system.img) as writable, rebooting if necessary.
-
-Environment options:
-- SANITIZE_HOST: Set to 'address' to use ASAN for all host modules.
-- ANDROID_QUIET_BUILD: set to 'true' to display only the essential messages.
-
-Look at the source to view more functions. The complete list is:
-EOF
-    local T=$(gettop)
-    local A=""
-    local i
-    for i in `cat $T/build/envsetup.sh | sed -n "/^[[:blank:]]*function /s/function \([a-z_]*\).*/\1/p" | sort | uniq`; do
-      A="$A $i"
-    done
-    echo $A
-}
-
 # Get all the build variables needed by this script in a single call to the build system.
 function build_build_var_cache()
 {
     local T=$(gettop)
     # Grep out the variable names from the script.
-    cached_vars=(`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`)
-    cached_abs_vars=(`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`)
+    cached_vars=(`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/_get_build_var_cached/) print $(i+1)}' | sort -u | tr '\n' ' '`)
+    cached_abs_vars=(`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/_get_abs_build_var_cached/) print $(i+1)}' | sort -u | tr '\n' ' '`)
     # Call the build system to dump the "<val>=<value>" pairs as a shell script.
     build_dicts_script=`\builtin cd $T; build/soong/soong_ui.bash --dumpvars-mode \
                         --vars="${cached_vars[*]}" \
@@ -163,7 +95,7 @@
 }
 
 # Get the value of a build variable as an absolute path.
-function get_abs_build_var()
+function _get_abs_build_var_cached()
 {
     if [ "$BUILD_VAR_CACHE_READY" = "true" ]
     then
@@ -180,7 +112,7 @@
 }
 
 # Get the exact value of a build variable.
-function get_build_var()
+function _get_build_var_cached()
 {
     if [ "$BUILD_VAR_CACHE_READY" = "true" ]
     then
@@ -196,40 +128,19 @@
     (\cd $T; build/soong/soong_ui.bash --dumpvar-mode $1)
 }
 
-# check to see if the supplied product is one we can build
-function check_product()
+# This logic matches envsetup.mk
+function get_host_prebuilt_prefix
 {
-    local T=$(gettop)
-    if [ ! "$T" ]; then
-        echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
-        return
-    fi
-        TARGET_PRODUCT=$1 \
-        TARGET_RELEASE= \
-        TARGET_BUILD_VARIANT= \
-        TARGET_BUILD_TYPE= \
-        TARGET_BUILD_APPS= \
-        get_build_var TARGET_DEVICE > /dev/null
-    # hide successful answers, but allow the errors to show
+  local un=$(uname)
+  if [[ $un == "Linux" ]] ; then
+    echo linux-x86
+  elif [[ $un == "Darwin" ]] ; then
+    echo darwin-x86
+  else
+    echo "Error: Invalid host operating system: $un" 1>&2
+  fi
 }
 
-VARIANT_CHOICES=(user userdebug eng)
-
-# check to see if the supplied variant is valid
-function check_variant()
-{
-    local v
-    for v in ${VARIANT_CHOICES[@]}
-    do
-        if [ "$v" = "$1" ]
-        then
-            return 0
-        fi
-    done
-    return 1
-}
-
-
 # Add directories to PATH that are dependent on the lunch target.
 # For directories that are not lunch-specific, add them in set_global_paths
 function set_lunch_paths()
@@ -273,25 +184,25 @@
     fi
 
     # And in with the new...
-    ANDROID_LUNCH_BUILD_PATHS=$(get_abs_build_var SOONG_HOST_OUT_EXECUTABLES)
-    ANDROID_LUNCH_BUILD_PATHS+=:$(get_abs_build_var HOST_OUT_EXECUTABLES)
+    ANDROID_LUNCH_BUILD_PATHS=$(_get_abs_build_var_cached SOONG_HOST_OUT_EXECUTABLES)
+    ANDROID_LUNCH_BUILD_PATHS+=:$(_get_abs_build_var_cached HOST_OUT_EXECUTABLES)
 
     # Append llvm binutils prebuilts path to ANDROID_LUNCH_BUILD_PATHS.
-    local ANDROID_LLVM_BINUTILS=$(get_abs_build_var ANDROID_CLANG_PREBUILTS)/llvm-binutils-stable
+    local ANDROID_LLVM_BINUTILS=$(_get_abs_build_var_cached ANDROID_CLANG_PREBUILTS)/llvm-binutils-stable
     ANDROID_LUNCH_BUILD_PATHS+=:$ANDROID_LLVM_BINUTILS
 
     # Set up ASAN_SYMBOLIZER_PATH for SANITIZE_HOST=address builds.
     export ASAN_SYMBOLIZER_PATH=$ANDROID_LLVM_BINUTILS/llvm-symbolizer
 
     # Append asuite prebuilts path to ANDROID_LUNCH_BUILD_PATHS.
-    local os_arch=$(get_build_var HOST_PREBUILT_TAG)
+    local os_arch=$(_get_build_var_cached HOST_PREBUILT_TAG)
     ANDROID_LUNCH_BUILD_PATHS+=:$T/prebuilts/asuite/acloud/$os_arch
     ANDROID_LUNCH_BUILD_PATHS+=:$T/prebuilts/asuite/aidegen/$os_arch
     ANDROID_LUNCH_BUILD_PATHS+=:$T/prebuilts/asuite/atest/$os_arch
 
-    export ANDROID_JAVA_HOME=$(get_abs_build_var ANDROID_JAVA_HOME)
+    export ANDROID_JAVA_HOME=$(_get_abs_build_var_cached ANDROID_JAVA_HOME)
     export JAVA_HOME=$ANDROID_JAVA_HOME
-    export ANDROID_JAVA_TOOLCHAIN=$(get_abs_build_var ANDROID_JAVA_TOOLCHAIN)
+    export ANDROID_JAVA_TOOLCHAIN=$(_get_abs_build_var_cached ANDROID_JAVA_TOOLCHAIN)
     ANDROID_LUNCH_BUILD_PATHS+=:$ANDROID_JAVA_TOOLCHAIN
 
     # Fix up PYTHONPATH
@@ -320,20 +231,20 @@
     export PYTHONPATH=$ANDROID_PYTHONPATH$PYTHONPATH
 
     unset ANDROID_PRODUCT_OUT
-    export ANDROID_PRODUCT_OUT=$(get_abs_build_var PRODUCT_OUT)
+    export ANDROID_PRODUCT_OUT=$(_get_abs_build_var_cached PRODUCT_OUT)
     export OUT=$ANDROID_PRODUCT_OUT
 
     unset ANDROID_HOST_OUT
-    export ANDROID_HOST_OUT=$(get_abs_build_var HOST_OUT)
+    export ANDROID_HOST_OUT=$(_get_abs_build_var_cached HOST_OUT)
 
     unset ANDROID_SOONG_HOST_OUT
-    export ANDROID_SOONG_HOST_OUT=$(get_abs_build_var SOONG_HOST_OUT)
+    export ANDROID_SOONG_HOST_OUT=$(_get_abs_build_var_cached SOONG_HOST_OUT)
 
     unset ANDROID_HOST_OUT_TESTCASES
-    export ANDROID_HOST_OUT_TESTCASES=$(get_abs_build_var HOST_OUT_TESTCASES)
+    export ANDROID_HOST_OUT_TESTCASES=$(_get_abs_build_var_cached HOST_OUT_TESTCASES)
 
     unset ANDROID_TARGET_OUT_TESTCASES
-    export ANDROID_TARGET_OUT_TESTCASES=$(get_abs_build_var TARGET_OUT_TESTCASES)
+    export ANDROID_TARGET_OUT_TESTCASES=$(_get_abs_build_var_cached TARGET_OUT_TESTCASES)
 
     # Finally, set PATH
     export PATH=$ANDROID_LUNCH_BUILD_PATHS:$PATH
@@ -406,7 +317,7 @@
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
         return
     fi
-    get_build_var report_config
+    _get_build_var_cached report_config
 }
 
 function set_stuff_for_environment()
@@ -467,9 +378,6 @@
         fi
     done
 
-    if should_add_completion bit ; then
-        complete -C "bit --tab" bit
-    fi
     if [ -z "$ZSH_VERSION" ]; then
         # Doesn't work in zsh.
         complete -o nospace -F _croot croot
@@ -477,245 +385,15 @@
         complete -F _bazel__complete -o nospace b
     fi
     complete -F _lunch lunch
+    complete -F _lunch_completion lunch2
 
     complete -F _complete_android_module_names pathmod
     complete -F _complete_android_module_names gomod
     complete -F _complete_android_module_names outmod
     complete -F _complete_android_module_names installmod
-    complete -F _complete_android_module_names bmod
     complete -F _complete_android_module_names m
 }
 
-function multitree_lunch_help()
-{
-    echo "usage: lunch PRODUCT-RELEASE-VARIANT" 1>&2
-    echo "    Set up android build environment based on a product short name and variant" 1>&2
-    echo 1>&2
-    echo "lunch COMBO_FILE VARIANT" 1>&2
-    echo "    Set up android build environment based on a specific lunch combo file" 1>&2
-    echo "    and variant." 1>&2
-    echo 1>&2
-    echo "lunch --print [CONFIG]" 1>&2
-    echo "    Print the contents of a configuration.  If CONFIG is supplied, that config" 1>&2
-    echo "    will be flattened and printed.  If CONFIG is not supplied, the currently" 1>&2
-    echo "    selected config will be printed.  Returns 0 on success or nonzero on error." 1>&2
-    echo 1>&2
-    echo "lunch --list" 1>&2
-    echo "    List all possible combo files available in the current tree" 1>&2
-    echo 1>&2
-    echo "lunch --help" 1>&2
-    echo "lunch -h" 1>&2
-    echo "    Prints this message." 1>&2
-}
-
-function multitree_lunch()
-{
-    local code
-    local results
-    # Lunch must be run in the topdir, but this way we get a clear error
-    # message, instead of FileNotFound.
-    local T=$(multitree_gettop)
-    if [ -z "$T" ]; then
-      _multitree_lunch_error
-      return 1
-    fi
-    if $(echo "$1" | grep -q '^-') ; then
-        # Calls starting with a -- argument are passed directly and the function
-        # returns with the lunch.py exit code.
-        "${T}/orchestrator/build/orchestrator/core/lunch.py" "$@"
-        code=$?
-        if [[ $code -eq 2 ]] ; then
-          echo 1>&2
-          multitree_lunch_help
-          return $code
-        elif [[ $code -ne 0 ]] ; then
-          return $code
-        fi
-    else
-        # All other calls go through the --lunch variant of lunch.py
-        results=($(${T}/orchestrator/build/orchestrator/core/lunch.py --lunch "$@"))
-        code=$?
-        if [[ $code -eq 2 ]] ; then
-          echo 1>&2
-          multitree_lunch_help
-          return $code
-        elif [[ $code -ne 0 ]] ; then
-          return $code
-        fi
-
-        export TARGET_BUILD_COMBO=${results[0]}
-        export TARGET_BUILD_VARIANT=${results[1]}
-    fi
-}
-
-function choosetype()
-{
-    echo "Build type choices are:"
-    echo "     1. release"
-    echo "     2. debug"
-    echo
-
-    local DEFAULT_NUM DEFAULT_VALUE
-    DEFAULT_NUM=1
-    DEFAULT_VALUE=release
-
-    export TARGET_BUILD_TYPE=
-    local ANSWER
-    while [ -z $TARGET_BUILD_TYPE ]
-    do
-        echo -n "Which would you like? ["$DEFAULT_NUM"] "
-        if [ -z "$1" ] ; then
-            read ANSWER
-        else
-            echo $1
-            ANSWER=$1
-        fi
-        case $ANSWER in
-        "")
-            export TARGET_BUILD_TYPE=$DEFAULT_VALUE
-            ;;
-        1)
-            export TARGET_BUILD_TYPE=release
-            ;;
-        release)
-            export TARGET_BUILD_TYPE=release
-            ;;
-        2)
-            export TARGET_BUILD_TYPE=debug
-            ;;
-        debug)
-            export TARGET_BUILD_TYPE=debug
-            ;;
-        *)
-            echo
-            echo "I didn't understand your response.  Please try again."
-            echo
-            ;;
-        esac
-        if [ -n "$1" ] ; then
-            break
-        fi
-    done
-
-    build_build_var_cache
-    set_stuff_for_environment
-    destroy_build_var_cache
-}
-
-#
-# This function isn't really right:  It chooses a TARGET_PRODUCT
-# based on the list of boards.  Usually, that gets you something
-# that kinda works with a generic product, but really, you should
-# pick a product by name.
-#
-function chooseproduct()
-{
-    local default_value
-    if [ "x$TARGET_PRODUCT" != x ] ; then
-        default_value=$TARGET_PRODUCT
-    else
-        default_value=aosp_arm
-    fi
-
-    export TARGET_BUILD_APPS=
-    export TARGET_PRODUCT=
-    local ANSWER
-    while [ -z "$TARGET_PRODUCT" ]
-    do
-        echo -n "Which product would you like? [$default_value] "
-        if [ -z "$1" ] ; then
-            read ANSWER
-        else
-            echo $1
-            ANSWER=$1
-        fi
-
-        if [ -z "$ANSWER" ] ; then
-            export TARGET_PRODUCT=$default_value
-        else
-            if check_product $ANSWER
-            then
-                export TARGET_PRODUCT=$ANSWER
-            else
-                echo "** Not a valid product: $ANSWER"
-            fi
-        fi
-        if [ -n "$1" ] ; then
-            break
-        fi
-    done
-
-    build_build_var_cache
-    set_stuff_for_environment
-    destroy_build_var_cache
-}
-
-function choosevariant()
-{
-    echo "Variant choices are:"
-    local index=1
-    local v
-    for v in ${VARIANT_CHOICES[@]}
-    do
-        # The product name is the name of the directory containing
-        # the makefile we found, above.
-        echo "     $index. $v"
-        index=$(($index+1))
-    done
-
-    local default_value=eng
-    local ANSWER
-
-    export TARGET_BUILD_VARIANT=
-    while [ -z "$TARGET_BUILD_VARIANT" ]
-    do
-        echo -n "Which would you like? [$default_value] "
-        if [ -z "$1" ] ; then
-            read ANSWER
-        else
-            echo $1
-            ANSWER=$1
-        fi
-
-        if [ -z "$ANSWER" ] ; then
-            export TARGET_BUILD_VARIANT=$default_value
-        elif (echo -n $ANSWER | grep -q -e "^[0-9][0-9]*$") ; then
-            if [ "$ANSWER" -le "${#VARIANT_CHOICES[@]}" ] ; then
-                export TARGET_BUILD_VARIANT=${VARIANT_CHOICES[@]:$(($ANSWER-1)):1}
-            fi
-        else
-            if check_variant $ANSWER
-            then
-                export TARGET_BUILD_VARIANT=$ANSWER
-            else
-                echo "** Not a valid variant: $ANSWER"
-            fi
-        fi
-        if [ -n "$1" ] ; then
-            break
-        fi
-    done
-}
-
-function choosecombo()
-{
-    choosetype $1
-
-    echo
-    echo
-    chooseproduct $2
-
-    echo
-    echo
-    choosevariant $3
-
-    echo
-    build_build_var_cache
-    set_stuff_for_environment
-    printconfig
-    destroy_build_var_cache
-}
-
 function add_lunch_combo()
 {
     if [ -n "$ZSH_VERSION" ]; then
@@ -730,7 +408,7 @@
 {
     local uname=$(uname)
     local choices
-    choices=$(TARGET_BUILD_APPS= TARGET_PRODUCT= TARGET_RELEASE= TARGET_BUILD_VARIANT= get_build_var COMMON_LUNCH_CHOICES 2>/dev/null)
+    choices=$(TARGET_BUILD_APPS= TARGET_PRODUCT= TARGET_RELEASE= TARGET_BUILD_VARIANT= _get_build_var_cached COMMON_LUNCH_CHOICES 2>/dev/null)
     local ret=$?
 
     echo
@@ -789,7 +467,7 @@
         selection=aosp_cf_x86_64_phone-trunk_staging-eng
     elif (echo -n $answer | grep -q -e "^[0-9][0-9]*$")
     then
-        local choices=($(TARGET_BUILD_APPS= TARGET_PRODUCT= TARGET_RELEASE= TARGET_BUILD_VARIANT= get_build_var COMMON_LUNCH_CHOICES 2>/dev/null))
+        local choices=($(TARGET_BUILD_APPS= TARGET_PRODUCT= TARGET_RELEASE= TARGET_BUILD_VARIANT= _get_build_var_cached COMMON_LUNCH_CHOICES 2>/dev/null))
         if [ $answer -le ${#choices[@]} ]
         then
             # array in zsh starts from 1 instead of 0.
@@ -819,9 +497,18 @@
         return 1
     fi
 
+    _lunch_meat $product $release $variant
+}
+
+function _lunch_meat()
+{
+    local product=$1
+    local release=$2
+    local variant=$3
+
     TARGET_PRODUCT=$product \
-    TARGET_BUILD_VARIANT=$variant \
     TARGET_RELEASE=$release \
+    TARGET_BUILD_VARIANT=$variant \
     build_build_var_cache
     if [ $? -ne 0 ]
     then
@@ -831,8 +518,8 @@
         fi
         return 1
     fi
-    export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
-    export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
+    export TARGET_PRODUCT=$(_get_build_var_cached TARGET_PRODUCT)
+    export TARGET_BUILD_VARIANT=$(_get_build_var_cached TARGET_BUILD_VARIANT)
     export TARGET_RELEASE=$release
     # Note this is the string "release", not the value of the variable.
     export TARGET_BUILD_TYPE=release
@@ -842,14 +529,11 @@
     set_stuff_for_environment
     [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || printconfig
 
-    if [ "${TARGET_BUILD_VARIANT}" = "userdebug" ] && [[  -z "${ANDROID_QUIET_BUILD}" ]]; then
-      echo
-      echo "Want FASTER LOCAL BUILDS? Use -eng instead of -userdebug (however for" \
-        "performance benchmarking continue to use userdebug)"
-    fi
-    if [ $used_lunch_menu -eq 1 ]; then
-      echo
-      echo "Hint: next time you can simply run 'lunch $selection'"
+    if [[ -z "${ANDROID_QUIET_BUILD}" ]]; then
+        local spam_for_lunch=$(gettop)/build/make/tools/envsetup/spam_for_lunch
+        if [[ -x $spam_for_lunch ]]; then
+            $spam_for_lunch
+        fi
     fi
 
     destroy_build_var_cache
@@ -869,13 +553,119 @@
     prev="${COMP_WORDS[COMP_CWORD-1]}"
 
     if [ -z "$COMMON_LUNCH_CHOICES_CACHE" ]; then
-        COMMON_LUNCH_CHOICES_CACHE=$(TARGET_BUILD_APPS= get_build_var COMMON_LUNCH_CHOICES)
+        COMMON_LUNCH_CHOICES_CACHE=$(TARGET_BUILD_APPS= _get_build_var_cached COMMON_LUNCH_CHOICES)
     fi
 
     COMPREPLY=( $(compgen -W "${COMMON_LUNCH_CHOICES_CACHE}" -- ${cur}) )
     return 0
 }
 
+function _lunch_usage()
+{
+    (
+        echo "The lunch command selects the configuration to use for subsequent"
+        echo "Android builds."
+        echo
+        echo "Usage: lunch TARGET_PRODUCT [TARGET_RELEASE [TARGET_BUILD_VARIANT]]"
+        echo
+        echo "  Choose the product, release and variant to use. If not"
+        echo "  supplied, TARGET_RELEASE will be 'trunk_staging' and"
+        echo "  TARGET_BUILD_VARIANT will be 'eng'"
+        echo
+        echo
+        echo "Usage: lunch TARGET_PRODUCT-TARGET_RELEASE-TARGET_BUILD_VARIANT"
+        echo
+        echo "  Choose the product, release and variant to use. This"
+        echo "  legacy format is maintained for compatibility."
+        echo
+        echo
+        echo "Note that the previous interactive menu and hard-coded list of"
+        echo "curated targets have been removed. If you would like the list of"
+        echo "products, the release configs for a particular product, or the"
+        echo "variants, run list_products, list_release_configs, or list_variants"
+        echo "respectively."
+        echo
+    ) 1>&2
+}
+
+function lunch2()
+{
+    if [[ $# -eq 1 && $1 = "--help" ]]; then
+        _lunch_usage
+        return 0
+    fi
+    if [[ $# -eq 0 ]]; then
+        echo "No target specified. See lunch --help" 1>&2
+        return 1
+    fi
+    if [[ $# -gt 3 ]]; then
+        echo "Too many parameters given. See lunch --help" 1>&2
+        return 1
+    fi
+
+    local product release variant
+
+    # Handle the legacy format
+    local legacy=$(echo $1 | grep "-")
+    if [[ $# -eq 1 && -n $legacy ]]; then
+        IFS="-" read -r product release variant <<< "$1"
+        if [[ -z "$product" ]] || [[ -z "$release" ]] || [[ -z "$variant" ]]; then
+            echo "Invalid lunch combo: $1" 1>&2
+            echo "Valid combos must be of the form <product>-<release>-<variant> when using" 1>&2
+            echo "the legacy format.  Run 'lunch --help' for usage." 1>&2
+            return 1
+        fi
+    fi
+
+    # Handle the new format.
+    if [[ -z $legacy ]]; then
+        product=$1
+        release=$2
+        if [[ -z $release ]]; then
+            release=trunk_staging
+        fi
+        variant=$3
+        if [[ -z $variant ]]; then
+            variant=eng
+        fi
+    fi
+
+    # Validate the selection and set all the environment stuff
+    _lunch_meat $product $release $variant
+}
+
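
Example invocations the new lunch2 parser accepts (the product name is illustrative; the defaults match the code above):

    lunch2 aosp_cf_x86_64_phone                          # release defaults to trunk_staging, variant to eng
    lunch2 aosp_cf_x86_64_phone trunk_staging userdebug  # fully specified, new positional form
    lunch2 aosp_cf_x86_64_phone-trunk_staging-eng        # legacy dashed combo, still parsed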
+unset ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE
+unset ANDROID_LUNCH_COMPLETION_CHOSEN_PRODUCT
+unset ANDROID_LUNCH_COMPLETION_RELEASE_CACHE
+# Tab completion for lunch2.
+function _lunch_completion()
+{
+    # Available products
+    if [[ $COMP_CWORD -eq 1 ]] ; then
+        if [[ -z $ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE ]]; then
+            ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE=$(list_products)
+        fi
+        COMPREPLY=( $(compgen -W "${ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE}" -- "${COMP_WORDS[COMP_CWORD]}") )
+    fi
+
+    # Available release configs
+    if [[ $COMP_CWORD -eq 2 ]] ; then
+        if [[ -z $ANDROID_LUNCH_COMPLETION_RELEASE_CACHE || $ANDROID_LUNCH_COMPLETION_CHOSEN_PRODUCT != ${COMP_WORDS[1]} ]] ; then
+            ANDROID_LUNCH_COMPLETION_RELEASE_CACHE=$(list_releases ${COMP_WORDS[1]})
+            ANDROID_LUNCH_COMPLETION_CHOSEN_PRODUCT=${COMP_WORDS[1]}
+        fi
+        COMPREPLY=( $(compgen -W "${ANDROID_LUNCH_COMPLETION_RELEASE_CACHE}" -- "${COMP_WORDS[COMP_CWORD]}") )
+    fi
+
+    # Available variants
+    if [[ $COMP_CWORD -eq 3 ]] ; then
+        COMPREPLY=(user userdebug eng)
+    fi
+
+    return 0
+}
+
+
 # Configures the build to build unbundled apps.
 # Run tapas with one or more app names (from LOCAL_PACKAGE_NAME)
 function tapas()
@@ -1013,34 +803,6 @@
     destroy_build_var_cache
 }
 
-# TODO: Merge into gettop as part of launching multitree
-function multitree_gettop
-{
-    local TOPFILE=orchestrator/build/make/core/envsetup.mk
-    if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
-        # The following circumlocution ensures we remove symlinks from TOP.
-        (cd "$TOP"; PWD= /bin/pwd)
-    else
-        if [ -f $TOPFILE ] ; then
-            # The following circumlocution (repeated below as well) ensures
-            # that we record the true directory name and not one that is
-            # faked up with symlink names.
-            PWD= /bin/pwd
-        else
-            local HERE=$PWD
-            local T=
-            while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
-                \cd ..
-                T=`PWD= /bin/pwd -P`
-            done
-            \cd "$HERE"
-            if [ -f "$T/$TOPFILE" ]; then
-                echo "$T"
-            fi
-        fi
-    fi
-}
-
 function croot()
 {
     local T=$(gettop)
@@ -1100,290 +862,17 @@
         echo "Command adb not found; try lunch (and building) first?"
         return 1
     fi
-    $ADB "${@}"
+    run_tool_with_logging "ADB" $ADB "${@}"
 }
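
Both adb and fastboot (below) now pass through run_tool_with_logging, which is presumed to come from the shell_utils.sh sourced near the top of this file; a rough, hedged sketch of the wrapper's shape, not the actual implementation:

    # illustrative only: record which tool was invoked, then run it with its
    # original arguments, preserving the exit status
    function run_tool_with_logging() {
        local tool_tag="$1"; shift
        # ... emit a tool-event log entry keyed on "$tool_tag" (real logic lives elsewhere) ...
        "$@"
    }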
 
-# simplified version of ps; output in the form
-# <pid> <procname>
-function qpid() {
-    local prepend=''
-    local append=''
-    if [ "$1" = "--exact" ]; then
-        prepend=' '
-        append='$'
-        shift
-    elif [ "$1" = "--help" -o "$1" = "-h" ]; then
-        echo "usage: qpid [[--exact] <process name|pid>"
-        return 255
+function fastboot() {
+    local FASTBOOT=$(command which fastboot)
+    if [ -z "$FASTBOOT" ]; then
+        echo "Command fastboot not found; try lunch (and building) first?"
+        return 1
     fi
-
-    local EXE="$1"
-    if [ "$EXE" ] ; then
-        qpid | \grep "$prepend$EXE$append"
-    else
-        adb shell ps \
-            | tr -d '\r' \
-            | sed -e 1d -e 's/^[^ ]* *\([0-9]*\).* \([^ ]*\)$/\1 \2/'
-    fi
-}
-
-# syswrite - disable verity, reboot if needed, and remount image
-#
-# Easy way to make system.img/etc writable
-function syswrite() {
-  adb wait-for-device && adb root && adb wait-for-device || return 1
-  if [[ $(adb disable-verity | grep -i "reboot") ]]; then
-      echo "rebooting"
-      adb reboot && adb wait-for-device && adb root && adb wait-for-device || return 1
-  fi
-  adb remount || return 1
-}
-
-# coredump_setup - enable core dumps globally for any process
-#                  that has the core-file-size limit set correctly
-#
-# NOTE: You must call also coredump_enable for a specific process
-#       if its core-file-size limit is not set already.
-# NOTE: Core dumps are written to ramdisk; they will not survive a reboot!
-
-function coredump_setup()
-{
-    echo "Getting root...";
-    adb root;
-    adb wait-for-device;
-
-    echo "Remounting root partition read-write...";
-    adb shell mount -w -o remount -t rootfs rootfs;
-    sleep 1;
-    adb wait-for-device;
-    adb shell mkdir -p /cores;
-    adb shell mount -t tmpfs tmpfs /cores;
-    adb shell chmod 0777 /cores;
-
-    echo "Granting SELinux permission to dump in /cores...";
-    adb shell restorecon -R /cores;
-
-    echo "Set core pattern.";
-    adb shell 'echo /cores/core.%p > /proc/sys/kernel/core_pattern';
-
-    echo "Done."
-}
-
-# coredump_enable - enable core dumps for the specified process
-# $1 = PID of process (e.g., $(pid mediaserver))
-#
-# NOTE: coredump_setup must have been called as well for a core
-#       dump to actually be generated.
-
-function coredump_enable()
-{
-    local PID=$1;
-    if [ -z "$PID" ]; then
-        printf "Expecting a PID!\n";
-        return;
-    fi;
-    echo "Setting core limit for $PID to infinite...";
-    adb shell /system/bin/ulimit -P $PID -c unlimited
-}
-
-# core - send SIGV and pull the core for process
-# $1 = PID of process (e.g., $(pid mediaserver))
-#
-# NOTE: coredump_setup must be called once per boot for core dumps to be
-#       enabled globally.
-
-function core()
-{
-    local PID=$1;
-
-    if [ -z "$PID" ]; then
-        printf "Expecting a PID!\n";
-        return;
-    fi;
-
-    local CORENAME=core.$PID;
-    local COREPATH=/cores/$CORENAME;
-    local SIG=SEGV;
-
-    coredump_enable $1;
-
-    local done=0;
-    while [ $(adb shell "[ -d /proc/$PID ] && echo -n yes") ]; do
-        printf "\tSending SIG%s to %d...\n" $SIG $PID;
-        adb shell kill -$SIG $PID;
-        sleep 1;
-    done;
-
-    adb shell "while [ ! -f $COREPATH ] ; do echo waiting for $COREPATH to be generated; sleep 1; done"
-    echo "Done: core is under $COREPATH on device.";
-}
-
-# systemstack - dump the current stack trace of all threads in the system process
-# to the usual ANR traces file
-function systemstack()
-{
-    stacks system_server
-}
-
-# Read the ELF header from /proc/$PID/exe to determine if the process is
-# 64-bit.
-function is64bit()
-{
-    local PID="$1"
-    if [ "$PID" ] ; then
-        if [[ "$(adb shell cat /proc/$PID/exe | xxd -l 1 -s 4 -p)" -eq "02" ]] ; then
-            echo "64"
-        else
-            echo ""
-        fi
-    else
-        echo ""
-    fi
-}
-
-case `uname -s` in
-    Darwin)
-        function sgrep()
-        {
-            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|kt|xml|sh|mk|aidl|vts|proto|rs|go)' \
-                -exec grep --color -n "$@" {} +
-        }
-
-        ;;
-    *)
-        function sgrep()
-        {
-            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|kt\|xml\|sh\|mk\|aidl\|vts\|proto\|rs\|go\)' \
-                -exec grep --color -n "$@" {} +
-        }
-        ;;
-esac
-
-function gettargetarch
-{
-    get_build_var TARGET_ARCH
-}
-
-function ggrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.gradle" \
-        -exec grep --color -n "$@" {} +
-}
-
-function gogrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.go" \
-        -exec grep --color -n "$@" {} +
-}
-
-function jgrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.java" \
-        -exec grep --color -n "$@" {} +
-}
-
-function rsgrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.rs" \
-        -exec grep --color -n "$@" {} +
-}
-
-function jsongrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.json" \
-        -exec grep --color -n "$@" {} +
-}
-
-function tomlgrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.toml" \
-        -exec grep --color -n "$@" {} +
-}
-
-function ktgrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.kt" \
-        -exec grep --color -n "$@" {} +
-}
-
-function cgrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f \( -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) \
-        -exec grep --color -n "$@" {} +
-}
-
-function resgrep()
-{
-    local dir
-    for dir in `find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -name res -type d`; do
-        find $dir -type f -name '*\.xml' -exec grep --color -n "$@" {} +
-    done
-}
-
-function mangrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -name 'AndroidManifest.xml' \
-        -exec grep --color -n "$@" {} +
-}
-
-function owngrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -name 'OWNERS' \
-        -exec grep --color -n "$@" {} +
-}
-
-function sepgrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -name sepolicy -type d \
-        -exec grep --color -n -r --exclude-dir=\.git "$@" {} +
-}
-
-function rcgrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.rc*" \
-        -exec grep --color -n "$@" {} +
-}
-
-function pygrep()
-{
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.py" \
-        -exec grep --color -n "$@" {} +
-}
-
-case `uname -s` in
-    Darwin)
-        function mgrep()
-        {
-            find -E . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o \( -iregex '.*/(Makefile|Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -o -regex '(.*/)?(build|soong)/.*[^/]*\.go' \) -type f \
-                -exec grep --color -n "$@" {} +
-        }
-
-        function treegrep()
-        {
-            find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|hpp|S|java|kt|xml)' \
-                -exec grep --color -n -i "$@" {} +
-        }
-
-        ;;
-    *)
-        function mgrep()
-        {
-            find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o \( -regextype posix-egrep -iregex '(.*\/Makefile|.*\/Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -o -regextype posix-extended -regex '(.*/)?(build|soong)/.*[^/]*\.go' \) -type f \
-                -exec grep --color -n "$@" {} +
-        }
-
-        function treegrep()
-        {
-            find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|hpp|S|java|kt|xml)' -type f \
-                -exec grep --color -n -i "$@" {} +
-        }
-
-        ;;
-esac
-
-function getprebuilt
-{
-    get_abs_build_var ANDROID_PREBUILTS
+    # Support tool event logging for fastboot command.
+    run_tool_with_logging "FASTBOOT" $FASTBOOT "${@}"
 }
 
 # communicate with a running device or emulator, set up necessary state,
@@ -1438,111 +927,6 @@
     hat -JXmx512m $localFile
 }
 
-function getbugreports()
-{
-    local reports=(`adb shell ls /sdcard/bugreports | tr -d '\r'`)
-
-    if [ ! "$reports" ]; then
-        echo "Could not locate any bugreports."
-        return
-    fi
-
-    local report
-    for report in ${reports[@]}
-    do
-        echo "/sdcard/bugreports/${report}"
-        adb pull /sdcard/bugreports/${report} ${report}
-        gunzip ${report}
-    done
-}
-
-function getsdcardpath()
-{
-    adb ${adbOptions} shell echo -n \$\{EXTERNAL_STORAGE\}
-}
-
-function getscreenshotpath()
-{
-    echo "$(getsdcardpath)/Pictures/Screenshots"
-}
-
-function getlastscreenshot()
-{
-    local screenshot_path=$(getscreenshotpath)
-    local screenshot=`adb ${adbOptions} ls ${screenshot_path} | grep Screenshot_[0-9-]*.*\.png | sort -rk 3 | cut -d " " -f 4 | head -n 1`
-    if [ "$screenshot" = "" ]; then
-        echo "No screenshots found."
-        return
-    fi
-    echo "${screenshot}"
-    adb ${adbOptions} pull ${screenshot_path}/${screenshot}
-}
-
-function startviewserver()
-{
-    local port=4939
-    if [ $# -gt 0 ]; then
-            port=$1
-    fi
-    adb shell service call window 1 i32 $port
-}
-
-function stopviewserver()
-{
-    adb shell service call window 2
-}
-
-function isviewserverstarted()
-{
-    adb shell service call window 3
-}
-
-function key_home()
-{
-    adb shell input keyevent 3
-}
-
-function key_back()
-{
-    adb shell input keyevent 4
-}
-
-function key_menu()
-{
-    adb shell input keyevent 82
-}
-
-function smoketest()
-{
-    if [ ! "$ANDROID_PRODUCT_OUT" ]; then
-        echo "Couldn't locate output files.  Try running 'lunch' first." >&2
-        return
-    fi
-    local T=$(gettop)
-    if [ ! "$T" ]; then
-        echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
-        return
-    fi
-
-    (\cd "$T" && mmm tests/SmokeTest) &&
-      adb uninstall com.android.smoketest > /dev/null &&
-      adb uninstall com.android.smoketest.tests > /dev/null &&
-      adb install $ANDROID_PRODUCT_OUT/data/app/SmokeTestApp.apk &&
-      adb install $ANDROID_PRODUCT_OUT/data/app/SmokeTest.apk &&
-      adb shell am instrument -w com.android.smoketest.tests/android.test.InstrumentationTestRunner
-}
-
-# simple shortcut to the runtest command
-function runtest()
-{
-    local T=$(gettop)
-    if [ ! "$T" ]; then
-        echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
-        return
-    fi
-    ("$T"/development/testrunner/runtest.py $@)
-}
-
 function godir () {
     if [[ -z "$1" ]]; then
         echo "Usage: godir <regex>"
@@ -1594,146 +978,10 @@
     \cd $T/$pathname
 }
 
-# Update module-info.json in out.
-function refreshmod() {
-    if [ ! "$ANDROID_PRODUCT_OUT" ]; then
-        echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
-        return 1
-    fi
-
-    echo "Refreshing modules (building module-info.json). Log at $ANDROID_PRODUCT_OUT/module-info.json.build.log." >&2
-
-    # for the output of the next command
-    mkdir -p $ANDROID_PRODUCT_OUT || return 1
-
-    # Note, can't use absolute path because of the way make works.
-    m $(get_build_var PRODUCT_OUT)/module-info.json \
-        > $ANDROID_PRODUCT_OUT/module-info.json.build.log 2>&1
-}
-
-# Verifies that module-info.txt exists, returning nonzero if it doesn't.
-function verifymodinfo() {
-    if [ ! "$ANDROID_PRODUCT_OUT" ]; then
-        if [ "$QUIET_VERIFYMODINFO" != "true" ] ; then
-            echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
-        fi
-        return 1
-    fi
-
-    if [ ! -f "$ANDROID_PRODUCT_OUT/module-info.json" ]; then
-        if [ "$QUIET_VERIFYMODINFO" != "true" ] ; then
-            echo "Could not find module-info.json. Please run 'refreshmod' first." >&2
-        fi
-        return 1
-    fi
-}
-
-# List all modules for the current device, as cached in all_modules.txt. If any build change is
-# made and it should be reflected in the output, you should run `m nothing` first.
-function allmod() {
-    cat $ANDROID_PRODUCT_OUT/all_modules.txt 2>/dev/null
-}
-
-# Return the Bazel label of a Soong module if it is converted with bp2build.
-function bmod()
-(
-    if [ $# -eq 0 ]; then
-        echo "usage: bmod <module 1> <module 2> ... <module n>" >&2
-        return 1
-    fi
-
-    # We could run bp2build here, but it might trigger bp2build invalidation
-    # when used with `b` (e.g. --run_soong_tests) and/or add unnecessary waiting
-    # time overhead.
-    #
-    # For a snappy result, use the latest generated version in soong_injection,
-    # and ask users to run m bp2build if it doesn't exist.
-    converted_json="$(get_abs_build_var OUT_DIR)/soong/soong_injection/metrics/converted_modules_path_map.json"
-
-    if [ ! -f ${converted_json} ]; then
-      echo "bp2build files not found. Have you ran 'm bp2build'?" >&2
-      return 1
-    fi
-
-    modules=()
-    for m in "$@"; do
-        modules+=("\"$m\",")
-    done
-    local res=$(python3 -c "import json
-modules = [${modules[*]}]
-converted_json='$converted_json'
-bp2build_converted_map = json.load(open(converted_json))
-for module in modules:
-    if module not in bp2build_converted_map:
-        print(module + ' is not converted to Bazel.')
-    else:
-        print(bp2build_converted_map[module] + ':' + module)")
-
-    echo "${res}"
-    unconverted_count=$(echo "${res}" | grep -c "not converted to Bazel")
-    if [[ ${unconverted_count} -ne 0 ]]; then
-        return 1
-    fi
-)
-
-# Get the path of a specific module in the android tree, as cached in module-info.json.
-# If any build change is made, and it should be reflected in the output, you should run
-# 'refreshmod' first.  Note: This is the inverse of dirmods.
-function pathmod() {
-    if [[ $# -ne 1 ]]; then
-        echo "usage: pathmod <module>" >&2
-        return 1
-    fi
-
-    verifymodinfo || return 1
-
-    local relpath=$(python3 -c "import json, os
-module = '$1'
-module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
-if module not in module_info:
-    exit(1)
-print(module_info[module]['path'][0])" 2>/dev/null)
-
-    if [ -z "$relpath" ]; then
-        echo "Could not find module '$1' (try 'refreshmod' if there have been build changes?)." >&2
-        return 1
-    else
-        echo "$ANDROID_BUILD_TOP/$relpath"
-    fi
-}
-
-# Get the path of a specific module in the android tree, as cached in module-info.json.
-# If any build change is made, and it should be reflected in the output, you should run
-# 'refreshmod' first.  Note: This is the inverse of pathmod.
-function dirmods() {
-    if [[ $# -ne 1 ]]; then
-        echo "usage: dirmods <path>" >&2
-        return 1
-    fi
-
-    verifymodinfo || return 1
-
-    python3 -c "import json, os
-dir = '$1'
-while dir.endswith('/'):
-    dir = dir[:-1]
-prefix = dir + '/'
-module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
-results = set()
-for m in module_info.values():
-    for path in m.get(u'path', []):
-        if path == dir or path.startswith(prefix):
-            name = m.get(u'module_name')
-            if name:
-                results.add(name)
-for name in sorted(results):
-    print(name)
-"
-}
-
-
 # Go to a specific module in the android tree, as cached in module-info.json. If any build change
 # is made, and it should be reflected in the output, you should run 'refreshmod' first.
+# Note: This function is in envsetup because changing the directory needs to happen in the current
+# shell. All other functions that use module-info.json should be in build/soong/bin.
 function gomod() {
     if [[ $# -ne 1 ]]; then
         echo "usage: gomod <module>" >&2
@@ -1747,90 +995,11 @@
     cd $path
 }
 
-# Gets the list of a module's installed outputs, as cached in module-info.json.
-# If any build change is made, and it should be reflected in the output, you should run 'refreshmod' first.
-function outmod() {
-    if [[ $# -ne 1 ]]; then
-        echo "usage: outmod <module>" >&2
-        return 1
-    fi
-
-    verifymodinfo || return 1
-
-    local relpath
-    relpath=$(python3 -c "import json, os
-module = '$1'
-module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
-if module not in module_info:
-    exit(1)
-for output in module_info[module]['installed']:
-    print(os.path.join('$ANDROID_BUILD_TOP', output))" 2>/dev/null)
-
-    if [ $? -ne 0 ]; then
-        echo "Could not find module '$1' (try 'refreshmod' if there have been build changes?)" >&2
-        return 1
-    elif [ ! -z "$relpath" ]; then
-        echo "$relpath"
-    fi
-}
-
-# adb install a module's apk, as cached in module-info.json. If any build change
-# is made, and it should be reflected in the output, you should run 'refreshmod' first.
-# Usage: installmod [adb install arguments] <module>
-# For example: installmod -r Dialer -> adb install -r /path/to/Dialer.apk
-function installmod() {
-    if [[ $# -eq 0 ]]; then
-        echo "usage: installmod [adb install arguments] <module>" >&2
-        echo "" >&2
-        echo "Only flags to be passed after the \"install\" in adb install are supported," >&2
-        echo "with the exception of -s. If -s is passed it will be placed before the \"install\"." >&2
-        echo "-s must be the first flag passed if it exists." >&2
-        return 1
-    fi
-
-    local _path
-    _path=$(outmod ${@:$#:1})
-    if [ $? -ne 0 ]; then
-        return 1
-    fi
-
-    _path=$(echo "$_path" | grep -E \\.apk$ | head -n 1)
-    if [ -z "$_path" ]; then
-        echo "Module '$1' does not produce a file ending with .apk (try 'refreshmod' if there have been build changes?)" >&2
-        return 1
-    fi
-    local serial_device=""
-    if [[ "$1" == "-s" ]]; then
-        if [[ $# -le 2 ]]; then
-            echo "-s requires an argument" >&2
-            return 1
-        fi
-        serial_device="-s $2"
-        shift 2
-    fi
-    local length=$(( $# - 1 ))
-    echo adb $serial_device install ${@:1:$length} $_path
-    adb $serial_device install ${@:1:$length} $_path
-}
-
 function _complete_android_module_names() {
     local word=${COMP_WORDS[COMP_CWORD]}
     COMPREPLY=( $(allmod | grep -E "^$word") )
 }
 
-# Print colored exit condition
-function pez {
-    "$@"
-    local retval=$?
-    if [ $retval -ne 0 ]
-    then
-        echo $'\E'"[0;31mFAILURE\e[00m"
-    else
-        echo $'\E'"[0;32mSUCCESS\e[00m"
-    fi
-    return $retval
-}
-
 function get_make_command()
 {
     # If we're in the top of an Android tree, use soong_ui.bash instead of make
@@ -1848,142 +1017,11 @@
     fi
 }
 
-function _wrap_build()
-{
-    if [[ "${ANDROID_QUIET_BUILD:-}" == true ]]; then
-      "$@"
-      return $?
-    fi
-    local start_time=$(date +"%s")
-    "$@"
-    local ret=$?
-    local end_time=$(date +"%s")
-    local tdiff=$(($end_time-$start_time))
-    local hours=$(($tdiff / 3600 ))
-    local mins=$((($tdiff % 3600) / 60))
-    local secs=$(($tdiff % 60))
-    local ncolors=$(tput colors 2>/dev/null)
-    if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
-        color_failed=$'\E'"[0;31m"
-        color_success=$'\E'"[0;32m"
-        color_warning=$'\E'"[0;33m"
-        color_reset=$'\E'"[00m"
-    else
-        color_failed=""
-        color_success=""
-        color_reset=""
-    fi
-
-    echo
-    if [ $ret -eq 0 ] ; then
-        echo -n "${color_success}#### build completed successfully "
-    else
-        echo -n "${color_failed}#### failed to build some targets "
-    fi
-    if [ $hours -gt 0 ] ; then
-        printf "(%02g:%02g:%02g (hh:mm:ss))" $hours $mins $secs
-    elif [ $mins -gt 0 ] ; then
-        printf "(%02g:%02g (mm:ss))" $mins $secs
-    elif [ $secs -gt 0 ] ; then
-        printf "(%s seconds)" $secs
-    fi
-    echo " ####${color_reset}"
-    echo
-    return $ret
-}
-
-function _trigger_build()
-(
-    local -r bc="$1"; shift
-    local T=$(gettop)
-    if [ -n "$T" ]; then
-      _wrap_build "$T/build/soong/soong_ui.bash" --build-mode --${bc} --dir="$(pwd)" "$@"
-    else
-      >&2 echo "Couldn't locate the top of the tree. Try setting TOP."
-      return 1
-    fi
-    local ret=$?
-    if [[ ret -eq 0 &&  -z "${ANDROID_QUIET_BUILD:-}" && -n "${ANDROID_BUILD_BANNER}" ]]; then
-      echo "${ANDROID_BUILD_BANNER}"
-    fi
-    return $ret
-)
-
-function m()
-(
-    _trigger_build "all-modules" "$@"
-)
-
-function mm()
-(
-    _trigger_build "modules-in-a-dir-no-deps" "$@"
-)
-
-function mmm()
-(
-    _trigger_build "modules-in-dirs-no-deps" "$@"
-)
-
-function mma()
-(
-    _trigger_build "modules-in-a-dir" "$@"
-)
-
-function mmma()
-(
-    _trigger_build "modules-in-dirs" "$@"
-)
-
 function make()
 {
     _wrap_build $(get_make_command "$@") "$@"
 }
 
-function _multitree_lunch_error()
-{
-      >&2 echo "Couldn't locate the top of the tree. Please run \'source build/envsetup.sh\' and multitree_lunch from the root of your workspace."
-}
-
-function multitree_build()
-{
-    local T=$(multitree_gettop)
-    if [ -n "$T" ]; then
-      "$T/orchestrator/build/orchestrator/core/orchestrator.py" "$@"
-    else
-      _multitree_lunch_error
-      return 1
-    fi
-}
-
-function provision()
-{
-    if [ ! "$ANDROID_PRODUCT_OUT" ]; then
-        echo "Couldn't locate output files.  Try running 'lunch' first." >&2
-        return 1
-    fi
-    if [ ! -e "$ANDROID_PRODUCT_OUT/provision-device" ]; then
-        echo "There is no provisioning script for the device." >&2
-        return 1
-    fi
-
-    # Check if user really wants to do this.
-    if [ "$1" = "--no-confirmation" ]; then
-        shift 1
-    else
-        echo "This action will reflash your device."
-        echo ""
-        echo "ALL DATA ON THE DEVICE WILL BE IRREVOCABLY ERASED."
-        echo ""
-        echo -n "Are you sure you want to do this (yes/no)? "
-        read
-        if [[ "${REPLY}" != "yes" ]] ; then
-            echo "Not taking any action. Exiting." >&2
-            return 1
-        fi
-    fi
-    "$ANDROID_PRODUCT_OUT/provision-device" "$@"
-}
-
 # Zsh needs bashcompinit called to support bash-style completion.
 function enable_zsh_completion() {
     # Don't override user's options if bash-style completion is already enabled.
@@ -2065,7 +1103,7 @@
             return
             ;;
     esac
-    OUT_DIR="$(get_abs_build_var OUT_DIR)"
+    OUT_DIR="$(_get_abs_build_var_cached OUT_DIR)"
     if [[ "$1" == "--regenerate" ]]; then
       shift 1
       NINJA_ARGS="-t commands $@" m
@@ -2076,25 +1114,68 @@
     fi
 }
 
-function avbtool() {
-    if [[ ! -f "$ANDROID_SOONG_HOST_OUT"/bin/avbtool ]]; then
-        m avbtool
-    fi
-    "$ANDROID_SOONG_HOST_OUT"/bin/avbtool $@
-}
+# These functions used to be here but are now standalone scripts
+# in build/soong/bin.  Unset these for the time being so the real
+# script is picked up.
+# TODO: Remove this some time after a suitable delay (maybe 2025?)
+unset allmod
+unset aninja
+unset cgrep
+unset core
+unset coredump_enable
+unset coredump_setup
+unset dirmods
+unset get_build_var
+unset get_abs_build_var
+unset getlastscreenshot
+unset getprebuilt
+unset getscreenshotpath
+unset getsdcardpath
+unset gettargetarch
+unset ggrep
+unset gogrep
+unset hmm
+unset installmod
+unset is64bit
+unset isviewserverstarted
+unset jgrep
+unset jsongrep
+unset key_back
+unset key_home
+unset key_menu
+unset ktgrep
+unset m
+unset mangrep
+unset mgrep
+unset mm
+unset mma
+unset mmm
+unset mmma
+unset outmod
+unset overrideflags
+unset owngrep
+unset pathmod
+unset pez
+unset pygrep
+unset qpid
+unset rcgrep
+unset refreshmod
+unset resgrep
+unset rsgrep
+unset run_tool_with_logging
+unset sepgrep
+unset sgrep
+unset startviewserver
+unset stopviewserver
+unset systemstack
+unset syswrite
+unset tomlgrep
+unset treegrep
 
-function overrideflags() {
-    local T="$(gettop)"
-    (\cd "${T}" && build/make/tools/overrideflags.sh "$@")
-}
-
-function aninja() {
-    local T="$(gettop)"
-    (\cd "${T}" && prebuilts/build-tools/linux-x86/bin/ninja -f out/combined-${TARGET_PRODUCT}.ninja "$@")
-}
 
 validate_current_shell
 set_global_paths
 source_vendorsetup
 addcompletions
 
+
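For context on the envsetup.sh changes above: the helpers unset here are expected to resolve to
standalone scripts in build/soong/bin instead of shell functions. A minimal sanity check in a
lunched shell might look like the following sketch (the exact script locations and lunch target
are assumptions of this example, not something the patch guarantees):

    source build/envsetup.sh
    lunch <product>-<release>-<variant>   # e.g. aosp_arm64-trunk_staging-userdebug
    type -t m          # expected to print "file" once m resolves to a standalone script
    command -v allmod  # should point into build/soong/bin if the script exists there
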
diff --git a/shell_utils.sh b/shell_utils.sh
index 9de5a50..86f3f49 100644
--- a/shell_utils.sh
+++ b/shell_utils.sh
@@ -40,15 +40,24 @@
     fi
 }
 
-# Sets TOP, or if the root of the tree can't be found, prints a message and
-# exits.  Since this function exits, it should not be called from functions
-# defined in envsetup.sh.
+# Asserts that the root of the tree can be found.
 if [ -z "${IMPORTING_ENVSETUP:-}" ] ; then
 function require_top
 {
     TOP=$(gettop)
     if [[ ! $TOP ]] ; then
-        echo "Can not locate root of source tree. $(basename $0) must be run from within the Android source tree." >&2
+        echo "Can not locate root of source tree. $(basename $0) must be run from within the Android source tree or TOP must be set." >&2
+        exit 1
+    fi
+}
+fi
+
+# Asserts that the lunch variables have been set
+if [ -z "${IMPORTING_ENVSETUP:-}" ] ; then
+function require_lunch
+{
+    if [[ ! $TARGET_PRODUCT || ! $TARGET_RELEASE || ! $TARGET_BUILD_VARIANT ]] ; then
+        echo "Please run lunch and try again." >&2
         exit 1
     fi
 }
@@ -71,4 +80,73 @@
     echo "${out_dir}"
 }
 
+# Pretty print the build status and duration
+function _wrap_build()
+{
+    if [[ "${ANDROID_QUIET_BUILD:-}" == true ]]; then
+      "$@"
+      return $?
+    fi
+    local start_time=$(date +"%s")
+    "$@"
+    local ret=$?
+    local end_time=$(date +"%s")
+    local tdiff=$(($end_time-$start_time))
+    local hours=$(($tdiff / 3600 ))
+    local mins=$((($tdiff % 3600) / 60))
+    local secs=$(($tdiff % 60))
+    local ncolors=$(tput colors 2>/dev/null)
+    if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
+        color_failed=$'\E'"[0;31m"
+        color_success=$'\E'"[0;32m"
+        color_warning=$'\E'"[0;33m"
+        color_reset=$'\E'"[00m"
+    else
+        color_failed=""
+        color_success=""
+        color_reset=""
+    fi
+
+    echo
+    if [ $ret -eq 0 ] ; then
+        echo -n "${color_success}#### build completed successfully "
+    else
+        echo -n "${color_failed}#### failed to build some targets "
+    fi
+    if [ $hours -gt 0 ] ; then
+        printf "(%02g:%02g:%02g (hh:mm:ss))" $hours $mins $secs
+    elif [ $mins -gt 0 ] ; then
+        printf "(%02g:%02g (mm:ss))" $mins $secs
+    elif [ $secs -gt 0 ] ; then
+        printf "(%s seconds)" $secs
+    fi
+    echo " ####${color_reset}"
+    echo
+    return $ret
+}
+
+
+function log_tool_invocation()
+{
+    if [[ -z $ANDROID_TOOL_LOGGER ]]; then
+        return
+    fi
+
+    LOG_TOOL_TAG=$1
+    LOG_START_TIME=$(date +%s.%N)
+    trap '
+        exit_code=$?;
+        # Remove the trap to prevent duplicate log.
+        trap - EXIT;
+        $ANDROID_TOOL_LOGGER \
+                --tool_tag="${LOG_TOOL_TAG}" \
+                --start_timestamp="${LOG_START_TIME}" \
+                --end_timestamp="$(date +%s.%N)" \
+                --tool_args="$*" \
+                --exit_code="${exit_code}" \
+                ${ANDROID_TOOL_LOGGER_EXTRA_ARGS} \
+           > /dev/null 2>&1 &
+        exit ${exit_code}
+    ' SIGINT SIGTERM SIGQUIT EXIT
+}
 
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index 2b17349..b5e3dc2 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -24,11 +24,6 @@
 # the devices with metadata partition
 BOARD_USES_METADATA_PARTITION := true
 
-ifeq ($(KEEP_VNDK),true)
-# Default is current, but allow devices to override vndk version if needed.
-BOARD_VNDK_VERSION ?= current
-endif
-
 # 64 bit mediadrmserver
 TARGET_ENABLE_MEDIADRM_64 := true
 
diff --git a/target/board/ndk/BoardConfig.mk b/target/board/ndk/BoardConfig.mk
index b485f8b..e367918 100644
--- a/target/board/ndk/BoardConfig.mk
+++ b/target/board/ndk/BoardConfig.mk
@@ -14,7 +14,3 @@
 #
 
 TARGET_ARCH_SUITE := ndk
-
-MALLOC_SVELTE := true
-
-USE_SAFESTACK := false
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index d3514a5..7a9325d 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -44,7 +44,7 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
 
 # pKVM
-$(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
+$(call inherit-product-if-exists, packages/modules/Virtualization/apex/product_packages.mk)
 
 #
 # All components inherited here go to product image
@@ -55,7 +55,8 @@
 # All components inherited here go to vendor or vendor_boot image
 #
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_arm64/device.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS ?= system
 
 #
 # Special settings for GSI releasing
@@ -72,3 +73,5 @@
 PRODUCT_DEVICE := generic_arm64
 PRODUCT_BRAND := Android
 PRODUCT_MODEL := AOSP on ARM64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/aosp_product.mk b/target/product/aosp_product.mk
index f72f2df..3a5b622 100644
--- a/target/product/aosp_product.mk
+++ b/target/product/aosp_product.mk
@@ -34,7 +34,6 @@
     PhotoTable \
     preinstalled-packages-platform-aosp-product.xml \
     ThemePicker \
-    WallpaperPicker \
 
 # Telephony:
 #   Provide a APN configuration to GSI product
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 3040dd3..595940d 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -57,7 +57,8 @@
 # All components inherited here go to vendor image
 #
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS ?= system
 
 #
 # Special settings for GSI releasing
@@ -74,3 +75,5 @@
 PRODUCT_DEVICE := generic_x86_64
 PRODUCT_BRAND := Android
 PRODUCT_MODEL := AOSP on x86_64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 5446064..0ac220b 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -16,6 +16,7 @@
 
 # Base modules and settings for the product partition.
 PRODUCT_PACKAGES += \
+    build_flag_product \
     fs_config_dirs_product \
     fs_config_files_product \
     group_product \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 8dc680b..795d3f8 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -45,6 +45,7 @@
     bu \
     bugreport \
     bugreportz \
+    build_flag_system \
     cgroups.json \
     charger \
     cmd \
@@ -82,6 +83,7 @@
     CtsShimPrivPrebuilt \
     debuggerd\
     device_config \
+    DeviceDiagnostics \
     dmctl \
     dnsmasq \
     dmesgd \
@@ -90,7 +92,9 @@
     dump.erofs \
     dumpstate \
     dumpsys \
+    E2eeContactKeysProvider \
     e2fsck \
+    enhanced-confirmation.xml \
     ExtShared \
     flags_health_check \
     framework-graphics \
@@ -234,6 +238,7 @@
     org.apache.http.legacy \
     otacerts \
     PackageInstaller \
+    package-shareduid-allowlist.xml \
     passwd_system \
     perfetto \
     perfetto-extras \
@@ -286,8 +291,10 @@
     uiautomator \
     uinput \
     uncrypt \
+    uprobestats \
     usbd \
     vdc \
+    vintf \
     voip-common \
     vold \
     watchdogd \
@@ -326,6 +333,13 @@
         com.android.nfcservices
 endif
 
+# Check if the build supports Profiling module
+ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true)
+    PRODUCT_PACKAGES += \
+       com.android.profiling \
+       trace_redactor
+endif
+
 ifeq ($(RELEASE_USE_WEBVIEW_BOOTSTRAP_MODULE),true)
     PRODUCT_PACKAGES += \
         com.android.webview.bootstrap
@@ -399,7 +413,7 @@
     flags_health_check \
     fsck.erofs \
     icu-data_host_i18n_apex \
-    icu_tzdata.dat_host_tzdata_apex \
+    tzdata_icu_res_files_host_prebuilts \
     idmap2 \
     incident_report \
     ld.mc \
@@ -422,8 +436,7 @@
 
 PRODUCT_PACKAGES += init.usb.rc init.usb.configfs.rc
 
-PRODUCT_COPY_FILES += \
-    system/core/rootdir/etc/hosts:system/etc/hosts
+PRODUCT_PACKAGES += etc_hosts
 
 PRODUCT_PACKAGES += init.zygote32.rc
 PRODUCT_VENDOR_PROPERTIES += ro.zygote?=zygote32
@@ -483,10 +496,13 @@
 PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
     frameworks/base/config/preloaded-classes:system/etc/preloaded-classes)
 
-# Note: it is acceptable to not have a dirty-image-objects file. In that case, the special bin
-#       for known dirty objects in the image will be empty.
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
-    frameworks/base/config/dirty-image-objects:system/etc/dirty-image-objects)
+# Enable dirty image object binning to reduce dirty pages in the image.
+PRODUCT_PACKAGES += dirty-image-objects
+
+# Enable go/perfetto-persistent-tracing for eng builds
+ifneq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
+    PRODUCT_PRODUCT_PROPERTIES += persist.debug.perfetto.persistent_sysui_tracing_for_bugreport=1
+endif
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
 
@@ -495,3 +511,6 @@
 
 # Use "image" APEXes always.
 $(call inherit-product,$(SRC_TARGET_DIR)/product/updatable_apex.mk)
+
+$(call soong_config_set, bionic, large_system_property_node, $(RELEASE_LARGE_SYSTEM_PROPERTY_NODE))
+$(call soong_config_set, Aconfig, read_from_new_storage, $(RELEASE_READ_FROM_NEW_STORAGE))
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index 76f008f..92ca227 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -16,6 +16,7 @@
 
 # Base modules and settings for the system_ext partition.
 PRODUCT_PACKAGES += \
+    build_flag_system_ext \
     fs_config_dirs_system_ext \
     fs_config_files_system_ext \
     group_system_ext \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index ec3de75..1854f97 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -18,6 +18,7 @@
 PRODUCT_PACKAGES += \
     adbd.recovery \
     android.hardware.health@2.0-impl-default.recovery \
+    build_flag_vendor \
     cgroups.recovery.json \
     charger.recovery \
     init_second_stage.recovery \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 4a968d7..1a3f2cf 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -101,8 +101,16 @@
     PRODUCT_BOOT_JARS += \
         framework-nfc
 else
-    PRODUCT_APEX_BOOT_JARS := \
+    PRODUCT_APEX_BOOT_JARS += \
         com.android.nfcservices:framework-nfc
+    $(call soong_config_set,bootclasspath,nfc_apex_bootclasspath_fragment,true)
+endif
+
+# Check if build supports Profiling module.
+ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true)
+    PRODUCT_APEX_BOOT_JARS += \
+        com.android.profiling:framework-profiling \
+
 endif
 
 # List of system_server classpath jars delivered via apex.
@@ -153,6 +161,13 @@
     com.android.uwb:service-uwb \
     com.android.wifi:service-wifi \
 
+# Check if build supports Profiling module.
+ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true)
+    PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS += \
+        com.android.profiling:service-profiling \
+
+endif
+
 # Overrides the (apex, jar) pairs above when determining the on-device location. The format is:
 # <old_apex>:<old_jar>:<new_apex>:<new_jar>
 PRODUCT_CONFIGURED_JAR_LOCATION_OVERRIDES := \
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index fa31e04..0a09eb1 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -36,6 +36,11 @@
     Stk \
     Tag \
 
+ifeq ($(RELEASE_AVATAR_PICKER_APP),true)
+  PRODUCT_PACKAGES += \
+    AvatarPicker
+endif
+
 # OTA support
 PRODUCT_PACKAGES += \
     recovery-refresh \
@@ -68,7 +73,6 @@
     android.hardware.radio.config@1.0 \
     android.hardware.radio.deprecated@1.0 \
     android.hardware.secure_element@1.0 \
-    android.hardware.wifi \
     libaudio-resampler \
     libaudiohal \
     libdrm \
@@ -116,6 +120,10 @@
     init.zygote64.rc \
     init.zygote64_32.rc
 
+# Support Credential Manager
+PRODUCT_PACKAGES += \
+    android.software.credentials.prebuilt.xml
+
 # Enable dynamic partition size
 PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
 
@@ -142,3 +150,6 @@
   $(TARGET_COPY_OUT_SYSTEM)/ \
 
 $(call require-artifacts-in-path, $(_my_paths), $(_my_allowed_list))
+
+# Product config map to toggle between sources and prebuilts of required mainline modules
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline/required/release_config_map.textproto)
diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk
index b717486..4627fde 100644
--- a/target/product/go_defaults.mk
+++ b/target/product/go_defaults.mk
@@ -17,6 +17,8 @@
 # Inherit common Android Go defaults.
 $(call inherit-product, build/make/target/product/go_defaults_common.mk)
 
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/go_devices/release_config_map.textproto)
+
 # Add the system properties.
 TARGET_SYSTEM_PROP += \
     build/make/target/board/go_defaults.prop
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 3bb65ac..36897fe 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -1,96 +1,22 @@
 LOCAL_PATH:= $(call my-dir)
 
 #####################################################################
-# list of vndk libraries from the source code.
-INTERNAL_VNDK_LIB_LIST := $(SOONG_VNDK_LIBRARIES_FILE)
-
-#####################################################################
 # Check the generate list against the latest list stored in the
 # source tree
-.PHONY: check-vndk-list
+.PHONY: check-abi-dump-list
 
 # Check if vndk list is changed
-droidcore: check-vndk-list
+droidcore: check-abi-dump-list
 
-check-vndk-list-timestamp := $(call intermediates-dir-for,PACKAGING,vndk)/check-list-timestamp
-check-vndk-abi-dump-list-timestamp := $(call intermediates-dir-for,PACKAGING,vndk)/check-abi-dump-list-timestamp
+check-abi-dump-list-timestamp := $(call intermediates-dir-for,PACKAGING,vndk)/check-abi-dump-list-timestamp
 
-ifeq ($(TARGET_IS_64_BIT)|$(TARGET_2ND_ARCH),true|)
-# TODO(b/110429754) remove this condition when we support 64-bit-only device
-check-vndk-list: ;
-else ifeq ($(TARGET_SKIP_CURRENT_VNDK),true)
-check-vndk-list: ;
-else ifeq ($(BOARD_VNDK_VERSION),)
-check-vndk-list: ;
-else
-check-vndk-list: $(check-vndk-list-timestamp)
-ifneq ($(SKIP_ABI_CHECKS),true)
-check-vndk-list: $(check-vndk-abi-dump-list-timestamp)
+# The ABI tool does not support sanitizer and coverage builds.
+ifeq (,$(filter true,$(SKIP_ABI_CHECKS) $(CLANG_COVERAGE)))
+ifeq (,$(SANITIZE_TARGET))
+check-abi-dump-list: $(check-abi-dump-list-timestamp)
 endif
 endif
 
-_vndk_check_failure_message := " error: VNDK library list has been changed.\n"
-ifeq (REL,$(PLATFORM_VERSION_CODENAME))
-_vndk_check_failure_message += "       Changing the VNDK library list is not allowed in API locked branches."
-else
-_vndk_check_failure_message += "       Run \`update-vndk-list.sh\` to update $(LATEST_VNDK_LIB_LIST)"
-endif
-
-# The *-ndk_platform.so libraries no longer exist and are removed from the VNDK set. However, they
-# can exist if NEED_AIDL_NDK_PLATFORM_BACKEND is set to true for legacy devices. Don't be bothered
-# with the extraneous libraries.
-ifeq ($(NEED_AIDL_NDK_PLATFORM_BACKEND),true)
-	_READ_INTERNAL_VNDK_LIB_LIST := sed /ndk_platform.so/d $(INTERNAL_VNDK_LIB_LIST)
-else
-	_READ_INTERNAL_VNDK_LIB_LIST := cat $(INTERNAL_VNDK_LIB_LIST)
-endif
-
-$(check-vndk-list-timestamp): $(INTERNAL_VNDK_LIB_LIST) $(LATEST_VNDK_LIB_LIST) $(HOST_OUT_EXECUTABLES)/update-vndk-list.sh
-	$(hide) ($(_READ_INTERNAL_VNDK_LIB_LIST) | sort | \
-	diff --old-line-format="Removed %L" \
-	  --new-line-format="Added %L" \
-	  --unchanged-line-format="" \
-	  <(cat $(LATEST_VNDK_LIB_LIST) | sort) - \
-	  || ( echo -e $(_vndk_check_failure_message); exit 1 ))
-	$(hide) mkdir -p $(dir $@)
-	$(hide) touch $@
-
-#####################################################################
-# Script to update the latest VNDK lib list
-include $(CLEAR_VARS)
-LOCAL_MODULE := update-vndk-list.sh
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := EXECUTABLES
-LOCAL_MODULE_STEM := $(LOCAL_MODULE)
-LOCAL_IS_HOST_MODULE := true
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_INTERNAL_VNDK_LIB_LIST := $(INTERNAL_VNDK_LIB_LIST)
-$(LOCAL_BUILT_MODULE): PRIVATE_LATEST_VNDK_LIB_LIST := $(LATEST_VNDK_LIB_LIST)
-$(LOCAL_BUILT_MODULE):
-	@echo "Generate: $@"
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) echo "#!/bin/bash" > $@
-ifeq (REL,$(PLATFORM_VERSION_CODENAME))
-	$(hide) echo "echo Updating VNDK library list is NOT allowed in API locked branches." >> $@; \
-	        echo "exit 1" >> $@
-else
-	$(hide) echo "if [ -z \"\$${ANDROID_BUILD_TOP}\" ]; then" >> $@; \
-	        echo "  echo Run lunch or choosecombo first" >> $@; \
-	        echo "  exit 1" >> $@; \
-	        echo "fi" >> $@; \
-	        echo "cd \$${ANDROID_BUILD_TOP}" >> $@
-ifeq ($(NEED_AIDL_NDK_PLATFORM_BACKEND),true)
-	$(hide) echo "sed /ndk_platform.so/d $(PRIVATE_INTERNAL_VNDK_LIB_LIST) > $(PRIVATE_LATEST_VNDK_LIB_LIST)" >> $@
-else
-	$(hide) echo "cp $(PRIVATE_INTERNAL_VNDK_LIB_LIST) $(PRIVATE_LATEST_VNDK_LIB_LIST)" >> $@
-endif
-	$(hide) echo "echo $(PRIVATE_LATEST_VNDK_LIB_LIST) updated." >> $@
-endif
-	@chmod a+x $@
-
 #####################################################################
 # ABI reference dumps.
 
@@ -111,6 +37,9 @@
 endef
 
 # Subsets of LSDUMP_PATHS.
+.PHONY: findlsdumps_APEX
+findlsdumps_APEX: $(LSDUMP_PATHS_FILE) $(call filter-abi-dump-paths,APEX,$(LSDUMP_PATHS))
+
 .PHONY: findlsdumps_LLNDK
 findlsdumps_LLNDK: $(LSDUMP_PATHS_FILE) $(call filter-abi-dump-paths,LLNDK,$(LSDUMP_PATHS))
 
@@ -125,7 +54,7 @@
 
 #####################################################################
 # Check that all ABI reference dumps have corresponding
-# NDK/VNDK/PLATFORM libraries.
+# APEX/LLNDK/PLATFORM libraries.
 
 # $(1): The directory containing ABI dumps.
 # Return a list of ABI dump paths ending with .so.lsdump.
@@ -137,52 +66,47 @@
 
 # $(1): A list of tags.
 # $(2): A list of tag:path.
-# Return the file names of the ABI dumps that match the tags.
+# Return the file names of the ABI dumps that match the tags, and replace the
+# file name extensions with .so.lsdump.
 define filter-abi-dump-names
-$(notdir $(call filter-abi-dump-paths,$(1),$(2)))
+$(patsubst %.so.llndk.lsdump,%.so.lsdump, \
+  $(patsubst %.so.apex.lsdump,%.so.lsdump, \
+    $(notdir $(call filter-abi-dump-paths,$(1),$(2)))))
 endef
 
-
+VNDK_ABI_DUMP_DIR := prebuilts/abi-dumps/vndk/$(RELEASE_BOARD_API_LEVEL)
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
-    NDK_ABI_DUMP_DIR := prebuilts/abi-dumps/ndk/$(PLATFORM_SDK_VERSION)
     PLATFORM_ABI_DUMP_DIR := prebuilts/abi-dumps/platform/$(PLATFORM_SDK_VERSION)
 else
-    NDK_ABI_DUMP_DIR := prebuilts/abi-dumps/ndk/current
     PLATFORM_ABI_DUMP_DIR := prebuilts/abi-dumps/platform/current
 endif
-NDK_ABI_DUMPS := $(call find-abi-dump-paths,$(NDK_ABI_DUMP_DIR))
+VNDK_ABI_DUMPS := $(call find-abi-dump-paths,$(VNDK_ABI_DUMP_DIR))
 PLATFORM_ABI_DUMPS := $(call find-abi-dump-paths,$(PLATFORM_ABI_DUMP_DIR))
 
 # Check for superfluous lsdump files. Since LSDUMP_PATHS only covers the
 # libraries that can be built from source in the current build, and prebuilts of
 # Mainline modules may be in use, we also allow the libs in STUB_LIBRARIES for
-# NDK and platform ABIs.
+# platform ABIs.
+# In addition, libRS is allowed because it's disabled for RISC-V.
 
-$(check-vndk-abi-dump-list-timestamp): PRIVATE_LSDUMP_PATHS := $(LSDUMP_PATHS)
-$(check-vndk-abi-dump-list-timestamp): PRIVATE_STUB_LIBRARIES := $(STUB_LIBRARIES)
-$(check-vndk-abi-dump-list-timestamp):
+$(check-abi-dump-list-timestamp): PRIVATE_LSDUMP_PATHS := $(LSDUMP_PATHS)
+$(check-abi-dump-list-timestamp): PRIVATE_STUB_LIBRARIES := $(STUB_LIBRARIES)
+$(check-abi-dump-list-timestamp):
 	$(eval added_vndk_abi_dumps := $(strip $(sort $(filter-out \
-	  $(call filter-abi-dump-names,LLNDK VNDK-SP VNDK-core,$(PRIVATE_LSDUMP_PATHS)), \
+	  $(call filter-abi-dump-names,LLNDK,$(PRIVATE_LSDUMP_PATHS)) libRS.so.lsdump, \
 	  $(notdir $(VNDK_ABI_DUMPS))))))
 	$(if $(added_vndk_abi_dumps), \
 	  echo -e "Found unexpected ABI reference dump files under $(VNDK_ABI_DUMP_DIR). It is caused by mismatch between Android.bp and the dump files. Run \`find \$${ANDROID_BUILD_TOP}/$(VNDK_ABI_DUMP_DIR) '(' -name $(subst $(space), -or -name ,$(added_vndk_abi_dumps)) ')' -delete\` to delete the dump files.")
 
-	$(eval added_ndk_abi_dumps := $(strip $(sort $(filter-out \
-	  $(call filter-abi-dump-names,NDK,$(PRIVATE_LSDUMP_PATHS)) \
-	  $(addsuffix .lsdump,$(PRIVATE_STUB_LIBRARIES)), \
-	  $(notdir $(NDK_ABI_DUMPS))))))
-	$(if $(added_ndk_abi_dumps), \
-	  echo -e "Found unexpected ABI reference dump files under $(NDK_ABI_DUMP_DIR). It is caused by mismatch between Android.bp and the dump files. Run \`find \$${ANDROID_BUILD_TOP}/$(NDK_ABI_DUMP_DIR) '(' -name $(subst $(space), -or -name ,$(added_ndk_abi_dumps)) ')' -delete\` to delete the dump files.")
-
 	# TODO(b/314010764): Remove LLNDK tag after PLATFORM_SDK_VERSION is upgraded to 35.
 	$(eval added_platform_abi_dumps := $(strip $(sort $(filter-out \
-	  $(call filter-abi-dump-names,LLNDK PLATFORM,$(PRIVATE_LSDUMP_PATHS)) \
-	  $(addsuffix .lsdump,$(PRIVATE_STUB_LIBRARIES)), \
+	  $(call filter-abi-dump-names,APEX LLNDK PLATFORM,$(PRIVATE_LSDUMP_PATHS)) \
+	  $(addsuffix .lsdump,$(PRIVATE_STUB_LIBRARIES)) libRS.so.lsdump, \
 	  $(notdir $(PLATFORM_ABI_DUMPS))))))
 	$(if $(added_platform_abi_dumps), \
 	  echo -e "Found unexpected ABI reference dump files under $(PLATFORM_ABI_DUMP_DIR). It is caused by mismatch between Android.bp and the dump files. Run \`find \$${ANDROID_BUILD_TOP}/$(PLATFORM_ABI_DUMP_DIR) '(' -name $(subst $(space), -or -name ,$(added_platform_abi_dumps)) ')' -delete\` to delete the dump files.")
 
-	$(if $(added_vndk_abi_dumps)$(added_ndk_abi_dumps)$(added_platform_abi_dumps),exit 1)
+	$(if $(added_vndk_abi_dumps)$(added_platform_abi_dumps),exit 1)
 	$(hide) mkdir -p $(dir $@)
 	$(hide) touch $@
 
@@ -190,27 +114,6 @@
 # VNDK package and snapshot.
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_package
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-# Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
-LOCAL_REQUIRED_MODULES := llndk_in_system
-
-ifneq ($(TARGET_SKIP_CURRENT_VNDK),true)
-LOCAL_REQUIRED_MODULES += \
-    vndkcorevariant.libraries.txt \
-    $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
-    $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
-    $(VNDK_USING_CORE_VARIANT_LIBRARIES)
-
-LOCAL_ADDITIONAL_DEPENDENCIES += $(call module-built-files,\
-    $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES)))
-
-endif
-include $(BUILD_PHONY_PACKAGE)
-
-include $(CLEAR_VARS)
 
 LOCAL_MODULE := vndk_apex_snapshot_package
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 2e37366..39428d2 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -44,22 +44,21 @@
 # Enable dynamic partition size
 PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
 
-# Disable the build-time debugfs restrictions on GSI builds
-PRODUCT_SET_DEBUGFS_RESTRICTIONS := false
-
 # GSI specific tasks on boot
 PRODUCT_PACKAGES += \
     gsi_skip_mount.cfg \
     init.gsi.rc \
     init.vndk-nodef.rc \
 
-# Overlay the GSI specific SystemUI setting
-PRODUCT_PACKAGES += gsi_overlay_systemui
-PRODUCT_COPY_FILES += \
-    device/generic/common/overlays/overlay-config.xml:$(TARGET_COPY_OUT_SYSTEM_EXT)/overlay/config/config.xml
 
-# b/308878144 no more VNDK on 24Q1 and beyond
-KEEP_VNDK ?= false
+# Overlay the GSI-specific settings for framework and SystemUI
+ifneq ($(PRODUCT_IS_AUTOMOTIVE),true)
+    PRODUCT_PACKAGES += \
+        gsi_overlay_framework \
+        gsi_overlay_systemui
+    PRODUCT_COPY_FILES += \
+        device/generic/common/overlays/overlay-config.xml:$(TARGET_COPY_OUT_SYSTEM_EXT)/overlay/config/config.xml
+endif
 
 # Support additional VNDK snapshots
 PRODUCT_EXTRA_VNDK_VERSIONS := \
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index bf9aa41..3f3bd01 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -33,6 +33,7 @@
 $(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
 
 PRODUCT_PACKAGES += \
+    android.software.window_magnification.prebuilt.xml \
     BasicDreams \
     BlockedNumberProvider \
     BluetoothMidiService \
@@ -75,6 +76,10 @@
     vr \
 
 PRODUCT_PACKAGES += $(RELEASE_PACKAGE_VIRTUAL_CAMERA)
+# Set the virtual_camera_service_enabled soong config variable based on the
+# RELEASE_PACKAGE_VIRTUAL_CAMERA build flag. The variable is used to prevent
+# accessing the service when it's not present in the build.
+$(call soong_config_set,vdm,virtual_camera_service_enabled,$(if $(RELEASE_PACKAGE_VIRTUAL_CAMERA),true,false))
 
 PRODUCT_SYSTEM_SERVER_APPS += \
     FusedLocation \
@@ -82,9 +87,7 @@
     KeyChain \
     Telecom \
 
-PRODUCT_COPY_FILES += \
-    frameworks/av/media/libeffects/data/audio_effects.xml:system/etc/audio_effects.xml \
-    frameworks/native/data/etc/android.software.window_magnification.xml:$(TARGET_COPY_OUT_SYSTEM)/etc/permissions/android.software.window_magnification.xml \
+PRODUCT_PACKAGES += framework-audio_effects.xml
 
 PRODUCT_VENDOR_PROPERTIES += \
     ro.carrier?=unknown \
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 38ba219..af3857e 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -21,6 +21,7 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base_system.mk)
 
 PRODUCT_PACKAGES += \
+    android.software.webview.prebuilt.xml \
     com.android.future.usb.accessory \
     com.android.mediadrm.signer \
     com.android.media.remotedisplay \
@@ -39,12 +40,9 @@
 PRODUCT_HOST_PACKAGES += \
     fsck.f2fs \
 
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
-
 ifneq (REL,$(PLATFORM_VERSION_CODENAME))
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.software.preview_sdk.xml:system/etc/permissions/android.software.preview_sdk.xml
+PRODUCT_PACKAGES += \
+    android.software.preview_sdk.prebuilt.xml
 endif
 
 # The order here is the same order they end up on the classpath, so it matters.
@@ -59,10 +57,6 @@
 PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
     frameworks/base/config/compiled-classes-phone:system/etc/compiled-classes)
 
-# Enable dirty image object binning to reduce dirty pages in the image.
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
-    frameworks/base/dirty-image-objects-phone:system/etc/dirty-image-objects)
-
 # On userdebug builds, collect more tombstones by default.
 ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
 PRODUCT_VENDOR_PROPERTIES += \
diff --git a/target/product/module_arm64.mk b/target/product/module_arm64.mk
index 2e8c8a7..d6487ca 100644
--- a/target/product/module_arm64.mk
+++ b/target/product/module_arm64.mk
@@ -19,3 +19,6 @@
 
 PRODUCT_NAME := module_arm64
 PRODUCT_DEVICE := module_arm64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
+PRODUCT_MAX_PAGE_SIZE_SUPPORTED := 16384
diff --git a/target/product/module_arm64only.mk b/target/product/module_arm64only.mk
index c0769bf..137701a 100644
--- a/target/product/module_arm64only.mk
+++ b/target/product/module_arm64only.mk
@@ -19,3 +19,6 @@
 
 PRODUCT_NAME := module_arm64only
 PRODUCT_DEVICE := module_arm64only
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
+PRODUCT_MAX_PAGE_SIZE_SUPPORTED := 16384
diff --git a/target/product/module_common.mk b/target/product/module_common.mk
index bf146a0..da4ea23 100644
--- a/target/product/module_common.mk
+++ b/target/product/module_common.mk
@@ -24,8 +24,9 @@
 # uses -DENFORCE_VINTF_MANIFEST. See b/185759877
 PRODUCT_SHIPPING_API_LEVEL := 29
 
-# Builds using a module product should build modules from source, even if
-# BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE says otherwise.
+# If true, this builds the mainline modules from source. This overrides any
+# prebuilts selected via RELEASE_APEX_CONTRIBUTIONS_* build flags for the
+# current release config.
 PRODUCT_MODULE_BUILD_FROM_SOURCE := true
 
 # Build sdk from source if the branch is not using slim manifests.
diff --git a/target/product/module_x86_64.mk b/target/product/module_x86_64.mk
index 20f443a..e182bf6 100644
--- a/target/product/module_x86_64.mk
+++ b/target/product/module_x86_64.mk
@@ -19,3 +19,6 @@
 
 PRODUCT_NAME := module_x86_64
 PRODUCT_DEVICE := module_x86_64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
+PRODUCT_MAX_PAGE_SIZE_SUPPORTED := 16384
diff --git a/target/product/module_x86_64only.mk b/target/product/module_x86_64only.mk
index b0d72bf..fa4a04d 100644
--- a/target/product/module_x86_64only.mk
+++ b/target/product/module_x86_64only.mk
@@ -19,3 +19,6 @@
 
 PRODUCT_NAME := module_x86_64only
 PRODUCT_DEVICE := module_x86_64only
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
+PRODUCT_MAX_PAGE_SIZE_SUPPORTED := 16384
diff --git a/target/product/ndk.mk b/target/product/ndk.mk
index 1dfd0db..e4f77f7 100644
--- a/target/product/ndk.mk
+++ b/target/product/ndk.mk
@@ -19,3 +19,5 @@
 PRODUCT_NAME := ndk
 PRODUCT_BRAND := Android
 PRODUCT_DEVICE := ndk
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index a9d478d..dc78368 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -175,15 +175,6 @@
     dalvik.vm.usap_pool_size_min?=1 \
     dalvik.vm.usap_pool_refill_delay_ms?=3000
 
-# Allow dexopt files that are side-effects of already allowlisted files.
-# This is only necessary when ART is prebuilt.
-ifeq (false,$(ART_MODULE_BUILD_FROM_SOURCE))
-  PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
-      system/framework/%.art \
-      system/framework/%.oat \
-      system/framework/%.odex \
-      system/framework/%.vdex
-endif
-
 PRODUCT_SYSTEM_PROPERTIES += \
-    dalvik.vm.useartservice=true
+    dalvik.vm.useartservice=true \
+    dalvik.vm.enable_pr_dexopt=true
diff --git a/target/product/security/README b/target/product/security/README
index 2b161bb..1e015f0 100644
--- a/target/product/security/README
+++ b/target/product/security/README
@@ -37,3 +37,31 @@
 
 This is called by build/make/core/Makefile to incorporate the OTA signing keys
 into the recovery image.
+
+Converting to a Java keystore for Android Studio
+------------------------------------------------
+
+Suppose we want to convert shared.pk8 and shared.x509.pem to shared.keystore.
+
+ $ openssl pkcs8 -inform DER -nocrypt                      \
+   -in build/make/target/product/security/shared.pk8       \
+   -out shared.pem
+ $ openssl pkcs12 -export                                  \
+   -in build/make/target/product/security/shared.x509.pem  \
+   -inkey shared.pem -out shared.p12                       \
+   -password pass:android -name AndroidDebugKey
+ $ keytool -importkeystore -deststorepass android          \
+   -destkeystore shared.keystore -srckeystore shared.p12   \
+   -srcstoretype PKCS12 -srcstorepass android
+
+The keystore can be used in build.gradle as follows.
+
+signingConfigs {
+    shared {
+        storeFile file("shared.keystore")
+        storePassword "android"
+        keyPassword "android"
+        keyAlias "AndroidDebugKey"
+    }
+}
+
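As a quick check that the conversion above produced a usable keystore (not part of the patch,
just a standard keytool verification):

 $ keytool -list -v -keystore shared.keystore \
     -storepass android -alias AndroidDebugKey
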
diff --git a/target/product/vboot.mk b/target/product/vboot.mk
deleted file mode 100644
index 48a4883..0000000
--- a/target/product/vboot.mk
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Copyright (C) 2015 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Provides dependencies necessary for verified boot
-
-PRODUCT_SUPPORTS_VBOOT := true
-
-# The dev key is used to sign boot and recovery images.
-# We expect this file to exist with the suffixes ".vbprivk" and ".vbpupk".
-# TODO: find a proper location for this
-PRODUCT_VBOOT_SIGNING_KEY := external/vboot_reference/tests/devkeys/kernel_data_key
-PRODUCT_VBOOT_SIGNING_SUBKEY := external/vboot_reference/tests/devkeys/kernel_subkey
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index af0f7a9..418aaa4 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -20,5 +20,3 @@
 #
 # All U+ launching devices should instead use vabc_features.mk.
 $(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/vabc_features.mk)
-
-PRODUCT_VIRTUAL_AB_COW_VERSION ?= 2
diff --git a/target/product/virtual_ab_ota/vabc_features.mk b/target/product/virtual_ab_ota/vabc_features.mk
index 874eb9c..e2745a1 100644
--- a/target/product/virtual_ab_ota/vabc_features.mk
+++ b/target/product/virtual_ab_ota/vabc_features.mk
@@ -34,10 +34,29 @@
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true
+# Low-memory device configurations. If memory usage and CPU utilization are
+# a bottleneck during OTA, the configurations below can be added to a
+# device's .mk file to improve performance on low-memory devices. Disabling
+# ro.virtual_ab.compression.xor.enabled and ro.virtual_ab.io_uring.enabled
+# is also recommended.
+#
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.read_ahead_size=16
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.o_direct.enabled=true
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.merge_thread_priority=19
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.worker_thread_priority=0
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.num_worker_threads=3
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.num_merge_threads=1
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.num_verify_threads=1
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.cow_op_merge_size=16
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.verify_threshold_size=1073741824
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.verify_block_size=1048576
 
 # Enabling this property will improve OTA install time
 # but will use an additional CPU core
 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.threads=true
+ifndef PRODUCT_VIRTUAL_AB_COMPRESSION_FACTOR
+    PRODUCT_VIRTUAL_AB_COMPRESSION_FACTOR := 65536
+endif
 
 PRODUCT_VIRTUAL_AB_COMPRESSION := true
 PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD ?= none
diff --git a/teams/Android.bp b/teams/Android.bp
index 8f83e71..a9699d2 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -732,7 +732,7 @@
 }
 
 team {
-    name: "trendy_team_deprecated_systemui_gfx",
+    name: "trendy_team_ailabs",
 
     // go/trendy/manage/engineers/6673470538285056
     trendy_team_id: "6673470538285056",
@@ -4337,37 +4337,85 @@
 }
 
 team {
-  name: "trendy_team_media_framework_drm",
+    name: "trendy_team_media_framework_drm",
 
-  // go/trendy/manage/engineers/5311752690335744
-  trendy_team_id: "5311752690335744",
+    // go/trendy/manage/engineers/5311752690335744
+    trendy_team_id: "5311752690335744",
 }
 
 team {
-  name: "trendy_team_media_framework_audio",
+    name: "trendy_team_media_framework_audio",
 
-  // go/trendy/manage/engineers/5823575353065472
-  trendy_team_id: "5823575353065472",
+    // go/trendy/manage/engineers/5823575353065472
+    trendy_team_id: "5823575353065472",
 }
 
 team {
-  name: "trendy_team_ar_sensors_context_hub",
+    name: "trendy_team_pixel_pearl",
 
-  // go/trendy/manage/engineers/4776371090259968
-  trendy_team_id: "4776371090259968",
-}
-
-
-team {
-  name: "trendy_team_media_codec_framework",
-
-  // go/trendy/manage/engineers/4943966050844672
-  trendy_team_id: "4943966050844672",
+    // go/trendy/manage/engineers/6326219602231296
+    trendy_team_id: "6326219602231296",
 }
 
 team {
-  name: "trendy_team_android_platform_performance_testing",
+    name: "trendy_team_ar_sensors_context_hub",
 
-  // go/trendy/manage/engineers/5810097836621824
-  trendy_team_id: "5810097836621824",
+    // go/trendy/manage/engineers/4776371090259968
+    trendy_team_id: "4776371090259968",
+}
+
+team {
+    name: "trendy_team_media_codec_framework",
+
+    // go/trendy/manage/engineers/4943966050844672
+    trendy_team_id: "4943966050844672",
+}
+
+team {
+    name: "trendy_team_android_platform_performance_testing",
+
+    // go/trendy/manage/engineers/5810097836621824
+    trendy_team_id: "5810097836621824",
+}
+
+team {
+    name: "trendy_team_adte",
+
+    // go/trendy/manage/engineers/5551098528825344
+    trendy_team_id: "5551098528825344",
+}
+
+team {
+    name: "trendy_team_incremental",
+
+    // go/trendy/manage/engineers/5955405559201792
+    trendy_team_id: "5955405559201792",
+}
+
+team {
+    name: "trendy_team_android_media_better_together",
+
+    // go/trendy/manage/engineers/5617300451721216
+    trendy_team_id: "5617300451721216",
+}
+
+team {
+    name: "trendy_team_attack_tools",
+
+    // go/trendy/manage/engineers/4705629185081344
+    trendy_team_id: "4705629185081344",
+}
+
+team {
+    name: "trendy_team_android_media_solutions_editing",
+
+    // go/trendy/manage/engineers/5350750192762880
+    trendy_team_id: "5350750192762880",
+}
+
+team {
+    name: "trendy_team_android_telemetry_client_infra",
+
+    // go/trendy/manage/engineers/5403245077430272
+    trendy_team_id: "5403245077430272",
 }
diff --git a/tests/run.rbc b/tests/run.rbc
index 85d6c09..221b40f 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -26,6 +26,7 @@
 load(":board.rbc", board_init = "init")
 load(":board_input_vars.rbc", board_input_vars_init = "init")
 load("//build/make/tests/single_value_inheritance:test.rbc", test_single_value_inheritance = "test")
+load("//build/make/tests/single_value_inheritance_2:test.rbc", test_single_value_inheritance_2 = "test")
 load("//build/make/tests/artifact_path_requirements:test.rbc", test_artifact_path_requirements = "test")
 load("//build/make/tests/prefixed_sort_order:test.rbc", test_prefixed_sort_order = "test")
 load("//build/make/tests/inherits_in_regular_variables:test.rbc", test_inherits_in_regular_variables = "test")
@@ -181,6 +182,7 @@
 assert_eq("", g.get("NEWVAR"))
 
 test_single_value_inheritance()
+test_single_value_inheritance_2()
 test_artifact_path_requirements()
 test_prefixed_sort_order()
 test_inherits_in_regular_variables()
diff --git a/tests/single_value_inheritance_2/a.rbc b/tests/single_value_inheritance_2/a.rbc
new file mode 100644
index 0000000..fe186c7
--- /dev/null
+++ b/tests/single_value_inheritance_2/a.rbc
@@ -0,0 +1,20 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  cfg["PRODUCT_ENABLE_UFFD_GC"] = "true"
diff --git a/tests/single_value_inheritance_2/b.rbc b/tests/single_value_inheritance_2/b.rbc
new file mode 100644
index 0000000..7d95749
--- /dev/null
+++ b/tests/single_value_inheritance_2/b.rbc
@@ -0,0 +1,20 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  cfg["PRODUCT_ENABLE_UFFD_GC"] = "default"
diff --git a/tests/single_value_inheritance_2/c.rbc b/tests/single_value_inheritance_2/c.rbc
new file mode 100644
index 0000000..e90e37d
--- /dev/null
+++ b/tests/single_value_inheritance_2/c.rbc
@@ -0,0 +1,21 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":b.rbc", _b_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  rblf.inherit(handle, "test/b", _b_init)
diff --git a/tests/single_value_inheritance_2/d.rbc b/tests/single_value_inheritance_2/d.rbc
new file mode 100644
index 0000000..3a88c2c
--- /dev/null
+++ b/tests/single_value_inheritance_2/d.rbc
@@ -0,0 +1,23 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":c.rbc", _c_init = "init")
+load(":a.rbc", _a_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  rblf.inherit(handle, "test/a", _a_init)
+  rblf.inherit(handle, "test/c", _c_init)
diff --git a/tests/single_value_inheritance_2/product.rbc b/tests/single_value_inheritance_2/product.rbc
new file mode 100644
index 0000000..c47664d
--- /dev/null
+++ b/tests/single_value_inheritance_2/product.rbc
@@ -0,0 +1,23 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":b.rbc", _b_init = "init")
+load(":d.rbc", _d_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  rblf.inherit(handle, "test/b", _b_init)
+  rblf.inherit(handle, "test/d", _d_init)
diff --git a/tests/single_value_inheritance_2/test.rbc b/tests/single_value_inheritance_2/test.rbc
new file mode 100644
index 0000000..fa93aaa
--- /dev/null
+++ b/tests/single_value_inheritance_2/test.rbc
@@ -0,0 +1,40 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load("//build/make/tests/input_variables.rbc", input_variables_init = "init")
+load(":product.rbc", "init")
+
+
+def assert_eq(expected, actual):
+    if expected != actual:
+        fail("Expected '%s', got '%s'" % (expected, actual))
+
+# This test is testing that single value variables are "stolen" when processing the inheritance
+# graph. i.e. if you have a graph like this:
+#
+#   B   A
+#   |\  |
+#   | C |
+#    \ \|
+#     \ D
+#      \|
+#       E
+#
+# The same variable is defined in both A and B. In D, the value from A is chosen because it comes
+# alphabetically before C. But then in E, the value from D is chosen instead of the value from B,
+# because the value of B was "stolen" and sucked into C, leaving B with no value set.
+def test():
+    (globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+    assert_eq("true", globals["PRODUCTS.test/device.mk.PRODUCT_ENABLE_UFFD_GC"])
diff --git a/tools/Android.bp b/tools/Android.bp
index 0a55ed4..59831a6 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -115,3 +115,11 @@
         },
     },
 }
+
+python_binary_host {
+    name: "merge-event-log-tags",
+    srcs: [
+        "event_log_tags.py",
+        "merge-event-log-tags.py",
+    ],
+}
diff --git a/tools/aconfig/.editorconfig b/tools/aconfig/.editorconfig
new file mode 100644
index 0000000..cc5985f
--- /dev/null
+++ b/tools/aconfig/.editorconfig
@@ -0,0 +1,9 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+# top-most EditorConfig file
+root = true
+
+[*.java]
+indent_style = tab
+indent_size = 4
+
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
index 6bd0d06..bf5e1a9 100644
--- a/tools/aconfig/Cargo.toml
+++ b/tools/aconfig/Cargo.toml
@@ -2,6 +2,7 @@
 
 members = [
     "aconfig",
+    "aconfig_device_paths",
     "aconfig_protos",
     "aconfig_storage_file",
     "aconfig_storage_read_api",
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
index 638b92a..448d8cf 100644
--- a/tools/aconfig/TEST_MAPPING
+++ b/tools/aconfig/TEST_MAPPING
@@ -84,18 +84,26 @@
       "name": "aconfig_storage_write_api.test.rust"
     },
     {
+      // aconfig_storage write api cpp integration tests
+      "name": "aconfig_storage_write_api.test.cpp"
+    },
+    {
       // aconfig_storage read api rust integration tests
       "name": "aconfig_storage_read_api.test.rust"
     },
     {
       // aconfig_storage read api cpp integration tests
       "name": "aconfig_storage_read_api.test.cpp"
+    },
+    {
+      // aconfig_storage file cpp integration tests
+      "name": "aconfig_storage_file.test.cpp"
     }
   ],
   "postsubmit": [
     {
-      // aconfig_storage write api cpp integration tests
-      "name": "aconfig_storage_write_api.test.cpp"
+      // aconfig_storage read api java integration tests
+      "name": "aconfig_storage_read_api.test.java"
     }
   ]
 }
diff --git a/tools/aconfig/aconfig/Android.bp b/tools/aconfig/aconfig/Android.bp
index 00a6fee..68521af 100644
--- a/tools/aconfig/aconfig/Android.bp
+++ b/tools/aconfig/aconfig/Android.bp
@@ -161,6 +161,9 @@
     shared_libs: [
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
     test_suites: ["general-tests"],
 }
 
@@ -176,6 +179,9 @@
     shared_libs: [
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
     test_suites: ["general-tests"],
 }
 
@@ -199,6 +205,9 @@
     shared_libs: [
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
     test_suites: ["general-tests"],
 }
 */
@@ -215,6 +224,9 @@
     shared_libs: [
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
     test_suites: ["general-tests"],
 }
 
diff --git a/tools/aconfig/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs
index cd71b10..e743b2f 100644
--- a/tools/aconfig/aconfig/src/codegen/cpp.rs
+++ b/tools/aconfig/aconfig/src/codegen/cpp.rs
@@ -16,6 +16,7 @@
 
 use anyhow::{ensure, Result};
 use serde::Serialize;
+use std::collections::HashMap;
 use std::path::PathBuf;
 use tinytemplate::TinyTemplate;
 
@@ -29,13 +30,15 @@
     package: &str,
     parsed_flags_iter: I,
     codegen_mode: CodegenMode,
+    flag_ids: HashMap<String, u16>,
+    allow_instrumentation: bool,
 ) -> Result<Vec<OutputFile>>
 where
     I: Iterator<Item = ProtoParsedFlag>,
 {
     let mut readwrite_count = 0;
     let class_elements: Vec<ClassElement> = parsed_flags_iter
-        .map(|pf| create_class_element(package, &pf, &mut readwrite_count))
+        .map(|pf| create_class_element(package, &pf, flag_ids.clone(), &mut readwrite_count))
         .collect();
     let readwrite = readwrite_count > 0;
     let has_fixed_read_only = class_elements.iter().any(|item| item.is_fixed_read_only);
@@ -53,6 +56,7 @@
         readwrite_count,
         is_test_mode: codegen_mode == CodegenMode::Test,
         class_elements,
+        allow_instrumentation,
     };
 
     let files = [
@@ -96,6 +100,7 @@
     pub readwrite_count: i32,
     pub is_test_mode: bool,
     pub class_elements: Vec<ClassElement>,
+    pub allow_instrumentation: bool,
 }
 
 #[derive(Serialize)]
@@ -106,11 +111,18 @@
     pub default_value: String,
     pub flag_name: String,
     pub flag_macro: String,
+    pub flag_offset: u16,
     pub device_config_namespace: String,
     pub device_config_flag: String,
+    pub container: String,
 }
 
-fn create_class_element(package: &str, pf: &ProtoParsedFlag, rw_count: &mut i32) -> ClassElement {
+fn create_class_element(
+    package: &str,
+    pf: &ProtoParsedFlag,
+    flag_ids: HashMap<String, u16>,
+    rw_count: &mut i32,
+) -> ClassElement {
     ClassElement {
         readwrite_idx: if pf.permission() == ProtoFlagPermission::READ_WRITE {
             let index = *rw_count;
@@ -128,9 +140,11 @@
         },
         flag_name: pf.name().to_string(),
         flag_macro: pf.name().to_uppercase(),
+        flag_offset: *flag_ids.get(pf.name()).expect("values checked at flag parse time"),
         device_config_namespace: pf.namespace().to_string(),
         device_config_flag: codegen::create_device_config_ident(package, pf.name())
             .expect("values checked at flag parse time"),
+        container: pf.container().to_string(),
     }
 }
 
@@ -1162,18 +1176,27 @@
     return true;
 }
 "#;
+    use crate::commands::assign_flag_ids;
 
     fn test_generate_cpp_code(
         parsed_flags: ProtoParsedFlags,
         mode: CodegenMode,
         expected_header: &str,
         expected_src: &str,
+        allow_instrumentation: bool,
     ) {
         let modified_parsed_flags =
             crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
-        let generated =
-            generate_cpp_code(crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), mode)
-                .unwrap();
+        let flag_ids =
+            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
+        let generated = generate_cpp_code(
+            crate::test::TEST_PACKAGE,
+            modified_parsed_flags.into_iter(),
+            mode,
+            flag_ids,
+            allow_instrumentation,
+        )
+        .unwrap();
         let mut generated_files_map = HashMap::new();
         for file in generated {
             generated_files_map.insert(
@@ -1211,6 +1234,7 @@
             CodegenMode::Production,
             EXPORTED_PROD_HEADER_EXPECTED,
             PROD_SOURCE_FILE_EXPECTED,
+            false,
         );
     }
 
@@ -1222,6 +1246,7 @@
             CodegenMode::Test,
             EXPORTED_TEST_HEADER_EXPECTED,
             TEST_SOURCE_FILE_EXPECTED,
+            false,
         );
     }
 
@@ -1233,6 +1258,7 @@
             CodegenMode::Exported,
             EXPORTED_EXPORTED_HEADER_EXPECTED,
             EXPORTED_SOURCE_FILE_EXPECTED,
+            false,
         );
     }
 
@@ -1244,6 +1270,7 @@
             CodegenMode::ForceReadOnly,
             EXPORTED_FORCE_READ_ONLY_HEADER_EXPECTED,
             FORCE_READ_ONLY_SOURCE_FILE_EXPECTED,
+            false,
         );
     }
 
@@ -1255,6 +1282,7 @@
             CodegenMode::Production,
             READ_ONLY_EXPORTED_PROD_HEADER_EXPECTED,
             READ_ONLY_PROD_SOURCE_FILE_EXPECTED,
+            false,
         );
     }
 }
diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
index 18a4be5..3360ddd 100644
--- a/tools/aconfig/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -64,20 +64,27 @@
         include_str!("../../templates/FeatureFlags.java.template"),
     )?;
     template.add_template(
+        "CustomFeatureFlags.java",
+        include_str!("../../templates/CustomFeatureFlags.java.template"),
+    )?;
+    template.add_template(
         "FakeFeatureFlagsImpl.java",
         include_str!("../../templates/FakeFeatureFlagsImpl.java.template"),
     )?;
 
     let path: PathBuf = package.split('.').collect();
-    ["Flags.java", "FeatureFlags.java", "FeatureFlagsImpl.java", "FakeFeatureFlagsImpl.java"]
-        .iter()
-        .map(|file| {
-            Ok(OutputFile {
-                contents: template.render(file, &context)?.into(),
-                path: path.join(file),
-            })
-        })
-        .collect::<Result<Vec<OutputFile>>>()
+    [
+        "Flags.java",
+        "FeatureFlags.java",
+        "FeatureFlagsImpl.java",
+        "CustomFeatureFlags.java",
+        "FakeFeatureFlagsImpl.java",
+    ]
+    .iter()
+    .map(|file| {
+        Ok(OutputFile { contents: template.render(file, &context)?.into(), path: path.join(file) })
+    })
+    .collect::<Result<Vec<OutputFile>>>()
 }
 
 fn gen_flags_by_namespace(flags: &[FlagElement]) -> Vec<NamespaceFlags> {
@@ -292,76 +299,82 @@
         }
     "#;
 
-    const EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT: &str = r#"
+    const EXPECTED_CUSTOMFEATUREFLAGS_CONTENT: &str = r#"
     package com.android.aconfig.test;
+
     // TODO(b/303773055): Remove the annotation after access issue is resolved.
     import android.compat.annotation.UnsupportedAppUsage;
     import java.util.Arrays;
-    import java.util.HashMap;
     import java.util.HashSet;
-    import java.util.Map;
+    import java.util.List;
     import java.util.Set;
+    import java.util.function.BiPredicate;
+    import java.util.function.Predicate;
+
     /** @hide */
-    public class FakeFeatureFlagsImpl implements FeatureFlags {
-        public FakeFeatureFlagsImpl() {
-            resetAll();
+    public class CustomFeatureFlags implements FeatureFlags {
+
+        private BiPredicate<String, Predicate<FeatureFlags>> mGetValueImpl;
+
+        public CustomFeatureFlags(BiPredicate<String, Predicate<FeatureFlags>> getValueImpl) {
+            mGetValueImpl = getValueImpl;
         }
+
         @Override
         @UnsupportedAppUsage
         public boolean disabledRo() {
-            return getValue(Flags.FLAG_DISABLED_RO);
+            return getValue(Flags.FLAG_DISABLED_RO,
+                    FeatureFlags::disabledRo);
         }
         @Override
         @UnsupportedAppUsage
         public boolean disabledRw() {
-            return getValue(Flags.FLAG_DISABLED_RW);
+            return getValue(Flags.FLAG_DISABLED_RW,
+                FeatureFlags::disabledRw);
         }
         @Override
         @UnsupportedAppUsage
         public boolean disabledRwExported() {
-            return getValue(Flags.FLAG_DISABLED_RW_EXPORTED);
+            return getValue(Flags.FLAG_DISABLED_RW_EXPORTED,
+                FeatureFlags::disabledRwExported);
         }
         @Override
         @UnsupportedAppUsage
         public boolean disabledRwInOtherNamespace() {
-            return getValue(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE);
+            return getValue(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE,
+                FeatureFlags::disabledRwInOtherNamespace);
         }
         @Override
         @UnsupportedAppUsage
         public boolean enabledFixedRo() {
-            return getValue(Flags.FLAG_ENABLED_FIXED_RO);
+            return getValue(Flags.FLAG_ENABLED_FIXED_RO,
+                FeatureFlags::enabledFixedRo);
         }
         @Override
         @UnsupportedAppUsage
         public boolean enabledFixedRoExported() {
-            return getValue(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED);
+            return getValue(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED,
+                FeatureFlags::enabledFixedRoExported);
         }
         @Override
         @UnsupportedAppUsage
         public boolean enabledRo() {
-            return getValue(Flags.FLAG_ENABLED_RO);
+            return getValue(Flags.FLAG_ENABLED_RO,
+                FeatureFlags::enabledRo);
         }
         @Override
         @UnsupportedAppUsage
         public boolean enabledRoExported() {
-            return getValue(Flags.FLAG_ENABLED_RO_EXPORTED);
+            return getValue(Flags.FLAG_ENABLED_RO_EXPORTED,
+                FeatureFlags::enabledRoExported);
         }
         @Override
         @UnsupportedAppUsage
         public boolean enabledRw() {
-            return getValue(Flags.FLAG_ENABLED_RW);
+            return getValue(Flags.FLAG_ENABLED_RW,
+                FeatureFlags::enabledRw);
         }
-        public void setFlag(String flagName, boolean value) {
-            if (!this.mFlagMap.containsKey(flagName)) {
-                throw new IllegalArgumentException("no such flag " + flagName);
-            }
-            this.mFlagMap.put(flagName, value);
-        }
-        public void resetAll() {
-            for (Map.Entry entry : mFlagMap.entrySet()) {
-                entry.setValue(null);
-            }
-        }
+
         public boolean isFlagReadOnlyOptimized(String flagName) {
             if (mReadOnlyFlagsSet.contains(flagName) &&
                 isOptimizationEnabled()) {
@@ -369,30 +382,30 @@
             }
             return false;
         }
+
         @com.android.aconfig.annotations.AssumeTrueForR8
         private boolean isOptimizationEnabled() {
             return false;
         }
-        private boolean getValue(String flagName) {
-            Boolean value = this.mFlagMap.get(flagName);
-            if (value == null) {
-                throw new IllegalArgumentException(flagName + " is not set");
-            }
-            return value;
+
+        protected boolean getValue(String flagName, Predicate<FeatureFlags> getter) {
+            return mGetValueImpl.test(flagName, getter);
         }
-        private Map<String, Boolean> mFlagMap = new HashMap<>(
-            Map.ofEntries(
-                Map.entry(Flags.FLAG_DISABLED_RO, false),
-                Map.entry(Flags.FLAG_DISABLED_RW, false),
-                Map.entry(Flags.FLAG_DISABLED_RW_EXPORTED, false),
-                Map.entry(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false),
-                Map.entry(Flags.FLAG_ENABLED_FIXED_RO, false),
-                Map.entry(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED, false),
-                Map.entry(Flags.FLAG_ENABLED_RO, false),
-                Map.entry(Flags.FLAG_ENABLED_RO_EXPORTED, false),
-                Map.entry(Flags.FLAG_ENABLED_RW, false)
-            )
-        );
+
+        public List<String> getFlagNames() {
+            return Arrays.asList(
+                Flags.FLAG_DISABLED_RO,
+                Flags.FLAG_DISABLED_RW,
+                Flags.FLAG_DISABLED_RW_EXPORTED,
+                Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE,
+                Flags.FLAG_ENABLED_FIXED_RO,
+                Flags.FLAG_ENABLED_FIXED_RO_EXPORTED,
+                Flags.FLAG_ENABLED_RO,
+                Flags.FLAG_ENABLED_RO_EXPORTED,
+                Flags.FLAG_ENABLED_RW
+            );
+        }
+
         private Set<String> mReadOnlyFlagsSet = new HashSet<>(
             Arrays.asList(
                 Flags.FLAG_DISABLED_RO,
@@ -406,6 +419,58 @@
     }
     "#;
 
+    const EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT: &str = r#"
+    package com.android.aconfig.test;
+
+    import java.util.HashMap;
+    import java.util.Map;
+    import java.util.function.Predicate;
+
+    /** @hide */
+    public class FakeFeatureFlagsImpl extends CustomFeatureFlags {
+        private final Map<String, Boolean> mFlagMap = new HashMap<>();
+        private final FeatureFlags mDefaults;
+
+        public FakeFeatureFlagsImpl() {
+            this(null);
+        }
+
+        public FakeFeatureFlagsImpl(FeatureFlags defaults) {
+            super(null);
+            mDefaults = defaults;
+            // Initialize the map with null values
+            for (String flagName : getFlagNames()) {
+                mFlagMap.put(flagName, null);
+            }
+        }
+
+        @Override
+        protected boolean getValue(String flagName, Predicate<FeatureFlags> getter) {
+            Boolean value = this.mFlagMap.get(flagName);
+            if (value != null) {
+                return value;
+            }
+            if (mDefaults != null) {
+                return getter.test(mDefaults);
+            }
+            throw new IllegalArgumentException(flagName + " is not set");
+        }
+
+        public void setFlag(String flagName, boolean value) {
+            if (!this.mFlagMap.containsKey(flagName)) {
+                throw new IllegalArgumentException("no such flag " + flagName);
+            }
+            this.mFlagMap.put(flagName, value);
+        }
+
+        public void resetAll() {
+            for (Map.Entry entry : mFlagMap.entrySet()) {
+                entry.setValue(null);
+            }
+        }
+    }
+    "#;
+
     #[test]
     fn test_generate_java_code_production() {
         let parsed_flags = crate::test::parse_test_flags();
@@ -549,6 +614,10 @@
             ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
             ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
             (
+                "com/android/aconfig/test/CustomFeatureFlags.java",
+                EXPECTED_CUSTOMFEATUREFLAGS_CONTENT,
+            ),
+            (
                 "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
                 EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
             ),
@@ -671,55 +740,53 @@
             }
         }"#;
 
-        let expect_fake_feature_flags_impl_content = r#"
+        let expect_custom_feature_flags_content = r#"
         package com.android.aconfig.test;
+
         import java.util.Arrays;
-        import java.util.HashMap;
         import java.util.HashSet;
-        import java.util.Map;
+        import java.util.List;
         import java.util.Set;
+        import java.util.function.BiPredicate;
+        import java.util.function.Predicate;
+
         /** @hide */
-        public class FakeFeatureFlagsImpl implements FeatureFlags {
-            public FakeFeatureFlagsImpl() {
-                resetAll();
+        public class CustomFeatureFlags implements FeatureFlags {
+
+            private BiPredicate<String, Predicate<FeatureFlags>> mGetValueImpl;
+
+            public CustomFeatureFlags(BiPredicate<String, Predicate<FeatureFlags>> getValueImpl) {
+                mGetValueImpl = getValueImpl;
             }
+
             @Override
             public boolean disabledRwExported() {
-                return getValue(Flags.FLAG_DISABLED_RW_EXPORTED);
+                return getValue(Flags.FLAG_DISABLED_RW_EXPORTED,
+                    FeatureFlags::disabledRwExported);
             }
             @Override
             public boolean enabledFixedRoExported() {
-                return getValue(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED);
+                return getValue(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED,
+                    FeatureFlags::enabledFixedRoExported);
             }
             @Override
             public boolean enabledRoExported() {
-                return getValue(Flags.FLAG_ENABLED_RO_EXPORTED);
+                return getValue(Flags.FLAG_ENABLED_RO_EXPORTED,
+                    FeatureFlags::enabledRoExported);
             }
-            public void setFlag(String flagName, boolean value) {
-                if (!this.mFlagMap.containsKey(flagName)) {
-                    throw new IllegalArgumentException("no such flag " + flagName);
-                }
-                this.mFlagMap.put(flagName, value);
+
+            protected boolean getValue(String flagName, Predicate<FeatureFlags> getter) {
+                return mGetValueImpl.test(flagName, getter);
             }
-            public void resetAll() {
-                for (Map.Entry entry : mFlagMap.entrySet()) {
-                    entry.setValue(null);
-                }
+
+            public List<String> getFlagNames() {
+                return Arrays.asList(
+                    Flags.FLAG_DISABLED_RW_EXPORTED,
+                    Flags.FLAG_ENABLED_FIXED_RO_EXPORTED,
+                    Flags.FLAG_ENABLED_RO_EXPORTED
+                );
             }
-            private boolean getValue(String flagName) {
-                Boolean value = this.mFlagMap.get(flagName);
-                if (value == null) {
-                    throw new IllegalArgumentException(flagName + " is not set");
-                }
-                return value;
-            }
-            private Map<String, Boolean> mFlagMap = new HashMap<>(
-                Map.ofEntries(
-                    Map.entry(Flags.FLAG_DISABLED_RW_EXPORTED, false),
-                    Map.entry(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED, false),
-                    Map.entry(Flags.FLAG_ENABLED_RO_EXPORTED, false)
-                )
-            );
+
             private Set<String> mReadOnlyFlagsSet = new HashSet<>(
                 Arrays.asList(
                     ""
@@ -733,8 +800,12 @@
             ("com/android/aconfig/test/FeatureFlags.java", expect_feature_flags_content),
             ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_feature_flags_impl_content),
             (
+                "com/android/aconfig/test/CustomFeatureFlags.java",
+                expect_custom_feature_flags_content,
+            ),
+            (
                 "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
-                expect_fake_feature_flags_impl_content,
+                EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
             ),
         ]);
 
@@ -854,6 +925,10 @@
             ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
             ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
             (
+                "com/android/aconfig/test/CustomFeatureFlags.java",
+                EXPECTED_CUSTOMFEATUREFLAGS_CONTENT,
+            ),
+            (
                 "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
                 EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
             ),
@@ -1020,61 +1095,64 @@
             private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
         }"#;
 
-        let expect_fakefeatureflags_content = r#"
+        let expect_customfeatureflags_content = r#"
         package com.android.aconfig.test;
+
         // TODO(b/303773055): Remove the annotation after access issue is resolved.
         import android.compat.annotation.UnsupportedAppUsage;
         import java.util.Arrays;
-        import java.util.HashMap;
         import java.util.HashSet;
-        import java.util.Map;
+        import java.util.List;
         import java.util.Set;
+        import java.util.function.BiPredicate;
+        import java.util.function.Predicate;
+
         /** @hide */
-        public class FakeFeatureFlagsImpl implements FeatureFlags {
-            public FakeFeatureFlagsImpl() {
-                resetAll();
+        public class CustomFeatureFlags implements FeatureFlags {
+
+            private BiPredicate<String, Predicate<FeatureFlags>> mGetValueImpl;
+
+            public CustomFeatureFlags(BiPredicate<String, Predicate<FeatureFlags>> getValueImpl) {
+                mGetValueImpl = getValueImpl;
             }
+
             @Override
             @UnsupportedAppUsage
             public boolean disabledRo() {
-                return getValue(Flags.FLAG_DISABLED_RO);
+                return getValue(Flags.FLAG_DISABLED_RO,
+                        FeatureFlags::disabledRo);
             }
             @Override
             @UnsupportedAppUsage
             public boolean disabledRw() {
-                return getValue(Flags.FLAG_DISABLED_RW);
+                return getValue(Flags.FLAG_DISABLED_RW,
+                    FeatureFlags::disabledRw);
             }
             @Override
             @UnsupportedAppUsage
             public boolean disabledRwInOtherNamespace() {
-                return getValue(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE);
+                return getValue(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE,
+                    FeatureFlags::disabledRwInOtherNamespace);
             }
             @Override
             @UnsupportedAppUsage
             public boolean enabledFixedRo() {
-                return getValue(Flags.FLAG_ENABLED_FIXED_RO);
+                return getValue(Flags.FLAG_ENABLED_FIXED_RO,
+                    FeatureFlags::enabledFixedRo);
             }
             @Override
             @UnsupportedAppUsage
             public boolean enabledRo() {
-                return getValue(Flags.FLAG_ENABLED_RO);
+                return getValue(Flags.FLAG_ENABLED_RO,
+                    FeatureFlags::enabledRo);
             }
             @Override
             @UnsupportedAppUsage
             public boolean enabledRw() {
-                return getValue(Flags.FLAG_ENABLED_RW);
+                return getValue(Flags.FLAG_ENABLED_RW,
+                    FeatureFlags::enabledRw);
             }
-            public void setFlag(String flagName, boolean value) {
-                if (!this.mFlagMap.containsKey(flagName)) {
-                    throw new IllegalArgumentException("no such flag " + flagName);
-                }
-                this.mFlagMap.put(flagName, value);
-            }
-            public void resetAll() {
-                for (Map.Entry entry : mFlagMap.entrySet()) {
-                    entry.setValue(null);
-                }
-            }
+
             public boolean isFlagReadOnlyOptimized(String flagName) {
                 if (mReadOnlyFlagsSet.contains(flagName) &&
                     isOptimizationEnabled()) {
@@ -1082,27 +1160,27 @@
                 }
                 return false;
             }
+
             @com.android.aconfig.annotations.AssumeTrueForR8
             private boolean isOptimizationEnabled() {
                 return false;
             }
-            private boolean getValue(String flagName) {
-                Boolean value = this.mFlagMap.get(flagName);
-                if (value == null) {
-                    throw new IllegalArgumentException(flagName + " is not set");
-                }
-                return value;
+
+            protected boolean getValue(String flagName, Predicate<FeatureFlags> getter) {
+                return mGetValueImpl.test(flagName, getter);
             }
-            private Map<String, Boolean> mFlagMap = new HashMap<>(
-                Map.ofEntries(
-                    Map.entry(Flags.FLAG_DISABLED_RO, false),
-                    Map.entry(Flags.FLAG_DISABLED_RW, false),
-                    Map.entry(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false),
-                    Map.entry(Flags.FLAG_ENABLED_FIXED_RO, false),
-                    Map.entry(Flags.FLAG_ENABLED_RO, false),
-                    Map.entry(Flags.FLAG_ENABLED_RW, false)
-                )
-            );
+
+            public List<String> getFlagNames() {
+                return Arrays.asList(
+                    Flags.FLAG_DISABLED_RO,
+                    Flags.FLAG_DISABLED_RW,
+                    Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE,
+                    Flags.FLAG_ENABLED_FIXED_RO,
+                    Flags.FLAG_ENABLED_RO,
+                    Flags.FLAG_ENABLED_RW
+                );
+            }
+
             private Set<String> mReadOnlyFlagsSet = new HashSet<>(
                 Arrays.asList(
                     Flags.FLAG_DISABLED_RO,
@@ -1116,11 +1194,16 @@
             );
         }
         "#;
+
         let mut file_set = HashMap::from([
             ("com/android/aconfig/test/Flags.java", expect_flags_content),
             ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
             ("com/android/aconfig/test/FeatureFlags.java", expect_featureflags_content),
-            ("com/android/aconfig/test/FakeFeatureFlagsImpl.java", expect_fakefeatureflags_content),
+            ("com/android/aconfig/test/CustomFeatureFlags.java", expect_customfeatureflags_content),
+            (
+                "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
+                EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
+            ),
         ]);
 
         for file in generated_files {
diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
index 7736ce7..6945fd4 100644
--- a/tools/aconfig/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -202,7 +202,11 @@
     generate_java_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
 }
 
-pub fn create_cpp_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<Vec<OutputFile>> {
+pub fn create_cpp_lib(
+    mut input: Input,
+    codegen_mode: CodegenMode,
+    allow_instrumentation: bool,
+) -> Result<Vec<OutputFile>> {
     // TODO(327420679): Enable export mode for native flag library
     ensure!(
         codegen_mode != CodegenMode::Exported,
@@ -214,8 +218,14 @@
         bail!("no parsed flags, or the parsed flags use different packages");
     };
     let package = package.to_string();
-    let _flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
-    generate_cpp_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
+    let flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
+    generate_cpp_code(
+        &package,
+        modified_parsed_flags.into_iter(),
+        codegen_mode,
+        flag_ids,
+        allow_instrumentation,
+    )
 }
 
 pub fn create_rust_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<OutputFile> {
@@ -239,13 +249,8 @@
     container: &str,
     file: &StorageFileType,
 ) -> Result<Vec<u8>> {
-    let parsed_flags_vec: Vec<ProtoParsedFlags> = caches
-        .into_iter()
-        .map(|mut input| input.try_parse_flags())
-        .collect::<Result<Vec<_>>>()?
-        .into_iter()
-        .filter(|pfs| find_unique_container(pfs) == Some(container))
-        .collect();
+    let parsed_flags_vec: Vec<ProtoParsedFlags> =
+        caches.into_iter().map(|mut input| input.try_parse_flags()).collect::<Result<Vec<_>>>()?;
     generate_storage_file(container, parsed_flags_vec.iter(), file)
 }
 
@@ -324,14 +329,6 @@
     Some(package)
 }
 
-fn find_unique_container(parsed_flags: &ProtoParsedFlags) -> Option<&str> {
-    let container = parsed_flags.parsed_flag.first().map(|pf| pf.container())?;
-    if parsed_flags.parsed_flag.iter().any(|pf| pf.container() != container) {
-        return None;
-    }
-    Some(container)
-}
-
 pub fn modify_parsed_flags_based_on_mode(
     parsed_flags: ProtoParsedFlags,
     codegen_mode: CodegenMode,
diff --git a/tools/aconfig/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs
index 69f5458..72be1c9 100644
--- a/tools/aconfig/aconfig/src/main.rs
+++ b/tools/aconfig/aconfig/src/main.rs
@@ -83,6 +83,12 @@
                         .long("mode")
                         .value_parser(EnumValueParser::<CodegenMode>::new())
                         .default_value("production"),
+                )
+                .arg(
+                    Arg::new("allow-instrumentation")
+                        .long("allow-instrumentation")
+                        .value_parser(clap::value_parser!(bool))
+                        .default_value("false"),
                 ),
         )
         .subcommand(
@@ -241,8 +247,10 @@
         Some(("create-cpp-lib", sub_matches)) => {
             let cache = open_single_file(sub_matches, "cache")?;
             let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?;
-            let generated_files =
-                commands::create_cpp_lib(cache, *mode).context("failed to create cpp lib")?;
+            let allow_instrumentation =
+                get_required_arg::<bool>(sub_matches, "allow-instrumentation")?;
+            let generated_files = commands::create_cpp_lib(cache, *mode, *allow_instrumentation)
+                .context("failed to create cpp lib")?;
             let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
             generated_files
                 .iter()
diff --git a/tools/aconfig/aconfig/src/storage/flag_table.rs b/tools/aconfig/aconfig/src/storage/flag_table.rs
index b339821..a971211 100644
--- a/tools/aconfig/aconfig/src/storage/flag_table.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_table.rs
@@ -48,7 +48,7 @@
         package_id: u32,
         flag_name: &str,
         flag_type: StoredFlagType,
-        flag_id: u16,
+        flag_index: u16,
         num_buckets: u32,
     ) -> Self {
         let bucket_index = FlagTableNode::find_bucket_index(package_id, flag_name, num_buckets);
@@ -56,7 +56,7 @@
             package_id,
             flag_name: flag_name.to_string(),
             flag_type,
-            flag_id,
+            flag_index,
             next_offset: None,
         };
         Self { node, bucket_index }
diff --git a/tools/aconfig/aconfig/src/storage/flag_value.rs b/tools/aconfig/aconfig/src/storage/flag_value.rs
index a37ad9f..c15ba54 100644
--- a/tools/aconfig/aconfig/src/storage/flag_value.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_value.rs
@@ -41,14 +41,14 @@
     };
 
     for pkg in packages.iter() {
-        let start_offset = pkg.boolean_offset as usize;
+        let start_index = pkg.boolean_start_index as usize;
         let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?;
         for pf in pkg.boolean_flags.iter() {
             let fid = flag_ids
                 .get(pf.name())
                 .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?;
 
-            list.booleans[start_offset + (*fid as usize)] = pf.state() == ProtoFlagState::ENABLED;
+            list.booleans[start_index + (*fid as usize)] = pf.state() == ProtoFlagState::ENABLED;
         }
     }
 
diff --git a/tools/aconfig/aconfig/src/storage/mod.rs b/tools/aconfig/aconfig/src/storage/mod.rs
index 30517de..73339f2 100644
--- a/tools/aconfig/aconfig/src/storage/mod.rs
+++ b/tools/aconfig/aconfig/src/storage/mod.rs
@@ -33,9 +33,9 @@
     pub package_id: u32,
     pub flag_names: HashSet<&'a str>,
     pub boolean_flags: Vec<&'a ProtoParsedFlag>,
-    // offset of the first boolean flag in this flag package with respect to the start of
-    // boolean flag value array in the flag value file
-    pub boolean_offset: u32,
+    // The index of the first boolean flag in this aconfig package among all boolean
+    // flags in this container.
+    pub boolean_start_index: u32,
 }
 
 impl<'a> FlagPackage<'a> {
@@ -45,7 +45,7 @@
             package_id,
             flag_names: HashSet::new(),
             boolean_flags: vec![],
-            boolean_offset: 0,
+            boolean_start_index: 0,
         }
     }
 
@@ -73,12 +73,11 @@
         }
     }
 
-    // calculate package flag value start offset, in flag value file, each boolean
-    // is stored as a single byte
-    let mut boolean_offset = 0;
+    // calculate the boolean flag start index for each package
+    let mut boolean_start_index = 0;
     for p in packages.iter_mut() {
-        p.boolean_offset = boolean_offset;
-        boolean_offset += p.boolean_flags.len() as u32;
+        p.boolean_start_index = boolean_start_index;
+        boolean_start_index += p.boolean_flags.len() as u32;
     }
 
     packages
@@ -184,21 +183,21 @@
         assert!(packages[0].flag_names.contains("enabled_rw"));
         assert!(packages[0].flag_names.contains("disabled_rw"));
         assert!(packages[0].flag_names.contains("enabled_ro"));
-        assert_eq!(packages[0].boolean_offset, 0);
+        assert_eq!(packages[0].boolean_start_index, 0);
 
         assert_eq!(packages[1].package_name, "com.android.aconfig.storage.test_2");
         assert_eq!(packages[1].package_id, 1);
         assert_eq!(packages[1].flag_names.len(), 3);
         assert!(packages[1].flag_names.contains("enabled_ro"));
-        assert!(packages[1].flag_names.contains("disabled_ro"));
+        assert!(packages[1].flag_names.contains("disabled_rw"));
         assert!(packages[1].flag_names.contains("enabled_fixed_ro"));
-        assert_eq!(packages[1].boolean_offset, 3);
+        assert_eq!(packages[1].boolean_start_index, 3);
 
         assert_eq!(packages[2].package_name, "com.android.aconfig.storage.test_4");
         assert_eq!(packages[2].package_id, 2);
         assert_eq!(packages[2].flag_names.len(), 2);
-        assert!(packages[2].flag_names.contains("enabled_ro"));
+        assert!(packages[2].flag_names.contains("enabled_rw"));
         assert!(packages[2].flag_names.contains("enabled_fixed_ro"));
-        assert_eq!(packages[2].boolean_offset, 6);
+        assert_eq!(packages[2].boolean_start_index, 6);
     }
 }
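Note: the rename from boolean_offset to boolean_start_index above is purely a naming change; each package still receives the running sum of the boolean flag counts of the packages processed before it, which is why the test expects start indexes 0, 3 and 6 for packages with 3, 3 and 2 boolean flags. A minimal sketch of that running-sum assignment (hypothetical Java, not part of this change):

    import java.util.Arrays;
    import java.util.List;

    class BooleanStartIndexSketch {
        // Each package's start index is the total number of boolean flags in
        // all packages that came before it.
        static int[] startIndexes(List<Integer> booleanFlagsPerPackage) {
            int[] out = new int[booleanFlagsPerPackage.size()];
            int next = 0;
            for (int i = 0; i < booleanFlagsPerPackage.size(); i++) {
                out[i] = next;
                next += booleanFlagsPerPackage.get(i);
            }
            return out;
        }

        public static void main(String[] args) {
            // Mirrors the test above: three packages with 3, 3 and 2 boolean flags.
            System.out.println(Arrays.toString(startIndexes(List.of(3, 3, 2)))); // [0, 3, 6]
        }
    }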
diff --git a/tools/aconfig/aconfig/src/storage/package_table.rs b/tools/aconfig/aconfig/src/storage/package_table.rs
index 0a3df77..c53602f 100644
--- a/tools/aconfig/aconfig/src/storage/package_table.rs
+++ b/tools/aconfig/aconfig/src/storage/package_table.rs
@@ -48,7 +48,7 @@
         let node = PackageTableNode {
             package_name: String::from(package.package_name),
             package_id: package.package_id,
-            boolean_offset: package.boolean_offset,
+            boolean_start_index: package.boolean_start_index,
             next_offset: None,
         };
         let bucket_index = PackageTableNode::find_bucket_index(package.package_name, num_buckets);
diff --git a/tools/aconfig/aconfig/templates/CustomFeatureFlags.java.template b/tools/aconfig/aconfig/templates/CustomFeatureFlags.java.template
new file mode 100644
index 0000000..b82b9cb
--- /dev/null
+++ b/tools/aconfig/aconfig/templates/CustomFeatureFlags.java.template
@@ -0,0 +1,70 @@
+package {package_name};
+
+{{ if not library_exported- }}
+// TODO(b/303773055): Remove the annotation after access issue is resolved.
+import android.compat.annotation.UnsupportedAppUsage;
+{{ -endif }}
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.function.BiPredicate;
+import java.util.function.Predicate;
+
+/** @hide */
+public class CustomFeatureFlags implements FeatureFlags \{
+
+    private BiPredicate<String, Predicate<FeatureFlags>> mGetValueImpl;
+
+    public CustomFeatureFlags(BiPredicate<String, Predicate<FeatureFlags>> getValueImpl) \{
+        mGetValueImpl = getValueImpl;
+    }
+
+{{ -for item in flag_elements}}
+    @Override
+{{ if not library_exported }}    @UnsupportedAppUsage{{ -endif }}
+    public boolean {item.method_name}() \{
+        return getValue(Flags.FLAG_{item.flag_name_constant_suffix},
+            FeatureFlags::{item.method_name});
+    }
+{{ endfor }}
+
+{{ -if not library_exported }}
+    public boolean isFlagReadOnlyOptimized(String flagName) \{
+        if (mReadOnlyFlagsSet.contains(flagName) &&
+            isOptimizationEnabled()) \{
+                return true;
+        }
+        return false;
+    }
+
+    @com.android.aconfig.annotations.AssumeTrueForR8
+    private boolean isOptimizationEnabled() \{
+        return false;
+    }
+{{ -endif }}
+
+    protected boolean getValue(String flagName, Predicate<FeatureFlags> getter) \{
+        return mGetValueImpl.test(flagName, getter);
+    }
+
+    public List<String> getFlagNames() \{
+        return Arrays.asList(
+            {{ -for item in flag_elements }}
+            Flags.FLAG_{item.flag_name_constant_suffix}
+            {{ -if not @last }},{{ endif }}
+            {{ -endfor }}
+        );
+    }
+
+    private Set<String> mReadOnlyFlagsSet = new HashSet<>(
+        Arrays.asList(
+            {{ -for item in flag_elements }}
+            {{ -if not item.is_read_write }}
+            Flags.FLAG_{item.flag_name_constant_suffix},
+            {{ -endif }}
+            {{ -endfor }}
+            ""{# The empty string here is to resolve the ending comma #}
+        )
+    );
+}
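The template above replaces the old per-flag map lookup with a single delegation point: every generated getter calls getValue(flagName, getter), which forwards to the BiPredicate supplied at construction time. A minimal sketch of that pattern with a hypothetical one-flag interface (illustrative names only, not the generated API):

    import java.util.function.BiPredicate;
    import java.util.function.Predicate;

    interface SketchFeatureFlags {
        boolean myFlag();
    }

    class CustomFeatureFlagsSketch implements SketchFeatureFlags {
        private final BiPredicate<String, Predicate<SketchFeatureFlags>> getValueImpl;

        CustomFeatureFlagsSketch(BiPredicate<String, Predicate<SketchFeatureFlags>> getValueImpl) {
            this.getValueImpl = getValueImpl;
        }

        @Override
        public boolean myFlag() {
            // Every generated getter funnels through the injected predicate.
            return getValue("my_flag", SketchFeatureFlags::myFlag);
        }

        protected boolean getValue(String flagName, Predicate<SketchFeatureFlags> getter) {
            return getValueImpl.test(flagName, getter);
        }

        public static void main(String[] args) {
            // A trivial implementation that forces every flag to true.
            SketchFeatureFlags flags = new CustomFeatureFlagsSketch((name, getter) -> true);
            System.out.println(flags.myFlag()); // true
        }
    }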
diff --git a/tools/aconfig/aconfig/templates/FakeFeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FakeFeatureFlagsImpl.java.template
index 177e711..290d2c4 100644
--- a/tools/aconfig/aconfig/templates/FakeFeatureFlagsImpl.java.template
+++ b/tools/aconfig/aconfig/templates/FakeFeatureFlagsImpl.java.template
@@ -1,27 +1,39 @@
 package {package_name};
-{{ if not library_exported- }}
-// TODO(b/303773055): Remove the annotation after access issue is resolved.
-import android.compat.annotation.UnsupportedAppUsage;
-{{ -endif }}
-import java.util.Arrays;
+
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
-import java.util.Set;
+import java.util.function.Predicate;
 
 /** @hide */
-public class FakeFeatureFlagsImpl implements FeatureFlags \{
+public class FakeFeatureFlagsImpl extends CustomFeatureFlags \{
+    private final Map<String, Boolean> mFlagMap = new HashMap<>();
+    private final FeatureFlags mDefaults;
+
     public FakeFeatureFlagsImpl() \{
-        resetAll();
+        this(null);
     }
 
-{{ for item in flag_elements}}
-    @Override
-{{ if not library_exported }}    @UnsupportedAppUsage{{ -endif }}
-    public boolean {item.method_name}() \{
-        return getValue(Flags.FLAG_{item.flag_name_constant_suffix});
+    public FakeFeatureFlagsImpl(FeatureFlags defaults) \{
+        super(null);
+        mDefaults = defaults;
+        // Initialize the map with null values
+        for (String flagName : getFlagNames()) \{
+            mFlagMap.put(flagName, null);
+        }
     }
-{{ endfor}}
+
+    @Override
+    protected boolean getValue(String flagName, Predicate<FeatureFlags> getter) \{
+        Boolean value = this.mFlagMap.get(flagName);
+        if (value != null) \{
+            return value;
+        }
+        if (mDefaults != null) \{
+            return getter.test(mDefaults);
+        }
+        throw new IllegalArgumentException(flagName + " is not set");
+    }
+
     public void setFlag(String flagName, boolean value) \{
         if (!this.mFlagMap.containsKey(flagName)) \{
             throw new IllegalArgumentException("no such flag " + flagName);
@@ -34,46 +46,4 @@
             entry.setValue(null);
         }
     }
-{{ if not library_exported }}
-    public boolean isFlagReadOnlyOptimized(String flagName) \{
-        if (mReadOnlyFlagsSet.contains(flagName) &&
-            isOptimizationEnabled()) \{
-                return true;
-        }
-        return false;
-    }
-
-    @com.android.aconfig.annotations.AssumeTrueForR8
-    private boolean isOptimizationEnabled() \{
-        return false;
-    }
-{{ -endif }}
-    private boolean getValue(String flagName) \{
-        Boolean value = this.mFlagMap.get(flagName);
-        if (value == null) \{
-            throw new IllegalArgumentException(flagName + " is not set");
-        }
-        return value;
-    }
-
-
-    private Map<String, Boolean> mFlagMap = new HashMap<>(
-        Map.ofEntries(
-            {{ -for item in flag_elements }}
-            Map.entry(Flags.FLAG_{item.flag_name_constant_suffix}, false)
-            {{ -if not @last }},{{ endif }}
-            {{ -endfor }}
-        )
-    );
-
-    private Set<String> mReadOnlyFlagsSet = new HashSet<>(
-        Arrays.asList(
-            {{ -for item in flag_elements }}
-            {{ -if not item.is_read_write }}
-            Flags.FLAG_{item.flag_name_constant_suffix},
-            {{ -endif }}
-            {{ -endfor }}
-            ""{# The empty string here is to resolve the ending comma #}
-        )
-    );
 }
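With the rewritten template above, FakeFeatureFlagsImpl extends CustomFeatureFlags and overrides getValue so that an explicit setFlag() value wins, an optional defaults implementation is consulted next, and only an unset flag with no defaults throws. A hedged usage sketch of that lookup order (hypothetical names, not the generated classes):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Predicate;

    interface DemoFlags {
        boolean myFlag();
    }

    class FakeFlagsSketch implements DemoFlags {
        private final Map<String, Boolean> overrides = new HashMap<>();
        private final DemoFlags defaults;

        FakeFlagsSketch(DemoFlags defaults) {
            this.defaults = defaults;
        }

        void setFlag(String name, boolean value) {
            overrides.put(name, value);
        }

        @Override
        public boolean myFlag() {
            return getValue("my_flag", DemoFlags::myFlag);
        }

        private boolean getValue(String name, Predicate<DemoFlags> getter) {
            Boolean value = overrides.get(name);
            if (value != null) return value;                     // explicit override wins
            if (defaults != null) return getter.test(defaults);  // fall back to defaults
            throw new IllegalArgumentException(name + " is not set");
        }

        public static void main(String[] args) {
            FakeFlagsSketch fake = new FakeFlagsSketch(() -> false); // defaults say false
            System.out.println(fake.myFlag()); // false, from defaults
            fake.setFlag("my_flag", true);
            System.out.println(fake.myFlag()); // true, explicit override
        }
    }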
diff --git a/tools/aconfig/aconfig/templates/cpp_source_file.template b/tools/aconfig/aconfig/templates/cpp_source_file.template
index 4bcd1b7..38dda7d 100644
--- a/tools/aconfig/aconfig/templates/cpp_source_file.template
+++ b/tools/aconfig/aconfig/templates/cpp_source_file.template
@@ -1,5 +1,15 @@
 #include "{header}.h"
 
+{{ if allow_instrumentation }}
+#include <sys/stat.h>
+#include "aconfig_storage/aconfig_storage_read_api.hpp"
+#include <android/log.h>
+
+#define ALOGI(msg, ...)                                                        \
+  __android_log_print(ANDROID_LOG_INFO, "AconfigTestMission1", (msg), __VA_ARGS__)
+
+{{ endif }}
+
 {{ if readwrite- }}
 #include <server_configurable_flags/get_flags.h>
 {{ endif }}
@@ -97,6 +107,62 @@
     {{ -if item.readwrite }}
     return {cpp_namespace}::{item.flag_name}();
     {{ -else }}
+    {{ if allow_instrumentation }}
+    auto result =
+        {{ if item.is_fixed_read_only }}
+	    {package_macro}_{item.flag_macro}
+	{{ else }}
+	    {item.default_value}
+	{{ endif }};
+
+    struct stat buffer;
+    if (stat("/metadata/aconfig_test_missions/mission_1", &buffer) != 0) \{
+        return result;
+    }
+
+    auto package_map_file = aconfig_storage::get_mapped_file(
+        "{item.container}",
+        aconfig_storage::StorageFileType::package_map);
+    if (!package_map_file.ok()) \{
+        ALOGI("error: failed to get package map file: %s", package_map_file.error().c_str());
+        return result;
+    }
+
+    auto package_read_context = aconfig_storage::get_package_read_context(
+        **package_map_file, "{package}");
+    if (!package_read_context.ok()) \{
+        ALOGI("error: failed to get package read context: %s", package_map_file.error().c_str());
+        return result;
+    }
+
+    delete *package_map_file;
+
+    auto flag_val_map = aconfig_storage::get_mapped_file(
+        "{item.container}",
+        aconfig_storage::StorageFileType::flag_val);
+    if (!flag_val_map.ok()) \{
+        ALOGI("error: failed to get flag val map: %s", package_map_file.error().c_str());
+        return result;
+    }
+
+    auto value = aconfig_storage::get_boolean_flag_value(
+        **flag_val_map,
+        package_read_context->boolean_start_index + {item.flag_offset});
+    if (!value.ok()) \{
+        ALOGI("error: failed to get flag val: %s", package_map_file.error().c_str());
+        return result;
+    }
+
+    delete *flag_val_map;
+
+    if (*value != result) \{
+        ALOGI("error: new storage value '%d' does not match current value '%d'", *value, result);
+    } else \{
+        ALOGI("success: new storage value was '%d, legacy storage was '%d'", *value, result);
+    }
+
+    return result;
+    {{ else }}
     {{ -if item.is_fixed_read_only }}
     return {package_macro}_{item.flag_macro};
     {{ -else }}
@@ -104,6 +170,7 @@
     {{ -endif }}
     {{ -endif }}
     {{ -endif }}
+    {{ -endif }}
 }
 
 {{ -if is_test_mode }}
@@ -119,3 +186,4 @@
 }
 {{ -endif }}
 
+
diff --git a/tools/aconfig/aconfig/tests/storage_test_2.aconfig b/tools/aconfig/aconfig/tests/storage_test_2.aconfig
index bb14fd1..db77f7a 100644
--- a/tools/aconfig/aconfig/tests/storage_test_2.aconfig
+++ b/tools/aconfig/aconfig/tests/storage_test_2.aconfig
@@ -9,7 +9,7 @@
 }
 
 flag {
-    name: "disabled_ro"
+    name: "disabled_rw"
     namespace: "aconfig_test"
     description: "This flag is DISABLED + READ_ONLY"
     bug: "123"
diff --git a/tools/aconfig/aconfig/tests/storage_test_2.values b/tools/aconfig/aconfig/tests/storage_test_2.values
index a7bb0b1..b650721 100644
--- a/tools/aconfig/aconfig/tests/storage_test_2.values
+++ b/tools/aconfig/aconfig/tests/storage_test_2.values
@@ -6,9 +6,9 @@
 }
 flag_value {
     package: "com.android.aconfig.storage.test_2"
-    name: "disabled_ro"
+    name: "disabled_rw"
     state: DISABLED
-    permission: READ_ONLY
+    permission: READ_WRITE
 }
 flag_value {
     package: "com.android.aconfig.storage.test_2"
diff --git a/tools/aconfig/aconfig/tests/storage_test_4.aconfig b/tools/aconfig/aconfig/tests/storage_test_4.aconfig
index 333fe09..5802a73 100644
--- a/tools/aconfig/aconfig/tests/storage_test_4.aconfig
+++ b/tools/aconfig/aconfig/tests/storage_test_4.aconfig
@@ -2,7 +2,7 @@
 container: "system"
 
 flag {
-    name: "enabled_ro"
+    name: "enabled_rw"
     namespace: "aconfig_test"
     description: "This flag is ENABLED + READ_ONLY"
     bug: "abc"
diff --git a/tools/aconfig/aconfig/tests/storage_test_4.values b/tools/aconfig/aconfig/tests/storage_test_4.values
index fa21317..784b744 100644
--- a/tools/aconfig/aconfig/tests/storage_test_4.values
+++ b/tools/aconfig/aconfig/tests/storage_test_4.values
@@ -1,8 +1,8 @@
 flag_value {
     package: "com.android.aconfig.storage.test_4"
-    name: "enabled_ro"
+    name: "enabled_rw"
     state: ENABLED
-    permission: READ_ONLY
+    permission: READ_WRITE
 }
 flag_value {
     package: "com.android.aconfig.storage.test_4"
diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp
new file mode 100644
index 0000000..2c771e0
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/Android.bp
@@ -0,0 +1,51 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+rust_defaults {
+    name: "libaconfig_device_paths.defaults",
+    edition: "2021",
+    clippy_lints: "android",
+    lints: "android",
+    srcs: ["src/lib.rs"],
+    rustlibs: [
+        "libaconfig_protos",
+        "libanyhow",
+        "libprotobuf",
+        "libregex",
+    ],
+}
+
+rust_library {
+    name: "libaconfig_device_paths",
+    crate_name: "aconfig_device_paths",
+    host_supported: true,
+    defaults: ["libaconfig_device_paths.defaults"],
+}
+
+genrule {
+    name: "libaconfig_java_device_paths_src",
+    srcs: ["src/DevicePathsTemplate.java"],
+    out: ["DevicePaths.java"],
+    tool_files: ["partition_aconfig_flags_paths.txt"],
+    cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)"
+}
+
+java_library {
+    name: "aconfig_device_paths_java",
+    srcs: [":libaconfig_java_device_paths_src"],
+}
diff --git a/tools/aconfig/aconfig_device_paths/Cargo.toml b/tools/aconfig/aconfig_device_paths/Cargo.toml
new file mode 100644
index 0000000..dbe9b3a
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "aconfig_device_paths"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = "1.0.82"
diff --git a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
new file mode 100644
index 0000000..140cd21
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
@@ -0,0 +1,4 @@
+"/system/etc/aconfig_flags.pb",
+"/system_ext/etc/aconfig_flags.pb",
+"/product/etc/aconfig_flags.pb",
+"/vendor/etc/aconfig_flags.pb",
diff --git a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java b/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
new file mode 100644
index 0000000..f27b9bd
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.aconfig;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * @hide
+ */
+public class DevicePaths {
+    static final String[] PATHS = {
+        TEMPLATE
+    };
+
+    private static final String APEX_DIR = "/apex";
+    private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb";
+
+
+    /**
+     * Returns the list of all on-device aconfig protos paths.
+     * @hide
+     */
+    public List<String> parsedFlagsProtoPaths() {
+        ArrayList<String> paths = new ArrayList<>(Arrays.asList(PATHS));
+
+        File apexDirectory = new File(APEX_DIR);
+        if (!apexDirectory.isDirectory()) {
+            return paths;
+        }
+
+        File[] subdirs = apexDirectory.listFiles();
+        if (subdirs == null) {
+            return paths;
+        }
+
+        for (File prefix : subdirs) {
+            // For each mainline module, there are two directories, one <modulepackage>/,
+            // and one <modulepackage>@<versioncode>/. Just read the former.
+            if (prefix.getAbsolutePath().contains("@")) {
+                continue;
+            }
+
+            File protoPath = new File(prefix + APEX_ACONFIG_PATH_SUFFIX);
+            if (!protoPath.exists()) {
+                continue;
+            }
+
+            paths.add(protoPath.getAbsolutePath());
+        }
+        return paths;
+    }
+}
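For reference, the libaconfig_java_device_paths_src genrule above replaces the TEMPLATE line in this file with the contents of partition_aconfig_flags_paths.txt, so the generated DevicePaths.java begins roughly as sketched below (the apex-scanning logic is unchanged from the template):

    // Sketch of the generated output after the sed substitution; illustrative only.
    public class DevicePaths {
        static final String[] PATHS = {
            "/system/etc/aconfig_flags.pb",
            "/system_ext/etc/aconfig_flags.pb",
            "/product/etc/aconfig_flags.pb",
            "/vendor/etc/aconfig_flags.pb",
        };
        // parsedFlagsProtoPaths() then appends /apex/<module>/etc/aconfig_flags.pb
        // for each active (non-@versioned) mainline module directory that contains one.
    }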
diff --git a/tools/aconfig/aconfig_device_paths/src/lib.rs b/tools/aconfig/aconfig_device_paths/src/lib.rs
new file mode 100644
index 0000000..9ab9cea
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/src/lib.rs
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! Library for finding all aconfig on-device protobuf file paths.
+
+use anyhow::Result;
+use std::path::PathBuf;
+
+use std::fs;
+
+fn read_partition_paths() -> Vec<PathBuf> {
+    include_str!("../partition_aconfig_flags_paths.txt")
+        .split(',')
+        .map(|s| s.trim().trim_matches('"'))
+        .filter(|s| !s.is_empty())
+        .map(|s| PathBuf::from(s.to_string()))
+        .collect()
+}
+
+/// Determines all paths that contain an aconfig protobuf file,
+/// filtering out nonexistent partition protobuf files.
+pub fn parsed_flags_proto_paths() -> Result<Vec<PathBuf>> {
+    let mut result: Vec<PathBuf> =
+        read_partition_paths().into_iter().filter(|s| s.exists()).collect();
+
+    for dir in fs::read_dir("/apex")? {
+        let dir = dir?;
+
+        // Only scan the currently active version of each mainline module; skip the @version dirs.
+        if dir.file_name().as_encoded_bytes().iter().any(|&b| b == b'@') {
+            continue;
+        }
+
+        let mut path = PathBuf::from("/apex");
+        path.push(dir.path());
+        path.push("etc");
+        path.push("aconfig_flags.pb");
+        if path.exists() {
+            result.push(path);
+        }
+    }
+
+    Ok(result)
+}
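+
+// Illustrative usage only (the crate name `aconfig_device_paths` is assumed here):
+//
+//     let paths = aconfig_device_paths::parsed_flags_proto_paths()?;
+//     for path in &paths {
+//         println!("{}", path.display());
+//     }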
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_read_partition_paths() {
+        assert_eq!(read_partition_paths().len(), 4);
+
+        assert_eq!(
+            read_partition_paths(),
+            vec![
+                PathBuf::from("/system/etc/aconfig_flags.pb"),
+                PathBuf::from("/system_ext/etc/aconfig_flags.pb"),
+                PathBuf::from("/product/etc/aconfig_flags.pb"),
+                PathBuf::from("/vendor/etc/aconfig_flags.pb")
+            ]
+        );
+    }
+}
diff --git a/tools/aconfig/aconfig_protos/Android.bp b/tools/aconfig/aconfig_protos/Android.bp
index 18c545a..d241994 100644
--- a/tools/aconfig/aconfig_protos/Android.bp
+++ b/tools/aconfig/aconfig_protos/Android.bp
@@ -17,7 +17,22 @@
     apex_available: [
         "com.android.configinfrastructure",
         "//apex_available:platform",
-    ]
+    ],
+}
+
+java_library {
+    name: "libaconfig_java_proto_nano",
+    srcs: ["protos/aconfig.proto"],
+    static_libs: ["libprotobuf-java-nano"],
+    proto: {
+        type: "nano",
+    },
+    sdk_version: "current",
+    min_sdk_version: "UpsideDownCake",
+    apex_available: [
+        "//apex_available:platform",
+    ],
+    jarjar_rules: "jarjar-nano-rules.txt",
 }
 
 java_library_host {
@@ -58,7 +73,7 @@
     ],
     proc_macros: [
         "libpaste",
-    ]
+    ],
 }
 
 rust_library {
diff --git a/tools/aconfig/aconfig_protos/jarjar-nano-rules.txt b/tools/aconfig/aconfig_protos/jarjar-nano-rules.txt
new file mode 100644
index 0000000..b58fa64
--- /dev/null
+++ b/tools/aconfig/aconfig_protos/jarjar-nano-rules.txt
@@ -0,0 +1 @@
+rule com.google.protobuf.** android.internal.framework.protobuf.@1
\ No newline at end of file
diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp
index b590312..e066e31 100644
--- a/tools/aconfig/aconfig_storage_file/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/Android.bp
@@ -12,6 +12,7 @@
         "libtempfile",
         "libprotobuf",
         "libclap",
+        "libcxx",
         "libaconfig_storage_protos",
     ],
 }
@@ -27,6 +28,8 @@
         "//apex_available:anyapex",
     ],
     min_sdk_version: "29",
+    vendor_available: true,
+    product_available: true,
 }
 
 rust_binary_host {
@@ -54,9 +57,11 @@
         "//apex_available:anyapex",
     ],
     min_sdk_version: "29",
+    vendor_available: true,
+    product_available: true,
 }
 
-cc_library_static {
+cc_library {
     name: "libaconfig_storage_protos_cc",
     proto: {
         export_proto_headers: true,
@@ -68,4 +73,67 @@
         "//apex_available:anyapex",
     ],
     host_supported: true,
+    min_sdk_version: "29",
+    vendor_available: true,
+    product_available: true,
+    double_loadable: true,
+}
+
+// C++ source generated from the Rust API via cxxbridge
+genrule {
+    name: "libcxx_aconfig_storage_file_bridge_code",
+    tools: ["cxxbridge"],
+    cmd: "$(location cxxbridge) $(in) > $(out)",
+    srcs: ["src/lib.rs"],
+    out: ["aconfig_storage/lib.rs.cc"],
+}
+
+// C++ header generated from the Rust API via cxxbridge
+genrule {
+    name: "libcxx_aconfig_storage_file_bridge_header",
+    tools: ["cxxbridge"],
+    cmd: "$(location cxxbridge) $(in) --header > $(out)",
+    srcs: ["src/lib.rs"],
+    out: ["aconfig_storage/lib.rs.h"],
+}
+
+// Static library built from the Rust crate that implements the cxx bridge
+rust_ffi_static {
+    name: "libaconfig_storage_file_cxx_bridge",
+    crate_name: "aconfig_storage_file_cxx_bridge",
+    host_supported: true,
+    vendor_available: true,
+    product_available: true,
+    srcs: ["src/lib.rs"],
+    defaults: ["aconfig_storage_file.defaults"],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    min_sdk_version: "29",
+}
+
+// C++ interface to the storage file parsing API
+cc_library {
+    name: "libaconfig_storage_file_cc",
+    srcs: ["aconfig_storage_file.cpp"],
+    generated_headers: [
+        "cxx-bridge-header",
+        "libcxx_aconfig_storage_file_bridge_header",
+    ],
+    generated_sources: ["libcxx_aconfig_storage_file_bridge_code"],
+    whole_static_libs: ["libaconfig_storage_file_cxx_bridge"],
+    export_include_dirs: ["include"],
+    host_supported: true,
+    vendor_available: true,
+    product_available: true,
+    shared_libs: [
+        "libbase",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    min_sdk_version: "29",
+    double_loadable: true,
 }
diff --git a/tools/aconfig/aconfig_storage_file/Cargo.toml b/tools/aconfig/aconfig_storage_file/Cargo.toml
index 641f481..192dfad 100644
--- a/tools/aconfig/aconfig_storage_file/Cargo.toml
+++ b/tools/aconfig/aconfig_storage_file/Cargo.toml
@@ -13,6 +13,7 @@
 tempfile = "3.9.0"
 thiserror = "1.0.56"
 clap = { version = "4.1.8", features = ["derive"] }
+cxx = "1.0"
 
 [[bin]]
 name = "aconfig-storage"
diff --git a/tools/aconfig/aconfig_storage_file/aconfig_storage_file.cpp b/tools/aconfig/aconfig_storage_file/aconfig_storage_file.cpp
new file mode 100644
index 0000000..7af024b
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/aconfig_storage_file.cpp
@@ -0,0 +1,61 @@
+#include "rust/cxx.h"
+#include "aconfig_storage/lib.rs.h"
+
+#include "aconfig_storage/aconfig_storage_file.hpp"
+
+using namespace android::base;
+
+namespace aconfig_storage {
+
+Result<std::vector<FlagValueSummary>> list_flags(
+    const std::string& package_map,
+    const std::string& flag_map,
+    const std::string& flag_val) {
+  auto flag_list_cxx = list_flags_cxx(rust::Str(package_map.c_str()),
+                                      rust::Str(flag_map.c_str()),
+                                      rust::Str(flag_val.c_str()));
+  if (flag_list_cxx.query_success) {
+    auto flag_list = std::vector<FlagValueSummary>();
+    for (const auto& flag_cxx : flag_list_cxx.flags) {
+      auto flag = FlagValueSummary();
+      flag.package_name = std::string(flag_cxx.package_name);
+      flag.flag_name = std::string(flag_cxx.flag_name);
+      flag.flag_value = std::string(flag_cxx.flag_value);
+      flag.value_type = std::string(flag_cxx.value_type);
+      flag_list.push_back(flag);
+    }
+    return flag_list;
+  } else {
+    return Error() << flag_list_cxx.error_message;
+  }
+}
+
+Result<std::vector<FlagValueAndInfoSummary>> list_flags_with_info(
+    const std::string& package_map,
+    const std::string& flag_map,
+    const std::string& flag_val,
+    const std::string& flag_info) {
+  auto flag_list_cxx = list_flags_with_info_cxx(rust::Str(package_map.c_str()),
+                                                rust::Str(flag_map.c_str()),
+                                                rust::Str(flag_val.c_str()),
+                                                rust::Str(flag_info.c_str()));
+  if (flag_list_cxx.query_success) {
+    auto flag_list = std::vector<FlagValueAndInfoSummary>();
+    for (const auto& flag_cxx : flag_list_cxx.flags) {
+      auto flag = FlagValueAndInfoSummary();
+      flag.package_name = std::string(flag_cxx.package_name);
+      flag.flag_name = std::string(flag_cxx.flag_name);
+      flag.flag_value = std::string(flag_cxx.flag_value);
+      flag.value_type = std::string(flag_cxx.value_type);
+      flag.is_readwrite = flag_cxx.is_readwrite;
+      flag.has_server_override = flag_cxx.has_server_override;
+      flag.has_local_override = flag_cxx.has_local_override;
+      flag_list.push_back(flag);
+    }
+    return flag_list;
+  } else {
+    return Error() << flag_list_cxx.error_message;
+  }
+}
+
+} // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_file/build.rs b/tools/aconfig/aconfig_storage_file/build.rs
index 1feeb60..e0ade2a 100644
--- a/tools/aconfig/aconfig_storage_file/build.rs
+++ b/tools/aconfig/aconfig_storage_file/build.rs
@@ -14,4 +14,6 @@
         .inputs(proto_files)
         .cargo_out_dir("aconfig_storage_protos")
         .run_from_script();
+
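+    // Run the cxx bridge codegen for the Cargo build; the returned cc::Build is
+    // intentionally unused (the Soong build generates and compiles the bridge
+    // through the separate cxxbridge genrules above).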
+    let _ = cxx_build::bridge("src/lib.rs");
 }
diff --git a/tools/aconfig/aconfig_storage_file/include/aconfig_storage/aconfig_storage_file.hpp b/tools/aconfig/aconfig_storage_file/include/aconfig_storage/aconfig_storage_file.hpp
new file mode 100644
index 0000000..9f3cdb0
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/include/aconfig_storage/aconfig_storage_file.hpp
@@ -0,0 +1,48 @@
+#pragma once
+
+#include <vector>
+#include <string>
+#include <android-base/result.h>
+
+namespace aconfig_storage {
+
+/// Flag value summary for a flag
+struct FlagValueSummary {
+  std::string package_name;
+  std::string flag_name;
+  std::string flag_value;
+  std::string value_type;
+};
+
+/// List all flag values
+/// \input package_map: package map file
+/// \input flag_map: flag map file
+/// \input flag_val: flag value file
+android::base::Result<std::vector<FlagValueSummary>> list_flags(
+    const std::string& package_map,
+    const std::string& flag_map,
+    const std::string& flag_val);
+
+/// Flag value and info summary for a flag
+struct FlagValueAndInfoSummary {
+  std::string package_name;
+  std::string flag_name;
+  std::string flag_value;
+  std::string value_type;
+  bool is_readwrite;
+  bool has_server_override;
+  bool has_local_override;
+};
+
+/// List all flag values with their flag info
+/// \input package_map: package map file
+/// \input flag_map: flag map file
+/// \input flag_val: flag value file
+/// \input flag_info: flag info file
+android::base::Result<std::vector<FlagValueAndInfoSummary>> list_flags_with_info(
+    const std::string& package_map,
+    const std::string& flag_map,
+    const std::string& flag_val,
+    const std::string& flag_info);
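+
+// Illustrative usage only; the storage file paths below are hypothetical
+// examples for a "system" container:
+//
+//   auto flags = aconfig_storage::list_flags_with_info(
+//       "/metadata/aconfig/maps/system.package.map",
+//       "/metadata/aconfig/maps/system.flag.map",
+//       "/metadata/aconfig/boot/system.val",
+//       "/metadata/aconfig/boot/system.info");
+//   if (flags.ok()) {
+//     for (const auto& flag : *flags) {
+//       // flag.package_name, flag.flag_name, flag.flag_value, ...
+//     }
+//   }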
+
+}// namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto b/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto
index c6728bd..e1c1c7f 100644
--- a/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto
+++ b/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto
@@ -26,7 +26,8 @@
   optional string package_map = 3;
   optional string flag_map = 4;
   optional string flag_val = 5;
-  optional int64 timestamp = 6;
+  optional string flag_info = 6;
+  optional int64 timestamp = 7;
 }
 
 message storage_files {
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_info.rs b/tools/aconfig/aconfig_storage_file/src/flag_info.rs
index 3fff263..beac38d 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_info.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_info.rs
@@ -91,9 +91,9 @@
 /// bit field for flag info
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum FlagInfoBit {
-    IsSticky = 0,
-    IsReadWrite = 1,
-    HasOverride = 2,
+    HasServerOverride = 1 << 0,
+    IsReadWrite = 1 << 1,
+    HasLocalOverride = 1 << 2,
 }
 
 /// Flag info node struct
@@ -107,10 +107,10 @@
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         writeln!(
             f,
-            "sticky: {}, readwrite: {}, override: {}",
-            self.attributes & (FlagInfoBit::IsSticky as u8),
-            self.attributes & (FlagInfoBit::IsReadWrite as u8),
-            self.attributes & (FlagInfoBit::HasOverride as u8),
+            "readwrite: {}, server override: {}, local override: {}",
+            self.attributes & (FlagInfoBit::IsReadWrite as u8) != 0,
+            self.attributes & (FlagInfoBit::HasServerOverride as u8) != 0,
+            self.attributes & (FlagInfoBit::HasLocalOverride as u8) != 0,
         )?;
         Ok(())
     }
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
index f41f4ce..64b90ea 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
@@ -100,7 +100,8 @@
     pub package_id: u32,
     pub flag_name: String,
     pub flag_type: StoredFlagType,
-    pub flag_id: u16,
+    // Index of this flag among flags of the same type within its package
+    pub flag_index: u16,
     pub next_offset: Option<u32>,
 }
 
@@ -109,8 +110,8 @@
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         writeln!(
             f,
-            "Package Id: {}, Flag: {}, Type: {:?}, Offset: {}, Next: {:?}",
-            self.package_id, self.flag_name, self.flag_type, self.flag_id, self.next_offset
+            "Package Id: {}, Flag: {}, Type: {:?}, Index: {}, Next: {:?}",
+            self.package_id, self.flag_name, self.flag_type, self.flag_index, self.next_offset
         )?;
         Ok(())
     }
@@ -125,7 +126,7 @@
         result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes());
         result.extend_from_slice(name_bytes);
         result.extend_from_slice(&(self.flag_type as u16).to_le_bytes());
-        result.extend_from_slice(&self.flag_id.to_le_bytes());
+        result.extend_from_slice(&self.flag_index.to_le_bytes());
         result.extend_from_slice(&self.next_offset.unwrap_or(0).to_le_bytes());
         result
     }
@@ -137,7 +138,7 @@
             package_id: read_u32_from_bytes(bytes, &mut head)?,
             flag_name: read_str_from_bytes(bytes, &mut head)?,
             flag_type: StoredFlagType::try_from(read_u16_from_bytes(bytes, &mut head)?)?,
-            flag_id: read_u16_from_bytes(bytes, &mut head)?,
+            flag_index: read_u16_from_bytes(bytes, &mut head)?,
             next_offset: match read_u32_from_bytes(bytes, &mut head)? {
                 0 => None,
                 val => Some(val),
diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs
index d14bab6..26e9c1a 100644
--- a/tools/aconfig/aconfig_storage_file/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_file/src/lib.rs
@@ -46,12 +46,14 @@
 use std::hash::{Hash, Hasher};
 use std::io::Read;
 
-pub use crate::flag_info::{FlagInfoHeader, FlagInfoList, FlagInfoNode};
+pub use crate::flag_info::{FlagInfoBit, FlagInfoHeader, FlagInfoList, FlagInfoNode};
 pub use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode};
 pub use crate::flag_value::{FlagValueHeader, FlagValueList};
 pub use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode};
 
-use crate::AconfigStorageError::{BytesParseFail, HashTableSizeLimit, InvalidStoredFlagType};
+use crate::AconfigStorageError::{
+    BytesParseFail, HashTableSizeLimit, InvalidFlagValueType, InvalidStoredFlagType,
+};
 
 /// Storage file version
 pub const FILE_VERSION: u32 = 1;
@@ -103,6 +105,7 @@
 }
 
 /// Flag type enum as stored by storage file
+/// ONLY APPEND, NEVER REMOVE FOR BACKWARD COMPATIBILITY. THE MAX IS U16.
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum StoredFlagType {
     ReadWriteBoolean = 0,
@@ -123,6 +126,36 @@
     }
 }
 
+/// Flag value type enum; one FlagValueType maps to many StoredFlagType values.
+/// ONLY APPEND, NEVER REMOVE FOR BACKWARD COMPATIBILITY. THE MAX IS U16.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum FlagValueType {
+    Boolean = 0,
+}
+
+impl TryFrom<StoredFlagType> for FlagValueType {
+    type Error = AconfigStorageError;
+
+    fn try_from(value: StoredFlagType) -> Result<Self, Self::Error> {
+        match value {
+            StoredFlagType::ReadWriteBoolean => Ok(Self::Boolean),
+            StoredFlagType::ReadOnlyBoolean => Ok(Self::Boolean),
+            StoredFlagType::FixedReadOnlyBoolean => Ok(Self::Boolean),
+        }
+    }
+}
+
+impl TryFrom<u16> for FlagValueType {
+    type Error = AconfigStorageError;
+
+    fn try_from(value: u16) -> Result<Self, Self::Error> {
+        match value {
+            x if x == Self::Boolean as u16 => Ok(Self::Boolean),
+            _ => Err(InvalidFlagValueType(anyhow!("Invalid flag value type"))),
+        }
+    }
+}
+
 /// Storage query api error
 #[non_exhaustive]
 #[derive(thiserror::Error, Debug)]
@@ -162,6 +195,9 @@
 
     #[error("invalid stored flag type")]
     InvalidStoredFlagType(#[source] anyhow::Error),
+
+    #[error("invalid flag value type")]
+    InvalidFlagValueType(#[source] anyhow::Error),
 }
 
 /// Get the right hash table size given number of entries in the table. Use a
@@ -242,47 +278,234 @@
     Ok(buffer)
 }
 
+/// Flag value summary
+#[derive(Debug, PartialEq)]
+pub struct FlagValueSummary {
+    pub package_name: String,
+    pub flag_name: String,
+    pub flag_value: String,
+    pub value_type: StoredFlagType,
+}
+
 /// List flag values from storage files
 pub fn list_flags(
     package_map: &str,
     flag_map: &str,
     flag_val: &str,
-) -> Result<Vec<(String, String, StoredFlagType, bool)>, AconfigStorageError> {
+) -> Result<Vec<FlagValueSummary>, AconfigStorageError> {
     let package_table = PackageTable::from_bytes(&read_file_to_bytes(package_map)?)?;
     let flag_table = FlagTable::from_bytes(&read_file_to_bytes(flag_map)?)?;
     let flag_value_list = FlagValueList::from_bytes(&read_file_to_bytes(flag_val)?)?;
 
     let mut package_info = vec![("", 0); package_table.header.num_packages as usize];
     for node in package_table.nodes.iter() {
-        package_info[node.package_id as usize] = (&node.package_name, node.boolean_offset);
+        package_info[node.package_id as usize] = (&node.package_name, node.boolean_start_index);
     }
 
     let mut flags = Vec::new();
     for node in flag_table.nodes.iter() {
-        let (package_name, package_offset) = package_info[node.package_id as usize];
-        let flag_offset = package_offset + node.flag_id as u32;
-        let flag_value = flag_value_list.booleans[flag_offset as usize];
-        flags.push((
-            String::from(package_name),
-            node.flag_name.clone(),
-            node.flag_type,
-            flag_value,
-        ));
+        let (package_name, boolean_start_index) = package_info[node.package_id as usize];
+        let flag_index = boolean_start_index + node.flag_index as u32;
+        let flag_value = flag_value_list.booleans[flag_index as usize];
+        flags.push(FlagValueSummary {
+            package_name: String::from(package_name),
+            flag_name: node.flag_name.clone(),
+            flag_value: flag_value.to_string(),
+            value_type: node.flag_type,
+        });
     }
 
-    flags.sort_by(|v1, v2| match v1.0.cmp(&v2.0) {
-        Ordering::Equal => v1.1.cmp(&v2.1),
+    flags.sort_by(|v1, v2| match v1.package_name.cmp(&v2.package_name) {
+        Ordering::Equal => v1.flag_name.cmp(&v2.flag_name),
         other => other,
     });
     Ok(flags)
 }
 
+/// Flag value and info summary
+#[derive(Debug, PartialEq)]
+pub struct FlagValueAndInfoSummary {
+    pub package_name: String,
+    pub flag_name: String,
+    pub flag_value: String,
+    pub value_type: StoredFlagType,
+    pub is_readwrite: bool,
+    pub has_server_override: bool,
+    pub has_local_override: bool,
+}
+
+/// List flag values and info from storage files
+pub fn list_flags_with_info(
+    package_map: &str,
+    flag_map: &str,
+    flag_val: &str,
+    flag_info: &str,
+) -> Result<Vec<FlagValueAndInfoSummary>, AconfigStorageError> {
+    let package_table = PackageTable::from_bytes(&read_file_to_bytes(package_map)?)?;
+    let flag_table = FlagTable::from_bytes(&read_file_to_bytes(flag_map)?)?;
+    let flag_value_list = FlagValueList::from_bytes(&read_file_to_bytes(flag_val)?)?;
+    let flag_info = FlagInfoList::from_bytes(&read_file_to_bytes(flag_info)?)?;
+
+    let mut package_info = vec![("", 0); package_table.header.num_packages as usize];
+    for node in package_table.nodes.iter() {
+        package_info[node.package_id as usize] = (&node.package_name, node.boolean_start_index);
+    }
+
+    let mut flags = Vec::new();
+    for node in flag_table.nodes.iter() {
+        let (package_name, boolean_start_index) = package_info[node.package_id as usize];
+        let flag_index = boolean_start_index + node.flag_index as u32;
+        let flag_value = flag_value_list.booleans[flag_index as usize];
+        let flag_attribute = flag_info.nodes[flag_index as usize].attributes;
+        flags.push(FlagValueAndInfoSummary {
+            package_name: String::from(package_name),
+            flag_name: node.flag_name.clone(),
+            flag_value: flag_value.to_string(),
+            value_type: node.flag_type,
+            is_readwrite: flag_attribute & (FlagInfoBit::IsReadWrite as u8) != 0,
+            has_server_override: flag_attribute & (FlagInfoBit::HasServerOverride as u8) != 0,
+            has_local_override: flag_attribute & (FlagInfoBit::HasLocalOverride as u8) != 0,
+        });
+    }
+
+    flags.sort_by(|v1, v2| match v1.package_name.cmp(&v2.package_name) {
+        Ordering::Equal => v1.flag_name.cmp(&v2.flag_name),
+        other => other,
+    });
+    Ok(flags)
+}
+
+// *************************************** //
+// CC INTEROP
+// *************************************** //
+
+// Rust data structures and methods exported to C++; the C++ side is generated by cxxbridge
+#[cxx::bridge]
+mod ffi {
+    /// flag value summary cxx return
+    pub struct FlagValueSummaryCXX {
+        pub package_name: String,
+        pub flag_name: String,
+        pub flag_value: String,
+        pub value_type: String,
+    }
+
+    /// flag value and info summary cxx return
+    pub struct FlagValueAndInfoSummaryCXX {
+        pub package_name: String,
+        pub flag_name: String,
+        pub flag_value: String,
+        pub value_type: String,
+        pub is_readwrite: bool,
+        pub has_server_override: bool,
+        pub has_local_override: bool,
+    }
+
+    /// list flag result cxx return
+    pub struct ListFlagValueResultCXX {
+        pub query_success: bool,
+        pub error_message: String,
+        pub flags: Vec<FlagValueSummaryCXX>,
+    }
+
+    /// list flag with info result cxx return
+    pub struct ListFlagValueAndInfoResultCXX {
+        pub query_success: bool,
+        pub error_message: String,
+        pub flags: Vec<FlagValueAndInfoSummaryCXX>,
+    }
+
+    // Rust export to c++
+    extern "Rust" {
+        pub fn list_flags_cxx(
+            package_map: &str,
+            flag_map: &str,
+            flag_val: &str,
+        ) -> ListFlagValueResultCXX;
+
+        pub fn list_flags_with_info_cxx(
+            package_map: &str,
+            flag_map: &str,
+            flag_val: &str,
+            flag_info: &str,
+        ) -> ListFlagValueAndInfoResultCXX;
+    }
+}
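+
+// Illustrative note: for the bridge above, cxxbridge emits C++ declarations
+// roughly equivalent to
+//
+//   ListFlagValueResultCXX list_flags_cxx(rust::Str package_map, rust::Str flag_map, rust::Str flag_val);
+//   ListFlagValueAndInfoResultCXX list_flags_with_info_cxx(rust::Str package_map, rust::Str flag_map,
+//                                                          rust::Str flag_val, rust::Str flag_info);
+//
+// which aconfig_storage_file.cpp calls through "aconfig_storage/lib.rs.h".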
+
+/// implement flag value summary cxx return type
+impl ffi::FlagValueSummaryCXX {
+    pub(crate) fn new(summary: FlagValueSummary) -> Self {
+        Self {
+            package_name: summary.package_name,
+            flag_name: summary.flag_name,
+            flag_value: summary.flag_value,
+            value_type: format!("{:?}", summary.value_type),
+        }
+    }
+}
+
+/// implement flag value and info summary cxx return type
+impl ffi::FlagValueAndInfoSummaryCXX {
+    pub(crate) fn new(summary: FlagValueAndInfoSummary) -> Self {
+        Self {
+            package_name: summary.package_name,
+            flag_name: summary.flag_name,
+            flag_value: summary.flag_value,
+            value_type: format!("{:?}", summary.value_type),
+            is_readwrite: summary.is_readwrite,
+            has_server_override: summary.has_server_override,
+            has_local_override: summary.has_local_override,
+        }
+    }
+}
+
+/// List flag values through the cxx interop layer
+pub fn list_flags_cxx(
+    package_map: &str,
+    flag_map: &str,
+    flag_val: &str,
+) -> ffi::ListFlagValueResultCXX {
+    match list_flags(package_map, flag_map, flag_val) {
+        Ok(summary) => ffi::ListFlagValueResultCXX {
+            query_success: true,
+            error_message: String::new(),
+            flags: summary.into_iter().map(ffi::FlagValueSummaryCXX::new).collect(),
+        },
+        Err(errmsg) => ffi::ListFlagValueResultCXX {
+            query_success: false,
+            error_message: format!("{:?}", errmsg),
+            flags: Vec::new(),
+        },
+    }
+}
+
+/// List flag values and info through the cxx interop layer
+pub fn list_flags_with_info_cxx(
+    package_map: &str,
+    flag_map: &str,
+    flag_val: &str,
+    flag_info: &str,
+) -> ffi::ListFlagValueAndInfoResultCXX {
+    match list_flags_with_info(package_map, flag_map, flag_val, flag_info) {
+        Ok(summary) => ffi::ListFlagValueAndInfoResultCXX {
+            query_success: true,
+            error_message: String::new(),
+            flags: summary.into_iter().map(ffi::FlagValueAndInfoSummaryCXX::new).collect(),
+        },
+        Err(errmsg) => ffi::ListFlagValueAndInfoResultCXX {
+            query_success: false,
+            error_message: format!("{:?}", errmsg),
+            flags: Vec::new(),
+        },
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
     use crate::test_utils::{
-        create_test_flag_table, create_test_flag_value_list, create_test_package_table,
-        write_bytes_to_temp_file,
+        create_test_flag_info_list, create_test_flag_table, create_test_flag_value_list,
+        create_test_package_table, write_bytes_to_temp_file,
     };
 
     #[test]
@@ -301,54 +524,154 @@
         let flags =
             list_flags(&package_table_path, &flag_table_path, &flag_value_list_path).unwrap();
         let expected = [
-            (
-                String::from("com.android.aconfig.storage.test_1"),
-                String::from("disabled_rw"),
-                StoredFlagType::ReadWriteBoolean,
-                false,
-            ),
-            (
-                String::from("com.android.aconfig.storage.test_1"),
-                String::from("enabled_ro"),
-                StoredFlagType::ReadOnlyBoolean,
-                true,
-            ),
-            (
-                String::from("com.android.aconfig.storage.test_1"),
-                String::from("enabled_rw"),
-                StoredFlagType::ReadWriteBoolean,
-                true,
-            ),
-            (
-                String::from("com.android.aconfig.storage.test_2"),
-                String::from("disabled_ro"),
-                StoredFlagType::ReadOnlyBoolean,
-                false,
-            ),
-            (
-                String::from("com.android.aconfig.storage.test_2"),
-                String::from("enabled_fixed_ro"),
-                StoredFlagType::FixedReadOnlyBoolean,
-                true,
-            ),
-            (
-                String::from("com.android.aconfig.storage.test_2"),
-                String::from("enabled_ro"),
-                StoredFlagType::ReadOnlyBoolean,
-                true,
-            ),
-            (
-                String::from("com.android.aconfig.storage.test_4"),
-                String::from("enabled_fixed_ro"),
-                StoredFlagType::FixedReadOnlyBoolean,
-                true,
-            ),
-            (
-                String::from("com.android.aconfig.storage.test_4"),
-                String::from("enabled_ro"),
-                StoredFlagType::ReadOnlyBoolean,
-                true,
-            ),
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_1"),
+                flag_name: String::from("disabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("false"),
+            },
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_1"),
+                flag_name: String::from("enabled_ro"),
+                value_type: StoredFlagType::ReadOnlyBoolean,
+                flag_value: String::from("true"),
+            },
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_1"),
+                flag_name: String::from("enabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("true"),
+            },
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_2"),
+                flag_name: String::from("disabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("false"),
+            },
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_2"),
+                flag_name: String::from("enabled_fixed_ro"),
+                value_type: StoredFlagType::FixedReadOnlyBoolean,
+                flag_value: String::from("true"),
+            },
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_2"),
+                flag_name: String::from("enabled_ro"),
+                value_type: StoredFlagType::ReadOnlyBoolean,
+                flag_value: String::from("true"),
+            },
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_4"),
+                flag_name: String::from("enabled_fixed_ro"),
+                value_type: StoredFlagType::FixedReadOnlyBoolean,
+                flag_value: String::from("true"),
+            },
+            FlagValueSummary {
+                package_name: String::from("com.android.aconfig.storage.test_4"),
+                flag_name: String::from("enabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("true"),
+            },
+        ];
+        assert_eq!(flags, expected);
+    }
+
+    #[test]
+    // this test point locks down the flag list with info api
+    fn test_list_flag_with_info() {
+        let package_table =
+            write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap();
+        let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap();
+        let flag_value_list =
+            write_bytes_to_temp_file(&create_test_flag_value_list().into_bytes()).unwrap();
+        let flag_info_list =
+            write_bytes_to_temp_file(&create_test_flag_info_list().into_bytes()).unwrap();
+
+        let package_table_path = package_table.path().display().to_string();
+        let flag_table_path = flag_table.path().display().to_string();
+        let flag_value_list_path = flag_value_list.path().display().to_string();
+        let flag_info_list_path = flag_info_list.path().display().to_string();
+
+        let flags = list_flags_with_info(
+            &package_table_path,
+            &flag_table_path,
+            &flag_value_list_path,
+            &flag_info_list_path,
+        )
+        .unwrap();
+        let expected = [
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_1"),
+                flag_name: String::from("disabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("false"),
+                is_readwrite: true,
+                has_server_override: false,
+                has_local_override: false,
+            },
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_1"),
+                flag_name: String::from("enabled_ro"),
+                value_type: StoredFlagType::ReadOnlyBoolean,
+                flag_value: String::from("true"),
+                is_readwrite: false,
+                has_server_override: false,
+                has_local_override: false,
+            },
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_1"),
+                flag_name: String::from("enabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("true"),
+                is_readwrite: true,
+                has_server_override: false,
+                has_local_override: false,
+            },
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_2"),
+                flag_name: String::from("disabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("false"),
+                is_readwrite: true,
+                has_server_override: false,
+                has_local_override: false,
+            },
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_2"),
+                flag_name: String::from("enabled_fixed_ro"),
+                value_type: StoredFlagType::FixedReadOnlyBoolean,
+                flag_value: String::from("true"),
+                is_readwrite: false,
+                has_server_override: false,
+                has_local_override: false,
+            },
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_2"),
+                flag_name: String::from("enabled_ro"),
+                value_type: StoredFlagType::ReadOnlyBoolean,
+                flag_value: String::from("true"),
+                is_readwrite: false,
+                has_server_override: false,
+                has_local_override: false,
+            },
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_4"),
+                flag_name: String::from("enabled_fixed_ro"),
+                value_type: StoredFlagType::FixedReadOnlyBoolean,
+                flag_value: String::from("true"),
+                is_readwrite: false,
+                has_server_override: false,
+                has_local_override: false,
+            },
+            FlagValueAndInfoSummary {
+                package_name: String::from("com.android.aconfig.storage.test_4"),
+                flag_name: String::from("enabled_rw"),
+                value_type: StoredFlagType::ReadWriteBoolean,
+                flag_value: String::from("true"),
+                is_readwrite: true,
+                has_server_override: false,
+                has_local_override: false,
+            },
         ];
         assert_eq!(flags, expected);
     }
diff --git a/tools/aconfig/aconfig_storage_file/src/main.rs b/tools/aconfig/aconfig_storage_file/src/main.rs
index b686274..8b9e38d 100644
--- a/tools/aconfig/aconfig_storage_file/src/main.rs
+++ b/tools/aconfig/aconfig_storage_file/src/main.rs
@@ -17,8 +17,8 @@
 //! `aconfig-storage` is a debugging tool to parse storage files
 
 use aconfig_storage_file::{
-    list_flags, read_file_to_bytes, AconfigStorageError, FlagInfoList, FlagTable, FlagValueList,
-    PackageTable, StorageFileType,
+    list_flags, list_flags_with_info, read_file_to_bytes, AconfigStorageError, FlagInfoList,
+    FlagTable, FlagValueList, PackageTable, StorageFileType,
 };
 
 use clap::{builder::ArgAction, Arg, Command};
@@ -45,7 +45,10 @@
                         .action(ArgAction::Set),
                 )
                 .arg(Arg::new("flag-map").long("flag-map").required(true).action(ArgAction::Set))
-                .arg(Arg::new("flag-val").long("flag-val").required(true).action(ArgAction::Set)),
+                .arg(Arg::new("flag-val").long("flag-val").required(true).action(ArgAction::Set))
+                .arg(
+                    Arg::new("flag-info").long("flag-info").required(false).action(ArgAction::Set),
+                ),
         )
 }
 
@@ -87,9 +90,27 @@
             let package_map = sub_matches.get_one::<String>("package-map").unwrap();
             let flag_map = sub_matches.get_one::<String>("flag-map").unwrap();
             let flag_val = sub_matches.get_one::<String>("flag-val").unwrap();
-            let flags = list_flags(package_map, flag_map, flag_val)?;
-            for (package_name, flag_name, flag_type, flag_value) in flags.iter() {
-                println!("{} {} {:?} {}", package_name, flag_name, flag_type, flag_value);
+            let flag_info = sub_matches.get_one::<String>("flag-info");
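+            // Illustrative invocations (the `list` subcommand name and the file
+            // names are assumed for this example):
+            //   aconfig-storage list --package-map system.package.map \
+            //     --flag-map system.flag.map --flag-val system.val
+            //   aconfig-storage list --package-map system.package.map \
+            //     --flag-map system.flag.map --flag-val system.val --flag-info system.info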
+            match flag_info {
+                Some(info_file) => {
+                    let flags = list_flags_with_info(package_map, flag_map, flag_val, info_file)?;
+                    for flag in flags.iter() {
+                        println!(
+                            "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}",
+                            flag.package_name, flag.flag_name, flag.flag_value, flag.value_type,
+                            flag.is_readwrite, flag.has_server_override, flag.has_local_override,
+                        );
+                    }
+                }
+                None => {
+                    let flags = list_flags(package_map, flag_map, flag_val)?;
+                    for flag in flags.iter() {
+                        println!(
+                            "{} {} {} {:?}",
+                            flag.package_name, flag.flag_name, flag.flag_value, flag.value_type,
+                        );
+                    }
+                }
             }
         }
         _ => unreachable!(),
diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs
index 36b0493..b734972 100644
--- a/tools/aconfig/aconfig_storage_file/src/package_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs
@@ -96,9 +96,9 @@
 pub struct PackageTableNode {
     pub package_name: String,
     pub package_id: u32,
-    // offset of the first boolean flag in this flag package with respect to the start of
-    // boolean flag value array in the flag value file
-    pub boolean_offset: u32,
+    // The index of the first boolean flag in this aconfig package among all boolean
+    // flags in this container.
+    pub boolean_start_index: u32,
     pub next_offset: Option<u32>,
 }
 
@@ -107,8 +107,8 @@
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         writeln!(
             f,
-            "Package: {}, Id: {}, Offset: {}, Next: {:?}",
-            self.package_name, self.package_id, self.boolean_offset, self.next_offset
+            "Package: {}, Id: {}, Boolean flag start index: {}, Next: {:?}",
+            self.package_name, self.package_id, self.boolean_start_index, self.next_offset
         )?;
         Ok(())
     }
@@ -122,7 +122,7 @@
         result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes());
         result.extend_from_slice(name_bytes);
         result.extend_from_slice(&self.package_id.to_le_bytes());
-        result.extend_from_slice(&self.boolean_offset.to_le_bytes());
+        result.extend_from_slice(&self.boolean_start_index.to_le_bytes());
         result.extend_from_slice(&self.next_offset.unwrap_or(0).to_le_bytes());
         result
     }
@@ -133,7 +133,7 @@
         let node = Self {
             package_name: read_str_from_bytes(bytes, &mut head)?,
             package_id: read_u32_from_bytes(bytes, &mut head)?,
-            boolean_offset: read_u32_from_bytes(bytes, &mut head)?,
+            boolean_start_index: read_u32_from_bytes(bytes, &mut head)?,
             next_offset: match read_u32_from_bytes(bytes, &mut head)? {
                 0 => None,
                 val => Some(val),
diff --git a/tools/aconfig/aconfig_storage_file/src/test_utils.rs b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
index c0f647a..106666c 100644
--- a/tools/aconfig/aconfig_storage_file/src/test_utils.rs
+++ b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
@@ -38,19 +38,19 @@
     let first_node = PackageTableNode {
         package_name: String::from("com.android.aconfig.storage.test_2"),
         package_id: 1,
-        boolean_offset: 3,
+        boolean_start_index: 3,
         next_offset: None,
     };
     let second_node = PackageTableNode {
         package_name: String::from("com.android.aconfig.storage.test_1"),
         package_id: 0,
-        boolean_offset: 0,
+        boolean_start_index: 0,
         next_offset: Some(159),
     };
     let third_node = PackageTableNode {
         package_name: String::from("com.android.aconfig.storage.test_4"),
         package_id: 2,
-        boolean_offset: 6,
+        boolean_start_index: 6,
         next_offset: None,
     };
     let nodes = vec![first_node, second_node, third_node];
@@ -63,14 +63,14 @@
         package_id: u32,
         flag_name: &str,
         flag_type: u16,
-        flag_id: u16,
+        flag_index: u16,
         next_offset: Option<u32>,
     ) -> Self {
         Self {
             package_id,
             flag_name: flag_name.to_string(),
             flag_type: StoredFlagType::try_from(flag_type).unwrap(),
-            flag_id,
+            flag_index,
             next_offset,
         }
     }
@@ -92,8 +92,8 @@
         None,
         None,
         None,
-        Some(178),
         None,
+        Some(177),
         Some(204),
         None,
         Some(262),
@@ -108,8 +108,8 @@
     let nodes = vec![
         FlagTableNode::new_expected(0, "enabled_ro", 1, 1, None),
         FlagTableNode::new_expected(0, "enabled_rw", 0, 2, Some(151)),
-        FlagTableNode::new_expected(1, "disabled_ro", 1, 0, None),
-        FlagTableNode::new_expected(2, "enabled_ro", 1, 1, None),
+        FlagTableNode::new_expected(2, "enabled_rw", 0, 1, None),
+        FlagTableNode::new_expected(1, "disabled_rw", 0, 0, None),
         FlagTableNode::new_expected(1, "enabled_fixed_ro", 2, 1, Some(236)),
         FlagTableNode::new_expected(1, "enabled_ro", 1, 2, None),
         FlagTableNode::new_expected(2, "enabled_fixed_ro", 2, 0, None),
@@ -140,7 +140,7 @@
         num_flags: 8,
         boolean_flag_offset: 27,
     };
-    let is_flag_rw = [true, false, true, false, false, false, false, false];
+    let is_flag_rw = [true, false, true, true, false, false, false, true];
     let nodes = is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect();
     FlagInfoList { header, nodes }
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp
new file mode 100644
index 0000000..26b7800
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp
@@ -0,0 +1,23 @@
+
+cc_test {
+    name: "aconfig_storage_file.test.cpp",
+    team: "trendy_team_android_core_experiments",
+    srcs: [
+        "storage_file_test.cpp",
+    ],
+    static_libs: [
+        "libgmock",
+        "libaconfig_storage_file_cc",
+        "libbase",
+    ],
+    data: [
+        "package.map",
+        "flag.map",
+        "flag.val",
+        "flag.info",
+    ],
+    test_suites: [
+        "device-tests",
+        "general-tests",
+    ],
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.info b/tools/aconfig/aconfig_storage_file/tests/flag.info
new file mode 100644
index 0000000..6223edf
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/flag.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.map b/tools/aconfig/aconfig_storage_file/tests/flag.map
new file mode 100644
index 0000000..e868f53
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/flag.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.val b/tools/aconfig/aconfig_storage_file/tests/flag.val
new file mode 100644
index 0000000..ed203d4
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/flag.val
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/package.map b/tools/aconfig/aconfig_storage_file/tests/package.map
new file mode 100644
index 0000000..6c46a03
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/package.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
new file mode 100644
index 0000000..ebd1dd8
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string>
+#include <vector>
+#include <android-base/file.h>
+#include <android-base/result.h>
+#include <gtest/gtest.h>
+#include "aconfig_storage/aconfig_storage_file.hpp"
+
+using namespace android::base;
+using namespace aconfig_storage;
+
+void verify_value(const FlagValueSummary& flag,
+                  const std::string& package_name,
+                  const std::string& flag_name,
+                  const std::string& flag_val,
+                  const std::string& value_type) {
+  ASSERT_EQ(flag.package_name, package_name);
+  ASSERT_EQ(flag.flag_name, flag_name);
+  ASSERT_EQ(flag.flag_value, flag_val);
+  ASSERT_EQ(flag.value_type, value_type);
+}
+
+void verify_value_info(const FlagValueAndInfoSummary& flag,
+                       const std::string& package_name,
+                       const std::string& flag_name,
+                       const std::string& flag_val,
+                       const std::string& value_type,
+                       bool is_readwrite,
+                       bool has_server_override,
+                       bool has_local_override) {
+  ASSERT_EQ(flag.package_name, package_name);
+  ASSERT_EQ(flag.flag_name, flag_name);
+  ASSERT_EQ(flag.flag_value, flag_val);
+  ASSERT_EQ(flag.value_type, value_type);
+  ASSERT_EQ(flag.is_readwrite, is_readwrite);
+  ASSERT_EQ(flag.has_server_override, has_server_override);
+  ASSERT_EQ(flag.has_local_override, has_local_override);
+}
+
+TEST(AconfigStorageFileTest, test_list_flag) {
+  auto const test_dir = GetExecutableDirectory();
+  auto const package_map = test_dir + "/package.map";
+  auto const flag_map = test_dir + "/flag.map";
+  auto const flag_val = test_dir + "/flag.val";
+  auto flag_list_result = aconfig_storage::list_flags(
+      package_map, flag_map, flag_val);
+  ASSERT_TRUE(flag_list_result.ok());
+
+  auto const& flag_list = *flag_list_result;
+  ASSERT_EQ(flag_list.size(), 8);
+  verify_value(flag_list[0], "com.android.aconfig.storage.test_1", "disabled_rw",
+               "false", "ReadWriteBoolean");
+  verify_value(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro",
+               "true", "ReadOnlyBoolean");
+  verify_value(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw",
+               "true", "ReadWriteBoolean");
+  verify_value(flag_list[3], "com.android.aconfig.storage.test_2", "disabled_rw",
+               "false", "ReadWriteBoolean");
+  verify_value(flag_list[4], "com.android.aconfig.storage.test_2", "enabled_fixed_ro",
+               "true", "FixedReadOnlyBoolean");
+  verify_value(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro",
+               "true", "ReadOnlyBoolean");
+  verify_value(flag_list[6], "com.android.aconfig.storage.test_4", "enabled_fixed_ro",
+               "true", "FixedReadOnlyBoolean");
+  verify_value(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw",
+               "true", "ReadWriteBoolean");
+}
+
+TEST(AconfigStorageFileTest, test_list_flag_with_info) {
+  auto const test_dir = GetExecutableDirectory();
+  auto const package_map = test_dir + "/package.map";
+  auto const flag_map = test_dir + "/flag.map";
+  auto const flag_val = test_dir + "/flag.val";
+  auto const flag_info = test_dir + "/flag.info";
+  auto flag_list_result = aconfig_storage::list_flags_with_info(
+      package_map, flag_map, flag_val, flag_info);
+  ASSERT_TRUE(flag_list_result.ok());
+
+  auto const& flag_list = *flag_list_result;
+  ASSERT_EQ(flag_list.size(), 8);
+  verify_value_info(flag_list[0], "com.android.aconfig.storage.test_1", "disabled_rw",
+                    "false", "ReadWriteBoolean", true, false, false);
+  verify_value_info(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro",
+                    "true", "ReadOnlyBoolean", false, false, false);
+  verify_value_info(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw",
+                    "true", "ReadWriteBoolean", true, false, false);
+  verify_value_info(flag_list[3], "com.android.aconfig.storage.test_2", "disabled_rw",
+                    "false", "ReadWriteBoolean", true, false, false);
+  verify_value_info(flag_list[4], "com.android.aconfig.storage.test_2", "enabled_fixed_ro",
+                    "true", "FixedReadOnlyBoolean", false, false, false);
+  verify_value_info(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro",
+                    "true", "ReadOnlyBoolean", false, false, false);
+  verify_value_info(flag_list[6], "com.android.aconfig.storage.test_4", "enabled_fixed_ro",
+                    "true", "FixedReadOnlyBoolean", false, false, false);
+  verify_value_info(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw",
+                    "true", "ReadWriteBoolean", true, false, false);
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/Android.bp b/tools/aconfig/aconfig_storage_read_api/Android.bp
index b252e9d..5e9eb54 100644
--- a/tools/aconfig/aconfig_storage_read_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/Android.bp
@@ -9,8 +9,6 @@
     srcs: ["src/lib.rs"],
     rustlibs: [
         "libanyhow",
-        "libonce_cell",
-        "libtempfile",
         "libmemmap2",
         "libcxx",
         "libthiserror",
@@ -34,10 +32,14 @@
     name: "aconfig_storage_read_api.test",
     test_suites: ["general-tests"],
     defaults: ["aconfig_storage_read_api.defaults"],
+    rustlibs: [
+        "librand",
+    ],
     data: [
         "tests/package.map",
         "tests/flag.map",
         "tests/flag.val",
+        "tests/flag.info",
     ],
 }
 
@@ -64,11 +66,18 @@
     name: "libaconfig_storage_read_api_cxx_bridge",
     crate_name: "aconfig_storage_read_api_cxx_bridge",
     host_supported: true,
+    vendor_available: true,
+    product_available: true,
     defaults: ["aconfig_storage_read_api.defaults"],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    min_sdk_version: "29",
 }
 
 // flag read api cc interface
-cc_library_static {
+cc_library {
     name: "libaconfig_storage_read_api_cc",
     srcs: ["aconfig_storage_read_api.cpp"],
     generated_headers: [
@@ -78,9 +87,79 @@
     generated_sources: ["libcxx_aconfig_storage_read_api_bridge_code"],
     whole_static_libs: ["libaconfig_storage_read_api_cxx_bridge"],
     export_include_dirs: ["include"],
-    static_libs: [
-        "libaconfig_storage_protos_cc",
-        "libprotobuf-cpp-lite",
-        "libbase",
+    host_supported: true,
+    vendor_available: true,
+    product_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    min_sdk_version: "29",
+    target: {
+        linux: {
+            version_script: "libaconfig_storage_read_api_cc.map",
+        },
+    },
+    double_loadable: true,
+    afdo: true,
+}
+
+soong_config_module_type {
+    name: "aconfig_lib_cc_shared_link_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "Aconfig",
+    bool_variables: [
+        "read_from_new_storage",
+    ],
+    properties: [
+        "shared_libs",
+    ],
+}
+
+soong_config_bool_variable {
+    name: "read_from_new_storage",
+}
+
+aconfig_lib_cc_shared_link_defaults {
+    name: "aconfig_lib_cc_shared_link.defaults",
+    soong_config_variables: {
+        read_from_new_storage: {
+            shared_libs: [
+                "libaconfig_storage_read_api_cc",
+            ],
+        },
+    },
+}
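+
+// Illustrative only: a client module opting into the conditional dependency
+// would add
+//     defaults: ["aconfig_lib_cc_shared_link.defaults"],
+// and would link libaconfig_storage_read_api_cc only when the product sets
+// the Aconfig soong config variable, e.g. (assumed make syntax):
+//     $(call soong_config_set,Aconfig,read_from_new_storage,true)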
+
+cc_defaults {
+    name: "aconfig_lib_cc_static_link.defaults",
+    shared_libs: [
+        "libaconfig_storage_read_api_cc",
+        "liblog",
+    ],
+}
+
+rust_ffi_shared {
+    name: "libaconfig_storage_read_api_rust_jni",
+    crate_name: "aconfig_storage_read_api_rust_jni",
+    srcs: ["srcs/lib.rs"],
+    rustlibs: [
+        "libaconfig_storage_read_api",
+        "libanyhow",
+        "libjni",
+    ],
+    prefer_rlib: true,
+}
+
+java_library {
+    name: "libaconfig_storage_read_api_java",
+    srcs: [
+        "srcs/**/*.java",
+    ],
+    required: ["libaconfig_storage_read_api_rust_jni"],
+    min_sdk_version: "UpsideDownCake",
+    apex_available: [
+        "//apex_available:anyapex",
+        "//apex_available:platform",
     ],
 }
diff --git a/tools/aconfig/aconfig_storage_read_api/Cargo.toml b/tools/aconfig/aconfig_storage_read_api/Cargo.toml
index 30a4298..2b27e4b 100644
--- a/tools/aconfig/aconfig_storage_read_api/Cargo.toml
+++ b/tools/aconfig/aconfig_storage_read_api/Cargo.toml
@@ -8,10 +8,9 @@
 cargo = []
 
 [dependencies]
+rand = "0.8.5"
 anyhow = "1.0.69"
 memmap2 = "0.8.0"
-once_cell = "1.19.0"
-tempfile = "3.9.0"
 cxx = "1.0"
 thiserror = "1.0.56"
 aconfig_storage_file = { path = "../aconfig_storage_file" }
diff --git a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
index 2213831..97ada3a 100644
--- a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
+++ b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
@@ -1,115 +1,114 @@
-#include <android-base/file.h>
-#include <android-base/logging.h>
-#include <protos/aconfig_storage_metadata.pb.h>
-
 #include <sys/mman.h>
 #include <sys/stat.h>
 #include <fcntl.h>
+#include <errno.h>
+#include <string.h>
 
 #include "rust/cxx.h"
 #include "aconfig_storage/lib.rs.h"
 #include "aconfig_storage/aconfig_storage_read_api.hpp"
 
-using storage_records_pb = android::aconfig_storage_metadata::storage_files;
-using storage_record_pb = android::aconfig_storage_metadata::storage_file_info;
-using namespace android::base;
-
 namespace aconfig_storage {
 
 /// Storage location pb file
-static constexpr char kAvailableStorageRecordsPb[] =
-    "/metadata/aconfig/boot/available_storage_file_records.pb";
+static constexpr char kStorageDir[] = "/metadata/aconfig";
 
-/// Read aconfig storage records pb file
-static Result<storage_records_pb> read_storage_records_pb(std::string const& pb_file) {
-  auto records = storage_records_pb();
-  auto content = std::string();
-  if (!ReadFileToString(pb_file, &content)) {
-    return ErrnoError() << "ReadFileToString failed";
-  }
-
-  if (!records.ParseFromString(content)) {
-    return ErrnoError() << "Unable to parse persistent storage records protobuf";
-  }
-  return records;
+/// Destructor: unmap the underlying storage file
+MappedStorageFile::~MappedStorageFile() {
+  munmap(file_ptr, file_size);
 }
 
 /// Get storage file path
 static Result<std::string> find_storage_file(
-    std::string const& pb_file,
+    std::string const& storage_dir,
     std::string const& container,
     StorageFileType file_type) {
-  auto records_pb = read_storage_records_pb(pb_file);
-  if (!records_pb.ok()) {
-    return Error() << "Unable to read storage records from " << pb_file
-                   << " : " << records_pb.error();
+  switch(file_type) {
+    case StorageFileType::package_map:
+      return storage_dir + "/maps/" + container + ".package.map";
+    case StorageFileType::flag_map:
+      return storage_dir + "/maps/" + container + ".flag.map";
+    case StorageFileType::flag_val:
+      return storage_dir + "/boot/" + container + ".val";
+    case StorageFileType::flag_info:
+      return storage_dir + "/boot/" + container + ".info";
+    default:
+      auto result = Result<std::string>();
+      result.errmsg = "Invalid storage file type";
+      return result;
   }
-
-  for (auto& entry : records_pb->files()) {
-    if (entry.container() == container) {
-      switch(file_type) {
-        case StorageFileType::package_map:
-          return entry.package_map();
-        case StorageFileType::flag_map:
-          return entry.flag_map();
-        case StorageFileType::flag_val:
-          return entry.flag_val();
-        default:
-          return Error() << "Invalid file type " << file_type;
-      }
-    }
-  }
-
-  return Error() << "Unable to find storage files for container " << container;;
-}
-
-/// Map a storage file
-static Result<MappedStorageFile> map_storage_file(std::string const& file) {
-  int fd = open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY);
-  if (fd == -1) {
-    return ErrnoError() << "failed to open " << file;
-  };
-
-  struct stat fd_stat;
-  if (fstat(fd, &fd_stat) < 0) {
-    return ErrnoError() << "fstat failed";
-  }
-  size_t file_size = fd_stat.st_size;
-
-  void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, fd, 0);
-  if (map_result == MAP_FAILED) {
-    return ErrnoError() << "mmap failed";
-  }
-
-  auto mapped_file = MappedStorageFile();
-  mapped_file.file_ptr = map_result;
-  mapped_file.file_size = file_size;
-
-  return mapped_file;
 }
 
 namespace private_internal_api {
 
 /// Get mapped file implementation.
-Result<MappedStorageFile> get_mapped_file_impl(
-    std::string const& pb_file,
+Result<MappedStorageFile*> get_mapped_file_impl(
+    std::string const& storage_dir,
     std::string const& container,
     StorageFileType file_type) {
-  auto file_result = find_storage_file(pb_file, container, file_type);
+  auto file_result = find_storage_file(storage_dir, container, file_type);
   if (!file_result.ok()) {
-    return Error() << file_result.error();
+    auto result = Result<MappedStorageFile*>();
+    result.errmsg = file_result.error();
+    return result;
   }
   return map_storage_file(*file_result);
 }
 
 } // namespace private internal api
 
+/// Map a storage file
+Result<MappedStorageFile*> map_storage_file(std::string const& file) {
+  int fd = open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY);
+  if (fd == -1) {
+    auto result = Result<MappedStorageFile*>();
+    result.errmsg = std::string("failed to open ") + file + ": " + strerror(errno);
+    return result;
+  };
+
+  struct stat fd_stat;
+  if (fstat(fd, &fd_stat) < 0) {
+    auto result = Result<MappedStorageFile*>();
+    result.errmsg = std::string("fstat failed: ") + strerror(errno);
+    return result;
+  }
+  size_t file_size = fd_stat.st_size;
+
+  void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, fd, 0);
+  if (map_result == MAP_FAILED) {
+    auto result = Result<MappedStorageFile*>();
+    result.errmsg = std::string("mmap failed: ") + strerror(errno);
+    return result;
+  }
+
+  auto mapped_file = new MappedStorageFile();
+  mapped_file->file_ptr = map_result;
+  mapped_file->file_size = file_size;
+
+  return mapped_file;
+}
+
+/// Map from StoredFlagType to FlagValueType
+Result<FlagValueType> map_to_flag_value_type(
+    StoredFlagType stored_type) {
+  switch (stored_type) {
+    case StoredFlagType::ReadWriteBoolean:
+    case StoredFlagType::ReadOnlyBoolean:
+    case StoredFlagType::FixedReadOnlyBoolean:
+      return FlagValueType::Boolean;
+    default:
+      auto result = Result<FlagValueType>();
+      result.errmsg = "Unsupported stored flag type";
+      return result;
+  }
+}
+
 /// Get mapped storage file
-Result<MappedStorageFile> get_mapped_file(
+Result<MappedStorageFile*> get_mapped_file(
     std::string const& container,
     StorageFileType file_type) {
   return private_internal_api::get_mapped_file_impl(
-      kAvailableStorageRecordsPb, container, file_type);
+      kStorageDir, container, file_type);
 }
 
 /// Get storage file version number
@@ -120,58 +119,84 @@
   if (version_cxx.query_success) {
     return version_cxx.version_number;
   } else {
-    return Error() << version_cxx.error_message;
+    auto result = Result<uint32_t>();
+    result.errmsg = version_cxx.error_message.c_str();
+    return result;
   }
 }
 
-/// Get package offset
-Result<PackageOffset> get_package_offset(
+/// Get package context
+Result<PackageReadContext> get_package_read_context(
     MappedStorageFile const& file,
     std::string const& package) {
   auto content = rust::Slice<const uint8_t>(
       static_cast<uint8_t*>(file.file_ptr), file.file_size);
-  auto offset_cxx = get_package_offset_cxx(content, rust::Str(package.c_str()));
-  if (offset_cxx.query_success) {
-    auto offset = PackageOffset();
-    offset.package_exists = offset_cxx.package_exists;
-    offset.package_id = offset_cxx.package_id;
-    offset.boolean_offset = offset_cxx.boolean_offset;
-    return offset;
+  auto context_cxx = get_package_read_context_cxx(content, rust::Str(package.c_str()));
+  if (context_cxx.query_success) {
+    auto context = PackageReadContext();
+    context.package_exists = context_cxx.package_exists;
+    context.package_id = context_cxx.package_id;
+    context.boolean_start_index = context_cxx.boolean_start_index;
+    return context;
   } else {
-    return Error() << offset_cxx.error_message;
+    auto result = Result<PackageReadContext>();
+    result.errmsg = context_cxx.error_message.c_str();
+    return result;
   }
 }
 
-/// Get flag offset
-Result<FlagOffset> get_flag_offset(
+/// Get flag read context
+Result<FlagReadContext> get_flag_read_context(
     MappedStorageFile const& file,
     uint32_t package_id,
     std::string const& flag_name){
   auto content = rust::Slice<const uint8_t>(
       static_cast<uint8_t*>(file.file_ptr), file.file_size);
-  auto offset_cxx = get_flag_offset_cxx(content, package_id, rust::Str(flag_name.c_str()));
-  if (offset_cxx.query_success) {
-    auto offset = FlagOffset();
-    offset.flag_exists = offset_cxx.flag_exists;
-    offset.flag_offset = offset_cxx.flag_offset;
-    return offset;
+  auto context_cxx = get_flag_read_context_cxx(content, package_id, rust::Str(flag_name.c_str()));
+  if (context_cxx.query_success) {
+    auto context = FlagReadContext();
+    context.flag_exists = context_cxx.flag_exists;
+    context.flag_type = static_cast<StoredFlagType>(context_cxx.flag_type);
+    context.flag_index = context_cxx.flag_index;
+    return context;
   } else {
-   return Error() << offset_cxx.error_message;
+    auto result = Result<FlagReadContext>();
+    result.errmsg = context_cxx.error_message.c_str();
+    return result;
   }
 }
 
 /// Get boolean flag value
 Result<bool> get_boolean_flag_value(
     MappedStorageFile const& file,
-    uint32_t offset) {
+    uint32_t index) {
   auto content = rust::Slice<const uint8_t>(
       static_cast<uint8_t*>(file.file_ptr), file.file_size);
-  auto value_cxx = get_boolean_flag_value_cxx(content, offset);
+  auto value_cxx = get_boolean_flag_value_cxx(content, index);
   if (value_cxx.query_success) {
     return value_cxx.flag_value;
   } else {
-    return Error() << value_cxx.error_message;
+    auto result = Result<bool>();
+    result.errmsg = value_cxx.error_message.c_str();
+    return result;
   }
 }
 
+/// Get boolean flag attribute
+Result<uint8_t> get_flag_attribute(
+    MappedStorageFile const& file,
+    FlagValueType value_type,
+    uint32_t index) {
+  auto content = rust::Slice<const uint8_t>(
+      static_cast<uint8_t*>(file.file_ptr), file.file_size);
+  auto info_cxx = get_flag_attribute_cxx(
+      content, static_cast<uint16_t>(value_type), index);
+  if (info_cxx.query_success) {
+    return info_cxx.flag_attribute;
+  } else {
+    auto result = Result<uint8_t>();
+    result.errmsg = info_cxx.error_message.c_str();
+    return result;
+  }
+}
 } // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_read_api/include/aconfig_storage/aconfig_storage_read_api.hpp b/tools/aconfig/aconfig_storage_read_api/include/aconfig_storage/aconfig_storage_read_api.hpp
index aa90f47..b50935b 100644
--- a/tools/aconfig/aconfig_storage_read_api/include/aconfig_storage/aconfig_storage_read_api.hpp
+++ b/tools/aconfig/aconfig_storage_read_api/include/aconfig_storage/aconfig_storage_read_api.hpp
@@ -2,84 +2,170 @@
 
 #include <stdint.h>
 #include <string>
-#include <android-base/result.h>
+#include <cassert>
 
 namespace aconfig_storage {
 
-/// Storage file type enum
+/// Storage file type enum, to be consistent with the one defined in
+/// aconfig_storage_file/src/lib.rs
 enum StorageFileType {
   package_map,
   flag_map,
-  flag_val
+  flag_val,
+  flag_info
+};
+
+/// Flag type enum, to be consistent with the one defined in
+/// aconfig_storage_file/src/lib.rs
+enum StoredFlagType {
+  ReadWriteBoolean = 0,
+  ReadOnlyBoolean = 1,
+  FixedReadOnlyBoolean = 2,
+};
+
+/// Flag value type enum, to be consistent with the one defined in
+/// aconfig_storage_file/src/lib.rs
+enum FlagValueType {
+  Boolean = 0,
+};
+
+/// Flag info enum, to be consistent with the one defined in
+/// aconfig_storage_file/src/flag_info.rs
+enum FlagInfoBit {
+  HasServerOverride = 1<<0,
+  IsReadWrite = 1<<1,
+  HasLocalOverride = 1<<2,
 };
 
 /// Mapped storage file
 struct MappedStorageFile {
   void* file_ptr;
   size_t file_size;
+  virtual ~MappedStorageFile();
 };
 
-/// Package offset query result
-struct PackageOffset {
+/// Package read context query result
+struct PackageReadContext {
   bool package_exists;
   uint32_t package_id;
-  uint32_t boolean_offset;
+  uint32_t boolean_start_index;
 };
 
-/// Flag offset query result
-struct FlagOffset {
+/// Flag read context query result
+struct FlagReadContext {
   bool flag_exists;
-  uint16_t flag_offset;
+  StoredFlagType flag_type;
+  uint16_t flag_index;
+};
+
+
+template <class T>
+class Result {
+  public:
+
+  Result()
+      : data()
+      , errmsg()
+      , has_data(false)
+  {}
+
+  Result(T const& value)
+      : data(value)
+      , errmsg()
+      , has_data(true)
+  {}
+
+  bool ok() {
+    return has_data;
+  }
+
+  T& operator*() {
+    assert(has_data);
+    return data;
+  }
+
+  T* operator->() {
+    assert(has_data);
+    return &data;
+  }
+
+  std::string const& error() {
+    assert(!has_data);
+    return errmsg;
+  }
+
+  T data;
+  std::string errmsg;
+  bool has_data;
 };
 
 /// DO NOT USE APIS IN THE FOLLOWING NAMESPACE DIRECTLY
 namespace private_internal_api {
 
-android::base::Result<MappedStorageFile> get_mapped_file_impl(
+Result<MappedStorageFile*> get_mapped_file_impl(
     std::string const& pb_file,
     std::string const& container,
     StorageFileType file_type);
-
 } // namespace private_internal_api
 
+/// Map a storage file
+Result<MappedStorageFile*> map_storage_file(
+    std::string const& file);
+
+
+/// Map from StoredFlagType to FlagValueType
+/// \input stored_type: stored flag type in the storage file
+/// \returns the flag value type enum
+Result<FlagValueType> map_to_flag_value_type(
+    StoredFlagType stored_type);
+
 /// Get mapped storage file
 /// \input container: storage container name
 /// \input file_type: storage file type enum
 /// \returns a MappedStorageFileQuery
-android::base::Result<MappedStorageFile> get_mapped_file(
+Result<MappedStorageFile*> get_mapped_file(
     std::string const& container,
     StorageFileType file_type);
 
 /// Get storage file version number
 /// \input file_path: the path to the storage file
 /// \returns the storage file version
-android::base::Result<uint32_t> get_storage_file_version(
+Result<uint32_t> get_storage_file_version(
     std::string const& file_path);
 
-/// Get package offset
+/// Get package read context
 /// \input file: mapped storage file
 /// \input package: the flag package name
-/// \returns a package offset
-android::base::Result<PackageOffset> get_package_offset(
+/// \returns a package read context
+Result<PackageReadContext> get_package_read_context(
     MappedStorageFile const& file,
     std::string const& package);
 
-/// Get flag offset
+/// Get flag read context
 /// \input file: mapped storage file
 /// \input package_id: the flag package id obtained from package offset query
 /// \input flag_name: flag name
-/// \returns the flag offset
-android::base::Result<FlagOffset> get_flag_offset(
+/// \returns the flag read context
+Result<FlagReadContext> get_flag_read_context(
     MappedStorageFile const& file,
     uint32_t package_id,
     std::string const& flag_name);
 
 /// Get boolean flag value
 /// \input file: mapped storage file
-/// \input offset: the boolean flag value byte offset in the file
+/// \input index: the boolean flag index in the file
 /// \returns the boolean flag value
-android::base::Result<bool> get_boolean_flag_value(
+Result<bool> get_boolean_flag_value(
     MappedStorageFile const& file,
-    uint32_t offset);
+    uint32_t index);
 
+/// Get boolean flag attribute
+/// \input file: mapped storage file
+/// \input value_type: flag value type
+/// \input index: the boolean flag index in the file
+/// \returns the boolean flag attribute
+Result<uint8_t> get_flag_attribute(
+    MappedStorageFile const& file,
+    FlagValueType value_type,
+    uint32_t index);
 } // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_read_api/libaconfig_storage_read_api_cc.map b/tools/aconfig/aconfig_storage_read_api/libaconfig_storage_read_api_cc.map
new file mode 100644
index 0000000..7d47e0b
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/libaconfig_storage_read_api_cc.map
@@ -0,0 +1,11 @@
+LIBACONFIG_STORAGE_READ_API_CC {
+  # Export everything in the aconfig_storage namespace. This includes both the
+  # public API and library internals.
+  global:
+    extern "C++" {
+        aconfig_storage::*;
+    };
+  # Hide everything else.
+  local:
+    *;
+};
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
new file mode 100644
index 0000000..6d03377
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! flag info query module defines the flag info file read from mapped bytes
+
+use crate::{AconfigStorageError, FILE_VERSION};
+use aconfig_storage_file::{flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType};
+use anyhow::anyhow;
+
+/// Get flag attribute bitfield
+pub fn find_flag_attribute(
+    buf: &[u8],
+    flag_type: FlagValueType,
+    flag_index: u32,
+) -> Result<u8, AconfigStorageError> {
+    let interpreted_header = FlagInfoHeader::from_bytes(buf)?;
+    if interpreted_header.version > crate::FILE_VERSION {
+        return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
+            "Cannot read storage file with a higher version of {} with lib version {}",
+            interpreted_header.version,
+            FILE_VERSION
+        )));
+    }
+
+    // get byte offset to the flag info
+    let mut head = match flag_type {
+        FlagValueType::Boolean => (interpreted_header.boolean_flag_offset + flag_index) as usize,
+    };
+
+    if head >= interpreted_header.file_size as usize {
+        return Err(AconfigStorageError::InvalidStorageFileOffset(anyhow!(
+            "Flag info offset goes beyond the end of the file."
+        )));
+    }
+
+    let val = read_u8_from_bytes(buf, &mut head)?;
+    Ok(val)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use aconfig_storage_file::{test_utils::create_test_flag_info_list, FlagInfoBit};
+
+    #[test]
+    // this test point locks down query if flag has server override
+    fn test_is_flag_sticky() {
+        let flag_info_list = create_test_flag_info_list().into_bytes();
+        for offset in 0..8 {
+            let attribute =
+                find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, offset).unwrap();
+            assert_eq!((attribute & FlagInfoBit::HasServerOverride as u8) != 0u8, false);
+        }
+    }
+
+    #[test]
+    // this test point locks down query if flag is readwrite
+    fn test_is_flag_readwrite() {
+        let flag_info_list = create_test_flag_info_list().into_bytes();
+        let baseline: Vec<bool> = vec![true, false, true, true, false, false, false, true];
+        for offset in 0..8 {
+            let attribute =
+                find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, offset).unwrap();
+            assert_eq!(
+                (attribute & FlagInfoBit::IsReadWrite as u8) != 0u8,
+                baseline[offset as usize]
+            );
+        }
+    }
+
+    #[test]
+    // this test point locks down query if flag has local override
+    fn test_flag_has_override() {
+        let flag_info_list = create_test_flag_info_list().into_bytes();
+        for offset in 0..8 {
+            let attribute =
+                find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, offset).unwrap();
+            assert_eq!((attribute & FlagInfoBit::HasLocalOverride as u8) != 0u8, false);
+        }
+    }
+
+    #[test]
+    // this test point locks down query beyond the end of boolean section
+    fn test_boolean_out_of_range() {
+        let flag_info_list = create_test_flag_info_list().into_bytes();
+        let error =
+            find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, 8).unwrap_err();
+        assert_eq!(
+            format!("{:?}", error),
+            "InvalidStorageFileOffset(Flag info offset goes beyond the end of the file.)"
+        );
+    }
+
+    #[test]
+    // this test point locks down query error when file has a higher version
+    fn test_higher_version_storage_file() {
+        let mut info_list = create_test_flag_info_list();
+        info_list.header.version = crate::FILE_VERSION + 1;
+        let flag_info = info_list.into_bytes();
+        let error = find_flag_attribute(&flag_info[..], FlagValueType::Boolean, 4).unwrap_err();
+        assert_eq!(
+            format!("{:?}", error),
+            format!(
+                "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
+                crate::FILE_VERSION + 1,
+                crate::FILE_VERSION
+            )
+        );
+    }
+}
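
The attribute value returned by find_flag_attribute is a bitfield over the FlagInfoBit bits exercised in the tests above. As a minimal sketch (the helper name is illustrative, not part of this module), a caller could decode it as follows:

use aconfig_storage_file::FlagInfoBit;

/// Illustrative helper: split the flag attribute bitfield into its three bits.
fn decode_flag_attribute(attribute: u8) -> (bool, bool, bool) {
    let has_server_override = (attribute & FlagInfoBit::HasServerOverride as u8) != 0;
    let is_read_write = (attribute & FlagInfoBit::IsReadWrite as u8) != 0;
    let has_local_override = (attribute & FlagInfoBit::HasLocalOverride as u8) != 0;
    (has_server_override, is_read_write, has_local_override)
}
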
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
index a251b41..a1a4793 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
@@ -18,18 +18,23 @@
 
 use crate::{AconfigStorageError, FILE_VERSION};
 use aconfig_storage_file::{
-    flag_table::FlagTableHeader, flag_table::FlagTableNode, read_u32_from_bytes,
+    flag_table::FlagTableHeader, flag_table::FlagTableNode, read_u32_from_bytes, StoredFlagType,
 };
 use anyhow::anyhow;
 
-pub type FlagOffset = u16;
+/// Flag table query return
+#[derive(PartialEq, Debug)]
+pub struct FlagReadContext {
+    pub flag_type: StoredFlagType,
+    pub flag_index: u16,
+}
 
-/// Query flag within package offset
-pub fn find_flag_offset(
+/// Query flag read context: flag type and within-package flag index
+pub fn find_flag_read_context(
     buf: &[u8],
     package_id: u32,
     flag: &str,
-) -> Result<Option<FlagOffset>, AconfigStorageError> {
+) -> Result<Option<FlagReadContext>, AconfigStorageError> {
     let interpreted_header = FlagTableHeader::from_bytes(buf)?;
     if interpreted_header.version > crate::FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
@@ -53,7 +58,10 @@
     loop {
         let interpreted_node = FlagTableNode::from_bytes(&buf[flag_node_offset..])?;
         if interpreted_node.package_id == package_id && interpreted_node.flag_name == flag {
-            return Ok(Some(interpreted_node.flag_id));
+            return Ok(Some(FlagReadContext {
+                flag_type: interpreted_node.flag_type,
+                flag_index: interpreted_node.flag_index,
+            }));
         }
         match interpreted_node.next_offset {
             Some(offset) => flag_node_offset = offset as usize,
@@ -72,19 +80,20 @@
     fn test_flag_query() {
         let flag_table = create_test_flag_table().into_bytes();
         let baseline = vec![
-            (0, "enabled_ro", 1u16),
-            (0, "enabled_rw", 2u16),
-            (1, "disabled_ro", 0u16),
-            (2, "enabled_ro", 1u16),
-            (1, "enabled_fixed_ro", 1u16),
-            (1, "enabled_ro", 2u16),
-            (2, "enabled_fixed_ro", 0u16),
-            (0, "disabled_rw", 0u16),
+            (0, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 1u16),
+            (0, "enabled_rw", StoredFlagType::ReadWriteBoolean, 2u16),
+            (2, "enabled_rw", StoredFlagType::ReadWriteBoolean, 1u16),
+            (1, "disabled_rw", StoredFlagType::ReadWriteBoolean, 0u16),
+            (1, "enabled_fixed_ro", StoredFlagType::FixedReadOnlyBoolean, 1u16),
+            (1, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 2u16),
+            (2, "enabled_fixed_ro", StoredFlagType::FixedReadOnlyBoolean, 0u16),
+            (0, "disabled_rw", StoredFlagType::ReadWriteBoolean, 0u16),
         ];
-        for (package_id, flag_name, expected_offset) in baseline.into_iter() {
-            let flag_offset =
-                find_flag_offset(&flag_table[..], package_id, flag_name).unwrap().unwrap();
-            assert_eq!(flag_offset, expected_offset);
+        for (package_id, flag_name, flag_type, flag_index) in baseline.into_iter() {
+            let flag_context =
+                find_flag_read_context(&flag_table[..], package_id, flag_name).unwrap().unwrap();
+            assert_eq!(flag_context.flag_type, flag_type);
+            assert_eq!(flag_context.flag_index, flag_index);
         }
     }
 
@@ -92,10 +101,10 @@
     // this test point locks down table query of a non exist flag
     fn test_not_existed_flag_query() {
         let flag_table = create_test_flag_table().into_bytes();
-        let flag_offset = find_flag_offset(&flag_table[..], 1, "disabled_fixed_ro").unwrap();
-        assert_eq!(flag_offset, None);
-        let flag_offset = find_flag_offset(&flag_table[..], 2, "disabled_rw").unwrap();
-        assert_eq!(flag_offset, None);
+        let flag_context = find_flag_read_context(&flag_table[..], 1, "disabled_fixed_ro").unwrap();
+        assert_eq!(flag_context, None);
+        let flag_context = find_flag_read_context(&flag_table[..], 2, "disabled_rw").unwrap();
+        assert_eq!(flag_context, None);
     }
 
     #[test]
@@ -104,7 +113,7 @@
         let mut table = create_test_flag_table();
         table.header.version = crate::FILE_VERSION + 1;
         let flag_table = table.into_bytes();
-        let error = find_flag_offset(&flag_table[..], 0, "enabled_ro").unwrap_err();
+        let error = find_flag_read_context(&flag_table[..], 0, "enabled_ro").unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
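
The flag_type carried in FlagReadContext is a StoredFlagType; before it can be used with the flag value or flag info queries it has to be narrowed to a FlagValueType, which is what map_to_flag_value_type does on the C++ side. A hedged sketch of the same conversion in Rust, going through u16 exactly as the cxx bridge in lib.rs does (the helper name is hypothetical):

use std::convert::TryFrom;

use aconfig_storage_file::{AconfigStorageError, FlagValueType, StoredFlagType};

/// Sketch: narrow a stored flag type to its value type for follow-up queries.
fn stored_to_value_type(stored: StoredFlagType) -> Result<FlagValueType, AconfigStorageError> {
    FlagValueType::try_from(stored as u16)
}
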
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
index 964cd69..9d32a16 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
@@ -21,7 +21,7 @@
 use anyhow::anyhow;
 
 /// Query flag value
-pub fn find_boolean_flag_value(buf: &[u8], flag_offset: u32) -> Result<bool, AconfigStorageError> {
+pub fn find_boolean_flag_value(buf: &[u8], flag_index: u32) -> Result<bool, AconfigStorageError> {
     let interpreted_header = FlagValueHeader::from_bytes(buf)?;
     if interpreted_header.version > crate::FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
@@ -31,10 +31,8 @@
         )));
     }
 
-    let mut head = (interpreted_header.boolean_value_offset + flag_offset) as usize;
-
-    // TODO: right now, there is only boolean flags, with more flag value types added
-    // later, the end of boolean flag value section should be updated (b/322826265).
+    // Find byte offset to the flag value; each boolean flag costs one byte to store
+    let mut head = (interpreted_header.boolean_value_offset + flag_index) as usize;
     if head >= interpreted_header.file_size as usize {
         return Err(AconfigStorageError::InvalidStorageFileOffset(anyhow!(
             "Flag value offset goes beyond the end of the file."
@@ -48,26 +46,13 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::{FlagValueList, StorageFileType};
-
-    pub fn create_test_flag_value_list() -> FlagValueList {
-        let header = FlagValueHeader {
-            version: crate::FILE_VERSION,
-            container: String::from("system"),
-            file_type: StorageFileType::FlagVal as u8,
-            file_size: 35,
-            num_flags: 8,
-            boolean_value_offset: 27,
-        };
-        let booleans: Vec<bool> = vec![false, true, false, false, true, true, false, true];
-        FlagValueList { header, booleans }
-    }
+    use aconfig_storage_file::test_utils::create_test_flag_value_list;
 
     #[test]
     // this test point locks down flag value query
     fn test_flag_value_query() {
         let flag_value_list = create_test_flag_value_list().into_bytes();
-        let baseline: Vec<bool> = vec![false, true, false, false, true, true, false, true];
+        let baseline: Vec<bool> = vec![false, true, true, false, true, true, true, true];
         for (offset, expected_value) in baseline.into_iter().enumerate() {
             let flag_value = find_boolean_flag_value(&flag_value_list[..], offset as u32).unwrap();
             assert_eq!(flag_value, expected_value);
diff --git a/tools/aconfig/aconfig_storage_read_api/src/lib.rs b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
index da64cb7..61f9e96 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
@@ -17,14 +17,16 @@
 //! `aconfig_storage_read_api` is a crate that defines read apis to read flags from storage
 //! files. It provides four apis to interface with storage files:
 //!
-//! 1, function to get package flag value start offset
-//! pub fn get_package_offset(container: &str, package: &str) -> `Result<Option<PackageOffset>>>`
+//! 1, function to get package read context
+//! 1, function to get package read context
+//! pub fn get_package_read_context(container: &str, package: &str)
+//! -> `Result<Option<PackageReadContext>>`
 //!
-//! 2, function to get flag offset within a specific package
-//! pub fn get_flag_offset(container: &str, package_id: u32, flag: &str) -> `Result<Option<u16>>>`
+//! 2, function to get flag read context
+//! pub fn get_flag_read_context(container: &str, package_id: u32, flag: &str)
+//! -> `Result<Option<FlagReadContext>>`
 //!
-//! 3, function to get the actual flag value given the global offset (combined package and
-//! flag offset).
+//! 3, function to get the actual flag value given the global index (combined package and
+//! flag index).
 //! pub fn get_boolean_flag_value(container: &str, offset: u32) -> `Result<bool>`
 //!
 //! 4, function to get storage file version without mmapping the file.
@@ -34,30 +36,29 @@
 //! apis. DO NOT DIRECTLY USE THESE APIS IN YOUR SOURCE CODE. For auto generated flag apis
 //! please refer to the g3doc go/android-flags
 
+pub mod flag_info_query;
 pub mod flag_table_query;
 pub mod flag_value_query;
 pub mod mapped_file;
 pub mod package_table_query;
 
-#[cfg(test)]
-mod test_utils;
-
-pub use aconfig_storage_file::{AconfigStorageError, StorageFileType};
-pub use flag_table_query::FlagOffset;
-pub use package_table_query::PackageOffset;
+pub use aconfig_storage_file::{AconfigStorageError, FlagValueType, StorageFileType};
+pub use flag_table_query::FlagReadContext;
+pub use package_table_query::PackageReadContext;
 
 use aconfig_storage_file::{read_u32_from_bytes, FILE_VERSION};
-use flag_table_query::find_flag_offset;
+use flag_info_query::find_flag_attribute;
+use flag_table_query::find_flag_read_context;
 use flag_value_query::find_boolean_flag_value;
-use package_table_query::find_package_offset;
+use package_table_query::find_package_read_context;
 
 use anyhow::anyhow;
 use memmap2::Mmap;
 use std::fs::File;
 use std::io::Read;
 
-/// Storage file location pb file
-pub const STORAGE_LOCATION_FILE: &str = "/metadata/aconfig/boot/available_storage_file_records.pb";
+/// Storage files root directory
+pub const STORAGE_LOCATION: &str = "/metadata/aconfig";
 
 /// Get read only mapped storage files.
 ///
@@ -74,53 +75,53 @@
     container: &str,
     file_type: StorageFileType,
 ) -> Result<Mmap, AconfigStorageError> {
-    unsafe { crate::mapped_file::get_mapped_file(STORAGE_LOCATION_FILE, container, file_type) }
+    unsafe { crate::mapped_file::get_mapped_file(STORAGE_LOCATION, container, file_type) }
 }
 
-/// Get package start offset for flags.
+/// Get package read context for a specific package.
 ///
 /// \input file: mapped package file
 /// \input package: package name
 ///
 /// \return
-/// If a package is found, it returns Ok(Some(PackageOffset))
+/// If a package is found, it returns Ok(Some(PackageReadContext))
 /// If a package is not found, it returns Ok(None)
 /// If errors out, it returns an Err(errmsg)
-pub fn get_package_offset(
+pub fn get_package_read_context(
     file: &Mmap,
     package: &str,
-) -> Result<Option<PackageOffset>, AconfigStorageError> {
-    find_package_offset(file, package)
+) -> Result<Option<PackageReadContext>, AconfigStorageError> {
+    find_package_read_context(file, package)
 }
 
-/// Get flag offset within a package given.
+/// Get flag read context for a specific flag.
 ///
 /// \input file: mapped flag file
 /// \input package_id: package id obtained from package mapping file
 /// \input flag: flag name
 ///
 /// \return
-/// If a flag is found, it returns Ok(Some(u16))
+/// If a flag is found, it returns Ok(Some(FlagReadContext))
 /// If a flag is not found, it returns Ok(None)
 /// If errors out, it returns an Err(errmsg)
-pub fn get_flag_offset(
+pub fn get_flag_read_context(
     file: &Mmap,
     package_id: u32,
     flag: &str,
-) -> Result<Option<FlagOffset>, AconfigStorageError> {
-    find_flag_offset(file, package_id, flag)
+) -> Result<Option<FlagReadContext>, AconfigStorageError> {
+    find_flag_read_context(file, package_id, flag)
 }
 
 /// Get the boolean flag value.
 ///
 /// \input file: mapped flag file
-/// \input offset: flag value offset
+/// \input index: the boolean flag index in the file
 ///
 /// \return
 /// If the provide offset is valid, it returns the boolean flag value, otherwise it
 /// returns the error message.
-pub fn get_boolean_flag_value(file: &Mmap, offset: u32) -> Result<bool, AconfigStorageError> {
-    find_boolean_flag_value(file, offset)
+pub fn get_boolean_flag_value(file: &Mmap, index: u32) -> Result<bool, AconfigStorageError> {
+    find_boolean_flag_value(file, index)
 }
 
 /// Get storage file version number
@@ -145,6 +146,23 @@
     read_u32_from_bytes(&buffer, &mut head)
 }
 
+/// Get the flag attribute.
+///
+/// \input file: mapped flag info file
+/// \input flag_type: flag value type
+/// \input flag_index: flag index
+///
+/// \return
+/// If the provided index is valid, it returns the flag attribute bitfield, otherwise it
+/// returns the error message.
+pub fn get_flag_attribute(
+    file: &Mmap,
+    flag_type: FlagValueType,
+    flag_index: u32,
+) -> Result<u8, AconfigStorageError> {
+    find_flag_attribute(file, flag_type, flag_index)
+}
+
 // *************************************** //
 // CC INTERLOP
 // *************************************** //
@@ -160,20 +178,21 @@
     }
 
     // Package table query return for cc interlop
-    pub struct PackageOffsetQueryCXX {
+    pub struct PackageReadContextQueryCXX {
         pub query_success: bool,
         pub error_message: String,
         pub package_exists: bool,
         pub package_id: u32,
-        pub boolean_offset: u32,
+        pub boolean_start_index: u32,
     }
 
     // Flag table query return for cc interlop
-    pub struct FlagOffsetQueryCXX {
+    pub struct FlagReadContextQueryCXX {
         pub query_success: bool,
         pub error_message: String,
         pub flag_exists: bool,
-        pub flag_offset: u16,
+        pub flag_type: u16,
+        pub flag_index: u16,
     }
 
     // Flag value query return for cc interlop
@@ -183,21 +202,43 @@
         pub flag_value: bool,
     }
 
+    // Flag info query return for cc interlop
+    pub struct FlagAttributeQueryCXX {
+        pub query_success: bool,
+        pub error_message: String,
+        pub flag_attribute: u8,
+    }
+
     // Rust export to c++
     extern "Rust" {
         pub fn get_storage_file_version_cxx(file_path: &str) -> VersionNumberQueryCXX;
 
-        pub fn get_package_offset_cxx(file: &[u8], package: &str) -> PackageOffsetQueryCXX;
+        pub fn get_package_read_context_cxx(
+            file: &[u8],
+            package: &str,
+        ) -> PackageReadContextQueryCXX;
 
-        pub fn get_flag_offset_cxx(file: &[u8], package_id: u32, flag: &str) -> FlagOffsetQueryCXX;
+        pub fn get_flag_read_context_cxx(
+            file: &[u8],
+            package_id: u32,
+            flag: &str,
+        ) -> FlagReadContextQueryCXX;
 
         pub fn get_boolean_flag_value_cxx(file: &[u8], offset: u32) -> BooleanFlagValueQueryCXX;
+
+        pub fn get_flag_attribute_cxx(
+            file: &[u8],
+            flag_type: u16,
+            flag_index: u32,
+        ) -> FlagAttributeQueryCXX;
     }
 }
 
 /// Implement the package offset interlop return type, create from actual package offset api return type
-impl ffi::PackageOffsetQueryCXX {
-    pub(crate) fn new(offset_result: Result<Option<PackageOffset>, AconfigStorageError>) -> Self {
+impl ffi::PackageReadContextQueryCXX {
+    pub(crate) fn new(
+        offset_result: Result<Option<PackageReadContext>, AconfigStorageError>,
+    ) -> Self {
         match offset_result {
             Ok(offset_opt) => match offset_opt {
                 Some(offset) => Self {
@@ -205,14 +246,14 @@
                     error_message: String::from(""),
                     package_exists: true,
                     package_id: offset.package_id,
-                    boolean_offset: offset.boolean_offset,
+                    boolean_start_index: offset.boolean_start_index,
                 },
                 None => Self {
                     query_success: true,
                     error_message: String::from(""),
                     package_exists: false,
                     package_id: 0,
-                    boolean_offset: 0,
+                    boolean_start_index: 0,
                 },
             },
             Err(errmsg) => Self {
@@ -220,35 +261,38 @@
                 error_message: format!("{:?}", errmsg),
                 package_exists: false,
                 package_id: 0,
-                boolean_offset: 0,
+                boolean_start_index: 0,
             },
         }
     }
 }
 
 /// Implement the flag offset interlop return type, create from actual flag offset api return type
-impl ffi::FlagOffsetQueryCXX {
-    pub(crate) fn new(offset_result: Result<Option<FlagOffset>, AconfigStorageError>) -> Self {
+impl ffi::FlagReadContextQueryCXX {
+    pub(crate) fn new(offset_result: Result<Option<FlagReadContext>, AconfigStorageError>) -> Self {
         match offset_result {
             Ok(offset_opt) => match offset_opt {
                 Some(offset) => Self {
                     query_success: true,
                     error_message: String::from(""),
                     flag_exists: true,
-                    flag_offset: offset,
+                    flag_type: offset.flag_type as u16,
+                    flag_index: offset.flag_index,
                 },
                 None => Self {
                     query_success: true,
                     error_message: String::from(""),
                     flag_exists: false,
-                    flag_offset: 0,
+                    flag_type: 0u16,
+                    flag_index: 0u16,
                 },
             },
             Err(errmsg) => Self {
                 query_success: false,
                 error_message: format!("{:?}", errmsg),
                 flag_exists: false,
-                flag_offset: 0,
+                flag_type: 0u16,
+                flag_index: 0u16,
             },
         }
     }
@@ -270,6 +314,22 @@
     }
 }
 
+/// Implement the flag info interlop return type, create from actual flag info api return type
+impl ffi::FlagAttributeQueryCXX {
+    pub(crate) fn new(info_result: Result<u8, AconfigStorageError>) -> Self {
+        match info_result {
+            Ok(info) => {
+                Self { query_success: true, error_message: String::from(""), flag_attribute: info }
+            }
+            Err(errmsg) => Self {
+                query_success: false,
+                error_message: format!("{:?}", errmsg),
+                flag_attribute: 0u8,
+            },
+        }
+    }
+}
+
 /// Implement the storage version number interlop return type, create from actual version number
 /// api return type
 impl ffi::VersionNumberQueryCXX {
@@ -289,14 +349,18 @@
     }
 }
 
-/// Get package start offset cc interlop
-pub fn get_package_offset_cxx(file: &[u8], package: &str) -> ffi::PackageOffsetQueryCXX {
-    ffi::PackageOffsetQueryCXX::new(find_package_offset(file, package))
+/// Get package read context cc interlop
+pub fn get_package_read_context_cxx(file: &[u8], package: &str) -> ffi::PackageReadContextQueryCXX {
+    ffi::PackageReadContextQueryCXX::new(find_package_read_context(file, package))
 }
 
-/// Get flag start offset cc interlop
-pub fn get_flag_offset_cxx(file: &[u8], package_id: u32, flag: &str) -> ffi::FlagOffsetQueryCXX {
-    ffi::FlagOffsetQueryCXX::new(find_flag_offset(file, package_id, flag))
+/// Get flag read context cc interlop
+pub fn get_flag_read_context_cxx(
+    file: &[u8],
+    package_id: u32,
+    flag: &str,
+) -> ffi::FlagReadContextQueryCXX {
+    ffi::FlagReadContextQueryCXX::new(find_flag_read_context(file, package_id, flag))
 }
 
 /// Get boolean flag value cc interlop
@@ -304,6 +368,20 @@
     ffi::BooleanFlagValueQueryCXX::new(find_boolean_flag_value(file, offset))
 }
 
+/// Get flag attribute cc interlop
+pub fn get_flag_attribute_cxx(
+    file: &[u8],
+    flag_type: u16,
+    flag_index: u32,
+) -> ffi::FlagAttributeQueryCXX {
+    match FlagValueType::try_from(flag_type) {
+        Ok(value_type) => {
+            ffi::FlagAttributeQueryCXX::new(find_flag_attribute(file, value_type, flag_index))
+        }
+        Err(errmsg) => ffi::FlagAttributeQueryCXX::new(Err(errmsg)),
+    }
+}
+
 /// Get storage version number cc interlop
 pub fn get_storage_file_version_cxx(file_path: &str) -> ffi::VersionNumberQueryCXX {
     ffi::VersionNumberQueryCXX::new(get_storage_file_version(file_path))
@@ -313,97 +391,96 @@
 mod tests {
     use super::*;
     use crate::mapped_file::get_mapped_file;
-    use crate::test_utils::copy_to_temp_file;
-    use aconfig_storage_file::protos::storage_record_pb::write_proto_to_temp_file;
-    use tempfile::NamedTempFile;
+    use aconfig_storage_file::{FlagInfoBit, StoredFlagType};
+    use rand::Rng;
+    use std::fs;
 
-    fn create_test_storage_files() -> [NamedTempFile; 4] {
-        let package_map = copy_to_temp_file("./tests/package.map").unwrap();
-        let flag_map = copy_to_temp_file("./tests/flag.map").unwrap();
-        let flag_val = copy_to_temp_file("./tests/flag.val").unwrap();
+    fn create_test_storage_files() -> String {
+        let mut rng = rand::thread_rng();
+        let number: u32 = rng.gen();
+        let storage_dir = String::from("/tmp/") + &number.to_string();
+        if std::fs::metadata(&storage_dir).is_ok() {
+            fs::remove_dir_all(&storage_dir).unwrap();
+        }
+        let maps_dir = storage_dir.clone() + "/maps";
+        let boot_dir = storage_dir.clone() + "/boot";
+        fs::create_dir(&storage_dir).unwrap();
+        fs::create_dir(&maps_dir).unwrap();
+        fs::create_dir(&boot_dir).unwrap();
 
-        let text_proto = format!(
-            r#"
-files {{
-    version: 0
-    container: "mockup"
-    package_map: "{}"
-    flag_map: "{}"
-    flag_val: "{}"
-    timestamp: 12345
-}}
-"#,
-            package_map.path().display(),
-            flag_map.path().display(),
-            flag_val.path().display()
-        );
-        let pb_file = write_proto_to_temp_file(&text_proto).unwrap();
-        [package_map, flag_map, flag_val, pb_file]
+        let package_map = storage_dir.clone() + "/maps/mockup.package.map";
+        let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
+        let flag_val = storage_dir.clone() + "/boot/mockup.val";
+        let flag_info = storage_dir.clone() + "/boot/mockup.info";
+        fs::copy("./tests/package.map", &package_map).unwrap();
+        fs::copy("./tests/flag.map", &flag_map).unwrap();
+        fs::copy("./tests/flag.val", &flag_val).unwrap();
+        fs::copy("./tests/flag.info", &flag_info).unwrap();
+
+        return storage_dir;
     }
 
     #[test]
-    // this test point locks down flag package offset query
-    fn test_package_offset_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+    // this test point locks down flag package read context query
+    fn test_package_context_query() {
+        let storage_dir = create_test_storage_files();
         let package_mapped_file = unsafe {
-            get_mapped_file(&pb_file_path, "mockup", StorageFileType::PackageMap).unwrap()
+            get_mapped_file(&storage_dir, "mockup", StorageFileType::PackageMap).unwrap()
         };
 
-        let package_offset =
-            get_package_offset(&package_mapped_file, "com.android.aconfig.storage.test_1")
+        let package_context =
+            get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_1")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 0, boolean_offset: 0 };
-        assert_eq!(package_offset, expected_package_offset);
+        let expected_package_context = PackageReadContext { package_id: 0, boolean_start_index: 0 };
+        assert_eq!(package_context, expected_package_context);
 
-        let package_offset =
-            get_package_offset(&package_mapped_file, "com.android.aconfig.storage.test_2")
+        let package_context =
+            get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_2")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 1, boolean_offset: 3 };
-        assert_eq!(package_offset, expected_package_offset);
+        let expected_package_context = PackageReadContext { package_id: 1, boolean_start_index: 3 };
+        assert_eq!(package_context, expected_package_context);
 
-        let package_offset =
-            get_package_offset(&package_mapped_file, "com.android.aconfig.storage.test_4")
+        let package_context =
+            get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_4")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 2, boolean_offset: 6 };
-        assert_eq!(package_offset, expected_package_offset);
+        let expected_package_context = PackageReadContext { package_id: 2, boolean_start_index: 6 };
+        assert_eq!(package_context, expected_package_context);
     }
 
     #[test]
-    // this test point locks down flag offset query
-    fn test_flag_offset_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+    // this test point locks down flag read context query
+    fn test_flag_context_query() {
+        let storage_dir = create_test_storage_files();
         let flag_mapped_file =
-            unsafe { get_mapped_file(&pb_file_path, "mockup", StorageFileType::FlagMap).unwrap() };
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagMap).unwrap() };
 
         let baseline = vec![
-            (0, "enabled_ro", 1u16),
-            (0, "enabled_rw", 2u16),
-            (1, "disabled_ro", 0u16),
-            (2, "enabled_ro", 1u16),
-            (1, "enabled_fixed_ro", 1u16),
-            (1, "enabled_ro", 2u16),
-            (2, "enabled_fixed_ro", 0u16),
-            (0, "disabled_rw", 0u16),
+            (0, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 1u16),
+            (0, "enabled_rw", StoredFlagType::ReadWriteBoolean, 2u16),
+            (2, "enabled_rw", StoredFlagType::ReadWriteBoolean, 1u16),
+            (1, "disabled_rw", StoredFlagType::ReadWriteBoolean, 0u16),
+            (1, "enabled_fixed_ro", StoredFlagType::FixedReadOnlyBoolean, 1u16),
+            (1, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 2u16),
+            (2, "enabled_fixed_ro", StoredFlagType::FixedReadOnlyBoolean, 0u16),
+            (0, "disabled_rw", StoredFlagType::ReadWriteBoolean, 0u16),
         ];
-        for (package_id, flag_name, expected_offset) in baseline.into_iter() {
-            let flag_offset =
-                get_flag_offset(&flag_mapped_file, package_id, flag_name).unwrap().unwrap();
-            assert_eq!(flag_offset, expected_offset);
+        for (package_id, flag_name, flag_type, flag_index) in baseline.into_iter() {
+            let flag_context =
+                get_flag_read_context(&flag_mapped_file, package_id, flag_name).unwrap().unwrap();
+            assert_eq!(flag_context.flag_type, flag_type);
+            assert_eq!(flag_context.flag_index, flag_index);
         }
     }
 
     #[test]
-    // this test point locks down flag offset query
+    // this test point locks down flag value query
     fn test_flag_value_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+        let storage_dir = create_test_storage_files();
         let flag_value_file =
-            unsafe { get_mapped_file(&pb_file_path, "mockup", StorageFileType::FlagVal).unwrap() };
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagVal).unwrap() };
         let baseline: Vec<bool> = vec![false, true, true, false, true, true, true, true];
         for (offset, expected_value) in baseline.into_iter().enumerate() {
             let flag_value = get_boolean_flag_value(&flag_value_file, offset as u32).unwrap();
@@ -412,10 +489,27 @@
     }
 
     #[test]
+    // this test point locks down flag info query
+    fn test_flag_info_query() {
+        let storage_dir = create_test_storage_files();
+        let flag_info_file =
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagInfo).unwrap() };
+        let is_rw: Vec<bool> = vec![true, false, true, true, false, false, false, true];
+        for (offset, expected_value) in is_rw.into_iter().enumerate() {
+            let attribute =
+                get_flag_attribute(&flag_info_file, FlagValueType::Boolean, offset as u32).unwrap();
+            assert_eq!((attribute & FlagInfoBit::IsReadWrite as u8) != 0u8, expected_value);
+            assert!((attribute & FlagInfoBit::HasServerOverride as u8) == 0u8);
+            assert!((attribute & FlagInfoBit::HasLocalOverride as u8) == 0u8);
+        }
+    }
+
+    #[test]
     // this test point locks down flag storage file version number query api
     fn test_storage_version_query() {
         assert_eq!(get_storage_file_version("./tests/package.map").unwrap(), 1);
         assert_eq!(get_storage_file_version("./tests/flag.map").unwrap(), 1);
         assert_eq!(get_storage_file_version("./tests/flag.val").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/flag.info").unwrap(), 1);
     }
 }
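
For orientation, a minimal end-to-end read sketched against the public API above, written as a hypothetical consumer: the function name and the index arithmetic (package boolean_start_index plus within-package flag_index, i.e. the "combined package and flag index" mentioned in the module docs) are assumptions for illustration, not part of the crate.

use aconfig_storage_read_api::{
    get_boolean_flag_value, get_flag_read_context, get_package_read_context,
    mapped_file::get_mapped_file, AconfigStorageError, StorageFileType,
};

/// Illustrative only: resolve one boolean flag for a container whose storage
/// files are laid out under `storage_dir` as in create_test_storage_files().
fn read_boolean_flag(
    storage_dir: &str,
    container: &str,
    package: &str,
    flag: &str,
) -> Result<Option<bool>, AconfigStorageError> {
    // Safety: the mapped files must not be modified while the mappings are
    // alive, per the contract documented on mapped_file::get_mapped_file.
    let package_map =
        unsafe { get_mapped_file(storage_dir, container, StorageFileType::PackageMap)? };
    let flag_map = unsafe { get_mapped_file(storage_dir, container, StorageFileType::FlagMap)? };
    let flag_val = unsafe { get_mapped_file(storage_dir, container, StorageFileType::FlagVal)? };

    let Some(package_context) = get_package_read_context(&package_map, package)? else {
        return Ok(None);
    };
    let Some(flag_context) =
        get_flag_read_context(&flag_map, package_context.package_id, flag)?
    else {
        return Ok(None);
    };

    // Assumption: the value-file index is the package start index plus the
    // within-package flag index.
    let index = package_context.boolean_start_index + flag_context.flag_index as u32;
    Ok(Some(get_boolean_flag_value(&flag_val, index)?))
}
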
diff --git a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
index 51354db..5a16645 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
@@ -14,47 +14,12 @@
  * limitations under the License.
  */
 
-use std::fs::File;
-use std::io::{BufReader, Read};
-
 use anyhow::anyhow;
 use memmap2::Mmap;
+use std::fs::File;
 
-use crate::AconfigStorageError::{
-    self, FileReadFail, MapFileFail, ProtobufParseFail, StorageFileNotFound,
-};
+use crate::AconfigStorageError::{self, FileReadFail, MapFileFail, StorageFileNotFound};
 use crate::StorageFileType;
-use aconfig_storage_file::protos::{
-    storage_record_pb::try_from_binary_proto, ProtoStorageFileInfo, ProtoStorageFiles,
-};
-
-/// Find where storage files are stored for a particular container
-fn find_container_storage_location(
-    location_pb_file: &str,
-    container: &str,
-) -> Result<ProtoStorageFileInfo, AconfigStorageError> {
-    let file = File::open(location_pb_file).map_err(|errmsg| {
-        FileReadFail(anyhow!("Failed to open file {}: {}", location_pb_file, errmsg))
-    })?;
-    let mut reader = BufReader::new(file);
-    let mut bytes = Vec::new();
-    reader.read_to_end(&mut bytes).map_err(|errmsg| {
-        FileReadFail(anyhow!("Failed to read file {}: {}", location_pb_file, errmsg))
-    })?;
-    let storage_locations: ProtoStorageFiles = try_from_binary_proto(&bytes).map_err(|errmsg| {
-        ProtobufParseFail(anyhow!(
-            "Failed to parse storage location pb file {}: {}",
-            location_pb_file,
-            errmsg
-        ))
-    })?;
-    for location_info in storage_locations.files.iter() {
-        if location_info.container() == container {
-            return Ok(location_info.clone());
-        }
-    }
-    Err(StorageFileNotFound(anyhow!("Storage file does not exist for {}", container)))
-}
 
 /// Get the read only memory mapping of a storage file
 ///
@@ -82,125 +47,70 @@
 /// file after being mapped. Ensure no writes can happen to this file while this
 /// mapping stays alive.
 pub unsafe fn get_mapped_file(
-    location_pb_file: &str,
+    storage_dir: &str,
     container: &str,
     file_type: StorageFileType,
 ) -> Result<Mmap, AconfigStorageError> {
-    let files_location = find_container_storage_location(location_pb_file, container)?;
-    match file_type {
-        StorageFileType::PackageMap => unsafe { map_file(files_location.package_map()) },
-        StorageFileType::FlagMap => unsafe { map_file(files_location.flag_map()) },
-        StorageFileType::FlagVal => unsafe { map_file(files_location.flag_val()) },
-        StorageFileType::FlagInfo => {
-            Err(MapFileFail(anyhow!("TODO: add support for flag info file")))
+    let storage_file = match file_type {
+        StorageFileType::PackageMap => {
+            String::from(storage_dir) + "/maps/" + container + ".package.map"
         }
+        StorageFileType::FlagMap => String::from(storage_dir) + "/maps/" + container + ".flag.map",
+        StorageFileType::FlagVal => String::from(storage_dir) + "/boot/" + container + ".val",
+        StorageFileType::FlagInfo => String::from(storage_dir) + "/boot/" + container + ".info",
+    };
+    if std::fs::metadata(&storage_file).is_err() {
+        return Err(StorageFileNotFound(anyhow!("storage file {} does not exist", storage_file)));
     }
+    unsafe { map_file(&storage_file) }
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::test_utils::copy_to_temp_file;
-    use aconfig_storage_file::protos::storage_record_pb::write_proto_to_temp_file;
-    use tempfile::NamedTempFile;
+    use rand::Rng;
+    use std::fs;
+    use std::io::Read;
 
-    #[test]
-    fn test_find_storage_file_location() {
-        let text_proto = r#"
-files {
-    version: 0
-    container: "system"
-    package_map: "/system/etc/package.map"
-    flag_map: "/system/etc/flag.map"
-    flag_val: "/metadata/aconfig/system.val"
-    timestamp: 12345
-}
-files {
-    version: 1
-    container: "product"
-    package_map: "/product/etc/package.map"
-    flag_map: "/product/etc/flag.map"
-    flag_val: "/metadata/aconfig/product.val"
-    timestamp: 54321
-}
-"#;
-        let file = write_proto_to_temp_file(&text_proto).unwrap();
-        let file_full_path = file.path().display().to_string();
-        let file_info = find_container_storage_location(&file_full_path, "system").unwrap();
-        assert_eq!(file_info.version(), 0);
-        assert_eq!(file_info.container(), "system");
-        assert_eq!(file_info.package_map(), "/system/etc/package.map");
-        assert_eq!(file_info.flag_map(), "/system/etc/flag.map");
-        assert_eq!(file_info.flag_val(), "/metadata/aconfig/system.val");
-        assert_eq!(file_info.timestamp(), 12345);
-
-        let file_info = find_container_storage_location(&file_full_path, "product").unwrap();
-        assert_eq!(file_info.version(), 1);
-        assert_eq!(file_info.container(), "product");
-        assert_eq!(file_info.package_map(), "/product/etc/package.map");
-        assert_eq!(file_info.flag_map(), "/product/etc/flag.map");
-        assert_eq!(file_info.flag_val(), "/metadata/aconfig/product.val");
-        assert_eq!(file_info.timestamp(), 54321);
-
-        let err = find_container_storage_location(&file_full_path, "vendor").unwrap_err();
-        assert_eq!(
-            format!("{:?}", err),
-            "StorageFileNotFound(Storage file does not exist for vendor)"
-        );
-    }
-
-    fn map_and_verify(location_pb_file: &str, file_type: StorageFileType, actual_file: &str) {
+    fn map_and_verify(storage_dir: &str, file_type: StorageFileType, actual_file: &str) {
         let mut opened_file = File::open(actual_file).unwrap();
         let mut content = Vec::new();
         opened_file.read_to_end(&mut content).unwrap();
-
-        let mmaped_file =
-            unsafe { get_mapped_file(location_pb_file, "system", file_type).unwrap() };
+        let mmaped_file = unsafe { get_mapped_file(storage_dir, "mockup", file_type).unwrap() };
         assert_eq!(mmaped_file[..], content[..]);
     }
 
-    fn create_test_storage_files() -> [NamedTempFile; 4] {
-        let package_map = copy_to_temp_file("./tests/package.map").unwrap();
-        let flag_map = copy_to_temp_file("./tests/flag.map").unwrap();
-        let flag_val = copy_to_temp_file("./tests/package.map").unwrap();
+    fn create_test_storage_files() -> String {
+        let mut rng = rand::thread_rng();
+        let number: u32 = rng.gen();
+        let storage_dir = String::from("/tmp/") + &number.to_string();
+        if std::fs::metadata(&storage_dir).is_ok() {
+            fs::remove_dir_all(&storage_dir).unwrap();
+        }
+        let maps_dir = storage_dir.clone() + "/maps";
+        let boot_dir = storage_dir.clone() + "/boot";
+        fs::create_dir(&storage_dir).unwrap();
+        fs::create_dir(&maps_dir).unwrap();
+        fs::create_dir(&boot_dir).unwrap();
 
-        let text_proto = format!(
-            r#"
-files {{
-    version: 0
-    container: "system"
-    package_map: "{}"
-    flag_map: "{}"
-    flag_val: "{}"
-    timestamp: 12345
-}}
-"#,
-            package_map.path().display(),
-            flag_map.path().display(),
-            flag_val.path().display()
-        );
-        let pb_file = write_proto_to_temp_file(&text_proto).unwrap();
-        [package_map, flag_map, flag_val, pb_file]
+        let package_map = storage_dir.clone() + "/maps/mockup.package.map";
+        let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
+        let flag_val = storage_dir.clone() + "/boot/mockup.val";
+        let flag_info = storage_dir.clone() + "/boot/mockup.info";
+        fs::copy("./tests/package.map", &package_map).unwrap();
+        fs::copy("./tests/flag.map", &flag_map).unwrap();
+        fs::copy("./tests/flag.val", &flag_val).unwrap();
+        fs::copy("./tests/flag.info", &flag_info).unwrap();
+
+        storage_dir
     }
 
     #[test]
     fn test_mapped_file_contents() {
-        let [package_map, flag_map, flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
-        map_and_verify(
-            &pb_file_path,
-            StorageFileType::PackageMap,
-            &package_map.path().display().to_string(),
-        );
-        map_and_verify(
-            &pb_file_path,
-            StorageFileType::FlagMap,
-            &flag_map.path().display().to_string(),
-        );
-        map_and_verify(
-            &pb_file_path,
-            StorageFileType::FlagVal,
-            &flag_val.path().display().to_string(),
-        );
+        let storage_dir = create_test_storage_files();
+        map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/package.map");
+        map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/flag.map");
+        map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/flag.val");
+        map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/flag.info");
     }
 }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
index d83844e..2cb854b 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
@@ -24,16 +24,16 @@
 
 /// Package table query return
 #[derive(PartialEq, Debug)]
-pub struct PackageOffset {
+pub struct PackageReadContext {
     pub package_id: u32,
-    pub boolean_offset: u32,
+    pub boolean_start_index: u32,
 }
 
-/// Query package id and start offset
-pub fn find_package_offset(
+/// Query package read context: package id and start index
+pub fn find_package_read_context(
     buf: &[u8],
     package: &str,
-) -> Result<Option<PackageOffset>, AconfigStorageError> {
+) -> Result<Option<PackageReadContext>, AconfigStorageError> {
     let interpreted_header = PackageTableHeader::from_bytes(buf)?;
     if interpreted_header.version > FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
@@ -57,9 +57,9 @@
     loop {
         let interpreted_node = PackageTableNode::from_bytes(&buf[package_node_offset..])?;
         if interpreted_node.package_name == package {
-            return Ok(Some(PackageOffset {
+            return Ok(Some(PackageReadContext {
                 package_id: interpreted_node.package_id,
-                boolean_offset: interpreted_node.boolean_offset,
+                boolean_start_index: interpreted_node.boolean_start_index,
             }));
         }
         match interpreted_node.next_offset {
@@ -78,24 +78,24 @@
     // this test point locks down table query
     fn test_package_query() {
         let package_table = create_test_package_table().into_bytes();
-        let package_offset =
-            find_package_offset(&package_table[..], "com.android.aconfig.storage.test_1")
+        let package_context =
+            find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 0, boolean_offset: 0 };
-        assert_eq!(package_offset, expected_package_offset);
-        let package_offset =
-            find_package_offset(&package_table[..], "com.android.aconfig.storage.test_2")
+        let expected_package_context = PackageReadContext { package_id: 0, boolean_start_index: 0 };
+        assert_eq!(package_context, expected_package_context);
+        let package_context =
+            find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_2")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 1, boolean_offset: 3 };
-        assert_eq!(package_offset, expected_package_offset);
-        let package_offset =
-            find_package_offset(&package_table[..], "com.android.aconfig.storage.test_4")
+        let expected_package_context = PackageReadContext { package_id: 1, boolean_start_index: 3 };
+        assert_eq!(package_context, expected_package_context);
+        let package_context =
+            find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_4")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 2, boolean_offset: 6 };
-        assert_eq!(package_offset, expected_package_offset);
+        let expected_package_context = PackageReadContext { package_id: 2, boolean_start_index: 6 };
+        assert_eq!(package_context, expected_package_context);
     }
 
     #[test]
@@ -103,13 +103,15 @@
     fn test_not_existed_package_query() {
         // this will land at an empty bucket
         let package_table = create_test_package_table().into_bytes();
-        let package_offset =
-            find_package_offset(&package_table[..], "com.android.aconfig.storage.test_3").unwrap();
-        assert_eq!(package_offset, None);
+        let package_context =
+            find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_3")
+                .unwrap();
+        assert_eq!(package_context, None);
         // this will land at the end of a linked list
-        let package_offset =
-            find_package_offset(&package_table[..], "com.android.aconfig.storage.test_5").unwrap();
-        assert_eq!(package_offset, None);
+        let package_context =
+            find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_5")
+                .unwrap();
+        assert_eq!(package_context, None);
     }
 
     #[test]
@@ -118,8 +120,9 @@
         let mut table = create_test_package_table();
         table.header.version = crate::FILE_VERSION + 1;
         let package_table = table.into_bytes();
-        let error = find_package_offset(&package_table[..], "com.android.aconfig.storage.test_1")
-            .unwrap_err();
+        let error =
+            find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1")
+                .unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
new file mode 100644
index 0000000..406ff24
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.channels.FileChannel.MapMode;
+
+import android.aconfig.storage.PackageReadContext;
+import android.aconfig.storage.FlagReadContext;
+
+import dalvik.annotation.optimization.FastNative;
+
+public class AconfigStorageReadAPI {
+
+    // Storage file dir on device
+    private static final String STORAGEDIR = "/metadata/aconfig";
+
+    // Storage file type
+    public enum StorageFileType {
+        PACKAGE_MAP,
+        FLAG_MAP,
+        FLAG_VAL,
+        FLAG_INFO
+    }
+
+    // Map a storage file given file path
+    public static MappedByteBuffer mapStorageFile(String file) throws IOException {
+        FileInputStream stream = new FileInputStream(file);
+        FileChannel channel = stream.getChannel();
+        return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
+    }
+
+    // Map a storage file given container and file type
+    public static MappedByteBuffer getMappedFile(
+        String container,
+        StorageFileType type) throws IOException {
+        switch (type) {
+            case PACKAGE_MAP:
+                return mapStorageFile(STORAGEDIR + "/maps/" + container + ".package.map");
+            case FLAG_MAP:
+                return mapStorageFile(STORAGEDIR + "/maps/" + container + ".flag.map");
+            case FLAG_VAL:
+                return mapStorageFile(STORAGEDIR + "/boot/" + container + ".val");
+            case FLAG_INFO:
+                return mapStorageFile(STORAGEDIR + "/boot/" + container + ".info");
+            default:
+                throw new IOException("Invalid storage file type");
+        }
+    }
+
+    // JNI interface to get package read context
+    // @param mappedFile: memory mapped package map file
+    // @param packageName: package name
+    // @throws IOException if the passed in file is not a valid package map file
+    @FastNative
+    private static native ByteBuffer getPackageReadContextImpl(
+        ByteBuffer mappedFile, String packageName) throws IOException;
+
+    // API to get package read context
+    // @param mappedFile: memory mapped package map file
+    // @param packageName: package name
+    // @throws IOException if the passed in file is not a valid package map file
+    public static PackageReadContext getPackageReadContext(
+        ByteBuffer mappedFile, String packageName) throws IOException {
+        ByteBuffer buffer = getPackageReadContextImpl(mappedFile, packageName);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        return new PackageReadContext(buffer.getInt(), buffer.getInt(4));
+    }
+
+    // JNI interface to get flag read context
+    // @param mappedFile: memory mapped flag map file
+    // @param packageId: package id to represent a specific package, obtained from
+    // package map file
+    // @param flagName: flag name
+    // @throws IOException if the passed in file is not a valid flag map file
+    @FastNative
+    private static native ByteBuffer getFlagReadContextImpl(
+        ByteBuffer mappedFile, int packageId, String flagName) throws IOException;
+
+    // API to get flag read context
+    // @param mappedFile: memory mapped flag map file
+    // @param packageId: package id to represent a specific package, obtained from
+    // package map file
+    // @param flagName: flag name
+    // @throws IOException if the passed in file is not a valid flag map file
+    public static FlagReadContext getFlagReadContext(
+        ByteBuffer mappedFile, int packageId, String flagName) throws IOException {
+        ByteBuffer buffer = getFlagReadContextImpl(mappedFile, packageId, flagName);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        return new FlagReadContext(buffer.getInt(), buffer.getInt(4));
+    }
+
+    // JNI interface to get boolean flag value
+    // @param mappedFile: memory mapped flag value file
+    // @param flagIndex: flag global index in the flag value array
+    // @throws IOException if the passed in file is not a valid flag value file or the
+    // flag index went over the file boundary.
+    @FastNative
+    public static native boolean getBooleanFlagValue(
+        ByteBuffer mappedFile, int flagIndex) throws IOException;
+
+    static {
+        System.loadLibrary("aconfig_storage_read_api_rust_jni");
+    }
+}
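
For orientation, the following is a minimal, illustrative sketch of how the new Java read API pieces fit together, mirroring the flow exercised by the Java test added later in this change. The "mockup" container and the package/flag names come from the test fixtures, and combining the package's boolean start index with the flag's index to form the global value index is an assumption based on the new field names rather than something documented in this file.

    import android.aconfig.storage.AconfigStorageReadAPI;
    import android.aconfig.storage.AconfigStorageReadAPI.StorageFileType;
    import android.aconfig.storage.FlagReadContext;
    import android.aconfig.storage.PackageReadContext;

    import java.io.IOException;
    import java.nio.MappedByteBuffer;

    class AconfigReadSketch {
        // Reads one boolean flag from the "mockup" container; this only works on a
        // device where the storage files exist under /metadata/aconfig.
        static boolean readEnabledRo() throws IOException {
            MappedByteBuffer packageMap =
                    AconfigStorageReadAPI.getMappedFile("mockup", StorageFileType.PACKAGE_MAP);
            MappedByteBuffer flagMap =
                    AconfigStorageReadAPI.getMappedFile("mockup", StorageFileType.FLAG_MAP);
            MappedByteBuffer flagVal =
                    AconfigStorageReadAPI.getMappedFile("mockup", StorageFileType.FLAG_VAL);

            // Package lookup: package id plus the start index of its boolean values.
            PackageReadContext pkg = AconfigStorageReadAPI.getPackageReadContext(
                    packageMap, "com.android.aconfig.storage.test_1");

            // Flag lookup within that package: stored flag type plus flag index.
            FlagReadContext flag = AconfigStorageReadAPI.getFlagReadContext(
                    flagMap, pkg.mPackageId, "enabled_ro");

            // Assumed convention: global value index = boolean start index + flag index.
            return AconfigStorageReadAPI.getBooleanFlagValue(
                    flagVal, pkg.mBooleanStartIndex + flag.mFlagIndex);
        }
    }
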
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/FlagReadContext.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/FlagReadContext.java
new file mode 100644
index 0000000..60559a9
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/FlagReadContext.java
@@ -0,0 +1,47 @@
+package android.aconfig.storage;
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class FlagReadContext {
+    public StoredFlagType mFlagType;
+    public int mFlagIndex;
+
+    public FlagReadContext(int flagType,
+            int flagIndex) {
+        mFlagType = StoredFlagType.fromInteger(flagType);
+        mFlagIndex = flagIndex;
+    }
+
+    // Flag type enum, consistent with the definition in aconfig_storage_file/src/lib.rs
+    public enum StoredFlagType {
+        ReadWriteBoolean,
+        ReadOnlyBoolean,
+        FixedReadOnlyBoolean;
+
+        public static StoredFlagType fromInteger(int x) {
+            switch(x) {
+                case 0:
+                    return ReadWriteBoolean;
+                case 1:
+                    return ReadOnlyBoolean;
+                case 2:
+                    return FixedReadOnlyBoolean;
+                default:
+                    return null;
+            }
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/src/test_utils.rs b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/PackageReadContext.java
similarity index 60%
rename from tools/aconfig/aconfig_storage_read_api/src/test_utils.rs
rename to tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/PackageReadContext.java
index 84f31aa..b781d9b 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/test_utils.rs
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/PackageReadContext.java
@@ -1,5 +1,6 @@
+package android.aconfig.storage;
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,13 +15,13 @@
  * limitations under the License.
  */
 
-use anyhow::Result;
-use std::fs;
-use tempfile::NamedTempFile;
+public class PackageReadContext {
+    public int mPackageId;
+    public int mBooleanStartIndex;
 
-/// Create temp file copy
-pub(crate) fn copy_to_temp_file(source_file: &str) -> Result<NamedTempFile> {
-    let file = NamedTempFile::new()?;
-    fs::copy(source_file, file.path())?;
-    Ok(file)
+    public PackageReadContext(int packageId,
+                              int booleanStartIndex) {
+        mPackageId = packageId;
+        mBooleanStartIndex = booleanStartIndex;
+    }
 }
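
These two context classes carry a "not found" convention rather than throwing: as locked down by the Java tests added later in this change, the JNI layer returns (-1, -1) for a missing package or flag, which surfaces as mPackageId == -1 on PackageReadContext and as a null mFlagType with mFlagIndex == -1 on FlagReadContext. A small caller-side sketch of checking for existence (helper names are illustrative):

    import android.aconfig.storage.AconfigStorageReadAPI;
    import android.aconfig.storage.FlagReadContext;
    import android.aconfig.storage.PackageReadContext;

    import java.io.IOException;
    import java.nio.MappedByteBuffer;

    class NotFoundSketch {
        // True when the package exists in the mapped package map file.
        static boolean packageExists(MappedByteBuffer packageMap, String packageName)
                throws IOException {
            PackageReadContext context =
                    AconfigStorageReadAPI.getPackageReadContext(packageMap, packageName);
            return context.mPackageId != -1;
        }

        // True when the flag exists under the given package id in the mapped flag map file.
        static boolean flagExists(MappedByteBuffer flagMap, int packageId, String flagName)
                throws IOException {
            FlagReadContext context =
                    AconfigStorageReadAPI.getFlagReadContext(flagMap, packageId, flagName);
            return context.mFlagType != null && context.mFlagIndex != -1;
        }
    }
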
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
new file mode 100644
index 0000000..304a059
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
@@ -0,0 +1,160 @@
+//! aconfig storage read api java rust interop
+
+use aconfig_storage_read_api::flag_table_query::find_flag_read_context;
+use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
+use aconfig_storage_read_api::package_table_query::find_package_read_context;
+use aconfig_storage_read_api::{FlagReadContext, PackageReadContext};
+
+use anyhow::Result;
+use jni::objects::{JByteBuffer, JClass, JString};
+use jni::sys::{jboolean, jint};
+use jni::JNIEnv;
+
+/// Call rust find package read context
+fn get_package_read_context_java(
+    env: &mut JNIEnv,
+    file: JByteBuffer,
+    package: JString,
+) -> Result<Option<PackageReadContext>> {
+    // SAFETY:
+    // The safety here is ensured as the package name is guaranteed to be a java string
+    let package_name: String = unsafe { env.get_string_unchecked(&package)?.into() };
+    let buffer_ptr = env.get_direct_buffer_address(&file)?;
+    let buffer_size = env.get_direct_buffer_capacity(&file)?;
+    // SAFETY:
+    // The safety here is ensured as only non null MemoryMappedBuffer will be passed in,
+    // so the conversion to slice is guaranteed to be valid
+    let buffer = unsafe { std::slice::from_raw_parts(buffer_ptr, buffer_size) };
+    Ok(find_package_read_context(buffer, &package_name)?)
+}
+
+/// Get package read context JNI
+#[no_mangle]
+#[allow(unused)]
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getPackageReadContextImpl<
+    'local,
+>(
+    mut env: JNIEnv<'local>,
+    class: JClass<'local>,
+    file: JByteBuffer<'local>,
+    package: JString<'local>,
+) -> JByteBuffer<'local> {
+    let mut package_id = -1;
+    let mut boolean_start_index = -1;
+
+    match get_package_read_context_java(&mut env, file, package) {
+        Ok(context_opt) => {
+            if let Some(context) = context_opt {
+                package_id = context.package_id as i32;
+                boolean_start_index = context.boolean_start_index as i32;
+            }
+        }
+        Err(errmsg) => {
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
+        }
+    }
+
+    let mut bytes = Vec::new();
+    bytes.extend_from_slice(&package_id.to_le_bytes());
+    bytes.extend_from_slice(&boolean_start_index.to_le_bytes());
+    let (addr, len) = {
+        let buf = bytes.leak();
+        (buf.as_mut_ptr(), buf.len())
+    };
+    // SAFETY:
+    // The safety here is ensured as the content is ensured to be valid
+    unsafe { env.new_direct_byte_buffer(addr, len).expect("failed to create byte buffer") }
+}
+
+/// Call rust find flag read context
+fn get_flag_read_context_java(
+    env: &mut JNIEnv,
+    file: JByteBuffer,
+    package_id: jint,
+    flag: JString,
+) -> Result<Option<FlagReadContext>> {
+    // SAFETY:
+    // The safety here is ensured as the flag name is guaranteed to be a java string
+    let flag_name: String = unsafe { env.get_string_unchecked(&flag)?.into() };
+    let buffer_ptr = env.get_direct_buffer_address(&file)?;
+    let buffer_size = env.get_direct_buffer_capacity(&file)?;
+    // SAFETY:
+    // The safety here is ensured as only non null MemoryMappedBuffer will be passed in,
+    // so the conversion to slice is guaranteed to be valid
+    let buffer = unsafe { std::slice::from_raw_parts(buffer_ptr, buffer_size) };
+    Ok(find_flag_read_context(buffer, package_id as u32, &flag_name)?)
+}
+
+/// Get flag read context JNI
+#[no_mangle]
+#[allow(unused)]
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getFlagReadContextImpl<
+    'local,
+>(
+    mut env: JNIEnv<'local>,
+    class: JClass<'local>,
+    file: JByteBuffer<'local>,
+    package_id: jint,
+    flag: JString<'local>,
+) -> JByteBuffer<'local> {
+    let mut flag_type = -1;
+    let mut flag_index = -1;
+
+    match get_flag_read_context_java(&mut env, file, package_id, flag) {
+        Ok(context_opt) => {
+            if let Some(context) = context_opt {
+                flag_type = context.flag_type as i32;
+                flag_index = context.flag_index as i32;
+            }
+        }
+        Err(errmsg) => {
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
+        }
+    }
+
+    let mut bytes = Vec::new();
+    bytes.extend_from_slice(&flag_type.to_le_bytes());
+    bytes.extend_from_slice(&flag_index.to_le_bytes());
+    let (addr, len) = {
+        let buf = bytes.leak();
+        (buf.as_mut_ptr(), buf.len())
+    };
+    // SAFETY:
+    // The safety here is ensured as the content is ensured to be valid
+    unsafe { env.new_direct_byte_buffer(addr, len).expect("failed to create byte buffer") }
+}
+
+/// Call rust find boolean flag value
+fn get_boolean_flag_value_java(
+    env: &mut JNIEnv,
+    file: JByteBuffer,
+    flag_index: jint,
+) -> Result<bool> {
+    let buffer_ptr = env.get_direct_buffer_address(&file)?;
+    let buffer_size = env.get_direct_buffer_capacity(&file)?;
+    // SAFETY:
+    // The safety here is ensured as only non null MemoryMappedBuffer will be passed in,
+    // so the conversion to slice is guaranteed to be valid
+    let buffer = unsafe { std::slice::from_raw_parts(buffer_ptr, buffer_size) };
+    Ok(find_boolean_flag_value(buffer, flag_index as u32)?)
+}
+
+/// Get flag value JNI
+#[no_mangle]
+#[allow(unused)]
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getBooleanFlagValue<
+    'local,
+>(
+    mut env: JNIEnv<'local>,
+    class: JClass<'local>,
+    file: JByteBuffer<'local>,
+    flag_index: jint,
+) -> jboolean {
+    match get_boolean_flag_value_java(&mut env, file, flag_index) {
+        Ok(value) => value as u8,
+        Err(errmsg) => {
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
+            0u8
+        }
+    }
+}
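
One behavioral detail of the JNI glue above that callers should expect: lookups that find nothing come back as the (-1, -1) sentinel described earlier, while structurally invalid input, such as a flag index past the end of the value file, is reported by throwing IOException from the Java-facing methods. A hedged caller-side sketch that treats the error case as "unknown" (the helper name is illustrative):

    import android.aconfig.storage.AconfigStorageReadAPI;

    import java.io.IOException;
    import java.nio.MappedByteBuffer;

    class ValueQuerySketch {
        // Returns the flag value, or null when the index is out of range or the
        // mapped buffer is not a valid flag value file (both surface as IOException).
        static Boolean getValueOrNull(MappedByteBuffer flagVal, int flagIndex) {
            try {
                return AconfigStorageReadAPI.getBooleanFlagValue(flagVal, flagIndex);
            } catch (IOException e) {
                return null;
            }
        }
    }
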
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
index d9cf238..ed0c728 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
@@ -1,19 +1,25 @@
+filegroup {
+    name: "read_api_test_storage_files",
+    srcs: ["package.map",
+        "flag.map",
+        "flag.val",
+        "flag.info"
+    ],
+}
+
 rust_test {
     name: "aconfig_storage_read_api.test.rust",
     srcs: [
-        "storage_read_api_test.rs"
+        "storage_read_api_test.rs",
     ],
     rustlibs: [
         "libanyhow",
         "libaconfig_storage_file",
         "libaconfig_storage_read_api",
-        "libprotobuf",
-        "libtempfile",
+        "librand",
     ],
     data: [
-        "package.map",
-        "flag.map",
-        "flag.val",
+        ":read_api_test_storage_files",
     ],
     test_suites: ["general-tests"],
 }
@@ -25,16 +31,12 @@
     ],
     static_libs: [
         "libgmock",
-        "libaconfig_storage_protos_cc",
-        "libprotobuf-cpp-lite",
         "libaconfig_storage_read_api_cc",
         "libbase",
         "liblog",
     ],
     data: [
-        "package.map",
-        "flag.map",
-        "flag.val",
+        ":read_api_test_storage_files",
     ],
     test_suites: [
         "device-tests",
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.info b/tools/aconfig/aconfig_storage_read_api/tests/flag.info
new file mode 100644
index 0000000..6223edf
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/flag.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.map b/tools/aconfig/aconfig_storage_read_api/tests/flag.map
index d26e00f..e868f53 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/flag.map
+++ b/tools/aconfig/aconfig_storage_read_api/tests/flag.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
new file mode 100644
index 0000000..a26b257
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import java.io.IOException;
+import java.nio.MappedByteBuffer;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import android.aconfig.storage.AconfigStorageReadAPI;
+import android.aconfig.storage.PackageReadContext;
+import android.aconfig.storage.FlagReadContext;
+import android.aconfig.storage.FlagReadContext.StoredFlagType;
+
+@RunWith(JUnit4.class)
+public class AconfigStorageReadAPITest {
+
+    private String mStorageDir = "/data/local/tmp/aconfig_java_api_test";
+
+    @Test
+    public void testPackageContextQuery() {
+        MappedByteBuffer packageMap = null;
+        try {
+            packageMap = AconfigStorageReadAPI.mapStorageFile(
+                mStorageDir + "/maps/mockup.package.map");
+        } catch(IOException ex){
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(packageMap != null);
+
+        try {
+            PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
+                packageMap, "com.android.aconfig.storage.test_1");
+            assertEquals(context.mPackageId, 0);
+            assertEquals(context.mBooleanStartIndex, 0);
+
+            context = AconfigStorageReadAPI.getPackageReadContext(
+                packageMap, "com.android.aconfig.storage.test_2");
+            assertEquals(context.mPackageId, 1);
+            assertEquals(context.mBooleanStartIndex, 3);
+
+            context = AconfigStorageReadAPI.getPackageReadContext(
+                packageMap, "com.android.aconfig.storage.test_4");
+            assertEquals(context.mPackageId, 2);
+            assertEquals(context.mBooleanStartIndex, 6);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testNonExistPackageContextQuery() {
+        MappedByteBuffer packageMap = null;
+        try {
+            packageMap = AconfigStorageReadAPI.mapStorageFile(
+                mStorageDir + "/maps/mockup.package.map");
+        } catch(IOException ex){
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(packageMap != null);
+
+        try {
+            PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
+                packageMap, "unknown");
+            assertEquals(context.mPackageId, -1);
+            assertEquals(context.mBooleanStartIndex, -1);
+        } catch(IOException ex){
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testFlagContextQuery() {
+        MappedByteBuffer flagMap = null;
+        try {
+            flagMap = AconfigStorageReadAPI.mapStorageFile(
+                mStorageDir + "/maps/mockup.flag.map");
+        } catch(IOException ex){
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagMap != null);
+
+        class Baseline {
+            public int mPackageId;
+            public String mFlagName;
+            public StoredFlagType mFlagType;
+            public int mFlagIndex;
+
+            public Baseline(int packageId,
+                    String flagName,
+                    StoredFlagType flagType,
+                    int flagIndex) {
+                mPackageId = packageId;
+                mFlagName = flagName;
+                mFlagType = flagType;
+                mFlagIndex = flagIndex;
+            }
+        }
+
+        List<Baseline> baselines = new ArrayList<>();
+        baselines.add(new Baseline(0, "enabled_ro", StoredFlagType.ReadOnlyBoolean, 1));
+        baselines.add(new Baseline(0, "enabled_rw", StoredFlagType.ReadWriteBoolean, 2));
+        baselines.add(new Baseline(2, "enabled_rw", StoredFlagType.ReadWriteBoolean, 1));
+        baselines.add(new Baseline(1, "disabled_rw", StoredFlagType.ReadWriteBoolean, 0));
+        baselines.add(new Baseline(1, "enabled_fixed_ro", StoredFlagType.FixedReadOnlyBoolean, 1));
+        baselines.add(new Baseline(1, "enabled_ro", StoredFlagType.ReadOnlyBoolean, 2));
+        baselines.add(new Baseline(2, "enabled_fixed_ro", StoredFlagType.FixedReadOnlyBoolean, 0));
+        baselines.add(new Baseline(0, "disabled_rw", StoredFlagType.ReadWriteBoolean, 0));
+
+        try {
+            for (Baseline baseline : baselines) {
+                FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
+                    flagMap, baseline.mPackageId,  baseline.mFlagName);
+                assertEquals(context.mFlagType, baseline.mFlagType);
+                assertEquals(context.mFlagIndex, baseline.mFlagIndex);
+            }
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testNonExistFlagContextQuery() {
+        MappedByteBuffer flagMap = null;
+        try {
+            flagMap = AconfigStorageReadAPI.mapStorageFile(
+                mStorageDir + "/maps/mockup.flag.map");
+        } catch(IOException ex){
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagMap != null);
+
+        try {
+            FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
+                flagMap, 0,  "unknown");
+            assertEquals(context.mFlagType, null);
+            assertEquals(context.mFlagIndex, -1);
+
+            context = AconfigStorageReadAPI.getFlagReadContext(
+                flagMap, 3,  "enabled_ro");
+            assertEquals(context.mFlagType, null);
+            assertEquals(context.mFlagIndex, -1);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testBooleanFlagValueQuery() {
+        MappedByteBuffer flagVal = null;
+        try {
+            flagVal = AconfigStorageReadAPI.mapStorageFile(
+                mStorageDir + "/boot/mockup.val");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagVal != null);
+
+        boolean[] baselines = {false, true, true, false, true, true, true, true};
+        for (int i = 0; i < 8; ++i) {
+            try {
+                Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, i);
+                assertEquals(value, baselines[i]);
+            } catch (IOException ex) {
+                assertTrue(ex.toString(), false);
+            }
+        }
+    }
+
+    @Test
+    public void testInvalidBooleanFlagValueQuery() {
+        MappedByteBuffer flagVal = null;
+        try {
+            flagVal = AconfigStorageReadAPI.mapStorageFile(
+                mStorageDir + "/boot/mockup.val");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagVal != null);
+
+        try {
+            Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9);
+            assertTrue("should throw", false);
+        } catch (IOException ex) {
+            String expectedErrmsg = "invalid storage file byte offset";
+            assertTrue(ex.toString(), ex.toString().contains(expectedErrmsg));
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
new file mode 100644
index 0000000..d94b2b4
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
@@ -0,0 +1,21 @@
+android_test {
+    name: "aconfig_storage_read_api.test.java",
+    srcs: ["AconfigStorageReadAPITest.java"],
+    static_libs: [
+        "androidx.test.rules",
+        "libaconfig_storage_read_api_java",
+        "junit",
+    ],
+    jni_libs: [
+        "libaconfig_storage_read_api_rust_jni",
+    ],
+    data: [
+        ":read_api_test_storage_files",
+    ],
+    platform_apis: true,
+    certificate: "platform",
+    test_suites: [
+        "general-tests",
+    ],
+    team: "trendy_team_android_core_experiments",
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml b/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml
new file mode 100644
index 0000000..78bfb37
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  ~ Copyright (C) 2024 The Android Open Source Project
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="android.aconfig_storage.test">
+    <application>
+        <uses-library android:name="android.test.runner" />
+    </application>
+
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+                     android:targetPackage="android.aconfig_storage.test" />
+
+</manifest>
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml b/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml
new file mode 100644
index 0000000..99c9e25
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml
@@ -0,0 +1,51 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<configuration description="Config for aconfig storage read java api tests">
+    <!-- Need root to start virtualizationservice -->
+    <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer"/>
+
+    <!-- Prepare test directories. -->
+    <target_preparer class="com.android.tradefed.targetprep.RunCommandTargetPreparer">
+        <option name="throw-if-cmd-fail" value="true" />
+        <option name="run-command" value="mkdir -p /data/local/tmp/aconfig_java_api" />
+        <option name="teardown-command" value="rm -rf /data/local/tmp/aconfig_java_api" />
+    </target_preparer>
+
+    <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+        <option name="test-file-name" value="aconfig_storage_read_api.test.java.apk" />
+    </target_preparer>
+
+    <target_preparer class="com.android.tradefed.targetprep.DisableSELinuxTargetPreparer" />
+
+    <!-- Test data files -->
+    <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
+        <option name="cleanup" value="true" />
+        <option name="abort-on-push-failure" value="true" />
+        <option name="push-file" key="package.map"
+                value="/data/local/tmp/aconfig_java_api_test/maps/mockup.package.map" />
+        <option name="push-file" key="flag.map"
+                value="/data/local/tmp/aconfig_java_api_test/maps/mockup.flag.map" />
+        <option name="push-file" key="flag.val"
+                value="/data/local/tmp/aconfig_java_api_test/boot/mockup.val" />
+    </target_preparer>
+
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
+        <option name="package" value="android.aconfig_storage.test" />
+        <option name="runtime-hint" value="1m" />
+    </test>
+</configuration>
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
index 539474b..6d29045 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
@@ -16,16 +16,15 @@
 
 #include <string>
 #include <vector>
+#include <memory>
 #include <cstdio>
 
 #include <sys/stat.h>
 #include "aconfig_storage/aconfig_storage_read_api.hpp"
 #include <gtest/gtest.h>
-#include <protos/aconfig_storage_metadata.pb.h>
 #include <android-base/file.h>
 #include <android-base/result.h>
 
-using android::aconfig_storage_metadata::storage_files;
 using namespace android::base;
 
 namespace api = aconfig_storage;
@@ -33,59 +32,48 @@
 
 class AconfigStorageTest : public ::testing::Test {
  protected:
-  Result<std::string> copy_to_temp_file(std::string const& source_file) {
-    auto temp_file = std::string(std::tmpnam(nullptr));
+  Result<void> copy_file(std::string const& src_file,
+                         std::string const& dst_file) {
     auto content = std::string();
-    if (!ReadFileToString(source_file, &content)) {
-      return Error() << "failed to read file: " << source_file;
+    if (!ReadFileToString(src_file, &content)) {
+      return Error() << "failed to read file: " << src_file;
     }
-    if (!WriteStringToFile(content, temp_file)) {
-      return Error() << "failed to copy file: " << source_file;
+    if (!WriteStringToFile(content, dst_file)) {
+      return Error() << "failed to copy file: " << dst_file;
     }
-    return temp_file;
-  }
-
-  Result<std::string> write_storage_location_pb_file(std::string const& package_map,
-                                                     std::string const& flag_map,
-                                                     std::string const& flag_val) {
-    auto temp_file = std::tmpnam(nullptr);
-    auto proto = storage_files();
-    auto* info = proto.add_files();
-    info->set_version(0);
-    info->set_container("mockup");
-    info->set_package_map(package_map);
-    info->set_flag_map(flag_map);
-    info->set_flag_val(flag_val);
-    info->set_timestamp(12345);
-
-    auto content = std::string();
-    proto.SerializeToString(&content);
-    if (!WriteStringToFile(content, temp_file)) {
-      return Error() << "failed to write storage records pb file";
-    }
-    return temp_file;
+    return {};
   }
 
   void SetUp() override {
     auto const test_dir = android::base::GetExecutableDirectory();
-    package_map = *copy_to_temp_file(test_dir + "/package.map");
-    flag_map = *copy_to_temp_file(test_dir + "/flag.map");
-    flag_val = *copy_to_temp_file(test_dir + "/flag.val");
-    storage_record_pb = *write_storage_location_pb_file(
-        package_map, flag_map, flag_val);
+    storage_dir = std::string(root_dir.path);
+    auto maps_dir = storage_dir + "/maps";
+    auto boot_dir = storage_dir + "/boot";
+    mkdir(maps_dir.c_str(), 0775);
+    mkdir(boot_dir.c_str(), 0775);
+    package_map = std::string(maps_dir) + "/mockup.package.map";
+    flag_map = std::string(maps_dir) + "/mockup.flag.map";
+    flag_val = std::string(boot_dir) + "/mockup.val";
+    flag_info = std::string(boot_dir) + "/mockup.info";
+    copy_file(test_dir + "/package.map", package_map);
+    copy_file(test_dir + "/flag.map", flag_map);
+    copy_file(test_dir + "/flag.val", flag_val);
+    copy_file(test_dir + "/flag.info", flag_info);
   }
 
   void TearDown() override {
     std::remove(package_map.c_str());
     std::remove(flag_map.c_str());
     std::remove(flag_val.c_str());
-    std::remove(storage_record_pb.c_str());
+    std::remove(flag_info.c_str());
   }
 
+  TemporaryDir root_dir;
+  std::string storage_dir;
   std::string package_map;
   std::string flag_map;
   std::string flag_val;
-  std::string storage_record_pb;
+  std::string flag_info;
 };
 
 /// Test to lock down storage file version query api
@@ -99,119 +87,162 @@
   version = api::get_storage_file_version(flag_val);
   ASSERT_TRUE(version.ok());
   ASSERT_EQ(*version, 1);
+  version = api::get_storage_file_version(flag_info);
+  ASSERT_TRUE(version.ok());
+  ASSERT_EQ(*version, 1);
 }
 
 /// Negative test to lock down the error when mapping none exist storage files
 TEST_F(AconfigStorageTest, test_none_exist_storage_file_mapping) {
-  auto mapped_file = private_api::get_mapped_file_impl(
-      storage_record_pb, "vendor", api::StorageFileType::package_map);
-  ASSERT_FALSE(mapped_file.ok());
-  ASSERT_EQ(mapped_file.error().message(),
-            "Unable to find storage files for container vendor");
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "vendor", api::StorageFileType::package_map);
+  ASSERT_FALSE(mapped_file_result.ok());
+  ASSERT_EQ(mapped_file_result.error(),
+            std::string("failed to open ") + storage_dir
+            + "/maps/vendor.package.map: No such file or directory");
 }
 
-/// Test to lock down storage package offset query api
-TEST_F(AconfigStorageTest, test_package_offset_query) {
-  auto mapped_file = private_api::get_mapped_file_impl(
-      storage_record_pb, "mockup", api::StorageFileType::package_map);
-  ASSERT_TRUE(mapped_file.ok());
+/// Test to lock down storage package context query api
+TEST_F(AconfigStorageTest, test_package_context_query) {
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::package_map);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
 
-  auto offset = api::get_package_offset(
+  auto context = api::get_package_read_context(
       *mapped_file, "com.android.aconfig.storage.test_1");
-  ASSERT_TRUE(offset.ok());
-  ASSERT_TRUE(offset->package_exists);
-  ASSERT_EQ(offset->package_id, 0);
-  ASSERT_EQ(offset->boolean_offset, 0);
+  ASSERT_TRUE(context.ok());
+  ASSERT_TRUE(context->package_exists);
+  ASSERT_EQ(context->package_id, 0);
+  ASSERT_EQ(context->boolean_start_index, 0);
 
-  offset = api::get_package_offset(
+  context = api::get_package_read_context(
       *mapped_file, "com.android.aconfig.storage.test_2");
-  ASSERT_TRUE(offset.ok());
-  ASSERT_TRUE(offset->package_exists);
-  ASSERT_EQ(offset->package_id, 1);
-  ASSERT_EQ(offset->boolean_offset, 3);
+  ASSERT_TRUE(context.ok());
+  ASSERT_TRUE(context->package_exists);
+  ASSERT_EQ(context->package_id, 1);
+  ASSERT_EQ(context->boolean_start_index, 3);
 
-  offset = api::get_package_offset(
+  context = api::get_package_read_context(
       *mapped_file, "com.android.aconfig.storage.test_4");
-  ASSERT_TRUE(offset.ok());
-  ASSERT_TRUE(offset->package_exists);
-  ASSERT_EQ(offset->package_id, 2);
-  ASSERT_EQ(offset->boolean_offset, 6);
+  ASSERT_TRUE(context.ok());
+  ASSERT_TRUE(context->package_exists);
+  ASSERT_EQ(context->package_id, 2);
+  ASSERT_EQ(context->boolean_start_index, 6);
 }
 
 /// Test to lock down when querying none exist package
-TEST_F(AconfigStorageTest, test_none_existent_package_offset_query) {
-  auto mapped_file = private_api::get_mapped_file_impl(
-      storage_record_pb, "mockup", api::StorageFileType::package_map);
-  ASSERT_TRUE(mapped_file.ok());
+TEST_F(AconfigStorageTest, test_none_existent_package_context_query) {
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::package_map);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
 
-  auto offset = api::get_package_offset(
+  auto context = api::get_package_read_context(
       *mapped_file, "com.android.aconfig.storage.test_3");
-  ASSERT_TRUE(offset.ok());
-  ASSERT_FALSE(offset->package_exists);
+  ASSERT_TRUE(context.ok());
+  ASSERT_FALSE(context->package_exists);
 }
 
-/// Test to lock down storage flag offset query api
-TEST_F(AconfigStorageTest, test_flag_offset_query) {
-  auto mapped_file = private_api::get_mapped_file_impl(
-      storage_record_pb, "mockup", api::StorageFileType::flag_map);
-  ASSERT_TRUE(mapped_file.ok());
+/// Test to lock down storage flag context query api
+TEST_F(AconfigStorageTest, test_flag_context_query) {
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::flag_map);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
 
-  auto baseline = std::vector<std::tuple<int, std::string, int>>{
-    {0, "enabled_ro", 1},
-    {0, "enabled_rw", 2},
-    {1, "disabled_ro", 0},
-    {2, "enabled_ro", 1},
-    {1, "enabled_fixed_ro", 1},
-    {1, "enabled_ro", 2},
-    {2, "enabled_fixed_ro", 0},
-    {0, "disabled_rw", 0},
+  auto baseline = std::vector<std::tuple<int, std::string, api::StoredFlagType, int>>{
+    {0, "enabled_ro", api::StoredFlagType::ReadOnlyBoolean, 1},
+    {0, "enabled_rw", api::StoredFlagType::ReadWriteBoolean, 2},
+    {2, "enabled_rw", api::StoredFlagType::ReadWriteBoolean, 1},
+    {1, "disabled_rw", api::StoredFlagType::ReadWriteBoolean, 0},
+    {1, "enabled_fixed_ro", api::StoredFlagType::FixedReadOnlyBoolean, 1},
+    {1, "enabled_ro", api::StoredFlagType::ReadOnlyBoolean, 2},
+    {2, "enabled_fixed_ro", api::StoredFlagType::FixedReadOnlyBoolean, 0},
+    {0, "disabled_rw", api::StoredFlagType::ReadWriteBoolean, 0},
   };
-  for (auto const&[package_id, flag_name, expected_offset] : baseline) {
-    auto offset = api::get_flag_offset(*mapped_file, package_id, flag_name);
-    ASSERT_TRUE(offset.ok());
-    ASSERT_TRUE(offset->flag_exists);
-    ASSERT_EQ(offset->flag_offset, expected_offset);
+  for (auto const&[package_id, flag_name, flag_type, flag_index] : baseline) {
+    auto context = api::get_flag_read_context(*mapped_file, package_id, flag_name);
+    ASSERT_TRUE(context.ok());
+    ASSERT_TRUE(context->flag_exists);
+    ASSERT_EQ(context->flag_type, flag_type);
+    ASSERT_EQ(context->flag_index, flag_index);
   }
 }
 
 /// Test to lock down when querying none exist flag
-TEST_F(AconfigStorageTest, test_none_existent_flag_offset_query) {
-  auto mapped_file = private_api::get_mapped_file_impl(
-      storage_record_pb, "mockup", api::StorageFileType::flag_map);
-  ASSERT_TRUE(mapped_file.ok());
+TEST_F(AconfigStorageTest, test_none_existent_flag_context_query) {
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::flag_map);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
 
-  auto offset = api::get_flag_offset(*mapped_file, 0, "none_exist");
-  ASSERT_TRUE(offset.ok());
-  ASSERT_FALSE(offset->flag_exists);
+  auto context = api::get_flag_read_context(*mapped_file, 0, "none_exist");
+  ASSERT_TRUE(context.ok());
+  ASSERT_FALSE(context->flag_exists);
 
-  offset = api::get_flag_offset(*mapped_file, 3, "enabled_ro");
-  ASSERT_TRUE(offset.ok());
-  ASSERT_FALSE(offset->flag_exists);
+  context = api::get_flag_read_context(*mapped_file, 3, "enabled_ro");
+  ASSERT_TRUE(context.ok());
+  ASSERT_FALSE(context->flag_exists);
 }
 
 /// Test to lock down storage flag value query api
 TEST_F(AconfigStorageTest, test_boolean_flag_value_query) {
-  auto mapped_file = private_api::get_mapped_file_impl(
-      storage_record_pb, "mockup", api::StorageFileType::flag_val);
-  ASSERT_TRUE(mapped_file.ok());
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::flag_val);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
 
   auto expected_value = std::vector<bool>{
     false, true, true, false, true, true, true, true};
-  for (int offset = 0; offset < 8; ++offset) {
-    auto value = api::get_boolean_flag_value(*mapped_file, offset);
+  for (int index = 0; index < 8; ++index) {
+    auto value = api::get_boolean_flag_value(*mapped_file, index);
     ASSERT_TRUE(value.ok());
-    ASSERT_EQ(*value, expected_value[offset]);
+    ASSERT_EQ(*value, expected_value[index]);
   }
 }
 
 /// Negative test to lock down the error when querying flag value out of range
 TEST_F(AconfigStorageTest, test_invalid_boolean_flag_value_query) {
-  auto mapped_file = private_api::get_mapped_file_impl(
-      storage_record_pb, "mockup", api::StorageFileType::flag_val);
-  ASSERT_TRUE(mapped_file.ok());
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::flag_val);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
 
   auto value = api::get_boolean_flag_value(*mapped_file, 8);
   ASSERT_FALSE(value.ok());
-  ASSERT_EQ(value.error().message(),
+  ASSERT_EQ(value.error(),
             std::string("InvalidStorageFileOffset(Flag value offset goes beyond the end of the file.)"));
 }
+
+/// Test to lock down storage flag info query api
+TEST_F(AconfigStorageTest, test_boolean_flag_info_query) {
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::flag_info);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
+
+  auto expected_value = std::vector<bool>{
+    true, false, true, true, false, false, false, true};
+  for (int index = 0; index < 8; ++index) {
+    auto attribute = api::get_flag_attribute(*mapped_file, api::FlagValueType::Boolean, index);
+    ASSERT_TRUE(attribute.ok());
+    ASSERT_EQ(*attribute & static_cast<uint8_t>(api::FlagInfoBit::HasServerOverride), 0);
+    ASSERT_EQ((*attribute & static_cast<uint8_t>(api::FlagInfoBit::IsReadWrite)) != 0,
+              expected_value[index]);
+    ASSERT_EQ(*attribute & static_cast<uint8_t>(api::FlagInfoBit::HasLocalOverride), 0);
+  }
+}
+
+/// Negative test to lock down the error when querying flag info out of range
+TEST_F(AconfigStorageTest, test_invalid_boolean_flag_info_query) {
+  auto mapped_file_result = private_api::get_mapped_file_impl(
+      storage_dir, "mockup", api::StorageFileType::flag_info);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MappedStorageFile>(*mapped_file_result);
+
+  auto attribute = api::get_flag_attribute(*mapped_file, api::FlagValueType::Boolean, 8);
+  ASSERT_FALSE(attribute.ok());
+  ASSERT_EQ(attribute.error(),
+            std::string("InvalidStorageFileOffset(Flag info offset goes beyond the end of the file.)"));
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
index 7687d0f..afc44d4 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
+++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
@@ -1,155 +1,151 @@
 #[cfg(not(feature = "cargo"))]
 mod aconfig_storage_rust_test {
-    use aconfig_storage_file::protos::storage_record_pb::write_proto_to_temp_file;
-    use aconfig_storage_file::StorageFileType;
+    use aconfig_storage_file::{FlagInfoBit, FlagValueType, StorageFileType, StoredFlagType};
     use aconfig_storage_read_api::{
-        get_boolean_flag_value, get_flag_offset, get_package_offset, get_storage_file_version,
-        mapped_file::get_mapped_file, PackageOffset,
+        get_boolean_flag_value, get_flag_attribute, get_flag_read_context,
+        get_package_read_context, get_storage_file_version, mapped_file::get_mapped_file,
+        PackageReadContext,
     };
+    use rand::Rng;
     use std::fs;
-    use tempfile::NamedTempFile;
 
-    pub fn copy_to_temp_file(source_file: &str) -> NamedTempFile {
-        let file = NamedTempFile::new().unwrap();
-        fs::copy(source_file, file.path()).unwrap();
-        file
-    }
+    fn create_test_storage_files() -> String {
+        let mut rng = rand::thread_rng();
+        let number: u32 = rng.gen();
+        let storage_dir = String::from("/tmp/") + &number.to_string();
+        if std::fs::metadata(&storage_dir).is_ok() {
+            fs::remove_dir_all(&storage_dir).unwrap();
+        }
+        let maps_dir = storage_dir.clone() + "/maps";
+        let boot_dir = storage_dir.clone() + "/boot";
+        fs::create_dir(&storage_dir).unwrap();
+        fs::create_dir(maps_dir).unwrap();
+        fs::create_dir(boot_dir).unwrap();
 
-    fn create_test_storage_files() -> [NamedTempFile; 4] {
-        let package_map = copy_to_temp_file("./package.map");
-        let flag_map = copy_to_temp_file("./flag.map");
-        let flag_val = copy_to_temp_file("./flag.val");
+        let package_map = storage_dir.clone() + "/maps/mockup.package.map";
+        let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
+        let flag_val = storage_dir.clone() + "/boot/mockup.val";
+        let flag_info = storage_dir.clone() + "/boot/mockup.info";
+        fs::copy("./package.map", package_map).unwrap();
+        fs::copy("./flag.map", flag_map).unwrap();
+        fs::copy("./flag.val", flag_val).unwrap();
+        fs::copy("./flag.info", flag_info).unwrap();
 
-        let text_proto = format!(
-            r#"
-files {{
-    version: 0
-    container: "mockup"
-    package_map: "{}"
-    flag_map: "{}"
-    flag_val: "{}"
-    timestamp: 12345
-}}
-"#,
-            package_map.path().display(),
-            flag_map.path().display(),
-            flag_val.path().display()
-        );
-        let pb_file = write_proto_to_temp_file(&text_proto).unwrap();
-        [package_map, flag_map, flag_val, pb_file]
+        storage_dir
     }
 
     #[test]
     fn test_unavailable_stoarge() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+        let storage_dir = create_test_storage_files();
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let err = unsafe {
-            get_mapped_file(&pb_file_path, "vendor", StorageFileType::PackageMap).unwrap_err()
+            get_mapped_file(&storage_dir, "vendor", StorageFileType::PackageMap).unwrap_err()
         };
         assert_eq!(
             format!("{:?}", err),
-            "StorageFileNotFound(Storage file does not exist for vendor)"
+            format!(
+                "StorageFileNotFound(storage file {}/maps/vendor.package.map does not exist)",
+                storage_dir
+            )
         );
     }
 
     #[test]
-    fn test_package_offset_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+    fn test_package_context_query() {
+        let storage_dir = create_test_storage_files();
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let package_mapped_file = unsafe {
-            get_mapped_file(&pb_file_path, "mockup", StorageFileType::PackageMap).unwrap()
+            get_mapped_file(&storage_dir, "mockup", StorageFileType::PackageMap).unwrap()
         };
 
-        let package_offset =
-            get_package_offset(&package_mapped_file, "com.android.aconfig.storage.test_1")
+        let package_context =
+            get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_1")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 0, boolean_offset: 0 };
-        assert_eq!(package_offset, expected_package_offset);
+        let expected_package_context = PackageReadContext { package_id: 0, boolean_start_index: 0 };
+        assert_eq!(package_context, expected_package_context);
 
-        let package_offset =
-            get_package_offset(&package_mapped_file, "com.android.aconfig.storage.test_2")
+        let package_context =
+            get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_2")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 1, boolean_offset: 3 };
-        assert_eq!(package_offset, expected_package_offset);
+        let expected_package_context = PackageReadContext { package_id: 1, boolean_start_index: 3 };
+        assert_eq!(package_context, expected_package_context);
 
-        let package_offset =
-            get_package_offset(&package_mapped_file, "com.android.aconfig.storage.test_4")
+        let package_context =
+            get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_4")
                 .unwrap()
                 .unwrap();
-        let expected_package_offset = PackageOffset { package_id: 2, boolean_offset: 6 };
-        assert_eq!(package_offset, expected_package_offset);
+        let expected_package_context = PackageReadContext { package_id: 2, boolean_start_index: 6 };
+        assert_eq!(package_context, expected_package_context);
     }
 
     #[test]
-    fn test_none_exist_package_offset_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+    fn test_none_exist_package_context_query() {
+        let storage_dir = create_test_storage_files();
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let package_mapped_file = unsafe {
-            get_mapped_file(&pb_file_path, "mockup", StorageFileType::PackageMap).unwrap()
+            get_mapped_file(&storage_dir, "mockup", StorageFileType::PackageMap).unwrap()
         };
 
-        let package_offset_option =
-            get_package_offset(&package_mapped_file, "com.android.aconfig.storage.test_3").unwrap();
-        assert_eq!(package_offset_option, None);
+        let package_context_option =
+            get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_3")
+                .unwrap();
+        assert_eq!(package_context_option, None);
     }
 
     #[test]
-    fn test_flag_offset_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+    fn test_flag_context_query() {
+        let storage_dir = create_test_storage_files();
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_mapped_file =
-            unsafe { get_mapped_file(&pb_file_path, "mockup", StorageFileType::FlagMap).unwrap() };
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagMap).unwrap() };
 
         let baseline = vec![
-            (0, "enabled_ro", 1u16),
-            (0, "enabled_rw", 2u16),
-            (1, "disabled_ro", 0u16),
-            (2, "enabled_ro", 1u16),
-            (1, "enabled_fixed_ro", 1u16),
-            (1, "enabled_ro", 2u16),
-            (2, "enabled_fixed_ro", 0u16),
-            (0, "disabled_rw", 0u16),
+            (0, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 1u16),
+            (0, "enabled_rw", StoredFlagType::ReadWriteBoolean, 2u16),
+            (2, "enabled_rw", StoredFlagType::ReadWriteBoolean, 1u16),
+            (1, "disabled_rw", StoredFlagType::ReadWriteBoolean, 0u16),
+            (1, "enabled_fixed_ro", StoredFlagType::FixedReadOnlyBoolean, 1u16),
+            (1, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 2u16),
+            (2, "enabled_fixed_ro", StoredFlagType::FixedReadOnlyBoolean, 0u16),
+            (0, "disabled_rw", StoredFlagType::ReadWriteBoolean, 0u16),
         ];
-        for (package_id, flag_name, expected_offset) in baseline.into_iter() {
-            let flag_offset =
-                get_flag_offset(&flag_mapped_file, package_id, flag_name).unwrap().unwrap();
-            assert_eq!(flag_offset, expected_offset);
+        for (package_id, flag_name, flag_type, flag_index) in baseline.into_iter() {
+            let flag_context =
+                get_flag_read_context(&flag_mapped_file, package_id, flag_name).unwrap().unwrap();
+            assert_eq!(flag_context.flag_type, flag_type);
+            assert_eq!(flag_context.flag_index, flag_index);
         }
     }
 
     #[test]
-    fn test_none_exist_flag_offset_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+    fn test_none_exist_flag_context_query() {
+        let storage_dir = create_test_storage_files();
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_mapped_file =
-            unsafe { get_mapped_file(&pb_file_path, "mockup", StorageFileType::FlagMap).unwrap() };
-        let flag_offset_option = get_flag_offset(&flag_mapped_file, 0, "none_exist").unwrap();
-        assert_eq!(flag_offset_option, None);
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagMap).unwrap() };
+        let flag_context_option =
+            get_flag_read_context(&flag_mapped_file, 0, "none_exist").unwrap();
+        assert_eq!(flag_context_option, None);
 
-        let flag_offset_option = get_flag_offset(&flag_mapped_file, 3, "enabled_ro").unwrap();
-        assert_eq!(flag_offset_option, None);
+        let flag_context_option =
+            get_flag_read_context(&flag_mapped_file, 3, "enabled_ro").unwrap();
+        assert_eq!(flag_context_option, None);
     }
 
     #[test]
     fn test_boolean_flag_value_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+        let storage_dir = create_test_storage_files();
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_value_file =
-            unsafe { get_mapped_file(&pb_file_path, "mockup", StorageFileType::FlagVal).unwrap() };
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagVal).unwrap() };
         let baseline: Vec<bool> = vec![false, true, true, false, true, true, true, true];
         for (offset, expected_value) in baseline.into_iter().enumerate() {
             let flag_value = get_boolean_flag_value(&flag_value_file, offset as u32).unwrap();
@@ -159,12 +155,11 @@
 
     #[test]
     fn test_invalid_boolean_flag_value_query() {
-        let [_package_map, _flag_map, _flag_val, pb_file] = create_test_storage_files();
-        let pb_file_path = pb_file.path().display().to_string();
+        let storage_dir = create_test_storage_files();
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_value_file =
-            unsafe { get_mapped_file(&pb_file_path, "mockup", StorageFileType::FlagVal).unwrap() };
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagVal).unwrap() };
         let err = get_boolean_flag_value(&flag_value_file, 8u32).unwrap_err();
         assert_eq!(
             format!("{:?}", err),
@@ -173,9 +168,41 @@
     }
 
     #[test]
+    fn test_flag_info_query() {
+        let storage_dir = create_test_storage_files();
+        // SAFETY:
+        // The safety here is ensured as the test process will not write to temp storage file
+        let flag_info_file =
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagInfo).unwrap() };
+        let is_rw: Vec<bool> = vec![true, false, true, true, false, false, false, true];
+        for (offset, expected_value) in is_rw.into_iter().enumerate() {
+            let attribute =
+                get_flag_attribute(&flag_info_file, FlagValueType::Boolean, offset as u32).unwrap();
+            assert!((attribute & FlagInfoBit::HasServerOverride as u8) == 0u8);
+            assert_eq!((attribute & FlagInfoBit::IsReadWrite as u8) != 0u8, expected_value);
+            assert!((attribute & FlagInfoBit::HasLocalOverride as u8) == 0u8);
+        }
+    }
+
+    #[test]
+    fn test_invalid_boolean_flag_info_query() {
+        let storage_dir = create_test_storage_files();
+        // SAFETY:
+        // The safety here is ensured as the test process will not write to temp storage file
+        let flag_info_file =
+            unsafe { get_mapped_file(&storage_dir, "mockup", StorageFileType::FlagInfo).unwrap() };
+        let err = get_flag_attribute(&flag_info_file, FlagValueType::Boolean, 8u32).unwrap_err();
+        assert_eq!(
+            format!("{:?}", err),
+            "InvalidStorageFileOffset(Flag info offset goes beyond the end of the file.)"
+        );
+    }
+
+    #[test]
     fn test_storage_version_query() {
         assert_eq!(get_storage_file_version("./package.map").unwrap(), 1);
         assert_eq!(get_storage_file_version("./flag.map").unwrap(), 1);
         assert_eq!(get_storage_file_version("./flag.val").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./flag.info").unwrap(), 1);
     }
 }
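
These tests exercise the new directory-based lookup: storage files are now resolved
straight from a storage root (maps/<container>.package.map, boot/<container>.val,
boot/<container>.info) instead of going through a records pb file. A minimal sketch of an
end-to-end read under that layout, assuming the public functions keep the signatures the
tests above use (the storage root, container, package and flag names are placeholders):

    use aconfig_storage_read_api::{
        get_boolean_flag_value, get_flag_read_context, get_mapped_file,
        get_package_read_context, StorageFileType,
    };

    fn read_flag(storage_root: &str, container: &str) -> Result<bool, Box<dyn std::error::Error>> {
        // SAFETY: nothing else writes to these storage files while they are mapped.
        let package_map =
            unsafe { get_mapped_file(storage_root, container, StorageFileType::PackageMap)? };
        let flag_map =
            unsafe { get_mapped_file(storage_root, container, StorageFileType::FlagMap)? };
        let flag_val =
            unsafe { get_mapped_file(storage_root, container, StorageFileType::FlagVal)? };

        // Resolve the package first, then the flag within that package.
        let package = get_package_read_context(&package_map, "com.android.aconfig.storage.test_1")?
            .ok_or("package not found")?;
        let flag = get_flag_read_context(&flag_map, package.package_id, "enabled_rw")?
            .ok_or("flag not found")?;

        // The boolean value sits at the package's start index plus the flag's index.
        let value =
            get_boolean_flag_value(&flag_val, package.boolean_start_index + flag.flag_index as u32)?;
        Ok(value)
    }
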
diff --git a/tools/aconfig/aconfig_storage_write_api/Android.bp b/tools/aconfig/aconfig_storage_write_api/Android.bp
index 0f15b9c..0f1962c 100644
--- a/tools/aconfig/aconfig_storage_write_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_write_api/Android.bp
@@ -14,6 +14,7 @@
         "libcxx",
         "libthiserror",
         "libaconfig_storage_file",
+        "libaconfig_storage_read_api",
     ],
 }
 
@@ -30,6 +31,7 @@
     defaults: ["aconfig_storage_write_api.defaults"],
     data: [
         "tests/flag.val",
+        "tests/flag.info",
     ],
     rustlibs: [
         "libaconfig_storage_read_api",
@@ -68,13 +70,13 @@
     srcs: ["aconfig_storage_write_api.cpp"],
     generated_headers: [
         "cxx-bridge-header",
-        "libcxx_aconfig_storage_write_api_bridge_header"
+        "libcxx_aconfig_storage_write_api_bridge_header",
     ],
     generated_sources: ["libcxx_aconfig_storage_write_api_bridge_code"],
     whole_static_libs: ["libaconfig_storage_write_api_cxx_bridge"],
     export_include_dirs: ["include"],
     static_libs: [
-        "libaconfig_storage_protos_cc",
+        "libaconfig_storage_read_api_cc",
         "libprotobuf-cpp-lite",
         "libbase",
     ],
diff --git a/tools/aconfig/aconfig_storage_write_api/Cargo.toml b/tools/aconfig/aconfig_storage_write_api/Cargo.toml
index eaa55f2..2ce6edf 100644
--- a/tools/aconfig/aconfig_storage_write_api/Cargo.toml
+++ b/tools/aconfig/aconfig_storage_write_api/Cargo.toml
@@ -13,7 +13,6 @@
 memmap2 = "0.8.0"
 tempfile = "3.9.0"
 thiserror = "1.0.56"
-protobuf = "3.2.0"
 aconfig_storage_file = { path = "../aconfig_storage_file" }
 aconfig_storage_read_api = { path = "../aconfig_storage_read_api" }
 
diff --git a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
index ea88f05..7b43574 100644
--- a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
+++ b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
@@ -1,7 +1,7 @@
 
 #include <android-base/file.h>
 #include <android-base/logging.h>
-#include <protos/aconfig_storage_metadata.pb.h>
+#include <android-base/unique_fd.h>
 
 #include <sys/mman.h>
 #include <sys/stat.h>
@@ -11,122 +11,96 @@
 #include "aconfig_storage/lib.rs.h"
 #include "aconfig_storage/aconfig_storage_write_api.hpp"
 
-using storage_records_pb = android::aconfig_storage_metadata::storage_files;
-using storage_record_pb = android::aconfig_storage_metadata::storage_file_info;
-using namespace android::base;
-
 namespace aconfig_storage {
 
-/// Storage location pb file
-static constexpr char kPersistStorageRecordsPb[] =
-    "/metadata/aconfig/persistent_storage_file_records.pb";
-
-/// Read aconfig storage records pb file
-static Result<storage_records_pb> read_storage_records_pb(std::string const& pb_file) {
-  auto records = storage_records_pb();
-  auto content = std::string();
-  if (!ReadFileToString(pb_file, &content)) {
-    return ErrnoError() << "ReadFileToString failed";
-  }
-
-  if (!records.ParseFromString(content)) {
-    return ErrnoError() << "Unable to parse persistent storage records protobuf";
-  }
-  return records;
-}
-
-/// Get storage file path
-static Result<std::string> find_storage_file(
-    std::string const& pb_file,
-    std::string const& container) {
-  auto records_pb = read_storage_records_pb(pb_file);
-  if (!records_pb.ok()) {
-    return Error() << "Unable to read storage records from " << pb_file
-                   << " : " << records_pb.error();
-  }
-
-  for (auto& entry : records_pb->files()) {
-    if (entry.container() == container) {
-        return entry.flag_val();
-    }
-  }
-
-  return Error() << "Unable to find storage files for container " << container;;
-}
-
 /// Map a storage file
-static Result<MappedFlagValueFile> map_storage_file(std::string const& file) {
+android::base::Result<MutableMappedStorageFile *> map_mutable_storage_file(
+    std::string const &file) {
   struct stat file_stat;
   if (stat(file.c_str(), &file_stat) < 0) {
-    return ErrnoError() << "stat failed";
+    return android::base::ErrnoError() << "stat failed";
   }
 
   if ((file_stat.st_mode & (S_IWUSR | S_IWGRP | S_IWOTH)) == 0) {
-    return Error() << "cannot map nonwriteable file";
+    return android::base::Error() << "cannot map nonwriteable file";
   }
 
   size_t file_size = file_stat.st_size;
 
-  const int fd = open(file.c_str(), O_RDWR | O_NOFOLLOW | O_CLOEXEC);
-  if (fd == -1) {
-    return ErrnoError() << "failed to open " << file;
+  android::base::unique_fd ufd(open(file.c_str(), O_RDWR | O_NOFOLLOW | O_CLOEXEC));
+  if (ufd.get() == -1) {
+    return android::base::ErrnoError() << "failed to open " << file;
   };
 
-  void* const map_result =
-      mmap(nullptr, file_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+  void *const map_result =
+      mmap(nullptr, file_size, PROT_READ | PROT_WRITE, MAP_SHARED, ufd.get(), 0);
   if (map_result == MAP_FAILED) {
-    return ErrnoError() << "mmap failed";
+    return android::base::ErrnoError() << "mmap failed";
   }
 
-  auto mapped_file = MappedFlagValueFile();
-  mapped_file.file_ptr = map_result;
-  mapped_file.file_size = file_size;
+  auto mapped_file = new MutableMappedStorageFile();
+  mapped_file->file_ptr = map_result;
+  mapped_file->file_size = file_size;
 
   return mapped_file;
 }
 
-namespace private_internal_api {
-
-/// Get mapped file implementation.
-Result<MappedFlagValueFile> get_mapped_flag_value_file_impl(
-    std::string const& pb_file,
-    std::string const& container) {
-  auto file_result = find_storage_file(pb_file, container);
-  if (!file_result.ok()) {
-    return Error() << file_result.error();
-  }
-  auto mapped_result = map_storage_file(*file_result);
-  if (!mapped_result.ok()) {
-    return Error() << "failed to map " << *file_result << ": "
-                   << mapped_result.error();
-  }
-  return *mapped_result;
-}
-
-} // namespace private internal api
-
-/// Get mapped writeable flag value file
-Result<MappedFlagValueFile> get_mapped_flag_value_file(
-    std::string const& container) {
-  return private_internal_api::get_mapped_flag_value_file_impl(
-      kPersistStorageRecordsPb, container);
-}
-
 /// Set boolean flag value
-Result<void> set_boolean_flag_value(
-    const MappedFlagValueFile& file,
+android::base::Result<void> set_boolean_flag_value(
+    const MutableMappedStorageFile &file,
     uint32_t offset,
     bool value) {
   auto content = rust::Slice<uint8_t>(
-      static_cast<uint8_t*>(file.file_ptr), file.file_size);
+      static_cast<uint8_t *>(file.file_ptr), file.file_size);
   auto update_cxx = update_boolean_flag_value_cxx(content, offset, value);
   if (!update_cxx.update_success) {
-    return Error() << std::string(update_cxx.error_message.c_str());
+    return android::base::Error() << update_cxx.error_message.c_str();
+  }
+  // msync needs a page aligned address, so sync the whole (small) mapping.
+  if (msync(file.file_ptr, file.file_size, MS_SYNC) == -1) {
+    return android::base::ErrnoError() << "msync failed";
+  }
   return {};
 }
 
-Result<void> create_flag_info(
+/// Set if flag has server override
+android::base::Result<void> set_flag_has_server_override(
+    const MutableMappedStorageFile &file,
+    FlagValueType value_type,
+    uint32_t offset,
+    bool value) {
+  auto content = rust::Slice<uint8_t>(
+      static_cast<uint8_t *>(file.file_ptr), file.file_size);
+  auto update_cxx = update_flag_has_server_override_cxx(
+      content, static_cast<uint16_t>(value_type), offset, value);
+  if (!update_cxx.update_success) {
+    return android::base::Error() << update_cxx.error_message.c_str();
+  }
+  // msync needs a page aligned address, so sync the whole (small) mapping.
+  if (msync(file.file_ptr, file.file_size, MS_SYNC) == -1) {
+    return android::base::ErrnoError() << "msync failed";
+  }
+  return {};
+}
+
+/// Set if flag has local override
+android::base::Result<void> set_flag_has_local_override(
+    const MutableMappedStorageFile &file,
+    FlagValueType value_type,
+    uint32_t offset,
+    bool value) {
+  auto content = rust::Slice<uint8_t>(
+      static_cast<uint8_t *>(file.file_ptr), file.file_size);
+  auto update_cxx = update_flag_has_local_override_cxx(
+      content, static_cast<uint16_t>(value_type), offset, value);
+  if (!update_cxx.update_success) {
+    return android::base::Error() << update_cxx.error_message.c_str();
+  }
+  // msync needs a page aligned address, so sync the whole (small) mapping.
+  if (msync(file.file_ptr, file.file_size, MS_SYNC) == -1) {
+    return android::base::ErrnoError() << "msync failed";
+  }
+  return {};
+}
+
+android::base::Result<void> create_flag_info(
     std::string const& package_map,
     std::string const& flag_map,
     std::string const& flag_info_out) {
diff --git a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
index b652510..0bba7ff 100644
--- a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
+++ b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
@@ -4,33 +4,35 @@
 #include <string>
 
 #include <android-base/result.h>
+#include <aconfig_storage/aconfig_storage_read_api.hpp>
 
-using namespace android::base;
 
 namespace aconfig_storage {
 
 /// Mapped flag value file
-struct MappedFlagValueFile{
-  void* file_ptr;
-  size_t file_size;
-};
+struct MutableMappedStorageFile : MappedStorageFile {};
 
-/// DO NOT USE APIS IN THE FOLLOWING NAMESPACE DIRECTLY
-namespace private_internal_api {
-
-Result<MappedFlagValueFile> get_mapped_flag_value_file_impl(
-    std::string const& pb_file,
-    std::string const& container);
-
-} // namespace private_internal_api
-
-/// Get mapped writeable flag value file
-Result<MappedFlagValueFile> get_mapped_flag_value_file(
-    std::string const& container);
+/// Map a storage file
+android::base::Result<MutableMappedStorageFile*> map_mutable_storage_file(
+    std::string const& file);
 
 /// Set boolean flag value
-Result<void> set_boolean_flag_value(
-    const MappedFlagValueFile& file,
+android::base::Result<void> set_boolean_flag_value(
+    const MutableMappedStorageFile& file,
+    uint32_t offset,
+    bool value);
+
+/// Set if flag has server override
+android::base::Result<void> set_flag_has_server_override(
+    const MutableMappedStorageFile& file,
+    FlagValueType value_type,
+    uint32_t offset,
+    bool value);
+
+/// Set if flag has local override
+android::base::Result<void> set_flag_has_local_override(
+    const MutableMappedStorageFile& file,
+    FlagValueType value_type,
     uint32_t offset,
     bool value);
 
@@ -38,7 +40,7 @@
 /// \input package_map: package map file
 /// \input flag_map: flag map file
 /// \input flag_info_out: flag info file to be created
-Result<void> create_flag_info(
+android::base::Result<void> create_flag_info(
     std::string const& package_map,
     std::string const& flag_map,
     std::string const& flag_info_out);
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
new file mode 100644
index 0000000..7e60713
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! flag info update module defines how flag info attributes are written into the mapped bytes
+
+use aconfig_storage_file::{
+    read_u8_from_bytes, AconfigStorageError, FlagInfoBit, FlagInfoHeader, FlagValueType,
+    FILE_VERSION,
+};
+use anyhow::anyhow;
+
+fn get_flag_info_offset(
+    buf: &mut [u8],
+    flag_type: FlagValueType,
+    flag_index: u32,
+) -> Result<usize, AconfigStorageError> {
+    let interpreted_header = FlagInfoHeader::from_bytes(buf)?;
+    if interpreted_header.version > FILE_VERSION {
+        return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
+            "Cannot write to storage file with a higher version of {} with lib version {}",
+            interpreted_header.version,
+            FILE_VERSION
+        )));
+    }
+
+    // get byte offset to the flag info
+    let head = match flag_type {
+        FlagValueType::Boolean => (interpreted_header.boolean_flag_offset + flag_index) as usize,
+    };
+
+    if head >= interpreted_header.file_size as usize {
+        return Err(AconfigStorageError::InvalidStorageFileOffset(anyhow!(
+            "Flag value offset goes beyond the end of the file."
+        )));
+    }
+
+    Ok(head)
+}
+
+fn get_flag_attribute_and_offset(
+    buf: &mut [u8],
+    flag_type: FlagValueType,
+    flag_index: u32,
+) -> Result<(u8, usize), AconfigStorageError> {
+    let head = get_flag_info_offset(buf, flag_type, flag_index)?;
+    let mut pos = head;
+    let attribute = read_u8_from_bytes(buf, &mut pos)?;
+    Ok((attribute, head))
+}
+
+/// Set if flag has server override
+pub fn update_flag_has_server_override(
+    buf: &mut [u8],
+    flag_type: FlagValueType,
+    flag_index: u32,
+    value: bool,
+) -> Result<usize, AconfigStorageError> {
+    let (attribute, head) = get_flag_attribute_and_offset(buf, flag_type, flag_index)?;
+    let has_override = (attribute & (FlagInfoBit::HasServerOverride as u8)) != 0;
+    if has_override != value {
+        buf[head] = (attribute ^ FlagInfoBit::HasServerOverride as u8).to_le_bytes()[0];
+    }
+    Ok(head)
+}
+
+/// Set if flag has local override
+pub fn update_flag_has_local_override(
+    buf: &mut [u8],
+    flag_type: FlagValueType,
+    flag_index: u32,
+    value: bool,
+) -> Result<usize, AconfigStorageError> {
+    let (attribute, head) = get_flag_attribute_and_offset(buf, flag_type, flag_index)?;
+    let has_override = (attribute & (FlagInfoBit::HasLocalOverride as u8)) != 0;
+    if has_override != value {
+        buf[head] = (attribute ^ FlagInfoBit::HasLocalOverride as u8).to_le_bytes()[0];
+    }
+    Ok(head)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use aconfig_storage_file::test_utils::create_test_flag_info_list;
+    use aconfig_storage_read_api::flag_info_query::find_flag_attribute;
+
+    #[test]
+    // this test point locks down has server override update
+    fn test_update_flag_has_server_override() {
+        let flag_info_list = create_test_flag_info_list();
+        let mut buf = flag_info_list.into_bytes();
+        for i in 0..flag_info_list.header.num_flags {
+            update_flag_has_server_override(&mut buf, FlagValueType::Boolean, i, true).unwrap();
+            let attribute = find_flag_attribute(&buf, FlagValueType::Boolean, i).unwrap();
+            assert!((attribute & (FlagInfoBit::HasServerOverride as u8)) != 0);
+            update_flag_has_server_override(&mut buf, FlagValueType::Boolean, i, false).unwrap();
+            let attribute = find_flag_attribute(&buf, FlagValueType::Boolean, i).unwrap();
+            assert!((attribute & (FlagInfoBit::HasServerOverride as u8)) == 0);
+        }
+    }
+
+    #[test]
+    // this test point locks down has local override update
+    fn test_update_flag_has_local_override() {
+        let flag_info_list = create_test_flag_info_list();
+        let mut buf = flag_info_list.into_bytes();
+        for i in 0..flag_info_list.header.num_flags {
+            update_flag_has_local_override(&mut buf, FlagValueType::Boolean, i, true).unwrap();
+            let attribute = find_flag_attribute(&buf, FlagValueType::Boolean, i).unwrap();
+            assert!((attribute & (FlagInfoBit::HasLocalOverride as u8)) != 0);
+            update_flag_has_local_override(&mut buf, FlagValueType::Boolean, i, false).unwrap();
+            let attribute = find_flag_attribute(&buf, FlagValueType::Boolean, i).unwrap();
+            assert!((attribute & (FlagInfoBit::HasLocalOverride as u8)) == 0);
+        }
+    }
+}
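
The two setters above only flip the attribute byte when the stored bit differs from the
requested state, so repeated calls are idempotent. A tiny standalone illustration of that
XOR toggle (plain Rust; the mask is a stand-in value, not the real FlagInfoBit constant):

    fn set_bit(attribute: u8, mask: u8, desired: bool) -> u8 {
        // Flip the bit only when its current state does not match the desired one,
        // mirroring update_flag_has_server_override / update_flag_has_local_override.
        if ((attribute & mask) != 0) != desired {
            attribute ^ mask
        } else {
            attribute
        }
    }

    fn main() {
        let mask = 1u8 << 1; // hypothetical stand-in for FlagInfoBit::HasServerOverride
        assert_eq!(set_bit(0b0000_0000, mask, true), 0b0000_0010);
        assert_eq!(set_bit(0b0000_0010, mask, true), 0b0000_0010); // already set: unchanged
        assert_eq!(set_bit(0b0000_0010, mask, false), 0b0000_0000);
    }
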
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
index 4cb7939..dd15c99 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
@@ -22,9 +22,9 @@
 /// Set flag value
 pub fn update_boolean_flag_value(
     buf: &mut [u8],
-    flag_offset: u32,
+    flag_index: u32,
     flag_value: bool,
-) -> Result<(), AconfigStorageError> {
+) -> Result<usize, AconfigStorageError> {
     let interpreted_header = FlagValueHeader::from_bytes(buf)?;
     if interpreted_header.version > FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
@@ -34,10 +34,8 @@
         )));
     }
 
-    let head = (interpreted_header.boolean_value_offset + flag_offset) as usize;
-
-    // TODO: right now, there is only boolean flags, with more flag value types added
-    // later, the end of boolean flag value section should be updated (b/322826265).
+    // get byte offset to the flag
+    let head = (interpreted_header.boolean_value_offset + flag_index) as usize;
     if head >= interpreted_header.file_size as usize {
         return Err(AconfigStorageError::InvalidStorageFileOffset(anyhow!(
             "Flag value offset goes beyond the end of the file."
@@ -45,26 +43,13 @@
     }
 
     buf[head] = u8::from(flag_value).to_le_bytes()[0];
-    Ok(())
+    Ok(head)
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::{FlagValueList, StorageFileType};
-
-    pub fn create_test_flag_value_list() -> FlagValueList {
-        let header = FlagValueHeader {
-            version: FILE_VERSION,
-            container: String::from("system"),
-            file_type: StorageFileType::FlagVal as u8,
-            file_size: 35,
-            num_flags: 8,
-            boolean_value_offset: 27,
-        };
-        let booleans: Vec<bool> = vec![false; 8];
-        FlagValueList { header, booleans }
-    }
+    use aconfig_storage_file::test_utils::create_test_flag_value_list;
 
     #[test]
     // this test point locks down flag value update
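
update_boolean_flag_value now reports the absolute byte offset it wrote instead of (), and
that offset is surfaced to the C++ side through the *UpdateCXX structs in lib.rs. A short
sketch of consuming the return value, reusing the shared create_test_flag_value_list
helper this test module now imports (assuming into_bytes() on the returned list, as with
the other shared fixtures):

    use aconfig_storage_file::test_utils::create_test_flag_value_list;
    use aconfig_storage_write_api::flag_value_update::update_boolean_flag_value;

    fn main() {
        let mut buf = create_test_flag_value_list().into_bytes();
        // The returned offset is boolean_value_offset + flag_index, i.e. the exact
        // byte in the mapped file that now holds the updated value.
        let head = update_boolean_flag_value(&mut buf, 3, true).unwrap();
        assert_eq!(buf[head], 1);
    }
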
diff --git a/tools/aconfig/aconfig_storage_write_api/src/lib.rs b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
index 678bbd5..0396a63 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
@@ -17,6 +17,7 @@
 //! `aconfig_storage_write_api` is a crate that defines write apis to update flag value
 //! in storage file. It provides one api to interface with storage files.
 
+pub mod flag_info_update;
 pub mod flag_value_update;
 pub mod mapped_file;
 
@@ -24,8 +25,8 @@
 mod test_utils;
 
 use aconfig_storage_file::{
-    AconfigStorageError, FlagInfoHeader, FlagInfoList, FlagInfoNode, FlagTable, PackageTable,
-    StorageFileType, StoredFlagType, FILE_VERSION,
+    AconfigStorageError, FlagInfoHeader, FlagInfoList, FlagInfoNode, FlagTable, FlagValueType,
+    PackageTable, StorageFileType, StoredFlagType, FILE_VERSION,
 };
 
 use anyhow::anyhow;
@@ -33,14 +34,9 @@
 use std::fs::File;
 use std::io::{Read, Write};
 
-/// Storage file location pb file
-pub const STORAGE_LOCATION_FILE: &str = "/metadata/aconfig/persistent_storage_file_records.pb";
-
-/// Get mmaped flag value file given the container name
+/// Get a read write mapped storage file.
 ///
-/// \input container: the flag package container
-/// \return a result of mapped file
-///
+/// \input file_path: path to the storage file
 ///
 /// # Safety
 ///
@@ -48,23 +44,61 @@
 /// file not thru this memory mapped file or there are concurrent writes to this
 /// memory mapped file. Ensure all writes to the underlying file are thru this memory
 /// mapped file and there are no concurrent writes.
-pub unsafe fn get_mapped_flag_value_file(container: &str) -> Result<MmapMut, AconfigStorageError> {
-    unsafe { crate::mapped_file::get_mapped_file(STORAGE_LOCATION_FILE, container) }
+pub unsafe fn map_mutable_storage_file(file_path: &str) -> Result<MmapMut, AconfigStorageError> {
+    crate::mapped_file::map_file(file_path)
 }
 
 /// Set boolean flag value thru mapped file and flush the change to file
 ///
 /// \input mapped_file: the mapped flag value file
-/// \input offset: flag value offset
+/// \input index: flag index
 /// \input value: updated flag value
 /// \return a result of ()
 ///
 pub fn set_boolean_flag_value(
     file: &mut MmapMut,
-    offset: u32,
+    index: u32,
     value: bool,
 ) -> Result<(), AconfigStorageError> {
-    crate::flag_value_update::update_boolean_flag_value(file, offset, value)?;
+    crate::flag_value_update::update_boolean_flag_value(file, index, value)?;
+    file.flush().map_err(|errmsg| {
+        AconfigStorageError::MapFlushFail(anyhow!("fail to flush storage file: {}", errmsg))
+    })
+}
+
+/// Set if flag has server override thru mapped file and flush the change to file
+///
+/// \input mapped_file: the mapped flag info file
+/// \input flag_type: flag value type
+/// \input index: flag index
+/// \input value: updated flag has server override value
+/// \return a result of ()
+///
+pub fn set_flag_has_server_override(
+    file: &mut MmapMut,
+    flag_type: FlagValueType,
+    index: u32,
+    value: bool,
+) -> Result<(), AconfigStorageError> {
+    crate::flag_info_update::update_flag_has_server_override(file, flag_type, index, value)?;
+    file.flush().map_err(|errmsg| {
+        AconfigStorageError::MapFlushFail(anyhow!("fail to flush storage file: {}", errmsg))
+    })
+}
+
+/// Set if flag has local override thru mapped file and flush the change to file
+///
+/// \input mapped_file: the mapped flag info file
+/// \input flag_type: flag value type
+/// \input index: flag index
+/// \input value: updated flag has local override value
+/// \return a result of ()
+///
+pub fn set_flag_has_local_override(
+    file: &mut MmapMut,
+    flag_type: FlagValueType,
+    index: u32,
+    value: bool,
+) -> Result<(), AconfigStorageError> {
+    crate::flag_info_update::update_flag_has_local_override(file, flag_type, index, value)?;
     file.flush().map_err(|errmsg| {
         AconfigStorageError::MapFlushFail(anyhow!("fail to flush storage file: {}", errmsg))
     })
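
Taken together, the new public write flow is: map the persist file by path, then update
values or override bits through the mapping, with each setter flushing the change back to
the underlying file. A minimal usage sketch under the same safety contract stated above
(the path is a placeholder; error propagation assumes AconfigStorageError implements
std::error::Error via thiserror):

    use aconfig_storage_file::FlagValueType;
    use aconfig_storage_write_api::{map_mutable_storage_file, set_flag_has_server_override};

    fn mark_server_override(flag_info_path: &str) -> Result<(), Box<dyn std::error::Error>> {
        // SAFETY: all writes to this file go through this single mapping and there are
        // no concurrent writers.
        let mut file = unsafe { map_mutable_storage_file(flag_info_path)? };
        // Set the "has server override" bit of the first boolean flag and flush it.
        set_flag_has_server_override(&mut file, FlagValueType::Boolean, 0, true)?;
        Ok(())
    }
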
@@ -106,15 +140,15 @@
         )));
     }
 
-    let mut package_offsets = vec![0; package_table.header.num_packages as usize];
+    let mut package_start_index = vec![0; package_table.header.num_packages as usize];
     for node in package_table.nodes.iter() {
-        package_offsets[node.package_id as usize] = node.boolean_offset;
+        package_start_index[node.package_id as usize] = node.boolean_start_index;
     }
 
     let mut is_flag_rw = vec![false; flag_table.header.num_flags as usize];
     for node in flag_table.nodes.iter() {
-        let flag_offset = package_offsets[node.package_id as usize] + node.flag_id as u32;
-        is_flag_rw[flag_offset as usize] = node.flag_type == StoredFlagType::ReadWriteBoolean;
+        let flag_index = package_start_index[node.package_id as usize] + node.flag_index as u32;
+        is_flag_rw[flag_index as usize] = node.flag_type == StoredFlagType::ReadWriteBoolean;
     }
 
     let mut list = FlagInfoList {
@@ -160,6 +194,21 @@
     // Flag value update return for cc interop
     pub struct BooleanFlagValueUpdateCXX {
         pub update_success: bool,
+        pub offset: usize,
+        pub error_message: String,
+    }
+
+    // Flag has server override update return for cc interop
+    pub struct FlagHasServerOverrideUpdateCXX {
+        pub update_success: bool,
+        pub offset: usize,
+        pub error_message: String,
+    }
+
+    // Flag has local override update return for cc interop
+    pub struct FlagHasLocalOverrideUpdateCXX {
+        pub update_success: bool,
+        pub offset: usize,
         pub error_message: String,
     }
 
@@ -177,6 +226,20 @@
             value: bool,
         ) -> BooleanFlagValueUpdateCXX;
 
+        pub fn update_flag_has_server_override_cxx(
+            file: &mut [u8],
+            flag_type: u16,
+            offset: u32,
+            value: bool,
+        ) -> FlagHasServerOverrideUpdateCXX;
+
+        pub fn update_flag_has_local_override_cxx(
+            file: &mut [u8],
+            flag_type: u16,
+            offset: u32,
+            value: bool,
+        ) -> FlagHasLocalOverrideUpdateCXX;
+
         pub fn create_flag_info_cxx(
             package_map: &str,
             flag_map: &str,
@@ -191,11 +254,76 @@
     value: bool,
 ) -> ffi::BooleanFlagValueUpdateCXX {
     match crate::flag_value_update::update_boolean_flag_value(file, offset, value) {
-        Ok(()) => {
-            ffi::BooleanFlagValueUpdateCXX { update_success: true, error_message: String::from("") }
-        }
+        Ok(head) => ffi::BooleanFlagValueUpdateCXX {
+            update_success: true,
+            offset: head,
+            error_message: String::from(""),
+        },
         Err(errmsg) => ffi::BooleanFlagValueUpdateCXX {
             update_success: false,
+            offset: usize::MAX,
+            error_message: format!("{:?}", errmsg),
+        },
+    }
+}
+
+pub(crate) fn update_flag_has_server_override_cxx(
+    file: &mut [u8],
+    flag_type: u16,
+    offset: u32,
+    value: bool,
+) -> ffi::FlagHasServerOverrideUpdateCXX {
+    match FlagValueType::try_from(flag_type) {
+        Ok(value_type) => {
+            match crate::flag_info_update::update_flag_has_server_override(
+                file, value_type, offset, value,
+            ) {
+                Ok(head) => ffi::FlagHasServerOverrideUpdateCXX {
+                    update_success: true,
+                    offset: head,
+                    error_message: String::from(""),
+                },
+                Err(errmsg) => ffi::FlagHasServerOverrideUpdateCXX {
+                    update_success: false,
+                    offset: usize::MAX,
+                    error_message: format!("{:?}", errmsg),
+                },
+            }
+        }
+        Err(errmsg) => ffi::FlagHasServerOverrideUpdateCXX {
+            update_success: false,
+            offset: usize::MAX,
+            error_message: format!("{:?}", errmsg),
+        },
+    }
+}
+
+pub(crate) fn update_flag_has_local_override_cxx(
+    file: &mut [u8],
+    flag_type: u16,
+    offset: u32,
+    value: bool,
+) -> ffi::FlagHasLocalOverrideUpdateCXX {
+    match FlagValueType::try_from(flag_type) {
+        Ok(value_type) => {
+            match crate::flag_info_update::update_flag_has_local_override(
+                file, value_type, offset, value,
+            ) {
+                Ok(head) => ffi::FlagHasLocalOverrideUpdateCXX {
+                    update_success: true,
+                    offset: head,
+                    error_message: String::from(""),
+                },
+                Err(errmsg) => ffi::FlagHasLocalOverrideUpdateCXX {
+                    update_success: false,
+                    offset: usize::MAX,
+                    error_message: format!("{:?}", errmsg),
+                },
+            }
+        }
+        Err(errmsg) => ffi::FlagHasLocalOverrideUpdateCXX {
+            update_success: false,
+            offset: usize::MAX,
             error_message: format!("{:?}", errmsg),
         },
     }
@@ -219,11 +347,12 @@
 mod tests {
     use super::*;
     use crate::test_utils::copy_to_temp_file;
-    use aconfig_storage_file::protos::storage_record_pb::write_proto_to_temp_file;
     use aconfig_storage_file::test_utils::{
         create_test_flag_info_list, create_test_flag_table, create_test_package_table,
         write_bytes_to_temp_file,
     };
+    use aconfig_storage_file::FlagInfoBit;
+    use aconfig_storage_read_api::flag_info_query::find_flag_attribute;
     use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
     use std::fs::File;
     use std::io::Read;
@@ -240,27 +369,12 @@
     fn test_set_boolean_flag_value() {
         let flag_value_file = copy_to_temp_file("./tests/flag.val", false).unwrap();
         let flag_value_path = flag_value_file.path().display().to_string();
-        let text_proto = format!(
-            r#"
-files {{
-    version: 0
-    container: "system"
-    package_map: "some_package.map"
-    flag_map: "some_flag.map"
-    flag_val: "{}"
-    timestamp: 12345
-}}
-"#,
-            flag_value_path
-        );
-        let record_pb_file = write_proto_to_temp_file(&text_proto).unwrap();
-        let record_pb_path = record_pb_file.path().display().to_string();
 
         // SAFETY:
         // The safety here is guaranteed as only this single threaded test process will
         // write to this file
         unsafe {
-            let mut file = crate::mapped_file::get_mapped_file(&record_pb_path, "system").unwrap();
+            let mut file = map_mutable_storage_file(&flag_value_path).unwrap();
             for i in 0..8 {
                 set_boolean_flag_value(&mut file, i, true).unwrap();
                 let value = get_boolean_flag_value_at_offset(&flag_value_path, i);
@@ -273,6 +387,59 @@
         }
     }
 
+    fn get_flag_attribute_at_offset(file: &str, value_type: FlagValueType, offset: u32) -> u8 {
+        let mut f = File::open(&file).unwrap();
+        let mut bytes = Vec::new();
+        f.read_to_end(&mut bytes).unwrap();
+        find_flag_attribute(&bytes, value_type, offset).unwrap()
+    }
+
+    #[test]
+    fn test_set_flag_has_server_override() {
+        let flag_info_file = copy_to_temp_file("./tests/flag.info", false).unwrap();
+        let flag_info_path = flag_info_file.path().display().to_string();
+
+        // SAFETY:
+        // The safety here is guaranteed as only this single threaded test process will
+        // write to this file
+        unsafe {
+            let mut file = map_mutable_storage_file(&flag_info_path).unwrap();
+            for i in 0..8 {
+                set_flag_has_server_override(&mut file, FlagValueType::Boolean, i, true).unwrap();
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                assert!((attribute & (FlagInfoBit::HasServerOverride as u8)) != 0);
+                set_flag_has_server_override(&mut file, FlagValueType::Boolean, i, false).unwrap();
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                assert!((attribute & (FlagInfoBit::HasServerOverride as u8)) == 0);
+            }
+        }
+    }
+
+    #[test]
+    fn test_set_flag_has_local_override() {
+        let flag_info_file = copy_to_temp_file("./tests/flag.info", false).unwrap();
+        let flag_info_path = flag_info_file.path().display().to_string();
+
+        // SAFETY:
+        // The safety here is guaranteed as only this single threaded test process will
+        // write to this file
+        unsafe {
+            let mut file = map_mutable_storage_file(&flag_info_path).unwrap();
+            for i in 0..8 {
+                set_flag_has_local_override(&mut file, FlagValueType::Boolean, i, true).unwrap();
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                assert!((attribute & (FlagInfoBit::HasLocalOverride as u8)) != 0);
+                set_flag_has_local_override(&mut file, FlagValueType::Boolean, i, false).unwrap();
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                assert!((attribute & (FlagInfoBit::HasLocalOverride as u8)) == 0);
+            }
+        }
+    }
+
     fn create_empty_temp_file() -> Result<NamedTempFile, AconfigStorageError> {
         let file = NamedTempFile::new().map_err(|_| {
             AconfigStorageError::FileCreationFail(anyhow!("Failed to create temp file"))
diff --git a/tools/aconfig/aconfig_storage_write_api/src/mapped_file.rs b/tools/aconfig/aconfig_storage_write_api/src/mapped_file.rs
index 4c98be4..401d6b7 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/mapped_file.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/mapped_file.rs
@@ -14,46 +14,13 @@
  * limitations under the License.
  */
 
-use std::fs::{self, File, OpenOptions};
-use std::io::{BufReader, Read};
-
 use anyhow::anyhow;
 use memmap2::MmapMut;
+use std::fs::{self, OpenOptions};
 
-use aconfig_storage_file::protos::{storage_record_pb::try_from_binary_proto, ProtoStorageFiles};
-use aconfig_storage_file::AconfigStorageError::{
-    self, FileReadFail, MapFileFail, ProtobufParseFail, StorageFileNotFound,
-};
+use aconfig_storage_file::AconfigStorageError::{self, FileReadFail, MapFileFail};
 
-/// Find where persistent storage value file is for a particular container
-fn find_persist_flag_value_file(
-    location_pb_file: &str,
-    container: &str,
-) -> Result<String, AconfigStorageError> {
-    let file = File::open(location_pb_file).map_err(|errmsg| {
-        FileReadFail(anyhow!("Failed to open file {}: {}", location_pb_file, errmsg))
-    })?;
-    let mut reader = BufReader::new(file);
-    let mut bytes = Vec::new();
-    reader.read_to_end(&mut bytes).map_err(|errmsg| {
-        FileReadFail(anyhow!("Failed to read file {}: {}", location_pb_file, errmsg))
-    })?;
-    let storage_locations: ProtoStorageFiles = try_from_binary_proto(&bytes).map_err(|errmsg| {
-        ProtobufParseFail(anyhow!(
-            "Failed to parse storage location pb file {}: {}",
-            location_pb_file,
-            errmsg
-        ))
-    })?;
-    for location_info in storage_locations.files.iter() {
-        if location_info.container() == container {
-            return Ok(location_info.flag_val().to_string());
-        }
-    }
-    Err(StorageFileNotFound(anyhow!("Persistent flag value file does not exist for {}", container)))
-}
-
-/// Get a mapped storage file given the container and file type
+/// Get the mutable memory mapping of a storage file
 ///
 /// # Safety
 ///
@@ -61,27 +28,23 @@
 /// file not thru this memory mapped file or there are concurrent writes to this
 /// memory mapped file. Ensure all writes to the underlying file are thru this memory
 /// mapped file and there are no concurrent writes.
-pub unsafe fn get_mapped_file(
-    location_pb_file: &str,
-    container: &str,
-) -> Result<MmapMut, AconfigStorageError> {
-    let file_path = find_persist_flag_value_file(location_pb_file, container)?;
-
+pub(crate) unsafe fn map_file(file_path: &str) -> Result<MmapMut, AconfigStorageError> {
     // make sure file has read write permission
-    let perms = fs::metadata(&file_path).unwrap().permissions();
+    let perms = fs::metadata(file_path).unwrap().permissions();
     if perms.readonly() {
         return Err(MapFileFail(anyhow!("fail to map non read write storage file {}", file_path)));
     }
 
     let file =
-        OpenOptions::new().read(true).write(true).open(&file_path).map_err(|errmsg| {
+        OpenOptions::new().read(true).write(true).open(file_path).map_err(|errmsg| {
             FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg))
         })?;
 
     unsafe {
-        MmapMut::map_mut(&file).map_err(|errmsg| {
+        let mapped_file = MmapMut::map_mut(&file).map_err(|errmsg| {
             MapFileFail(anyhow!("fail to map storage file {}: {}", file_path, errmsg))
-        })
+        })?;
+        Ok(mapped_file)
     }
 }
 
@@ -89,100 +52,48 @@
 mod tests {
     use super::*;
     use crate::test_utils::copy_to_temp_file;
-    use aconfig_storage_file::protos::storage_record_pb::write_proto_to_temp_file;
-
-    #[test]
-    fn test_find_persist_flag_value_file_location() {
-        let text_proto = r#"
-files {
-    version: 0
-    container: "system"
-    package_map: "/system/etc/package.map"
-    flag_map: "/system/etc/flag.map"
-    flag_val: "/metadata/aconfig/system.val"
-    timestamp: 12345
-}
-files {
-    version: 1
-    container: "product"
-    package_map: "/product/etc/package.map"
-    flag_map: "/product/etc/flag.map"
-    flag_val: "/metadata/aconfig/product.val"
-    timestamp: 54321
-}
-"#;
-        let file = write_proto_to_temp_file(&text_proto).unwrap();
-        let file_full_path = file.path().display().to_string();
-        let flag_value_file = find_persist_flag_value_file(&file_full_path, "system").unwrap();
-        assert_eq!(flag_value_file, "/metadata/aconfig/system.val");
-        let flag_value_file = find_persist_flag_value_file(&file_full_path, "product").unwrap();
-        assert_eq!(flag_value_file, "/metadata/aconfig/product.val");
-        let err = find_persist_flag_value_file(&file_full_path, "vendor").unwrap_err();
-        assert_eq!(
-            format!("{:?}", err),
-            "StorageFileNotFound(Persistent flag value file does not exist for vendor)"
-        );
-    }
+    use std::io::Read;
 
     #[test]
     fn test_mapped_file_contents() {
-        let mut rw_file = copy_to_temp_file("./tests/flag.val", false).unwrap();
-        let text_proto = format!(
-            r#"
-files {{
-    version: 0
-    container: "system"
-    package_map: "some_package.map"
-    flag_map: "some_flag.map"
-    flag_val: "{}"
-    timestamp: 12345
-}}
-"#,
-            rw_file.path().display().to_string()
-        );
-        let storage_record_file = write_proto_to_temp_file(&text_proto).unwrap();
-        let storage_record_file_path = storage_record_file.path().display().to_string();
+        let mut rw_val_file = copy_to_temp_file("./tests/flag.val", false).unwrap();
+        let mut rw_info_file = copy_to_temp_file("./tests/flag.info", false).unwrap();
+        let flag_val = rw_val_file.path().display().to_string();
+        let flag_info = rw_info_file.path().display().to_string();
 
         let mut content = Vec::new();
-        rw_file.read_to_end(&mut content).unwrap();
+        rw_val_file.read_to_end(&mut content).unwrap();
 
         // SAFETY:
         // The safety here is guaranteed here as no writes happens to this temp file
         unsafe {
-            let mmaped_file = get_mapped_file(&storage_record_file_path, "system").unwrap();
+            let mmaped_file = map_file(&flag_val).unwrap();
+            assert_eq!(mmaped_file[..], content[..]);
+        }
+
+        let mut content = Vec::new();
+        rw_info_file.read_to_end(&mut content).unwrap();
+
+        // SAFETY:
+        // The safety here is guaranteed as no writes happen to this temp file
+        unsafe {
+            let mmaped_file = map_file(&flag_info).unwrap();
             assert_eq!(mmaped_file[..], content[..]);
         }
     }
 
     #[test]
     fn test_mapped_read_only_file() {
-        let ro_file = copy_to_temp_file("./tests/flag.val", true).unwrap();
-        let text_proto = format!(
-            r#"
-files {{
-    version: 0
-    container: "system"
-    package_map: "some_package.map"
-    flag_map: "some_flag.map"
-    flag_val: "{}"
-    timestamp: 12345
-}}
-"#,
-            ro_file.path().display().to_string()
-        );
-        let storage_record_file = write_proto_to_temp_file(&text_proto).unwrap();
-        let storage_record_file_path = storage_record_file.path().display().to_string();
+        let ro_val_file = copy_to_temp_file("./tests/flag.val", true).unwrap();
+        let flag_val = ro_val_file.path().display().to_string();
 
         // SAFETY:
         // The safety here is guaranteed here as no writes happens to this temp file
         unsafe {
-            let error = get_mapped_file(&storage_record_file_path, "system").unwrap_err();
+            let error = map_file(&flag_val).unwrap_err();
             assert_eq!(
                 format!("{:?}", error),
-                format!(
-                    "MapFileFail(fail to map non read write storage file {})",
-                    ro_file.path().display().to_string()
-                )
+                format!("MapFileFail(fail to map non read write storage file {})", flag_val)
             );
         }
     }
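
map_file (exposed as map_mutable_storage_file) rejects read-only files up front via the
metadata permission check rather than failing later inside MmapMut::map_mut. A small
sketch of that guard in isolation (the path is a placeholder):

    use std::fs;

    fn is_mappable_read_write(path: &str) -> std::io::Result<bool> {
        // Mirrors the permission check map_file performs before opening the file
        // with read + write access.
        Ok(!fs::metadata(path)?.permissions().readonly())
    }
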
diff --git a/tools/aconfig/aconfig_storage_write_api/tests/Android.bp b/tools/aconfig/aconfig_storage_write_api/tests/Android.bp
index 5b23dbc..5508dac 100644
--- a/tools/aconfig/aconfig_storage_write_api/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_write_api/tests/Android.bp
@@ -1,8 +1,7 @@
-
 rust_test {
     name: "aconfig_storage_write_api.test.rust",
     srcs: [
-        "storage_write_api_test.rs"
+        "storage_write_api_test.rs",
     ],
     rustlibs: [
         "libanyhow",
@@ -14,6 +13,7 @@
     ],
     data: [
         "flag.val",
+        "flag.info",
     ],
     test_suites: ["general-tests"],
 }
@@ -25,8 +25,6 @@
     ],
     static_libs: [
         "libgmock",
-        "libaconfig_storage_protos_cc",
-        "libprotobuf-cpp-lite",
         "libaconfig_storage_read_api_cc",
         "libaconfig_storage_write_api_cc",
         "libbase",
@@ -34,10 +32,16 @@
     ],
     data: [
         "flag.val",
+        "flag.info",
     ],
     test_suites: [
         "device-tests",
         "general-tests",
     ],
-    ldflags: ["-Wl,--allow-multiple-definition"],
+    generated_headers: [
+        "cxx-bridge-header",
+        "libcxx_aconfig_storage_read_api_bridge_header",
+    ],
+    generated_sources: ["libcxx_aconfig_storage_read_api_bridge_code"],
+    whole_static_libs: ["libaconfig_storage_read_api_cxx_bridge"],
 }
diff --git a/tools/aconfig/aconfig_storage_write_api/tests/flag.info b/tools/aconfig/aconfig_storage_write_api/tests/flag.info
new file mode 100644
index 0000000..6223edf
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_write_api/tests/flag.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp b/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp
index 00b737c..133f5a0 100644
--- a/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp
+++ b/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp
@@ -22,11 +22,12 @@
 #include "aconfig_storage/aconfig_storage_read_api.hpp"
 #include "aconfig_storage/aconfig_storage_write_api.hpp"
 #include <gtest/gtest.h>
-#include <protos/aconfig_storage_metadata.pb.h>
 #include <android-base/file.h>
 #include <android-base/result.h>
 
-using android::aconfig_storage_metadata::storage_files;
+#include "rust/cxx.h"
+#include "aconfig_storage/lib.rs.h"
+
 using namespace android::base;
 
 namespace api = aconfig_storage;
@@ -50,54 +51,25 @@
     return temp_file;
   }
 
-  Result<std::string> write_storage_location_pb_file(std::string const& flag_val) {
-    auto temp_file = std::tmpnam(nullptr);
-    auto proto = storage_files();
-    auto* info = proto.add_files();
-    info->set_version(0);
-    info->set_container("mockup");
-    info->set_package_map("some_package.map");
-    info->set_flag_map("some_flag.map");
-    info->set_flag_val(flag_val);
-    info->set_timestamp(12345);
-
-    auto content = std::string();
-    proto.SerializeToString(&content);
-    if (!WriteStringToFile(content, temp_file)) {
-      return Error() << "failed to write storage records pb file";
-    }
-    return temp_file;
-  }
-
   void SetUp() override {
     auto const test_dir = android::base::GetExecutableDirectory();
     flag_val = *copy_to_rw_temp_file(test_dir + "/flag.val");
-    storage_record_pb = *write_storage_location_pb_file(flag_val);
+    flag_info = *copy_to_rw_temp_file(test_dir + "/flag.info");
   }
 
   void TearDown() override {
     std::remove(flag_val.c_str());
-    std::remove(storage_record_pb.c_str());
+    std::remove(flag_info.c_str());
   }
 
   std::string flag_val;
-  std::string storage_record_pb;
+  std::string flag_info;
 };
 
-/// Negative test to lock down the error when mapping none exist storage files
-TEST_F(AconfigStorageTest, test_none_exist_storage_file_mapping) {
-  auto mapped_file_result = private_api::get_mapped_flag_value_file_impl(
-      storage_record_pb, "vendor");
-  ASSERT_FALSE(mapped_file_result.ok());
-  ASSERT_EQ(mapped_file_result.error().message(),
-            "Unable to find storage files for container vendor");
-}
-
 /// Negative test to lock down the error when mapping a non writeable storage file
 TEST_F(AconfigStorageTest, test_non_writable_storage_file_mapping) {
   ASSERT_TRUE(chmod(flag_val.c_str(), S_IRUSR | S_IRGRP | S_IROTH) != -1);
-  auto mapped_file_result = private_api::get_mapped_flag_value_file_impl(
-      storage_record_pb, "mockup");
+  auto mapped_file_result = api::map_mutable_storage_file(flag_val);
   ASSERT_FALSE(mapped_file_result.ok());
   auto it = mapped_file_result.error().message().find("cannot map nonwriteable file");
   ASSERT_TRUE(it != std::string::npos) << mapped_file_result.error().message();
@@ -105,31 +77,149 @@
 
 /// Test to lock down storage flag value update api
 TEST_F(AconfigStorageTest, test_boolean_flag_value_update) {
-  auto mapped_file_result = private_api::get_mapped_flag_value_file_impl(
-      storage_record_pb, "mockup");
+  auto mapped_file_result = api::map_mutable_storage_file(flag_val);
   ASSERT_TRUE(mapped_file_result.ok());
-  auto mapped_file = *mapped_file_result;
+  auto mapped_file = std::unique_ptr<api::MutableMappedStorageFile>(*mapped_file_result);
 
   for (int offset = 0; offset < 8; ++offset) {
-    auto update_result = api::set_boolean_flag_value(mapped_file, offset, true);
+    auto update_result = api::set_boolean_flag_value(*mapped_file, offset, true);
     ASSERT_TRUE(update_result.ok());
-    auto ro_mapped_file = api::MappedStorageFile();
-    ro_mapped_file.file_ptr = mapped_file.file_ptr;
-    ro_mapped_file.file_size = mapped_file.file_size;
-    auto value = api::get_boolean_flag_value(ro_mapped_file, offset);
+    auto value = api::get_boolean_flag_value(*mapped_file, offset);
     ASSERT_TRUE(value.ok());
     ASSERT_TRUE(*value);
   }
+
+  // load the file on disk and check it has been updated
+  std::ifstream file(flag_val, std::ios::binary | std::ios::ate);
+  std::streamsize size = file.tellg();
+  file.seekg(0, std::ios::beg);
+
+  std::vector<uint8_t> buffer(size);
+  file.read(reinterpret_cast<char *>(buffer.data()), size);
+
+  auto content = rust::Slice<const uint8_t>(
+      buffer.data(), mapped_file->file_size);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto value_cxx = get_boolean_flag_value_cxx(content, offset);
+    ASSERT_TRUE(value_cxx.query_success);
+    ASSERT_TRUE(value_cxx.flag_value);
+  }
 }
 
 /// Negative test to lock down the error when querying flag value out of range
 TEST_F(AconfigStorageTest, test_invalid_boolean_flag_value_update) {
-  auto mapped_file_result = private_api::get_mapped_flag_value_file_impl(
-      storage_record_pb, "mockup");
+  auto mapped_file_result = api::map_mutable_storage_file(flag_val);
   ASSERT_TRUE(mapped_file_result.ok());
-  auto mapped_file = *mapped_file_result;
-  auto update_result = api::set_boolean_flag_value(mapped_file, 8, true);
+  auto mapped_file = std::unique_ptr<api::MutableMappedStorageFile>(*mapped_file_result);
+  auto update_result = api::set_boolean_flag_value(*mapped_file, 8, true);
   ASSERT_FALSE(update_result.ok());
   ASSERT_EQ(update_result.error().message(),
             std::string("InvalidStorageFileOffset(Flag value offset goes beyond the end of the file.)"));
 }
+
+/// Test to lock down storage flag has server override update api
+TEST_F(AconfigStorageTest, test_flag_has_server_override_update) {
+  auto mapped_file_result = api::map_mutable_storage_file(flag_info);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MutableMappedStorageFile>(*mapped_file_result);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto update_result = api::set_flag_has_server_override(
+        *mapped_file, api::FlagValueType::Boolean, offset, true);
+    ASSERT_TRUE(update_result.ok()) << update_result.error();
+    auto attribute = api::get_flag_attribute(
+        *mapped_file, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.ok());
+    ASSERT_TRUE(*attribute & api::FlagInfoBit::HasServerOverride);
+  }
+
+  // load the file on disk and check it has been updated
+  std::ifstream file(flag_info, std::ios::binary | std::ios::ate);
+  std::streamsize size = file.tellg();
+  file.seekg(0, std::ios::beg);
+
+  std::vector<uint8_t> buffer(size);
+  file.read(reinterpret_cast<char *>(buffer.data()), size);
+
+  auto content = rust::Slice<const uint8_t>(
+      buffer.data(), mapped_file->file_size);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_TRUE(attribute.flag_attribute & api::FlagInfoBit::HasServerOverride);
+  }
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto update_result = api::set_flag_has_server_override(
+        *mapped_file, api::FlagValueType::Boolean, offset, false);
+    ASSERT_TRUE(update_result.ok());
+    auto attribute = api::get_flag_attribute(
+        *mapped_file, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.ok());
+    ASSERT_FALSE(*attribute & api::FlagInfoBit::HasServerOverride);
+  }
+
+  std::ifstream file2(flag_info, std::ios::binary);
+  // reuse the existing buffer: it already holds `size` bytes and `content` points into it
+  file2.read(reinterpret_cast<char *>(buffer.data()), size);
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_FALSE(attribute.flag_attribute & api::FlagInfoBit::HasServerOverride);
+  }
+}
+
+/// Test to lock down storage flag has local override update api
+TEST_F(AconfigStorageTest, test_flag_has_local_override_update) {
+  auto mapped_file_result = api::map_mutable_storage_file(flag_info);
+  ASSERT_TRUE(mapped_file_result.ok());
+  auto mapped_file = std::unique_ptr<api::MutableMappedStorageFile>(*mapped_file_result);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto update_result = api::set_flag_has_local_override(
+        *mapped_file, api::FlagValueType::Boolean, offset, true);
+    ASSERT_TRUE(update_result.ok());
+    auto attribute = api::get_flag_attribute(
+        *mapped_file, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.ok());
+    ASSERT_TRUE(*attribute & api::FlagInfoBit::HasLocalOverride);
+  }
+
+  // load the file on disk and check it has been updated
+  std::ifstream file(flag_info, std::ios::binary | std::ios::ate);
+  std::streamsize size = file.tellg();
+  file.seekg(0, std::ios::beg);
+
+  std::vector<uint8_t> buffer(size);
+  file.read(reinterpret_cast<char *>(buffer.data()), size);
+
+  auto content = rust::Slice<const uint8_t>(
+      buffer.data(), mapped_file->file_size);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_TRUE(attribute.flag_attribute & api::FlagInfoBit::HasLocalOverride);
+  }
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto update_result = api::set_flag_has_local_override(
+        *mapped_file, api::FlagValueType::Boolean, offset, false);
+    ASSERT_TRUE(update_result.ok());
+    auto attribute = api::get_flag_attribute(
+        *mapped_file, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.ok());
+    ASSERT_FALSE(*attribute & api::FlagInfoBit::HasLocalOverride);
+  }
+
+  std::ifstream file2(flag_info, std::ios::binary);
+  // reuse the existing buffer: it already holds `size` bytes and `content` points into it
+  file2.read(reinterpret_cast<char *>(buffer.data()), size);
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_FALSE(attribute.flag_attribute & api::FlagInfoBit::HasLocalOverride);
+  }
+}
diff --git a/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.rs b/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.rs
index 4bda54c..367569d 100644
--- a/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.rs
+++ b/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.rs
@@ -1,38 +1,17 @@
 #[cfg(not(feature = "cargo"))]
 mod aconfig_storage_write_api_test {
-    use aconfig_storage_file::protos::ProtoStorageFiles;
+    use aconfig_storage_file::{FlagInfoBit, FlagValueType};
+    use aconfig_storage_read_api::flag_info_query::find_flag_attribute;
     use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
-    use aconfig_storage_write_api::{mapped_file::get_mapped_file, set_boolean_flag_value};
+    use aconfig_storage_write_api::{
+        map_mutable_storage_file, set_boolean_flag_value, set_flag_has_local_override,
+        set_flag_has_server_override,
+    };
 
-    use protobuf::Message;
     use std::fs::{self, File};
-    use std::io::{Read, Write};
+    use std::io::Read;
     use tempfile::NamedTempFile;
 
-    /// Write storage location record pb to a temp file
-    fn write_storage_record_file(flag_val: &str) -> NamedTempFile {
-        let text_proto = format!(
-            r#"
-files {{
-    version: 0
-    container: "mockup"
-    package_map: "some_package_map"
-    flag_map: "some_flag_map"
-    flag_val: "{}"
-    timestamp: 12345
-}}
-"#,
-            flag_val
-        );
-        let storage_files: ProtoStorageFiles =
-            protobuf::text_format::parse_from_str(&text_proto).unwrap();
-        let mut binary_proto_bytes = Vec::new();
-        storage_files.write_to_vec(&mut binary_proto_bytes).unwrap();
-        let mut file = NamedTempFile::new().unwrap();
-        file.write_all(&binary_proto_bytes).unwrap();
-        file
-    }
-
     /// Create temp file copy
     fn copy_to_temp_rw_file(source_file: &str) -> NamedTempFile {
         let file = NamedTempFile::new().unwrap();
@@ -48,18 +27,24 @@
         find_boolean_flag_value(&bytes, offset).unwrap()
     }
 
+    /// Get flag attribute at offset
+    fn get_flag_attribute_at_offset(file: &str, value_type: FlagValueType, offset: u32) -> u8 {
+        let mut f = File::open(file).unwrap();
+        let mut bytes = Vec::new();
+        f.read_to_end(&mut bytes).unwrap();
+        find_flag_attribute(&bytes, value_type, offset).unwrap()
+    }
+
     #[test]
     /// Test to lock down flag value update api
     fn test_boolean_flag_value_update() {
         let flag_value_file = copy_to_temp_rw_file("./flag.val");
         let flag_value_path = flag_value_file.path().display().to_string();
-        let record_pb_file = write_storage_record_file(&flag_value_path);
-        let record_pb_path = record_pb_file.path().display().to_string();
 
         // SAFETY:
         // The safety here is ensured as only this single threaded test process will
         // write to this file
-        let mut file = unsafe { get_mapped_file(&record_pb_path, "mockup").unwrap() };
+        let mut file = unsafe { map_mutable_storage_file(&flag_value_path).unwrap() };
         for i in 0..8 {
             set_boolean_flag_value(&mut file, i, true).unwrap();
             let value = get_boolean_flag_value_at_offset(&flag_value_path, i);
@@ -70,4 +55,48 @@
             assert!(!value);
         }
     }
+
+    #[test]
+    /// Test to lock down flag has server override update api
+    fn test_set_flag_has_server_override() {
+        let flag_info_file = copy_to_temp_rw_file("./flag.info");
+        let flag_info_path = flag_info_file.path().display().to_string();
+
+        // SAFETY:
+        // The safety here is ensured as only this single threaded test process will
+        // write to this file
+        let mut file = unsafe { map_mutable_storage_file(&flag_info_path).unwrap() };
+        for i in 0..8 {
+            set_flag_has_server_override(&mut file, FlagValueType::Boolean, i, true).unwrap();
+            let attribute =
+                get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+            assert!((attribute & (FlagInfoBit::HasServerOverride as u8)) != 0);
+            set_flag_has_server_override(&mut file, FlagValueType::Boolean, i, false).unwrap();
+            let attribute =
+                get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+            assert!((attribute & (FlagInfoBit::HasServerOverride as u8)) == 0);
+        }
+    }
+
+    #[test]
+    /// Test to lock down flag has local override update api
+    fn test_set_flag_has_local_override() {
+        let flag_info_file = copy_to_temp_rw_file("./flag.info");
+        let flag_info_path = flag_info_file.path().display().to_string();
+
+        // SAFETY:
+        // The safety here is ensured as only this single threaded test process will
+        // write to this file
+        let mut file = unsafe { map_mutable_storage_file(&flag_info_path).unwrap() };
+        for i in 0..8 {
+            set_flag_has_local_override(&mut file, FlagValueType::Boolean, i, true).unwrap();
+            let attribute =
+                get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+            assert!((attribute & (FlagInfoBit::HasLocalOverride as u8)) != 0);
+            set_flag_has_local_override(&mut file, FlagValueType::Boolean, i, false).unwrap();
+            let attribute =
+                get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+            assert!((attribute & (FlagInfoBit::HasLocalOverride as u8)) == 0);
+        }
+    }
 }
diff --git a/tools/aconfig/aflags/Android.bp b/tools/aconfig/aflags/Android.bp
index 4920a6f..2a02379 100644
--- a/tools/aconfig/aflags/Android.bp
+++ b/tools/aconfig/aflags/Android.bp
@@ -9,6 +9,7 @@
     lints: "android",
     srcs: ["src/main.rs"],
     rustlibs: [
+        "libaconfig_device_paths",
         "libaconfig_protos",
         "libaconfig_storage_read_api",
         "libaconfig_storage_file",
diff --git a/tools/aconfig/aflags/Cargo.toml b/tools/aconfig/aflags/Cargo.toml
index cce7f9d..eeae295 100644
--- a/tools/aconfig/aflags/Cargo.toml
+++ b/tools/aconfig/aflags/Cargo.toml
@@ -13,3 +13,4 @@
 aconfig_storage_file = { version = "0.1.0", path = "../aconfig_storage_file" }
 aconfig_storage_read_api = { version = "0.1.0", path = "../aconfig_storage_read_api" }
 clap = {version = "4.5.2" }
+aconfig_device_paths = { version = "0.1.0", path = "../aconfig_device_paths" }
diff --git a/tools/aconfig/aflags/src/aconfig_storage_source.rs b/tools/aconfig/aflags/src/aconfig_storage_source.rs
index a3ca221..04140c7 100644
--- a/tools/aconfig/aflags/src/aconfig_storage_source.rs
+++ b/tools/aconfig/aflags/src/aconfig_storage_source.rs
@@ -27,13 +27,12 @@
             let container =
                 file_info.container.ok_or(anyhow!("storage file is missing container"))?;
 
-            for (package, name, _flag_type, val) in
-                aconfig_storage_file::list_flags(&package_map, &flag_map, &flag_val)?
+            for listed_flag in aconfig_storage_file::list_flags(&package_map, &flag_map, &flag_val)?
             {
                 result.push(Flag {
-                    name: name.to_string(),
-                    package: package.to_string(),
-                    value: FlagValue::try_from(val.to_string().as_str())?,
+                    name: listed_flag.flag_name,
+                    package: listed_flag.package_name,
+                    value: FlagValue::try_from(listed_flag.flag_value.as_str())?,
                     container: container.to_string(),
 
                     // TODO(b/324436145): delete namespace field once DeviceConfig isn't in CLI.
diff --git a/tools/aconfig/aflags/src/device_config_source.rs b/tools/aconfig/aflags/src/device_config_source.rs
index 089f33d..cf6ab28 100644
--- a/tools/aconfig/aflags/src/device_config_source.rs
+++ b/tools/aconfig/aflags/src/device_config_source.rs
@@ -14,78 +14,17 @@
  * limitations under the License.
  */
 
-use crate::{Flag, FlagPermission, FlagSource, FlagValue, ValuePickedFrom};
-use aconfig_protos::ProtoFlagPermission as ProtoPermission;
-use aconfig_protos::ProtoFlagState as ProtoState;
-use aconfig_protos::ProtoParsedFlag;
-use aconfig_protos::ProtoParsedFlags;
+use crate::load_protos;
+use crate::{Flag, FlagSource, FlagValue, ValuePickedFrom};
+
 use anyhow::{anyhow, bail, Result};
 use regex::Regex;
-use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::process::Command;
-use std::{fs, str};
+use std::str;
 
 pub struct DeviceConfigSource {}
 
-fn convert_parsed_flag(flag: &ProtoParsedFlag) -> Flag {
-    let namespace = flag.namespace().to_string();
-    let package = flag.package().to_string();
-    let name = flag.name().to_string();
-
-    let container = if flag.container().is_empty() {
-        "system".to_string()
-    } else {
-        flag.container().to_string()
-    };
-
-    let value = match flag.state() {
-        ProtoState::ENABLED => FlagValue::Enabled,
-        ProtoState::DISABLED => FlagValue::Disabled,
-    };
-
-    let permission = match flag.permission() {
-        ProtoPermission::READ_ONLY => FlagPermission::ReadOnly,
-        ProtoPermission::READ_WRITE => FlagPermission::ReadWrite,
-    };
-
-    Flag {
-        namespace,
-        package,
-        name,
-        container,
-        value,
-        staged_value: None,
-        permission,
-        value_picked_from: ValuePickedFrom::Default,
-    }
-}
-
-fn read_pb_files() -> Result<Vec<Flag>> {
-    let mut flags: BTreeMap<String, Flag> = BTreeMap::new();
-    for partition in ["system", "system_ext", "product", "vendor"] {
-        let path = format!("/{partition}/etc/aconfig_flags.pb");
-        let Ok(bytes) = fs::read(&path) else {
-            eprintln!("warning: failed to read {}", path);
-            continue;
-        };
-        let parsed_flags: ProtoParsedFlags = protobuf::Message::parse_from_bytes(&bytes)?;
-        for flag in parsed_flags.parsed_flag {
-            let key = format!("{}.{}", flag.package(), flag.name());
-            let container = if flag.container().is_empty() {
-                "system".to_string()
-            } else {
-                flag.container().to_string()
-            };
-
-            if container.eq(partition) {
-                flags.insert(key, convert_parsed_flag(&flag));
-            }
-        }
-    }
-    Ok(flags.values().cloned().collect())
-}
-
 fn parse_device_config(raw: &str) -> Result<HashMap<String, FlagValue>> {
     let mut flags = HashMap::new();
     let regex = Regex::new(r"(?m)^([[[:alnum:]]_]+/[[[:alnum:]]_\.]+)=(true|false)$")?;
@@ -180,7 +119,7 @@
 
 impl FlagSource for DeviceConfigSource {
     fn list_flags() -> Result<Vec<Flag>> {
-        let pb_flags = read_pb_files()?;
+        let pb_flags = load_protos::load()?;
         let dc_flags = read_device_config_flags()?;
         let staged_flags = read_staged_flags()?;
 
diff --git a/tools/aconfig/aflags/src/load_protos.rs b/tools/aconfig/aflags/src/load_protos.rs
new file mode 100644
index 0000000..90d8599
--- /dev/null
+++ b/tools/aconfig/aflags/src/load_protos.rs
@@ -0,0 +1,62 @@
+use crate::{Flag, FlagPermission, FlagValue, ValuePickedFrom};
+use aconfig_protos::ProtoFlagPermission as ProtoPermission;
+use aconfig_protos::ProtoFlagState as ProtoState;
+use aconfig_protos::ProtoParsedFlag;
+use aconfig_protos::ProtoParsedFlags;
+use anyhow::Result;
+use std::fs;
+use std::path::Path;
+
+// TODO(b/329875578): use container field directly instead of inferring.
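+// Illustrative examples of the inference below (hypothetical on-device paths):
+//   "/system/etc/aconfig_flags.pb"                -> "system"
+//   "/apex/com.android.wifi/etc/aconfig_flags.pb" -> "com.android.wifi"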
+fn infer_container(path: &Path) -> String {
+    let path_str = path.to_string_lossy();
+    path_str
+        .strip_prefix("/apex/")
+        .or_else(|| path_str.strip_prefix('/'))
+        .unwrap_or(&path_str)
+        .strip_suffix("/etc/aconfig_flags.pb")
+        .unwrap_or(&path_str)
+        .to_string()
+}
+
+fn convert_parsed_flag(path: &Path, flag: &ProtoParsedFlag) -> Flag {
+    let namespace = flag.namespace().to_string();
+    let package = flag.package().to_string();
+    let name = flag.name().to_string();
+
+    let value = match flag.state() {
+        ProtoState::ENABLED => FlagValue::Enabled,
+        ProtoState::DISABLED => FlagValue::Disabled,
+    };
+
+    let permission = match flag.permission() {
+        ProtoPermission::READ_ONLY => FlagPermission::ReadOnly,
+        ProtoPermission::READ_WRITE => FlagPermission::ReadWrite,
+    };
+
+    Flag {
+        namespace,
+        package,
+        name,
+        container: infer_container(path),
+        value,
+        staged_value: None,
+        permission,
+        value_picked_from: ValuePickedFrom::Default,
+    }
+}
+
+pub(crate) fn load() -> Result<Vec<Flag>> {
+    let mut result = Vec::new();
+
+    let paths = aconfig_device_paths::parsed_flags_proto_paths()?;
+    for path in paths {
+        let bytes = fs::read(path.clone())?;
+        let parsed_flags: ProtoParsedFlags = protobuf::Message::parse_from_bytes(&bytes)?;
+        for flag in parsed_flags.parsed_flag {
+            // TODO(b/334954748): enforce one-container-per-flag invariant.
+            result.push(convert_parsed_flag(&path, &flag));
+        }
+    }
+    Ok(result)
+}
diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs
index 1c453c5..810f2e3 100644
--- a/tools/aconfig/aflags/src/main.rs
+++ b/tools/aconfig/aflags/src/main.rs
@@ -25,18 +25,24 @@
 mod aconfig_storage_source;
 use aconfig_storage_source::AconfigStorageSource;
 
+mod load_protos;
+
 #[derive(Clone, PartialEq, Debug)]
 enum FlagPermission {
     ReadOnly,
     ReadWrite,
 }
 
-impl ToString for FlagPermission {
-    fn to_string(&self) -> String {
-        match &self {
-            Self::ReadOnly => "read-only".into(),
-            Self::ReadWrite => "read-write".into(),
-        }
+impl std::fmt::Display for FlagPermission {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "{}",
+            match &self {
+                Self::ReadOnly => "read-only",
+                Self::ReadWrite => "read-write",
+            }
+        )
     }
 }
 
@@ -46,12 +52,16 @@
     Server,
 }
 
-impl ToString for ValuePickedFrom {
-    fn to_string(&self) -> String {
-        match &self {
-            Self::Default => "default".into(),
-            Self::Server => "server".into(),
-        }
+impl std::fmt::Display for ValuePickedFrom {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "{}",
+            match &self {
+                Self::Default => "default",
+                Self::Server => "server",
+            }
+        )
     }
 }
 
@@ -73,12 +83,16 @@
     }
 }
 
-impl ToString for FlagValue {
-    fn to_string(&self) -> String {
-        match &self {
-            Self::Enabled => "enabled".into(),
-            Self::Disabled => "disabled".into(),
-        }
+impl std::fmt::Display for FlagValue {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "{}",
+            match &self {
+                Self::Enabled => "enabled",
+                Self::Disabled => "disabled",
+            }
+        )
     }
 }
 
@@ -101,7 +115,7 @@
 
     fn display_staged_value(&self) -> String {
         match self.staged_value {
-            Some(v) => format!("(->{})", v.to_string()),
+            Some(v) => format!("(->{})", v),
             None => "-".to_string(),
         }
     }
@@ -151,6 +165,10 @@
         /// Read from the new flag storage.
         #[clap(long)]
         use_new_storage: bool,
+
+        /// Optionally filter by container name.
+        #[clap(short = 'c', long = "container")]
+        container: Option<String>,
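+        // Illustrative usage (binary name assumed): `aflags list --container system`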
     },
 
     /// Enable an aconfig flag on this device, on the next boot.
@@ -174,6 +192,23 @@
     longest_permission_col: usize,
 }
 
+struct Filter {
+    container: Option<String>,
+}
+
+impl Filter {
+    fn apply(&self, flags: &[Flag]) -> Vec<Flag> {
+        flags
+            .iter()
+            .filter(|flag| match &self.container {
+                Some(c) => flag.container == *c,
+                None => true,
+            })
+            .cloned()
+            .collect()
+    }
+}
+
 fn format_flag_row(flag: &Flag, info: &PaddingInfo) -> String {
     let full_name = flag.qualified_name();
     let p0 = info.longest_flag_col + 1;
@@ -198,8 +233,6 @@
 }
 
 fn set_flag(qualified_name: &str, value: &str) -> Result<()> {
-    ensure!(nix::unistd::Uid::current().is_root(), "must be root to mutate flags");
-
     let flags_binding = DeviceConfigSource::list_flags()?;
     let flag = flags_binding.iter().find(|f| f.qualified_name() == qualified_name).ok_or(
         anyhow!("no aconfig flag '{qualified_name}'. Does the flag have an .aconfig definition?"),
@@ -213,11 +246,12 @@
     Ok(())
 }
 
-fn list(source_type: FlagSourceType) -> Result<String> {
-    let flags = match source_type {
+fn list(source_type: FlagSourceType, container: Option<String>) -> Result<String> {
+    let flags_unfiltered = match source_type {
         FlagSourceType::DeviceConfig => DeviceConfigSource::list_flags()?,
         FlagSourceType::AconfigStorage => AconfigStorageSource::list_flags()?,
     };
+    let flags = (Filter { container }).apply(&flags_unfiltered);
     let padding_info = PaddingInfo {
         longest_flag_col: flags.iter().map(|f| f.qualified_name().len()).max().unwrap_or(0),
         longest_val_col: flags.iter().map(|f| f.value.to_string().len()).max().unwrap_or(0),
@@ -246,11 +280,17 @@
     Ok(result)
 }
 
-fn main() {
+fn main() -> Result<()> {
+    ensure!(nix::unistd::Uid::current().is_root(), "must be root");
+
     let cli = Cli::parse();
     let output = match cli.command {
-        Command::List { use_new_storage: true } => list(FlagSourceType::AconfigStorage).map(Some),
-        Command::List { use_new_storage: false } => list(FlagSourceType::DeviceConfig).map(Some),
+        Command::List { use_new_storage: true, container } => {
+            list(FlagSourceType::AconfigStorage, container).map(Some)
+        }
+        Command::List { use_new_storage: false, container } => {
+            list(FlagSourceType::DeviceConfig, container).map(Some)
+        }
         Command::Enable { qualified_name } => set_flag(&qualified_name, "true").map(|_| None),
         Command::Disable { qualified_name } => set_flag(&qualified_name, "false").map(|_| None),
     };
@@ -259,4 +299,87 @@
         Ok(None) => (),
         Err(message) => println!("Error: {message}"),
     }
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_filter_container() {
+        let flags = vec![
+            Flag {
+                namespace: "namespace".to_string(),
+                name: "test1".to_string(),
+                package: "package".to_string(),
+                value: FlagValue::Disabled,
+                staged_value: None,
+                permission: FlagPermission::ReadWrite,
+                value_picked_from: ValuePickedFrom::Default,
+                container: "system".to_string(),
+            },
+            Flag {
+                namespace: "namespace".to_string(),
+                name: "test2".to_string(),
+                package: "package".to_string(),
+                value: FlagValue::Disabled,
+                staged_value: None,
+                permission: FlagPermission::ReadWrite,
+                value_picked_from: ValuePickedFrom::Default,
+                container: "not_system".to_string(),
+            },
+            Flag {
+                namespace: "namespace".to_string(),
+                name: "test3".to_string(),
+                package: "package".to_string(),
+                value: FlagValue::Disabled,
+                staged_value: None,
+                permission: FlagPermission::ReadWrite,
+                value_picked_from: ValuePickedFrom::Default,
+                container: "system".to_string(),
+            },
+        ];
+
+        assert_eq!((Filter { container: Some("system".to_string()) }).apply(&flags).len(), 2);
+    }
+
+    #[test]
+    fn test_filter_no_container() {
+        let flags = vec![
+            Flag {
+                namespace: "namespace".to_string(),
+                name: "test1".to_string(),
+                package: "package".to_string(),
+                value: FlagValue::Disabled,
+                staged_value: None,
+                permission: FlagPermission::ReadWrite,
+                value_picked_from: ValuePickedFrom::Default,
+                container: "system".to_string(),
+            },
+            Flag {
+                namespace: "namespace".to_string(),
+                name: "test2".to_string(),
+                package: "package".to_string(),
+                value: FlagValue::Disabled,
+                staged_value: None,
+                permission: FlagPermission::ReadWrite,
+                value_picked_from: ValuePickedFrom::Default,
+                container: "not_system".to_string(),
+            },
+            Flag {
+                namespace: "namespace".to_string(),
+                name: "test3".to_string(),
+                package: "package".to_string(),
+                value: FlagValue::Disabled,
+                staged_value: None,
+                permission: FlagPermission::ReadWrite,
+                value_picked_from: ValuePickedFrom::Default,
+                container: "system".to_string(),
+            },
+        ];
+
+        assert_eq!((Filter { container: None }).apply(&flags).len(), 3);
+    }
 }
diff --git a/tools/check-flagged-apis/Android.bp b/tools/check-flagged-apis/Android.bp
new file mode 100644
index 0000000..43c9c8e
--- /dev/null
+++ b/tools/check-flagged-apis/Android.bp
@@ -0,0 +1,51 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_team: "trendy_team_updatable_sdk_apis",
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_defaults {
+    name: "check-flagged-apis-defaults",
+    srcs: [
+        "src/com/android/checkflaggedapis/Main.kt",
+    ],
+    static_libs: [
+        "libaconfig_java_proto_lite",
+        "metalava-signature-reader",
+        "metalava-tools-common-m2-deps",
+    ],
+}
+
+java_binary_host {
+    name: "check-flagged-apis",
+    defaults: [
+        "check-flagged-apis-defaults",
+    ],
+    main_class: "com.android.checkflaggedapis.Main",
+}
+
+java_test_host {
+    name: "check-flagged-apis-test",
+    defaults: [
+        "check-flagged-apis-defaults",
+    ],
+    srcs: [
+        "src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt",
+    ],
+    static_libs: [
+        "junit",
+    ],
+}
diff --git a/tools/check-flagged-apis/OWNERS b/tools/check-flagged-apis/OWNERS
new file mode 100644
index 0000000..289e21e
--- /dev/null
+++ b/tools/check-flagged-apis/OWNERS
@@ -0,0 +1,4 @@
+amhk@google.com
+gurpreetgs@google.com
+michaelwr@google.com
+paulduffin@google.com
diff --git a/tools/check-flagged-apis/check-flagged-apis.sh b/tools/check-flagged-apis/check-flagged-apis.sh
new file mode 100755
index 0000000..8078cd8
--- /dev/null
+++ b/tools/check-flagged-apis/check-flagged-apis.sh
@@ -0,0 +1,136 @@
+#!/bin/bash
+
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Run check-flagged-apis for public APIs and the three @SystemApi flavours.
+#
+# This script expects an argument to tell it which subcommand of
+# check-flagged-apis to execute. Run the script without any arguments to see
+# the valid options.
+#
+# Remember to lunch to select the relevant release config before running this script.
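+#
+# Example invocations (illustrative):
+#   ./check-flagged-apis.sh check              # build, then run the check subcommand
+#   ./check-flagged-apis.sh --skip-build list  # skip the build step and just list flagged APIs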
+
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../shell_utils.sh
+require_top
+
+PUBLIC_XML_VERSIONS=out/target/common/obj/PACKAGING/api_versions_public_generated-api-versions.xml
+SYSTEM_XML_VERSIONS=out/target/common/obj/PACKAGING/api_versions_system_generated-api-versions.xml
+SYSTEM_SERVER_XML_VERSIONS=out/target/common/obj/PACKAGING/api_versions_system_server_complete_generated-api-versions.xml
+MODULE_LIB_XML_VERSIONS=out/target/common/obj/PACKAGING/api_versions_module_lib_complete_generated-api-versions.xml
+
+function m() {
+    $(gettop)/build/soong/soong_ui.bash --build-mode --all-modules --dir="$(pwd)" "$@"
+}
+
+function build() {
+    m \
+        check-flagged-apis \
+        all_aconfig_declarations \
+        frameworks-base-api-current.txt \
+        frameworks-base-api-system-current.txt \
+        frameworks-base-api-system-server-current.txt \
+        frameworks-base-api-module-lib-current.txt \
+        $PUBLIC_XML_VERSIONS \
+        $SYSTEM_XML_VERSIONS \
+        $SYSTEM_SERVER_XML_VERSIONS \
+        $MODULE_LIB_XML_VERSIONS
+}
+
+function noop() {
+    true
+}
+
+function aninja() {
+    local T="$(gettop)"
+    (\cd "${T}" && prebuilts/build-tools/linux-x86/bin/ninja -f out/combined-${TARGET_PRODUCT}.ninja "$@")
+}
+
+function path_to_api_signature_file {
+    aninja -t query device_"$1"_all_targets | grep -A1 -e input: | tail -n1
+}
+
+function run_check() {
+    local errors=0
+
+    echo "# current"
+    check-flagged-apis check \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
+        --api-versions $PUBLIC_XML_VERSIONS
+    (( errors += $? ))
+
+    echo
+    echo "# system-current"
+    check-flagged-apis check \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
+        --api-versions $SYSTEM_XML_VERSIONS
+    (( errors += $? ))
+
+    echo
+    echo "# system-server-current"
+    check-flagged-apis check \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-server-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
+        --api-versions $SYSTEM_SERVER_XML_VERSIONS
+    (( errors += $? ))
+
+    echo
+    echo "# module-lib"
+    check-flagged-apis check \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-module-lib-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
+        --api-versions $MODULE_LIB_XML_VERSIONS
+    (( errors += $? ))
+
+    return $errors
+}
+
+function run_list() {
+    echo "# current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# system-current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# system-server-current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-server-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# module-lib"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-module-lib-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+}
+
+build_cmd=build
+if [[ "$1" == "--skip-build" ]]; then
+    build_cmd=noop
+    shift 1
+fi
+
+case "$1" in
+    check) $build_cmd && run_check ;;
+    list) $build_cmd && run_list ;;
+    *) echo "usage: $(basename $0) [--skip-build] check|list"; exit 1
+esac
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
new file mode 100644
index 0000000..e07ac1d
--- /dev/null
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
@@ -0,0 +1,380 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.checkflaggedapis
+
+import android.aconfig.Aconfig
+import android.aconfig.Aconfig.flag_state.DISABLED
+import android.aconfig.Aconfig.flag_state.ENABLED
+import java.io.ByteArrayInputStream
+import java.io.ByteArrayOutputStream
+import java.io.InputStream
+import org.junit.Assert.assertEquals
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+private val API_SIGNATURE =
+    """
+      // Signature format: 2.0
+      package android {
+        @FlaggedApi("android.flag.foo") public final class Clazz {
+          ctor @FlaggedApi("android.flag.foo") public Clazz();
+          field @FlaggedApi("android.flag.foo") public static final int FOO = 1; // 0x1
+          method @FlaggedApi("android.flag.foo") public int getErrorCode();
+          method @FlaggedApi("android.flag.foo") public boolean setData(int, int[][], @NonNull android.util.Utility<T, U>);
+          method @FlaggedApi("android.flag.foo") public boolean setVariableData(int, android.util.Atom...);
+          method @FlaggedApi("android.flag.foo") public boolean innerClassArg(android.Clazz.Builder);
+        }
+        @FlaggedApi("android.flag.bar") public static class Clazz.Builder {
+        }
+      }
+"""
+        .trim()
+
+private val API_VERSIONS =
+    """
+      <?xml version="1.0" encoding="utf-8"?>
+      <api version="3">
+        <class name="android/Clazz" since="1">
+          <extends name="java/lang/Object"/>
+          <method name="&lt;init>()V"/>
+          <field name="FOO"/>
+          <method name="getErrorCode()I"/>
+          <method name="setData(I[[ILandroid/util/Utility;)Z"/>
+          <method name="setVariableData(I[Landroid/util/Atom;)Z"/>
+          <method name="innerClassArg(Landroid/Clazz${"$"}Builder;)"/>
+        </class>
+        <class name="android/Clazz${"$"}Builder" since="2">
+          <extends name="java/lang/Object"/>
+        </class>
+      </api>
+"""
+        .trim()
+
+private fun generateFlagsProto(
+    fooState: Aconfig.flag_state,
+    barState: Aconfig.flag_state
+): InputStream {
+  val fooFlag =
+      Aconfig.parsed_flag
+          .newBuilder()
+          .setPackage("android.flag")
+          .setName("foo")
+          .setState(fooState)
+          .setPermission(Aconfig.flag_permission.READ_ONLY)
+          .build()
+  val barFlag =
+      Aconfig.parsed_flag
+          .newBuilder()
+          .setPackage("android.flag")
+          .setName("bar")
+          .setState(barState)
+          .setPermission(Aconfig.flag_permission.READ_ONLY)
+          .build()
+  val flags =
+      Aconfig.parsed_flags.newBuilder().addParsedFlag(fooFlag).addParsedFlag(barFlag).build()
+  val binaryProto = ByteArrayOutputStream()
+  flags.writeTo(binaryProto)
+  return ByteArrayInputStream(binaryProto.toByteArray())
+}
+
+@RunWith(JUnit4::class)
+class CheckFlaggedApisTest {
+  @Test
+  fun testParseApiSignature() {
+    val expected =
+        setOf(
+            Pair(
+                Symbol.createClass("android/Clazz", "java/lang/Object", setOf()),
+                Flag("android.flag.foo")),
+            Pair(Symbol.createMethod("android/Clazz", "Clazz()"), Flag("android.flag.foo")),
+            Pair(Symbol.createField("android/Clazz", "FOO"), Flag("android.flag.foo")),
+            Pair(Symbol.createMethod("android/Clazz", "getErrorCode()"), Flag("android.flag.foo")),
+            Pair(
+                Symbol.createMethod("android/Clazz", "setData(I[[ILandroid/util/Utility;)"),
+                Flag("android.flag.foo")),
+            Pair(
+                Symbol.createMethod("android/Clazz", "setVariableData(I[Landroid/util/Atom;)"),
+                Flag("android.flag.foo")),
+            Pair(
+                Symbol.createMethod("android/Clazz", "innerClassArg(Landroid/Clazz/Builder;)"),
+                Flag("android.flag.foo")),
+            Pair(
+                Symbol.createClass("android/Clazz/Builder", "java/lang/Object", setOf()),
+                Flag("android.flag.bar")),
+        )
+    val actual = parseApiSignature("in-memory", API_SIGNATURE.byteInputStream())
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testParseApiSignatureInterfacesInheritFromJavaLangObject() {
+    val apiSignature =
+        """
+          // Signature format: 2.0
+          package android {
+            @FlaggedApi("android.flag.foo") public interface Interface {
+            }
+          }
+        """
+            .trim()
+    val expected =
+        setOf(
+            Pair(
+                Symbol.createClass("android/Interface", "java/lang/Object", setOf()),
+                Flag("android.flag.foo")))
+    val actual = parseApiSignature("in-memory", apiSignature.byteInputStream())
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testParseFlagValues() {
+    val expected: Map<Flag, Boolean> =
+        mapOf(Flag("android.flag.foo") to true, Flag("android.flag.bar") to true)
+    val actual = parseFlagValues(generateFlagsProto(ENABLED, ENABLED))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testParseApiVersions() {
+    val expected: Set<Symbol> =
+        setOf(
+            Symbol.createClass("android/Clazz", "java/lang/Object", setOf()),
+            Symbol.createMethod("android/Clazz", "Clazz()"),
+            Symbol.createField("android/Clazz", "FOO"),
+            Symbol.createMethod("android/Clazz", "getErrorCode()"),
+            Symbol.createMethod("android/Clazz", "setData(I[[ILandroid/util/Utility;)"),
+            Symbol.createMethod("android/Clazz", "setVariableData(I[Landroid/util/Atom;)"),
+            Symbol.createMethod("android/Clazz", "innerClassArg(Landroid/Clazz/Builder;)"),
+            Symbol.createClass("android/Clazz/Builder", "java/lang/Object", setOf()),
+        )
+    val actual = parseApiVersions(API_VERSIONS.byteInputStream())
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testParseApiVersionsNestedClasses() {
+    val apiVersions =
+        """
+          <?xml version="1.0" encoding="utf-8"?>
+          <api version="3">
+            <class name="android/Clazz${'$'}Foo${'$'}Bar" since="1">
+              <extends name="java/lang/Object"/>
+              <method name="&lt;init>()V"/>
+            </class>
+          </api>
+        """
+            .trim()
+    val expected: Set<Symbol> =
+        setOf(
+            Symbol.createClass("android/Clazz/Foo/Bar", "java/lang/Object", setOf()),
+            Symbol.createMethod("android/Clazz/Foo/Bar", "Bar()"),
+        )
+    val actual = parseApiVersions(apiVersions.byteInputStream())
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testFindErrorsNoErrors() {
+    val expected = setOf<ApiError>()
+    val actual =
+        findErrors(
+            parseApiSignature("in-memory", API_SIGNATURE.byteInputStream()),
+            parseFlagValues(generateFlagsProto(ENABLED, ENABLED)),
+            parseApiVersions(API_VERSIONS.byteInputStream()))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testFindErrorsVerifyImplements() {
+    val apiSignature =
+        """
+          // Signature format: 2.0
+          package android {
+            @FlaggedApi("android.flag.foo") public final class Clazz implements android.Interface {
+              method @FlaggedApi("android.flag.foo") public boolean foo();
+              method @FlaggedApi("android.flag.foo") public boolean bar();
+            }
+            public interface Interface {
+              method public boolean bar();
+            }
+          }
+        """
+            .trim()
+
+    val apiVersions =
+        """
+          <?xml version="1.0" encoding="utf-8"?>
+          <api version="3">
+            <class name="android/Clazz" since="1">
+              <extends name="java/lang/Object"/>
+              <implements name="android/Interface"/>
+              <method name="foo()Z"/>
+            </class>
+            <class name="android/Interface" since="1">
+              <method name="bar()Z"/>
+            </class>
+          </api>
+        """
+            .trim()
+
+    val expected = setOf<ApiError>()
+    val actual =
+        findErrors(
+            parseApiSignature("in-memory", apiSignature.byteInputStream()),
+            parseFlagValues(generateFlagsProto(ENABLED, ENABLED)),
+            parseApiVersions(apiVersions.byteInputStream()))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testFindErrorsVerifySuperclass() {
+    val apiSignature =
+        """
+          // Signature format: 2.0
+          package android {
+            @FlaggedApi("android.flag.foo") public final class C extends android.B {
+              method @FlaggedApi("android.flag.foo") public boolean c();
+              method @FlaggedApi("android.flag.foo") public boolean b();
+              method @FlaggedApi("android.flag.foo") public boolean a();
+            }
+            public final class B extends android.A {
+              method public boolean b();
+            }
+            public final class A {
+              method public boolean a();
+            }
+          }
+        """
+            .trim()
+
+    val apiVersions =
+        """
+          <?xml version="1.0" encoding="utf-8"?>
+          <api version="3">
+            <class name="android/C" since="1">
+              <extends name="android/B"/>
+              <method name="c()Z"/>
+            </class>
+            <class name="android/B" since="1">
+              <extends name="android/A"/>
+              <method name="b()Z"/>
+            </class>
+            <class name="android/A" since="1">
+              <method name="a()Z"/>
+            </class>
+          </api>
+        """
+            .trim()
+
+    val expected = setOf<ApiError>()
+    val actual =
+        findErrors(
+            parseApiSignature("in-memory", apiSignature.byteInputStream()),
+            parseFlagValues(generateFlagsProto(ENABLED, ENABLED)),
+            parseApiVersions(apiVersions.byteInputStream()))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testNestedFlagsOuterFlagWins() {
+    val apiSignature =
+        """
+          // Signature format: 2.0
+          package android {
+            @FlaggedApi("android.flag.foo") public final class A {
+              method @FlaggedApi("android.flag.bar") public boolean method();
+            }
+            @FlaggedApi("android.flag.bar") public final class B {
+              method @FlaggedApi("android.flag.foo") public boolean method();
+            }
+          }
+        """
+            .trim()
+
+    val apiVersions =
+        """
+          <?xml version="1.0" encoding="utf-8"?>
+          <api version="3">
+            <class name="android/B" since="1">
+            <extends name="java/lang/Object"/>
+            </class>
+          </api>
+        """
+            .trim()
+
+    val expected = setOf<ApiError>()
+    val actual =
+        findErrors(
+            parseApiSignature("in-memory", apiSignature.byteInputStream()),
+            parseFlagValues(generateFlagsProto(DISABLED, ENABLED)),
+            parseApiVersions(apiVersions.byteInputStream()))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testFindErrorsDisabledFlaggedApiIsPresent() {
+    val expected =
+        setOf<ApiError>(
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createClass("android/Clazz", "java/lang/Object", setOf()),
+                Flag("android.flag.foo")),
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createMethod("android/Clazz", "Clazz()"), Flag("android.flag.foo")),
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createField("android/Clazz", "FOO"), Flag("android.flag.foo")),
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createMethod("android/Clazz", "getErrorCode()"), Flag("android.flag.foo")),
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createMethod("android/Clazz", "setData(I[[ILandroid/util/Utility;)"),
+                Flag("android.flag.foo")),
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createMethod("android/Clazz", "setVariableData(I[Landroid/util/Atom;)"),
+                Flag("android.flag.foo")),
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createMethod("android/Clazz", "innerClassArg(Landroid/Clazz/Builder;)"),
+                Flag("android.flag.foo")),
+            DisabledFlaggedApiIsPresentError(
+                Symbol.createClass("android/Clazz/Builder", "java/lang/Object", setOf()),
+                Flag("android.flag.bar")),
+        )
+    val actual =
+        findErrors(
+            parseApiSignature("in-memory", API_SIGNATURE.byteInputStream()),
+            parseFlagValues(generateFlagsProto(DISABLED, DISABLED)),
+            parseApiVersions(API_VERSIONS.byteInputStream()))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testListFlaggedApis() {
+    val expected =
+        listOf(
+            "android.flag.bar DISABLED android/Clazz/Builder",
+            "android.flag.foo ENABLED android/Clazz",
+            "android.flag.foo ENABLED android/Clazz/Clazz()",
+            "android.flag.foo ENABLED android/Clazz/FOO",
+            "android.flag.foo ENABLED android/Clazz/getErrorCode()",
+            "android.flag.foo ENABLED android/Clazz/innerClassArg(Landroid/Clazz/Builder;)",
+            "android.flag.foo ENABLED android/Clazz/setData(I[[ILandroid/util/Utility;)",
+            "android.flag.foo ENABLED android/Clazz/setVariableData(I[Landroid/util/Atom;)")
+    val actual =
+        listFlaggedApis(
+            parseApiSignature("in-memory", API_SIGNATURE.byteInputStream()),
+            parseFlagValues(generateFlagsProto(ENABLED, DISABLED)))
+    assertEquals(expected, actual)
+  }
+}
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
new file mode 100644
index 0000000..1125d39
--- /dev/null
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
@@ -0,0 +1,528 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@file:JvmName("Main")
+
+package com.android.checkflaggedapis
+
+import android.aconfig.Aconfig
+import com.android.tools.metalava.model.BaseItemVisitor
+import com.android.tools.metalava.model.ClassItem
+import com.android.tools.metalava.model.FieldItem
+import com.android.tools.metalava.model.Item
+import com.android.tools.metalava.model.MethodItem
+import com.android.tools.metalava.model.text.ApiFile
+import com.github.ajalt.clikt.core.CliktCommand
+import com.github.ajalt.clikt.core.ProgramResult
+import com.github.ajalt.clikt.core.subcommands
+import com.github.ajalt.clikt.parameters.options.help
+import com.github.ajalt.clikt.parameters.options.option
+import com.github.ajalt.clikt.parameters.options.required
+import com.github.ajalt.clikt.parameters.types.path
+import java.io.InputStream
+import javax.xml.parsers.DocumentBuilderFactory
+import org.w3c.dom.Node
+
+/**
+ * Class representing the fully qualified name of a class, method or field.
+ *
+ * This tool reads a multitude of input formats, all of which represent the fully qualified path to
+ * a Java symbol slightly differently. To keep things consistent, all parsed APIs are converted to
+ * Symbols.
+ *
+ * Symbols are encoded using a format similar to the one described in section 4.3.2 of the JVM
+ * spec [1], that is, "package.class.inner-class.method(int, int[], android.util.Clazz)" is
+ * represented as
+ * <pre>
+ *   package.class.inner-class.method(II[Landroid/util/Clazz;)
+ * </pre>
+ *
+ * Where possible, the format has been simplified (to make translation of the
+ * various input formats easier): for instance, only / is used as delimiter (#
+ * and $ are never used).
+ *
+ * 1. https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3.2
+ */
+internal sealed class Symbol {
+  companion object {
+    private val FORBIDDEN_CHARS = listOf('#', '$', '.')
+
+    fun createClass(clazz: String, superclass: String?, interfaces: Set<String>): Symbol {
+      return ClassSymbol(
+          toInternalFormat(clazz),
+          superclass?.let { toInternalFormat(it) },
+          interfaces.map { toInternalFormat(it) }.toSet())
+    }
+
+    fun createField(clazz: String, field: String): Symbol {
+      require(!field.contains("(") && !field.contains(")"))
+      return MemberSymbol(toInternalFormat(clazz), toInternalFormat(field))
+    }
+
+    fun createMethod(clazz: String, method: String): Symbol {
+      return MemberSymbol(toInternalFormat(clazz), toInternalFormat(method))
+    }
+
+    protected fun toInternalFormat(name: String): String {
+      var internalName = name
+      for (ch in FORBIDDEN_CHARS) {
+        internalName = internalName.replace(ch, '/')
+      }
+      return internalName
+    }
+  }
+
+  abstract fun toPrettyString(): String
+}
+
+internal data class ClassSymbol(
+    val clazz: String,
+    val superclass: String?,
+    val interfaces: Set<String>
+) : Symbol() {
+  override fun toPrettyString(): String = "$clazz"
+}
+
+internal data class MemberSymbol(val clazz: String, val member: String) : Symbol() {
+  override fun toPrettyString(): String = "$clazz/$member"
+}
+
+/**
+ * Class representing the fully qualified name of an aconfig flag.
+ *
+ * This includes both the flag's package and name, separated by a dot, e.g.:
+ * <pre>
+ *   com.android.aconfig.test.disabled_ro
+ * </pre>
+ */
+@JvmInline
+internal value class Flag(val name: String) {
+  override fun toString(): String = name.toString()
+}
+
+internal sealed class ApiError {
+  abstract val symbol: Symbol
+  abstract val flag: Flag
+}
+
+internal data class EnabledFlaggedApiNotPresentError(
+    override val symbol: Symbol,
+    override val flag: Flag
+) : ApiError() {
+  override fun toString(): String {
+    return "error: enabled @FlaggedApi not present in built artifact: symbol=${symbol.toPrettyString()} flag=$flag"
+  }
+}
+
+internal data class DisabledFlaggedApiIsPresentError(
+    override val symbol: Symbol,
+    override val flag: Flag
+) : ApiError() {
+  override fun toString(): String {
+    return "error: disabled @FlaggedApi is present in built artifact: symbol=${symbol.toPrettyString()} flag=$flag"
+  }
+}
+
+internal data class UnknownFlagError(override val symbol: Symbol, override val flag: Flag) :
+    ApiError() {
+  override fun toString(): String {
+    return "error: unknown flag: symbol=${symbol.toPrettyString()} flag=$flag"
+  }
+}
+
+val ARG_API_SIGNATURE = "--api-signature"
+val ARG_API_SIGNATURE_HELP =
+    """
+Path to API signature file.
+Usually named *current.txt.
+Tip: `m frameworks-base-api-current.txt` will generate a file that includes all platform and mainline APIs.
+"""
+
+val ARG_FLAG_VALUES = "--flag-values"
+val ARG_FLAG_VALUES_HELP =
+    """
+Path to aconfig parsed_flags binary proto file.
+Tip: `m all_aconfig_declarations` will generate a file that includes all information about all flags.
+"""
+
+val ARG_API_VERSIONS = "--api-versions"
+val ARG_API_VERSIONS_HELP =
+    """
+Path to API versions XML file.
+Usually named xml-versions.xml.
+Tip: `m sdk dist` will generate a file that includes all platform and mainline APIs.
+"""
+
+class MainCommand : CliktCommand() {
+  override fun run() {}
+}
+
+class CheckCommand :
+    CliktCommand(
+        help =
+            """
+Check that all flagged APIs are used in the correct way.
+
+This tool reads the API signature file and checks that all flagged APIs are used in the correct way.
+
+The tool will exit with a non-zero exit code if any flagged APIs are found to be used incorrectly.
+""") {
+  private val apiSignaturePath by
+      option(ARG_API_SIGNATURE)
+          .help(ARG_API_SIGNATURE_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+  private val flagValuesPath by
+      option(ARG_FLAG_VALUES)
+          .help(ARG_FLAG_VALUES_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+  private val apiVersionsPath by
+      option(ARG_API_VERSIONS)
+          .help(ARG_API_VERSIONS_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+
+  override fun run() {
+    val flaggedSymbols =
+        apiSignaturePath.toFile().inputStream().use {
+          parseApiSignature(apiSignaturePath.toString(), it)
+        }
+    val flags = flagValuesPath.toFile().inputStream().use { parseFlagValues(it) }
+    val exportedSymbols = apiVersionsPath.toFile().inputStream().use { parseApiVersions(it) }
+    val errors = findErrors(flaggedSymbols, flags, exportedSymbols)
+    for (e in errors) {
+      println(e)
+    }
+    throw ProgramResult(errors.size)
+  }
+}
+
+class ListCommand :
+    CliktCommand(
+        help =
+            """
+List all flagged APIs and corresponding flags.
+
+The output format is "<fully-qualified-name-of-flag> <state-of-flag> <API>", one line per API.
+
+The output can be post-processed by e.g. piping it to grep to filter out only enabled APIs, or all APIs guarded by a given flag.
+""") {
+  private val apiSignaturePath by
+      option(ARG_API_SIGNATURE)
+          .help(ARG_API_SIGNATURE_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+  private val flagValuesPath by
+      option(ARG_FLAG_VALUES)
+          .help(ARG_FLAG_VALUES_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+
+  override fun run() {
+    val flaggedSymbols =
+        apiSignaturePath.toFile().inputStream().use {
+          parseApiSignature(apiSignaturePath.toString(), it)
+        }
+    val flags = flagValuesPath.toFile().inputStream().use { parseFlagValues(it) }
+    val output = listFlaggedApis(flaggedSymbols, flags)
+    if (output.isNotEmpty()) {
+      println(output.joinToString("\n"))
+    }
+  }
+}
+
+internal fun parseApiSignature(path: String, input: InputStream): Set<Pair<Symbol, Flag>> {
+  val output = mutableSetOf<Pair<Symbol, Flag>>()
+  val visitor =
+      object : BaseItemVisitor() {
+        override fun visitClass(cls: ClassItem) {
+          getFlagOrNull(cls)?.let { flag ->
+            val symbol =
+                Symbol.createClass(
+                    cls.baselineElementId(),
+                    if (cls.isInterface()) {
+                      "java/lang/Object"
+                    } else {
+                      cls.superClass()?.baselineElementId()
+                    },
+                    cls.allInterfaces()
+                        .map { it.baselineElementId() }
+                        .filter { it != cls.baselineElementId() }
+                        .toSet())
+            output.add(Pair(symbol, flag))
+          }
+        }
+
+        override fun visitField(field: FieldItem) {
+          getFlagOrNull(field)?.let { flag ->
+            val symbol =
+                Symbol.createField(field.containingClass().baselineElementId(), field.name())
+            output.add(Pair(symbol, flag))
+          }
+        }
+
+        override fun visitMethod(method: MethodItem) {
+          getFlagOrNull(method)?.let { flag ->
+            val methodName = buildString {
+              append(method.name())
+              append("(")
+              method.parameters().joinTo(this, separator = "") { it.type().internalName() }
+              append(")")
+            }
+            val symbol = Symbol.createMethod(method.containingClass().qualifiedName(), methodName)
+            output.add(Pair(symbol, flag))
+          }
+        }
+
+        private fun getFlagOrNull(item: Item): Flag? {
+          return item.modifiers
+              .findAnnotation("android.annotation.FlaggedApi")
+              ?.findAttribute("value")
+              ?.value
+              ?.let { Flag(it.value() as String) }
+        }
+      }
+  val codebase = ApiFile.parseApi(path, input)
+  codebase.accept(visitor)
+  return output
+}
+
+internal fun parseFlagValues(input: InputStream): Map<Flag, Boolean> {
+  val parsedFlags = Aconfig.parsed_flags.parseFrom(input).getParsedFlagList()
+  return parsedFlags.associateBy(
+      { Flag("${it.getPackage()}.${it.getName()}") },
+      { it.getState() == Aconfig.flag_state.ENABLED })
+}
+
+internal fun parseApiVersions(input: InputStream): Set<Symbol> {
+  fun Node.getAttribute(name: String): String? = getAttributes()?.getNamedItem(name)?.getNodeValue()
+
+  val output = mutableSetOf<Symbol>()
+  val factory = DocumentBuilderFactory.newInstance()
+  val parser = factory.newDocumentBuilder()
+  val document = parser.parse(input)
+
+  val classes = document.getElementsByTagName("class")
+  // ktfmt doesn't understand the `..<` range syntax; explicitly call .rangeUntil instead
+  for (i in 0.rangeUntil(classes.getLength())) {
+    val cls = classes.item(i)
+    val className =
+        requireNotNull(cls.getAttribute("name")) {
+          "Bad XML: <class> element without name attribute"
+        }
+    var superclass: String? = null
+    val interfaces = mutableSetOf<String>()
+    val children = cls.getChildNodes()
+    for (j in 0.rangeUntil(children.getLength())) {
+      val child = children.item(j)
+      when (child.getNodeName()) {
+        "extends" -> {
+          superclass =
+              requireNotNull(child.getAttribute("name")) {
+                "Bad XML: <extends> element without name attribute"
+              }
+        }
+        "implements" -> {
+          val interfaceName =
+              requireNotNull(child.getAttribute("name")) {
+                "Bad XML: <implements> element without name attribute"
+              }
+          interfaces.add(interfaceName)
+        }
+      }
+    }
+    output.add(Symbol.createClass(className, superclass, interfaces))
+  }
+
+  val fields = document.getElementsByTagName("field")
+  // ktfmt doesn't understand the `..<` range syntax; explicitly call .rangeUntil instead
+  for (i in 0.rangeUntil(fields.getLength())) {
+    val field = fields.item(i)
+    val fieldName =
+        requireNotNull(field.getAttribute("name")) {
+          "Bad XML: <field> element without name attribute"
+        }
+    val className =
+        requireNotNull(field.getParentNode()?.getAttribute("name")) {
+          "Bad XML: top level <field> element"
+        }
+    output.add(Symbol.createField(className, fieldName))
+  }
+
+  val methods = document.getElementsByTagName("method")
+  // ktfmt doesn't understand the `..<` range syntax; explicitly call .rangeUntil instead
+  for (i in 0.rangeUntil(methods.getLength())) {
+    val method = methods.item(i)
+    val methodSignature =
+        requireNotNull(method.getAttribute("name")) {
+          "Bad XML: <method> element without name attribute"
+        }
+    val methodSignatureParts = methodSignature.split(Regex("\\(|\\)"))
+    if (methodSignatureParts.size != 3) {
+      throw Exception("Bad XML: method signature '$methodSignature'")
+    }
+    var (methodName, methodArgs, _) = methodSignatureParts
+    val packageAndClassName =
+        requireNotNull(method.getParentNode()?.getAttribute("name")) {
+              "Bad XML: top level <method> element, or <class> element missing name attribute"
+            }
+            .replace("$", "/")
+    if (methodName == "<init>") {
+      methodName = packageAndClassName.split("/").last()
+    }
+    output.add(Symbol.createMethod(packageAndClassName, "$methodName($methodArgs)"))
+  }
+
+  return output
+}
+
+/**
+ * Find errors in the given data.
+ *
+ * @param flaggedSymbolsInSource the set of symbols that are flagged in the source code
+ * @param flags the set of flags and their values
+ * @param symbolsInOutput the set of symbols that are present in the output
+ * @return the set of errors found
+ */
+internal fun findErrors(
+    flaggedSymbolsInSource: Set<Pair<Symbol, Flag>>,
+    flags: Map<Flag, Boolean>,
+    symbolsInOutput: Set<Symbol>
+): Set<ApiError> {
+  fun Set<Symbol>.containsSymbol(symbol: Symbol): Boolean {
+    // trivial case: the symbol is explicitly listed in api-versions.xml
+    if (contains(symbol)) {
+      return true
+    }
+
+    // non-trivial case: the symbol could be part of the surrounding class'
+    // super class or interfaces
+    val (className, memberName) =
+        when (symbol) {
+          is ClassSymbol -> return false
+          is MemberSymbol -> {
+            Pair(symbol.clazz, symbol.member)
+          }
+        }
+    val clazz = find { it is ClassSymbol && it.clazz == className } as? ClassSymbol?
+    if (clazz == null) {
+      return false
+    }
+
+    for (interfaceName in clazz.interfaces) {
+      // createMethod is the same as createField, except it allows parentheses
+      val interfaceSymbol = Symbol.createMethod(interfaceName, memberName)
+      if (contains(interfaceSymbol)) {
+        return true
+      }
+    }
+
+    if (clazz.superclass != null) {
+      val superclassSymbol = Symbol.createMethod(clazz.superclass, memberName)
+      return containsSymbol(superclassSymbol)
+    }
+
+    return false
+  }
+
+  /**
+   * Returns whether the given flag is enabled for the given symbol.
+   *
+   * A flagged member inside a flagged class is ignored (and the flag value considered disabled) if
+   * the class' flag is disabled.
+   *
+   * @param symbol the symbol to check
+   * @param flag the flag to check
+   * @return whether the flag is enabled for the given symbol
+   */
+  fun isFlagEnabledForSymbol(symbol: Symbol, flag: Flag): Boolean {
+    when (symbol) {
+      is ClassSymbol -> return flags.getValue(flag)
+      is MemberSymbol -> {
+        val memberFlagValue = flags.getValue(flag)
+        if (!memberFlagValue) {
+          return false
+        }
+        // Special case: if the MemberSymbol's flag is enabled, but the outer
+        // ClassSymbol's flag (if the class is flagged) is disabled, consider
+        // the MemberSymbol's flag as disabled:
+        //
+        //   @FlaggedApi(this-flag-is-disabled) Clazz {
+        //       @FlaggedApi(this-flag-is-enabled) method(); // The Clazz' flag "wins"
+        //   }
+        //
+        // Note: the current implementation does not handle nested classes.
+        val classFlagValue =
+            flaggedSymbolsInSource
+                .find { it.first.toPrettyString() == symbol.clazz }
+                ?.let { flags.getValue(it.second) }
+                ?: true
+        return classFlagValue
+      }
+    }
+  }
+
+  val errors = mutableSetOf<ApiError>()
+  for ((symbol, flag) in flaggedSymbolsInSource) {
+    try {
+      if (isFlagEnabledForSymbol(symbol, flag)) {
+        if (!symbolsInOutput.containsSymbol(symbol)) {
+          errors.add(EnabledFlaggedApiNotPresentError(symbol, flag))
+        }
+      } else {
+        if (symbolsInOutput.containsSymbol(symbol)) {
+          errors.add(DisabledFlaggedApiIsPresentError(symbol, flag))
+        }
+      }
+    } catch (e: NoSuchElementException) {
+      errors.add(UnknownFlagError(symbol, flag))
+    }
+  }
+  return errors
+}
+
+/**
+ * Collect all known info about all @FlaggedApi annotated APIs.
+ *
+ * Each API will be represented as a String, in the format
+ * <pre>
+ *   &lt;fully-qualified-name-of-flag&gt; &lt;state-of-flag&gt; &lt;API&gt;
+ * </pre>
+ *
+ * @param flaggedSymbolsInSource the set of symbols that are flagged in the source code
+ * @param flags the set of flags and their values
+ * @return a list of Strings encoding API data using the format described above, sorted
+ *   alphabetically
+ */
+internal fun listFlaggedApis(
+    flaggedSymbolsInSource: Set<Pair<Symbol, Flag>>,
+    flags: Map<Flag, Boolean>
+): List<String> {
+  val output = mutableListOf<String>()
+  for ((symbol, flag) in flaggedSymbolsInSource) {
+    val flagState =
+        when (flags.get(flag)) {
+          true -> "ENABLED"
+          false -> "DISABLED"
+          null -> "UNKNOWN"
+        }
+    output.add("$flag $flagState ${symbol.toPrettyString()}")
+  }
+  output.sort()
+  return output
+}
+
+fun main(args: Array<String>) = MainCommand().subcommands(CheckCommand(), ListCommand()).main(args)
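
As a usage sketch, the two subcommands above consume the inputs described by the
--help texts; the launcher name check-flagged-apis and the file names are
assumptions for illustration only, not defined in this change:

    # Verify that flagged APIs in the signature file match the built artifact.
    check-flagged-apis check \
        --api-signature current.txt \
        --flag-values parsed_flags.pb \
        --api-versions api-versions.xml

    # List every flagged API, then keep only the enabled ones.
    check-flagged-apis list \
        --api-signature current.txt \
        --flag-values parsed_flags.pb | grep ENABLED
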
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 51ec23b..1fd7950 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -67,7 +67,7 @@
 
 ELF = collections.namedtuple(
   'ELF',
-  ('dt_soname', 'dt_needed', 'imported', 'exported', 'header'))
+  ('alignments', 'dt_soname', 'dt_needed', 'imported', 'exported', 'header'))
 
 
 def _get_os_name():
@@ -195,7 +195,8 @@
   @classmethod
   def _read_llvm_readobj(cls, elf_file_path, header, llvm_readobj):
     """Run llvm-readobj and parse the output."""
-    cmd = [llvm_readobj, '--dynamic-table', '--dyn-symbols', elf_file_path]
+    cmd = [llvm_readobj, '--program-headers', '--dynamic-table',
+           '--dyn-symbols', elf_file_path]
     out = subprocess.check_output(cmd, text=True)
     lines = out.splitlines()
     return cls._parse_llvm_readobj(elf_file_path, header, lines)
@@ -205,9 +206,56 @@
   def _parse_llvm_readobj(cls, elf_file_path, header, lines):
     """Parse the output of llvm-readobj."""
     lines_it = iter(lines)
+    alignments = cls._parse_program_headers(lines_it)
     dt_soname, dt_needed = cls._parse_dynamic_table(elf_file_path, lines_it)
     imported, exported = cls._parse_dynamic_symbols(lines_it)
-    return ELF(dt_soname, dt_needed, imported, exported, header)
+    return ELF(alignments, dt_soname, dt_needed, imported, exported, header)
+
+
+  _PROGRAM_HEADERS_START_PATTERN = 'ProgramHeaders ['
+  _PROGRAM_HEADERS_END_PATTERN = ']'
+  _PROGRAM_HEADER_START_PATTERN = 'ProgramHeader {'
+  _PROGRAM_HEADER_TYPE_PATTERN = re.compile('^\\s+Type:\\s+(.*)$')
+  _PROGRAM_HEADER_ALIGN_PATTERN = re.compile('^\\s+Alignment:\\s+(.*)$')
+  _PROGRAM_HEADER_END_PATTERN = '}'
+
+
+  @classmethod
+  def _parse_program_headers(cls, lines_it):
+    """Parse the dynamic table section."""
+    alignments = []
+
+    if not cls._find_prefix(cls._PROGRAM_HEADERS_START_PATTERN, lines_it):
+      raise ELFError()
+
+    for line in lines_it:
+      # Parse each program header
+      if line.strip() == cls._PROGRAM_HEADER_START_PATTERN:
+        p_align = None
+        p_type = None
+        for line in lines_it:
+          if line.strip() == cls._PROGRAM_HEADER_END_PATTERN:
+            if not p_align:
+              raise ELFError("Could not parse alignment from program header!")
+            if not p_type:
+              raise ELFError("Could not parse type from program header!")
+
+            if p_type.startswith("PT_LOAD "):
+              alignments.append(int(p_align))
+            break
+
+          match = cls._PROGRAM_HEADER_TYPE_PATTERN.match(line)
+          if match:
+            p_type = match.group(1)
+
+          match = cls._PROGRAM_HEADER_ALIGN_PATTERN.match(line)
+          if match:
+            p_align = match.group(1)
+
+      if line == cls._PROGRAM_HEADERS_END_PATTERN:
+        break
+
+    return alignments
 
 
   _DYNAMIC_SECTION_START_PATTERN = 'DynamicSection ['
@@ -434,6 +482,24 @@
 
       sys.exit(2)
 
+  def check_max_page_size(self, max_page_size):
+    for alignment in self._file_under_test.alignments:
+      if alignment % max_page_size != 0:
+        self._error(f'Load segment has alignment {alignment} but '
+                    f'{max_page_size} required.')
+        self._note()
+        self._note('Fix suggestions:')
+        self._note(f'  use linker flag "-Wl,-z,max-page-size={max_page_size}" '
+                   f'when compiling this lib')
+        self._note()
+        self._note('If the fix above doesn\'t work, bypass this check with:')
+        self._note('  Android.bp: ignore_max_page_size: true,')
+        self._note('  Android.mk: LOCAL_IGNORE_MAX_PAGE_SIZE := true')
+        self._note('  Device mk: PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE := false')
+
+        # TODO: instead of exiting immediately, we may want to collect the
+        # errors from all checks and emit them at once
+        sys.exit(2)
 
   @staticmethod
   def _find_symbol(lib, name, version):
@@ -514,6 +580,8 @@
                       help='Ignore the input file with unknown machine ID')
   parser.add_argument('--allow-undefined-symbols', action='store_true',
                       help='Ignore unresolved undefined symbols')
+  parser.add_argument('--max-page-size', action='store', type=int,
+                      help='Required page size alignment support')
 
   # Other options
   parser.add_argument('--llvm-readobj',
@@ -542,6 +610,9 @@
 
   checker.check_dt_needed(args.system_shared_lib)
 
+  if args.max_page_size:
+    checker.check_max_page_size(args.max_page_size)
+
   if not args.allow_undefined_symbols:
     checker.check_symbols()
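
For reference, the LOAD-segment alignment that the new --max-page-size option
validates can also be inspected by hand; the library name and the 16 KB page
size below are illustrative values, not part of this change:

    # Each PT_LOAD segment's Align value must be a multiple of the required
    # page size for check_max_page_size to pass.
    llvm-readelf --program-headers libexample.so

    # The checker's suggested fix when it is not:
    #   -Wl,-z,max-page-size=16384
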
 
diff --git a/tools/compliance/go.mod b/tools/compliance/go.mod
index bd04077..532efd4 100644
--- a/tools/compliance/go.mod
+++ b/tools/compliance/go.mod
@@ -1,29 +1,11 @@
+go 1.22
+
 module android/soong/tools/compliance
 
-require google.golang.org/protobuf v0.0.0
-
-replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
-
 require (
-	android/soong v0.0.0
 	github.com/google/blueprint v0.0.0
+	android/soong v0.0.0
+	google.golang.org/protobuf v0.0.0
 	github.com/spdx/tools-golang v0.0.0
+	github.com/google/go-cmp v0.0.0
 )
-
-replace github.com/spdx/tools-golang v0.0.0 => ../../../../external/spdx-tools
-
-require golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
-
-replace android/soong v0.0.0 => ../../../soong
-
-replace github.com/google/blueprint => ../../../blueprint
-
-// Indirect deps from golang-protobuf
-exclude github.com/golang/protobuf v1.5.0
-
-replace github.com/google/go-cmp v0.5.5 => ../../../../external/go-cmp
-
-// Indirect dep from go-cmp
-exclude golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543
-
-go 1.21
diff --git a/tools/compliance/go.sum b/tools/compliance/go.sum
deleted file mode 100644
index cbe76d9..0000000
--- a/tools/compliance/go.sum
+++ /dev/null
@@ -1,2 +0,0 @@
-golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
-golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
diff --git a/tools/compliance/go.work b/tools/compliance/go.work
new file mode 100644
index 0000000..a24d2ea
--- /dev/null
+++ b/tools/compliance/go.work
@@ -0,0 +1,18 @@
+go 1.22
+
+use (
+	.
+	../../../../build/blueprint
+	../../../../build/soong
+	../../../../external/go-cmp
+	../../../../external/golang-protobuf
+	../../../../external/spdx-tools
+)
+
+replace (
+	github.com/google/blueprint v0.0.0 => ../../../../build/blueprint
+	android/soong v0.0.0 => ../../../../build/soong
+	github.com/google/go-cmp v0.0.0 => ../../../../external/go-cmp
+	google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
+	github.com/spdx/tools-golang v0.0.0 => ../../../../external/spdx-tools
+)
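
With this workspace file, Go tooling run from tools/compliance should resolve
the sibling modules locally and no longer needs a go.sum; a minimal smoke test
(commands assumed to be run from the source tree root) could be:

    cd build/make/tools/compliance
    go build ./...
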
diff --git a/tools/envsetup/run_envsetup_tests b/tools/envsetup/run_envsetup_tests
new file mode 100755
index 0000000..5977448
--- /dev/null
+++ b/tools/envsetup/run_envsetup_tests
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import pathlib
+import subprocess
+import sys
+
+SOURCE_ENVSETUP="source build/make/envsetup.sh && "
+
+def update_display():
+    sys.stderr.write("passed\n")
+
+def go_to_root():
+    while True:
+        if os.path.exists("build/make/envsetup.sh"):
+            return
+        if os.getcwd() == "/":
+            sys.stderr.write("Can't find root of the source tree\n");
+            print("\nFAILED")
+            sys.exit(1)
+        os.chdir("..")
+
+def is_test(name, thing):
+    if not callable(thing):
+        return False
+    if name == "test":
+        return False
+    return name.startswith("test")
+
+
+def test(shell, command, expected_return, expected_stdout, expected_stderr, expected_env):
+    command += "; _rc=$?"
+    for env in expected_env.keys():
+        command += f"; echo ENV: {env}=\\\"${env}\\\""
+    command += "; exit $_rc"
+
+    cmd = [shell, "-c", command]
+    result = subprocess.run(cmd, capture_output=True, text=True)
+
+    status = True
+
+    if result.returncode != expected_return:
+        print()
+        print(f"Expected return code: {expected_return}")
+        print(f"Actual return code:   {result.returncode}")
+        status = False
+
+    printed_stdout = False
+    if expected_stdout and expected_stdout not in result.stdout:
+        print()
+        print(f"Expected stdout to contain:\n{expected_stdout}")
+        print(f"\nActual stdout:\n{result.stdout}")
+        printed_stdout = True
+        status = False
+
+    if expected_stderr and expected_stderr not in result.stderr:
+        print()
+        print(f"Expected stderr to contain:\n{expected_stderr}")
+        print(f"\nActual stderr:\n{result.stderr}")
+        status = False
+
+    env_failure = False
+    for k, v in expected_env.items():
+        if f"{k}=\"{v}\"" not in result.stdout:
+            print()
+            print(f"Expected environment variable {k} to be: {v} --- {k}=\"{v}\"")
+            env_failure = True
+            status = False
+
+    if env_failure and not printed_stdout:
+        print()
+        print("See stdout:")
+        print(result.stdout)
+
+    if not status:
+        print()
+        print("Command to reproduce:")
+        print(command)
+        print()
+
+    return status
+
+NO_LUNCH = {
+    "TARGET_PRODUCT": "",
+    "TARGET_RELEASE": "",
+    "TARGET_BUILD_VARIANT": "",
+}
+
+def test_invalid_lunch_target(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch invalid-trunk_staging-eng",
+         expected_return=1, expected_stdout=None,
+         expected_stderr="Cannot locate config makefile for product",
+         expected_env=NO_LUNCH)
+
+
+def test_aosp_arm(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch aosp_arm-trunk_staging-eng",
+         expected_return=0, expected_stdout=None, expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "eng",
+        })
+
+
+def test_lunch2_empty(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2",
+         expected_return=1, expected_stdout=None,
+         expected_stderr="No target specified. See lunch --help",
+         expected_env=NO_LUNCH)
+
+def test_lunch2_four_params(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2 a b c d",
+         expected_return=1, expected_stdout=None,
+         expected_stderr="Too many parameters given. See lunch --help",
+         expected_env=NO_LUNCH)
+
+def test_lunch2_aosp_arm(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2 aosp_arm",
+         expected_return=0, expected_stdout="=========", expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "eng",
+        })
+
+def test_lunch2_aosp_arm_trunk_staging(shell):
+    # Somewhat unfortunate because trunk_staging is the only config in
+    # aosp so we can't really test that this isn't just getting the default
+    return test(shell, SOURCE_ENVSETUP + "lunch2 aosp_arm trunk_staging",
+         expected_return=0, expected_stdout="=========", expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "eng",
+        })
+
+def test_lunch2_aosp_arm_trunk_staging_userdebug(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2 aosp_arm trunk_staging userdebug",
+         expected_return=0, expected_stdout="=========", expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "userdebug",
+        })
+
+def test_list_products(shell):
+    return test(shell, "build/soong/bin/list_products",
+         expected_return=0, expected_stdout="aosp_arm", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_releases_param(shell):
+    return test(shell, "build/soong/bin/list_releases aosp_arm",
+         expected_return=0, expected_stdout="trunk_staging", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_releases_env(shell):
+    return test(shell, "TARGET_PRODUCT=aosp_arm build/soong/bin/list_releases",
+         expected_return=0, expected_stdout="trunk_staging", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_releases_no_product(shell):
+    return test(shell, "build/soong/bin/list_releases",
+         expected_return=1, expected_stdout=None, expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_variants(shell):
+    return test(shell, "build/soong/bin/list_variants",
+         expected_return=0, expected_stdout="userdebug", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+
+def test_get_build_var_in_path(shell):
+    return test(shell, SOURCE_ENVSETUP + "which get_build_var ",
+         expected_return=0, expected_stdout="soong/bin", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+
+
+TESTS=sorted([(name, thing) for name, thing in locals().items() if is_test(name, thing)])
+
+def main():
+    if any([x.endswith("/soong/bin") for x in os.getenv("PATH").split(":")]):
+        sys.stderr.write("run_envsetup_tests must be run in a shell that has not sourced"
+                + " envsetup.sh\n\nFAILED\n")
+        return 1
+
+    go_to_root()
+
+    tests = TESTS
+    if len(sys.argv) > 1:
+        tests = [(name, func) for name, func in tests if name in sys.argv]
+
+    shells = ["/usr/bin/bash", "/usr/bin/zsh"]
+    total_count = len(tests) * len(shells)
+    index = 1
+    failed_tests = 0
+
+    for name, func in tests:
+        for shell in shells:
+            sys.stdout.write(f"\33[2K\r{index} of {total_count}: {name} in {shell}")
+            passed = func(shell)
+            if not passed:
+                failed_tests += 1
+            index += 1
+
+    if failed_tests > 0:
+        print(f"\n\nFAILED: {failed_tests} of {total_count}")
+        return 1
+    else:
+        print("\n\nSUCCESS")
+        return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
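
A short usage sketch: the suite has to be started from a shell that has not
sourced envsetup.sh, may be started from any directory inside the tree (it
locates the root itself), and accepts test names as arguments to run a subset:

    # Run every test in both bash and zsh.
    build/make/tools/envsetup/run_envsetup_tests

    # Run only selected tests.
    build/make/tools/envsetup/run_envsetup_tests test_lunch2_aosp_arm test_list_products
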
diff --git a/tools/envsetup/spam_for_lunch b/tools/envsetup/spam_for_lunch
new file mode 100755
index 0000000..2e150a6
--- /dev/null
+++ b/tools/envsetup/spam_for_lunch
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This ad is kind of big, so only show it if this appears to be a clean build.
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../shell_utils.sh
+if [[ ! -e $(getoutdir)/soong/build.${TARGET_PRODUCT}.ninja ]]; then
+  echo
+  echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+  echo "  Wondering whether to use user, userdebug or eng?"
+  echo
+  echo "  user        The builds that ship to users. Reduced debugability."
+  echo "  userdebug   High fidelity to user builds but with some debugging options"
+  echo "              enabled. Best suited for performance testing or day-to-day use"
+  echo "              with debugging enabled."
+  echo "  eng         More debugging options enabled and faster build times, but"
+  echo "              runtime performance tradeoffs. Best suited for day-to-day"
+  echo "              local development when not doing performance testing."
+  echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+  echo
+fi
+
diff --git a/tools/finalization/README.md b/tools/finalization/README.md
index cc97d1f..d0aed69 100644
--- a/tools/finalization/README.md
+++ b/tools/finalization/README.md
@@ -3,18 +3,19 @@
 
 ## Automation:
 1. [Environment setup](./environment.sh). Set values for various finalization constants.
-2. [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh). Prepare the branch for SDK release. SDK contains Android Java APIs and other stable APIs. Commonly referred as a 1st step.
-3. [Finalize Android](./finalize-sdk-rel.sh). Mark branch as "REL", i.e. prepares for Android release. Any signed build containing these changes will be considered an official Android Release. Referred as a 2nd finalization step.
-4. [Finalize SDK and submit](./step-1.sh). Do [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) step, create CLs, organize them into topic and send to Gerrit.
-  a. [Update SDK and submit](./update-step-1.sh). Same as above, but updates the existings CLs.
-5. [Finalize Android and submit](./step-2.sh). Do [Finalize Android](./finalize-sdk-rel.sh) step, create  CLs, organize them into topic and send to Gerrit.
-  a. [Update Android and submit](./update-step-2.sh). Same as above, but updates the existings CLs.
+1. [Finalize VINTF](./finalize-vintf-resources.sh). Prepare the branch for the VINTF release.
+1. [Finalize SDK](./finalize-sdk-resources.sh). Prepare the branch for the SDK release. The SDK contains Android Java APIs and other stable APIs. Commonly referred to as the 1st step.
+1. [Finalize Android](./finalize-sdk-rel.sh). Mark the branch as "REL", i.e. prepare it for the Android release. Any signed build containing these changes will be considered an official Android Release. Referred to as the 2nd finalization step.
+1. [Finalize VINTF and submit](./step-0.sh). Do the Finalize VINTF step, create CLs, organize them into a topic and send them to Gerrit.
+1. [Finalize SDK and submit](./step-1.sh). Do the Finalize SDK step, create CLs, organize them into a topic and send them to Gerrit.
+1. [Finalize Android and submit](./step-2.sh). Do the [Finalize Android](./finalize-sdk-rel.sh) step, create CLs, organize them into a topic and send them to Gerrit.
 
 ## CI:
 Performed in build targets in Finalization branches.
-1. [Finalization Step 1, git_main-fina-1-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-1-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
-3. [Finalization Step 2, git_main-fina-2-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-2-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
-5. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
+1. [Finalization Step 0, git_main-fina-0-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-0-release). Test Finalize VINTF.
+1. [Finalization Step 1, git_main-fina-1-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-1-release). Test Finalize VINTF, Finalize SDK.
+1. [Finalization Step 2, git_main-fina-2-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-2-release). Test Finalize VINTF, Finalize SDK, and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
+1. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
 
 ## Utility:
 [Full cleanup](./cleanup.sh). Remove all local changes and switch each project into head-less state. This is the best state to sync/rebase/finalize the branch.
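
For illustration, the new VINTF step can be exercised like the existing ones:
build-step-0.sh only performs the finalization when FINAL_STATE is
'unfinalized', while step-0.sh also commits the per-project changes and uploads
them to Gerrit.

    # Local run of the VINTF finalization; no CLs are created.
    ./build/make/tools/finalization/build-step-0.sh

    # Full step 0: finalize VINTF, commit per project, and upload to Gerrit.
    ./build/make/tools/finalization/step-0.sh
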
diff --git a/tools/finalization/build-step-0.sh b/tools/finalization/build-step-0.sh
new file mode 100755
index 0000000..f81b720
--- /dev/null
+++ b/tools/finalization/build-step-0.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Copyright 2024 Google Inc. All rights reserved.
+
+set -ex
+
+function finalize_main_step0() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # VINTF finalization
+        source $top/build/make/tools/finalization/finalize-vintf-resources.sh
+    fi;
+}
+
+finalize_main_step0
+
diff --git a/tools/finalization/build-step-1-and-2.sh b/tools/finalization/build-step-1-and-2.sh
index 84e2782..ca22678 100755
--- a/tools/finalization/build-step-1-and-2.sh
+++ b/tools/finalization/build-step-1-and-2.sh
@@ -7,11 +7,16 @@
     source $top/build/make/tools/finalization/environment.sh
 
     if [ "$FINAL_STATE" = "unfinalized" ] ; then
-        # SDK codename -> int
-        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+        # VINTF finalization
+        source $top/build/make/tools/finalization/finalize-vintf-resources.sh
     fi;
 
-    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "sdk" ] ; then
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "vintf" ] ; then
+        # SDK codename -> int
+        source $top/build/make/tools/finalization/finalize-sdk-resources.sh
+    fi;
+
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "vintf" ] || [ "$FINAL_STATE" = "sdk" ] ; then
         # ADB, Platform/Mainline SDKs build and move to prebuilts
         source $top/build/make/tools/finalization/localonly-steps.sh
 
diff --git a/tools/finalization/build-step-1.sh b/tools/finalization/build-step-1.sh
index 3d5eadb..7294698 100755
--- a/tools/finalization/build-step-1.sh
+++ b/tools/finalization/build-step-1.sh
@@ -7,8 +7,13 @@
     source $top/build/make/tools/finalization/environment.sh
 
     if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # VINTF finalization
+        source $top/build/make/tools/finalization/finalize-vintf-resources.sh
+    fi;
+
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "vintf" ] ; then
         # Build finalization artifacts.
-        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+        source $top/build/make/tools/finalization/finalize-sdk-resources.sh
     fi;
 }
 
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
index d9c42c8..7961e8b 100755
--- a/tools/finalization/environment.sh
+++ b/tools/finalization/environment.sh
@@ -19,8 +19,14 @@
 
 # Options:
 # 'unfinalized' - branch is in development state,
-# 'sdk' - SDK/API is finalized
+# 'vintf' - VINTF is finalized
+# 'sdk' - VINTF and SDK/API are finalized
 # 'rel' - branch is finalized, switched to REL
-export FINAL_STATE='unfinalized'
+export FINAL_STATE='vintf'
 
-export BUILD_FROM_SOURCE_STUB=true
\ No newline at end of file
+export BUILD_FROM_SOURCE_STUB=true
+
+# FINAL versions for VINTF
+# TODO(b/323985297): The version must match with that from the release configuration.
+# Instead of hardcoding the version here, read it from a release configuration.
+export FINAL_BOARD_API_LEVEL='202404'
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
index 245305b..59fe28c 100755
--- a/tools/finalization/finalize-sdk-rel.sh
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -40,9 +40,6 @@
     fi
     git -C "$top/cts" mv hostsidetests/theme/assets/${FINAL_PLATFORM_CODENAME} hostsidetests/theme/assets/${FINAL_PLATFORM_SDK_VERSION}
 
-    # system/sepolicy
-    system/sepolicy/tools/finalize-sdk-rel.sh "$top" "$FINAL_PLATFORM_SDK_VERSION"
-
     # prebuilts/abi-dumps/platform
     mkdir -p "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
     cp -r "$top/prebuilts/abi-dumps/platform/current/64/" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION/"
@@ -52,10 +49,6 @@
     # prebuilts/abi-dumps/ndk
     #mkdir -p "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION"
     #cp -r "$top/prebuilts/abi-dumps/ndk/current/64/" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION/"
-    #if [ "$FINAL_STATE" != "sdk" ] || [ "$FINAL_PLATFORM_CODENAME" == "$CURRENT_PLATFORM_CODENAME" ] ; then
-        # prebuilts/abi-dumps/vndk
-        #mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
-    #fi;
 }
 
 finalize_sdk_rel
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-sdk-resources.sh
similarity index 89%
rename from tools/finalization/finalize-aidl-vndk-sdk-resources.sh
rename to tools/finalization/finalize-sdk-resources.sh
index 671b036..596f803 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-sdk-resources.sh
@@ -96,7 +96,7 @@
         $modules_arg
 }
 
-function finalize_aidl_vndk_sdk_resources() {
+function finalize_sdk_resources() {
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
@@ -111,13 +111,6 @@
     # bionic/NDK
     finalize_bionic_ndk
 
-    # pre-finalization build target (trunk)
-    local aidl_m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_RELEASE=trunk TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
-    AIDL_TRANSITIVE_FREEZE=true $aidl_m aidl-freeze-api
-
-    # TODO(b/309880485)
-    # Add back create_reference_dumps and $top/build/make/target/product/gsi/current.txt
-
     # Finalize SDK
 
     # frameworks/libs/modules-utils
@@ -129,14 +122,10 @@
     local build_tools_source="$top/development/sdk/build_tools_source.prop_template"
     sed -i -e 's/Pkg\.Revision.*/Pkg\.Revision=${PLATFORM_SDK_VERSION}.0.0/g' $build_tools_source
 
-    # build/make
-    sed -i -e "s/sepolicy_major_vers := .*/sepolicy_major_vers := ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/build/make/core/config.mk"
-    cp "$top/build/make/target/product/gsi/current.txt" "$top/build/make/target/product/gsi/$FINAL_PLATFORM_SDK_VERSION.txt"
-
-    # build/bazel
+    # build/soong
     local codename_version="\"${FINAL_PLATFORM_CODENAME}\": ${FINAL_PLATFORM_SDK_VERSION}"
-    if ! grep -q "$codename_version" "$top/build/bazel/rules/common/api_constants.bzl" ; then
-        sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\    $codename_version," "$top/build/bazel/rules/common/api_constants.bzl"
+    if ! grep -q "$codename_version" "$top/build/soong/android/api_levels.go" ; then
+        sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\\t\t$codename_version," "$top/build/soong/android/api_levels.go"
     fi
 
     # cts
@@ -179,5 +168,5 @@
     $sdk_m update-api
 }
 
-finalize_aidl_vndk_sdk_resources
+finalize_sdk_resources
 
diff --git a/tools/finalization/finalize-vintf-resources.sh b/tools/finalization/finalize-vintf-resources.sh
new file mode 100755
index 0000000..a55d8e1
--- /dev/null
+++ b/tools/finalization/finalize-vintf-resources.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_vintf_resources() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+    # environment needed to build dependencies and run scripts
+    # These should remain the same for all steps here to speed up build time
+    export ANDROID_BUILD_TOP="$top"
+    export ANDROID_HOST_OUT="$ANDROID_BUILD_TOP/out/host/linux-x86"
+    export ANDROID_PRODUCT_OUT="$ANDROID_BUILD_TOP/out/target/product/generic_arm64"
+    export PATH="$PATH:$ANDROID_HOST_OUT/bin/"
+    export TARGET_BUILD_VARIANT=userdebug
+    export DIST_DIR=out/dist
+    export TARGET_RELEASE=fina_0
+    export TARGET_PRODUCT=aosp_arm64
+
+    # TODO(b/314010764): finalize LL_NDK
+
+    # system/sepolicy
+    "$top/system/sepolicy/tools/finalize-vintf-resources.sh" "$top" "$FINAL_BOARD_API_LEVEL"
+
+    create_new_compat_matrix_and_kernel_configs
+
+    # pre-finalization build target (trunk)
+    local aidl_m="$top/build/soong/soong_ui.bash --make-mode"
+    AIDL_TRANSITIVE_FREEZE=true $aidl_m aidl-freeze-api
+}
+
+function create_new_compat_matrix_and_kernel_configs() {
+    # The compatibility matrix versions are bumped during vFRC
+    # These will change every time we have a new vFRC
+    local CURRENT_COMPATIBILITY_MATRIX_LEVEL='202404'
+    local NEXT_COMPATIBILITY_MATRIX_LEVEL='202504'
+    # The kernel configs need the letter of the Android release
+    local CURRENT_RELEASE_LETTER='v'
+    local NEXT_RELEASE_LETTER='w'
+
+
+    # build the targets required before touching the Android.bp/Android.mk files
+    local build_cmd="$top/build/soong/soong_ui.bash --make-mode"
+    $build_cmd bpmodify
+
+    "$top/prebuilts/build-tools/path/linux-x86/python3" "$top/hardware/interfaces/compatibility_matrices/bump.py" "$CURRENT_COMPATIBILITY_MATRIX_LEVEL" "$NEXT_COMPATIBILITY_MATRIX_LEVEL" "$CURRENT_RELEASE_LETTER" "$NEXT_RELEASE_LETTER"
+
+    # Freeze the current framework manifest file. This relies on the
+    # aosp_cf_x86_64-trunk_staging build target to get the right manifest
+    # fragments installed.
+    "$top/system/libhidl/vintfdata/freeze.sh" "$CURRENT_COMPATIBILITY_MATRIX_LEVEL"
+}
+
+function freeze_framework_manifest() {
+   ANDROID_PRODUCT_OUT=~/workspace/internal/main/out/target/product/vsoc_x86 ANDROID_BUILD_TOP=~/workspace/internal/main ANDROID_HOST_OUT=~/workspace/internal/main/out/host/linux-x86 ./freeze.sh 202404
+
+}
+
+
+finalize_vintf_resources
+
diff --git a/tools/finalization/step-0.sh b/tools/finalization/step-0.sh
new file mode 100755
index 0000000..e61c644
--- /dev/null
+++ b/tools/finalization/step-0.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+# Copyright 2024 Google Inc. All rights reserved.
+
+# Script to perform a 0th step of Android Finalization: VINTF finalization, create CLs and upload to Gerrit.
+
+set -ex
+
+function commit_step_0_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            repo start "VINTF-$FINAL_BOARD_API_LEVEL-Finalization" ;
+            git add -A . ;
+            git commit -m "Vendor API level $FINAL_BOARD_API_LEVEL is now frozen" \
+                       -m "Ignore-AOSP-First: VINTF $FINAL_BOARD_API_LEVEL Finalization
+Bug: $FINAL_BUG_ID
+Test: build";
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function finalize_step_0_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    source $top/build/make/tools/finalization/finalize-vintf-resources.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    commit_step_0_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_step_0_main
diff --git a/tools/finalization/step-1.sh b/tools/finalization/step-1.sh
index 0dd4b3a..0e483d5 100755
--- a/tools/finalization/step-1.sh
+++ b/tools/finalization/step-1.sh
@@ -21,10 +21,9 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
 
-    # vndk etc finalization
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    source $top/build/make/tools/finalization/finalize-sdk-resources.sh
 
     # move all changes to finalization branch/topic and upload to gerrit
     commit_step_1_changes
diff --git a/tools/finalization/step-2.sh b/tools/finalization/step-2.sh
index d0b24ae..356cad0 100755
--- a/tools/finalization/step-2.sh
+++ b/tools/finalization/step-2.sh
@@ -19,7 +19,7 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
 
     # prebuilts etc
     source $top/build/make/tools/finalization/finalize-sdk-rel.sh
diff --git a/tools/finalization/update-step-1.sh b/tools/finalization/update-step-1.sh
deleted file mode 100755
index b469988..0000000
--- a/tools/finalization/update-step-1.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-# Script to perform a 1st step of Android Finalization: API/SDK finalization, update CLs and upload to Gerrit.
-
-# WIP, does not work yet
-exit 10
-
-set -ex
-
-function update_step_1_changes() {
-    set +e
-    repo forall -c '\
-        if [[ $(git status --short) ]]; then
-            git stash -u ;
-            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
-            git stash pop ;
-            git add -A . ;
-            git commit --amend --no-edit ;
-            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
-        fi'
-}
-
-function update_step_1_main() {
-    local top="$(dirname "$0")"/../../../..
-    source $top/build/make/tools/finalization/environment.sh
-
-
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # vndk etc finalization
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
-
-    # update existing CLs and upload to gerrit
-    update_step_1_changes
-
-    # build to confirm everything is OK
-    AIDL_FROZEN_REL=true $m
-}
-
-update_step_1_main
diff --git a/tools/finalization/update-step-2.sh b/tools/finalization/update-step-2.sh
deleted file mode 100755
index d2b8592..0000000
--- a/tools/finalization/update-step-2.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-# Script to perform a 2nd step of Android Finalization: REL finalization, create CLs and upload to Gerrit.
-
-# WIP, does not work yet
-exit 10
-
-set -ex
-
-function update_step_2_changes() {
-    set +e
-    repo forall -c '\
-        if [[ $(git status --short) ]]; then
-            git stash -u ;
-            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ;
-            git stash pop ;
-            git add -A . ;
-            git commit --amend --no-edit ;
-            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
-        fi'
-}
-
-function update_step_2_main() {
-    local top="$(dirname "$0")"/../../../..
-    source $top/build/make/tools/finalization/environment.sh
-
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # prebuilts etc
-    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
-
-    # move all changes to finalization branch/topic and upload to gerrit
-    update_step_2_changes
-
-    # build to confirm everything is OK
-    AIDL_FROZEN_REL=true $m
-}
-
-update_step_2_main
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index bd9543a..6aa5289 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -258,3 +258,173 @@
     system_ext_specific: true,
     src: ":group_gen_system_ext",
 }
+
+fs_config_cmd = "$(location fs_config_generator) fsconfig " +
+    "--aid-header $(location :android_filesystem_config_header) " +
+    "--capability-header $(location :linux_capability_header) " +
+    "--out_file $(out) "
+fs_config_cmd_dirs = fs_config_cmd + "--dirs "
+fs_config_cmd_files = fs_config_cmd + "--files "
+
+genrule_defaults {
+    name: "fs_config_defaults",
+    tools: ["fs_config_generator"],
+    srcs: [
+        ":android_filesystem_config_header",
+        ":linux_capability_header",
+        ":target_fs_config_gen",
+    ],
+    out: ["out"],
+}
+
+genrule {
+    name: "fs_config_dirs_system_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition system " +
+        "--all-partitions vendor,oem,odm,vendor_dlkm,odm_dlkm,system_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_system",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_system_gen",
+}
+
+genrule {
+    name: "fs_config_files_system_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition system " +
+        "--all-partitions vendor,oem,odm,vendor_dlkm,odm_dlkm,system_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_system",
+    filename: "fs_config_files",
+    src: ":fs_config_files_system_gen",
+}
+
+genrule {
+    name: "fs_config_dirs_system_ext_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition system_ext " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_system_ext",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_system_ext_gen",
+    system_ext_specific: true,
+}
+
+genrule {
+    name: "fs_config_files_system_ext_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition system_ext " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_system_ext",
+    filename: "fs_config_files",
+    src: ":fs_config_files_system_ext_gen",
+    system_ext_specific: true,
+}
+
+genrule {
+    name: "fs_config_dirs_product_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition product " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_product",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_product_gen",
+    product_specific: true,
+}
+
+genrule {
+    name: "fs_config_files_product_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition product " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_product",
+    filename: "fs_config_files",
+    src: ":fs_config_files_product_gen",
+    product_specific: true,
+}
+
+genrule {
+    name: "fs_config_dirs_vendor_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition vendor " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_vendor",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_vendor_gen",
+    vendor: true,
+}
+
+genrule {
+    name: "fs_config_files_vendor_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition vendor " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_vendor",
+    filename: "fs_config_files",
+    src: ":fs_config_files_vendor_gen",
+    vendor: true,
+}
+
+genrule {
+    name: "fs_config_dirs_odm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition odm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_odm",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_odm_gen",
+    device_specific: true,
+}
+
+genrule {
+    name: "fs_config_files_odm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition odm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_odm",
+    filename: "fs_config_files",
+    src: ":fs_config_files_odm_gen",
+    device_specific: true,
+}
+
+// TODO(jiyong): add fs_config for oem, system_dlkm, vendor_dlkm, odm_dlkm partitions
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index c36c3aa..e4c3626 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -24,23 +24,8 @@
 $(error Using $(TARGET_DEVICE_DIR)/android_filesystem_config.h is deprecated, please use TARGET_FS_CONFIG_GEN instead)
 endif
 
-system_android_filesystem_config := system/core/libcutils/include/private/android_filesystem_config.h
-system_capability_header := bionic/libc/kernel/uapi/linux/capability.h
-
-# Use snapshots if exist
-vendor_android_filesystem_config := $(strip \
-  $(if $(filter-out current,$(BOARD_VNDK_VERSION)), \
-    $(SOONG_VENDOR_$(BOARD_VNDK_VERSION)_SNAPSHOT_DIR)/include/$(system_android_filesystem_config)))
-ifeq (,$(wildcard $(vendor_android_filesystem_config)))
-vendor_android_filesystem_config := $(system_android_filesystem_config)
-endif
-
-vendor_capability_header := $(strip \
-  $(if $(filter-out current,$(BOARD_VNDK_VERSION)), \
-    $(SOONG_VENDOR_$(BOARD_VNDK_VERSION)_SNAPSHOT_DIR)/include/$(system_capability_header)))
-ifeq (,$(wildcard $(vendor_capability_header)))
-vendor_capability_header := $(system_capability_header)
-endif
+android_filesystem_config := system/core/libcutils/include/private/android_filesystem_config.h
+capability_header := bionic/libc/kernel/uapi/linux/capability.h
 
 # List of supported vendor, oem, odm, vendor_dlkm, odm_dlkm, and system_dlkm Partitions
 fs_config_generate_extra_partition_list := $(strip \
@@ -85,58 +70,6 @@
 include $(BUILD_PHONY_PACKAGE)
 
 ##################################
-# Generate the system_ext/etc/fs_config_dirs binary file for the target if the
-# system_ext partition is generated. Add fs_config_dirs or fs_config_dirs_system_ext
-# to PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_system_ext
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_system_ext)
-include $(BUILD_PHONY_PACKAGE)
-
-##################################
-# Generate the system_ext/etc/fs_config_files binary file for the target if the
-# system_ext partition is generated. Add fs_config_files or fs_config_files_system_ext
-# to PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_system_ext
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_system_ext)
-include $(BUILD_PHONY_PACKAGE)
-
-##################################
-# Generate the product/etc/fs_config_dirs binary file for the target if the
-# product partition is generated. Add fs_config_dirs or fs_config_dirs_product
-# to PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_product
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_product)
-include $(BUILD_PHONY_PACKAGE)
-
-##################################
-# Generate the product/etc/fs_config_files binary file for the target if the
-# product partition is generated. Add fs_config_files or fs_config_files_product
-# to PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_product
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_product)
-include $(BUILD_PHONY_PACKAGE)
-
-##################################
 # Generate the <p>/etc/fs_config_dirs binary files for all enabled partitions
 # excluding /system, /system_ext and /product. Add fs_config_dirs_nonsystem to
 # PRODUCT_PACKAGES in the device make file to enable.
@@ -146,7 +79,7 @@
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_dirs_$(t))
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_dirs_$(t))
 include $(BUILD_PHONY_PACKAGE)
 
 ##################################
@@ -159,122 +92,9 @@
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_files_$(t))
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_files_$(t))
 include $(BUILD_PHONY_PACKAGE)
 
-##################################
-# Generate the system/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_system to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_system
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_PARTITION_LIST := $(fs_config_generate_extra_partition_list)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition system \
-	   --all-partitions "$(subst $(space),$(comma),$(PRIVATE_PARTITION_LIST))" \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the system/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_system to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_system
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_PARTITION_LIST := $(fs_config_generate_extra_partition_list)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition system \
-	   --all-partitions "$(subst $(space),$(comma),$(PRIVATE_PARTITION_LIST))" \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-ifneq ($(filter vendor,$(fs_config_generate_extra_partition_list)),)
-##################################
-# Generate the vendor/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_dirs_vendor
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition vendor \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the vendor/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_files_vendor
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition vendor \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-endif
-
 ifneq ($(filter oem,$(fs_config_generate_extra_partition_list)),)
 ##################################
 # Generate the oem/etc/fs_config_dirs binary file for the target
@@ -282,7 +102,7 @@
 # in the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_dirs_oem
+LOCAL_MODULE := fs_config_dirs_oem
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -290,10 +110,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -309,7 +129,7 @@
 # in the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_files_oem
+LOCAL_MODULE := fs_config_files_oem
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -317,10 +137,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -332,63 +152,6 @@
 
 endif
 
-ifneq ($(filter odm,$(fs_config_generate_extra_partition_list)),)
-##################################
-# Generate the odm/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_dirs_odm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition odm \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the odm/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_files_odm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition odm \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-endif
-
 ifneq ($(filter vendor_dlkm,$(fs_config_generate_extra_partition_list)),)
 ##################################
 # Generate the vendor_dlkm/etc/fs_config_dirs binary file for the target
@@ -396,7 +159,7 @@
 # the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_dirs_vendor_dlkm
+LOCAL_MODULE := fs_config_dirs_vendor_dlkm
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -404,10 +167,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -423,7 +186,7 @@
 # the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_files_vendor_dlkm
+LOCAL_MODULE := fs_config_files_vendor_dlkm
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -431,10 +194,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -453,7 +216,7 @@
 # in the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_dirs_odm_dlkm
+LOCAL_MODULE := fs_config_dirs_odm_dlkm
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -461,10 +224,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -480,7 +243,7 @@
 # in the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_files_odm_dlkm
+LOCAL_MODULE := fs_config_files_odm_dlkm
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -488,10 +251,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -510,7 +273,7 @@
 # in the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_dirs_system_dlkm
+LOCAL_MODULE := fs_config_dirs_system_dlkm
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -518,10 +281,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -537,7 +300,7 @@
 # in the device make file to enable
 include $(CLEAR_VARS)
 
-LOCAL_MODULE := _fs_config_files_system_dlkm
+LOCAL_MODULE := fs_config_files_system_dlkm
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
@@ -545,10 +308,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -560,118 +323,6 @@
 
 endif
 
-ifneq ($(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),)
-##################################
-# Generate the product/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_product to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_dirs_product
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition product \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the product/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_product to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_files_product
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition product \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-endif
-
-ifneq ($(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),)
-##################################
-# Generate the system_ext/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_system_ext to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_dirs_system_ext
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition system_ext \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the system_ext/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_system_ext to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := _fs_config_files_system_ext
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition system_ext \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-endif
-
-system_android_filesystem_config :=
-system_capability_header :=
+android_filesystem_config :=
+capability_header :=
 fs_config_generate_extra_partition_list :=
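
For reference, every recipe retained above funnels into the same fs_config_generator.py invocation; a minimal sketch of that call shape, assuming the generator is on PATH and with placeholder header paths (not real build outputs):

    import subprocess

    def generate_fs_config(partition, kind, out_file,
                           aid_header="android_filesystem_config.h",
                           cap_header="capability.h",
                           target_fs_config_gen="/dev/null"):
        # kind is "--dirs" or "--files", matching the two module flavours above.
        cmd = [
            "fs_config_generator.py", "fsconfig",
            "--aid-header", aid_header,
            "--capability-header", cap_header,
            "--partition", partition,
            kind,
            "--out_file", out_file,
            target_fs_config_gen,
        ]
        subprocess.run(cmd, check=True)
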
diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go
index 50264fd..de84fbe 100644
--- a/tools/ide_query/ide_query.go
+++ b/tools/ide_query/ide_query.go
@@ -304,6 +304,7 @@
 	args := []string{
 		"--make-mode",
 		"ANDROID_BUILD_ENVIRONMENT_CONFIG=googler-cog",
+		"SOONG_GEN_COMPDB=1",
 		"TARGET_PRODUCT=" + env.LunchTarget.Product,
 		"TARGET_RELEASE=" + env.LunchTarget.Release,
 		"TARGET_BUILD_VARIANT=" + env.LunchTarget.Variant,
diff --git a/tools/lunchable b/tools/lunchable
new file mode 100755
index 0000000..fce2c27
--- /dev/null
+++ b/tools/lunchable
@@ -0,0 +1,72 @@
+#!/bin/bash
+
+# TODO: Currently only checks trunk_staging. Should check trunk_staging first,
+#       then use the product-specific releases. Only applies to -c though.
+
+function Help() {
+cat <<@EOF@
+Usage: lunchable [options]
+
+Lists products that have no functioning lunch combo.
+
+options:
+-c    prints all failing lunch combos for all targets;
+-w    why? Prints the error message after each failed lunch combo. Only
+      works with -c
+
+@EOF@
+}
+
+complete=0
+why=0
+while getopts "cwh" option; do
+  case $option in
+    c)
+      complete=1;;
+    w)
+      why=1;;
+    h)
+      Help
+      exit;;
+  esac
+done
+
+# Getting all named products can fail if we haven't lunched anything
+source $(pwd)/build/envsetup.sh &> /dev/null
+all_named_products=( $(get_build_var all_named_products 2> /dev/null) )
+if [[ $? -ne 0 ]]; then
+  echo "get_build_var all_named_products failed. Lunch something first?" >&2
+  exit 1
+fi
+total_products=${#all_named_products[@]}
+current_product=0
+
+for product in "${all_named_products[@]}"; do
+  (( current_product += 1 ))
+  single_pass=0
+  printf " Checking ${current_product}/${total_products} \r" >&2
+  for release in trunk_staging; do
+    for variant in eng user userdebug; do
+      lunchcombo="${product}-${release}-${variant}"
+      lunch_error="$(lunch $lunchcombo 2>&1 > /dev/null)"
+      if [[ $? -ne 0 ]]; then
+        # Lunch failed
+        if [[ $complete -eq 1 ]]; then
+          echo -e "${product} : ${lunchcombo}"
+          if [[ $why -eq 1 ]]; then
+            echo -e "$(sed 's/^/    /g' <<<$lunch_error)"
+          fi
+        fi
+      elif [[ $complete -ne 1 ]]; then
+        single_pass=1
+        break # skip variant
+      fi
+    done
+    if [[ $single_pass -eq 1 ]]; then
+      break # skip release
+    fi
+  done
+  if [[ $complete -eq 0 ]] && [[ $single_pass -eq 0 ]]; then
+    echo "${product}"
+  fi
+done
diff --git a/tools/overrideflags.sh b/tools/overrideflags.sh
deleted file mode 100755
index b8605dc..0000000
--- a/tools/overrideflags.sh
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/bin/bash -e
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../shell_utils.sh
-require_top
-
-function print_help() {
-    echo -e "overrideflags is used to set default value for local build."
-    echo -e "\nOptions:"
-    echo -e "\t--release-config  \tPath to release configuration directory. Required"
-    echo -e "\t--no-edit         \tIf present, skip editing flag value file."
-    echo -e "\t-h/--help         \tShow this help."
-}
-
-function main() {
-    while (($# > 0)); do
-        case $1 in
-        --release-config)
-            if [[ $# -le 1 ]]; then
-                echo "--release-config requires a path"
-                return 1
-            fi
-            local release_config_dir="$2"
-            shift 2
-            ;;
-        --no-edit)
-            local no_edit="true"
-            shift 1
-            ;;
-        -h|--help)
-            print_help
-            return
-            ;;
-        *)
-            echo "$1 is unrecognized"
-            print_help
-            return 1
-            ;;
-        esac
-    done
-
-
-
-    case $(uname -s) in
-        Darwin)
-            local host_arch=darwin-x86
-            ;;
-        Linux)
-            local host_arch=linux-x86
-            ;;
-        *)
-            >&2 echo Unknown host $(uname -s)
-            return
-            ;;
-    esac
-
-    if [[ -z "${release_config_dir}" ]]; then
-        echo "Please provide release configuration path by --release-config"
-        exit 1
-    elif [ ! -d "${release_config_dir}" ]; then
-        echo "${release_config_dir} is an invalid directory"
-        exit 1
-    fi
-    local T="$(gettop)"
-    local aconfig_dir="${T}"/build/make/tools/aconfig/
-    local overrideflag_py="${aconfig_dir}"/overrideflags/overrideflags.py
-    local overridefile="${release_config_dir}/aconfig/override_values.textproto"
-
-    # Edit override file
-    if [[ -z "${no_edit}" ]]; then
-        editor="${EDITOR:-$(which vim)}"
-
-        eval "${editor} ${overridefile}"
-        if [ $? -ne 0 ]; then
-            echo "Fail to set override values"
-            return 1
-        fi
-    fi
-
-    ${T}/prebuilts/build-tools/${host_arch}/bin/py3-cmd -u "${overrideflag_py}" \
-        --overrides "${overridefile}" \
-        --out "${release_config_dir}/aconfig"
-}
-
-
-main "$@"
diff --git a/tools/perf/format_benchmarks b/tools/perf/format_benchmarks
index 162c577..807e546 100755
--- a/tools/perf/format_benchmarks
+++ b/tools/perf/format_benchmarks
@@ -25,6 +25,7 @@
 import pathlib
 import statistics
 import zoneinfo
+import csv
 
 import pretty
 import utils
@@ -103,7 +104,7 @@
     def SetFixedCol(self, row_key, columns):
         self._fixed_cols[row_key] = columns
 
-    def Write(self, out):
+    def Write(self, out, fmt):
         table = []
         # Expand the column items
         for row in zip(*self._cols):
@@ -114,26 +115,33 @@
             # Update the last row of the header with title and add separator
             for i in range(len(self._titles)):
                 table[len(table)-1][i] = self._titles[i]
-            table.append(pretty.SEPARATOR)
+            if fmt == "table":
+                table.append(pretty.SEPARATOR)
         # Populate the data
         for row in self._rows:
             table.append([str(row)]
                          + self._fixed_cols[row]
                          + [str(self._data.get((col, row), "")) for col in self._cols])
-        out.write(pretty.FormatTable(table, alignments="LL"))
+        if fmt == "csv":
+            csv.writer(sys.stdout, quoting=csv.QUOTE_MINIMAL).writerows(table)
+        else:
+            out.write(pretty.FormatTable(table, alignments="LL"))
 
 
-def format_duration_sec(ns):
+def format_duration_sec(ns, fmt_sec):
     "Format a duration in ns to second precision"
     sec = round(ns / 1000000000)
-    h, sec = divmod(sec, 60*60)
-    m, sec = divmod(sec, 60)
-    result = ""
-    if h > 0:
-        result += f"{h:2d}h "
-    if h > 0 or m > 0:
-        result += f"{m:2d}m "
-    return result + f"{sec:2d}s"
+    if fmt_sec:
+        return f"{sec}"
+    else:
+        h, sec = divmod(sec, 60*60)
+        m, sec = divmod(sec, 60)
+        result = ""
+        if h > 0:
+            result += f"{h:2d}h "
+        if h > 0 or m > 0:
+            result += f"{m:2d}m "
+        return result + f"{sec:2d}s"
 
 
 def main(argv):
@@ -142,6 +150,12 @@
             allow_abbrev=False, # Don't let people write unsupportable scripts.
             description="Print analysis tables for benchmarks")
 
+    parser.add_argument("--csv", action="store_true",
+                        help="Print in CSV instead of table.")
+
+    parser.add_argument("--sec", action="store_true",
+                        help="Print in seconds instead of minutes and seconds")
+
     parser.add_argument("--tags", nargs="*",
                         help="The tags to print, in order.")
 
@@ -188,14 +202,17 @@
         for key, column in summary["columns"]:
             for id, cell in column:
                 duration_ns = statistics.median([b["duration_ns"] for b in cell])
-                table.SetFixedCol(cell[0]["title"], [" ".join(cell[0]["modules"])])
+                modules = cell[0]["modules"]
+                if not modules:
+                    modules = ["---"]
+                table.SetFixedCol(cell[0]["title"], [" ".join(modules)])
                 table.Set(tuple([summary["date"].strftime("%Y-%m-%d"),
                                  summary["branch"],
                                  summary["tag"]]
                                 + list(key)),
-                          cell[0]["title"], format_duration_sec(duration_ns))
+                          cell[0]["title"], format_duration_sec(duration_ns, args.sec))
 
-    table.Write(sys.stdout)
+    table.Write(sys.stdout, "csv" if args.csv else "table")
 
 if __name__ == "__main__":
     main(sys.argv)
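
A standalone sketch of the --sec behaviour introduced above: the same nanosecond duration is printed either as raw seconds or as h/m/s, mirroring format_duration_sec in the patch:

    def format_duration_sec(ns, fmt_sec):
        # Round to whole seconds, then either print raw seconds or break into h/m/s.
        sec = round(ns / 1_000_000_000)
        if fmt_sec:
            return f"{sec}"
        h, sec = divmod(sec, 60 * 60)
        m, sec = divmod(sec, 60)
        result = ""
        if h > 0:
            result += f"{h:2d}h "
        if h > 0 or m > 0:
            result += f"{m:2d}m "
        return result + f"{sec:2d}s"

    print(format_duration_sec(3_723_000_000_000, False))  # " 1h  2m  3s"
    print(format_duration_sec(3_723_000_000_000, True))   # "3723"
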
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 4941c71..9b134f2 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -244,7 +244,6 @@
         "boot_signer",
         "brotli",
         "bsdiff",
-        "imgdiff",
         "lz4",
         "mkbootfs",
         "signapk",
@@ -308,7 +307,6 @@
         "brotli",
         "bsdiff",
         "deapexer",
-        "imgdiff",
         "lz4",
         "mkbootfs",
         "signapk",
@@ -634,7 +632,6 @@
     data: [
         "testdata/**/*",
         ":com.android.apex.compressed.v1",
-        ":com.android.apex.compressed.v1_original",
         ":com.android.apex.vendor.foo.with_vintf"
     ],
     target: {
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 8836248..4834834 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -490,7 +490,6 @@
       return -1
 
     props = [
-        "ro.board.api_level",
         "ro.board.first_api_level",
         "ro.product.first_api_level",
     ]
@@ -955,6 +954,13 @@
   d["build.prop"] = d["system.build.prop"]
 
   if d.get("avb_enable") == "true":
+    build_info = BuildInfo(d, use_legacy_id=True)
+    # Set up the salt for partitions without build.prop
+    if build_info.fingerprint:
+      if "fingerprint" not in d:
+        d["fingerprint"] = build_info.fingerprint
+      if "avb_salt" not in d:
+        d["avb_salt"] = sha256(build_info.fingerprint.encode()).hexdigest()
     # Set the vbmeta digest if exists
     try:
       d["vbmeta_digest"] = read_helper("META/vbmeta_digest.txt").rstrip()
@@ -1517,7 +1523,7 @@
       AVB_ARG_NAME_CHAIN_PARTITION: []
   }
 
-  for partition, path in partitions.items():
+  for partition, path in sorted(partitions.items()):
     avb_partition_arg = GetAvbPartitionArg(partition, path, info_dict)
     if not avb_partition_arg:
       continue
@@ -1605,7 +1611,7 @@
       "avb_custom_vbmeta_images_partition_list", "").strip().split()]
 
   avb_partitions = {}
-  for partition, path in partitions.items():
+  for partition, path in sorted(partitions.items()):
     if partition not in needed_partitions:
       continue
     assert (partition in AVB_PARTITIONS or
@@ -1778,12 +1784,7 @@
   if has_ramdisk:
     cmd.extend(["--ramdisk", ramdisk_img.name])
 
-  img_unsigned = None
-  if info_dict.get("vboot"):
-    img_unsigned = tempfile.NamedTemporaryFile()
-    cmd.extend(["--output", img_unsigned.name])
-  else:
-    cmd.extend(["--output", img.name])
+  cmd.extend(["--output", img.name])
 
   if partition_name == "recovery":
     if info_dict.get("include_recovery_dtbo") == "true":
@@ -1795,28 +1796,6 @@
 
   RunAndCheckOutput(cmd)
 
-  # Sign the image if vboot is non-empty.
-  if info_dict.get("vboot"):
-    path = "/" + partition_name
-    img_keyblock = tempfile.NamedTemporaryFile()
-    # We have switched from the prebuilt futility binary to using the tool
-    # (futility-host) built from the source. Override the setting in the old
-    # TF.zip.
-    futility = info_dict["futility"]
-    if futility.startswith("prebuilts/"):
-      futility = "futility-host"
-    cmd = [info_dict["vboot_signer_cmd"], futility,
-           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
-           info_dict["vboot_key"] + ".vbprivk",
-           info_dict["vboot_subkey"] + ".vbprivk",
-           img_keyblock.name,
-           img.name]
-    RunAndCheckOutput(cmd)
-
-    # Clean up the temp files.
-    img_unsigned.close()
-    img_keyblock.close()
-
   # AVB: if enabled, calculate and add hash to boot.img or recovery.img.
   if info_dict.get("avb_enable") == "true":
     avbtool = info_dict["avb_avbtool"]
@@ -1965,7 +1944,7 @@
   return None
 
 
-def _BuildVendorBootImage(sourcedir, partition_name, info_dict=None):
+def _BuildVendorBootImage(sourcedir, fs_config_file, partition_name, info_dict=None):
   """Build a vendor boot image from the specified sourcedir.
 
   Take a ramdisk, dtb, and vendor_cmdline from the input (in 'sourcedir'), and
@@ -1981,7 +1960,7 @@
   img = tempfile.NamedTemporaryFile()
 
   ramdisk_format = GetRamdiskFormat(info_dict)
-  ramdisk_img = _MakeRamdisk(sourcedir, ramdisk_format=ramdisk_format)
+  ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file=fs_config_file, ramdisk_format=ramdisk_format)
 
   # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
   mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
@@ -2095,8 +2074,9 @@
   if info_dict is None:
     info_dict = OPTIONS.info_dict
 
+  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
   data = _BuildVendorBootImage(
-      os.path.join(unpack_dir, tree_subdir), "vendor_boot", info_dict)
+      os.path.join(unpack_dir, tree_subdir), os.path.join(unpack_dir, fs_config), "vendor_boot", info_dict)
   if data:
     return File(name, data)
   return None
@@ -2120,7 +2100,7 @@
     info_dict = OPTIONS.info_dict
 
   data = _BuildVendorBootImage(
-      os.path.join(unpack_dir, tree_subdir), "vendor_kernel_boot", info_dict)
+      os.path.join(unpack_dir, tree_subdir), None, "vendor_kernel_boot", info_dict)
   if data:
     return File(name, data)
   return None
@@ -2808,6 +2788,7 @@
             break
         elif handler(o, a):
           success = True
+          break
       if not success:
         raise ValueError("unknown option \"%s\"" % (o,))
 
diff --git a/tools/releasetools/create_brick_ota.py b/tools/releasetools/create_brick_ota.py
index 9e040a5..bf50f71 100644
--- a/tools/releasetools/create_brick_ota.py
+++ b/tools/releasetools/create_brick_ota.py
@@ -45,10 +45,10 @@
   partitions_to_wipe = PARTITIONS_TO_WIPE
   if extra_wipe_partitions is not None:
     partitions_to_wipe = PARTITIONS_TO_WIPE + extra_wipe_partitions.split(",")
-    ota_metadata = ["ota-type=BRICK", "post-timestamp=9999999999",
-                    "pre-device=" + product_name]
-    if serialno is not None:
-        ota_metadata.append("serialno=" + serialno)
+  ota_metadata = ["ota-type=BRICK", "post-timestamp=9999999999",
+                  "pre-device=" + product_name]
+  if serialno is not None:
+      ota_metadata.append("serialno=" + serialno)
   # recovery requires product name to be a | separated list
   product_name = product_name.replace(",", "|")
   with zipfile.ZipFile(output_path, "w") as zfp:
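
With the indentation fixed above, the metadata is assembled for every brick OTA rather than only when extra wipe partitions are given; a minimal sketch of the resulting behaviour (the product name below is just an example):

    def build_ota_metadata(product_name, serialno=None):
        ota_metadata = ["ota-type=BRICK", "post-timestamp=9999999999",
                        "pre-device=" + product_name]
        if serialno is not None:
            ota_metadata.append("serialno=" + serialno)
        return ota_metadata

    build_ota_metadata("oriole")
    # ['ota-type=BRICK', 'post-timestamp=9999999999', 'pre-device=oriole']
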
diff --git a/tools/releasetools/ota_from_raw_img.py b/tools/releasetools/ota_from_raw_img.py
index c186940..03b44f1 100644
--- a/tools/releasetools/ota_from_raw_img.py
+++ b/tools/releasetools/ota_from_raw_img.py
@@ -54,7 +54,7 @@
       prog=argv[0], description="Given a series of .img files, produces a full OTA package that installs those images")
   parser.add_argument("images", nargs="+", type=str,
                       help="List of images to generate OTA")
-  parser.add_argument("--partition_names", nargs='+', type=str,
+  parser.add_argument("--partition_names", nargs='?', type=str,
                       help="Partition names to install the images, default to basename of the image(no file name extension)")
   parser.add_argument('--output', type=str,
                       help='Paths to output merged ota', required=True)
@@ -74,18 +74,20 @@
       old_imgs[i], args.images[i] = img.split(":", maxsplit=1)
 
   if not args.partition_names:
-    args.partition_names = [os.path.os.path.splitext(os.path.basename(path))[
+    args.partition_names = [os.path.splitext(os.path.basename(path))[
         0] for path in args.images]
+  else:
+    args.partition_names = args.partition_names.split(",")
   with tempfile.NamedTemporaryFile() as unsigned_payload, tempfile.NamedTemporaryFile() as dynamic_partition_info_file:
     dynamic_partition_info_file.writelines(
         [b"virtual_ab=true\n", b"super_partition_groups=\n"])
     dynamic_partition_info_file.flush()
     cmd = [ResolveBinaryPath("delta_generator", args.search_path)]
-    cmd.append("--partition_names=" + ",".join(args.partition_names))
+    cmd.append("--partition_names=" + ":".join(args.partition_names))
     cmd.append("--dynamic_partition_info_file=" +
                dynamic_partition_info_file.name)
-    cmd.append("--old_partitions=" + ",".join(old_imgs))
-    cmd.append("--new_partitions=" + ",".join(args.images))
+    cmd.append("--old_partitions=" + ":".join(old_imgs))
+    cmd.append("--new_partitions=" + ":".join(args.images))
     cmd.append("--out_file=" + unsigned_payload.name)
     cmd.append("--is_partial_update")
     if args.max_timestamp:
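
A sketch of the reworked name handling above: --partition_names now takes a single comma-separated value, names still fall back to the image basenames, and the delta_generator arguments are joined with ":" instead of ",":

    import os

    def resolve_partition_names(partition_names, images):
        # Either split the one comma-separated argument, or derive names from basenames.
        if not partition_names:
            return [os.path.splitext(os.path.basename(p))[0] for p in images]
        return partition_names.split(",")

    names = resolve_partition_names("boot,init_boot", ["boot.img", "init_boot.img"])
    print("--partition_names=" + ":".join(names))  # --partition_names=boot:init_boot
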
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 67438e6..985cd56 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -195,6 +195,8 @@
       ro.product.* properties are overridden by the 'import' statement.
       The file expects one property per line, and each line has the following
       format: 'prop_name=value1,value2'. e.g. 'ro.boot.product.sku=std,pro'
+      The path specified can either be relative to the current working directory
+      or the path to a file inside of input_target_files.
 
   --skip_postinstall
       Skip the postinstall hooks when generating an A/B OTA package (default:
@@ -327,7 +329,7 @@
 OPTIONS.enable_vabc_xor = True
 OPTIONS.force_minor_version = None
 OPTIONS.compressor_types = None
-OPTIONS.enable_zucchini = True
+OPTIONS.enable_zucchini = False
 OPTIONS.enable_puffdiff = None
 OPTIONS.enable_lz4diff = False
 OPTIONS.vabc_compression_param = None
@@ -745,7 +747,7 @@
     os.rename(source_path, target_path)
 
   # Write new ab_partitions.txt file
-  new_ab_partitions = os.paht.join(input_file, AB_PARTITIONS)
+  new_ab_partitions = os.path.join(input_file, AB_PARTITIONS)
   with open(new_ab_partitions, 'w') as f:
     for partition in ab_partitions:
       if (partition in dynamic_partition_list and
@@ -912,12 +914,13 @@
     # and install time performance. All OTA's with
     # both the source build and target build with VIRTUAL_AB_COW_VERSION = 3
     # can support the new format. Otherwise, fallback on older versions
-    if not source_info.vabc_cow_version or not target_info.vabc_cow_version:
-      logger.info("Source or Target doesn't have VABC_COW_VERSION specified, default to version 2")
-      OPTIONS.vabc_cow_version = 2
-    elif source_info.vabc_cow_version != target_info.vabc_cow_version:
-      logger.info("Source and Target have different cow VABC_COW_VERSION specified, default to minimum version")
-      OPTIONS.vabc_cow_version = min(source_info.vabc_cow_version, target_info.vabc_cow_version)
+    if not OPTIONS.vabc_cow_version:
+      if not source_info.vabc_cow_version or not target_info.vabc_cow_version:
+        logger.info("Source or Target doesn't have VABC_COW_VERSION specified, default to version 2")
+        OPTIONS.vabc_cow_version = 2
+      elif source_info.vabc_cow_version != target_info.vabc_cow_version:
+        logger.info("Source and Target have different cow VABC_COW_VERSION specified, default to minimum version")
+        OPTIONS.vabc_cow_version = min(source_info.vabc_cow_version, target_info.vabc_cow_version)
 
     # Virtual AB Compression was introduced in Android S.
     # Later, we backported VABC to Android R. But verity support was not
@@ -931,19 +934,20 @@
     assert "ab_partitions" in OPTIONS.info_dict, \
         "META/ab_partitions.txt is required for ab_update."
     source_info = None
-    if not target_info.vabc_cow_version:
+    if not OPTIONS.vabc_cow_version:
+      if not target_info.vabc_cow_version:
+          OPTIONS.vabc_cow_version = 2
+      elif target_info.vabc_cow_version >= "3" and target_info.vendor_api_level < 35:
+        logger.warning(
+              "This full OTA is configured to use VABC cow version"
+              " 3 which is supported since"
+              " Android API level 35, but device is "
+              "launched with {} . If this full OTA is"
+              " served to a device running old build, OTA might fail due to "
+              "unsupported vabc cow version. For safety, version 2 is used because "
+              "it's supported since day 1.".format(
+                  target_info.vendor_api_level))
         OPTIONS.vabc_cow_version = 2
-    elif target_info.vabc_cow_version >= "3" and target_info.vendor_api_level < 35:
-      logger.warning(
-            "This full OTA is configured to use VABC cow version"
-            " 3 which is supported since"
-            " Android API level 35, but device is "
-            "launched with {} . If this full OTA is"
-            " served to a device running old build, OTA might fail due to "
-            "unsupported vabc cow version. For safety, version 2 is used because "
-            "it's supported since day 1.".format(
-                target_info.vendor_api_level))
-      OPTIONS.vabc_cow_version = 2
     if OPTIONS.vabc_compression_param is None and vabc_compression_param:
       minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[
           vabc_compression_param]
@@ -1048,6 +1052,10 @@
   from check_target_files_vintf import CheckVintfIfTrebleEnabled
   CheckVintfIfTrebleEnabled(target_file, target_info)
 
+  # Allow boot_variable_file to also exist in target-files
+  if OPTIONS.boot_variable_file:
+    if not os.path.isfile(OPTIONS.boot_variable_file):
+      OPTIONS.boot_variable_file = os.path.join(target_file, OPTIONS.boot_variable_file)
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
@@ -1288,7 +1296,7 @@
       assert len(words) >= 1 and len(words) <= 2
       OPTIONS.vabc_compression_param = a.lower()
       if len(words) == 2:
-        if not words[1].isdigit():
+        if not words[1].lstrip("-").isdigit():
           raise ValueError("Cannot parse value %r for option $COMPRESSION_LEVEL - only "
                            "integers are allowed." % words[1])
     elif o == "--security_patch_level":
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 048a497..81b53dc 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -1111,9 +1111,10 @@
       relative_path = path.removeprefix(input_dir).removeprefix("/")
       if not Fnmatch(relative_path, UNZIP_PATTERN):
         continue
-      if filename.endswith(".prop") or filename == "prop.default" or "/etc/vintf/" in relative_path:
-        target_path = os.path.join(
-            output_dir, relative_path)
-        os.makedirs(os.path.dirname(target_path), exist_ok=True)
-        shutil.copy(path, target_path)
+      target_path = os.path.join(
+          output_dir, relative_path)
+      if os.path.exists(target_path):
+        continue
+      os.makedirs(os.path.dirname(target_path), exist_ok=True)
+      shutil.copy(path, target_path)
   return output_dir
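
A sketch of the broadened copy above: every extracted file matching the unzip pattern is now copied unless it already exists in the output, rather than only *.prop, prop.default and /etc/vintf/ entries:

    import os
    import shutil

    def copy_if_missing(path, relative_path, output_dir):
        # Copy a single extracted file into output_dir, skipping existing targets.
        target_path = os.path.join(output_dir, relative_path)
        if os.path.exists(target_path):
            return
        os.makedirs(os.path.dirname(target_path), exist_ok=True)
        shutil.copy(path, target_path)
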
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
index b5ac8a5..fc5c704 100644
--- a/tools/sbom/sbom_data.py
+++ b/tools/sbom/sbom_data.py
@@ -30,6 +30,7 @@
 SPDXID_DOC = 'SPDXRef-DOCUMENT'
 SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
 SPDXID_PLATFORM = 'SPDXRef-PLATFORM'
+SPDXID_LICENSE_APACHE = 'LicenseRef-Android-Apache-2.0'
 
 PACKAGE_NAME_PRODUCT = 'PRODUCT'
 PACKAGE_NAME_PLATFORM = 'PLATFORM'
@@ -50,7 +51,7 @@
   cpe23Type = 'cpe23Type'
 
 
-@dataclass
+@dataclass(frozen=True)
 class PackageExternalRef:
   category: PackageExternalRefCategory
   type: PackageExternalRefType
@@ -68,6 +69,7 @@
   verification_code: str = None
   file_ids: List[str] = field(default_factory=list)
   external_refs: List[PackageExternalRef] = field(default_factory=list)
+  declared_license_ids: List[str] = field(default_factory=list)
 
 
 @dataclass
@@ -75,6 +77,7 @@
   id: str
   name: str
   checksum: str
+  concluded_license_ids: List[str] = field(default_factory=list)
 
 
 class RelationshipType:
@@ -85,20 +88,27 @@
   STATIC_LINK = 'STATIC_LINK'
 
 
-@dataclass
+@dataclass(frozen=True)
 class Relationship:
   id1: str
   relationship: RelationshipType
   id2: str
 
 
-@dataclass
+@dataclass(frozen=True)
 class DocumentExternalReference:
   id: str
   uri: str
   checksum: str
 
 
+@dataclass(frozen=True)
+class License:
+  id: str
+  text: str
+  name: str
+
+
 @dataclass
 class Document:
   name: str
@@ -111,20 +121,30 @@
   packages: List[Package] = field(default_factory=list)
   files: List[File] = field(default_factory=list)
   relationships: List[Relationship] = field(default_factory=list)
+  licenses: List[License] = field(default_factory=list)
 
   def add_external_ref(self, external_ref):
     if not any(external_ref.uri == ref.uri for ref in self.external_refs):
       self.external_refs.append(external_ref)
 
   def add_package(self, package):
-    if not any(package.id == p.id for p in self.packages):
+    p = next((p for p in self.packages if package.id == p.id), None)
+    if not p:
       self.packages.append(package)
+    else:
+      for license_id in package.declared_license_ids:
+        if license_id not in p.declared_license_ids:
+          p.declared_license_ids.append(license_id)
 
   def add_relationship(self, rel):
     if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
                for r in self.relationships):
       self.relationships.append(rel)
 
+  def add_license(self, license):
+    if not any(license.id == l.id for l in self.licenses):
+      self.licenses.append(license)
+
   def generate_packages_verification_code(self):
     for package in self.packages:
       if not package.file_ids:
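
A small usage sketch of the new License support, assuming an existing sbom_data.Document instance named doc; add_license de-duplicates on the license id:

    import sbom_data

    apache = sbom_data.License(id=sbom_data.SPDXID_LICENSE_APACHE,
                               name='Android-Apache-2.0',
                               text='<full Apache-2.0 text>')
    doc.add_license(apache)
    doc.add_license(apache)   # second add is a no-op
    assert sum(1 for l in doc.licenses if l.id == apache.id) == 1
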
diff --git a/tools/sbom/sbom_data_test.py b/tools/sbom/sbom_data_test.py
index 69bc9d2..9d987c4 100644
--- a/tools/sbom/sbom_data_test.py
+++ b/tools/sbom/sbom_data_test.py
@@ -23,6 +23,7 @@
 SUPPLIER_UPSTREAM = 'Organization: upstream'
 
 SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
+SPDXID_PREBUILT_PACKAGE2 = 'SPDXRef-PREBUILT-package2'
 SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
 SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'
 
@@ -31,6 +32,9 @@
 SPDXID_FILE3 = 'SPDXRef-file3'
 SPDXID_FILE4 = 'SPDXRef-file4'
 
+SPDXID_LICENSE1 = "SPDXRef-License-1"
+SPDXID_LICENSE2 = "SPDXRef-License-2"
+
 
 class SBOMDataTest(unittest.TestCase):
 
@@ -134,6 +138,47 @@
     self.sbom_doc.generate_packages_verification_code()
     self.assertEqual(expected_package_verification_code, self.sbom_doc.packages[0].verification_code)
 
+  def test_add_package_(self):
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                ))
+    p = next((p for p in self.sbom_doc.packages if p.id == SPDXID_PREBUILT_PACKAGE2), None)
+    self.assertNotEqual(p, None)
+    self.assertEqual(p.declared_license_ids, [])
+
+    # Add same package with license 1
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                declared_license_ids=[SPDXID_LICENSE1]
+                                                ))
+    self.assertEqual(p.declared_license_ids, [SPDXID_LICENSE1])
+
+    # Add same package with license 2
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                declared_license_ids=[SPDXID_LICENSE2]
+                                                ))
+    self.assertEqual(p.declared_license_ids, [SPDXID_LICENSE1, SPDXID_LICENSE2])
+
+    # Add same package with license 2 again
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                declared_license_ids=[SPDXID_LICENSE2]
+                                                ))
+    self.assertEqual(p.declared_license_ids, [SPDXID_LICENSE1, SPDXID_LICENSE2])
+
 
 if __name__ == '__main__':
   unittest.main(verbosity=2)
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
index 1cb864d..26b3c57 100644
--- a/tools/sbom/sbom_writers.py
+++ b/tools/sbom/sbom_writers.py
@@ -64,6 +64,11 @@
   # Relationship
   RELATIONSHIP = 'Relationship'
 
+  # License
+  LICENSE_ID = 'LicenseID'
+  LICENSE_NAME = 'LicenseName'
+  LICENSE_EXTRACTED_TEXT = 'ExtractedText'
+
 
 class TagValueWriter:
   @staticmethod
@@ -99,6 +104,12 @@
       tagvalues.append(f'{Tags.PACKAGE_VERSION}: {package.version}')
     if package.supplier:
       tagvalues.append(f'{Tags.PACKAGE_SUPPLIER}: {package.supplier}')
+
+    license = sbom_data.VALUE_NOASSERTION
+    if package.declared_license_ids:
+      license = ' OR '.join(package.declared_license_ids)
+    tagvalues.append(f'{Tags.PACKAGE_LICENSE_DECLARED}: {license}')
+
     if package.verification_code:
       tagvalues.append(f'{Tags.PACKAGE_VERIFICATION_CODE}: {package.verification_code}')
     if package.external_refs:
@@ -155,8 +166,12 @@
       f'{Tags.FILE_NAME}: {file.name}',
       f'{Tags.SPDXID}: {file.id}',
       f'{Tags.FILE_CHECKSUM}: {file.checksum}',
-      '',
     ]
+    license = sbom_data.VALUE_NOASSERTION
+    if file.concluded_license_ids:
+      license = ' OR '.join(file.concluded_license_ids)
+    tagvalues.append(f'{Tags.FILE_LICENSE_CONCLUDED}: {license}')
+    tagvalues.append('')
 
     return tagvalues
 
@@ -194,6 +209,22 @@
     return tagvalues
 
   @staticmethod
+  def marshal_license(license):
+    tagvalues = []
+    tagvalues.append(f'{Tags.LICENSE_ID}: {license.id}')
+    tagvalues.append(f'{Tags.LICENSE_NAME}: {license.name}')
+    tagvalues.append(f'{Tags.LICENSE_EXTRACTED_TEXT}: <text>{license.text}</text>')
+    return tagvalues
+
+  @staticmethod
+  def marshal_licenses(sbom_doc):
+    tagvalues = []
+    for license in sbom_doc.licenses:
+      tagvalues += TagValueWriter.marshal_license(license)
+      tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
   def write(sbom_doc, file, fragment=False):
     content = []
     if not fragment:
@@ -202,6 +233,7 @@
     tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc, fragment)
     content += tagvalues
     content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
+    content += TagValueWriter.marshal_licenses(sbom_doc)
     file.write('\n'.join(content))
 
 
@@ -236,11 +268,13 @@
   PACKAGE_EXTERNAL_REF_TYPE = 'referenceType'
   PACKAGE_EXTERNAL_REF_LOCATOR = 'referenceLocator'
   PACKAGE_HAS_FILES = 'hasFiles'
+  PACKAGE_LICENSE_DECLARED = 'licenseDeclared'
 
   # File
   FILES = 'files'
   FILE_NAME = 'fileName'
   FILE_CHECKSUMS = 'checksums'
+  FILE_LICENSE_CONCLUDED = 'licenseConcluded'
 
   # Relationship
   RELATIONSHIPS = 'relationships'
@@ -248,6 +282,12 @@
   REL_RELATED_ELEMENT_ID = 'relatedSpdxElement'
   REL_TYPE = 'relationshipType'
 
+  # License
+  LICENSES = 'hasExtractedLicensingInfos'
+  LICENSE_ID = 'licenseId'
+  LICENSE_NAME = 'name'
+  LICENSE_EXTRACTED_TEXT = 'extractedText'
+
 
 class JSONWriter:
   @staticmethod
@@ -294,6 +334,9 @@
         package[PropNames.PACKAGE_VERSION] = p.version
       if p.supplier:
         package[PropNames.PACKAGE_SUPPLIER] = p.supplier
+      package[PropNames.PACKAGE_LICENSE_DECLARED] = sbom_data.VALUE_NOASSERTION
+      if p.declared_license_ids:
+        package[PropNames.PACKAGE_LICENSE_DECLARED] = ' OR '.join(p.declared_license_ids)
       if p.verification_code:
         package[PropNames.PACKAGE_VERIFICATION_CODE] = {
           PropNames.PACKAGE_VERIFICATION_CODE_VALUE: p.verification_code
@@ -329,6 +372,9 @@
         PropNames.ALGORITHM: checksum[0],
         PropNames.CHECKSUM_VALUE: checksum[1],
       }]
+      file[PropNames.FILE_LICENSE_CONCLUDED] = sbom_data.VALUE_NOASSERTION
+      if f.concluded_license_ids:
+        file[PropNames.FILE_LICENSE_CONCLUDED] = ' OR '.join(f.concluded_license_ids)
       files.append(file)
     return {PropNames.FILES: files}
 
@@ -347,10 +393,22 @@
     return {PropNames.RELATIONSHIPS: relationships}
 
   @staticmethod
+  def marshal_licenses(sbom_doc):
+    licenses = []
+    for l in sbom_doc.licenses:
+      licenses.append({
+          PropNames.LICENSE_ID: l.id,
+          PropNames.LICENSE_NAME: l.name,
+          PropNames.LICENSE_EXTRACTED_TEXT: f'<text>{l.text}</text>'
+      })
+    return {PropNames.LICENSES: licenses}
+
+  @staticmethod
   def write(sbom_doc, file):
     doc = {}
     doc.update(JSONWriter.marshal_doc_headers(sbom_doc))
     doc.update(JSONWriter.marshal_packages(sbom_doc))
     doc.update(JSONWriter.marshal_files(sbom_doc))
     doc.update(JSONWriter.marshal_relationships(sbom_doc))
+    doc.update(JSONWriter.marshal_licenses(sbom_doc))
     file.write(json.dumps(doc, indent=4))
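A minimal sketch of what the new license marshalling yields, assuming sbom_data.License takes (id, text, name) as in the tests below; values are illustrative:

    apache = sbom_data.License('LicenseRef-Android-Apache-2.0', 'LICENSE_APACHE', 'License-Apache')
    TagValueWriter.marshal_license(apache)
    # -> ['LicenseID: LicenseRef-Android-Apache-2.0',
    #     'LicenseName: License-Apache',
    #     'ExtractedText: <text>LICENSE_APACHE</text>']
    # JSONWriter.marshal_licenses emits the same data under "hasExtractedLicensingInfos"
    # with the keys licenseId, name and extractedText.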
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
index cf85e01..f9f5230 100644
--- a/tools/sbom/sbom_writers_test.py
+++ b/tools/sbom/sbom_writers_test.py
@@ -33,6 +33,14 @@
 SPDXID_FILE3 = 'SPDXRef-file3'
 SPDXID_FILE4 = 'SPDXRef-file4'
 
+SPDXID_LICENSE_1 = 'LicenseRef-Android-License-1'
+SPDXID_LICENSE_2 = 'LicenseRef-Android-License-2'
+SPDXID_LICENSE_3 = 'LicenseRef-Android-License-3'
+
+LICENSE_APACHE_TEXT = 'LICENSE_APACHE'
+LICENSE1_TEXT = 'LICENSE 1'
+LICENSE2_TEXT = 'LICENSE 2'
+LICENSE3_TEXT = 'LICENSE 3'
 
 class SBOMWritersTest(unittest.TestCase):
 
@@ -63,6 +71,7 @@
                         download_location=sbom_data.VALUE_NONE,
                         supplier=SUPPLIER_GOOGLE,
                         version=BUILD_FINGER_PRINT,
+                        declared_license_ids=[sbom_data.SPDXID_LICENSE_APACHE]
                         ))
 
     self.sbom_doc.add_package(
@@ -71,6 +80,7 @@
                         download_location=sbom_data.VALUE_NONE,
                         supplier=SUPPLIER_GOOGLE,
                         version=BUILD_FINGER_PRINT,
+                        declared_license_ids=[SPDXID_LICENSE_1],
                         ))
 
     self.sbom_doc.add_package(
@@ -79,6 +89,7 @@
                         download_location=sbom_data.VALUE_NONE,
                         supplier=SUPPLIER_GOOGLE,
                         version=BUILD_FINGER_PRINT,
+                        declared_license_ids=[SPDXID_LICENSE_2, SPDXID_LICENSE_3],
                         external_refs=[sbom_data.PackageExternalRef(
                           category=sbom_data.PackageExternalRefCategory.SECURITY,
                           type=sbom_data.PackageExternalRefType.cpe22Type,
@@ -90,6 +101,7 @@
                         name='Upstream package1',
                         supplier=SUPPLIER_UPSTREAM,
                         version='1.1',
+                        declared_license_ids=[SPDXID_LICENSE_2, SPDXID_LICENSE_3],
                         ))
 
     self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
@@ -97,11 +109,11 @@
                                                           id2=SPDXID_UPSTREAM_PACKAGE1))
 
     self.sbom_doc.files.append(
-      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111'))
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111', concluded_license_ids=[sbom_data.SPDXID_LICENSE_APACHE]))
     self.sbom_doc.files.append(
-      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
+      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222', concluded_license_ids=[SPDXID_LICENSE_1]))
     self.sbom_doc.files.append(
-      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333', concluded_license_ids=[SPDXID_LICENSE_2, SPDXID_LICENSE_3]))
     self.sbom_doc.files.append(
       sbom_data.File(id=SPDXID_FILE4, name='file4.a', checksum='SHA1: 44444'))
 
@@ -120,6 +132,11 @@
                                                           id2=SPDXID_FILE4
                                                           ))
 
+    self.sbom_doc.add_license(sbom_data.License(sbom_data.SPDXID_LICENSE_APACHE, LICENSE_APACHE_TEXT, "License-Apache"))
+    self.sbom_doc.add_license(sbom_data.License(SPDXID_LICENSE_1, LICENSE1_TEXT, "License-1"))
+    self.sbom_doc.add_license(sbom_data.License(SPDXID_LICENSE_2, LICENSE2_TEXT, "License-2"))
+    self.sbom_doc.add_license(sbom_data.License(SPDXID_LICENSE_3, LICENSE3_TEXT, "License-3"))
+
     # SBOM fragment of a APK
     self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
                                                  namespace='http://www.google.com/sbom/spdx/android',
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
index 53936c5..a877810 100644
--- a/tools/sbom/testdata/expected_json_sbom.spdx.json
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -31,6 +31,7 @@
             "filesAnalyzed": true,
             "versionInfo": "build_finger_print",
             "supplier": "Organization: Google",
+            "licenseDeclared": "NOASSERTION",
             "packageVerificationCode": {
                 "packageVerificationCodeValue": "123456"
             },
@@ -46,7 +47,8 @@
             "downloadLocation": "NONE",
             "filesAnalyzed": false,
             "versionInfo": "build_finger_print",
-            "supplier": "Organization: Google"
+            "supplier": "Organization: Google",
+            "licenseDeclared": "LicenseRef-Android-Apache-2.0"
         },
         {
             "name": "Prebuilt package1",
@@ -54,7 +56,8 @@
             "downloadLocation": "NONE",
             "filesAnalyzed": false,
             "versionInfo": "build_finger_print",
-            "supplier": "Organization: Google"
+            "supplier": "Organization: Google",
+            "licenseDeclared": "LicenseRef-Android-License-1"
         },
         {
             "name": "Source package1",
@@ -63,6 +66,7 @@
             "filesAnalyzed": false,
             "versionInfo": "build_finger_print",
             "supplier": "Organization: Google",
+            "licenseDeclared": "LicenseRef-Android-License-2 OR LicenseRef-Android-License-3",
             "externalRefs": [
                 {
                     "referenceCategory": "SECURITY",
@@ -77,7 +81,8 @@
             "downloadLocation": "NOASSERTION",
             "filesAnalyzed": false,
             "versionInfo": "1.1",
-            "supplier": "Organization: upstream"
+            "supplier": "Organization: upstream",
+            "licenseDeclared": "LicenseRef-Android-License-2 OR LicenseRef-Android-License-3"
         }
     ],
     "files": [
@@ -89,7 +94,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "11111"
                 }
-            ]
+            ],
+            "licenseConcluded": "LicenseRef-Android-Apache-2.0"
         },
         {
             "fileName": "/bin/file2",
@@ -99,7 +105,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "22222"
                 }
-            ]
+            ],
+            "licenseConcluded": "LicenseRef-Android-License-1"
         },
         {
             "fileName": "/bin/file3",
@@ -109,7 +116,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "33333"
                 }
-            ]
+            ],
+            "licenseConcluded": "LicenseRef-Android-License-2 OR LicenseRef-Android-License-3"
         },
         {
             "fileName": "file4.a",
@@ -119,7 +127,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "44444"
                 }
-            ]
+            ],
+            "licenseConcluded": "NOASSERTION"
         }
     ],
     "relationships": [
@@ -148,5 +157,27 @@
             "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
             "relationshipType": "VARIANT_OF"
         }
+    ],
+    "hasExtractedLicensingInfos": [
+        {
+            "licenseId": "LicenseRef-Android-Apache-2.0",
+            "name": "License-Apache",
+            "extractedText": "<text>LICENSE_APACHE</text>"
+        },
+        {
+            "licenseId": "LicenseRef-Android-License-1",
+            "name": "License-1",
+            "extractedText": "<text>LICENSE 1</text>"
+        },
+        {
+            "licenseId": "LicenseRef-Android-License-2",
+            "name": "License-2",
+            "extractedText": "<text>LICENSE 2</text>"
+        },
+        {
+            "licenseId": "LicenseRef-Android-License-3",
+            "name": "License-3",
+            "extractedText": "<text>LICENSE 3</text>"
+        }
     ]
 }
\ No newline at end of file
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
index e6fd17e..1c54410 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -10,6 +10,7 @@
 FileName: file4.a
 SPDXID: SPDXRef-file4
 FileChecksum: SHA1: 44444
+LicenseConcluded: NOASSERTION
 
 PackageName: PRODUCT
 SPDXID: SPDXRef-PRODUCT
@@ -17,6 +18,7 @@
 FilesAnalyzed: true
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: NOASSERTION
 PackageVerificationCode: 123456
 
 Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-PRODUCT
@@ -24,14 +26,17 @@
 FileName: /bin/file1
 SPDXID: SPDXRef-file1
 FileChecksum: SHA1: 11111
+LicenseConcluded: LicenseRef-Android-Apache-2.0
 
 FileName: /bin/file2
 SPDXID: SPDXRef-file2
 FileChecksum: SHA1: 22222
+LicenseConcluded: LicenseRef-Android-License-1
 
 FileName: /bin/file3
 SPDXID: SPDXRef-file3
 FileChecksum: SHA1: 33333
+LicenseConcluded: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 PackageName: PLATFORM
 SPDXID: SPDXRef-PLATFORM
@@ -39,6 +44,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-Apache-2.0
 
 PackageName: Prebuilt package1
 SPDXID: SPDXRef-PREBUILT-package1
@@ -46,6 +52,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-1
 
 PackageName: Source package1
 SPDXID: SPDXRef-SOURCE-package1
@@ -53,6 +60,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
 
 PackageName: Upstream package1
@@ -61,6 +69,7 @@
 FilesAnalyzed: false
 PackageVersion: 1.1
 PackageSupplier: Organization: upstream
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
 
@@ -68,3 +77,19 @@
 Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
 Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
 Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4
+
+LicenseID: LicenseRef-Android-Apache-2.0
+LicenseName: License-Apache
+ExtractedText: <text>LICENSE_APACHE</text>
+
+LicenseID: LicenseRef-Android-License-1
+LicenseName: License-1
+ExtractedText: <text>LICENSE 1</text>
+
+LicenseID: LicenseRef-Android-License-2
+LicenseName: License-2
+ExtractedText: <text>LICENSE 2</text>
+
+LicenseID: LicenseRef-Android-License-3
+LicenseName: License-3
+ExtractedText: <text>LICENSE 3</text>
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
index 428d7e3..36afc8b 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
@@ -10,6 +10,7 @@
 FileName: file4.a
 SPDXID: SPDXRef-file4
 FileChecksum: SHA1: 44444
+LicenseConcluded: NOASSERTION
 
 Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-file4
 
@@ -19,19 +20,23 @@
 FilesAnalyzed: true
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: NOASSERTION
 PackageVerificationCode: 123456
 
 FileName: /bin/file1
 SPDXID: SPDXRef-file1
 FileChecksum: SHA1: 11111
+LicenseConcluded: LicenseRef-Android-Apache-2.0
 
 FileName: /bin/file2
 SPDXID: SPDXRef-file2
 FileChecksum: SHA1: 22222
+LicenseConcluded: LicenseRef-Android-License-1
 
 FileName: /bin/file3
 SPDXID: SPDXRef-file3
 FileChecksum: SHA1: 33333
+LicenseConcluded: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 PackageName: PLATFORM
 SPDXID: SPDXRef-PLATFORM
@@ -39,6 +44,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-Apache-2.0
 
 PackageName: Prebuilt package1
 SPDXID: SPDXRef-PREBUILT-package1
@@ -46,6 +52,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-1
 
 PackageName: Source package1
 SPDXID: SPDXRef-SOURCE-package1
@@ -53,6 +60,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
 
 PackageName: Upstream package1
@@ -61,6 +69,7 @@
 FilesAnalyzed: false
 PackageVersion: 1.1
 PackageSupplier: Organization: upstream
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
 
@@ -68,3 +77,19 @@
 Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
 Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
 Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4
+
+LicenseID: LicenseRef-Android-Apache-2.0
+LicenseName: License-Apache
+ExtractedText: <text>LICENSE_APACHE</text>
+
+LicenseID: LicenseRef-Android-License-1
+LicenseName: License-1
+ExtractedText: <text>LICENSE 1</text>
+
+LicenseID: LicenseRef-Android-License-2
+LicenseName: License-2
+ExtractedText: <text>LICENSE 2</text>
+
+LicenseID: LicenseRef-Android-License-3
+LicenseName: License-3
+ExtractedText: <text>LICENSE 3</text>
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
index a00c291..4b14a4b 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
@@ -1,6 +1,7 @@
 FileName: /bin/file1.apk
 SPDXID: SPDXRef-file1
 FileChecksum: SHA1: 11111
+LicenseConcluded: NOASSERTION
 
 PackageName: Unbundled apk package
 SPDXID: SPDXRef-SOURCE-package1
@@ -8,5 +9,6 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: NOASSERTION
 
 Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/tool_event_logger/Android.bp b/tools/tool_event_logger/Android.bp
new file mode 100644
index 0000000..7a1d2aa
--- /dev/null
+++ b/tools/tool_event_logger/Android.bp
@@ -0,0 +1,67 @@
+// Copyright 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Set of error prone rules to ensure code quality.
+// The PackageLocation check requires androidCompatible=false; otherwise it does not do anything.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+    default_team: "trendy_team_adte",
+}
+
+python_library_host {
+    name: "tool_event_proto",
+    srcs: [
+        "proto/tool_event.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
+
+python_binary_host {
+    name: "tool_event_logger",
+    pkg_path: "tool_event_logger",
+    srcs: [
+        "tool_event_logger.py",
+    ],
+    libs: [
+        "asuite_cc_client",
+        "tool_event_proto",
+    ],
+    main: "tool_event_logger.py",
+}
+
+python_test_host {
+    name: "tool_event_logger_test",
+    main: "tool_event_logger_test.py",
+    pkg_path: "tool_event_logger",
+    srcs: [
+        "tool_event_logger.py",
+        "tool_event_logger_test.py",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    libs: [
+        "asuite_cc_client",
+        "tool_event_proto",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+            enabled: true,
+        },
+    },
+}
diff --git a/tools/tool_event_logger/OWNERS b/tools/tool_event_logger/OWNERS
new file mode 100644
index 0000000..b692c9e
--- /dev/null
+++ b/tools/tool_event_logger/OWNERS
@@ -0,0 +1,4 @@
+include platform/tools/asuite:/OWNERS
+
+zhuoyao@google.com
+hzalek@google.com
\ No newline at end of file
diff --git a/tools/tool_event_logger/proto/tool_event.proto b/tools/tool_event_logger/proto/tool_event.proto
new file mode 100644
index 0000000..ef71eac
--- /dev/null
+++ b/tools/tool_event_logger/proto/tool_event.proto
@@ -0,0 +1,37 @@
+syntax = "proto3";
+
+package tools.asuite.tool_event_logger;
+
+message ToolEvent {
+  // Occurs immediately upon execution of the tool.
+  message InvocationStarted {
+    string command_args = 1;
+    string cwd = 2;
+    string os = 3;
+  }
+
+  // Occurs when the tool exits for any reason.
+  message InvocationStopped {
+    int32 exit_code = 2;
+    string exit_log = 3;
+  }
+
+  // ------------------------
+  // FIELDS FOR ToolEvent
+  // ------------------------
+  // Random string generated to identify the invocation.
+  string invocation_id = 1;
+  // Internal user name.
+  string user_name = 2;
+  // The root of Android source.
+  string source_root = 3;
+  // Name of the tool used.
+  string tool_tag = 6;
+  // Name of the host workstation.
+  string host_name = 7;
+
+  oneof event {
+    InvocationStarted invocation_started = 4;
+    InvocationStopped invocation_stopped = 5;
+  }
+}
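A rough sketch of how the generated tool_event_pb2 bindings for this proto would be used; the construction mirrors the logger added below, and the values are illustrative:

    from proto import tool_event_pb2

    event = tool_event_pb2.ToolEvent(
        invocation_id='example-invocation-id',
        user_name='example_user',
        tool_tag='example_tool',
    )
    # Set one branch of the oneof; invocation_started and invocation_stopped are mutually exclusive.
    event.invocation_started.CopyFrom(
        tool_event_pb2.ToolEvent.InvocationStarted(command_args='--help', cwd='/src', os='linux')
    )
    serialized = event.SerializeToString()  # bytes suitable for LogEvent.source_extension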
diff --git a/tools/tool_event_logger/tool_event_logger.py b/tools/tool_event_logger/tool_event_logger.py
new file mode 100644
index 0000000..b249d91
--- /dev/null
+++ b/tools/tool_event_logger/tool_event_logger.py
@@ -0,0 +1,233 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+import datetime
+import getpass
+import logging
+import os
+import platform
+import sys
+import tempfile
+import uuid
+
+from atest.metrics import clearcut_client
+from atest.proto import clientanalytics_pb2
+from proto import tool_event_pb2
+
+LOG_SOURCE = 2395
+
+
+class ToolEventLogger:
+  """Logs tool events to Sawmill through Clearcut."""
+
+  def __init__(
+      self,
+      tool_tag: str,
+      invocation_id: str,
+      user_name: str,
+      host_name: str,
+      source_root: str,
+      platform_version: str,
+      python_version: str,
+      client: clearcut_client.Clearcut,
+  ):
+    self.tool_tag = tool_tag
+    self.invocation_id = invocation_id
+    self.user_name = user_name
+    self.host_name = host_name
+    self.source_root = source_root
+    self.platform_version = platform_version
+    self.python_version = python_version
+    self._clearcut_client = client
+
+  @classmethod
+  def create(cls, tool_tag: str):
+    return ToolEventLogger(
+        tool_tag=tool_tag,
+        invocation_id=str(uuid.uuid4()),
+        user_name=getpass.getuser(),
+        host_name=platform.node(),
+        source_root=os.environ.get('ANDROID_BUILD_TOP', ''),
+        platform_version=platform.platform(),
+        python_version=platform.python_version(),
+        client=clearcut_client.Clearcut(LOG_SOURCE),
+    )
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    self.flush()
+
+  def log_invocation_started(self, event_time: datetime.datetime, command_args: str):
+    """Creates an event log with invocation started info."""
+    event = self._create_tool_event()
+    event.invocation_started.CopyFrom(
+        tool_event_pb2.ToolEvent.InvocationStarted(
+            command_args=command_args,
+            os=f'{self.platform_version}:{self.python_version}',
+        )
+    )
+
+    logging.debug('Log invocation_started: %s', event)
+    self._log_clearcut_event(event, event_time)
+
+  def log_invocation_stopped(
+      self,
+      event_time: datetime.datetime,
+      exit_code: int,
+      exit_log: str,
+  ):
+    """Creates an event log with invocation stopped info."""
+    event = self._create_tool_event()
+    event.invocation_stopped.CopyFrom(
+        tool_event_pb2.ToolEvent.InvocationStopped(
+            exit_code=exit_code,
+            exit_log=exit_log,
+        )
+    )
+
+    logging.debug('Log invocation_stopped: %s', event)
+    self._log_clearcut_event(event, event_time)
+
+  def flush(self):
+    """Sends all batched events to Clearcut."""
+    logging.debug('Sending events to Clearcut.')
+    self._clearcut_client.flush_events()
+
+  def _create_tool_event(self):
+    return tool_event_pb2.ToolEvent(
+        tool_tag=self.tool_tag,
+        invocation_id=self.invocation_id,
+        user_name=self.user_name,
+        host_name=self.host_name,
+        source_root=self.source_root,
+    )
+
+  def _log_clearcut_event(
+      self, tool_event: tool_event_pb2.ToolEvent, event_time: datetime.datetime
+  ):
+    log_event = clientanalytics_pb2.LogEvent(
+        event_time_ms=int(event_time.timestamp() * 1000),
+        source_extension=tool_event.SerializeToString(),
+    )
+    self._clearcut_client.log(log_event)
+
+
+class ArgumentParserWithLogging(argparse.ArgumentParser):
+
+  def error(self, message):
+    logging.error('Failed to parse args with error: %s', message)
+    super().error(message)
+
+
+def create_arg_parser():
+  """Creates an instance of the default ToolEventLogger arg parser."""
+
+  parser = ArgumentParserWithLogging(
+      description='Build and upload logs for Android dev tools',
+      add_help=True,
+      formatter_class=argparse.RawDescriptionHelpFormatter,
+  )
+
+  parser.add_argument(
+      '--tool_tag',
+      type=str,
+      required=True,
+      help='Name of the tool.',
+  )
+
+  parser.add_argument(
+      '--start_timestamp',
+      type=lambda ts: datetime.datetime.fromtimestamp(float(ts)),
+      required=True,
+      help=(
+          'Timestamp when the tool starts. The timestamp should have the format '
+          '%%s.%%N, which represents the seconds elapsed since epoch.'
+      ),
+  )
+
+  parser.add_argument(
+      '--end_timestamp',
+      type=lambda ts: datetime.datetime.fromtimestamp(float(ts)),
+      required=True,
+      help=(
+          'Timestamp when the tool exits. The timestamp should have the format '
+          '%%s.%%N, which represents the seconds elapsed since epoch.'
+      ),
+  )
+
+  parser.add_argument(
+      '--tool_args',
+      type=str,
+      help='Parameters that are passed to the tool.',
+  )
+
+  parser.add_argument(
+      '--exit_code',
+      type=int,
+      required=True,
+      help='Tool exit code.',
+  )
+
+  parser.add_argument(
+      '--exit_log',
+      type=str,
+      help='Logs when tool exits.',
+  )
+
+  parser.add_argument(
+      '--dry_run',
+      action='store_true',
+      help='Dry run the tool event logger if set.',
+  )
+
+  return parser
+
+
+def configure_logging():
+  root_logging_dir = tempfile.mkdtemp(prefix='tool_event_logger_')
+
+  log_fmt = '%(asctime)s %(filename)s:%(lineno)s:%(levelname)s: %(message)s'
+  date_fmt = '%Y-%m-%d %H:%M:%S'
+  _, log_path = tempfile.mkstemp(dir=root_logging_dir, suffix='.log')
+
+  logging.basicConfig(
+      filename=log_path, level=logging.DEBUG, format=log_fmt, datefmt=date_fmt
+  )
+
+
+def main(argv: list[str]):
+  args = create_arg_parser().parse_args(argv[1:])
+
+  if args.dry_run:
+    logging.debug('This is a dry run.')
+    return
+
+  try:
+    with ToolEventLogger.create(args.tool_tag) as logger:
+      logger.log_invocation_started(args.start_timestamp, args.tool_args)
+      logger.log_invocation_stopped(
+          args.end_timestamp, args.exit_code, args.exit_log
+      )
+  except Exception as e:
+    logging.error('Log failed with unexpected error: %s', e)
+    raise
+
+
+if __name__ == '__main__':
+  configure_logging()
+  main(sys.argv)
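For orientation, a dry-run invocation driven through main(), using the flag names from create_arg_parser above; the list form matches how the unit tests below call it, and the values are made up:

    tool_event_logger.main([
        '',                       # argv[0] is ignored by parse_args(argv[1:])
        '--tool_tag', 'example_tool',
        '--start_timestamp', '1700000000.0',
        '--end_timestamp', '1700000042.5',
        '--exit_code', '0',
        '--dry_run',
    ])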
diff --git a/tools/tool_event_logger/tool_event_logger_test.py b/tools/tool_event_logger/tool_event_logger_test.py
new file mode 100644
index 0000000..788812a
--- /dev/null
+++ b/tools/tool_event_logger/tool_event_logger_test.py
@@ -0,0 +1,212 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for ToolEventLogger."""
+
+import datetime
+import logging
+import unittest
+from unittest import mock
+
+from atest.metrics import clearcut_client
+from proto import tool_event_pb2
+from tool_event_logger import tool_event_logger
+
+TEST_INVOCATION_ID = 'test_invocation_id'
+TEST_USER_NAME = 'test_user'
+TEST_HOST_NAME = 'test_host_name'
+TEST_TOOL_TAG = 'test_tool'
+TEST_SOURCE_ROOT = 'test_source_root'
+TEST_PLATFORM_VERSION = 'test_platform_version'
+TEST_PYTHON_VERSION = 'test_python_version'
+TEST_EVENT_TIMESTAMP = datetime.datetime.now()
+
+
+class ToolEventLoggerTest(unittest.TestCase):
+
+  def setUp(self):
+    super().setUp()
+    self.clearcut_client = FakeClearcutClient()
+    self.logger = tool_event_logger.ToolEventLogger(
+        TEST_TOOL_TAG,
+        TEST_INVOCATION_ID,
+        TEST_USER_NAME,
+        TEST_HOST_NAME,
+        TEST_SOURCE_ROOT,
+        TEST_PLATFORM_VERSION,
+        TEST_PYTHON_VERSION,
+        client=self.clearcut_client,
+    )
+
+  def test_log_event_timestamp(self):
+    with self.logger:
+      self.logger.log_invocation_started(
+          datetime.datetime.fromtimestamp(100.101), 'test_command'
+      )
+
+    self.assertEqual(
+        self.clearcut_client.get_last_sent_event().event_time_ms, 100101
+    )
+
+  def test_log_event_basic_information(self):
+    with self.logger:
+      self.logger.log_invocation_started(TEST_EVENT_TIMESTAMP, 'test_command')
+
+    sent_event = self.clearcut_client.get_last_sent_event()
+    log_event = tool_event_pb2.ToolEvent.FromString(sent_event.source_extension)
+    self.assertEqual(log_event.invocation_id, TEST_INVOCATION_ID)
+    self.assertEqual(log_event.user_name, TEST_USER_NAME)
+    self.assertEqual(log_event.host_name, TEST_HOST_NAME)
+    self.assertEqual(log_event.tool_tag, TEST_TOOL_TAG)
+    self.assertEqual(log_event.source_root, TEST_SOURCE_ROOT)
+
+  def test_log_invocation_started(self):
+    expected_invocation_started = tool_event_pb2.ToolEvent.InvocationStarted(
+        command_args='test_command',
+        os=TEST_PLATFORM_VERSION + ':' + TEST_PYTHON_VERSION,
+    )
+
+    with self.logger:
+      self.logger.log_invocation_started(TEST_EVENT_TIMESTAMP, 'test_command')
+
+    self.assertEqual(self.clearcut_client.get_number_of_sent_events(), 1)
+    sent_event = self.clearcut_client.get_last_sent_event()
+    self.assertEqual(
+        expected_invocation_started,
+        tool_event_pb2.ToolEvent.FromString(
+            sent_event.source_extension
+        ).invocation_started,
+    )
+
+  def test_log_invocation_stopped(self):
+    expected_invocation_stopped = tool_event_pb2.ToolEvent.InvocationStopped(
+        exit_code=0,
+        exit_log='exit_log',
+    )
+
+    with self.logger:
+      self.logger.log_invocation_stopped(TEST_EVENT_TIMESTAMP, 0, 'exit_log')
+
+    self.assertEqual(self.clearcut_client.get_number_of_sent_events(), 1)
+    sent_event = self.clearcut_client.get_last_sent_event()
+    self.assertEqual(
+        expected_invocation_stopped,
+        tool_event_pb2.ToolEvent.FromString(
+            sent_event.source_extension
+        ).invocation_stopped,
+    )
+
+  def test_log_multiple_events(self):
+    with self.logger:
+      self.logger.log_invocation_started(TEST_EVENT_TIMESTAMP, 'test_command')
+      self.logger.log_invocation_stopped(TEST_EVENT_TIMESTAMP, 0, 'exit_log')
+
+    self.assertEqual(self.clearcut_client.get_number_of_sent_events(), 2)
+
+
+class MainTest(unittest.TestCase):
+
+  REQUIRED_ARGS = [
+      '',
+      '--tool_tag',
+      'test_tool',
+      '--start_timestamp',
+      '1',
+      '--end_timestamp',
+      '2',
+      '--exit_code',
+      '0',
+  ]
+
+  def test_log_and_exit_with_missing_required_args(self):
+    with self.assertLogs() as logs:
+      with self.assertRaises(SystemExit) as ex:
+        tool_event_logger.main(['', '--tool_tag', 'test_tool'])
+
+    with self.subTest('Verify exception code'):
+      self.assertEqual(ex.exception.code, 2)
+
+    with self.subTest('Verify log messages'):
+      self.assertIn(
+          'the following arguments are required',
+          '\n'.join(logs.output),
+      )
+
+  def test_log_and_exit_with_invalid_args(self):
+    with self.assertLogs() as logs:
+      with self.assertRaises(SystemExit) as ex:
+        tool_event_logger.main(['', '--start_timestamp', 'test'])
+
+    with self.subTest('Verify exception code'):
+      self.assertEqual(ex.exception.code, 2)
+
+    with self.subTest('Verify log messages'):
+      self.assertIn(
+          '--start_timestamp: invalid',
+          '\n'.join(logs.output),
+      )
+
+  def test_log_and_exit_with_dry_run(self):
+    with self.assertLogs(level=logging.DEBUG) as logs:
+      tool_event_logger.main(self.REQUIRED_ARGS + ['--dry_run'])
+
+    with self.subTest('Verify log messages'):
+      self.assertIn('dry run', '\n'.join(logs.output))
+
+  @mock.patch.object(clearcut_client, 'Clearcut')
+  def test_log_and_exit_with_unexpected_exception(self, mock_cc):
+    mock_cc.return_value = FakeClearcutClient(raise_log_exception=True)
+
+    with self.assertLogs() as logs:
+      with self.assertRaises(Exception) as ex:
+        tool_event_logger.main(self.REQUIRED_ARGS)
+
+    with self.subTest('Verify log messages'):
+      self.assertIn('unexpected error', '\n'.join(logs.output))
+
+  @mock.patch.object(clearcut_client, 'Clearcut')
+  def test_success(self, mock_cc):
+    mock_clear_cut_client = FakeClearcutClient()
+    mock_cc.return_value = mock_clear_cut_client
+
+    tool_event_logger.main(self.REQUIRED_ARGS)
+
+    self.assertEqual(mock_clear_cut_client.get_number_of_sent_events(), 2)
+
+
+class FakeClearcutClient:
+
+  def __init__(self, raise_log_exception=False):
+    self.pending_log_events = []
+    self.sent_log_events = []
+    self.raise_log_exception = raise_log_exception
+
+  def log(self, log_event):
+    if self.raise_log_exception:
+      raise Exception('unknown exception')
+    self.pending_log_events.append(log_event)
+
+  def flush_events(self):
+    self.sent_log_events.extend(self.pending_log_events)
+    self.pending_log_events.clear()
+
+  def get_number_of_sent_events(self):
+    return len(self.sent_log_events)
+
+  def get_last_sent_event(self):
+    return self.sent_log_events[-1]
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/whichgit b/tools/whichgit
index b0bf2e4..55c8c6f 100755
--- a/tools/whichgit
+++ b/tools/whichgit
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 import argparse
+import itertools
 import os
 import subprocess
 import sys
@@ -10,15 +11,34 @@
                         check=True, capture_output=True, text=True).stdout.strip()
 
 
+def get_all_modules():
+  product_out = subprocess.run(["build/soong/soong_ui.bash", "--dumpvar-mode", "--abs", "PRODUCT_OUT"],
+                                check=True, capture_output=True, text=True).stdout.strip()
+  result = subprocess.run(["cat", product_out + "/all_modules.txt"], check=True, capture_output=True, text=True)
+  return result.stdout.strip().split("\n")
+
+
+def batched(iterable, n):
+  # itertools.batched was introduced in Python 3.12; this helper can be deleted once that is universally available
+  if n < 1:
+    raise ValueError('n must be at least one')
+  it = iter(iterable)
+  while batch := tuple(itertools.islice(it, n)):
+    yield batch
+
+
 def get_sources(modules):
-  result = subprocess.run(["./prebuilts/build-tools/linux-x86/bin/ninja", "-f",
-                           "out/combined-" + os.environ["TARGET_PRODUCT"] + ".ninja",
-                           "-t", "inputs", "-d", ] + modules,
-                          stderr=subprocess.STDOUT, stdout=subprocess.PIPE, check=False, text=True)
-  if result.returncode != 0:
-    sys.stderr.write(result.stdout)
-    sys.exit(1)
-  return set([f for f in result.stdout.split("\n") if not f.startswith("out/")])
+  sources = set()
+  for module_group in batched(modules, 40_000):
+    result = subprocess.run(["./prebuilts/build-tools/linux-x86/bin/ninja", "-f",
+                            "out/combined-" + os.environ["TARGET_PRODUCT"] + ".ninja",
+                            "-t", "inputs", "-d", ] + list(module_group),
+                            stderr=subprocess.STDOUT, stdout=subprocess.PIPE, check=False, text=True)
+    if result.returncode != 0:
+      sys.stderr.write(result.stdout)
+      sys.exit(1)
+    sources.update(set([f for f in result.stdout.split("\n") if not f.startswith("out/")]))
+  return sources
 
 
 def m_nothing():
@@ -50,57 +70,76 @@
         referenced_dirs.add(d)
         prev_dir = d
         break
-  return [d[0:-1] for d in referenced_dirs]
+  return referenced_dirs
 
 
 def main(argv):
   # Argument parsing
   ap = argparse.ArgumentParser(description="List the required git projects for the given modules")
   ap.add_argument("--products", nargs="*",
-                  help="The TARGET_PRODUCT to check. If not provided just uses whatever has"
-                        + " already been built")
+                  help="One or more TARGET_PRODUCT to check, or \"*\" for all. If not provided"
+                        + "just uses whatever has already been built")
   ap.add_argument("--variants", nargs="*",
                   help="The TARGET_BUILD_VARIANTS to check. If not provided just uses whatever has"
                         + " already been built, or eng if --products is supplied")
   ap.add_argument("--modules", nargs="*",
-                  help="The build modules to check, or droid it not supplied")
+                  help="The build modules to check, or \"*\" for all, or droid if not supplied")
   ap.add_argument("--why", nargs="*",
                   help="Also print the input files used in these projects, or \"*\" for all")
+  ap.add_argument("--unused", help="List the unused git projects for the given modules rather than"
+                        + "the used ones. Ignores --why", action="store_true")
   args = ap.parse_args(argv[1:])
 
   modules = args.modules if args.modules else ["droid"]
 
+  match args.products:
+    case ["*"]:
+      products = get_build_var("all_named_products").split(" ")
+    case _:
+      products = args.products
+
   # Get the list of sources for all of the requested build combos
-  if not args.products and not args.variants:
+  if not products and not args.variants:
+    m_nothing()
+    if args.modules == ["*"]:
+      modules = get_all_modules()
     sources = get_sources(modules)
   else:
-    if not args.products:
+    if not products:
       sys.stderr.write("Error: --products must be supplied if --variants is supplied")
       sys.exit(1)
     sources = set()
     build_num = 1
-    for product in args.products:
+    for product in products:
       os.environ["TARGET_PRODUCT"] = product
       variants = args.variants if args.variants else ["user", "userdebug", "eng"]
       for variant in variants:
-        sys.stderr.write(f"Analyzing build {build_num} of {len(args.products)*len(variants)}\r")
+        sys.stderr.write(f"Analyzing build {build_num} of {len(products)*len(variants)}\r")
         os.environ["TARGET_BUILD_VARIANT"] = variant
         m_nothing()
+        if args.modules == ["*"]:
+          modules = get_all_modules()
         sources.update(get_sources(modules))
         build_num += 1
     sys.stderr.write("\n\n")
 
   sources = sorted(sources)
 
-  # Print the list of git directories that has one or more of the sources in it
-  for project in sorted(get_referenced_projects(get_git_dirs(), sources)):
-    print(project)
-    if args.why:
-      if "*" in args.why or project in args.why:
-        prefix = project + "/"
-        for f in sources:
-          if f.startswith(prefix):
-            print("  " + f)
+  if args.unused:
+    # Print the list of git directories that don't contain sources
+    used_git_dirs = set(get_git_dirs())
+    for project in sorted(used_git_dirs.difference(set(get_referenced_projects(used_git_dirs, sources)))):
+      print(project[0:-1])
+  else:
+    # Print the list of git directories that has one or more of the sources in it
+    for project in sorted(get_referenced_projects(get_git_dirs(), sources)):
+      print(project[0:-1])
+      if args.why:
+        if "*" in args.why or project[0:-1] in args.why:
+          prefix = project
+          for f in sources:
+            if f.startswith(prefix):
+              print("  " + f)
 
 
 if __name__ == "__main__":
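A quick sketch of the batched() fallback added above; it yields fixed-size tuples, with a shorter final batch:

    list(batched(range(5), 2))   # -> [(0, 1), (2, 3), (4,)]
    list(batched([], 3))         # -> []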
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index f32f90b..3d65bc0 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -200,7 +200,7 @@
     }
 
     if (verbose)
-        printf("Verification %s\n", foundBad ? "FAILED" : "succesful");
+        printf("Verification %s\n", foundBad ? "FAILED" : "successful");
 
     return foundBad ? 1 : 0;
 }