Merge "Propagate PRODUCT_AVF_MICRODROID_GUEST_GKI_VERSION to soong" into main
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..cd5c426
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,36 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+// Package the minimal files required to run envsetup.sh in the test
+// environment.
+genrule {
+    name: "envsetup_minimum.zip",
+    visibility: [
+        "//build/make/tests:__subpackages__",
+    ],
+    tools: [
+        "soong_zip",
+    ],
+    srcs: [
+        "envsetup.sh",
+        "shell_utils.sh",
+        "core/envsetup.mk",
+    ],
+    out: ["envsetup.zip"],
+    cmd: "$(location soong_zip) -o $(out) -D build/make",
+}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index ce75150..97ecd33 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,2 +1,5 @@
 [Hook Scripts]
 do_not_use_DO_NOT_MERGE = ${REPO_ROOT}/build/soong/scripts/check_do_not_merge.sh ${PREUPLOAD_COMMIT}
+
+[Builtin Hooks]
+ktfmt = true
diff --git a/ci/Android.bp b/ci/Android.bp
new file mode 100644
index 0000000..066b83f
--- /dev/null
+++ b/ci/Android.bp
@@ -0,0 +1,85 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_test_host {
+    name: "build_test_suites_test",
+    main: "build_test_suites_test.py",
+    pkg_path: "testdata",
+    srcs: [
+        "build_test_suites_test.py",
+    ],
+    libs: [
+        "build_test_suites",
+        "pyfakefs",
+        "ci_test_lib",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    data: [
+        ":py3-cmd",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
+// This test is only intended to be run locally since it's slow, not hermetic,
+// and requires a lot of system state. It is therefore not marked as `unit_test`
+// and is not part of any test suite. Note that we also don't want to run this
+// test with Bazel since that would require disabling sandboxing and explicitly
+// passing in all the env vars we depend on via the command-line. The test
+// target could be configured to do so, but it's not worth the effort given
+// that we're moving away from Bazel.
+python_test_host {
+    name: "build_test_suites_local_test",
+    main: "build_test_suites_local_test.py",
+    srcs: [
+        "build_test_suites_local_test.py",
+    ],
+    libs: [
+        "build_test_suites",
+        "pyfakefs",
+        "ci_test_lib",
+    ],
+    test_config_template: "AndroidTest.xml.template",
+    test_options: {
+        unit_test: false,
+    },
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
+python_library_host {
+    name: "build_test_suites",
+    srcs: [
+        "build_test_suites.py",
+    ],
+}
+
+python_library_host {
+    name: "ci_test_lib",
+    srcs: [
+        "ci_test_lib.py",
+    ],
+}
diff --git a/ci/AndroidTest.xml.template b/ci/AndroidTest.xml.template
new file mode 100644
index 0000000..81a3435
--- /dev/null
+++ b/ci/AndroidTest.xml.template
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration>
+  <test class="com.android.tradefed.testtype.python.PythonBinaryHostTest">
+    <option name="par-file-name" value="{MODULE}"/>
+    <option name="use-test-output-file" value="false"/>
+    <option name="test-timeout" value="5m"/>
+  </test>
+</configuration>
diff --git a/ci/build_test_suites b/ci/build_test_suites
index 03f6731..5aaf2f4 100755
--- a/ci/build_test_suites
+++ b/ci/build_test_suites
@@ -1,4 +1,5 @@
 #!prebuilts/build-tools/linux-x86/bin/py3-cmd -B
+#
 # Copyright 2024, The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
 import build_test_suites
+import sys
 
-build_test_suites.main(sys.argv)
+build_test_suites.main(sys.argv[1:])
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 1d5b377..29ed50e 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -12,404 +12,115 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Script to build only the necessary modules for general-tests along
-
-with whatever other targets are passed in.
-"""
+"""Build script for the CI `test_suites` target."""
 
 import argparse
-from collections.abc import Sequence
-import json
+import logging
 import os
 import pathlib
-import re
 import subprocess
 import sys
-from typing import Any
-
-import test_mapping_module_retriever
 
 
-# List of modules that are always required to be in general-tests.zip
-REQUIRED_MODULES = frozenset(
-    ['cts-tradefed', 'vts-tradefed', 'compatibility-host-util', 'soong_zip']
-)
+class Error(Exception):
+
+  def __init__(self, message):
+    super().__init__(message)
 
 
-def build_test_suites(argv):
+class BuildFailureError(Error):
+
+  def __init__(self, return_code):
+    super().__init__(f'Build command failed with return code: {return_code}')
+    self.return_code = return_code
+
+
+REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
+SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
+
+
+def get_top() -> pathlib.Path:
+  return pathlib.Path(os.environ['TOP'])
+
+
+def build_test_suites(argv: list[str]) -> int:
+  """Builds the general-tests and any other test suites passed in.
+
+  Args:
+    argv: The command line arguments passed in.
+
+  Returns:
+    The exit code of the build.
+  """
   args = parse_args(argv)
+  check_required_env()
 
-  if is_optimization_enabled():
-    # Call the class to map changed files to modules to build.
-    # TODO(lucafarsi): Move this into a replaceable class.
-    build_affected_modules(args)
-  else:
+  try:
     build_everything(args)
+  except BuildFailureError as e:
+    logging.error('Build command failed! Check build_log for details.')
+    return e.return_code
+
+  return 0
+
+
+def check_required_env():
+  """Check for required env vars.
+
+  Raises:
+    RuntimeError: If any required env vars are not found.
+  """
+  missing_env_vars = sorted(v for v in REQUIRED_ENV_VARS if v not in os.environ)
+
+  if not missing_env_vars:
+    return
+
+  missing = ','.join(missing_env_vars)
+  raise Error(f'Missing required environment variables: {missing}')
 
 
 def parse_args(argv):
   argparser = argparse.ArgumentParser()
+
   argparser.add_argument(
       'extra_targets', nargs='*', help='Extra test suites to build.'
   )
-  argparser.add_argument('--target_product')
-  argparser.add_argument('--target_release')
-  argparser.add_argument(
-      '--with_dexpreopt_boot_img_and_system_server_only', action='store_true'
-  )
-  argparser.add_argument('--change_info', nargs='?')
 
-  return argparser.parse_args()
-
-
-def is_optimization_enabled() -> bool:
-  # TODO(lucafarsi): switch back to building only affected general-tests modules
-  # in presubmit once ready.
-  # if os.environ.get('BUILD_NUMBER')[0] == 'P':
-  #   return True
-  return False
+  return argparser.parse_args(argv)
 
 
 def build_everything(args: argparse.Namespace):
+  """Builds all tests (regardless of whether they are needed).
+
+  Args:
+    args: The parsed arguments.
+
+  Raises:
+    BuildFailureError: If the build command fails.
+  """
   build_command = base_build_command(args, args.extra_targets)
-  build_command.append('general-tests')
 
-  run_command(build_command, print_output=True)
-
-
-def build_affected_modules(args: argparse.Namespace):
-  modules_to_build = find_modules_to_build(
-      pathlib.Path(args.change_info), args.extra_required_modules
-  )
-
-  # Call the build command with everything.
-  build_command = base_build_command(args, args.extra_targets)
-  build_command.extend(modules_to_build)
-  # When not building general-tests we also have to build the general tests
-  # shared libs.
-  build_command.append('general-tests-shared-libs')
-
-  run_command(build_command, print_output=True)
-
-  zip_build_outputs(modules_to_build, args.target_release)
+  try:
+    run_command(build_command)
+  except subprocess.CalledProcessError as e:
+    raise BuildFailureError(e.returncode) from e
 
 
 def base_build_command(
     args: argparse.Namespace, extra_targets: set[str]
-) -> list:
+) -> list[str]:
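+  """Assembles the base soong_ui build command for the requested targets."""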
+
   build_command = []
-  build_command.append('time')
-  build_command.append('./build/soong/soong_ui.bash')
+  build_command.append(str(get_top().joinpath(SOONG_UI_EXE_REL_PATH)))
   build_command.append('--make-mode')
-  build_command.append('dist')
-  build_command.append('TARGET_PRODUCT=' + args.target_product)
-  build_command.append('TARGET_RELEASE=' + args.target_release)
-  if args.with_dexpreopt_boot_img_and_system_server_only:
-    build_command.append('WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY=true')
   build_command.extend(extra_targets)
 
   return build_command
 
 
-def run_command(
-    args: list[str],
-    env: dict[str, str] = os.environ,
-    print_output: bool = False,
-) -> str:
-  result = subprocess.run(
-      args=args,
-      text=True,
-      capture_output=True,
-      check=False,
-      env=env,
-  )
-  # If the process failed, print its stdout and propagate the exception.
-  if not result.returncode == 0:
-    print('Build command failed! output:')
-    print('stdout: ' + result.stdout)
-    print('stderr: ' + result.stderr)
-
-  result.check_returncode()
-
-  if print_output:
-    print(result.stdout)
-
-  return result.stdout
-
-
-def find_modules_to_build(
-    change_info: pathlib.Path, extra_required_modules: list[str]
-) -> set[str]:
-  changed_files = find_changed_files(change_info)
-
-  test_mappings = test_mapping_module_retriever.GetTestMappings(
-      changed_files, set()
-  )
-
-  # Soong_zip is required to generate the output zip so always build it.
-  modules_to_build = set(REQUIRED_MODULES)
-  if extra_required_modules:
-    modules_to_build.update(extra_required_modules)
-
-  modules_to_build.update(find_affected_modules(test_mappings, changed_files))
-
-  return modules_to_build
-
-
-def find_changed_files(change_info: pathlib.Path) -> set[str]:
-  with open(change_info) as change_info_file:
-    change_info_contents = json.load(change_info_file)
-
-  changed_files = set()
-
-  for change in change_info_contents['changes']:
-    project_path = change.get('projectPath') + '/'
-
-    for revision in change.get('revisions'):
-      for file_info in revision.get('fileInfos'):
-        changed_files.add(project_path + file_info.get('path'))
-
-  return changed_files
-
-
-def find_affected_modules(
-    test_mappings: dict[str, Any], changed_files: set[str]
-) -> set[str]:
-  modules = set()
-
-  # The test_mappings object returned by GetTestMappings is organized as
-  # follows:
-  # {
-  #   'test_mapping_file_path': {
-  #     'group_name' : [
-  #       'name': 'module_name',
-  #     ],
-  #   }
-  # }
-  for test_mapping in test_mappings.values():
-    for group in test_mapping.values():
-      for entry in group:
-        module_name = entry.get('name', None)
-
-        if not module_name:
-          continue
-
-        file_patterns = entry.get('file_patterns')
-        if not file_patterns:
-          modules.add(module_name)
-          continue
-
-        if matches_file_patterns(file_patterns, changed_files):
-          modules.add(module_name)
-          continue
-
-  return modules
-
-
-# TODO(lucafarsi): Share this logic with the original logic in
-# test_mapping_test_retriever.py
-def matches_file_patterns(
-    file_patterns: list[set], changed_files: set[str]
-) -> bool:
-  for changed_file in changed_files:
-    for pattern in file_patterns:
-      if re.search(pattern, changed_file):
-        return True
-
-  return False
-
-
-def zip_build_outputs(
-    modules_to_build: set[str], target_release: str
-):
-  src_top = os.environ.get('TOP', os.getcwd())
-
-  # Call dumpvars to get the necessary things.
-  # TODO(lucafarsi): Don't call soong_ui 4 times for this, --dumpvars-mode can
-  # do it but it requires parsing.
-  host_out_testcases = pathlib.Path(
-      get_soong_var('HOST_OUT_TESTCASES', target_release)
-  )
-  target_out_testcases = pathlib.Path(
-      get_soong_var('TARGET_OUT_TESTCASES', target_release)
-  )
-  product_out = pathlib.Path(get_soong_var('PRODUCT_OUT', target_release))
-  soong_host_out = pathlib.Path(get_soong_var('SOONG_HOST_OUT', target_release))
-  host_out = pathlib.Path(get_soong_var('HOST_OUT', target_release))
-  dist_dir = pathlib.Path(get_soong_var('DIST_DIR', target_release))
-
-  # Call the class to package the outputs.
-  # TODO(lucafarsi): Move this code into a replaceable class.
-  host_paths = []
-  target_paths = []
-  host_config_files = []
-  target_config_files = []
-  for module in modules_to_build:
-    host_path = os.path.join(host_out_testcases, module)
-    if os.path.exists(host_path):
-      host_paths.append(host_path)
-      collect_config_files(src_top, host_path, host_config_files)
-
-    target_path = os.path.join(target_out_testcases, module)
-    if os.path.exists(target_path):
-      target_paths.append(target_path)
-      collect_config_files(src_top, target_path, target_config_files)
-
-  zip_test_configs_zips(
-      dist_dir, host_out, product_out, host_config_files, target_config_files
-  )
-
-  zip_command = base_zip_command(host_out, dist_dir, 'general-tests.zip')
-
-  # Add host testcases.
-  zip_command.append('-C')
-  zip_command.append(os.path.join(src_top, soong_host_out))
-  zip_command.append('-P')
-  zip_command.append('host/')
-  for path in host_paths:
-    zip_command.append('-D')
-    zip_command.append(path)
-
-  # Add target testcases.
-  zip_command.append('-C')
-  zip_command.append(os.path.join(src_top, product_out))
-  zip_command.append('-P')
-  zip_command.append('target')
-  for path in target_paths:
-    zip_command.append('-D')
-    zip_command.append(path)
-
-  # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
-  # Add necessary tools. These are also hardcoded in general-tests.mk.
-  framework_path = os.path.join(soong_host_out, 'framework')
-
-  zip_command.append('-C')
-  zip_command.append(framework_path)
-  zip_command.append('-P')
-  zip_command.append('host/tools')
-  zip_command.append('-f')
-  zip_command.append(os.path.join(framework_path, 'cts-tradefed.jar'))
-  zip_command.append('-f')
-  zip_command.append(
-      os.path.join(framework_path, 'compatibility-host-util.jar')
-  )
-  zip_command.append('-f')
-  zip_command.append(os.path.join(framework_path, 'vts-tradefed.jar'))
-
-  run_command(zip_command, print_output=True)
-
-
-def collect_config_files(
-    src_top: pathlib.Path, root_dir: pathlib.Path, config_files: list[str]
-):
-  for root, dirs, files in os.walk(os.path.join(src_top, root_dir)):
-    for file in files:
-      if file.endswith('.config'):
-        config_files.append(os.path.join(root_dir, file))
-
-
-def base_zip_command(
-    host_out: pathlib.Path, dist_dir: pathlib.Path, name: str
-) -> list[str]:
-  return [
-      'time',
-      os.path.join(host_out, 'bin', 'soong_zip'),
-      '-d',
-      '-o',
-      os.path.join(dist_dir, name),
-  ]
-
-
-# generate general-tests_configs.zip which contains all of the .config files
-# that were built and general-tests_list.zip which contains a text file which
-# lists all of the .config files that are in general-tests_configs.zip.
-#
-# general-tests_comfigs.zip is organized as follows:
-# /
-#   host/
-#     testcases/
-#       test_1.config
-#       test_2.config
-#       ...
-#   target/
-#     testcases/
-#       test_1.config
-#       test_2.config
-#       ...
-#
-# So the process is we write out the paths to all the host config files into one
-# file and all the paths to the target config files in another. We also write
-# the paths to all the config files into a third file to use for
-# general-tests_list.zip.
-def zip_test_configs_zips(
-    dist_dir: pathlib.Path,
-    host_out: pathlib.Path,
-    product_out: pathlib.Path,
-    host_config_files: list[str],
-    target_config_files: list[str],
-):
-  with open(
-      os.path.join(host_out, 'host_general-tests_list'), 'w'
-  ) as host_list_file, open(
-      os.path.join(product_out, 'target_general-tests_list'), 'w'
-  ) as target_list_file, open(
-      os.path.join(host_out, 'general-tests_list'), 'w'
-  ) as list_file:
-
-    for config_file in host_config_files:
-      host_list_file.write(config_file + '\n')
-      list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n')
-
-    for config_file in target_config_files:
-      target_list_file.write(config_file + '\n')
-      list_file.write(
-          'target/' + os.path.relpath(config_file, product_out) + '\n'
-      )
-
-  tests_config_zip_command = base_zip_command(
-      host_out, dist_dir, 'general-tests_configs.zip'
-  )
-  tests_config_zip_command.append('-P')
-  tests_config_zip_command.append('host')
-  tests_config_zip_command.append('-C')
-  tests_config_zip_command.append(host_out)
-  tests_config_zip_command.append('-l')
-  tests_config_zip_command.append(
-      os.path.join(host_out, 'host_general-tests_list')
-  )
-  tests_config_zip_command.append('-P')
-  tests_config_zip_command.append('target')
-  tests_config_zip_command.append('-C')
-  tests_config_zip_command.append(product_out)
-  tests_config_zip_command.append('-l')
-  tests_config_zip_command.append(
-      os.path.join(product_out, 'target_general-tests_list')
-  )
-  run_command(tests_config_zip_command, print_output=True)
-
-  tests_list_zip_command = base_zip_command(
-      host_out, dist_dir, 'general-tests_list.zip'
-  )
-  tests_list_zip_command.append('-C')
-  tests_list_zip_command.append(host_out)
-  tests_list_zip_command.append('-f')
-  tests_list_zip_command.append(os.path.join(host_out, 'general-tests_list'))
-  run_command(tests_list_zip_command, print_output=True)
-
-
-def get_soong_var(var: str, target_release: str) -> str:
-  new_env = os.environ.copy()
-  new_env['TARGET_RELEASE'] = target_release
-
-  value = run_command(
-      ['./build/soong/soong_ui.bash', '--dumpvar-mode', '--abs', var],
-      env=new_env,
-  ).strip()
-  if not value:
-    raise RuntimeError('Necessary soong variable ' + var + ' not found.')
-
-  return value
+def run_command(args: list[str], stdout=None):
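+  """Runs the given command, raising CalledProcessError on a non-zero exit."""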
+  subprocess.run(args=args, check=True, stdout=stdout)
 
 
 def main(argv):
-  build_test_suites(argv)
+  sys.exit(build_test_suites(argv))
diff --git a/ci/build_test_suites_local_test.py b/ci/build_test_suites_local_test.py
new file mode 100644
index 0000000..78e52d3
--- /dev/null
+++ b/ci/build_test_suites_local_test.py
@@ -0,0 +1,123 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Integration tests for build_test_suites that require a local build env."""
+
+import os
+import pathlib
+import shutil
+import signal
+import subprocess
+import tempfile
+import time
+import ci_test_lib
+
+
+class BuildTestSuitesLocalTest(ci_test_lib.TestCase):
+
+  def setUp(self):
+    self.top_dir = pathlib.Path(os.environ['ANDROID_BUILD_TOP']).resolve()
+    self.executable = self.top_dir.joinpath('build/make/ci/build_test_suites')
+    self.process_session = ci_test_lib.TemporaryProcessSession(self)
+    self.temp_dir = ci_test_lib.TestTemporaryDirectory.create(self)
+
+  def build_subprocess_args(self, build_args: list[str]):
+    env = os.environ.copy()
+    env['TOP'] = str(self.top_dir)
+    env['OUT_DIR'] = str(self.temp_dir)
+
+    args = ([self.executable] + build_args,)
+    kwargs = {
+        'cwd': self.top_dir,
+        'env': env,
+        'text': True,
+    }
+
+    return (args, kwargs)
+
+  def run_build(self, build_args: list[str]) -> subprocess.CompletedProcess:
+    args, kwargs = self.build_subprocess_args(build_args)
+
+    return subprocess.run(
+        *args,
+        **kwargs,
+        check=True,
+        capture_output=True,
+        timeout=5 * 60,
+    )
+
+  def assert_children_alive(self, children: list[int]):
+    for c in children:
+      self.assertTrue(ci_test_lib.process_alive(c))
+
+  def assert_children_dead(self, children: list[int]):
+    for c in children:
+      self.assertFalse(ci_test_lib.process_alive(c))
+
+  def test_fails_for_invalid_arg(self):
+    invalid_arg = '--invalid-arg'
+
+    with self.assertRaises(subprocess.CalledProcessError) as cm:
+      self.run_build([invalid_arg])
+
+    self.assertIn(invalid_arg, cm.exception.stderr)
+
+  def test_builds_successfully(self):
+    self.run_build(['nothing'])
+
+  def test_can_interrupt_build(self):
+    args, kwargs = self.build_subprocess_args(['general-tests'])
+    p = self.process_session.create(args, kwargs)
+
+    # TODO(lucafarsi): Replace this (and other instances) with a condition.
+    time.sleep(5)  # Wait for the build to get going.
+    self.assertIsNone(p.poll())  # Check that the process is still alive.
+    children = query_child_pids(p.pid)
+    self.assert_children_alive(children)
+
+    p.send_signal(signal.SIGINT)
+    p.wait()
+
+    time.sleep(5)  # Wait for things to die out.
+    self.assert_children_dead(children)
+
+  def test_can_kill_build_process_group(self):
+    args, kwargs = self.build_subprocess_args(['general-tests'])
+    p = self.process_session.create(args, kwargs)
+
+    time.sleep(5)  # Wait for the build to get going.
+    self.assertIsNone(p.poll())  # Check that the process is still alive.
+    children = query_child_pids(p.pid)
+    self.assert_children_alive(children)
+
+    os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+    p.wait()
+
+    time.sleep(5)  # Wait for things to die out.
+    self.assert_children_dead(children)
+
+
+# TODO(hzalek): Replace this with `psutils` once available in the tree.
+def query_child_pids(parent_pid: int) -> set[int]:
+  p = subprocess.run(
+      ['pgrep', '-P', str(parent_pid)],
+      check=True,
+      capture_output=True,
+      text=True,
+  )
+  return {int(pid) for pid in p.stdout.splitlines()}
+
+
+if __name__ == '__main__':
+  ci_test_lib.main()
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
new file mode 100644
index 0000000..08a79a3
--- /dev/null
+++ b/ci/build_test_suites_test.py
@@ -0,0 +1,254 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for build_test_suites.py"""
+
+from importlib import resources
+import multiprocessing
+import os
+import pathlib
+import shutil
+import signal
+import stat
+import subprocess
+import sys
+import tempfile
+import textwrap
+import time
+from typing import Callable
+from unittest import mock
+import build_test_suites
+import ci_test_lib
+from pyfakefs import fake_filesystem_unittest
+
+
+class BuildTestSuitesTest(fake_filesystem_unittest.TestCase):
+
+  def setUp(self):
+    self.setUpPyfakefs()
+
+    os_environ_patcher = mock.patch.dict('os.environ', {})
+    self.addCleanup(os_environ_patcher.stop)
+    self.mock_os_environ = os_environ_patcher.start()
+
+    subprocess_run_patcher = mock.patch('subprocess.run')
+    self.addCleanup(subprocess_run_patcher.stop)
+    self.mock_subprocess_run = subprocess_run_patcher.start()
+
+    self._setup_working_build_env()
+
+  def test_missing_target_release_env_var_raises(self):
+    del os.environ['TARGET_RELEASE']
+
+    with self.assert_raises_word(build_test_suites.Error, 'TARGET_RELEASE'):
+      build_test_suites.main([])
+
+  def test_missing_target_product_env_var_raises(self):
+    del os.environ['TARGET_PRODUCT']
+
+    with self.assert_raises_word(build_test_suites.Error, 'TARGET_PRODUCT'):
+      build_test_suites.main([])
+
+  def test_missing_top_env_var_raises(self):
+    del os.environ['TOP']
+
+    with self.assert_raises_word(build_test_suites.Error, 'TOP'):
+      build_test_suites.main([])
+
+  def test_invalid_arg_raises(self):
+    invalid_args = ['--invalid_arg']
+
+    with self.assertRaisesRegex(SystemExit, '2'):
+      build_test_suites.main(invalid_args)
+
+  def test_build_failure_returns(self):
+    self.mock_subprocess_run.side_effect = subprocess.CalledProcessError(
+        42, None
+    )
+
+    with self.assertRaisesRegex(SystemExit, '42'):
+      build_test_suites.main([])
+
+  def test_build_success_returns(self):
+    with self.assertRaisesRegex(SystemExit, '0'):
+      build_test_suites.main([])
+
+  def assert_raises_word(self, cls, word):
+    return self.assertRaisesRegex(cls, rf'\b{word}\b')
+
+  def _setup_working_build_env(self):
+    self.fake_top = pathlib.Path('/fake/top')
+    self.fake_top.mkdir(parents=True)
+
+    self.soong_ui_dir = self.fake_top.joinpath('build/soong')
+    self.soong_ui_dir.mkdir(parents=True, exist_ok=True)
+
+    self.soong_ui = self.soong_ui_dir.joinpath('soong_ui.bash')
+    self.soong_ui.touch()
+
+    self.mock_os_environ.update({
+        'TARGET_RELEASE': 'release',
+        'TARGET_PRODUCT': 'product',
+        'TOP': str(self.fake_top),
+    })
+
+    self.mock_subprocess_run.return_value = 0
+
+
+class RunCommandIntegrationTest(ci_test_lib.TestCase):
+
+  def setUp(self):
+    self.temp_dir = ci_test_lib.TestTemporaryDirectory.create(self)
+
+    # Copy the Python executable from 'non-code' resources and make it
+    # executable for use by tests that launch a subprocess. Note that we don't
+    # use Python's native `sys.executable` property since that is not set when
+    # running via the embedded launcher.
+    base_name = 'py3-cmd'
+    dest_file = self.temp_dir.joinpath(base_name)
+    with resources.as_file(
+        resources.files('testdata').joinpath(base_name)
+    ) as p:
+      shutil.copy(p, dest_file)
+    dest_file.chmod(dest_file.stat().st_mode | stat.S_IEXEC)
+    self.python_executable = dest_file
+
+    self._managed_processes = []
+
+  def tearDown(self):
+    self._terminate_managed_processes()
+
+  def test_raises_on_nonzero_exit(self):
+    with self.assertRaises(Exception):
+      build_test_suites.run_command([
+          self.python_executable,
+          '-c',
+          textwrap.dedent(f"""\
+              import sys
+              sys.exit(1)
+              """),
+      ])
+
+  def test_streams_stdout(self):
+
+    def run_slow_command(stdout_file, marker):
+      with open(stdout_file, 'w') as f:
+        build_test_suites.run_command(
+            [
+                self.python_executable,
+                '-c',
+                textwrap.dedent(f"""\
+                  import time
+
+                  print('{marker}', end='', flush=True)
+
+                  # Keep process alive until we check stdout.
+                  time.sleep(10)
+                  """),
+            ],
+            stdout=f,
+        )
+
+    marker = 'Spinach'
+    stdout_file = self.temp_dir.joinpath('stdout.txt')
+
+    p = self.start_process(target=run_slow_command, args=[stdout_file, marker])
+
+    self.assert_file_eventually_contains(stdout_file, marker)
+
+  def test_propagates_interruptions(self):
+
+    def run(pid_file):
+      build_test_suites.run_command([
+          self.python_executable,
+          '-c',
+          textwrap.dedent(f"""\
+              import os
+              import pathlib
+              import time
+
+              pathlib.Path('{pid_file}').write_text(str(os.getpid()))
+
+              # Keep the process alive for us to explicitly interrupt it.
+              time.sleep(10)
+              """),
+      ])
+
+    pid_file = self.temp_dir.joinpath('pid.txt')
+    p = self.start_process(target=run, args=[pid_file])
+    subprocess_pid = int(read_eventual_file_contents(pid_file))
+
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    self.assert_process_eventually_dies(p.pid)
+    self.assert_process_eventually_dies(subprocess_pid)
+
+  def start_process(self, *args, **kwargs) -> multiprocessing.Process:
+    p = multiprocessing.Process(*args, **kwargs)
+    self._managed_processes.append(p)
+    p.start()
+    return p
+
+  def assert_process_eventually_dies(self, pid: int):
+    try:
+      wait_until(lambda: not ci_test_lib.process_alive(pid))
+    except TimeoutError as e:
+      self.fail(f'Process {pid} did not die after a while: {e}')
+
+  def assert_file_eventually_contains(self, file: pathlib.Path, substring: str):
+    wait_until(lambda: file.is_file() and file.stat().st_size > 0)
+    self.assertIn(substring, read_file_contents(file))
+
+  def _terminate_managed_processes(self):
+    for p in self._managed_processes:
+      if not p.is_alive():
+        continue
+
+      # We terminate the process with `SIGINT` since using `terminate` or
+      # `SIGKILL` doesn't kill any grandchild processes and we don't have
+      # `psutil` available to easily query all children.
+      os.kill(p.pid, signal.SIGINT)
+
+
+def wait_until(
+    condition_function: Callable[[], bool],
+    timeout_secs: float = 3.0,
+    polling_interval_secs: float = 0.1,
+):
+  """Waits until a condition function returns True."""
+
+  start_time_secs = time.time()
+
+  while not condition_function():
+    if time.time() - start_time_secs > timeout_secs:
+      raise TimeoutError(
+          f'Condition not met within timeout: {timeout_secs} seconds'
+      )
+
+    time.sleep(polling_interval_secs)
+
+
+def read_file_contents(file: pathlib.Path) -> str:
+  with open(file, 'r') as f:
+    return f.read()
+
+
+def read_eventual_file_contents(file: pathlib.Path) -> str:
+  wait_until(lambda: file.is_file() and file.stat().st_size > 0)
+  return read_file_contents(file)
+
+
+if __name__ == '__main__':
+  ci_test_lib.main()
diff --git a/ci/ci_test_lib.py b/ci/ci_test_lib.py
new file mode 100644
index 0000000..2d70d3f
--- /dev/null
+++ b/ci/ci_test_lib.py
@@ -0,0 +1,86 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Testing utilities for tests in the CI package."""
+
+import logging
+import os
+import pathlib
+import shutil
+import signal
+import subprocess
+import tempfile
+import unittest
+
+
+# Export the TestCase class to reduce the number of imports tests have to list.
+TestCase = unittest.TestCase
+
+
+def process_alive(pid):
+  """Check For the existence of a pid."""
+
+  try:
+    os.kill(pid, 0)
+  except OSError:
+    return False
+
+  return True
+
+
+class TemporaryProcessSession:
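+  """Tracks started subprocesses and kills their process groups on cleanup."""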
+
+  def __init__(self, test_case: TestCase):
+    self._created_processes = []
+    test_case.addCleanup(self.cleanup)
+
+  def create(self, args, kwargs):
+    p = subprocess.Popen(*args, **kwargs, start_new_session=True)
+    self._created_processes.append(p)
+    return p
+
+  def cleanup(self):
+    for p in self._created_processes:
+      if not process_alive(p.pid):
+        continue
+      os.killpg(os.getpgid(p.pid), signal.SIGKILL)
+
+
+class TestTemporaryDirectory:
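+  """Creates a temporary directory that is removed when the test finishes."""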
+
+  def __init__(self, delete: bool):
+    self._delete = delete
+
+  @classmethod
+  def create(cls, test_case: TestCase, delete: bool = True):
+    temp_dir = TestTemporaryDirectory(delete)
+    temp_dir._dir = pathlib.Path(tempfile.mkdtemp())
+    test_case.addCleanup(temp_dir.cleanup)
+    return temp_dir._dir
+
+  def get_dir(self):
+    return self._dir
+
+  def cleanup(self):
+    if not self._delete:
+      return
+    shutil.rmtree(self._dir, ignore_errors=True)
+
+
+def main():
+
+  # Disable logging since it breaks the TF Python test output parser.
+  # TODO(hzalek): Use TF's `test-output-file` option to re-enable logging.
+  logging.getLogger().disabled = True
+
+  unittest.main()
diff --git a/core/base_rules.mk b/core/base_rules.mk
index b8aa5fe..4c92814 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -694,6 +694,16 @@
 endif
 
 ###########################################################
+## SOONG INSTALL PAIRS
+###########################################################
+# Make this module's targets depend on the files installed via LOCAL_SOONG_INSTALL_PAIRS (provided by Soong).
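+# Each pair has the form <soong-built-file>:<installed-file>; the installed
+# file (the second field) is the dependency extracted below.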
+ifneq (,$(LOCAL_SOONG_INSTALLED_MODULE))
+$(my_all_targets): \
+    $(foreach f, $(LOCAL_SOONG_INSTALL_PAIRS),\
+      $(word 2,$(subst :,$(space),$(f))))
+endif
+
+###########################################################
 ## Compatibility suite files.
 ###########################################################
 ifdef LOCAL_COMPATIBILITY_SUITE
diff --git a/core/config.mk b/core/config.mk
index daefa70..22ec292 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -420,9 +420,12 @@
 .KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED
 
 # Boolean variable determining if AOSP relies on bionic's PAGE_SIZE macro.
-TARGET_NO_BIONIC_PAGE_SIZE_MACRO := false
 ifdef PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
   TARGET_NO_BIONIC_PAGE_SIZE_MACRO := $(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)
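+# Otherwise, derive the default from the vendor API level: products with a VSR
+# API level below 35 keep the previous default (false); newer products default
+# to true (e.g. so code does not assume a 4KiB page size).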
+else ifeq ($(call math_lt,$(VSR_VENDOR_API_LEVEL),35),true)
+  TARGET_NO_BIONIC_PAGE_SIZE_MACRO := false
+else
+  TARGET_NO_BIONIC_PAGE_SIZE_MACRO := true
 endif
 .KATI_READONLY := TARGET_NO_BIONIC_PAGE_SIZE_MACRO
 
diff --git a/core/product_config.mk b/core/product_config.mk
index d16c38d..4eeac95 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -630,6 +630,15 @@
 endif
 endef
 
+ifndef PRODUCT_VIRTUAL_AB_COW_VERSION
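+  # Default to virtual A/B CoW format version 2; devices shipping with API
+  # level 34 (Android 14) or later default to version 3.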
+  PRODUCT_VIRTUAL_AB_COW_VERSION := 2
+  ifdef PRODUCT_SHIPPING_API_LEVEL
+    ifeq (true,$(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),34))
+      PRODUCT_VIRTUAL_AB_COW_VERSION := 3
+    endif
+  endif
+endif
+
 # Copy and check the value of each PRODUCT_BUILD_*_IMAGE variable
 $(foreach image, \
     PVMFW \
diff --git a/envsetup.sh b/envsetup.sh
index fbe522d..ca75132 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1103,6 +1103,48 @@
     $ADB "${@}"
 }
 
+function run_tool_with_logging() {
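+  # Usage: run_tool_with_logging <tool_tag> <tool_binary> [args...]
+  # Runs the tool and, when ${ANDROID_ENABLE_TOOL_LOGGING} is "true" and
+  # ${ANDROID_TOOL_LOGGER} is set, reports the invocation to the logger,
+  # e.g.: run_tool_with_logging "ADB" adb devices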
+  # Run commands in a subshell for us to handle forced terminations with a trap
+  # handler.
+  (
+  local tool_tag="$1"
+  shift
+  local tool_binary="$1"
+  shift
+
+  # If logging is not enabled or the logger is not configured, run the original command and return.
+  if [[ "${ANDROID_ENABLE_TOOL_LOGGING}" != "true" ]] || [[ -z "${ANDROID_TOOL_LOGGER}" ]]; then
+     "${tool_binary}" "${@}"
+     return $?
+  fi
+
+  # Otherwise, run the original command and call the logger when done.
+  local start_time
+  start_time=$(date +%s.%N)
+  local logger=${ANDROID_TOOL_LOGGER}
+
+  # Install a trap to call the logger even when the process terminates abnormally.
+  # The logger is run in the background and its output suppressed to avoid
+  # interference with the user flow.
+  trap '
+  exit_code=$?;
+  # Remove the trap to prevent logging twice.
+  trap - EXIT;
+  "${logger}" \
+    --tool_tag "${tool_tag}" \
+    --start_timestamp "${start_time}" \
+    --end_timestamp "$(date +%s.%N)" \
+    --tool_args \""${@}"\" \
+    --exit_code "${exit_code}" \
+    > /dev/null 2>&1 &
+  exit ${exit_code}
+  ' SIGINT SIGTERM SIGQUIT EXIT
+
+  # Run the original command.
+  "${tool_binary}" "${@}"
+  )
+}
+
 # simplified version of ps; output in the form
 # <pid> <procname>
 function qpid() {
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index d3514a5..d944615 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -72,3 +72,5 @@
 PRODUCT_DEVICE := generic_arm64
 PRODUCT_BRAND := Android
 PRODUCT_MODEL := AOSP on ARM64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 3040dd3..4344f50 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -74,3 +74,5 @@
 PRODUCT_DEVICE := generic_x86_64
 PRODUCT_BRAND := Android
 PRODUCT_MODEL := AOSP on x86_64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 884af4f..47d9db5 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -484,10 +484,8 @@
 PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
     frameworks/base/config/preloaded-classes:system/etc/preloaded-classes)
 
-# Note: it is acceptable to not have a dirty-image-objects file. In that case, the special bin
-#       for known dirty objects in the image will be empty.
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
-    frameworks/base/config/dirty-image-objects:system/etc/dirty-image-objects)
+# Enable dirty image object binning to reduce dirty pages in the image.
+PRODUCT_PACKAGES += dirty-image-objects
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
 
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index fc6cc68..36897fe 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -8,11 +8,14 @@
 # Check if vndk list is changed
 droidcore: check-abi-dump-list
 
-check-vndk-abi-dump-list-timestamp := $(call intermediates-dir-for,PACKAGING,vndk)/check-abi-dump-list-timestamp
+check-abi-dump-list-timestamp := $(call intermediates-dir-for,PACKAGING,vndk)/check-abi-dump-list-timestamp
 
-ifneq ($(SKIP_ABI_CHECKS),true)
+# The ABI tool does not support sanitizer and coverage builds.
+ifeq (,$(filter true,$(SKIP_ABI_CHECKS) $(CLANG_COVERAGE)))
+ifeq (,$(SANITIZE_TARGET))
 check-abi-dump-list: $(check-abi-dump-list-timestamp)
 endif
+endif
 
 #####################################################################
 # ABI reference dumps.
@@ -34,6 +37,9 @@
 endef
 
 # Subsets of LSDUMP_PATHS.
+.PHONY: findlsdumps_APEX
+findlsdumps_APEX: $(LSDUMP_PATHS_FILE) $(call filter-abi-dump-paths,APEX,$(LSDUMP_PATHS))
+
 .PHONY: findlsdumps_LLNDK
 findlsdumps_LLNDK: $(LSDUMP_PATHS_FILE) $(call filter-abi-dump-paths,LLNDK,$(LSDUMP_PATHS))
 
@@ -48,7 +54,7 @@
 
 #####################################################################
 # Check that all ABI reference dumps have corresponding
-# NDK/VNDK/PLATFORM libraries.
+# APEX/LLNDK/PLATFORM libraries.
 
 # $(1): The directory containing ABI dumps.
 # Return a list of ABI dump paths ending with .so.lsdump.
@@ -60,9 +66,12 @@
 
 # $(1): A list of tags.
 # $(2): A list of tag:path.
-# Return the file names of the ABI dumps that match the tags.
+# Return the file names of the ABI dumps that match the tags, and replace the
+# file name extensions with .so.lsdump.
 define filter-abi-dump-names
-$(notdir $(call filter-abi-dump-paths,$(1),$(2)))
+$(patsubst %.so.llndk.lsdump,%.so.lsdump, \
+  $(patsubst %.so.apex.lsdump,%.so.lsdump, \
+    $(notdir $(call filter-abi-dump-paths,$(1),$(2)))))
 endef
 
 VNDK_ABI_DUMP_DIR := prebuilts/abi-dumps/vndk/$(RELEASE_BOARD_API_LEVEL)
@@ -77,21 +86,22 @@
 # Check for superfluous lsdump files. Since LSDUMP_PATHS only covers the
 # libraries that can be built from source in the current build, and prebuilts of
 # Mainline modules may be in use, we also allow the libs in STUB_LIBRARIES for
-# NDK and platform ABIs.
+# platform ABIs.
+# In addition, libRS is allowed because it's disabled for RISC-V.
 
 $(check-abi-dump-list-timestamp): PRIVATE_LSDUMP_PATHS := $(LSDUMP_PATHS)
 $(check-abi-dump-list-timestamp): PRIVATE_STUB_LIBRARIES := $(STUB_LIBRARIES)
 $(check-abi-dump-list-timestamp):
 	$(eval added_vndk_abi_dumps := $(strip $(sort $(filter-out \
-	  $(call filter-abi-dump-names,LLNDK,$(PRIVATE_LSDUMP_PATHS)), \
+	  $(call filter-abi-dump-names,LLNDK,$(PRIVATE_LSDUMP_PATHS)) libRS.so.lsdump, \
 	  $(notdir $(VNDK_ABI_DUMPS))))))
 	$(if $(added_vndk_abi_dumps), \
 	  echo -e "Found unexpected ABI reference dump files under $(VNDK_ABI_DUMP_DIR). It is caused by mismatch between Android.bp and the dump files. Run \`find \$${ANDROID_BUILD_TOP}/$(VNDK_ABI_DUMP_DIR) '(' -name $(subst $(space), -or -name ,$(added_vndk_abi_dumps)) ')' -delete\` to delete the dump files.")
 
 	# TODO(b/314010764): Remove LLNDK tag after PLATFORM_SDK_VERSION is upgraded to 35.
 	$(eval added_platform_abi_dumps := $(strip $(sort $(filter-out \
-	  $(call filter-abi-dump-names,LLNDK PLATFORM,$(PRIVATE_LSDUMP_PATHS)) \
-	  $(addsuffix .lsdump,$(PRIVATE_STUB_LIBRARIES)), \
+	  $(call filter-abi-dump-names,APEX LLNDK PLATFORM,$(PRIVATE_LSDUMP_PATHS)) \
+	  $(addsuffix .lsdump,$(PRIVATE_STUB_LIBRARIES)) libRS.so.lsdump, \
 	  $(notdir $(PLATFORM_ABI_DUMPS))))))
 	$(if $(added_platform_abi_dumps), \
 	  echo -e "Found unexpected ABI reference dump files under $(PLATFORM_ABI_DUMP_DIR). It is caused by mismatch between Android.bp and the dump files. Run \`find \$${ANDROID_BUILD_TOP}/$(PLATFORM_ABI_DUMP_DIR) '(' -name $(subst $(space), -or -name ,$(added_platform_abi_dumps)) ')' -delete\` to delete the dump files.")
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 38ba219..503c9b3 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -59,10 +59,6 @@
 PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
     frameworks/base/config/compiled-classes-phone:system/etc/compiled-classes)
 
-# Enable dirty image object binning to reduce dirty pages in the image.
-PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
-    frameworks/base/dirty-image-objects-phone:system/etc/dirty-image-objects)
-
 # On userdebug builds, collect more tombstones by default.
 ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
 PRODUCT_VENDOR_PROPERTIES += \
diff --git a/target/product/module_arm64.mk b/target/product/module_arm64.mk
index 2e8c8a7..634a03d 100644
--- a/target/product/module_arm64.mk
+++ b/target/product/module_arm64.mk
@@ -19,3 +19,5 @@
 
 PRODUCT_NAME := module_arm64
 PRODUCT_DEVICE := module_arm64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/module_arm64only.mk b/target/product/module_arm64only.mk
index c0769bf..822ac24 100644
--- a/target/product/module_arm64only.mk
+++ b/target/product/module_arm64only.mk
@@ -19,3 +19,5 @@
 
 PRODUCT_NAME := module_arm64only
 PRODUCT_DEVICE := module_arm64only
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/module_x86_64.mk b/target/product/module_x86_64.mk
index 20f443a..9bd0264 100644
--- a/target/product/module_x86_64.mk
+++ b/target/product/module_x86_64.mk
@@ -19,3 +19,5 @@
 
 PRODUCT_NAME := module_x86_64
 PRODUCT_DEVICE := module_x86_64
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/module_x86_64only.mk b/target/product/module_x86_64only.mk
index b0d72bf..056fb90 100644
--- a/target/product/module_x86_64only.mk
+++ b/target/product/module_x86_64only.mk
@@ -19,3 +19,5 @@
 
 PRODUCT_NAME := module_x86_64only
 PRODUCT_DEVICE := module_x86_64only
+
+PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO := true
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index af0f7a9..418aaa4 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -20,5 +20,3 @@
 #
 # All U+ launching devices should instead use vabc_features.mk.
 $(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/vabc_features.mk)
-
-PRODUCT_VIRTUAL_AB_COW_VERSION ?= 2
diff --git a/tests/Android.bp b/tests/Android.bp
new file mode 100644
index 0000000..b2ff583
--- /dev/null
+++ b/tests/Android.bp
@@ -0,0 +1,40 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_test_host {
+    name: "run_tool_with_logging_test",
+    main: "run_tool_with_logging_test.py",
+    pkg_path: "testdata",
+    srcs: [
+        "run_tool_with_logging_test.py",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    data: [
+        ":envsetup_minimum.zip",
+    ],
+    test_suites: [
+        "general-tests",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
diff --git a/tests/run_tool_with_logging_test.py b/tests/run_tool_with_logging_test.py
new file mode 100644
index 0000000..1eb78f1
--- /dev/null
+++ b/tests/run_tool_with_logging_test.py
@@ -0,0 +1,336 @@
+# Copyright 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import dataclasses
+from importlib import resources
+import logging
+import os
+from pathlib import Path
+import re
+import signal
+import stat
+import subprocess
+import tempfile
+import textwrap
+import time
+import unittest
+import zipfile
+import sys
+
+EXIT_RETURN_CODE = 0
+INTERRUPTED_RETURN_CODE = 130
+
+
+class RunToolWithLoggingTest(unittest.TestCase):
+
+  @classmethod
+  def setUpClass(cls):
+    super().setUpClass()
+    # Configure to print logging to stdout.
+    logging.basicConfig(filename=None, level=logging.DEBUG)
+    console = logging.StreamHandler(sys.stdout)
+    logging.getLogger('').addHandler(console)
+
+  def setUp(self):
+    super().setUp()
+    self.working_dir = tempfile.TemporaryDirectory()
+    # Run all the tests from working_dir which is our temp Android build top.
+    os.chdir(self.working_dir.name)
+    # Extract envsetup.zip, which contains envsetup.sh and the other scripts
+    # required to set up the build environment.
+    with resources.files("testdata").joinpath("envsetup.zip").open('rb') as p:
+      with zipfile.ZipFile(p, "r") as zip_f:
+        zip_f.extractall()
+
+  def tearDown(self):
+    self.working_dir.cleanup()
+    super().tearDown()
+
+  def test_does_not_log_when_logging_disabled(self):
+    test_tool = TestScript.create(self.working_dir)
+    test_logger = TestScript.create(self.working_dir)
+
+    self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=false
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    test_tool.assert_called_once_with_args("arg1 arg2")
+    test_logger.assert_not_called()
+
+  def test_does_not_log_when_logger_var_unset(self):
+    test_tool = TestScript.create(self.working_dir)
+    test_logger = TestScript.create(self.working_dir)
+
+    self._run_script_and_wait(f"""
+      unset ANDROID_ENABLE_TOOL_LOGGING
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    test_tool.assert_called_once_with_args("arg1 arg2")
+    test_logger.assert_not_called()
+
+  def test_does_not_log_when_logger_var_empty(self):
+    test_tool = TestScript.create(self.working_dir)
+
+    self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER=""
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    test_tool.assert_called_once_with_args("arg1 arg2")
+
+  def test_does_not_log_with_logger_unset(self):
+    test_tool = TestScript.create(self.working_dir)
+
+    self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      unset ANDROID_TOOL_LOGGER
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    test_tool.assert_called_once_with_args("arg1 arg2")
+
+  def test_log_success_with_logger_enabled(self):
+    test_tool = TestScript.create(self.working_dir)
+    test_logger = TestScript.create(self.working_dir)
+
+    self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    test_tool.assert_called_once_with_args("arg1 arg2")
+    expected_logger_args = (
+        r"--tool_tag FAKE_TOOL --start_timestamp \d+\.\d+ --end_timestamp"
+        r' \d+\.\d+ --tool_args "arg1 arg2" --exit_code 0'
+    )
+    test_logger.assert_called_once_with_args(expected_logger_args)
+
+  def test_run_tool_output_is_same_with_and_without_logging(self):
+    test_tool = TestScript.create(self.working_dir, "echo 'tool called'")
+    test_logger = TestScript.create(self.working_dir)
+
+    run_tool_with_logging_stdout, run_tool_with_logging_stderr = (
+        self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+    )
+
+    run_tool_without_logging_stdout, run_tool_without_logging_stderr = (
+        self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      {test_tool.executable} arg1 arg2
+    """)
+    )
+
+    self.assertEqual(
+        run_tool_with_logging_stdout, run_tool_without_logging_stdout
+    )
+    self.assertEqual(
+        run_tool_with_logging_stderr, run_tool_without_logging_stderr
+    )
+
+  def test_logger_output_is_suppressed(self):
+    test_tool = TestScript.create(self.working_dir)
+    test_logger = TestScript.create(self.working_dir, "echo 'logger called'")
+
+    run_tool_with_logging_output, _ = self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    self.assertNotIn("logger called", run_tool_with_logging_output)
+
+  def test_logger_error_is_suppressed(self):
+    test_tool = TestScript.create(self.working_dir)
+    test_logger = TestScript.create(
+        self.working_dir, "echo 'logger failed' > /dev/stderr; exit 1"
+    )
+
+    _, err = self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    self.assertNotIn("logger failed", err)
+
+  def test_log_success_when_tool_interrupted(self):
+    test_tool = TestScript.create(self.working_dir, script_body="sleep 100")
+    test_logger = TestScript.create(self.working_dir)
+
+    process = self._run_script_in_build_env(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    pgid = os.getpgid(process.pid)
+    # Give some time for the subprocess to start.
+    time.sleep(1)
+    # Kill the subprocess and any processes created in the same group.
+    os.killpg(pgid, signal.SIGINT)
+
+    returncode, _, _ = self._wait_for_process(process)
+    self.assertEqual(returncode, INTERRUPTED_RETURN_CODE)
+
+    expected_logger_args = (
+        r"--tool_tag FAKE_TOOL --start_timestamp \d+\.\d+ --end_timestamp"
+        r' \d+\.\d+ --tool_args "arg1 arg2" --exit_code 130'
+    )
+    test_logger.assert_called_once_with_args(expected_logger_args)
+
+  def test_logger_can_be_toggled_on(self):
+    test_tool = TestScript.create(self.working_dir)
+    test_logger = TestScript.create(self.working_dir)
+
+    self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=false
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    test_logger.assert_called_with_times(1)
+
+  def test_logger_can_be_toggled_off(self):
+    test_tool = TestScript.create(self.working_dir)
+    test_logger = TestScript.create(self.working_dir)
+
+    self._run_script_and_wait(f"""
+      ANDROID_ENABLE_TOOL_LOGGING=true
+      ANDROID_TOOL_LOGGER="{test_logger.executable}"
+      ANDROID_ENABLE_TOOL_LOGGING=false
+      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+    """)
+
+    test_logger.assert_not_called()
+
+  def _create_build_env_script(self) -> str:
+    return f"""
+      source {Path(self.working_dir.name).joinpath("build/make/envsetup.sh")}
+    """
+
+  def _run_script_and_wait(self, test_script: str) -> tuple[str, str]:
+    process = self._run_script_in_build_env(test_script)
+    returncode, out, err = self._wait_for_process(process)
+    logging.debug("script stdout: %s", out)
+    logging.debug("script stderr: %s", err)
+    self.assertEqual(returncode, EXIT_RETURN_CODE)
+    return out, err
+
+  def _run_script_in_build_env(self, test_script: str) -> subprocess.Popen:
+    setup_build_env_script = self._create_build_env_script()
+    return subprocess.Popen(
+        setup_build_env_script + test_script,
+        shell=True,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        text=True,
+        start_new_session=True,
+        executable='/bin/bash'
+        )
+
+  def _wait_for_process(
+      self, process: subprocess.Popen
+  ) -> tuple[int, str, str]:
+    pgid = os.getpgid(process.pid)
+    out, err = process.communicate()
+    # Wait for all processes in the same group to complete, since the logger
+    # runs as a separate detached process.
+    self._wait_for_process_group(pgid)
+    return (process.returncode, out, err)
+
+  def _wait_for_process_group(self, pgid: int, timeout: int = 5):
+    """Waits for all subprocesses within the process group to complete."""
+    start_time = time.time()
+    while True:
+      if time.time() - start_time > timeout:
+        raise TimeoutError(
+            f"Process group did not complete after {timeout} seconds"
+        )
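+      # Scan /proc for any process still in the group. If a live member is
+      # found, sleep briefly and re-check; the for-loop's else branch only runs
+      # when no member was found, meaning the group has finished.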
+      for pid in os.listdir("/proc"):
+        if pid.isdigit():
+          try:
+            if os.getpgid(int(pid)) == pgid:
+              time.sleep(0.1)
+              break
+          except (FileNotFoundError, PermissionError, ProcessLookupError):
+            pass
+      else:
+        # All processes have completed.
+        break
+
+
+@dataclasses.dataclass
+class TestScript:
+  executable: Path
+  output_file: Path
+
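+  # Creates an executable stub script that appends its command-line arguments
+  # to output_file (so tests can assert how it was invoked) and then runs the
+  # optional script_body.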
+  @staticmethod
+  def create(
+      temp_dir: tempfile.TemporaryDirectory, script_body: str = ""
+  ) -> "TestScript":
+    with tempfile.NamedTemporaryFile(dir=temp_dir.name, delete=False) as f:
+      output_file = f.name
+
+    with tempfile.NamedTemporaryFile(dir=temp_dir.name, delete=False) as f:
+      executable = f.name
+      executable_contents = textwrap.dedent(f"""
+      #!/bin/bash
+
+      echo "${{@}}" >> {output_file}
+      {script_body}
+      """)
+      f.write(executable_contents.encode("utf-8"))
+
+    os.chmod(f.name, os.stat(f.name).st_mode | stat.S_IEXEC)
+
+    return TestScript(executable, output_file)
+
+  def assert_called_with_times(self, expected_call_times: int):
+    lines = self._read_contents_from_output_file()
+    assert len(lines) == expected_call_times, (
+        f"Expect to call {expected_call_times} times, but actually called"
+        f" {len(lines)} times."
+    )
+
+  def assert_called_with_args(self, expected_args: str):
+    lines = self._read_contents_from_output_file()
+    assert len(lines) > 0
+    assert re.search(expected_args, lines[0]), (
+        f"Expect to call with args {expected_args}, but actually called with"
+        f" args {lines[0]}."
+    )
+
+  def assert_not_called(self):
+    self.assert_called_with_times(0)
+
+  def assert_called_once_with_args(self, expected_args: str):
+    self.assert_called_with_times(1)
+    self.assert_called_with_args(expected_args)
+
+  def _read_contents_from_output_file(self) -> list[str]:
+    with open(self.output_file, "r") as f:
+      return f.readlines()
+
+
+if __name__ == "__main__":
+  unittest.main()
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
index 6bd0d06..bf5e1a9 100644
--- a/tools/aconfig/Cargo.toml
+++ b/tools/aconfig/Cargo.toml
@@ -2,6 +2,7 @@
 
 members = [
     "aconfig",
+    "aconfig_device_paths",
     "aconfig_protos",
     "aconfig_storage_file",
     "aconfig_storage_read_api",
diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp
new file mode 100644
index 0000000..21aa9a9
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/Android.bp
@@ -0,0 +1,38 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+rust_defaults {
+    name: "libaconfig_device_paths.defaults",
+    edition: "2021",
+    clippy_lints: "android",
+    lints: "android",
+    srcs: ["src/lib.rs"],
+    rustlibs: [
+        "libaconfig_protos",
+        "libanyhow",
+        "libprotobuf",
+        "libregex",
+    ],
+}
+
+rust_library {
+    name: "libaconfig_device_paths",
+    crate_name: "aconfig_device_paths",
+    host_supported: true,
+    defaults: ["libaconfig_device_paths.defaults"],
+}
diff --git a/tools/aconfig/aconfig_device_paths/Cargo.toml b/tools/aconfig/aconfig_device_paths/Cargo.toml
new file mode 100644
index 0000000..dbe9b3a
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "aconfig_device_paths"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = "1.0.82"
diff --git a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
new file mode 100644
index 0000000..3d2deb2
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
@@ -0,0 +1,6 @@
+[
+    "/system/etc/aconfig_flags.pb",
+    "/system_ext/etc/aconfig_flags.pb",
+    "/product/etc/aconfig_flags.pb",
+    "/vendor/etc/aconfig_flags.pb",
+]
diff --git a/tools/aconfig/aconfig_device_paths/src/lib.rs b/tools/aconfig/aconfig_device_paths/src/lib.rs
new file mode 100644
index 0000000..7bb62f4
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/src/lib.rs
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! Library for finding all aconfig on-device protobuf file paths.
+
+use anyhow::Result;
+use std::path::PathBuf;
+
+use std::fs;
+
+/// Determine all paths that contain an aconfig protobuf file.
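+///
+/// Illustrative usage (a sketch; assumes an Android device filesystem with the
+/// standard partitions and /apex mounted):
+///
+/// ```ignore
+/// let paths = aconfig_device_paths::parsed_flags_proto_paths()?;
+/// for path in &paths {
+///     println!("{}", path.display());
+/// }
+/// ```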
+pub fn parsed_flags_proto_paths() -> Result<Vec<PathBuf>> {
+    let mut result: Vec<PathBuf> = include!("../partition_aconfig_flags_paths.txt")
+        .map(|s| PathBuf::from(s.to_string()))
+        .to_vec();
+    for dir in fs::read_dir("/apex")? {
+        let dir = dir?;
+
+        // Only scan the currently active version of each mainline module; skip the @version dirs.
+        if dir.file_name().as_encoded_bytes().iter().any(|&b| b == b'@') {
+            continue;
+        }
+
+        // DirEntry::path() already yields the absolute /apex/<name> path.
+        let mut path = dir.path();
+        path.push("etc");
+        path.push("aconfig_flags.pb");
+        if path.exists() {
+            result.push(path);
+        }
+    }
+
+    Ok(result)
+}
diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp
index b590312..d60ba92 100644
--- a/tools/aconfig/aconfig_storage_file/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/Android.bp
@@ -56,7 +56,7 @@
     min_sdk_version: "29",
 }
 
-cc_library_static {
+cc_library {
     name: "libaconfig_storage_protos_cc",
     proto: {
         export_proto_headers: true,
diff --git a/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto b/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto
index e1c1c7f..7de43ca 100644
--- a/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto
+++ b/tools/aconfig/aconfig_storage_file/protos/aconfig_storage_metadata.proto
@@ -27,7 +27,8 @@
   optional string flag_map = 4;
   optional string flag_val = 5;
   optional string flag_info = 6;
-  optional int64 timestamp = 7;
+  optional string local_overrides = 7;
+  optional int64 timestamp = 8;
 }
 
 message storage_files {
diff --git a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
index 01785e1..d57ca64 100644
--- a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
+++ b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
@@ -66,8 +66,31 @@
   return Error() << "Unable to find storage files for container " << container;
 }
 
+
+namespace private_internal_api {
+
+/// Get mutable mapped file implementation.
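+/// Only flag value and flag info storage files may be mapped mutable; any other
+/// file type is rejected with an error.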
+Result<MutableMappedStorageFile> get_mutable_mapped_file_impl(
+    std::string const& pb_file,
+    std::string const& container,
+    StorageFileType file_type) {
+  if (file_type != StorageFileType::flag_val &&
+      file_type != StorageFileType::flag_info) {
+    return Error() << "Cannot create mutable mapped file for this file type";
+  }
+
+  auto file_result = find_storage_file(pb_file, container, file_type);
+  if (!file_result.ok()) {
+    return Error() << file_result.error();
+  }
+
+  return map_mutable_storage_file(*file_result);
+}
+
+} // namespace private_internal_api
+
 /// Map a storage file
-static Result<MutableMappedStorageFile> map_storage_file(std::string const& file) {
+Result<MutableMappedStorageFile> map_mutable_storage_file(std::string const& file) {
   struct stat file_stat;
   if (stat(file.c_str(), &file_stat) < 0) {
     return ErrnoError() << "stat failed";
@@ -97,28 +120,6 @@
   return mapped_file;
 }
 
-namespace private_internal_api {
-
-/// Get mutable mapped file implementation.
-Result<MutableMappedStorageFile> get_mutable_mapped_file_impl(
-    std::string const& pb_file,
-    std::string const& container,
-    StorageFileType file_type) {
-  if (file_type != StorageFileType::flag_val &&
-      file_type != StorageFileType::flag_info) {
-    return Error() << "Cannot create mutable mapped file for this file type";
-  }
-
-  auto file_result = find_storage_file(pb_file, container, file_type);
-  if (!file_result.ok()) {
-    return Error() << file_result.error();
-  }
-
-  return map_storage_file(*file_result);
-}
-
-} // namespace private internal api
-
 /// Get mutable mapped file
 Result<MutableMappedStorageFile> get_mutable_mapped_file(
     std::string const& container,
diff --git a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
index 8699b88..e9e4ebb 100644
--- a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
+++ b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
@@ -26,6 +26,10 @@
 
 } // namespace private_internal_api
 
+/// Map a storage file
+Result<MutableMappedStorageFile> map_mutable_storage_file(
+    std::string const& file);
+
 /// Get mapped writeable storage file
 Result<MutableMappedStorageFile> get_mutable_mapped_file(
     std::string const& container,
diff --git a/tools/aconfig/aconfig_storage_write_api/src/lib.rs b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
index 8b7e459..7148d06 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
@@ -443,10 +443,12 @@
             .unwrap();
             for i in 0..8 {
                 set_flag_is_sticky(&mut file, FlagValueType::Boolean, i, true).unwrap();
-                let attribute = get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
                 assert!((attribute & (FlagInfoBit::IsSticky as u8)) != 0);
                 set_flag_is_sticky(&mut file, FlagValueType::Boolean, i, false).unwrap();
-                let attribute = get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
                 assert!((attribute & (FlagInfoBit::IsSticky as u8)) == 0);
             }
         }
@@ -485,10 +487,12 @@
             .unwrap();
             for i in 0..8 {
                 set_flag_has_override(&mut file, FlagValueType::Boolean, i, true).unwrap();
-                let attribute = get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
                 assert!((attribute & (FlagInfoBit::HasOverride as u8)) != 0);
                 set_flag_has_override(&mut file, FlagValueType::Boolean, i, false).unwrap();
-                let attribute = get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
+                let attribute =
+                    get_flag_attribute_at_offset(&flag_info_path, FlagValueType::Boolean, i);
                 assert!((attribute & (FlagInfoBit::HasOverride as u8)) == 0);
             }
         }
diff --git a/tools/aconfig/aflags/Android.bp b/tools/aconfig/aflags/Android.bp
index 4920a6f..2a02379 100644
--- a/tools/aconfig/aflags/Android.bp
+++ b/tools/aconfig/aflags/Android.bp
@@ -9,6 +9,7 @@
     lints: "android",
     srcs: ["src/main.rs"],
     rustlibs: [
+        "libaconfig_device_paths",
         "libaconfig_protos",
         "libaconfig_storage_read_api",
         "libaconfig_storage_file",
diff --git a/tools/aconfig/aflags/Cargo.toml b/tools/aconfig/aflags/Cargo.toml
index cce7f9d..eeae295 100644
--- a/tools/aconfig/aflags/Cargo.toml
+++ b/tools/aconfig/aflags/Cargo.toml
@@ -13,3 +13,4 @@
 aconfig_storage_file = { version = "0.1.0", path = "../aconfig_storage_file" }
 aconfig_storage_read_api = { version = "0.1.0", path = "../aconfig_storage_read_api" }
 clap = {version = "4.5.2" }
+aconfig_device_paths = { version = "0.1.0", path = "../aconfig_device_paths" }
diff --git a/tools/aconfig/aflags/src/device_config_source.rs b/tools/aconfig/aflags/src/device_config_source.rs
index 089f33d..cf6ab28 100644
--- a/tools/aconfig/aflags/src/device_config_source.rs
+++ b/tools/aconfig/aflags/src/device_config_source.rs
@@ -14,78 +14,17 @@
  * limitations under the License.
  */
 
-use crate::{Flag, FlagPermission, FlagSource, FlagValue, ValuePickedFrom};
-use aconfig_protos::ProtoFlagPermission as ProtoPermission;
-use aconfig_protos::ProtoFlagState as ProtoState;
-use aconfig_protos::ProtoParsedFlag;
-use aconfig_protos::ProtoParsedFlags;
+use crate::load_protos;
+use crate::{Flag, FlagSource, FlagValue, ValuePickedFrom};
+
 use anyhow::{anyhow, bail, Result};
 use regex::Regex;
-use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::process::Command;
-use std::{fs, str};
+use std::str;
 
 pub struct DeviceConfigSource {}
 
-fn convert_parsed_flag(flag: &ProtoParsedFlag) -> Flag {
-    let namespace = flag.namespace().to_string();
-    let package = flag.package().to_string();
-    let name = flag.name().to_string();
-
-    let container = if flag.container().is_empty() {
-        "system".to_string()
-    } else {
-        flag.container().to_string()
-    };
-
-    let value = match flag.state() {
-        ProtoState::ENABLED => FlagValue::Enabled,
-        ProtoState::DISABLED => FlagValue::Disabled,
-    };
-
-    let permission = match flag.permission() {
-        ProtoPermission::READ_ONLY => FlagPermission::ReadOnly,
-        ProtoPermission::READ_WRITE => FlagPermission::ReadWrite,
-    };
-
-    Flag {
-        namespace,
-        package,
-        name,
-        container,
-        value,
-        staged_value: None,
-        permission,
-        value_picked_from: ValuePickedFrom::Default,
-    }
-}
-
-fn read_pb_files() -> Result<Vec<Flag>> {
-    let mut flags: BTreeMap<String, Flag> = BTreeMap::new();
-    for partition in ["system", "system_ext", "product", "vendor"] {
-        let path = format!("/{partition}/etc/aconfig_flags.pb");
-        let Ok(bytes) = fs::read(&path) else {
-            eprintln!("warning: failed to read {}", path);
-            continue;
-        };
-        let parsed_flags: ProtoParsedFlags = protobuf::Message::parse_from_bytes(&bytes)?;
-        for flag in parsed_flags.parsed_flag {
-            let key = format!("{}.{}", flag.package(), flag.name());
-            let container = if flag.container().is_empty() {
-                "system".to_string()
-            } else {
-                flag.container().to_string()
-            };
-
-            if container.eq(partition) {
-                flags.insert(key, convert_parsed_flag(&flag));
-            }
-        }
-    }
-    Ok(flags.values().cloned().collect())
-}
-
 fn parse_device_config(raw: &str) -> Result<HashMap<String, FlagValue>> {
     let mut flags = HashMap::new();
     let regex = Regex::new(r"(?m)^([[[:alnum:]]_]+/[[[:alnum:]]_\.]+)=(true|false)$")?;
@@ -180,7 +119,7 @@
 
 impl FlagSource for DeviceConfigSource {
     fn list_flags() -> Result<Vec<Flag>> {
-        let pb_flags = read_pb_files()?;
+        let pb_flags = load_protos::load()?;
         let dc_flags = read_device_config_flags()?;
         let staged_flags = read_staged_flags()?;
 
diff --git a/tools/aconfig/aflags/src/load_protos.rs b/tools/aconfig/aflags/src/load_protos.rs
new file mode 100644
index 0000000..90d8599
--- /dev/null
+++ b/tools/aconfig/aflags/src/load_protos.rs
@@ -0,0 +1,62 @@
+use crate::{Flag, FlagPermission, FlagValue, ValuePickedFrom};
+use aconfig_protos::ProtoFlagPermission as ProtoPermission;
+use aconfig_protos::ProtoFlagState as ProtoState;
+use aconfig_protos::ProtoParsedFlag;
+use aconfig_protos::ProtoParsedFlags;
+use anyhow::Result;
+use std::fs;
+use std::path::Path;
+
+// TODO(b/329875578): use container field directly instead of inferring.
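+//
+// Illustrative examples (the apex module name below is hypothetical):
+//   "/system/etc/aconfig_flags.pb"                -> "system"
+//   "/apex/com.android.foo/etc/aconfig_flags.pb"  -> "com.android.foo"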
+fn infer_container(path: &Path) -> String {
+    let path_str = path.to_string_lossy();
+    path_str
+        .strip_prefix("/apex/")
+        .or_else(|| path_str.strip_prefix('/'))
+        .unwrap_or(&path_str)
+        .strip_suffix("/etc/aconfig_flags.pb")
+        .unwrap_or(&path_str)
+        .to_string()
+}
+
+fn convert_parsed_flag(path: &Path, flag: &ProtoParsedFlag) -> Flag {
+    let namespace = flag.namespace().to_string();
+    let package = flag.package().to_string();
+    let name = flag.name().to_string();
+
+    let value = match flag.state() {
+        ProtoState::ENABLED => FlagValue::Enabled,
+        ProtoState::DISABLED => FlagValue::Disabled,
+    };
+
+    let permission = match flag.permission() {
+        ProtoPermission::READ_ONLY => FlagPermission::ReadOnly,
+        ProtoPermission::READ_WRITE => FlagPermission::ReadWrite,
+    };
+
+    Flag {
+        namespace,
+        package,
+        name,
+        container: infer_container(path),
+        value,
+        staged_value: None,
+        permission,
+        value_picked_from: ValuePickedFrom::Default,
+    }
+}
+
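+/// Load all flags from the on-device parsed_flags protobuf files reported by
+/// aconfig_device_paths, converting each entry into an aflags `Flag`.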
+pub(crate) fn load() -> Result<Vec<Flag>> {
+    let mut result = Vec::new();
+
+    let paths = aconfig_device_paths::parsed_flags_proto_paths()?;
+    for path in paths {
+        let bytes = fs::read(path.clone())?;
+        let parsed_flags: ProtoParsedFlags = protobuf::Message::parse_from_bytes(&bytes)?;
+        for flag in parsed_flags.parsed_flag {
+            // TODO(b/334954748): enforce one-container-per-flag invariant.
+            result.push(convert_parsed_flag(&path, &flag));
+        }
+    }
+    Ok(result)
+}
diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs
index 1c453c5..4ce0d35 100644
--- a/tools/aconfig/aflags/src/main.rs
+++ b/tools/aconfig/aflags/src/main.rs
@@ -25,6 +25,8 @@
 mod aconfig_storage_source;
 use aconfig_storage_source::AconfigStorageSource;
 
+mod load_protos;
+
 #[derive(Clone, PartialEq, Debug)]
 enum FlagPermission {
     ReadOnly,
diff --git a/tools/check-flagged-apis/Android.bp b/tools/check-flagged-apis/Android.bp
new file mode 100644
index 0000000..ebd79c1
--- /dev/null
+++ b/tools/check-flagged-apis/Android.bp
@@ -0,0 +1,51 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_team: "trendy_team_updatable_sdk_apis",
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_defaults {
+    name: "check-flagged-apis-defaults",
+    srcs: [
+        "src/com/android/checkflaggedapis/Main.kt",
+    ],
+    static_libs: [
+        "libaconfig_java_proto_lite",
+        "metalava-signature-reader",
+        "metalava-tools-common-m2-deps",
+    ],
+}
+
+java_binary_host {
+    name: "check-flagged-apis",
+    defaults: [
+        "check-flagged-apis-defaults",
+    ],
+    main_class: "com.android.checkflaggedapis.Main",
+}
+
+java_test_host {
+    name: "check-flagged-apis-test",
+    defaults: [
+        "check-flagged-apis-defaults",
+    ],
+    srcs: [
+        "src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt",
+    ],
+    static_libs: [
+        "tradefed",
+    ],
+}
diff --git a/tools/check-flagged-apis/OWNERS b/tools/check-flagged-apis/OWNERS
new file mode 100644
index 0000000..289e21e
--- /dev/null
+++ b/tools/check-flagged-apis/OWNERS
@@ -0,0 +1,4 @@
+amhk@google.com
+gurpreetgs@google.com
+michaelwr@google.com
+paulduffin@google.com
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
new file mode 100644
index 0000000..d2b75d4
--- /dev/null
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.checkflaggedapis
+
+import android.aconfig.Aconfig
+import com.android.tradefed.testtype.DeviceJUnit4ClassRunner
+import com.android.tradefed.testtype.junit4.BaseHostJUnit4Test
+import java.io.ByteArrayInputStream
+import java.io.ByteArrayOutputStream
+import java.io.InputStream
+import org.junit.Assert.assertEquals
+import org.junit.Test
+import org.junit.runner.RunWith
+
+private val API_SIGNATURE =
+    """
+      // Signature format: 2.0
+      package android {
+        public final class Clazz {
+          ctor public Clazz();
+          field @FlaggedApi("android.flag.foo") public static final int FOO = 1; // 0x1
+        }
+      }
+"""
+        .trim()
+
+private val API_VERSIONS =
+    """
+      <?xml version="1.0" encoding="utf-8"?>
+      <api version="3">
+        <class name="android/Clazz" since="1">
+          <method name="&lt;init>()V"/>
+          <field name="FOO"/>
+        </class>
+      </api>
+"""
+        .trim()
+
+private fun generateFlagsProto(fooState: Aconfig.flag_state): InputStream {
+  val parsed_flag =
+      Aconfig.parsed_flag
+          .newBuilder()
+          .setPackage("android.flag")
+          .setName("foo")
+          .setState(fooState)
+          .setPermission(Aconfig.flag_permission.READ_ONLY)
+          .build()
+  val parsed_flags = Aconfig.parsed_flags.newBuilder().addParsedFlag(parsed_flag).build()
+  val binaryProto = ByteArrayOutputStream()
+  parsed_flags.writeTo(binaryProto)
+  return ByteArrayInputStream(binaryProto.toByteArray())
+}
+
+@RunWith(DeviceJUnit4ClassRunner::class)
+class CheckFlaggedApisTest : BaseHostJUnit4Test() {
+  @Test
+  fun testParseApiSignature() {
+    val expected = setOf(Pair(Symbol("android.Clazz.FOO"), Flag("android.flag.foo")))
+    val actual = parseApiSignature("in-memory", API_SIGNATURE.byteInputStream())
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testParseFlagValues() {
+    val expected: Map<Flag, Boolean> = mapOf(Flag("android.flag.foo") to true)
+    val actual = parseFlagValues(generateFlagsProto(Aconfig.flag_state.ENABLED))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testParseApiVersions() {
+    val expected: Set<Symbol> = setOf(Symbol("android.Clazz.FOO"))
+    val actual = parseApiVersions(API_VERSIONS.byteInputStream())
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testFindErrorsNoErrors() {
+    val expected = setOf<ApiError>()
+    val actual =
+        findErrors(
+            parseApiSignature("in-memory", API_SIGNATURE.byteInputStream()),
+            parseFlagValues(generateFlagsProto(Aconfig.flag_state.ENABLED)),
+            parseApiVersions(API_VERSIONS.byteInputStream()))
+    assertEquals(expected, actual)
+  }
+
+  @Test
+  fun testFindErrorsDisabledFlaggedApiIsPresent() {
+    val expected =
+        setOf<ApiError>(
+            DisabledFlaggedApiIsPresentError(Symbol("android.Clazz.FOO"), Flag("android.flag.foo")))
+    val actual =
+        findErrors(
+            parseApiSignature("in-memory", API_SIGNATURE.byteInputStream()),
+            parseFlagValues(generateFlagsProto(Aconfig.flag_state.DISABLED)),
+            parseApiVersions(API_VERSIONS.byteInputStream()))
+    assertEquals(expected, actual)
+  }
+}
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
new file mode 100644
index 0000000..84564ba
--- /dev/null
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
@@ -0,0 +1,251 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@file:JvmName("Main")
+
+package com.android.checkflaggedapis
+
+import android.aconfig.Aconfig
+import com.android.tools.metalava.model.BaseItemVisitor
+import com.android.tools.metalava.model.FieldItem
+import com.android.tools.metalava.model.text.ApiFile
+import com.github.ajalt.clikt.core.CliktCommand
+import com.github.ajalt.clikt.core.ProgramResult
+import com.github.ajalt.clikt.parameters.options.help
+import com.github.ajalt.clikt.parameters.options.option
+import com.github.ajalt.clikt.parameters.options.required
+import com.github.ajalt.clikt.parameters.types.path
+import java.io.InputStream
+import javax.xml.parsers.DocumentBuilderFactory
+import org.w3c.dom.Node
+
+/**
+ * Class representing the fully qualified name of a class, method or field.
+ *
+ * This tool reads a multitude of input formats, all of which represent the fully qualified path to
+ * a Java symbol slightly differently. To keep things consistent, all parsed APIs are converted to
+ * Symbols.
+ *
+ * All parts of the fully qualified name of the Symbol are separated by a dot, e.g.:
+ * <pre>
+ *   package.class.inner-class.field
+ * </pre>
+ */
+@JvmInline
+internal value class Symbol(val name: String) {
+  companion object {
+    private val FORBIDDEN_CHARS = listOf('/', '#', '$')
+
+    /** Create a new Symbol from a String that may include delimiters other than dot. */
+    fun create(name: String): Symbol {
+      var sanitizedName = name
+      for (ch in FORBIDDEN_CHARS) {
+        sanitizedName = sanitizedName.replace(ch, '.')
+      }
+      return Symbol(sanitizedName)
+    }
+  }
+
+  init {
+    require(!name.isEmpty()) { "empty string" }
+    for (ch in FORBIDDEN_CHARS) {
+      require(!name.contains(ch)) { "$name: contains $ch" }
+    }
+  }
+
+  override fun toString(): String = name
+}
+
+/**
+ * Class representing the fully qualified name of an aconfig flag.
+ *
+ * This includes both the flag's package and name, separated by a dot, e.g.:
+ * <pre>
+ *   com.android.aconfig.test.disabled_ro
+ * </pre>
+ */
+@JvmInline
+internal value class Flag(val name: String) {
+  override fun toString(): String = name
+}
+
+internal sealed class ApiError {
+  abstract val symbol: Symbol
+  abstract val flag: Flag
+}
+
+internal data class EnabledFlaggedApiNotPresentError(
+    override val symbol: Symbol,
+    override val flag: Flag
+) : ApiError() {
+  override fun toString(): String {
+    return "error: enabled @FlaggedApi not present in built artifact: symbol=$symbol flag=$flag"
+  }
+}
+
+internal data class DisabledFlaggedApiIsPresentError(
+    override val symbol: Symbol,
+    override val flag: Flag
+) : ApiError() {
+  override fun toString(): String {
+    return "error: disabled @FlaggedApi is present in built artifact: symbol=$symbol flag=$flag"
+  }
+}
+
+internal data class UnknownFlagError(override val symbol: Symbol, override val flag: Flag) :
+    ApiError() {
+  override fun toString(): String {
+    return "error: unknown flag: symbol=$symbol flag=$flag"
+  }
+}
+
+class CheckCommand :
+    CliktCommand(
+        help =
+            """
+Check that all flagged APIs are used in the correct way.
+
+This tool reads the API signature file, the aconfig flag values and the api-versions.xml file, and verifies that each flagged API's presence in the built artifact matches the value of its flag.
+
+The tool exits with a non-zero exit code if any flagged APIs are used incorrectly.
+""") {
+  private val apiSignaturePath by
+      option("--api-signature")
+          .help(
+              """
+              Path to API signature file.
+              Usually named *current.txt.
+              Tip: `m frameworks-base-api-current.txt` will generate a file that includes all platform and mainline APIs.
+              """)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+  private val flagValuesPath by
+      option("--flag-values")
+          .help(
+              """
+            Path to aconfig parsed_flags binary proto file.
+            Tip: `m all_aconfig_declarations` will generate a file that includes all information about all flags.
+            """)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+  private val apiVersionsPath by
+      option("--api-versions")
+          .help(
+              """
+            Path to API versions XML file.
+            Usually named xml-versions.xml.
+            Tip: `m sdk dist` will generate a file that includes all platform and mainline APIs.
+            """)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+
+  override fun run() {
+    val flaggedSymbols =
+        apiSignaturePath.toFile().inputStream().use {
+          parseApiSignature(apiSignaturePath.toString(), it)
+        }
+    val flags = flagValuesPath.toFile().inputStream().use { parseFlagValues(it) }
+    val exportedSymbols = apiVersionsPath.toFile().inputStream().use { parseApiVersions(it) }
+    val errors = findErrors(flaggedSymbols, flags, exportedSymbols)
+    for (e in errors) {
+      println(e)
+    }
+    throw ProgramResult(errors.size)
+  }
+}
+
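+/**
+ * Parse an API signature (*.txt) stream and return a (Symbol, Flag) pair for every API annotated
+ * with @FlaggedApi. Currently only fields are considered (see the TODO below).
+ */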
+internal fun parseApiSignature(path: String, input: InputStream): Set<Pair<Symbol, Flag>> {
+  // TODO(b/334870672): add support for classes and methods
+  val output = mutableSetOf<Pair<Symbol, Flag>>()
+  val visitor =
+      object : BaseItemVisitor() {
+        override fun visitField(field: FieldItem) {
+          val flag =
+              field.modifiers
+                  .findAnnotation("android.annotation.FlaggedApi")
+                  ?.findAttribute("value")
+                  ?.value
+                  ?.value() as? String
+          if (flag != null) {
+            val symbol = Symbol.create(field.baselineElementId())
+            output.add(Pair(symbol, Flag(flag)))
+          }
+        }
+      }
+  val codebase = ApiFile.parseApi(path, input)
+  codebase.accept(visitor)
+  return output
+}
+
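+/** Parse an aconfig parsed_flags binary proto stream, mapping each flag to whether it is ENABLED. */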
+internal fun parseFlagValues(input: InputStream): Map<Flag, Boolean> {
+  val parsedFlags = Aconfig.parsed_flags.parseFrom(input).getParsedFlagList()
+  return parsedFlags.associateBy(
+      { Flag("${it.getPackage()}.${it.getName()}") },
+      { it.getState() == Aconfig.flag_state.ENABLED })
+}
+
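+/**
+ * Parse an api-versions.xml stream and return the symbols present in the built artifact.
+ *
+ * Only <field> elements are currently considered.
+ */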
+internal fun parseApiVersions(input: InputStream): Set<Symbol> {
+  fun Node.getAttribute(name: String): String? = getAttributes()?.getNamedItem(name)?.getNodeValue()
+
+  val output = mutableSetOf<Symbol>()
+  val factory = DocumentBuilderFactory.newInstance()
+  val parser = factory.newDocumentBuilder()
+  val document = parser.parse(input)
+  val fields = document.getElementsByTagName("field")
+  // ktfmt doesn't understand the `..<` range syntax; explicitly call .rangeUntil instead
+  for (i in 0.rangeUntil(fields.getLength())) {
+    val field = fields.item(i)
+    val fieldName = field.getAttribute("name")
+    val className =
+        requireNotNull(field.getParentNode()) { "Bad XML: top level <field> element" }
+            .getAttribute("name")
+    output.add(Symbol.create("$className.$fieldName"))
+  }
+  return output
+}
+
+/**
+ * Find errors in the given data.
+ *
+ * @param flaggedSymbolsInSource the set of symbols that are flagged in the source code
+ * @param flags the set of flags and their values
+ * @param symbolsInOutput the set of symbols that are present in the output
+ * @return the set of errors found
+ */
+internal fun findErrors(
+    flaggedSymbolsInSource: Set<Pair<Symbol, Flag>>,
+    flags: Map<Flag, Boolean>,
+    symbolsInOutput: Set<Symbol>
+): Set<ApiError> {
+  val errors = mutableSetOf<ApiError>()
+  for ((symbol, flag) in flaggedSymbolsInSource) {
+    try {
+      if (flags.getValue(flag)) {
+        if (!symbolsInOutput.contains(symbol)) {
+          errors.add(EnabledFlaggedApiNotPresentError(symbol, flag))
+        }
+      } else {
+        if (symbolsInOutput.contains(symbol)) {
+          errors.add(DisabledFlaggedApiIsPresentError(symbol, flag))
+        }
+      }
+    } catch (e: NoSuchElementException) {
+      errors.add(UnknownFlagError(symbol, flag))
+    }
+  }
+  return errors
+}
+
+fun main(args: Array<String>) = CheckCommand().main(args)