Merge "Add new script for metadata build target" into main
diff --git a/CleanSpec.mk b/CleanSpec.mk
index f8c96ff..8c30883 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -787,6 +787,13 @@
 $(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/)
 $(call add-clean-step, find $(OUT_DIR) -type f -name "*.jar" -print0 | xargs -0 rm -f)
 
+# Remove obsolete dexpreopt_config artifacts
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/dexpreopt_config/dexpreopt.config)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/dexpreopt_config/dexpreopt_soong.config)
+
+# Clear out the Soong .intermediates directory after removal of the hashed subdir
+$(call add-clean-step, rm -rf $(OUT_DIR)/soong/.intermediates)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/ci/Android.bp b/ci/Android.bp
index 066b83f..6d4ac35 100644
--- a/ci/Android.bp
+++ b/ci/Android.bp
@@ -14,6 +14,7 @@
 
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
+    default_team: "trendy_team_adte",
 }
 
 python_test_host {
@@ -70,10 +71,37 @@
     },
 }
 
+python_test_host {
+    name: "optimized_targets_test",
+    main: "optimized_targets_test.py",
+    pkg_path: "testdata",
+    srcs: [
+        "optimized_targets_test.py",
+    ],
+    libs: [
+        "build_test_suites",
+        "pyfakefs",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    data: [
+        ":py3-cmd",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
 python_library_host {
     name: "build_test_suites",
     srcs: [
         "build_test_suites.py",
+        "optimized_targets.py",
+        "test_mapping_module_retriever.py",
+        "build_context.py",
     ],
 }
 
diff --git a/ci/build_context.py b/ci/build_context.py
new file mode 100644
index 0000000..cc48d53
--- /dev/null
+++ b/ci/build_context.py
@@ -0,0 +1,64 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Container class for build context with utility functions."""
+
+import re
+
+
+class BuildContext:
+
+  def __init__(self, build_context_dict: dict[str, any]):
+    self.enabled_build_features = set()
+    for opt in build_context_dict.get('enabledBuildFeatures', []):
+      self.enabled_build_features.add(opt.get('name'))
+    self.test_infos = set()
+    for test_info_dict in build_context_dict.get('testContext', dict()).get(
+        'testInfos', []
+    ):
+      self.test_infos.add(self.TestInfo(test_info_dict))
+
+  def build_target_used(self, target: str) -> bool:
+    return any(test.build_target_used(target) for test in self.test_infos)
+
+  class TestInfo:
+
+    _DOWNLOAD_OPTS = {
+        'test-config-only-zip',
+        'test-zip-file-filter',
+        'extra-host-shared-lib-zip',
+        'sandbox-tests-zips',
+        'additional-files-filter',
+        'cts-package-name',
+    }
+
+    def __init__(self, test_info_dict: dict[str, any]):
+      self.is_test_mapping = False
+      self.test_mapping_test_groups = set()
+      self.file_download_options = set()
+      for opt in test_info_dict.get('extraOptions', []):
+        key = opt.get('key')
+        if key == 'test-mapping-test-group':
+          self.is_test_mapping = True
+          self.test_mapping_test_groups.update(opt.get('values', set()))
+
+        if key in self._DOWNLOAD_OPTS:
+          self.file_download_options.update(opt.get('values', set()))
+
+    def build_target_used(self, target: str) -> bool:
+      # For all of a target's outputs, check whether any of the regexes used by
+      # tests to download artifacts would match it. If any of them do, this
+      # target is necessary.
+      regex = r'\b(%s)\b' % re.escape(target)
+      return any(re.search(regex, opt) for opt in self.file_download_options)
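
A minimal usage sketch for the new BuildContext class (illustrative only; the
dict literal and the asserted targets are assumptions, and it presumes the ci/
directory is on PYTHONPATH):

    from build_context import BuildContext

    context = BuildContext({
        'enabledBuildFeatures': [{'name': 'optimized_build'}],
        'testContext': {
            'testInfos': [{
                'extraOptions': [{
                    'key': 'additional-files-filter',
                    'values': ['general-tests.zip'],
                }],
            }],
        },
    })

    # 'general-tests' appears (word-bounded) in a download option, so the
    # target counts as used; an unrelated target does not.
    assert context.build_target_used('general-tests')
    assert not context.build_target_used('device-tests')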
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 29ed50e..402880c 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -15,11 +15,22 @@
 """Build script for the CI `test_suites` target."""
 
 import argparse
+from dataclasses import dataclass
+import json
 import logging
 import os
 import pathlib
+import re
 import subprocess
 import sys
+from typing import Callable
+from build_context import BuildContext
+import optimized_targets
+
+
+REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
+SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
+LOG_PATH = 'logs/build_test_suites.log'
 
 
 class Error(Exception):
@@ -35,16 +46,65 @@
     self.return_code = return_code
 
 
-REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
-SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
+class BuildPlanner:
+  """Class in charge of determining how to optimize build targets.
+
+  Given the build context and the targets to build, it determines a final list
+  of targets to build along with a set of packaging functions to package up any
+  output zip files needed by the build.
+  """
+
+  def __init__(
+      self,
+      build_context: BuildContext,
+      args: argparse.Namespace,
+      target_optimizations: dict[str, optimized_targets.OptimizedBuildTarget],
+  ):
+    self.build_context = build_context
+    self.args = args
+    self.target_optimizations = target_optimizations
+
+  def create_build_plan(self):
+
+    if 'optimized_build' not in self.build_context.enabled_build_features:
+      return BuildPlan(set(self.args.extra_targets), set())
+
+    build_targets = set()
+    packaging_functions = set()
+    for target in self.args.extra_targets:
+      if self._unused_target_exclusion_enabled(
+          target
+      ) and not self.build_context.build_target_used(target):
+        continue
+
+      target_optimizer_getter = self.target_optimizations.get(target, None)
+      if not target_optimizer_getter:
+        build_targets.add(target)
+        continue
+
+      target_optimizer = target_optimizer_getter(
+          target, self.build_context, self.args
+      )
+      build_targets.update(target_optimizer.get_build_targets())
+      packaging_functions.add(target_optimizer.package_outputs)
+
+    return BuildPlan(build_targets, packaging_functions)
+
+  def _unused_target_exclusion_enabled(self, target: str) -> bool:
+    return (
+        f'{target}_unused_exclusion'
+        in self.build_context.enabled_build_features
+    )
 
 
-def get_top() -> pathlib.Path:
-  return pathlib.Path(os.environ['TOP'])
+@dataclass(frozen=True)
+class BuildPlan:
+  build_targets: set[str]
+  packaging_functions: set[Callable[..., None]]
 
 
 def build_test_suites(argv: list[str]) -> int:
-  """Builds the general-tests and any other test suites passed in.
+  """Builds all test suites passed in, optimizing based on the build_context content.
 
   Args:
     argv: The command line arguments passed in.
@@ -54,9 +114,14 @@
   """
   args = parse_args(argv)
   check_required_env()
+  build_context = BuildContext(load_build_context())
+  build_planner = BuildPlanner(
+      build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS
+  )
+  build_plan = build_planner.create_build_plan()
 
   try:
-    build_everything(args)
+    execute_build_plan(build_plan)
   except BuildFailureError as e:
     logging.error('Build command failed! Check build_log for details.')
     return e.return_code
@@ -64,6 +129,16 @@
   return 0
 
 
+def parse_args(argv: list[str]) -> argparse.Namespace:
+  argparser = argparse.ArgumentParser()
+
+  argparser.add_argument(
+      'extra_targets', nargs='*', help='Extra test suites to build.'
+  )
+
+  return argparser.parse_args(argv)
+
+
 def check_required_env():
   """Check for required env vars.
 
@@ -79,43 +154,40 @@
   raise Error(f'Missing required environment variables: {t}')
 
 
-def parse_args(argv):
-  argparser = argparse.ArgumentParser()
+def load_build_context():
+  build_context_path = pathlib.Path(os.environ.get('BUILD_CONTEXT', ''))
+  if build_context_path.is_file():
+    try:
+      with open(build_context_path, 'r') as f:
+        return json.load(f)
+    except json.decoder.JSONDecodeError as e:
+      raise Error(f'Failed to load JSON file: {build_context_path}') from e
 
-  argparser.add_argument(
-      'extra_targets', nargs='*', help='Extra test suites to build.'
-  )
-
-  return argparser.parse_args(argv)
+  logging.info('No BUILD_CONTEXT found, skipping optimizations.')
+  return empty_build_context()
 
 
-def build_everything(args: argparse.Namespace):
-  """Builds all tests (regardless of whether they are needed).
+def empty_build_context():
+  return {'enabledBuildFeatures': []}
 
-  Args:
-    args: The parsed arguments.
 
-  Raises:
-    BuildFailure: If the build command fails.
-  """
-  build_command = base_build_command(args, args.extra_targets)
+def execute_build_plan(build_plan: BuildPlan):
+  build_command = []
+  build_command.append(get_top().joinpath(SOONG_UI_EXE_REL_PATH))
+  build_command.append('--make-mode')
+  build_command.extend(build_plan.build_targets)
 
   try:
     run_command(build_command)
   except subprocess.CalledProcessError as e:
     raise BuildFailureError(e.returncode) from e
 
+  for packaging_function in build_plan.packaging_functions:
+    packaging_function()
 
-def base_build_command(
-    args: argparse.Namespace, extra_targets: set[str]
-) -> list[str]:
 
-  build_command = []
-  build_command.append(get_top().joinpath(SOONG_UI_EXE_REL_PATH))
-  build_command.append('--make-mode')
-  build_command.extend(extra_targets)
-
-  return build_command
+def get_top() -> pathlib.Path:
+  return pathlib.Path(os.environ['TOP'])
 
 
 def run_command(args: list[str], stdout=None):
@@ -123,4 +195,12 @@
 
 
 def main(argv):
+  dist_dir = os.environ.get('DIST_DIR')
+  if dist_dir:
+    log_file = pathlib.Path(dist_dir) / LOG_PATH
+    logging.basicConfig(
+        level=logging.DEBUG,
+        format='%(asctime)s %(levelname)s %(message)s',
+        filename=log_file,
+    )
   sys.exit(build_test_suites(argv))
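
For reference, a minimal sketch of the BuildPlanner flow added above when the
'optimized_build' feature is absent; the argparse.Namespace contents and the
empty optimizer map are illustrative assumptions (run from the ci/ directory):

    import argparse

    from build_context import BuildContext
    import build_test_suites

    args = argparse.Namespace(extra_targets=['general-tests', 'device-tests'])
    context = BuildContext({'enabledBuildFeatures': []})

    planner = build_test_suites.BuildPlanner(context, args, {})
    plan = planner.create_build_plan()

    # Without 'optimized_build', every requested target is built as-is and no
    # packaging functions are scheduled.
    assert plan.build_targets == {'general-tests', 'device-tests'}
    assert not plan.packaging_functions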
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
index 08a79a3..f3ff6f4 100644
--- a/ci/build_test_suites_test.py
+++ b/ci/build_test_suites_test.py
@@ -14,7 +14,10 @@
 
 """Tests for build_test_suites.py"""
 
+import argparse
+import functools
 from importlib import resources
+import json
 import multiprocessing
 import os
 import pathlib
@@ -27,9 +30,12 @@
 import textwrap
 import time
 from typing import Callable
+import unittest
 from unittest import mock
+from build_context import BuildContext
 import build_test_suites
 import ci_test_lib
+import optimized_targets
 from pyfakefs import fake_filesystem_unittest
 
 
@@ -80,12 +86,20 @@
     with self.assertRaisesRegex(SystemExit, '42'):
       build_test_suites.main([])
 
+  def test_incorrectly_formatted_build_context_raises(self):
+    build_context = self.fake_top.joinpath('build_context')
+    build_context.touch()
+    os.environ['BUILD_CONTEXT'] = str(build_context)
+
+    with self.assert_raises_word(build_test_suites.Error, 'JSON'):
+      build_test_suites.main([])
+
   def test_build_success_returns(self):
     with self.assertRaisesRegex(SystemExit, '0'):
       build_test_suites.main([])
 
   def assert_raises_word(self, cls, word):
-    return self.assertRaisesRegex(build_test_suites.Error, rf'\b{word}\b')
+    return self.assertRaisesRegex(cls, rf'\b{word}\b')
 
   def _setup_working_build_env(self):
     self.fake_top = pathlib.Path('/fake/top')
@@ -222,6 +236,262 @@
       os.kill(p.pid, signal.SIGINT)
 
 
+class BuildPlannerTest(unittest.TestCase):
+
+  class TestOptimizedBuildTarget(optimized_targets.OptimizedBuildTarget):
+
+    def __init__(
+        self, target, build_context, args, output_targets, packaging_outputs
+    ):
+      super().__init__(target, build_context, args)
+      self.output_targets = output_targets
+      self.packaging_outputs = packaging_outputs
+
+    def get_build_targets_impl(self):
+      return self.output_targets
+
+    def package_outputs_impl(self):
+      self.packaging_outputs.add(f'packaging {" ".join(self.output_targets)}')
+
+    def get_enabled_flag(self):
+      return f'{self.target}_enabled'
+
+  def test_build_optimization_off_builds_everything(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_context=self.create_build_context(optimized_build_enabled=False),
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_targets, build_plan.build_targets)
+
+  def test_build_optimization_off_doesnt_package(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_context=self.create_build_context(optimized_build_enabled=False),
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertEqual(len(build_plan.packaging_functions), 0)
+
+  def test_build_optimization_on_optimizes_target(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+        build_context=self.create_build_context(
+            enabled_build_features=[{'name': self.get_target_flag('target_1')}]
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    expected_targets = {self.get_optimized_target_name('target_1'), 'target_2'}
+    self.assertSetEqual(expected_targets, build_plan.build_targets)
+
+  def test_build_optimization_on_packages_target(self):
+    build_targets = {'target_1', 'target_2'}
+    packaging_outputs = set()
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+        build_context=self.create_build_context(
+            enabled_build_features=[{'name': self.get_target_flag('target_1')}]
+        ),
+        packaging_outputs=packaging_outputs,
+    )
+
+    build_plan = build_planner.create_build_plan()
+    self.run_packaging_functions(build_plan)
+
+    optimized_target_name = self.get_optimized_target_name('target_1')
+    self.assertIn(f'packaging {optimized_target_name}', packaging_outputs)
+
+  def test_individual_build_optimization_off_doesnt_optimize(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_targets, build_plan.build_targets)
+
+  def test_individual_build_optimization_off_doesnt_package(self):
+    build_targets = {'target_1', 'target_2'}
+    packaging_outputs = set()
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+        packaging_outputs=packaging_outputs,
+    )
+
+    build_plan = build_planner.create_build_plan()
+    self.run_packaging_functions(build_plan)
+
+    self.assertFalse(packaging_outputs)
+
+  def test_target_output_used_target_built(self):
+    build_target = 'test_target'
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=self.get_test_context(build_target),
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, {build_target})
+
+  def test_target_regex_used_target_built(self):
+    build_target = 'test_target'
+    test_context = self.get_test_context(build_target)
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': [rf'.*{build_target}.*\.zip'],
+    }]
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=test_context,
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, {build_target})
+
+  def test_target_output_not_used_target_not_built(self):
+    build_target = 'test_target'
+    test_context = self.get_test_context(build_target)
+    test_context['testInfos'][0]['extraOptions'] = []
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=test_context,
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, set())
+
+  def test_target_regex_matching_not_too_broad(self):
+    build_target = 'test_target'
+    test_context = self.get_test_context(build_target)
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': [rf'.*a{build_target}.*\.zip'],
+    }]
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=test_context,
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, set())
+
+  def create_build_planner(
+      self,
+      build_targets: set[str],
+      build_context: BuildContext = None,
+      args: argparse.Namespace = None,
+      target_optimizations: dict[
+          str, optimized_targets.OptimizedBuildTarget
+      ] = None,
+      packaging_outputs: set[str] = set(),
+  ) -> build_test_suites.BuildPlanner:
+    if not build_context:
+      build_context = self.create_build_context()
+    if not args:
+      args = self.create_args(extra_build_targets=build_targets)
+    if not target_optimizations:
+      target_optimizations = self.create_target_optimizations(
+          build_context,
+          build_targets,
+          packaging_outputs,
+      )
+    return build_test_suites.BuildPlanner(
+        build_context, args, target_optimizations
+    )
+
+  def create_build_context(
+      self,
+      optimized_build_enabled: bool = True,
+      enabled_build_features: list[dict[str, str]] = [],
+      test_context: dict[str, any] = {},
+  ) -> BuildContext:
+    build_context_dict = {}
+    # Copy so the shared default list argument is never mutated across tests.
+    build_context_dict['enabledBuildFeatures'] = list(enabled_build_features)
+    if optimized_build_enabled:
+      build_context_dict['enabledBuildFeatures'].append(
+          {'name': 'optimized_build'}
+      )
+    build_context_dict['testContext'] = test_context
+    return BuildContext(build_context_dict)
+
+  def create_args(
+      self, extra_build_targets: set[str] = set()
+  ) -> argparse.Namespace:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('extra_targets', nargs='*')
+    return parser.parse_args(extra_build_targets)
+
+  def create_target_optimizations(
+      self,
+      build_context: BuildContext,
+      build_targets: set[str],
+      packaging_outputs: set[str] = set(),
+  ):
+    target_optimizations = dict()
+    for target in build_targets:
+      target_optimizations[target] = functools.partial(
+          self.TestOptimizedBuildTarget,
+          output_targets={self.get_optimized_target_name(target)},
+          packaging_outputs=packaging_outputs,
+      )
+
+    return target_optimizations
+
+  def get_target_flag(self, target: str):
+    return f'{target}_enabled'
+
+  def get_optimized_target_name(self, target: str):
+    return f'{target}_optimized'
+
+  def run_packaging_functions(self, build_plan: build_test_suites.BuildPlan):
+    for packaging_function in build_plan.packaging_functions:
+      packaging_function()
+
+  def get_test_context(self, target: str):
+    return {
+        'testInfos': [
+            {
+                'name': 'atp_test',
+                'target': 'test_target',
+                'branch': 'branch',
+                'extraOptions': [{
+                    'key': 'additional-files-filter',
+                    'values': [f'{target}.zip'],
+                }],
+                'command': '/tf/command',
+                'extraBuildTargets': [
+                    'extra_build_target',
+                ],
+            },
+        ],
+    }
+
+
 def wait_until(
     condition_function: Callable[[], bool],
     timeout_secs: float = 3.0,
diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py
new file mode 100644
index 0000000..fddde17
--- /dev/null
+++ b/ci/optimized_targets.py
@@ -0,0 +1,185 @@
+#
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC
+import argparse
+import functools
+from build_context import BuildContext
+import json
+import logging
+import os
+from typing import Self
+
+import test_mapping_module_retriever
+
+
+class OptimizedBuildTarget(ABC):
+  """A representation of an optimized build target.
+
+  This class determines what targets to build for a given build_context and
+  provides a packaging function to generate any necessary output zips for the
+  build.
+  """
+
+  def __init__(
+      self,
+      target: str,
+      build_context: BuildContext,
+      args: argparse.Namespace,
+  ):
+    self.target = target
+    self.build_context = build_context
+    self.args = args
+
+  def get_build_targets(self) -> set[str]:
+    features = self.build_context.enabled_build_features
+    if self.get_enabled_flag() in features:
+      self.modules_to_build = self.get_build_targets_impl()
+      return self.modules_to_build
+
+    self.modules_to_build = {self.target}
+    return {self.target}
+
+  def package_outputs(self):
+    features = self.build_context.enabled_build_features
+    if self.get_enabled_flag() in features:
+      return self.package_outputs_impl()
+
+  def package_outputs_impl(self):
+    raise NotImplementedError(
+        f'package_outputs_impl not implemented in {type(self).__name__}'
+    )
+
+  def get_enabled_flag(self):
+    raise NotImplementedError(
+        f'get_enabled_flag not implemented in {type(self).__name__}'
+    )
+
+  def get_build_targets_impl(self) -> set[str]:
+    raise NotImplementedError(
+        f'get_build_targets_impl not implemented in {type(self).__name__}'
+    )
+
+
+class NullOptimizer(OptimizedBuildTarget):
+  """No-op target optimizer.
+
+  This will simply build the same target it was given and do nothing for the
+  packaging step.
+  """
+
+  def __init__(self, target):
+    self.target = target
+
+  def get_build_targets(self):
+    return {self.target}
+
+  def package_outputs(self):
+    pass
+
+
+class ChangeInfo:
+
+  def __init__(self, change_info_file_path):
+    try:
+      with open(change_info_file_path) as change_info_file:
+        change_info_contents = json.load(change_info_file)
+    except json.decoder.JSONDecodeError:
+      logging.error(f'Failed to load CHANGE_INFO: {change_info_file_path}')
+      raise
+
+    self._change_info_contents = change_info_contents
+
+  def find_changed_files(self) -> set[str]:
+    changed_files = set()
+
+    for change in self._change_info_contents['changes']:
+      project_path = change.get('projectPath') + '/'
+
+      for revision in change.get('revisions'):
+        for file_info in revision.get('fileInfos'):
+          changed_files.add(project_path + file_info.get('path'))
+
+    return changed_files
+
+class GeneralTestsOptimizer(OptimizedBuildTarget):
+  """general-tests optimizer
+
+  TODO(b/358215235): Implement
+
+  This optimizer reads in the list of changed files from the file located in
+  env[CHANGE_INFO] and uses it alongside the normal TEST_MAPPING logic to
+  determine what test mapping modules will run for the given changes. It then
+  builds those modules and packages them in the same way general-tests.zip is
+  normally built.
+  """
+
+  # List of modules that are always required to be in general-tests.zip.
+  _REQUIRED_MODULES = frozenset(
+      ['cts-tradefed', 'vts-tradefed', 'compatibility-host-util']
+  )
+
+  def get_build_targets_impl(self) -> set[str]:
+    change_info_file_path = os.environ.get('CHANGE_INFO')
+    if not change_info_file_path:
+      logging.info(
+          'No CHANGE_INFO env var found, general-tests optimization disabled.'
+      )
+      return {'general-tests'}
+
+    test_infos = self.build_context.test_infos
+    test_mapping_test_groups = set()
+    for test_info in test_infos:
+      is_test_mapping = test_info.is_test_mapping
+      current_test_mapping_test_groups = test_info.test_mapping_test_groups
+      uses_general_tests = test_info.build_target_used('general-tests')
+
+      if uses_general_tests and not is_test_mapping:
+        logging.info(
+            'Test uses general-tests.zip but is not test-mapping, general-tests'
+            ' optimization disabled.'
+        )
+        return {'general-tests'}
+
+      if is_test_mapping:
+        test_mapping_test_groups.update(current_test_mapping_test_groups)
+
+    change_info = ChangeInfo(change_info_file_path)
+    changed_files = change_info.find_changed_files()
+
+    test_mappings = test_mapping_module_retriever.GetTestMappings(
+        changed_files, set()
+    )
+
+    modules_to_build = set(self._REQUIRED_MODULES)
+
+    modules_to_build.update(
+        test_mapping_module_retriever.FindAffectedModules(
+            test_mappings, changed_files, test_mapping_test_groups
+        )
+    )
+
+    return modules_to_build
+
+  def get_enabled_flag(self):
+    return 'general_tests_optimized'
+
+  @classmethod
+  def get_optimized_targets(cls) -> dict[str, OptimizedBuildTarget]:
+    return {'general-tests': functools.partial(cls)}
+
+
+OPTIMIZED_BUILD_TARGETS = {}
+OPTIMIZED_BUILD_TARGETS.update(GeneralTestsOptimizer.get_optimized_targets())
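
A sketch of how additional optimizers are expected to plug into
OPTIMIZED_BUILD_TARGETS; the DeviceTestsOptimizer class, its target set, and
its flag name are hypothetical:

    import functools

    import optimized_targets


    class DeviceTestsOptimizer(optimized_targets.OptimizedBuildTarget):

      def get_build_targets_impl(self):
        # Hypothetical reduced set of modules to build.
        return {'device-tests-minimal'}

      def package_outputs_impl(self):
        pass

      def get_enabled_flag(self):
        return 'device_tests_optimized'


    optimized_targets.OPTIMIZED_BUILD_TARGETS.update(
        {'device-tests': functools.partial(DeviceTestsOptimizer)}
    )

BuildPlanner then calls the registered partial as
DeviceTestsOptimizer(target, build_context, args), mirroring how
GeneralTestsOptimizer.get_optimized_targets() registers 'general-tests'.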
diff --git a/ci/optimized_targets_test.py b/ci/optimized_targets_test.py
new file mode 100644
index 0000000..919c193
--- /dev/null
+++ b/ci/optimized_targets_test.py
@@ -0,0 +1,206 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for optimized_targets.py"""
+
+import json
+import logging
+import os
+import pathlib
+import re
+import unittest
+from unittest import mock
+import optimized_targets
+from build_context import BuildContext
+from pyfakefs import fake_filesystem_unittest
+
+
+class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase):
+
+  def setUp(self):
+    self.setUpPyfakefs()
+
+    os_environ_patcher = mock.patch.dict('os.environ', {})
+    self.addCleanup(os_environ_patcher.stop)
+    self.mock_os_environ = os_environ_patcher.start()
+
+    self._setup_working_build_env()
+    self._write_change_info_file()
+    test_mapping_dir = pathlib.Path('/project/path/file/path')
+    test_mapping_dir.mkdir(parents=True)
+    self._write_test_mapping_file()
+
+  def _setup_working_build_env(self):
+    self.change_info_file = pathlib.Path('/tmp/change_info')
+
+    self.mock_os_environ.update({
+        'CHANGE_INFO': str(self.change_info_file),
+    })
+
+  def test_general_tests_optimized(self):
+    optimizer = self._create_general_tests_optimizer()
+
+    build_targets = optimizer.get_build_targets()
+
+    expected_build_targets = set(
+        optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES
+    )
+    expected_build_targets.add('test_mapping_module')
+
+    self.assertSetEqual(build_targets, expected_build_targets)
+
+  def test_no_change_info_no_optimization(self):
+    del os.environ['CHANGE_INFO']
+
+    optimizer = self._create_general_tests_optimizer()
+
+    build_targets = optimizer.get_build_targets()
+
+    self.assertSetEqual(build_targets, {'general-tests'})
+
+  def test_mapping_groups_unused_module_not_built(self):
+    test_context = self._create_test_context()
+    test_context['testInfos'][0]['extraOptions'] = [
+        {
+            'key': 'additional-files-filter',
+            'values': ['general-tests.zip'],
+        },
+        {
+            'key': 'test-mapping-test-group',
+            'values': ['unused-test-mapping-group'],
+        },
+    ]
+    optimizer = self._create_general_tests_optimizer(
+        build_context=self._create_build_context(test_context=test_context)
+    )
+
+    build_targets = optimizer.get_build_targets()
+
+    expected_build_targets = set(
+        optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES
+    )
+    self.assertSetEqual(build_targets, expected_build_targets)
+
+  def test_general_tests_used_by_non_test_mapping_test_no_optimization(self):
+    test_context = self._create_test_context()
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': ['general-tests.zip'],
+    }]
+    optimizer = self._create_general_tests_optimizer(
+        build_context=self._create_build_context(test_context=test_context)
+    )
+
+    build_targets = optimizer.get_build_targets()
+
+    self.assertSetEqual(build_targets, {'general-tests'})
+
+  def test_malformed_change_info_raises(self):
+    with open(self.change_info_file, 'w') as f:
+      f.write('not change info')
+
+    optimizer = self._create_general_tests_optimizer()
+
+    with self.assertRaises(json.decoder.JSONDecodeError):
+      build_targets = optimizer.get_build_targets()
+
+  def test_malformed_test_mapping_raises(self):
+    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+      f.write('not test mapping')
+
+    optimizer = self._create_general_tests_optimizer()
+
+    with self.assertRaises(json.decoder.JSONDecodeError):
+      build_targets = optimizer.get_build_targets()
+
+  def _write_change_info_file(self):
+    change_info_contents = {
+        'changes': [{
+            'projectPath': '/project/path',
+            'revisions': [{
+                'fileInfos': [{
+                    'path': 'file/path/file_name',
+                }],
+            }],
+        }]
+    }
+
+    with open(self.change_info_file, 'w') as f:
+      json.dump(change_info_contents, f)
+
+  def _write_test_mapping_file(self):
+    test_mapping_contents = {
+        'test-mapping-group': [
+            {
+                'name': 'test_mapping_module',
+            },
+        ],
+    }
+
+    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+      json.dump(test_mapping_contents, f)
+
+  def _create_general_tests_optimizer(
+      self, build_context: BuildContext = None
+  ):
+    if not build_context:
+      build_context = self._create_build_context()
+    return optimized_targets.GeneralTestsOptimizer(
+        'general-tests', build_context, None
+    )
+
+  def _create_build_context(
+      self,
+      general_tests_optimized: bool = True,
+      test_context: dict[str, any] = None,
+  ) -> BuildContext:
+    if not test_context:
+      test_context = self._create_test_context()
+    build_context_dict = {}
+    build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}]
+    if general_tests_optimized:
+      build_context_dict['enabledBuildFeatures'].append(
+          {'name': 'general_tests_optimized'}
+      )
+    build_context_dict['testContext'] = test_context
+    return BuildContext(build_context_dict)
+
+  def _create_test_context(self):
+    return {
+        'testInfos': [
+            {
+                'name': 'atp_test',
+                'target': 'test_target',
+                'branch': 'branch',
+                'extraOptions': [
+                    {
+                        'key': 'additional-files-filter',
+                        'values': ['general-tests.zip'],
+                    },
+                    {
+                        'key': 'test-mapping-test-group',
+                        'values': ['test-mapping-group'],
+                    },
+                ],
+                'command': '/tf/command',
+                'extraBuildTargets': [
+                    'extra_build_target',
+                ],
+            },
+        ],
+    }
+
+
+if __name__ == '__main__':
+  # Setup logging to be silent so unit tests can pass through TF.
+  logging.disable(logging.ERROR)
+  unittest.main()
diff --git a/ci/test_mapping_module_retriever.py b/ci/test_mapping_module_retriever.py
index d2c13c0..c93cdd5 100644
--- a/ci/test_mapping_module_retriever.py
+++ b/ci/test_mapping_module_retriever.py
@@ -17,11 +17,13 @@
 modules are needed to build for the given list of changed files.
 TODO(lucafarsi): Deduplicate from artifact_helper.py
 """
+# TODO(lucafarsi): Share this logic with the original logic in
+# test_mapping_test_retriever.py
 
-from typing import Any, Dict, Set, Text
 import json
 import os
 import re
+from typing import Any
 
 # Regex to extra test name from the path of test config file.
 TEST_NAME_REGEX = r'(?:^|.*/)([^/]+)\.config'
@@ -39,7 +41,7 @@
 _COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?')
 
 
-def FilterComments(test_mapping_file: Text) -> Text:
+def FilterComments(test_mapping_file: str) -> str:
   """Remove comments in TEST_MAPPING file to valid format.
 
   Only '//' is regarded as comments.
@@ -52,8 +54,8 @@
   """
   return re.sub(_COMMENTS_RE, r'\1', test_mapping_file)
 
-def GetTestMappings(paths: Set[Text],
-                    checked_paths: Set[Text]) -> Dict[Text, Dict[Text, Any]]:
+def GetTestMappings(paths: set[str],
+                    checked_paths: set[str]) -> dict[str, dict[str, Any]]:
   """Get the affected TEST_MAPPING files.
 
   TEST_MAPPING files in source code are packaged into a build artifact
@@ -123,3 +125,68 @@
       pass
 
   return test_mappings
+
+
+def FindAffectedModules(
+    test_mappings: dict[str, Any],
+    changed_files: set[str],
+    test_mapping_test_groups: set[str],
+) -> set[str]:
+  """Find affected test modules.
+
+  Find the affected set of test modules that would run in a test mapping run based on the given test mappings, changed files, and test mapping test group.
+
+  Args:
+    test_mappings: A set of test mappings returned by GetTestMappings in the following format:
+      {
+        'test_mapping_file_path': {
+          'group_name' : [
+            'name': 'module_name',
+          ],
+        }
+      }
+    changed_files: A set of files changed for the given run.
+    test_mapping_test_groups: A set of test mapping test groups that are being considered for the given run.
+
+  Returns:
+    A set of test module names which would run for a test mapping test run with the given parameters.
+  """
+
+  modules = set()
+
+  for test_mapping in test_mappings.values():
+    for group_name, group in test_mapping.items():
+      # If a module is not in any of the test mapping groups being tested,
+      # skip it.
+      if group_name not in test_mapping_test_groups:
+        continue
+
+      for entry in group:
+        module_name = entry.get('name')
+
+        if not module_name:
+          continue
+
+        file_patterns = entry.get('file_patterns')
+        if not file_patterns:
+          modules.add(module_name)
+          continue
+
+        if MatchesFilePatterns(file_patterns, changed_files):
+          modules.add(module_name)
+
+  return modules
+
+
+def MatchesFilePatterns(
+    file_patterns: list[str], changed_files: set[str]
+) -> bool:
+  """Checks if any of the changed files match any of the file patterns.
+
+  Args:
+    file_patterns: A list of file patterns to match against.
+    changed_files: A set of files to check against the file patterns.
+
+  Returns:
+    True if any of the changed files match any of the file patterns.
+  """
+  return any(
+      re.search(pattern, '|'.join(changed_files)) for pattern in file_patterns
+  )
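
A worked example of FindAffectedModules with the MatchesFilePatterns helper
above; the TEST_MAPPING path, group, and module names are made up:

    import test_mapping_module_retriever as retriever

    test_mappings = {
        'platform/foo/TEST_MAPPING': {
            'presubmit': [
                {'name': 'foo_unit_tests'},
                {'name': 'foo_lint', 'file_patterns': [r'.*\.bp$']},
            ],
        },
    }
    changed_files = {'platform/foo/src/bar.cpp'}

    modules = retriever.FindAffectedModules(
        test_mappings, changed_files, {'presubmit'}
    )

    # foo_unit_tests has no file_patterns, so it always runs for its group;
    # foo_lint's pattern does not match the changed .cpp file.
    assert modules == {'foo_unit_tests'}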
diff --git a/cogsetup.sh b/cogsetup.sh
index ef1485d..5c64a06 100644
--- a/cogsetup.sh
+++ b/cogsetup.sh
@@ -57,7 +57,7 @@
   fi
   function repo {
     if [[ "${PWD}" == /google/cog/* ]]; then
-      echo "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces."
+      echo -e "\e[01;31mERROR:\e[0m repo command is disallowed within Cog workspaces."
       return 1
     fi
     ${ORIG_REPO_PATH} "$@"
diff --git a/common/math.mk b/common/math.mk
index ecee474..829ceb5 100644
--- a/common/math.mk
+++ b/common/math.mk
@@ -315,8 +315,9 @@
 $(call math-expect,(call numbers_greater_or_equal_to,0,0 2 1 3),0 2 1 3)
 $(call math-expect,(call numbers_greater_or_equal_to,1,0 2 1 3 2),2 1 3 2)
 
-_INT_LIMIT_WORDS := $(foreach a,x x,$(foreach b,x x x x x x x x x x x x x x x x,\
-  $(foreach c,x x x x x x x x x x x x x x x x,x x x x x x x x x x x x x x x x)))
+# Contains 10,001 (= 10 ** 4 + 1) x's, i.e. one more than 10,000, the (future)
+# API level.
+_INT_LIMIT_WORDS := x $(foreach a,0 1 2 3 4 5 6 7 8 9,$(foreach b,0 1 2 3 4 5 6 7 8 9,\
+  $(foreach c,0 1 2 3 4 5 6 7 8 9,x x x x x x x x x x)))
 
 define _int_encode
 $(if $(filter $(words x $(_INT_LIMIT_WORDS)),$(words $(wordlist 1,$(1),x $(_INT_LIMIT_WORDS)))),\
diff --git a/core/Makefile b/core/Makefile
index a215d31..bf2d48a 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -717,7 +717,7 @@
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
-  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT)),system,modules.load,,$(kmd))) \
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,$(GENERIC_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd)))))
@@ -1052,6 +1052,35 @@
 endif
 endif
 
+
+# -----------------------------------------------------------------
+# dtbo image
+ifdef BOARD_PREBUILT_DTBOIMAGE
+INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+	chmod +w $@
+	$(AVBTOOL) add_hash_footer \
+	    --image $@ \
+	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
+	    --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
+	    $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
+
+$(call declare-1p-container,$(INSTALLED_DTBOIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_TARGET),$(BOARD_PREBUILT_DTBOIMAGE),$(PRODUCT_OUT)/:/)
+
+UNMOUNTED_NOTICE_VENDOR_DEPS+= $(INSTALLED_DTBOIMAGE_TARGET)
+else
+$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
+	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+endif
+
+endif # BOARD_PREBUILT_DTBOIMAGE
+
+# -----------------------------------------------------------------
+
 # -----------------------------------------------------------------
 # the ramdisk
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_RAMDISK_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
@@ -1075,6 +1104,16 @@
 
 BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
 
+ifeq ($(BOARD_RAMDISK_USE_LZ4),true)
+# -l enables the legacy format used by the Linux kernel
+COMPRESSION_COMMAND_DEPS := $(LZ4)
+COMPRESSION_COMMAND := $(LZ4) -l -12 --favor-decSpeed
+RAMDISK_EXT := .lz4
+else
+COMPRESSION_COMMAND_DEPS := $(GZIP)
+COMPRESSION_COMMAND := $(GZIP)
+RAMDISK_EXT := .gz
+endif
 
 ifneq ($(BOARD_KERNEL_MODULES_16K),)
 
@@ -1153,6 +1192,33 @@
 
 endif
 
+# -----------------------------------------------------------------
+# 16KB dtbo image
+ifdef BOARD_PREBUILT_DTBOIMAGE_16KB
+INSTALLED_DTBOIMAGE_16KB_TARGET := $(PRODUCT_OUT)/dtbo_16k.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_DTBOIMAGE_16KB_TARGET): $(BOARD_PREBUILT_DTBOIMAGE_16KB) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
+	cp $(BOARD_PREBUILT_DTBOIMAGE_16KB) $@
+	chmod +w $@
+	$(AVBTOOL) add_hash_footer \
+	    --image $@ \
+	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
+	    --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
+	    $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
+
+$(call declare-1p-container,$(INSTALLED_DTBOIMAGE_16KB_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_16KB_TARGET),$(BOARD_PREBUILT_DTBOIMAGE_16KB),$(PRODUCT_OUT)/:/)
+
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DTBOIMAGE_16KB_TARGET)
+else
+$(INSTALLED_DTBOIMAGE_16KB_TARGET): $(BOARD_PREBUILT_DTBOIMAGE_16KB)
+	cp $(BOARD_PREBUILT_DTBOIMAGE_16KB) $@
+endif
+
+endif # BOARD_PREBUILT_DTBOIMAGE_16KB
+
+
 ifneq ($(BOARD_KERNEL_PATH_16K),)
 BUILT_KERNEL_16K_TARGET := $(PRODUCT_OUT)/kernel_16k
 
@@ -1175,36 +1241,34 @@
 .PHONY: bootimage_16k
 
 BUILT_BOOT_OTA_PACKAGE_16K := $(PRODUCT_OUT)/boot_ota_16k.zip
-$(BUILT_BOOT_OTA_PACKAGE_16K): $(OTA_FROM_RAW_IMG) $(BUILT_BOOTIMAGE_16K_TARGET) $(INSTALLED_BOOTIMAGE_TARGET) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8
+$(BUILT_BOOT_OTA_PACKAGE_16K):  $(OTA_FROM_RAW_IMG) \
+                                $(BUILT_BOOTIMAGE_16K_TARGET) \
+                                $(INSTALLED_BOOTIMAGE_TARGET) \
+                                $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8 \
+                                $(INSTALLED_DTBOIMAGE_16KB_TARGET) \
+                                $(INSTALLED_DTBOIMAGE_TARGET)
 	$(OTA_FROM_RAW_IMG) --package_key $(DEFAULT_SYSTEM_DEV_CERTIFICATE) \
                       --max_timestamp `cat $(BUILD_DATETIME_FILE)` \
                       --path $(HOST_OUT) \
-                      --partition_name boot \
+                      --partition_name $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),\
+                          $(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        boot$(comma)dtbo,\
+                        boot) \
                       --output $@ \
                       $(if $(BOARD_16K_OTA_USE_INCREMENTAL),\
                         $(INSTALLED_BOOTIMAGE_TARGET):$(BUILT_BOOTIMAGE_16K_TARGET),\
                         $(BUILT_BOOTIMAGE_16K_TARGET)\
-                      )
+                      )\
+                      $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),$(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        $(INSTALLED_DTBOIMAGE_16KB_TARGET))
 
 boototapackage_16k: $(BUILT_BOOT_OTA_PACKAGE_16K)
 .PHONY: boototapackage_16k
 
 endif
 
-
-ifeq ($(BOARD_RAMDISK_USE_LZ4),true)
-# -l enables the legacy format used by the Linux kernel
-COMPRESSION_COMMAND_DEPS := $(LZ4)
-COMPRESSION_COMMAND := $(LZ4) -l -12 --favor-decSpeed
-RAMDISK_EXT := .lz4
-else
-COMPRESSION_COMMAND_DEPS := $(GZIP)
-COMPRESSION_COMMAND := $(GZIP)
-RAMDISK_EXT := .gz
-endif
-
+# The value of RAMDISK_NODE_LIST is defined in system/core/rootdir/Android.bp.
 # This file contains /dev nodes description added to the generic ramdisk
-RAMDISK_NODE_LIST := $(PRODUCT_OUT)/ramdisk_node_list
 
 # We just build this directly to the install location.
 INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
@@ -1375,30 +1439,7 @@
 	@echo "make $@: ignoring dependencies"
 	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
 
-else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # BOARD_AVB_ENABLE != true
-
-# $1: boot image target
-define build_boot_supports_vboot
-  $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned
-  $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1)
-  $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot))
-endef
-
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY)
-	$(call pretty,"Target boot image: $@")
-	$(call build_boot_supports_vboot,$@)
-
-$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
-$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
-
-UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
-
-.PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(FUTILITY)
-	@echo "make $@: ignoring dependencies"
-	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_vboot,$(b)))
-
-else # PRODUCT_SUPPORTS_VBOOT != true
+else # BOARD_AVB_ENABLE != true
 
 # $1: boot image target
 define build_boot_novboot
@@ -1460,16 +1501,26 @@
 
 ifneq ($(BOARD_KERNEL_PATH_16K),)
 BUILT_BOOT_OTA_PACKAGE_4K := $(PRODUCT_OUT)/boot_ota_4k.zip
-$(BUILT_BOOT_OTA_PACKAGE_4K): $(OTA_FROM_RAW_IMG) $(INSTALLED_BOOTIMAGE_TARGET) $(BUILT_BOOTIMAGE_16K_TARGET) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8
+$(BUILT_BOOT_OTA_PACKAGE_4K): $(OTA_FROM_RAW_IMG) \
+                              $(INSTALLED_BOOTIMAGE_TARGET) \
+                              $(BUILT_BOOTIMAGE_16K_TARGET) \
+                              $(DEFAULT_SYSTEM_DEV_CERTIFICATE).pk8 \
+                              $(INSTALLED_DTBOIMAGE_TARGET) \
+                              $(INSTALLED_DTBOIMAGE_16KB_TARGET)
 	$(OTA_FROM_RAW_IMG) --package_key $(DEFAULT_SYSTEM_DEV_CERTIFICATE) \
                       --max_timestamp `cat $(BUILD_DATETIME_FILE)` \
                       --path $(HOST_OUT) \
-                      --partition_name boot \
+                      --partition_name $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),\
+                          $(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        boot$(comma)dtbo,\
+                        boot) \
                       --output $@ \
                       $(if $(BOARD_16K_OTA_USE_INCREMENTAL),\
                         $(BUILT_BOOTIMAGE_16K_TARGET):$(INSTALLED_BOOTIMAGE_TARGET),\
                         $(INSTALLED_BOOTIMAGE_TARGET)\
-                      )
+                      )\
+                      $(if $(and $(INSTALLED_DTBOIMAGE_TARGET),$(INSTALLED_DTBOIMAGE_16KB_TARGET)),\
+                        $(INSTALLED_DTBOIMAGE_TARGET))
 
 boototapackage_4k: $(BUILT_BOOT_OTA_PACKAGE_4K)
 .PHONY: boototapackage_4k
@@ -1697,6 +1748,30 @@
 $(call declare-1p-container,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),$(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DTB_IMAGE_TARGET) $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS) $(INTERNAL_VENDOR_BOOTCONDIG_TARGET),$(PRODUCT_OUT)/:/)
 VENDOR_NOTICE_DEPS += $(INSTALLED_VENDOR_BOOTIMAGE_TARGET)
+
+else # BUILDING_VENDOR_BOOT_IMAGE not defined, use prebuilt image
+
+ifdef BOARD_PREBUILT_VENDOR_BOOTIMAGE
+INTERNAL_PREBUILT_VENDOR_BOOTIMAGE := $(BOARD_PREBUILT_VENDOR_BOOTIMAGE)
+INSTALLED_VENDOR_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE) $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOT_KEY_PATH)
+	cp $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE) $@
+	chmod +w $@
+	$(AVBTOOL) add_hash_footer \
+	    --image $@ \
+	    $(call get-partition-size-argument,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)) \
+	    --partition_name vendor_boot $(INTERNAL_AVB_VENDOR_BOOT_SIGNING_ARGS) \
+	    $(BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS)
+else
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE)
+	cp $(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE) $@
+
+endif # BOARD_AVB_ENABLE
+$(call declare-1p-container,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_VENDOR_BOOTIMAGE_TARGET),$(INTERNAL_PREBUILT_VENDOR_BOOTIMAGE),$(PRODUCT_OUT)/:/)
+endif # BOARD_PREBUILT_VENDOR_BOOTIMAGE
 endif # BUILDING_VENDOR_BOOT_IMAGE
 
 # -----------------------------------------------------------------
@@ -2233,11 +2308,6 @@
 $(if $(PRODUCT_VENDOR_DLKM_VERITY_PARTITION),$(hide) echo "vendor_dlkm_verity_block_device=$(PRODUCT_VENDOR_DLKM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_ODM_DLKM_VERITY_PARTITION),$(hide) echo "odm_dlkm_verity_block_device=$(PRODUCT_ODM_DLKM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_SYSTEM_DLKM_VERITY_PARTITION),$(hide) echo "system_dlkm_verity_block_device=$(PRODUCT_SYSTEM_DLKM_VERITY_PARTITION)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCT_SUPPORTS_VBOOT)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(notdir $(FUTILITY))" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
 $(if $(BOARD_AVB_ENABLE), \
   $(hide) echo "avb_avbtool=$(notdir $(AVBTOOL))" >> $(1)$(newline) \
   $(if $(filter $(2),system), \
@@ -2747,15 +2817,9 @@
 # $(1): output file
 # $(2): optional kernel file
 define build-recoveryimage-target
-  $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
-    $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
-                 $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \
-                 --output $(1).unsigned, \
-    $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
-                 $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
-                 $(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1))
-  $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
-    $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
+  $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
+               $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
+               $(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1)
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
     $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot))), \
     $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))))
@@ -2766,9 +2830,6 @@
 endef
 
 recoveryimage-deps := $(MKBOOTIMG) $(recovery_ramdisk) $(recovery_kernel)
-ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-  recoveryimage-deps += $(VBOOT_SIGNER)
-endif
 ifeq (true,$(BOARD_AVB_ENABLE))
   recoveryimage-deps += $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
 endif
@@ -3355,8 +3416,10 @@
 # system image
 
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
+ifdef BUILDING_SYSTEM_IMAGE
 INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
     $(ALL_DEFAULT_INSTALLED_MODULES)))
+endif
 
 # Create symlink /system/vendor to /vendor if necessary.
 ifdef BOARD_USES_VENDORIMAGE
@@ -3495,6 +3558,24 @@
 endef
 
 $(eval $(call write-partition-file-list,$(systemimage_intermediates)/file_list.txt,$(TARGET_OUT),$(FULL_SYSTEMIMAGE_DEPS)))
+
+ifneq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),)
+file_list_diff := $(HOST_OUT_EXECUTABLES)/file_list_diff$(HOST_EXECUTABLE_SUFFIX)
+system_file_diff_timestamp := $(systemimage_intermediates)/file_diff.timestamp
+
+$(system_file_diff_timestamp): \
+	    $(systemimage_intermediates)/file_list.txt \
+	    $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \
+	    $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \
+	    $(file_list_diff)
+	$(file_list_diff) $(systemimage_intermediates)/file_list.txt \
+	  $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \
+	  $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
+	touch $@
+
+$(BUILT_SYSTEMIMAGE): $(system_file_diff_timestamp)
+endif
+
 # Used by soong sandwich to request the staging dir be built
 $(systemimage_intermediates)/staging_dir.stamp: $(filter $(TARGET_OUT)/%,$(FULL_SYSTEMIMAGE_DEPS))
 	touch $@
@@ -3502,8 +3583,19 @@
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(BUILT_SYSTEMIMAGE): $(BOARD_AVB_SYSTEM_KEY_PATH)
 endif
+
+ifeq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true)
+ifeq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),)
+$(error PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE must be set if USE_SOONG_DEFINED_SYSTEM_IMAGE is true)
+endif
+soong_defined_system_image := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
+$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(soong_defined_system_image)
+$(eval $(call copy-one-file, $(soong_defined_system_image), $(BUILT_SYSTEMIMAGE)))
+soong_defined_system_image :=
+else
 $(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt
 	$(call build-systemimage-target,$@)
+endif
 
 $(call declare-1p-container,$(BUILT_SYSTEMIMAGE),system/extras)
 $(call declare-container-license-deps,$(BUILT_SYSTEMIMAGE),$(FULL_SYSTEMIMAGE_DEPS),$(PRODUCT_OUT)/:/)
@@ -3584,10 +3676,10 @@
 # -----------------------------------------------------------------
 # data partition image
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT_DATA)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
+ifdef BUILDING_USERDATA_IMAGE
 INTERNAL_USERDATAIMAGE_FILES := \
     $(filter $(TARGET_OUT_DATA)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
 
-ifdef BUILDING_USERDATA_IMAGE
 userdataimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,userdata)
 BUILT_USERDATAIMAGE_TARGET := $(PRODUCT_OUT)/userdata.img
@@ -4332,33 +4424,6 @@
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_SYSTEM_DLKMIMAGE),$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)))
 endif
 
-# -----------------------------------------------------------------
-# dtbo image
-ifdef BOARD_PREBUILT_DTBOIMAGE
-INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
-
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
-	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
-	chmod +w $@
-	$(AVBTOOL) add_hash_footer \
-	    --image $@ \
-	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
-	    --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
-	    $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
-
-$(call declare-1p-container,$(INSTALLED_DTBOIMAGE_TARGET),)
-$(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_TARGET),$(BOARD_PREBUILT_DTBOIMAGE),$(PRODUCT_OUT)/:/)
-
-UNMOUNTED_NOTICE_VENDOR_DEPS+= $(INSTALLED_DTBOIMAGE_TARGET)
-else
-$(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
-	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
-endif
-
-endif # BOARD_PREBUILT_DTBOIMAGE
-
-# -----------------------------------------------------------------
 # Protected VM firmware image
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 
@@ -5072,6 +5137,7 @@
   $(TARGET_OUT)/apex/% \
   $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_ODM)/apex/% \
   $(TARGET_OUT_PRODUCT)/apex/% \
 
 apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
@@ -5124,6 +5190,7 @@
   $(TARGET_OUT_PRODUCT)/apex/% \
   $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_ODM)/apex/% \
 
 apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
 
@@ -5142,6 +5209,7 @@
 	   --system_ext_path $(TARGET_OUT_SYSTEM_EXT) \
 	   --product_path $(TARGET_OUT_PRODUCT) \
 	   --vendor_path $(TARGET_OUT_VENDOR) \
+	   --odm_path $(TARGET_OUT_ODM) \
 	   --apex_path $(APEX_OUT)
 
 apex_files :=
@@ -5651,12 +5719,6 @@
   resize2fs \
   soong_zip \
 
-ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-INTERNAL_OTATOOLS_MODULES += \
-  futility-host \
-  vboot_signer
-endif
-
 INTERNAL_OTATOOLS_FILES := \
   $(filter $(HOST_OUT)/%,$(call module-installed-files,$(INTERNAL_OTATOOLS_MODULES)))
 
@@ -5692,10 +5754,6 @@
   $(sort $(shell find external/avb/test/data -type f -name "testkey_*.pem" -o \
       -name "atx_metadata.bin"))
 endif
-ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-INTERNAL_OTATOOLS_PACKAGE_FILES += \
-  $(sort $(shell find external/vboot_reference/tests/devkeys -type f))
-endif
 
 INTERNAL_OTATOOLS_RELEASETOOLS := \
   $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o \
@@ -6132,12 +6190,6 @@
     $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
     $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
   endif
-
-  # Not checking in board_config.mk, since AB_OTA_PARTITIONS may be updated in Android.mk (e.g. to
-  # additionally include radio or bootloader partitions).
-  ifeq ($(AB_OTA_PARTITIONS),)
-    $(error AB_OTA_PARTITIONS must be defined when using AB_OTA_UPDATER)
-  endif
 endif
 
 ifneq ($(AB_OTA_PARTITIONS),)
@@ -6708,6 +6760,10 @@
 	@# If breakpad symbols have been generated, add them to the zip.
 	$(hide) cp -R $(TARGET_OUT_BREAKPAD) $(zip_root)/BREAKPAD
 endif
+ifdef BOARD_PREBUILT_VENDOR_BOOTIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 ifdef BOARD_PREBUILT_VENDORIMAGE
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
@@ -6756,14 +6812,22 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif # BOARD_PREBUILT_DTBOIMAGE
-ifdef BUILT_KERNEL_16K_TARGET
+ifdef BOARD_KERNEL_PATH_16K
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(BUILT_KERNEL_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
-endif # BUILT_KERNEL_16K_TARGET
-ifdef BUILT_RAMDISK_16K_TARGET
+endif # BOARD_KERNEL_PATH_16K
+ifdef BOARD_KERNEL_MODULES_16K
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(BUILT_RAMDISK_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
-endif # BUILT_RAMDISK_16K_TARGET
+endif # BOARD_KERNEL_MODULES_16K
+ifdef BUILT_BOOTIMAGE_16K_TARGET
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(BUILT_BOOTIMAGE_16K_TARGET) $(zip_root)/IMAGES/
+endif # BUILT_BOOTIMAGE_16K_TARGET
+ifdef INSTALLED_DTBOIMAGE_16KB_TARGET
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_DTBOIMAGE_16KB_TARGET) $(zip_root)/IMAGES/
+endif # INSTALLED_DTBOIMAGE_16KB_TARGET
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
@@ -6831,6 +6895,33 @@
 	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/INIT_BOOT/pagesize
 endif # BOARD_KERNEL_PAGESIZE
 endif # BUILDING_INIT_BOOT_IMAGE
+ifdef BOARD_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_EROFS_COMPRESS_HINTS) $(zip_root)/META/erofs_default_compress_hints.txt
+endif
+ifdef BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_erofs_compress_hints.txt
+endif
+ifdef BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_ext_erofs_compress_hints.txt
+endif
+ifdef BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/product_erofs_compress_hints.txt
+endif
+ifdef BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_erofs_compress_hints.txt
+endif
+ifdef BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_erofs_compress_hints.txt
+endif
+ifdef BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_dlkm_erofs_compress_hints.txt
+endif
+ifdef BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_dlkm_erofs_compress_hints.txt
+endif
+ifdef BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_dlkm_erofs_compress_hints.txt
+endif
 ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
 	$(call fs_config,$(zip_root)/VENDOR_BOOT/RAMDISK,) > $(zip_root)/META/vendor_boot_filesystem_config.txt
 endif
@@ -7055,18 +7146,19 @@
 
 endif
 
-$(DEXPREOPT_CONFIG_ZIP): $(SOONG_ZIP)
-	$(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/dexpreopt_config
-
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS :=
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 ifneq (,$(DEX_PREOPT_CONFIG_FOR_MAKE))
-	$(hide) cp $(DEX_PREOPT_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS += -e $(notdir $(DEX_PREOPT_CONFIG_FOR_MAKE)) -f $(DEX_PREOPT_CONFIG_FOR_MAKE)
 endif
 ifneq (,$(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE))
-	$(hide) cp $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS += -e $(notdir $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)) -f $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
 endif
 endif #!TARGET_BUILD_UNBUNDLED
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/dexpreopt_config -D $(PRODUCT_OUT)/dexpreopt_config
+
+$(DEXPREOPT_CONFIG_ZIP): $(SOONG_ZIP)
+	$(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/dexpreopt_config
+	$(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/dexpreopt_config -D $(PRODUCT_OUT)/dexpreopt_config $(PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS)
 
 .PHONY: dexpreopt_config_zip
 dexpreopt_config_zip: $(DEXPREOPT_CONFIG_ZIP)
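
Editor's note: the rewritten DEXPREOPT_CONFIG_ZIP rule above stops copying the configs into $(PRODUCT_OUT)/dexpreopt_config and instead passes them to soong_zip through -e/-f pairs accumulated on a target-specific PRIVATE_ variable, so the conditionals are resolved at parse time and the recipe stays a single command. A minimal sketch of the same pattern, assuming my_zip and my_extra_config are defined elsewhere (both names are made up for illustration):

# Sketch only: build up extra zip entries on a target-specific variable.
$(my_zip): PRIVATE_ZIP_PARAMS :=
ifneq (,$(my_extra_config))
$(my_zip): PRIVATE_ZIP_PARAMS += -e $(notdir $(my_extra_config)) -f $(my_extra_config)
endif

$(my_zip): $(SOONG_ZIP)
	$(SOONG_ZIP) -o $@ $(PRIVATE_ZIP_PARAMS)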
@@ -7561,6 +7653,10 @@
 droidcore-unbundled: $(QEMU_VERIFIED_BOOT_PARAMS)
 
 endif
+
+# Preprocess files for emulator and sdk.
+-include development/build/tools/sdk-preprocess-files.mk
+
 # -----------------------------------------------------------------
 # The emulator package
 ifeq ($(BUILD_EMULATOR),true)
@@ -7802,9 +7898,78 @@
 $(call dist-for-goals,haiku-presubmit,$(SOONG_PRESUBMIT_FUZZ_PACKAGING_ARCH_MODULES))
 
 # -----------------------------------------------------------------
-# Extract platform fonts used in Layoutlib
+# Extract additional data files used in Layoutlib
 include $(BUILD_SYSTEM)/layoutlib_data.mk
 
+# -----------------------------------------------------------------
+# Desktop pack common variables.
+PACK_IMAGE_SCRIPT := $(HOST_OUT_EXECUTABLES)/pack_image
+IMAGES := $(INSTALLED_BOOTIMAGE_TARGET) \
+	$(INSTALLED_SUPERIMAGE_TARGET) \
+	$(INSTALLED_INIT_BOOT_IMAGE_TARGET) \
+	$(INSTALLED_VENDOR_BOOTIMAGE_TARGET) \
+	$(INSTALLED_VBMETAIMAGE_TARGET) \
+	$(INSTALLED_USERDATAIMAGE_TARGET)
+
+# -----------------------------------------------------------------
+# Desktop pack image hook.
+ifneq (,$(strip $(PACK_DESKTOP_FILESYSTEM_IMAGES)))
+PACK_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_image.bin
+
+$(PACK_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive
+
+PACKED_IMAGE_ARCHIVE_TARGET := $(PACK_IMAGE_TARGET).gz
+
+$(PACKED_IMAGE_ARCHIVE_TARGET): $(PACK_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-image
+pack-image: $(PACK_IMAGE_TARGET)
+
+endif # PACK_DESKTOP_FILESYSTEM_IMAGES
+
+# -----------------------------------------------------------------
+# Desktop pack recovery image hook.
+ifneq (,$(strip $(PACK_DESKTOP_RECOVERY_IMAGE)))
+PACK_RECOVERY_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_recovery_image.bin
+
+$(PACK_RECOVERY_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive --recovery
+
+PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET := $(PACK_RECOVERY_IMAGE_TARGET).gz
+
+$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET): $(PACK_RECOVERY_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_RECOVERY_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-recovery-image
+pack-recovery-image: $(PACK_RECOVERY_IMAGE_TARGET)
+
+endif # PACK_DESKTOP_RECOVERY_IMAGE
+
+# -----------------------------------------------------------------
+# Desktop pack update image hook.
+ifneq (,$(strip $(PACK_DESKTOP_UPDATE_IMAGE)))
+PACK_UPDATE_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_update_image.bin
+
+$(PACK_UPDATE_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive --update
+
+PACKED_UPDATE_IMAGE_ARCHIVE_TARGET := $(PACK_UPDATE_IMAGE_TARGET).gz
+
+$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET): $(PACK_UPDATE_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_UPDATE_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-update-image
+pack-update-image: $(PACK_UPDATE_IMAGE_TARGET)
+
+endif # PACK_DESKTOP_UPDATE_IMAGE
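
Editor's note: all three desktop pack hooks above share the same shape: a pack_image invocation gated on a configuration variable, a gzip of the result, and a phony goal plus dist entry, so none of them run by default. As a hedged sketch (values are illustrative), a desktop-oriented configuration would enable them with:

# Illustrative only: opt a build into the desktop pack hooks.
PACK_DESKTOP_FILESYSTEM_IMAGES := true
PACK_DESKTOP_RECOVERY_IMAGE := true
PACK_DESKTOP_UPDATE_IMAGE := true

after which pack-image, pack-recovery-image, and pack-update-image can be built directly and the .gz archives are picked up by dist_files.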
 
 # -----------------------------------------------------------------
 # OS Licensing
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index ea6ebd3..5fc8fd4 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -27,20 +27,36 @@
 # Add variables to the namespace below:
 
 $(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
-$(call add_soong_config_var,ANDROID,BOARD_USES_RECOVERY_AS_BOOT)
+$(call soong_config_set_bool,ANDROID,BOARD_USES_RECOVERY_AS_BOOT,$(BOARD_USES_RECOVERY_AS_BOOT))
+$(call soong_config_set_bool,ANDROID,BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT,$(BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT))
 $(call add_soong_config_var,ANDROID,CHECK_DEV_TYPE_VIOLATIONS)
+$(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_VERSION)
 $(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_COMPAT_VERSIONS)
 $(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_DRMSERVER)
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
 
-# PRODUCT_PRECOMPILED_SEPOLICY defaults to true. Explicitly check if it's "false" or not.
-$(call add_soong_config_var_value,ANDROID,PRODUCT_PRECOMPILED_SEPOLICY,$(if $(filter false,$(PRODUCT_PRECOMPILED_SEPOLICY)),false,true))
+# For Sanitizers
+$(call soong_config_set_bool,ANDROID,ASAN_ENABLED,$(if $(filter address,$(SANITIZE_TARGET)),true,false))
+$(call soong_config_set_bool,ANDROID,HWASAN_ENABLED,$(if $(filter hwaddress,$(SANITIZE_TARGET)),true,false))
+$(call soong_config_set_bool,ANDROID,SANITIZE_TARGET_SYSTEM_ENABLED,$(if $(filter true,$(SANITIZE_TARGET_SYSTEM)),true,false))
 
+# For init.environ.rc
+$(call soong_config_set_bool,ANDROID,GCOV_COVERAGE,$(NATIVE_COVERAGE))
+$(call soong_config_set_bool,ANDROID,CLANG_COVERAGE,$(CLANG_COVERAGE))
+$(call soong_config_set,ANDROID,SCUDO_ALLOCATION_RING_BUFFER_SIZE,$(PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE))
+
+# PRODUCT_PRECOMPILED_SEPOLICY defaults to true. Explicitly check if it's "false" or not.
+$(call soong_config_set_bool,ANDROID,PRODUCT_PRECOMPILED_SEPOLICY,$(if $(filter false,$(PRODUCT_PRECOMPILED_SEPOLICY)),false,true))
+
+# For art modules
+$(call soong_config_set_bool,art_module,host_prefer_32_bit,$(if $(filter true,$(HOST_PREFER_32_BIT)),true,false))
 ifdef ART_DEBUG_OPT_FLAG
 $(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG))
 endif
+# The default value of ART_BUILD_HOST_DEBUG is true
+$(call soong_config_set_bool,art_module,art_build_host_debug,$(if $(filter false,$(ART_BUILD_HOST_DEBUG)),false,true))
 
 ifdef TARGET_BOARD_AUTO
   $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
@@ -55,24 +71,10 @@
 endif
 endif
 
-# TODO(b/308187800): some internal modules set `prefer` to true on the prebuilt apex module,
-# and set that to false when `ANDROID.module_build_from_source` is true.
-# Set this soong config variable to true for now, and cleanup `prefer` as part of b/308187800
-$(call add_soong_config_var_value,ANDROID,module_build_from_source,true)
-
-# Messaging app vars
-ifeq (eng,$(TARGET_BUILD_VARIANT))
-$(call soong_config_set,messaging,build_variant_eng,true)
-endif
-
 # Enable SystemUI optimizations by default unless explicitly set.
 SYSTEMUI_OPTIMIZE_JAVA ?= true
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
 
-# Enable Compose in SystemUI by default.
-SYSTEMUI_USE_COMPOSE ?= true
-$(call add_soong_config_var,ANDROID,SYSTEMUI_USE_COMPOSE)
-
 ifdef PRODUCT_AVF_ENABLED
 $(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
 endif
@@ -98,6 +100,7 @@
 $(call add_soong_config_var_value,ANDROID,release_avf_allow_preinstalled_apps,$(RELEASE_AVF_ALLOW_PREINSTALLED_APPS))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_device_assignment,$(RELEASE_AVF_ENABLE_DEVICE_ASSIGNMENT))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_dice_changes,$(RELEASE_AVF_ENABLE_DICE_CHANGES))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_early_vm,$(RELEASE_AVF_ENABLE_EARLY_VM))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_llpvm_changes,$(RELEASE_AVF_ENABLE_LLPVM_CHANGES))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_multi_tenant_microdroid_vm,$(RELEASE_AVF_ENABLE_MULTI_TENANT_MICRODROID_VM))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_network,$(RELEASE_AVF_ENABLE_NETWORK))
@@ -111,6 +114,8 @@
 
 $(call add_soong_config_var_value,ANDROID,release_binder_death_recipient_weak_from_jni,$(RELEASE_BINDER_DEATH_RECIPIENT_WEAK_FROM_JNI))
 
+$(call add_soong_config_var_value,ANDROID,release_libpower_no_lock_binder_txn,$(RELEASE_LIBPOWER_NO_LOCK_BINDER_TXN))
+
 $(call add_soong_config_var_value,ANDROID,release_package_libandroid_runtime_punch_holes,$(RELEASE_PACKAGE_LIBANDROID_RUNTIME_PUNCH_HOLES))
 
 $(call add_soong_config_var_value,ANDROID,release_selinux_data_data_ignore,$(RELEASE_SELINUX_DATA_DATA_IGNORE))
@@ -154,6 +159,7 @@
 
 # Add crashrecovery build flag to soong
 $(call soong_config_set,ANDROID,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+# Add crashrecovery file move flags to soong, for both platform and module
 ifeq (true,$(RELEASE_CRASHRECOVERY_FILE_MOVE))
   $(call soong_config_set,ANDROID,crashrecovery_files_in_module,true)
   $(call soong_config_set,ANDROID,crashrecovery_files_in_platform,false)
@@ -161,5 +167,18 @@
   $(call soong_config_set,ANDROID,crashrecovery_files_in_module,false)
   $(call soong_config_set,ANDROID,crashrecovery_files_in_platform,true)
 endif
-# Weirdly required because platform_bootclasspath is using AUTO namespace
-$(call soong_config_set,AUTO,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+# Required as platform_bootclasspath is using this namespace
+$(call soong_config_set,bootclasspath,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+
+# Enable Profiling module. Also used by platform_bootclasspath.
+$(call soong_config_set,ANDROID,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE))
+$(call soong_config_set,bootclasspath,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE))
+
+# Add perf-setup build flag to soong
+# Note: BOARD_PERFSETUP_SCRIPT location must be under platform_testing/scripts/perf-setup/.
+ifdef BOARD_PERFSETUP_SCRIPT
+  $(call soong_config_set,perf,board_perfsetup_script,$(notdir $(BOARD_PERFSETUP_SCRIPT)))
+endif
+
+# Add target_use_pan_display flag for hardware/libhardware:gralloc.default
+$(call soong_config_set_bool,gralloc,target_use_pan_display,$(if $(filter true,$(TARGET_USE_PAN_DISPLAY)),true,false))
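
Editor's note: the new perf-setup hook only exports the basename of the script into the perf namespace; per the note above, the consuming module is expected to locate the file under platform_testing/scripts/perf-setup/. A hedged sketch of the board-side wiring (the path and file name are illustrative):

# Illustrative BoardConfig.mk snippet; the script must live under
# platform_testing/scripts/perf-setup/.
BOARD_PERFSETUP_SCRIPT := platform_testing/scripts/perf-setup/my-board-setup.sh

which ends up as board_perfsetup_script := my-board-setup.sh in the perf Soong config namespace.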
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 86028a9..5363e0f 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -340,7 +340,7 @@
 
 ifneq (,$(LOCAL_SOONG_INSTALLED_MODULE))
   ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
     $(call pretty-error, LOCAL_SOONG_INSTALLED_MODULE can only be used from $(SOONG_ANDROID_MK))
   endif
   # Use the install path requested by Soong.
   LOCAL_INSTALLED_MODULE := $(LOCAL_SOONG_INSTALLED_MODULE)
@@ -717,15 +717,25 @@
 endif
 
 ifeq ($(EXCLUDE_MCTS),true)
+ifeq (,$(filter $(LOCAL_MODULE),$(mcts_whitelist)))
   ifneq (,$(test_config))
     ifneq (,$(filter mcts-%,$(LOCAL_COMPATIBILITY_SUITE)))
       LOCAL_COMPATIBILITY_SUITE := $(filter-out cts,$(LOCAL_COMPATIBILITY_SUITE))
     endif
   endif
 endif
+endif
 
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
 
+ifeq ($(EXCLUDE_MCTS),true)
+  ifneq (,$(test_config))
+    ifneq (,$(filter mcts-%,$(LOCAL_COMPATIBILITY_SUITE)))
+      LOCAL_COMPATIBILITY_SUITE := $(filter-out cts,$(LOCAL_COMPATIBILITY_SUITE))
+    endif
+  endif
+endif
+
 # If we are building a native test or benchmark and its stem variants are not defined,
 # separate the multiple architectures into subdirectories of the testcase folder.
 arch_dir :=
@@ -766,6 +776,8 @@
   $(eval my_compat_dist_$(suite) := $(patsubst %:$(LOCAL_INSTALLED_MODULE),$(LOCAL_INSTALLED_MODULE):$(LOCAL_INSTALLED_MODULE),\
     $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
       $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem)))) \
+  $(eval my_compat_module_arch_dir_$(suite).$(my_register_name) :=) \
+  $(foreach dir,$(call compatibility_suite_dirs,$(suite),$(arch_dir)),$(eval my_compat_module_arch_dir_$(suite).$(my_register_name) += $(dir))) \
   $(eval my_compat_dist_config_$(suite) := ))
 
 ifneq (,$(LOCAL_SOONG_CLASSES_JAR))
@@ -965,6 +977,8 @@
     $(ALL_MODULES.$(my_register_name).BUILT) $(LOCAL_BUILT_MODULE)
 ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE := \
     $(ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE) $(LOCAL_SOONG_MODULE_TYPE)
+ALL_MODULES.$(my_register_name).IS_SOONG_MODULE := \
+    $(if $(filter $(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK)),true)
 ifndef LOCAL_IS_HOST_MODULE
 ALL_MODULES.$(my_register_name).TARGET_BUILT := \
     $(ALL_MODULES.$(my_register_name).TARGET_BUILT) $(LOCAL_BUILT_MODULE)
@@ -1055,6 +1069,11 @@
       $(ALL_MODULES.$(my_register_name).ACONFIG_FILES) $(LOCAL_ACONFIG_FILES)
 endif
 
+ifdef LOCAL_FILESYSTEM_FILELIST
+  ALL_MODULES.$(my_register_name).FILESYSTEM_FILELIST := \
+      $(ALL_MODULES.$(my_register_name).FILESYSTEM_FILELIST) $(LOCAL_FILESYSTEM_FILELIST)
+endif
+
 ifndef LOCAL_SOONG_MODULE_INFO_JSON
   ALL_MAKE_MODULE_INFO_JSON_MODULES += $(my_register_name)
   ALL_MODULES.$(my_register_name).SHARED_LIBS := \
@@ -1266,6 +1285,8 @@
 endif
 endif
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=base_rules))
+
 ###########################################################
 # Ensure privileged applications always have LOCAL_PRIVILEGED_MODULE
 ###########################################################
diff --git a/core/binary.mk b/core/binary.mk
index f86b5a4..1e98bc0 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -205,8 +205,6 @@
     my_api_level := $(my_ndk_api)
   endif
 
-  my_ndk_source_root := \
-      $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources
   my_built_ndk := $(SOONG_OUT_DIR)/ndk
   my_ndk_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_TRIPLE)
   my_ndk_sysroot_include := \
@@ -239,16 +237,18 @@
   endif
 
   ifeq (system,$(LOCAL_NDK_STL_VARIANT))
+    my_ndk_source_root := \
+        $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include
     my_system_shared_libraries += libstdc++
   else ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := \
-      $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
-    my_ndk_stl_include_path += \
-      $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include
+    my_llvm_dir := $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)
+    my_libcxx_arch_dir := $(my_llvm_dir)/android_libc++/ndk/$($(LOCAL_2ND_ARCH_VAR_PREFIX)PREBUILT_LIBCXX_ARCH_DIR)
 
-    my_libcxx_libdir := \
-      $(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant)
+    # Include the target-specific __config_site file followed by the generic libc++ headers.
+    my_ndk_stl_include_path := $(my_libcxx_arch_dir)/include/c++/v1
+    my_ndk_stl_include_path += $(my_llvm_dir)/include/c++/v1
+    my_libcxx_libdir := $(my_libcxx_arch_dir)/lib
 
     ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
       my_ndk_stl_static_lib := \
@@ -258,14 +258,7 @@
       my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
     endif
 
-    ifneq ($(my_ndk_api),current)
-      ifeq ($(call math_lt,$(my_ndk_api),21),true)
-        my_ndk_stl_include_path += $(my_ndk_source_root)/android/support/include
-        my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
-      endif
-    endif
-
-    my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
+    my_ndk_stl_static_lib += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBUNWIND)
     my_ldlibs += -ldl
   else # LOCAL_NDK_STL_VARIANT must be none
     # Do nothing.
@@ -1352,6 +1345,8 @@
 my_allowed_types := $(my_allowed_ndk_types) native:platform native:platform_vndk
 endif
 
+ALL_MODULES.$(my_register_name).WHOLE_STATIC_LIBS := $(my_whole_static_libraries)
+
 my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
 ifneq ($(filter-out STATIC_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
 my_link_deps += $(addprefix SHARED_LIBRARIES:,$(my_shared_libraries))
diff --git a/core/board_config.mk b/core/board_config.mk
index e184601..5606964 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -237,6 +237,7 @@
   .KATI_READONLY := TARGET_DEVICE_DIR
 endif
 
+$(call dump-phase-start,BOARD,,,, build/make/core/board_config.mk)
 ifndef RBC_PRODUCT_CONFIG
 include $(board_config_mk)
 else
@@ -261,6 +262,7 @@
 
   include $(OUT_DIR)/rbc/rbc_board_config_results.mk
 endif
+$(call dump-phase-end, build/make/core/board_config.mk)
 
 ifneq (,$(and $(TARGET_ARCH),$(TARGET_ARCH_SUITE)))
   $(error $(board_config_mk) erroneously sets both TARGET_ARCH and TARGET_ARCH_SUITE)
@@ -923,7 +925,9 @@
 ###########################################
 # Ensure consistency among TARGET_RECOVERY_UPDATER_LIBS, AB_OTA_UPDATER, and PRODUCT_OTA_FORCE_NON_AB_PACKAGE.
 TARGET_RECOVERY_UPDATER_LIBS ?=
-AB_OTA_UPDATER ?=
+ifeq ($(AB_OTA_UPDATER),)
+AB_OTA_UPDATER := true
+endif
 .KATI_READONLY := TARGET_RECOVERY_UPDATER_LIBS AB_OTA_UPDATER
 
 # Ensure that if PRODUCT_OTA_FORCE_NON_AB_PACKAGE == true, then AB_OTA_UPDATER must be true
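
Editor's note: the default flips here: AB_OTA_UPDATER is now true when left unset, and it becomes read-only immediately afterwards, so boards that still ship non-A/B OTAs must opt out explicitly before this check runs. A hedged, purely illustrative sketch:

# Illustrative BoardConfig.mk snippet for a device that is not A/B.
AB_OTA_UPDATER := false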
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index f18747a..126482f 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -4,7 +4,10 @@
 
 $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-arm-android.a
 $(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-arm-android.a
+$(clang_2nd_arch_prefix)TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/arm/libunwind.a
 
 # Address sanitizer clang config
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
+
+$(clang_2nd_arch_prefix)PREBUILT_LIBCXX_ARCH_DIR := arm
diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk
index 42bed0a..e7ab6cb 100644
--- a/core/clang/TARGET_arm64.mk
+++ b/core/clang/TARGET_arm64.mk
@@ -4,7 +4,10 @@
 
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-aarch64-android.a
 TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-aarch64-android.a
+TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/aarch64/libunwind.a
 
 # Address sanitizer clang config
 ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
+
+PREBUILT_LIBCXX_ARCH_DIR := aarch64
diff --git a/core/clang/TARGET_riscv64.mk b/core/clang/TARGET_riscv64.mk
index cfb5c7d..58c9c7b 100644
--- a/core/clang/TARGET_riscv64.mk
+++ b/core/clang/TARGET_riscv64.mk
@@ -4,7 +4,10 @@
 
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-riscv64-android.a
 TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-riscv64-android.a
+TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/riscv64/libunwind.a
 
 # Address sanitizer clang config
 ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
+
+PREBUILT_LIBCXX_ARCH_DIR := riscv64
diff --git a/core/clang/TARGET_x86.mk b/core/clang/TARGET_x86.mk
index 5491a05..1a08c79 100644
--- a/core/clang/TARGET_x86.mk
+++ b/core/clang/TARGET_x86.mk
@@ -4,7 +4,10 @@
 
 $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686-android.a
 $(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-i686-android.a
+$(clang_2nd_arch_prefix)TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/i386/libunwind.a
 
 # Address sanitizer clang config
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
+
+$(clang_2nd_arch_prefix)PREBUILT_LIBCXX_ARCH_DIR := i386
diff --git a/core/clang/TARGET_x86_64.mk b/core/clang/TARGET_x86_64.mk
index 167db72..f39b41e 100644
--- a/core/clang/TARGET_x86_64.mk
+++ b/core/clang/TARGET_x86_64.mk
@@ -4,7 +4,10 @@
 
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-x86_64-android.a
 TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-x86_64-android.a
+TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/x86_64/libunwind.a
 
 # Address sanitizer clang config
 ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
+
+PREBUILT_LIBCXX_ARCH_DIR := x86_64
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 6192690..fed19e6 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -87,6 +87,7 @@
 LOCAL_EXTRA_FULL_TEST_CONFIGS:=
 LOCAL_EXTRACT_APK:=
 LOCAL_EXTRACT_DPI_APK:=
+LOCAL_FILESYSTEM_FILELIST:=
 LOCAL_FINDBUGS_FLAGS:=
 LOCAL_FORCE_STATIC_EXECUTABLE:=
 LOCAL_FULL_CLASSES_JACOCO_JAR:=
diff --git a/core/config.mk b/core/config.mk
index c7567e3..a294223 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -316,6 +316,19 @@
 $(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(strip $3))
 endef
 
+# soong_config_set_bool is the same as soong_config_set, but it also types
+# the variable as a bool, so that select() expressions in blueprint files
+# can use boolean values instead of strings.
+# It only accepts "true" as its value; any other value is treated as false.
+# $1 is the namespace. $2 is the variable name. $3 is the variable value.
+# Ex: $(call soong_config_set_bool,acme,COOL_FEATURE,true)
+define soong_config_set_bool
+$(call soong_config_define_internal,$1,$2) \
+$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(filter true,$3))
+$(eval SOONG_CONFIG_TYPE_$(strip $1)_$(strip $2):=bool)
+endef
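
Editor's note: since any value other than "true" collapses to false, it may help to spell out what the helper leaves behind; a short sketch using the same illustrative namespace as the Ex: line above:

# Sketch: "maybe" is not "true", so the value collapses to empty (false).
$(call soong_config_set_bool,acme,COOL_FEATURE,maybe)
# Resulting variables:
#   SOONG_CONFIG_acme_COOL_FEATURE      is empty (treated as false)
#   SOONG_CONFIG_TYPE_acme_COOL_FEATURE is bool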
+
 # soong_config_append appends to the value of the variable in the given Soong
 # config namespace. If the variable does not exist, it will be defined. If the
 # namespace does not  exist, it will be defined.
@@ -718,8 +731,6 @@
 VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
 BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata
 BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
-FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
-VBOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/vboot_signer
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -800,6 +811,12 @@
   BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
 endif
 
+ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),)
+  ifneq ($(NEED_AIDL_NDK_PLATFORM_BACKEND),)
+    $(error Must not set NEED_AIDL_NDK_PLATFORM_BACKEND, but it is set to: $(NEED_AIDL_NDK_PLATFORM_BACKEND). Support will be removed.)
+  endif
+endif
+
 # Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
 # devices if unset.
 ifndef BOARD_SYSTEMSDK_VERSIONS
@@ -828,9 +845,6 @@
   else
     min_systemsdk_version := $(PRODUCT_SHIPPING_API_LEVEL)
   endif
-  ifneq ($(call numbers_less_than,$(min_systemsdk_version),$(BOARD_SYSTEMSDK_VERSIONS)),)
-    $(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to BOARD_API_LEVEL, BOARD_SHIPPING_API_LEVEL or PRODUCT_SHIPPING_API_LEVEL ($(min_systemsdk_version)))
-  endif
   ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
     ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
       $(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
@@ -1238,31 +1252,64 @@
 # in the source tree.
 dont_bother_goals := out product-graph
 
+ifeq ($(TARGET_SYSTEM_PROP),)
+TARGET_SYSTEM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
+endif
+
+ifeq ($(TARGET_SYSTEM_EXT_PROP),)
+TARGET_SYSTEM_EXT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop)
+endif
+
+ifeq ($(TARGET_PRODUCT_PROP),)
+TARGET_PRODUCT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/product.prop)
+endif
+
+ifeq ($(TARGET_ODM_PROP),)
+TARGET_ODM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/odm.prop)
+endif
+
+.KATI_READONLY := \
+    TARGET_SYSTEM_PROP \
+    TARGET_SYSTEM_EXT_PROP \
+    TARGET_PRODUCT_PROP \
+    TARGET_ODM_PROP \
+
 include $(BUILD_SYSTEM)/sysprop_config.mk
 
 # Make ANDROID Soong config variables visible to Android.mk files, for
 # consistency with those defined in BoardConfig.mk files.
 include $(BUILD_SYSTEM)/android_soong_config_vars.mk
 
+# EMMA_INSTRUMENT is set to true when coverage is enabled. Create a suffix to
+# differentiate the coverage versions of the ninja files. This saves about
+# 5 minutes of build time that would otherwise be spent regenerating ninja.
+ifeq (true,$(EMMA_INSTRUMENT))
+COVERAGE_SUFFIX := .coverage
+endif
+
+SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).variables
+SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).extra.variables
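
Editor's note: COVERAGE_SUFFIX only changes which Soong-generated files Make reads, so coverage and non-coverage builds keep separate ninja state. A hedged example of the resulting names (the product name is illustrative, and SOONG_OUT_DIR is typically out/soong):

# With TARGET_PRODUCT := aosp_arm64:
#   EMMA_INSTRUMENT unset  -> out/soong/soong.aosp_arm64.variables
#   EMMA_INSTRUMENT = true -> out/soong/soong.aosp_arm64.coverage.variables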
+
 ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ninja_config.mk
 include $(BUILD_SYSTEM)/soong_config.mk
 endif
 
+SOONG_VARIABLES :=
+SOONG_EXTRA_VARIABLES :=
+
 -include external/ltp/android/ltp_package_list.mk
 DEFAULT_DATA_OUT_MODULES := ltp $(ltp_packages)
 .KATI_READONLY := DEFAULT_DATA_OUT_MODULES
 
 include $(BUILD_SYSTEM)/dumpvar.mk
 
-ifneq ($(KEEP_VNDK),true)
 ifdef BOARD_VNDK_VERSION
 BOARD_VNDK_VERSION=
 endif
 ifdef PLATFORM_VNDK_VERSION
 PLATFORM_VNDK_VERSION=
 endif
-endif
 
 ifeq (true,$(FULL_SYSTEM_OPTIMIZE_JAVA))
 ifeq (false,$(SYSTEM_OPTIMIZE_JAVA))
diff --git a/core/copy_headers.mk b/core/copy_headers.mk
index 397ea62..2e82db7 100644
--- a/core/copy_headers.mk
+++ b/core/copy_headers.mk
@@ -50,4 +50,5 @@
 _chFrom :=
 _chTo :=
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=COPY_HEADERS))
 endif # LOCAL_COPY_HEADERS
diff --git a/core/definitions.mk b/core/definitions.mk
index dde0aa9..cd1b36e 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2584,7 +2584,28 @@
         @$(call emit-line,$(wordlist 98001,98500,$(1)),$(2))
         @$(call emit-line,$(wordlist 98501,99000,$(1)),$(2))
         @$(call emit-line,$(wordlist 99001,99500,$(1)),$(2))
-        @$(if $(wordlist 99501,99502,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
+        @$(call emit-line,$(wordlist 99501,100000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 100001,100500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 100501,101000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 101001,101500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 101501,102000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 102001,102500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 102501,103000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 103001,103500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 103501,104000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 104001,104500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 104501,105000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 105001,105500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 105501,106000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 106001,106500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 106501,107000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 107001,107500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 107501,108000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 108001,108500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 108501,109000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 109001,109500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 109501,110000,$(1)),$(2))
+        @$(if $(wordlist 110001,110002,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
 endef
 # Return jar arguments to compress files in a given directory
 # $(1): directory
@@ -2925,19 +2946,15 @@
   echo "Install path: $(patsubst $(PRODUCT_OUT)/%,%,$(PRIVATE_INSTALLED_MODULE))" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
-ART_VERIDEX_APPCOMPAT_SCRIPT:=$(HOST_OUT)/bin/appcompat.sh
+ART_VERIDEX_APPCOMPAT:=$(HOST_OUT)/bin/appcompat
 define run-appcompat
 $(hide) \
-  echo "appcompat.sh output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
-  PACKAGING=$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING ANDROID_LOG_TAGS="*:e" $(ART_VERIDEX_APPCOMPAT_SCRIPT) --dex-file=$@ --api-flags=$(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
+  echo "appcompat output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+  ANDROID_LOG_TAGS="*:e" $(ART_VERIDEX_APPCOMPAT) --dex-file=$@ 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
 appcompat-files = \
   $(AAPT2) \
-  $(ART_VERIDEX_APPCOMPAT_SCRIPT) \
-  $(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) \
-  $(HOST_OUT_EXECUTABLES)/veridex \
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/core_dex_intermediates/classes.dex \
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/oahl_dex_intermediates/classes.dex
+  $(ART_VERIDEX_APPCOMPAT) \
 else
 appcompat-header =
 run-appcompat =
@@ -3595,6 +3612,7 @@
     $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(my_compat_dist_test_data_$(suite))) \
+  $(eval COMPATIBILITY.$(suite).ARCH_DIRS.$(my_register_name) := $(my_compat_module_arch_dir_$(suite).$(my_register_name))) \
   $(eval COMPATIBILITY.$(suite).API_MAP_FILES += $$(my_compat_api_map_$(suite))) \
   $(eval COMPATIBILITY.$(suite).SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES += $(LOCAL_SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) \
   $(eval ALL_COMPATIBILITY_DIST_FILES += $$(my_compat_dist_$(suite))) \
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 26b8b17..906d7f0 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -13,25 +13,6 @@
 install-on-system-other = $(filter-out $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1)))))
 endif
 
-# We want to install the profile even if we are not using preopt since it is required to generate
-# the image on the device.
-ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED),$(PRODUCT_OUT))
-
-# Install boot images. Note that there can be multiple.
-my_boot_image_arch := TARGET_ARCH
-my_boot_image_out := $(PRODUCT_OUT)
-my_boot_image_syms := $(TARGET_OUT_UNSTRIPPED)
-DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE := \
-  $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(strip \
-    $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-    $(my_boot_image_module)))
-ifdef TARGET_2ND_ARCH
-  my_boot_image_arch := TARGET_2ND_ARCH
-  2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE := \
-    $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(strip \
-      $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-      $(my_boot_image_module)))
-endif
 # Install boot images for testing on host. We exclude framework image as it is not part of art manifest.
 my_boot_image_arch := HOST_ARCH
 my_boot_image_out := $(HOST_OUT)
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index d51de33..f1e9fb5 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -1,4 +1,4 @@
-DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config
+DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt${COVERAGE_SUFFIX}.config
 
 ENABLE_PREOPT := true
 ENABLE_PREOPT_BOOT_IMAGES := true
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 08e2da3..e7086b7 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -504,8 +504,8 @@
   _system_other := $(strip $(if $(strip $(BOARD_USES_SYSTEM_OTHER_ODEX)), \
     $(if $(strip $(SANITIZE_LITE)),, \
       $(if $(filter $(_dexname),$(PRODUCT_DEXPREOPT_SPEED_APPS))$(filter $(_dexname),$(PRODUCT_SYSTEM_SERVER_APPS)),, \
-        $(if $(strip $(foreach myfilter,$(SYSTEM_OTHER_ODEX_FILTER),$(filter system/$(myfilter),$(_dexlocation)))), \
-          system_other/)))))
+        $(if $(strip $(foreach myfilter,$(SYSTEM_OTHER_ODEX_FILTER),$(filter system/$(myfilter),$(_dexlocation))$(filter $(myfilter),$(_dexlocation)))), \
+            system_other/)))))
   # _dexdir has a trailing /
   _dexdir := $(_system_other)$(dir $(_dexlocation))
   my_dexpreopt_zip_contents := $(sort \
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
index 640fe10..eb4c822 100644
--- a/core/dumpconfig.mk
+++ b/core/dumpconfig.mk
@@ -56,7 +56,7 @@
 
 # Escape quotation marks for CSV, and wraps in quotation marks.
 define escape-for-csv
-"$(subst ","",$1)"
+"$(subst ","",$(subst $(newline), ,$1))"
 endef
 
 # Args:
@@ -68,7 +68,7 @@
 # Args:
 #   $(1): include stack
 define dump-import-done
-$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1))))
+$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1)),$(filter-out $(1),$(MAKEFILE_LIST))))
 endef
 
 # Args:
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 93cec64..c063f60 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -50,13 +50,6 @@
 # Release config
 include $(BUILD_SYSTEM)/release_config.mk
 
-# Set default value of KEEP_VNDK.
-ifeq ($(RELEASE_DEPRECATE_VNDK),true)
-  KEEP_VNDK ?= false
-else
-  KEEP_VNDK ?= true
-endif
-
 # ---------------------------------------------------------------
 # Set up version information
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index fecf4f6..2a76c9d 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -110,4 +110,6 @@
 $(LOCAL_BUILT_MODULE): $(my_coverage_path)/$(GCNO_ARCHIVE)
 endif
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=EXECUTABLE))
+
 endif  # skip_build_from_source
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
index 8a4b8c3..1181c66 100644
--- a/core/fuzz_test.mk
+++ b/core/fuzz_test.mk
@@ -43,3 +43,5 @@
 endif
 
 include $(BUILD_EXECUTABLE)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=FUZZ_TEST))
\ No newline at end of file
diff --git a/core/header_library_internal.mk b/core/header_library_internal.mk
index 35ee1bc..a21c853 100644
--- a/core/header_library_internal.mk
+++ b/core/header_library_internal.mk
@@ -19,3 +19,5 @@
 
 $(LOCAL_BUILT_MODULE):
 	$(hide) touch $@
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HEADER_LIBRARY))
\ No newline at end of file
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index 2ff9ff2..7c79a1e 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -57,4 +57,6 @@
 $(LOCAL_BUILT_MODULE): $(all_objects) $(all_libraries) $(CLANG_CXX)
 	$(transform-host-o-to-executable)
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_EXECUTABLE))
+
 endif  # skip_build_from_source
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index d45da48..652eb0e 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -124,3 +124,5 @@
 ifeq ($(TURBINE_ENABLED),false)
 $(eval $(call copy-one-file,$(LOCAL_FULL_CLASSES_JACOCO_JAR),$(full_classes_header_jar)))
 endif
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_JAVA_LIBRARY))
\ No newline at end of file
diff --git a/core/host_prebuilt.mk b/core/host_prebuilt.mk
index 79f3ffa..7dc6704 100644
--- a/core/host_prebuilt.mk
+++ b/core/host_prebuilt.mk
@@ -17,3 +17,5 @@
 $(call record-module-type,HOST_PREBUILT)
 LOCAL_IS_HOST_MODULE := true
 include $(BUILD_MULTI_PREBUILT)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_PREBUILT))
\ No newline at end of file
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index ae8b798..22a02d4 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -53,4 +53,6 @@
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-host-o-to-shared-lib)
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_SHARED_LIBRARY))
+
 endif  # skip_build_from_source
diff --git a/core/host_static_library_internal.mk b/core/host_static_library_internal.mk
index 3946aa7..079c45e 100644
--- a/core/host_static_library_internal.mk
+++ b/core/host_static_library_internal.mk
@@ -23,3 +23,5 @@
 $(LOCAL_BUILT_MODULE): $(built_whole_libraries)
 $(LOCAL_BUILT_MODULE): $(all_objects)
 	$(transform-host-o-to-static-lib)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=HOST_STATIC_LIBRARY))
\ No newline at end of file
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 5491247..4959edd 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -38,8 +38,9 @@
       $(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
           LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
     endif
+    my_libcxx_arch := $($(LOCAL_2ND_ARCH_VAR_PREFIX)PREBUILT_LIBCXX_ARCH_DIR)
     my_jni_shared_libraries += \
-        $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
+        $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/android_libc++/ndk/$(my_libcxx_arch)/lib/libc++_shared.so
   endif
 
   # Set the abi directory used by the local JNI shared libraries.
diff --git a/core/java_common.mk b/core/java_common.mk
index 65feb15..a21f062 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -32,6 +32,8 @@
     else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS))
       # TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules
       LOCAL_JAVA_LANGUAGE_VERSION := 1.8
+    else ifeq ($(EXPERIMENTAL_TARGET_JAVA_VERSION_21),true)
+      LOCAL_JAVA_LANGUAGE_VERSION := 21
     else
       LOCAL_JAVA_LANGUAGE_VERSION := 17
     endif
diff --git a/core/java_library.mk b/core/java_library.mk
index 3ac03dc..97ce92c 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -88,3 +88,5 @@
 $(eval $(call copy-one-file,$(common_javalib.jar),$(LOCAL_BUILT_MODULE)))
 
 endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=JAVA_LIBRARY))
\ No newline at end of file
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
index 46393ac..4b6eea7 100644
--- a/core/java_prebuilt_internal.mk
+++ b/core/java_prebuilt_internal.mk
@@ -172,6 +172,12 @@
 endif
 endif
 
+# transitive-res-packages is only populated for Soong modules for now, but needs
+# to exist so that other Make modules can depend on it.  Create an empty file.
+my_transitive_res_packages := $(intermediates.COMMON)/transitive-res-packages
+$(my_transitive_res_packages):
+	touch $@
+
 my_res_package := $(intermediates.COMMON)/package-res.apk
 
 # We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
diff --git a/core/layoutlib_data.mk b/core/layoutlib_data.mk
index e45f7ef..e420a00 100644
--- a/core/layoutlib_data.mk
+++ b/core/layoutlib_data.mk
@@ -66,11 +66,19 @@
 # Resource files from frameworks/base/core/res/res
 LAYOUTLIB_RES := $(call intermediates-dir-for,PACKAGING,layoutlib-res,HOST,COMMON)
 LAYOUTLIB_RES_FILES := $(shell find frameworks/base/core/res/res -type f -not -path 'frameworks/base/core/res/res/values-m[nc]c*' | sort)
-$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES)
+EMULATED_OVERLAYS_FILES := $(shell find frameworks/base/packages/overlays/*/res/ | sort)
+DEVICE_OVERLAYS_FILES := $(shell find device/generic/goldfish/phone/overlay/frameworks/base/packages/overlays/*/AndroidOverlay/res/ | sort)
+$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES)
 	rm -rf $@
-	echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist.txt
-	$(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist.txt -o $(LAYOUTLIB_RES)/temp.zip
-	rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp.zip
+	echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist_res.txt
+	$(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist_res.txt -o $(LAYOUTLIB_RES)/temp_res.zip
+	echo $(EMULATED_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt
+	$(SOONG_ZIP) -C frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt -o $(LAYOUTLIB_RES)/temp_emulated_overlays.zip
+	echo $(DEVICE_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_device_overlays.txt
+	$(SOONG_ZIP) -C device/generic/goldfish/phone/overlay/frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_device_overlays.txt -o $(LAYOUTLIB_RES)/temp_device_overlays.zip
+	rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_res.zip
+	unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_emulated_overlays.zip
+	unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_device_overlays.zip
 	rm -rf $(LAYOUTLIB_RES)/compiled && mkdir $(LAYOUTLIB_RES)/compiled && $(HOST_OUT_EXECUTABLES)/aapt2 compile $(LAYOUTLIB_RES)/data/res/**/*.9.png -o $(LAYOUTLIB_RES)/compiled
 	printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.google.android.layoutlib" />' > $(LAYOUTLIB_RES)/AndroidManifest.xml
 	$(HOST_OUT_EXECUTABLES)/aapt2 link -R $(LAYOUTLIB_RES)/compiled/* -o $(LAYOUTLIB_RES)/compiled.apk --manifest $(LAYOUTLIB_RES)/AndroidManifest.xml
@@ -78,7 +86,7 @@
 	for f in $(LAYOUTLIB_RES)/compiled_apk/res/*; do mv "$$f" "$${f/-v4/}";done
 	for f in $(LAYOUTLIB_RES)/compiled_apk/res/**/*.9.png; do mv "$$f" "$${f/.9.png/.compiled.9.png}";done
 	cp -r $(LAYOUTLIB_RES)/compiled_apk/res $(LAYOUTLIB_RES)/data
-	$(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/res -o $@
+	$(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/ -o $@
 
 $(call dist-for-goals,layoutlib,$(LAYOUTLIB_RES)/layoutlib-res.zip:layoutlib_native/res.zip)
 
@@ -132,16 +140,26 @@
 	  echo $(_path),,,,,,Y,$f,,, >> $@; \
 	)
 
+	$(foreach f,$(EMULATED_OVERLAYS_FILES), \
+	  $(eval _path := $(subst frameworks/base/packages,data,$f)) \
+	  echo $(_path),,,,,,Y,$f,,, >> $@; \
+	)
+
+	$(foreach f,$(DEVICE_OVERLAYS_FILES), \
+	  $(eval _path := $(subst device/generic/goldfish/phone/overlay/frameworks/base/packages,data,$f)) \
+	  echo $(_path),,,,,,Y,$f,,, >> $@; \
+	)
+
 .PHONY: layoutlib-sbom
 layoutlib-sbom: $(LAYOUTLIB_SBOM)/layoutlib.spdx.json
-$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES)
+$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES)
 	rm -rf $@
 	$(GEN_SBOM) --output_file $@ --metadata $(LAYOUTLIB_SBOM)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --module_name "layoutlib" --json
 
 $(call dist-for-goals,layoutlib,$(LAYOUTLIB_SBOM)/layoutlib.spdx.json:layoutlib_native/sbom/layoutlib.spdx.json)
 
 # Generate SBOM of framework_res.jar that is created in release_layoutlib.sh.
-# The generated SBOM contains placeholders for release_layotlib.sh to substitute, and the placeholders include:
+# The generated SBOM contains placeholders for release_layoutlib.sh to substitute, and the placeholders include:
 # document name, document namespace, document creation info, organization and SHA1 value of framework_res.jar.
 GEN_SBOM_FRAMEWORK_RES := $(HOST_OUT_EXECUTABLES)/generate-sbom-framework_res
 .PHONY: layoutlib-framework_res-sbom
diff --git a/core/main.mk b/core/main.mk
index 62fa53d..5c280da 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -31,8 +31,7 @@
 .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),SOONG_CONFIG_$(n))
 .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),$(foreach k,$(SOONG_CONFIG_$(n)),SOONG_CONFIG_$(n)_$(k)))
 
-include $(SOONG_MAKEVARS_MK)
-
+include $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
 YACC :=$= $(BISON) -d
 
 include $(BUILD_SYSTEM)/clang/config.mk
@@ -276,12 +275,15 @@
 # Include all of the makefiles in the system
 #
 
-subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT).mk $(SOONG_ANDROID_MK)
+subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk $(SOONG_ANDROID_MK)
+
 # Android.mk files are only used on Linux builds, Mac only supports Android.bp
 ifeq ($(HOST_OS),linux)
   subdir_makefiles += $(file <$(OUT_DIR)/.module_paths/Android.mk.list)
 endif
-subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT).mk
+
+subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
+
 subdir_makefiles_total := $(words int $(subdir_makefiles) post finish)
 .KATI_READONLY := subdir_makefiles_total
 
@@ -688,11 +690,19 @@
           $(eval my_testcases := $(HOST_OUT_TESTCASES)),\
           $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
         $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+        $(eval prefix := ../../..)
+        $(if $(strip $(patsubst %x86,,$(COMPATIBILITY.$(suite).ARCH_DIRS.$(m)))), \
+          $(if $(strip $(patsubst %x86_64,,$(COMPATIBILITY.$(suite).ARCH_DIRS.$(m)))),$(eval prefix := ../..),),) \
+        $(eval link_target := $(prefix)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+        $(eval symlink := $(COMPATIBILITY.$(suite).ARCH_DIRS.$(m))/shared_libs/$(notdir $(f)))\
+        $(eval COMPATIBILITY.$(suite).SYMLINKS := \
+          $$(COMPATIBILITY.$(suite).SYMLINKS) $(f):$(link_target):$(symlink))\
         $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(call declare-copy-target-license-metadata,$(target),$(f)))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
           $$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
-          $(sort $(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES)))))))
+          $(sort $(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES))))))\
+  $(eval COMPATIBILITY.$(suite).SYMLINKS := $(sort $(COMPATIBILITY.$(suite).SYMLINKS))))
 endef
 
 $(call resolve-shared-libs-depes,TARGET_)
@@ -1850,48 +1860,34 @@
 filter_out_files += $(PRODUCT_OUT)/recovery/%
 endif
 
+# userdata.img
+ifndef BUILDING_USERDATA_IMAGE
+filter_out_files += $(PRODUCT_OUT)/data/%
+endif
+
 installed_files := $(sort $(filter-out $(filter_out_files),$(filter $(PRODUCT_OUT)/%,$(modules_to_install))))
 else
 installed_files := $(apps_only_installed_files)
 endif  # TARGET_BUILD_APPS
 
-# sbom-metadata.csv contains all raw data collected in Make for generating SBOM in generate-sbom.py.
-# There are multiple columns and each identifies the source of an installed file for a specific case.
-# The columns and their uses are described as below:
-#   installed_file: the file path on device, e.g. /product/app/Browser2/Browser2.apk
-#   module_path: the path of the module that generates the installed file, e.g. packages/apps/Browser2
-#   soong_module_type: Soong module type, e.g. android_app, cc_binary
-#   is_prebuilt_make_module: Y, if the installed file is from a prebuilt Make module, see prebuilt_internal.mk
-#   product_copy_files: the installed file is from variable PRODUCT_COPY_FILES, e.g. device/google/cuttlefish/shared/config/init.product.rc:product/etc/init/init.rc
-#   kernel_module_copy_files: the installed file is from variable KERNEL_MODULE_COPY_FILES, similar to product_copy_files
-#   is_platform_generated: this is an aggregated value including some small cases instead of adding more columns. It is set to Y if any case is Y
-#       is_build_prop: build.prop in each partition, see sysprop.mk.
-#       is_notice_file: NOTICE.xml.gz in each partition, see Makefile.
-#       is_dexpreopt_image_profile: see the usage of DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED in Soong and Make
-#       is_product_system_other_avbkey: see INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET
-#       is_system_other_odex_marker: see INSTALLED_SYSTEM_OTHER_ODEX_MARKER
-#       is_event_log_tags_file: see variable event_log_tags_file in Makefile
-#       is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile.
-#       is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile.
-#       is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile.
-#   build_output_path: the path of the built file, used to calculate checksum
-#   static_libraries/whole_static_libraries: list of module name of the static libraries the file links against, e.g. libclang_rt.builtins or libclang_rt.builtins_32
-#       Info of all static libraries of all installed files are collected in variable _all_static_libs that is used to list all the static library files in sbom-metadata.csv.
-#       See the second foreach loop in the rule of sbom-metadata.csv for the detailed info of static libraries collected in _all_static_libs.
-#   is_static_lib: whether the file is a static library
-
 metadata_list := $(OUT_DIR)/.module_paths/METADATA.list
 metadata_files := $(subst $(newline),$(space),$(file <$(metadata_list)))
-$(PRODUCT_OUT)/sbom-metadata.csv:
+
+# Create metadata for compliance support in Soong
+.PHONY: make-compliance-metadata
+make-compliance-metadata: \
+    $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-metadata.csv \
+    $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-modules.csv
+
+$(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-metadata.csv:
 	rm -f $@
-	echo 'installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib' >> $@
-	$(eval _all_static_libs :=)
+	echo 'installed_file,module_path,is_soong_module,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,static_libs,whole_static_libs,license_text' >> $@
 	$(foreach f,$(installed_files),\
 	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
 	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
 	  $(eval _build_output_path := $(PRODUCT_OUT)/$(_path_on_device)) \
 	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
-	  $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
+	  $(eval _is_soong_module := $(ALL_MODULES.$(_module_name).IS_SOONG_MODULE)) \
 	  $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
 	  $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \
 	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
@@ -1908,39 +1904,40 @@
 	  $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \
 	  $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \
 	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
-	  $(eval _static_libs := $(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES)) \
-	  $(eval _whole_static_libs := $(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES)) \
-	  $(foreach l,$(_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
-	  $(foreach l,$(_whole_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
-	  echo '/$(_path_on_device),$(_module_path),$(_soong_module_type),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_build_output_path),$(_static_libs),$(_whole_static_libs),' >> $@; \
-	)
-	$(foreach l,$(sort $(_all_static_libs)), \
-	  $(eval _lib_stem := $(call word-colon,1,$l)) \
-	  $(eval _module_path := $(call word-colon,2,$l)) \
-	  $(eval _soong_module_type := $(call word-colon,3,$l)) \
-	  $(eval _built_file := $(call word-colon,4,$l)) \
-	  $(eval _static_libs := $(ALL_STATIC_LIBRARIES.$l.STATIC_LIBRARIES)) \
-	  $(eval _whole_static_libs := $(ALL_STATIC_LIBRARIES.$l.WHOLE_STATIC_LIBRARIES)) \
-	  $(eval _is_static_lib := Y) \
-	  echo '$(_lib_stem).a,$(_module_path),$(_soong_module_type),,,,,$(_built_file),$(_static_libs),$(_whole_static_libs),$(_is_static_lib)' >> $@; \
+	  $(eval _static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES))) \
+	  $(eval _whole_static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES))) \
+	  $(eval _license_text := $(if $(filter $(_build_output_path),$(ALL_NON_MODULES)),$(ALL_NON_MODULES.$(_build_output_path).NOTICES))) \
+	  echo '$(_build_output_path),$(_module_path),$(_is_soong_module),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_static_libs),$(_whole_static_libs),$(_license_text)' >> $@; \
 	)
 
-# (TODO: b/272358583 find another way of always rebuilding sbom.spdx)
+$(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-modules.csv:
+	rm -f $@
+	echo 'name,module_path,module_class,module_type,static_libs,whole_static_libs,built_files,installed_files' >> $@
+	$(foreach m,$(ALL_MODULES), \
+	  $(eval _module_name := $m) \
+	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+	  $(eval _make_module_class := $(ALL_MODULES.$(_module_name).CLASS)) \
+	  $(eval _make_module_type := $(ALL_MODULES.$(_module_name).MAKE_MODULE_TYPE)) \
+	  $(eval _static_libs := $(strip $(sort $(ALL_MODULES.$(_module_name).STATIC_LIBS)))) \
+	  $(eval _whole_static_libs := $(strip $(sort $(ALL_MODULES.$(_module_name).WHOLE_STATIC_LIBS)))) \
+	  $(eval _built_files := $(strip $(sort $(ALL_MODULES.$(_module_name).BUILT)))) \
+	  $(eval _installed_files := $(strip $(sort $(ALL_MODULES.$(_module_name).INSTALLED)))) \
+	  $(eval _is_soong_module := $(ALL_MODULES.$(_module_name).IS_SOONG_MODULE)) \
+	  $(if $(_is_soong_module),, \
+		echo '$(_module_name),$(_module_path),$(_make_module_class),$(_make_module_type),$(_static_libs),$(_whole_static_libs),$(_built_files),$(_installed_files)' >> $@; \
+	  ) \
+	)
+
+$(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/installed_files.stamp: $(installed_files)
+	touch $@
+
 # Remove the always_dirty_file.txt whenever the makefile is evaluated
 $(shell rm -f $(PRODUCT_OUT)/always_dirty_file.txt)
 $(PRODUCT_OUT)/always_dirty_file.txt:
 	touch $@
 
 .PHONY: sbom
-ifeq ($(TARGET_BUILD_APPS),)
-sbom: $(PRODUCT_OUT)/sbom.spdx.json
-$(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx
-$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM) $(installed_files) $(metadata_list) $(metadata_files) $(PRODUCT_OUT)/always_dirty_file.txt
-	rm -rf $@
-	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json
-
-$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
-else
+ifneq ($(TARGET_BUILD_APPS),)
 # Create build rules for generating SBOMs of unbundled APKs and APEXs
 # $1: sbom file
 # $2: sbom fragment file
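
As a quick orientation for the new compliance metadata, the rows written by the make-metadata.csv rule above can be consumed with a few lines of Python. This is a hedged sketch: the column names come from the echoed header line, while the file path and helper name are illustrative only. make-modules.csv can be read the same way; its module_type column is filled in by the ALL_MODULES.*.MAKE_MODULE_TYPE assignments added to the internal .mk files later in this change.

import csv

def platform_generated_files(metadata_csv):
  """Yield installed files that Make marked as platform-generated."""
  with open(metadata_csv, newline='') as f:
    for row in csv.DictReader(f):
      # is_platform_generated concatenates several Y flags, so any
      # non-empty value means at least one of them was set.
      if row['is_platform_generated']:
        yield row['installed_file']
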
diff --git a/core/multi_prebuilt.mk b/core/multi_prebuilt.mk
index c97d481..415401b 100644
--- a/core/multi_prebuilt.mk
+++ b/core/multi_prebuilt.mk
@@ -132,3 +132,5 @@
 prebuilt_static_java_libraries :=
 prebuilt_is_host :=
 prebuilt_module_tags :=
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=MULTI_PREBUILT))
\ No newline at end of file
diff --git a/core/native_test.mk b/core/native_test.mk
index 8b49fbd..c12b211 100644
--- a/core/native_test.mk
+++ b/core/native_test.mk
@@ -21,3 +21,5 @@
 endif
 
 include $(BUILD_EXECUTABLE)
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=NATIVE_TEST))
\ No newline at end of file
diff --git a/core/node_fns.mk b/core/node_fns.mk
index 144eb8b..d2cee9e 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -203,7 +203,7 @@
   $(call _expand-inherited-values,$(1),$(2),$(3),$(4))
 
   $(eval $(1).$(2).inherited :=)
-  $(eval _include_stack := $(wordlist 2,9999,$$(_include_stack)))
+  $(eval _include_stack := $(wordlist 2,9999,$(_include_stack)))
 endef
 
 #
diff --git a/core/package_internal.mk b/core/package_internal.mk
index a03a62b..a7eb572 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -731,3 +731,5 @@
     )
   endif
 endif
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=PACKAGE))
\ No newline at end of file
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index e715fd1..a77956b 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -22,58 +22,32 @@
 
 
 # -----------------------------------------------------------------
-# Release Config Flags
-
-# Create a summary file of build flags for each partition
-# $(1): built build flags json file
-# $(2): installed build flags json file
-# $(3): flag names
-define generate-partition-build-flag-file
-$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
-$(eval $(strip $(1)): PRIVATE_FLAG_NAMES := $(strip $(3)))
-$(strip $(1)):
-	mkdir -p $$(dir $$(PRIVATE_OUT))
-	echo '{' > $$(PRIVATE_OUT)
-	echo '"flags": [' >> $$(PRIVATE_OUT)
-	$$(foreach flag, $$(PRIVATE_FLAG_NAMES), \
-		( \
-			printf '  { "name": "%s", "value": "%s", ' \
-					'$$(flag)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).VALUE)' \
-					; \
-			printf '"set": "%s", "default": "%s", "declared": "%s" }' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).SET_IN)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).DEFAULT)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).DECLARED_IN)' \
-					; \
-			printf '$$(if $$(filter $$(lastword $$(PRIVATE_FLAG_NAMES)),$$(flag)),,$$(comma))\n' ; \
-		) >> $$(PRIVATE_OUT) ; \
-	)
-	echo "]" >> $$(PRIVATE_OUT)
-	echo "}" >> $$(PRIVATE_OUT)
-$(call copy-one-file, $(1), $(2))
-endef
-
-$(foreach partition, $(_FLAG_PARTITIONS), \
-	$(eval build_flag_summaries.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/build_flags.json) \
-	$(eval $(call generate-partition-build-flag-file, \
-				$(TARGET_OUT_FLAGS)/$(partition)/build_flags.json, \
-				$(build_flag_summaries.$(partition)), \
-				$(_ALL_RELEASE_FLAGS.PARTITIONS.$(partition)) \
-			) \
-	) \
-)
-
-
-# -----------------------------------------------------------------
 # Aconfig Flags
 
 # Create a summary file of build flags for each partition
 # $(1): built aconfig flags file (out)
 # $(2): installed aconfig flags file (out)
-# $(3): input aconfig files for the partition (in)
+# $(3): the partition (in)
+# $(4): input aconfig files for the partition (in)
 define generate-partition-aconfig-flag-file
 $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
+$(eval $(strip $(1)): PRIVATE_IN := $(strip $(4)))
+$(strip $(1)): $(ACONFIG) $(strip $(4))
+	mkdir -p $$(dir $$(PRIVATE_OUT))
+	$$(if $$(PRIVATE_IN), \
+		$$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
+			--filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \
+		echo -n > $$(PRIVATE_OUT) \
+	)
+$(call copy-one-file, $(1), $(2))
+endef
+
+# Create a device-wide summary file of aconfig flags
+# $(1): built aconfig flags file (out)
+# $(2): installed aconfig flags file (out)
+# $(3): input aconfig files (in)
+define generate-global-aconfig-flag-file
+$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
 $(eval $(strip $(1)): PRIVATE_IN := $(strip $(3)))
 $(strip $(1)): $(ACONFIG) $(strip $(3))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
@@ -91,6 +65,7 @@
 	$(eval $(call generate-partition-aconfig-flag-file, \
 				$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
 				$(aconfig_flag_summaries_protobuf.$(partition)), \
+				$(partition), \
 				$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
 					$(ALL_MODULES.$(m).ACONFIG_FILES) \
 				)), \
@@ -105,7 +80,7 @@
 
 .PHONY: device_aconfig_declarations
 device_aconfig_declarations: $(PRODUCT_OUT)/device_aconfig_declarations.pb
-$(eval $(call generate-partition-aconfig-flag-file, \
+$(eval $(call generate-global-aconfig-flag-file, \
 			$(TARGET_OUT_FLAGS)/device_aconfig_declarations.pb, \
 			$(PRODUCT_OUT)/device_aconfig_declarations.pb, \
 			$(sort $(required_aconfig_flags_files)) \
@@ -165,9 +140,7 @@
 				$(aconfig_storage_package_map.$(partition)), \
 				$(aconfig_storage_flag_map.$(partition)), \
 				$(aconfig_storage_flag_val.$(partition)), \
-				$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
-					$(ALL_MODULES.$(m).ACONFIG_FILES) \
-				)), \
+				$(aconfig_flag_summaries_protobuf.$(partition)), \
 				$(partition), \
 	)) \
 )
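
To make the per-partition aconfig rule above easier to follow, here is a Python sketch of the same logic. The aconfig arguments (dump --dedup --format protobuf --out ... --filter container:<partition> --cache <file>) are copied from the rule; the function name and path handling are assumptions for illustration.

import pathlib
import subprocess

def dump_partition_flags(aconfig, out_path, partition, cache_files):
  """Mirror generate-partition-aconfig-flag-file for one partition."""
  out = pathlib.Path(out_path)
  out.parent.mkdir(parents=True, exist_ok=True)
  if cache_files:
    cmd = [aconfig, 'dump', '--dedup', '--format', 'protobuf',
           '--out', str(out), '--filter', f'container:{partition}']
    for cache in cache_files:
      cmd += ['--cache', cache]
    subprocess.run(cmd, check=True)
  else:
    # Matches `echo -n > $(PRIVATE_OUT)`: a partition with no aconfig
    # input still gets an (empty) flags file.
    out.write_bytes(b'')
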
diff --git a/core/phony_package.mk b/core/phony_package.mk
index 578d629..c978793 100644
--- a/core/phony_package.mk
+++ b/core/phony_package.mk
@@ -12,3 +12,5 @@
 	$(hide) echo "Fake: $@"
 	$(hide) mkdir -p $(dir $@)
 	$(hide) touch $@
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=PHONY_PACKAGE))
\ No newline at end of file
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 9462640..d5261f4 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -63,3 +63,5 @@
 $(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 my_prebuilt_src_file :=
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=PREBUILT))
\ No newline at end of file
diff --git a/core/product.mk b/core/product.mk
index 68d7721..8d86d92 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -26,6 +26,7 @@
 _product_single_value_vars += PRODUCT_MODEL
 _product_single_value_vars += PRODUCT_NAME_FOR_ATTESTATION
 _product_single_value_vars += PRODUCT_MODEL_FOR_ATTESTATION
+_product_single_value_vars += PRODUCT_BASE_OS
 
 # Defines the ELF segment alignment for binaries (executables and shared libraries).
 # The ELF segment alignment has to be a PAGE_SIZE multiple. For example, if
@@ -160,7 +161,6 @@
 # List of jars to be included in the ART boot image for testing.
 _product_list_vars += PRODUCT_TEST_ONLY_ART_BOOT_IMAGE_JARS
 
-_product_single_value_vars += PRODUCT_SUPPORTS_VBOOT
 _product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
 # List of system_server classpath jars on the platform.
 _product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
@@ -304,9 +304,6 @@
 # This flag implies PRODUCT_USE_DYNAMIC_PARTITIONS.
 _product_single_value_vars += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
 
-# List of tags that will be used to gate blueprint modules from the build graph
-_product_list_vars += PRODUCT_INCLUDE_TAGS
-
 # List of directories that will be used to gate blueprint modules from the build graph
 _product_list_vars += PRODUCT_SOURCE_ROOT_DIRS
 
@@ -491,6 +488,14 @@
 # Enables 16KB developer option for device if set.
 _product_single_value_vars += PRODUCT_16K_DEVELOPER_OPTION
 
+# If set, adb root will be disabled (really ro.debuggable=0) in userdebug
+# builds. It's already disabled in user builds. Eng builds are unaffected
+# by this flag.
+_product_single_value_vars += PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG
+
+# If set, the build will generate the system image from a Soong-defined module.
+_product_single_value_vars += PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
diff --git a/core/product_config.mk b/core/product_config.mk
index f21c1c4..738d4cf 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -280,27 +280,6 @@
 
 current_product_makefile :=
 
-#############################################################################
-# Check product include tag allowlist
-BLUEPRINT_INCLUDE_TAGS_ALLOWLIST := \
-  com.android.mainline_go \
-  com.android.mainline \
-  mainline_module_prebuilt_nightly \
-  mainline_module_prebuilt_monthly_release
-.KATI_READONLY := BLUEPRINT_INCLUDE_TAGS_ALLOWLIST
-$(foreach include_tag,$(PRODUCT_INCLUDE_TAGS), \
-	$(if $(filter $(include_tag),$(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST)),,\
-	$(call pretty-error, $(include_tag) is not in BLUEPRINT_INCLUDE_TAGS_ALLOWLIST: $(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST))))
-# Create default PRODUCT_INCLUDE_TAGS
-ifeq (, $(PRODUCT_INCLUDE_TAGS))
-# Soong analysis is global: even though a module might not be relevant to a specific product (e.g. build_tools for aosp_arm),
-# we still analyse it.
-# This means that in setups where we two have two prebuilts of module_sdk, we need a "default" to use in analysis
-# This should be a no-op in aosp and internal since no Android.bp file contains blueprint_package_includes
-# Use the big android one and main-based prebuilts by default
-PRODUCT_INCLUDE_TAGS += com.android.mainline mainline_module_prebuilt_nightly
-endif
-
 # AOSP and Google products currently share the same `apex_contributions` in next.
 # This causes issues when building <aosp_product>-next-userdebug in main.
 # Create a temporary allowlist to ignore the google apexes listed in `contents` of apex_contributions of `next`
@@ -332,6 +311,14 @@
 
 TARGET_DEVICE := $(PRODUCT_DEVICE)
 
+# Allow overriding PLATFORM_BASE_OS when PRODUCT_BASE_OS is defined
+ifdef PRODUCT_BASE_OS
+  PLATFORM_BASE_OS := $(PRODUCT_BASE_OS)
+else
+  PLATFORM_BASE_OS := $(PLATFORM_BASE_OS_ENV_INPUT)
+endif
+.KATI_READONLY := PLATFORM_BASE_OS
+
 # TODO: also keep track of things like "port", "land" in product files.
 
 # Figure out which resoure configuration options to use for this
@@ -426,6 +413,10 @@
   TARGET_AAPT_CHARACTERISTICS := $(PRODUCT_CHARACTERISTICS)
 endif
 
+ifndef PRODUCT_SHIPPING_API_LEVEL
+  PRODUCT_SHIPPING_API_LEVEL := 10000
+endif
+
 ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE
   ifneq (1,$(words $(PRODUCT_DEFAULT_DEV_CERTIFICATE)))
     $(error PRODUCT_DEFAULT_DEV_CERTIFICATE='$(PRODUCT_DEFAULT_DEV_CERTIFICATE)', \
@@ -578,11 +569,26 @@
 endif
 
 # This table maps sdk version 35 to vendor api level 202404 and assumes yearly
-# release for the same month.
+# release for the same month. If an API level of 10000 or higher is used, which
+# usually represents 'current' or 'future' API levels, zeros are appended to
+# preserve ordering. Specifically, API level 10,000 is converted to 10,000,000,
+# which importantly is greater than 202404 (202,404). This convention will not
+# break until the year 100,000 CE, the closest multiple of 10 whose breakage is
+# no earlier than the point at which 10,000 itself breaks as an API level.
 define sdk-to-vendor-api-level
-  $(if $(call math_lt_or_eq,$(1),34),$(1),20$(call int_subtract,$(1),11)04)
+$(if $(call math_lt_or_eq,$(1),34),$(1),$(if $(call math_lt,$(1),10000),20$(call int_subtract,$(1),11)04,$(1)000))
 endef
 
+ifneq ($(call sdk-to-vendor-api-level,34),34)
+$(error sdk-to-vendor-api-level is broken for pre-Trunk-Stable SDKs)
+endif
+ifneq ($(call sdk-to-vendor-api-level,35),202404)
+$(error sdk-to-vendor-api-level is broken for post-Trunk-Stable SDKs)
+endif
+ifneq ($(call sdk-to-vendor-api-level,10000),10000000)
+$(error sdk-to-vendor-api-level is broken for current $(call sdk-to-vendor-api-level,10000))
+endif
+
 ifdef PRODUCT_SHIPPING_VENDOR_API_LEVEL
 # Follow the version that is set manually.
   VSR_VENDOR_API_LEVEL := $(PRODUCT_SHIPPING_VENDOR_API_LEVEL)
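
As a worked example of the sdk-to-vendor-api-level mapping discussed above, the Python sketch below mirrors the macro's string arithmetic (the function is illustrative, not part of the change). The three asserts correspond to the ifneq sanity checks added in this hunk.

def sdk_to_vendor_api_level(sdk):
  if sdk <= 34:
    return sdk                     # pre-Trunk-Stable SDKs pass through unchanged
  if sdk < 10000:
    return int(f'20{sdk - 11}04')  # 35 -> 202404, 36 -> 202504, ...
  return sdk * 1000                # 'current'/'future': 10000 -> 10000000

assert sdk_to_vendor_api_level(34) == 34
assert sdk_to_vendor_api_level(35) == 202404
assert sdk_to_vendor_api_level(10000) == 10000000
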
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 59e2c95..20344f4 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -382,6 +382,11 @@
     _soong_config_namespace(g, nsname)
     g[_soong_config_namespaces_key][nsname][var]=_mkstrip(value)
 
+def _soong_config_set_bool(g, nsname, var, value):
+    """Assigns the value to the variable in the namespace, and marks it as a boolean."""
+    _soong_config_set(g, nsname, var, _filter("true", value))
+    g["SOONG_CONFIG_TYPE_%s_%s" % (nsname, var)] = "bool"
+
 def _soong_config_append(g, nsname, var, value):
     """Appends to the value of the variable in the namespace."""
     _soong_config_namespace(g, nsname)
@@ -861,6 +866,7 @@
     soong_config_namespace = _soong_config_namespace,
     soong_config_append = _soong_config_append,
     soong_config_set = _soong_config_set,
+    soong_config_set_bool = _soong_config_set_bool,
     soong_config_get = _soong_config_get,
     abspath = _abspath,
     add_product_dex_preopt_module_config = _add_product_dex_preopt_module_config,
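
The new soong_config_set_bool helper is Starlark; the Python sketch below only illustrates its observable effect under a simplified storage model (the real implementation routes through _soong_config_set and _filter as shown in the hunk above). The SOONG_CONFIG_TYPE_* entry it records is what the VendorVarTypes map in soong_config.mk later picks up.

def soong_config_set_bool(g, nsname, var, value):
  """Record a boolean Soong config variable in a simplified globals dict."""
  namespaces = g.setdefault('SOONG_CONFIG_NAMESPACES', {})
  # Only the literal string "true" survives the _filter("true", value) call;
  # anything else collapses to the empty string.
  namespaces.setdefault(nsname, {})[var] = 'true' if value == 'true' else ''
  g['SOONG_CONFIG_TYPE_%s_%s' % (nsname, var)] = 'bool'
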
diff --git a/core/ravenwood_test_config_template.xml b/core/ravenwood_test_config_template.xml
index 16a22c0..2f21bae 100644
--- a/core/ravenwood_test_config_template.xml
+++ b/core/ravenwood_test_config_template.xml
@@ -18,10 +18,9 @@
     <option name="test-suite-tag" value="ravenwood" />
     <option name="test-suite-tag" value="ravenwood-tests" />
 
-    <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" />
+    <option name="java-folder" value="prebuilts/jdk/jdk21/linux-x86/" />
     <option name="use-ravenwood-resources" value="true" />
     <option name="exclude-paths" value="java" />
-    <option name="socket-timeout" value="10000" />
     <option name="null-device" value="true" />
 
     {EXTRA_CONFIGS}
diff --git a/core/release_config.mk b/core/release_config.mk
index 4fdd279..fe2170e 100644
--- a/core/release_config.mk
+++ b/core/release_config.mk
@@ -130,6 +130,10 @@
         # Disable the build flag in release-config.
         _args += --guard=false
     endif
+    _args += --allow-missing=true
+    ifneq (,$(TARGET_PRODUCT))
+        _args += --product $(TARGET_PRODUCT)
+    endif
     _flags_dir:=$(OUT_DIR)/soong/release-config
     _flags_file:=$(_flags_dir)/release_config-$(TARGET_PRODUCT)-$(TARGET_RELEASE).vars
     # release-config generates $(_flags_varmk)
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml
index 56d2312..b1d0c2f 100644
--- a/core/robolectric_test_config_template.xml
+++ b/core/robolectric_test_config_template.xml
@@ -18,7 +18,7 @@
     <option name="test-suite-tag" value="robolectric" />
     <option name="test-suite-tag" value="robolectric-tests" />
 
-    <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" />
+    <option name="java-folder" value="prebuilts/jdk/jdk21/linux-x86/" />
     <option name="exclude-paths" value="java" />
     <option name="use-robolectric-resources" value="true" />
 
diff --git a/core/rust_device_test_config_template.xml b/core/rust_device_test_config_template.xml
index bfd2f47..aacabcb 100644
--- a/core/rust_device_test_config_template.xml
+++ b/core/rust_device_test_config_template.xml
@@ -20,11 +20,11 @@
 
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
-        <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+        <option name="push" value="{MODULE}->{TEST_INSTALL_BASE}/{MODULE}" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.rust.RustBinaryTest" >
-        <option name="test-device-path" value="/data/local/tmp" />
+        <option name="test-device-path" value="{TEST_INSTALL_BASE}" />
         <option name="module-name" value="{MODULE}" />
     </test>
 </configuration>
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 2f510d9..ae34cb8 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -101,4 +101,6 @@
 $(LOCAL_BUILT_MODULE): $(my_coverage_path)/$(GCNO_ARCHIVE)
 endif
 
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=SHARED_LIBRARY))
+
 endif  # skip_build_from_source
diff --git a/core/soong_android_app_set.mk b/core/soong_android_app_set.mk
index ec3d8c8..d97980d 100644
--- a/core/soong_android_app_set.mk
+++ b/core/soong_android_app_set.mk
@@ -9,10 +9,6 @@
 LOCAL_BUILT_MODULE_STEM := package.apk
 LOCAL_INSTALLED_MODULE_STEM := $(notdir $(LOCAL_PREBUILT_MODULE_FILE))
 
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 3aa244c..df1cf2d 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -29,16 +29,6 @@
 full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
 
 
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_CLASSES_JAR)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR)
-
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index a1c6478..da60832 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -38,10 +38,6 @@
   endif
 endif
 
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-
 my_check_same_vndk_variants :=
 same_vndk_variants_stamp :=
 ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
@@ -61,7 +57,7 @@
   # Note that because `checkbuild` doesn't check LOCAL_BUILT_MODULE for soong-built modules adding
   # the timestamp to LOCAL_BUILT_MODULE isn't enough. It is skipped when the vendor variant
   # isn't used at all and it may break in the downstream trees.
-  LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp)
+  LOCAL_ADDITIONAL_CHECKED_MODULE += $(same_vndk_variants_stamp)
 endif
 
 #######################################
diff --git a/core/soong_config.mk b/core/soong_config.mk
index be5a6e6..2e7cd9f 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -1,6 +1,5 @@
-SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk
-SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables
-SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
+SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
+SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
 
 include $(BUILD_SYSTEM)/art_config.mk
 include $(BUILD_SYSTEM)/dex_preopt_config.mk
@@ -27,9 +26,10 @@
 $(shell mkdir -p $(dir $(SOONG_VARIABLES)))
 $(call json_start)
 
-$(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT))
+$(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT)$(COVERAGE_SUFFIX))
 
 $(call add_json_str,  BuildId,                           $(BUILD_ID))
+$(call add_json_str,  BuildFingerprintFile,              build_fingerprint.txt)
 $(call add_json_str,  BuildNumberFile,                   build_number.txt)
 $(call add_json_str,  BuildHostnameFile,                 build_hostname.txt)
 $(call add_json_str,  BuildThumbprintFile,               build_thumbprint.txt)
@@ -109,6 +109,8 @@
 $(call add_json_list, AAPTPrebuiltDPI,                   $(PRODUCT_AAPT_PREBUILT_DPI))
 
 $(call add_json_str,  DefaultAppCertificate,             $(PRODUCT_DEFAULT_DEV_CERTIFICATE))
+$(call add_json_list, ExtraOtaKeys,                      $(PRODUCT_EXTRA_OTA_KEYS))
+$(call add_json_list, ExtraOtaRecoveryKeys,              $(PRODUCT_EXTRA_RECOVERY_KEYS))
 $(call add_json_str,  MainlineSepolicyDevCertificates,   $(MAINLINE_SEPOLICY_DEV_CERTIFICATES))
 
 $(call add_json_str,  AppsDefaultVersionName,            $(APPS_DEFAULT_VERSION_NAME))
@@ -150,7 +152,6 @@
 $(call add_json_str,  VendorApiLevel,                    $(BOARD_API_LEVEL))
 $(call add_json_list, ExtraVndkVersions,                 $(PRODUCT_EXTRA_VNDK_VERSIONS))
 $(call add_json_list, DeviceSystemSdkVersions,           $(BOARD_SYSTEMSDK_VERSIONS))
-$(call add_json_str,  RecoverySnapshotVersion,           $(RECOVERY_SNAPSHOT_VERSION))
 $(call add_json_list, Platform_systemsdk_versions,       $(PLATFORM_SYSTEMSDK_VERSIONS))
 $(call add_json_bool, Malloc_low_memory,                 $(findstring true,$(MALLOC_SVELTE) $(MALLOC_LOW_MEMORY)))
 $(call add_json_bool, Malloc_zero_contents,              $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))
@@ -165,8 +166,6 @@
 $(call add_json_list, BootJars,                          $(PRODUCT_BOOT_JARS))
 $(call add_json_list, ApexBootJars,                      $(filter-out $(APEX_BOOT_JARS_EXCLUDED), $(PRODUCT_APEX_BOOT_JARS)))
 
-$(call add_json_bool, VndkSnapshotBuildArtifacts,        $(VNDK_SNAPSHOT_BUILD_ARTIFACTS))
-
 $(call add_json_map,  BuildFlags)
 $(foreach flag,$(_ALL_RELEASE_FLAGS),\
   $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).VALUE)))
@@ -176,24 +175,6 @@
   $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).TYPE)))
 $(call end_json_map)
 
-$(call add_json_bool, DirectedVendorSnapshot,            $(DIRECTED_VENDOR_SNAPSHOT))
-$(call add_json_map,  VendorSnapshotModules)
-$(foreach module,$(VENDOR_SNAPSHOT_MODULES),\
-  $(call add_json_bool,$(module),true))
-$(call end_json_map)
-
-$(call add_json_bool, DirectedRecoverySnapshot,          $(DIRECTED_RECOVERY_SNAPSHOT))
-$(call add_json_map,  RecoverySnapshotModules)
-$(foreach module,$(RECOVERY_SNAPSHOT_MODULES),\
-  $(call add_json_bool,$(module),true))
-$(call end_json_map)
-
-$(call add_json_list, VendorSnapshotDirsIncluded,        $(VENDOR_SNAPSHOT_DIRS_INCLUDED))
-$(call add_json_list, VendorSnapshotDirsExcluded,        $(VENDOR_SNAPSHOT_DIRS_EXCLUDED))
-$(call add_json_list, RecoverySnapshotDirsIncluded,      $(RECOVERY_SNAPSHOT_DIRS_INCLUDED))
-$(call add_json_list, RecoverySnapshotDirsExcluded,      $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
-$(call add_json_bool, HostFakeSnapshotEnabled,           $(HOST_FAKE_SNAPSHOT_ENABLE))
-
 $(call add_json_bool, MultitreeUpdateMeta,               $(filter true,$(TARGET_MULTITREE_UPDATE_META)))
 
 $(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
@@ -225,6 +206,7 @@
 $(call add_json_str,  BoardSepolicyVers,                 $(BOARD_SEPOLICY_VERS))
 $(call add_json_str,  SystemExtSepolicyPrebuiltApiDir,   $(BOARD_SYSTEM_EXT_PREBUILT_DIR))
 $(call add_json_str,  ProductSepolicyPrebuiltApiDir,     $(BOARD_PRODUCT_PREBUILT_DIR))
+$(call add_json_str,  BoardPlatform,                     $(TARGET_BOARD_PLATFORM))
 
 $(call add_json_str,  PlatformSepolicyVersion,           $(PLATFORM_SEPOLICY_VERSION))
 $(call add_json_list, PlatformSepolicyCompatVersions,    $(PLATFORM_SEPOLICY_COMPAT_VERSIONS))
@@ -262,6 +244,18 @@
   $(call end_json_map))
 $(call end_json_map)
 
+# Add the types of the variables in VendorVars. Since VendorVarTypes is much newer
+# than VendorVars, which has a history of using plain string values for everything,
+# variables are assumed to be strings by default. For strings, SOONG_CONFIG_TYPE_*
+# is not set, and they do not get an entry in the VendorVarTypes map.
+$(call add_json_map, VendorVarTypes)
+$(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\
+  $(call add_json_map, $(namespace))\
+  $(foreach key,$(sort $(SOONG_CONFIG_$(namespace))),\
+    $(if $(SOONG_CONFIG_TYPE_$(namespace)_$(key)),$(call add_json_str,$(key),$(subst ",\",$(SOONG_CONFIG_TYPE_$(namespace)_$(key))))))\
+  $(call end_json_map))
+$(call end_json_map)
+
 $(call add_json_bool, EnforceProductPartitionInterface,  $(filter true,$(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE)))
 $(call add_json_str,  DeviceCurrentApiLevelForVendorModules,  $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
 
@@ -282,7 +276,7 @@
 $(call add_json_bool, BoardMoveRecoveryResourcesToVendorBoot, $(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
 $(call add_json_str,  PrebuiltHiddenApiDir, $(BOARD_PREBUILT_HIDDENAPI_DIR))
 
-$(call add_json_str,  ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
+$(call add_json_str,  Shipping_api_level, $(PRODUCT_SHIPPING_API_LEVEL))
 
 $(call add_json_list, BuildBrokenPluginValidation,         $(BUILD_BROKEN_PLUGIN_VALIDATION))
 $(call add_json_bool, BuildBrokenClangProperty,            $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
@@ -297,6 +291,7 @@
 $(call add_json_bool, BuildBrokenIncorrectPartitionImages, $(filter true,$(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES)))
 $(call add_json_list, BuildBrokenInputDirModules,          $(BUILD_BROKEN_INPUT_DIR_MODULES))
 $(call add_json_bool, BuildBrokenDontCheckSystemSdk,       $(filter true,$(BUILD_BROKEN_DONT_CHECK_SYSTEMSDK)))
+$(call add_json_bool, BuildBrokenDupSysprop,               $(filter true,$(BUILD_BROKEN_DUP_SYSPROP)))
 
 $(call add_json_list, BuildWarningBadOptionalUsesLibsAllowlist,    $(BUILD_WARNING_BAD_OPTIONAL_USES_LIBS_ALLOWLIST))
 
@@ -313,7 +308,6 @@
 
 $(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
 
-$(call add_json_list, IncludeTags,                $(PRODUCT_INCLUDE_TAGS))
 $(call add_json_list, SourceRootDirs,             $(PRODUCT_SOURCE_ROOT_DIRS))
 
 $(call add_json_list, AfdoProfiles,                $(ALL_AFDO_PROFILES))
@@ -327,8 +321,6 @@
 
 $(call add_json_bool, ReleaseDefaultModuleBuildFromSource,   $(RELEASE_DEFAULT_MODULE_BUILD_FROM_SOURCE))
 
-$(call add_json_bool, KeepVndk, $(filter true,$(KEEP_VNDK)))
-
 $(call add_json_bool, CheckVendorSeappViolations, $(filter true,$(CHECK_VENDOR_SEAPP_VIOLATIONS)))
 
 $(call add_json_bool, BuildIgnoreApexContributionContents, $(PRODUCT_BUILD_IGNORE_APEX_CONTRIBUTION_CONTENTS))
@@ -349,6 +341,22 @@
 
 $(call add_json_list, OemProperties, $(PRODUCT_OEM_PROPERTIES))
 
+$(call add_json_list, SystemPropFiles, $(TARGET_SYSTEM_PROP))
+$(call add_json_list, SystemExtPropFiles, $(TARGET_SYSTEM_EXT_PROP))
+$(call add_json_list, ProductPropFiles, $(TARGET_PRODUCT_PROP))
+$(call add_json_list, OdmPropFiles, $(TARGET_ODM_PROP))
+
+# Do not set ArtTargetIncludeDebugBuild to any value if PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD is not set,
+# so that the behavior matches runtime_libart.mk.
+ifneq ($(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD),)
+$(call add_json_bool, ArtTargetIncludeDebugBuild, $(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD))
+endif
+
+_config_enable_uffd_gc := \
+  $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
+$(call add_json_str, EnableUffdGc, $(_config_enable_uffd_gc))
+_config_enable_uffd_gc :=
+
 $(call json_end)
 
 $(file >$(SOONG_VARIABLES).tmp,$(json_contents))
@@ -359,4 +367,6 @@
 	  rm $(SOONG_VARIABLES).tmp; \
 	fi)
 
+include $(BUILD_SYSTEM)/soong_extra_config.mk
+
 endif # CONFIGURE_SOONG
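
For reference, the VendorVarTypes map added above lands next to VendorVars in the generated Soong variables file with roughly the shape below (namespace and variable names are invented for the example).

soong_variables_fragment = {
    'VendorVars': {
        'my_namespace': {'my_bool_var': 'true', 'my_string_var': 'foo'},
    },
    'VendorVarTypes': {
        # Only variables with a SOONG_CONFIG_TYPE_* entry show up here;
        # untyped variables are omitted and default to string.
        'my_namespace': {'my_bool_var': 'bool'},
    },
}
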
diff --git a/core/soong_extra_config.mk b/core/soong_extra_config.mk
new file mode 100644
index 0000000..00b5c0f
--- /dev/null
+++ b/core/soong_extra_config.mk
@@ -0,0 +1,106 @@
+$(call json_start)
+
+$(call add_json_str, DeviceCpuVariantRuntime,           $(TARGET_CPU_VARIANT_RUNTIME))
+$(call add_json_str, DeviceAbiList,                     $(TARGET_CPU_ABI_LIST))
+$(call add_json_str, DeviceAbiList32,                   $(TARGET_CPU_ABI_LIST_32_BIT))
+$(call add_json_str, DeviceAbiList64,                   $(TARGET_CPU_ABI_LIST_64_BIT))
+$(call add_json_str, DeviceSecondaryCpuVariantRuntime,  $(TARGET_2ND_CPU_VARIANT_RUNTIME))
+
+$(call add_json_str, Dex2oatTargetCpuVariantRuntime,         $(DEX2OAT_TARGET_CPU_VARIANT_RUNTIME))
+$(call add_json_str, Dex2oatTargetInstructionSetFeatures,    $(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES))
+$(call add_json_str, SecondaryDex2oatCpuVariantRuntime,      $($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT_RUNTIME))
+$(call add_json_str, SecondaryDex2oatInstructionSetFeatures, $($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES))
+
+$(call add_json_str, BoardPlatform,          $(TARGET_BOARD_PLATFORM))
+$(call add_json_str, BoardShippingApiLevel,  $(BOARD_SHIPPING_API_LEVEL))
+$(call add_json_str, ShippingApiLevel,       $(PRODUCT_SHIPPING_API_LEVEL))
+$(call add_json_str, ShippingVendorApiLevel, $(PRODUCT_SHIPPING_VENDOR_API_LEVEL))
+
+$(call add_json_str, ProductModel,                      $(PRODUCT_MODEL))
+$(call add_json_str, ProductModelForAttestation,        $(PRODUCT_MODEL_FOR_ATTESTATION))
+$(call add_json_str, ProductBrandForAttestation,        $(PRODUCT_BRAND_FOR_ATTESTATION))
+$(call add_json_str, ProductNameForAttestation,         $(PRODUCT_NAME_FOR_ATTESTATION))
+$(call add_json_str, ProductDeviceForAttestation,       $(PRODUCT_DEVICE_FOR_ATTESTATION))
+$(call add_json_str, ProductManufacturerForAttestation, $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))
+
+$(call add_json_str, SystemBrand, $(PRODUCT_SYSTEM_BRAND))
+$(call add_json_str, SystemDevice, $(PRODUCT_SYSTEM_DEVICE))
+$(call add_json_str, SystemManufacturer, $(PRODUCT_SYSTEM_MANUFACTURER))
+$(call add_json_str, SystemModel, $(PRODUCT_SYSTEM_MODEL))
+$(call add_json_str, SystemName, $(PRODUCT_SYSTEM_NAME))
+
+# Collapses ?= and = operators for system property variables. Also removes double quotes to prevent
+# malformed JSON. This change aligns with the existing behavior of sysprop.mk, which passes property
+# variables to the echo command, effectively discarding surrounding double quotes.
+define collapse-prop-pairs
+$(subst ",,$(call collapse-pairs,$(call collapse-pairs,$$($(1)),?=),=))
+endef
+
+$(call add_json_list, PRODUCT_SYSTEM_PROPERTIES,         $(call collapse-prop-pairs,PRODUCT_SYSTEM_PROPERTIES))
+$(call add_json_list, PRODUCT_SYSTEM_DEFAULT_PROPERTIES, $(call collapse-prop-pairs,PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
+$(call add_json_list, PRODUCT_SYSTEM_EXT_PROPERTIES,     $(call collapse-prop-pairs,PRODUCT_SYSTEM_EXT_PROPERTIES))
+$(call add_json_list, PRODUCT_VENDOR_PROPERTIES,         $(call collapse-prop-pairs,PRODUCT_VENDOR_PROPERTIES))
+$(call add_json_list, PRODUCT_PRODUCT_PROPERTIES,        $(call collapse-prop-pairs,PRODUCT_PRODUCT_PROPERTIES))
+$(call add_json_list, PRODUCT_ODM_PROPERTIES,            $(call collapse-prop-pairs,PRODUCT_ODM_PROPERTIES))
+$(call add_json_list, PRODUCT_PROPERTY_OVERRIDES,        $(call collapse-prop-pairs,PRODUCT_PROPERTY_OVERRIDES))
+
+$(call add_json_str, BootloaderBoardName, $(TARGET_BOOTLOADER_BOARD_NAME))
+
+$(call add_json_bool, SdkBuild, $(filter sdk sdk_addon,$(MAKECMDGOALS)))
+
+$(call add_json_str, SystemServerCompilerFilter, $(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
+
+$(call add_json_bool, Product16KDeveloperOption, $(filter true,$(PRODUCT_16K_DEVELOPER_OPTION)))
+
+$(call add_json_str, RecoveryDefaultRotation, $(TARGET_RECOVERY_DEFAULT_ROTATION))
+$(call add_json_str, RecoveryOverscanPercent, $(TARGET_RECOVERY_OVERSCAN_PERCENT))
+$(call add_json_str, RecoveryPixelFormat, $(TARGET_RECOVERY_PIXEL_FORMAT))
+
+ifdef AB_OTA_UPDATER
+$(call add_json_bool, AbOtaUpdater, $(filter true,$(AB_OTA_UPDATER)))
+$(call add_json_str, AbOtaPartitions, $(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS))))
+endif
+
+ifdef PRODUCT_USE_DYNAMIC_PARTITIONS
+$(call add_json_bool, UseDynamicPartitions, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITIONS)))
+endif
+
+ifdef PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+$(call add_json_bool, RetrofitDynamicPartitions, $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)))
+endif
+
+$(call add_json_bool, DontUseVabcOta, $(filter true,$(BOARD_DONT_USE_VABC_OTA)))
+
+$(call add_json_bool, FullTreble, $(filter true,$(PRODUCT_FULL_TREBLE)))
+
+$(call add_json_bool, NoBionicPageSizeMacro, $(filter true,$(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)))
+
+$(call add_json_bool, PropertySplitEnabled, $(filter true,$(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED)))
+
+$(call add_json_str, ScreenDensity, $(TARGET_SCREEN_DENSITY))
+
+$(call add_json_bool, UsesVulkan, $(filter true,$(TARGET_USES_VULKAN)))
+
+$(call add_json_bool, ZygoteForce64, $(filter true,$(ZYGOTE_FORCE_64)))
+
+$(call add_json_str, VendorSecurityPatch,       $(VENDOR_SECURITY_PATCH))
+$(call add_json_str, VendorImageFileSystemType, $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE))
+
+$(call add_json_list, BuildVersionTags, $(BUILD_VERSION_TAGS))
+
+$(call add_json_bool, ProductNotDebuggableInUserdebug, $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG))
+
+$(call add_json_bool, UsesProductImage, $(filter true,$(BOARD_USES_PRODUCTIMAGE)))
+
+$(call add_json_bool, TargetBoots16K, $(filter true,$(TARGET_BOOTS_16K)))
+
+$(call json_end)
+
+$(shell mkdir -p $(dir $(SOONG_EXTRA_VARIABLES)))
+$(file >$(SOONG_EXTRA_VARIABLES).tmp,$(json_contents))
+
+$(shell if ! cmp -s $(SOONG_EXTRA_VARIABLES).tmp $(SOONG_EXTRA_VARIABLES); then \
+	  mv $(SOONG_EXTRA_VARIABLES).tmp $(SOONG_EXTRA_VARIABLES); \
+	else \
+	  rm $(SOONG_EXTRA_VARIABLES).tmp; \
+	fi)
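
A small before/after sketch of what collapse-prop-pairs does to a property list before it is serialized to JSON (property names and values are made up, and the exact whitespace handling is Make's collapse-pairs, so treat this as approximate).

raw       = 'ro.foo ?= bar ro.baz = "qux"'  # as written in a product makefile
collapsed = 'ro.foo?=bar ro.baz=qux'        # ?=/= pairs joined, double quotes stripped
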
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 7f85231..8c3882f 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -21,19 +21,6 @@
 full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
 common_javalib.jar := $(intermediates.COMMON)/javalib.jar
 
-ifdef LOCAL_SOONG_AAR
-  LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_AAR)
-endif
-
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR)
-
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
@@ -115,16 +102,14 @@
     boot_jars := $(foreach pair,$(PRODUCT_BOOT_JARS), $(call word-colon,2,$(pair)))
     ifneq ($(filter $(LOCAL_MODULE),$(boot_jars)),) # is_boot_jar
       ifeq (true,$(WITH_DEXPREOPT))
-        # $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) contains modules that installs
-        # all of bootjars' dexpreopt files (.art, .oat, .vdex, ...)
+        # The dex_bootjars singleton installs all of the boot jars' dexpreopt files (.art, .oat, .vdex, ...)
+        # This includes both the primary and secondary arches.
         # Add them to the required list so they are installed alongside this module.
-        ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET += \
-          $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) \
-          $(2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE)
+        ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET += dex_bootjars
         # Copy $(LOCAL_BUILT_MODULE) and its dependencies when installing boot.art
         # so that dependencies of $(LOCAL_BUILT_MODULE) (which may include
         # jacoco-report-classes.jar) are copied for every build.
-        $(foreach m,$(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) $(2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE), \
+        $(foreach m,dex_bootjars, \
           $(eval $(call add-dependency,$(firstword $(call module-installed-files,$(m))),$(LOCAL_BUILT_MODULE))) \
         )
       endif
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 4a72a1f..dd1d8b5 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -229,3 +229,5 @@
 aar_classes_jar :=
 all_res_assets :=
 LOCAL_IS_STATIC_JAVA_LIBRARY :=
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=STATIC_JAVA_LIBRARY))
\ No newline at end of file
diff --git a/core/static_library_internal.mk b/core/static_library_internal.mk
index 0392460..844360e 100644
--- a/core/static_library_internal.mk
+++ b/core/static_library_internal.mk
@@ -41,3 +41,5 @@
 $(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries)
 	$(package-coverage-files)
 endif
+
+$(if $(my_register_name),$(eval ALL_MODULES.$(my_register_name).MAKE_MODULE_TYPE:=STATIC_LIBRARY))
\ No newline at end of file
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 47d8a41..dc6f2c4 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -33,34 +33,26 @@
     echo "# from generate-common-build-props" >> $(2);\
     echo "# These properties identify this partition image." >> $(2);\
     echo "####################################" >> $(2);\
-    $(if $(filter system,$(1)),\
-        echo "ro.product.$(1).brand=$(PRODUCT_SYSTEM_BRAND)" >> $(2);\
-        echo "ro.product.$(1).device=$(PRODUCT_SYSTEM_DEVICE)" >> $(2);\
-        echo "ro.product.$(1).manufacturer=$(PRODUCT_SYSTEM_MANUFACTURER)" >> $(2);\
-        echo "ro.product.$(1).model=$(PRODUCT_SYSTEM_MODEL)" >> $(2);\
-        echo "ro.product.$(1).name=$(PRODUCT_SYSTEM_NAME)" >> $(2);\
-      ,\
-        echo "ro.product.$(1).brand=$(PRODUCT_BRAND)" >> $(2);\
-        echo "ro.product.$(1).device=$(TARGET_DEVICE)" >> $(2);\
-        echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
-        echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
-        echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
-        if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_BRAND_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_NAME_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_DEVICE_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.device_for_attestation=$(PRODUCT_DEVICE_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.manufacturer_for_attestation=$(PRODUCT_MANUFACTURER_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-    )\
+    echo "ro.product.$(1).brand=$(PRODUCT_BRAND)" >> $(2);\
+    echo "ro.product.$(1).device=$(TARGET_DEVICE)" >> $(2);\
+    echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
+    echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
+    echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
+    if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_BRAND_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_NAME_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_DEVICE_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.device_for_attestation=$(PRODUCT_DEVICE_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.manufacturer_for_attestation=$(PRODUCT_MANUFACTURER_FOR_ATTESTATION)" >> $(2);\
+    fi; \
     $(if $(filter true,$(ZYGOTE_FORCE_64)),\
         $(if $(filter vendor,$(1)),\
             echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
@@ -226,50 +218,11 @@
 # -----------------------------------------------------------------
 # system/build.prop
 #
-# Note: parts of this file that can't be generated by the build-properties
-# macro are manually created as separate files and then fed into the macro
-
-buildinfo_prop := $(call intermediates-dir-for,ETC,buildinfo.prop)/buildinfo.prop
-
-ifdef TARGET_SYSTEM_PROP
-system_prop_file := $(TARGET_SYSTEM_PROP)
-else
-system_prop_file := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
-endif
-
-_prop_files_ := \
-  $(buildinfo_prop) \
-  $(system_prop_file)
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_SYSTEM_PROPERTIES \
-    PRODUCT_SYSTEM_PROPERTIES
-
-# TODO(b/117892318): deprecate this
-_prop_vars_ += \
-    PRODUCT_SYSTEM_DEFAULT_PROPERTIES
-
-ifndef property_overrides_split_enabled
-_prop_vars_ += \
-    ADDITIONAL_VENDOR_PROPERTIES \
-    PRODUCT_VENDOR_PROPERTIES
-endif
+# system/build.prop is built by Soong. See system-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_BUILD_PROP_TARGET := $(TARGET_OUT)/build.prop
 
-$(eval $(call build-properties,\
-    system,\
-    $(INSTALLED_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_BUILD_PROP_TARGET)))
-
 # -----------------------------------------------------------------
 # vendor/build.prop
 #
@@ -313,83 +266,18 @@
 # -----------------------------------------------------------------
 # product/etc/build.prop
 #
-
-_prop_files_ := $(if $(TARGET_PRODUCT_PROP),\
-    $(TARGET_PRODUCT_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/product.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_PRODUCT_PROPERTIES \
-    PRODUCT_PRODUCT_PROPERTIES
+# product/etc/build.prop is built by Soong. See product-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/etc/build.prop
 
-ifdef PRODUCT_OEM_PROPERTIES
-import_oem_prop := $(call intermediates-dir-for,ETC,import_oem_prop)/oem.prop
-
-$(import_oem_prop):
-	$(hide) echo "####################################" >> $@; \
-	        echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
-	        echo "####################################" >> $@;
-	$(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \
-	    echo "import /oem/oem.prop $(prop)" >> $@;)
-
-_footers_ := $(import_oem_prop)
-else
-_footers_ :=
-endif
-
-# Skip common /product properties generation if device released before R and
-# has no product partition. This is the first part of the check.
-ifeq ($(call math_lt,$(if $(PRODUCT_SHIPPING_API_LEVEL),$(PRODUCT_SHIPPING_API_LEVEL),30),30), true)
-  _skip_common_properties := true
-endif
-
-# The second part of the check - always generate common properties for the
-# devices with product partition regardless of shipping level.
-ifneq ($(BOARD_USES_PRODUCTIMAGE),)
-  _skip_common_properties :=
-endif
-
-$(eval $(call build-properties,\
-    product,\
-    $(INSTALLED_PRODUCT_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(_footers_),\
-    $(_skip_common_properties)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_PRODUCT_BUILD_PROP_TARGET)))
-
-_skip_common_properties :=
-
 # ----------------------------------------------------------------
 # odm/etc/build.prop
 #
-_prop_files_ := $(if $(TARGET_ODM_PROP),\
-    $(TARGET_ODM_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/odm.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_ODM_PROPERTIES \
-    PRODUCT_ODM_PROPERTIES
+# odm/etc/build.prop is built by Soong. See odm-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_ODM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM)/etc/build.prop
-$(eval $(call build-properties,\
-    odm,\
-    $(INSTALLED_ODM_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_ODM_BUILD_PROP_TARGET)))
 
 # ----------------------------------------------------------------
 # vendor_dlkm/etc/build.prop
@@ -442,25 +330,10 @@
 # -----------------------------------------------------------------
 # system_ext/etc/build.prop
 #
-_prop_files_ := $(if $(TARGET_SYSTEM_EXT_PROP),\
-    $(TARGET_SYSTEM_EXT_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := PRODUCT_SYSTEM_EXT_PROPERTIES
+# system_ext/etc/build.prop is built by Soong. See system-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_EXT)/etc/build.prop
-$(eval $(call build-properties,\
-    system_ext,\
-    $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET)))
 
 # ----------------------------------------------------------------
 # ramdisk/boot/etc/build.prop
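
With the system-only branch removed from generate-common-build-props above, every partition it still handles gets the product-wide identity values; for a hypothetical vendor partition the emitted properties look roughly like the list below (values are invented).

example_vendor_props = [
    'ro.product.vendor.brand=Acme',          # $(PRODUCT_BRAND)
    'ro.product.vendor.device=my_device',    # $(TARGET_DEVICE)
    'ro.product.vendor.manufacturer=Acme',   # $(PRODUCT_MANUFACTURER)
    'ro.product.vendor.model=My Model',      # $(PRODUCT_MODEL)
    'ro.product.vendor.name=my_product',     # $(TARGET_PRODUCT)
]
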
diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk
index e8428c8..6906611 100644
--- a/core/sysprop_config.mk
+++ b/core/sysprop_config.mk
@@ -15,57 +15,9 @@
 )
 _additional_prop_var_names :=
 
-#
-# -----------------------------------------------------------------
-# Add the product-defined properties to the build properties.
-ifneq ($(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED), true)
-  ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
-else
-  ifndef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
-    ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
-  endif
-endif
-
-ADDITIONAL_SYSTEM_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
-
-# Set ro.llndk.api_level to show the maximum vendor API level that the LLNDK in
-# the system partition supports.
-ifdef RELEASE_BOARD_API_LEVEL
-ADDITIONAL_SYSTEM_PROPERTIES += ro.llndk.api_level=$(RELEASE_BOARD_API_LEVEL)
-endif
-
-# Sets ro.actionable_compatible_property.enabled to know on runtime whether the
-# allowed list of actionable compatible properties is enabled or not.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=true
-
-# Add the system server compiler filter if they are specified for the product.
-ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
-ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
-endif
-
-# Add the 16K developer option if it is defined for the product.
-ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true
-else
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false
-endif
-
-# Enable core platform API violation warnings on userdebug and eng builds.
-ifneq ($(TARGET_BUILD_VARIANT),user)
-ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
-endif
-
-# Define ro.sanitize.<name> properties for all global sanitizers.
-ADDITIONAL_SYSTEM_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
-
-# Sets the default value of ro.postinstall.fstab.prefix to /system.
-# Device board config should override the value to /product when needed by:
-#
-#     PRODUCT_PRODUCT_PROPERTIES += ro.postinstall.fstab.prefix=/product
-#
-# It then uses ${ro.postinstall.fstab.prefix}/etc/fstab.postinstall to
-# mount system_other partition.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.postinstall.fstab.prefix=/system
+$(KATI_obsolete_var ADDITIONAL_SYSTEM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
+$(KATI_obsolete_var ADDITIONAL_ODM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
+$(KATI_obsolete_var ADDITIONAL_PRODUCT_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
 
 # Add cpu properties for bionic and ART.
 ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH)
@@ -178,104 +130,16 @@
     ro.build.ab_update=$(AB_OTA_UPDATER)
 endif
 
-ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)
-
 ifeq ($(AB_OTA_UPDATER),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 endif
 
-# Set this property for VTS to skip large page size tests on unsupported devices.
-ADDITIONAL_PRODUCT_PROPERTIES += \
-    ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED)
-
-ifeq ($(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.no_bionic_page_size_macro=true
-endif
-
 user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
-enable_target_debugging := true
-ifneq (,$(user_variant))
-  # Target is secure in user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=1
-  ADDITIONAL_SYSTEM_PROPERTIES += security.perf_harden=1
-
-  ifeq ($(user_variant),user)
-    ADDITIONAL_SYSTEM_PROPERTIES += ro.adb.secure=1
-  endif
-
-  ifneq ($(user_variant),userdebug)
-    # Disable debugging in plain user builds.
-    enable_target_debugging :=
-  endif
-
-  # Disallow mock locations by default for user builds
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=0
-
-else # !user_variant
-  # Turn on checkjni for non-user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.kernel.android.checkjni=1
-  # Set device insecure for non-user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=0
-  # Allow mock locations by default for non user builds
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=1
-endif # !user_variant
-
-ifeq (true,$(strip $(enable_target_debugging)))
-  # Target is more debuggable and adbd is on by default
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=1
-  # Enable Dalvik lock contention logging.
-  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.lockprof.threshold=500
-else # !enable_target_debugging
-  # Target is less debuggable and adbd is off by default
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=0
-endif # !enable_target_debugging
-
-ifneq ($(filter sdk sdk_addon,$(MAKECMDGOALS)),)
-_is_sdk_build := true
-endif
-
-ifeq ($(TARGET_BUILD_VARIANT),eng)
-ifneq ($(filter ro.setupwizard.mode=ENABLED, $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))),)
-  # Don't require the setup wizard on eng builds
-  ADDITIONAL_SYSTEM_PROPERTIES := $(filter-out ro.setupwizard.mode=%,\
-          $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))) \
-          ro.setupwizard.mode=OPTIONAL
-endif
-ifndef _is_sdk_build
-  # To speedup startup of non-preopted builds, don't verify or compile the boot image.
-  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.image-dex2oat-filter=extract
-endif
-# b/323566535
-ADDITIONAL_SYSTEM_PROPERTIES += init.svc_debug.no_fatal.zygote=true
-endif
-
-ifdef _is_sdk_build
-ADDITIONAL_SYSTEM_PROPERTIES += xmpp.auto-presence=true
-ADDITIONAL_SYSTEM_PROPERTIES += ro.config.nocheckin=yes
-endif
-
-_is_sdk_build :=
-
-ADDITIONAL_SYSTEM_PROPERTIES += net.bt.name=Android
-
-# This property is set by flashing debug boot image, so default to false.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.force.debuggable=0
 
 config_enable_uffd_gc := \
   $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
 
-# This is a temporary system property that controls the ART module. The plan is
-# to remove it by Aug 2025, at which time Mainline updates of the ART module
-# will ignore it as well.
-# If the value is "default", it will be mangled by post_process_props.py.
-ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(config_enable_uffd_gc)
-
-ADDITIONAL_SYSTEM_PROPERTIES := $(strip $(ADDITIONAL_SYSTEM_PROPERTIES))
-ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES))
 ADDITIONAL_VENDOR_PROPERTIES := $(strip $(ADDITIONAL_VENDOR_PROPERTIES))
 
 .KATI_READONLY += \
-    ADDITIONAL_SYSTEM_PROPERTIES \
-    ADDITIONAL_PRODUCT_PROPERTIES \
     ADDITIONAL_VENDOR_PROPERTIES
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index c95f6e7..eb54fae 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -47,21 +47,16 @@
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \
 	done
-	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
 	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list \
 	  -sha256
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
-	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \
-	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list
 	grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
-	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 
 art-host-tests: $(art_host_tests_zip)
diff --git a/core/tasks/art.mk b/core/tasks/art.mk
new file mode 100644
index 0000000..ded6125
--- /dev/null
+++ b/core/tasks/art.mk
@@ -0,0 +1,26 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+########################################################################
+# clean-oat rules
+#
+
+.PHONY: clean-oat
+clean-oat: clean-oat-host clean-oat-target
+
+.PHONY: clean-oat-host
+clean-oat-host:
+	find $(OUT_DIR) '(' -name '*.oat' -o -name '*.odex' -o -name '*.art' -o -name '*.vdex' ')' -a -type f | xargs rm -f
+	rm -rf $(TMPDIR)/*/test-*/dalvik-cache/*
+	rm -rf $(TMPDIR)/android-data/dalvik-cache/*
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index b9f0988..294cb57 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -16,6 +16,8 @@
 test_suite_tradefed := cts-tradefed
 test_suite_dynamic_config := cts/tools/cts-tradefed/DynamicConfig.xml
 test_suite_readme := cts/tools/cts-tradefed/README
+test_suite_tools := $(HOST_OUT_JAVA_LIBRARIES)/ats_console_deploy.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/ats_olc_server_local_mode_deploy.jar
 
 $(call declare-1p-target,$(test_suite_dynamic_config),cts)
 $(call declare-1p-target,$(test_suite_readme),cts)
@@ -90,8 +92,16 @@
 
 cts_api_coverage_exe := $(HOST_OUT_EXECUTABLES)/cts-api-coverage
 dexdeps_exe := $(HOST_OUT_EXECUTABLES)/dexdeps
+cts_api_map_exe := $(HOST_OUT_EXECUTABLES)/cts-api-map
 
 coverage_out := $(HOST_OUT)/cts-api-coverage
+api_map_out := $(HOST_OUT)/cts-api-map
+
+cts_jar_files := $(api_map_out)/api_map_files.txt
+$(cts_jar_files): PRIVATE_API_MAP_FILES := $(sort $(COMPATIBILITY.cts.API_MAP_FILES))
+$(cts_jar_files):
+	mkdir -p $(dir $@)
+	echo $(PRIVATE_API_MAP_FILES) > $@
 
 api_xml_description := $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml
 
@@ -114,6 +124,14 @@
 cts_api_coverage_dependencies := $(cts_api_coverage_exe) $(dexdeps_exe) $(api_xml_description) $(napi_xml_description)
 cts_system_api_coverage_dependencies := $(cts_api_coverage_exe) $(dexdeps_exe) $(system_api_xml_description)
 
+cts-api-xml-api-map-report := $(api_map_out)/api-map.xml
+cts-api-html-api-map-report := $(api_map_out)/api-map.html
+cts-system-api-xml-api-map-report := $(api_map_out)/system-api-map.xml
+cts-system-api-html-api-map-report := $(api_map_out)/system-api-map.html
+
+cts_system_api_map_dependencies := $(cts_api_map_exe) $(system_api_xml_description) $(cts_jar_files)
+cts_api_map_dependencies := $(cts_api_map_exe) $(api_xml_description) $(cts_jar_files)
+
 android_cts_zip := $(HOST_OUT)/cts/android-cts.zip
 cts_verifier_apk := $(call intermediates-dir-for,APPS,CtsVerifier)/package.apk
 
@@ -192,6 +210,48 @@
 .PHONY: cts-coverage-report-all cts-api-coverage
 cts-coverage-report-all: cts-test-coverage cts-verifier-coverage cts-combined-coverage cts-combined-xml-coverage
 
+$(cts-system-api-xml-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-system-api-xml-api-map-report): PRIVATE_API_XML_DESC := $(system_api_xml_description)
+$(cts-system-api-xml-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-system-api-xml-api-map-report) : $(android_cts_zip) $(cts_system_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS System API MAP Report - XML",\
+			$(PRIVATE_JAR_FILES),xml)
+
+$(cts-system-api-html-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-system-api-html-api-map-report): PRIVATE_API_XML_DESC := $(system_api_xml_description)
+$(cts-system-api-html-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-system-api-html-api-map-report) : $(android_cts_zip) $(cts_system_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS System API MAP Report - HTML",\
+			$(PRIVATE_JAR_FILES),html)
+
+$(cts-api-xml-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-api-xml-api-map-report): PRIVATE_API_XML_DESC := $(api_xml_description)
+$(cts-api-xml-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-api-xml-api-map-report) : $(android_cts_zip) $(cts_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS API MAP Report - XML",\
+			$(PRIVATE_JAR_FILES),xml)
+
+$(cts-api-html-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe)
+$(cts-api-html-api-map-report): PRIVATE_API_XML_DESC := $(api_xml_description)
+$(cts-api-html-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files)
+$(cts-api-html-api-map-report) : $(android_cts_zip) $(cts_api_map_dependencies) | $(ACP)
+	$(call generate-api-map-report-cts,"CTS API MAP Report - HTML",\
+			$(PRIVATE_JAR_FILES),html)
+
+.PHONY: cts-system-api-xml-api-map
+cts-system-api-xml-api-map : $(cts-system-api-xml-api-map-report)
+
+.PHONY: cts-system-api-html-api-map
+cts-system-api-html-api-map : $(cts-system-api-html-api-map-report)
+
+.PHONY: cts-api-xml-api-map
+cts-api-xml-api-map : $(cts-api-xml-api-map-report)
+
+.PHONY: cts-api-html-api-map
+cts-api-html-api-map : $(cts-api-html-api-map-report)
+
+.PHONY: cts-api-map-all
+
 # Put the test coverage report in the dist dir if "cts-api-coverage" is among the build goals.
 $(call dist-for-goals, cts-api-coverage, $(cts-test-coverage-report):cts-test-coverage-report.html)
 $(call dist-for-goals, cts-api-coverage, $(cts-system-api-coverage-report):cts-system-api-coverage-report.html)
@@ -207,6 +267,17 @@
 ALL_TARGETS.$(cts-combined-coverage-report).META_LIC:=$(module_license_metadata)
 ALL_TARGETS.$(cts-combined-xml-coverage-report).META_LIC:=$(module_license_metadata)
 
+# Put the test api map report in the dist dir if "cts-api-map-all" is among the build goals.
+$(call dist-for-goals, cts-api-map-all, $(cts-system-api-xml-api-map-report):cts-system-api-xml-api-map-report.xml)
+$(call dist-for-goals, cts-api-map-all, $(cts-system-api-html-api-map-report):cts-system-api-html-api-map-report.html)
+$(call dist-for-goals, cts-api-map-all, $(cts-api-xml-api-map-report):cts-api-xml-api-map-report.xml)
+$(call dist-for-goals, cts-api-map-all, $(cts-api-html-api-map-report):cts-api-html-api-map-report.html)
+
+ALL_TARGETS.$(cts-system-api-xml-api-map-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-system-api-html-api-map-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-api-xml-api-map-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-api-html-api-map-report).META_LIC:=$(module_license_metadata)
+
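A minimal usage sketch for the new goals (assuming the standard 'm' wrapper and 'dist' flow; the lunch configuration is outside this change). Each report also depends on android-cts.zip, so a full CTS build is implied, and cts-api-map-all itself is only wired to the reports through dist-for-goals above, so pair it with dist:

    # Build one report directly; output lands under $(HOST_OUT)/cts-api-map/.
    m cts-api-xml-api-map
    # Build and dist all four reports.
    m cts-api-map-all dist
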
 # Arguments;
 #  1 - Name of the report printed out on the screen
 #  2 - List of apk files that will be scanned to generate the report
@@ -217,23 +288,42 @@
 	@ echo $(1): file://$$(cd $(dir $@); pwd)/$(notdir $@)
 endef
 
+# Arguments;
+#  1 - Name of the report printed out on the screen
+#  2 - A file containing the list of files to be analyzed
+#  3 - Format of the report
+define generate-api-map-report-cts
+	$(hide) mkdir -p $(dir $@)
+	$(hide) $(PRIVATE_CTS_API_MAP_EXE) -j 8 -a $(PRIVATE_API_XML_DESC) -i $(2) -f $(3) -o $@
+	@ echo $(1): file://$$(cd $(dir $@); pwd)/$(notdir $@)
+endef
+
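Roughly, for the system XML report the recipe above expands to shell along these lines (paths left as their Make variables; an illustration of the call shape, not literal output):

    mkdir -p $(api_map_out)
    $(cts_api_map_exe) -j 8 -a $(system_api_xml_description) \
        -i $(api_map_out)/api_map_files.txt -f xml \
        -o $(api_map_out)/system-api-map.xml
    echo "CTS System API MAP Report - XML": file://.../system-api-map.xml
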
 # Reset temp vars
 cts_api_coverage_dependencies :=
 cts_system_api_coverage_dependencies :=
+cts_api_map_dependencies :=
+cts_system_api_map_dependencies :=
 cts-combined-coverage-report :=
 cts-combined-xml-coverage-report :=
 cts-verifier-coverage-report :=
 cts-test-coverage-report :=
 cts-system-api-coverage-report :=
 cts-system-api-xml-coverage-report :=
+cts-api-xml-api-map-report :=
+cts-api-html-api-map-report :=
+cts-system-api-xml-api-map-report :=
+cts-system-api-html-api-map-report :=
 api_xml_description :=
 api_text_description :=
 system_api_xml_description :=
 napi_xml_description :=
 napi_text_description :=
 coverage_out :=
+api_map_out :=
+cts_jar_files :=
 dexdeps_exe :=
 cts_api_coverage_exe :=
+cts_api_map_exe :=
 cts_verifier_apk :=
 android_cts_zip :=
 cts-dir :=
diff --git a/core/tasks/host-unit-tests.mk b/core/tasks/host-unit-tests.mk
index 733a2e2..4cb23c0 100644
--- a/core/tasks/host-unit-tests.mk
+++ b/core/tasks/host-unit-tests.mk
@@ -29,15 +29,28 @@
     $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
     $(_cmf_src)))
 
+my_symlinks_for_host_unit_tests := $(foreach f,$(COMPATIBILITY.host-unit-tests.SYMLINKS),\
+	$(strip $(eval _cmf_tuple := $(subst :, ,$(f))) \
+	$(eval _cmf_dep := $(word 1,$(_cmf_tuple))) \
+	$(eval _cmf_src := $(word 2,$(_cmf_tuple))) \
+	$(eval _cmf_dest := $(word 3,$(_cmf_tuple))) \
+	$(call symlink-file,$(_cmf_dep),$(_cmf_src),$(_cmf_dest)) \
+	$(_cmf_dest)))
+
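Each COMPATIBILITY.host-unit-tests.SYMLINKS entry is unpacked above as a dep:src:dest triple. A hedged shell equivalent of that split (the entry value is purely illustrative):

    entry="some_module:out/host/testcases/real_file:out/host/testcases/link"  # hypothetical entry
    IFS=: read -r dep src dest <<< "$entry"   # word 1 / word 2 / word 3 of the tuple
    echo "dep=$dep src=$src dest=$dest"
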
 $(host_unit_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_host_unit_tests)
 
-$(host_unit_tests_zip) : $(COMPATIBILITY.host-unit-tests.FILES) $(my_host_shared_lib_for_host_unit_tests) $(SOONG_ZIP)
+$(host_unit_tests_zip) : PRIVATE_SYMLINKS := $(my_symlinks_for_host_unit_tests)
+
+$(host_unit_tests_zip) : $(COMPATIBILITY.host-unit-tests.FILES) $(my_host_shared_lib_for_host_unit_tests) $(my_symlinks_for_host_unit_tests) $(SOONG_ZIP)
 	echo $(sort $(COMPATIBILITY.host-unit-tests.FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	echo "" >> $@-host-libs.list
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $@-host-libs.list; \
 	done
+	$(hide) for symlink in $(PRIVATE_SYMLINKS); do \
+	  echo $$symlink >> $@-host.list; \
+	done
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target.list \
diff --git a/core/tasks/mcts.mk b/core/tasks/mcts.mk
new file mode 100644
index 0000000..09a4191
--- /dev/null
+++ b/core/tasks/mcts.mk
@@ -0,0 +1,32 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifneq ($(wildcard test/mts/README.md),)
+
+mcts_test_suites :=
+mcts_test_suites += mcts
+
+$(foreach module, $(mts_modules), $(eval mcts_test_suites += mcts-$(module)))
+
+$(foreach suite, $(mcts_test_suites), \
+	$(eval test_suite_name := $(suite)) \
+	$(eval test_suite_tradefed := mts-tradefed) \
+	$(eval test_suite_readme := test/mts/README.md) \
+	$(eval include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk) \
+	$(eval .PHONY: $(suite)) \
+	$(eval $(suite): $(compatibility_zip)) \
+	$(eval $(call dist-for-goals, $(suite), $(compatibility_zip))) \
+)
+
+endif
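With this file in place, each suite name becomes a phony goal that builds (and optionally dists) its compatibility zip. A usage sketch, where mcts-adbd stands in for a real mcts-<module> suite taken from mts_modules, and the zip path follows the compatibility.mk change later in this patch:

    m mcts             # builds $(HOST_OUT)/mcts/android-mcts.zip
    m mcts-adbd dist   # builds the per-module suite zip and copies it to $DIST_DIR
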
diff --git a/core/tasks/meta-lic.mk b/core/tasks/meta-lic.mk
index c41de63..24adfc8 100644
--- a/core/tasks/meta-lic.mk
+++ b/core/tasks/meta-lic.mk
@@ -30,6 +30,23 @@
 $(eval $(call declare-1p-copy-files,device/google/atv,atv-component-overrides.xml))
 $(eval $(call declare-1p-copy-files,device/google/atv,tv_core_hardware.xml))
 
+# Moved here from device/google/bramble/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,media_profiles_V1_0.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,media_codecs_performance.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,task_profiles.json,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/bramble,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-1p-copy-files,device/google/bramble,audio_policy_configuration.xml))
+
 # Moved here from device/google/barbet/Android.mk
 $(eval $(call declare-copy-files-license-metadata,device/google/barbet,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 $(eval $(call declare-copy-files-license-metadata,device/google/barbet,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
@@ -66,6 +83,40 @@
 $(eval $(call declare-1p-copy-files,device/google/coral,audio_policy_configuration.xml))
 $(eval $(call declare-1p-copy-files,device/google/coral,display_19260504575090817.xml))
 
+# Moved here from device/google/cuttlefish/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,.idc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.cutf_cvm.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,audio_policy.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish/shared/config,pci.ids,SPDX-license-identifier-BSD-3-Clause,notice,device/google/cuttlefish/shared/config/LICENSE_BSD,))
+
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,privapp-permissions-cuttlefish.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_profiles_V1_0.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs_performance.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,cuttlefish_excluded_hardware.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs_google_video.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,car_audio_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,audio_policy_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,preinstalled-packages-product-car-cuttlefish.xml))
+$(eval $(call declare-1p-copy-files,hardware/google/camera/devices,.json))
+
 # Moved here from device/google/gs101/Android.mk
 $(eval $(call declare-copy-files-license-metadata,device/google/gs101,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 $(eval $(call declare-copy-files-license-metadata,device/google/gs101,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
@@ -147,3 +198,30 @@
 # Moved here from hardware/interfaces/tv/Android.mk
 $(eval $(call declare-1p-copy-files,hardware/interfaces/tv,tuner_vts_config_1_0.xml))
 $(eval $(call declare-1p-copy-files,hardware/interfaces/tv,tuner_vts_config_1_1.xml))
+
+# Moved here from device/generic/goldfish/Android.mk
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/data,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/input,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/wifi,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/camera,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,hals.conf))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.qemu-adb-keys.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.system_ext.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,.json))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,ueventd.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,wpa_supplicant.conf))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,media_profiles_V1_0.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,fstab.ranchu))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,display_settings.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,display_settings_freeform.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,device_state_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu-core.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu-net.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,audio_policy_configuration.xml))
+
+# Moved here from packages/services/Car/Android.mk
+$(eval $(call declare-1p-copy-files,packages/services/Car,))
+
+# Moved here from hardware/libhardware_legacy/Android.mk
+$(eval $(call declare-1p-copy-files,hardware/libhardware_legacy,))
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 7593668..0ca27d8 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -13,7 +13,7 @@
 $(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"')
 endef
 
-SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT).json
+SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT)${COVERAGE_SUFFIX}.json
 
 $(MODULE_INFO_JSON): PRIVATE_SOONG_MODULE_INFO := $(SOONG_MODULE_INFO)
 $(MODULE_INFO_JSON): PRIVATE_MERGE_JSON_OBJECTS := $(HOST_OUT_EXECUTABLES)/merge_module_info_json
diff --git a/core/tasks/recovery_snapshot.mk b/core/tasks/recovery_snapshot.mk
deleted file mode 100644
index 525273b..0000000
--- a/core/tasks/recovery_snapshot.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-current_makefile := $(lastword $(MAKEFILE_LIST))
-
-# RECOVERY_SNAPSHOT_VERSION must be set to 'current' in order to generate a recovery snapshot.
-ifeq ($(RECOVERY_SNAPSHOT_VERSION),current)
-
-.PHONY: recovery-snapshot
-recovery-snapshot: $(SOONG_RECOVERY_SNAPSHOT_ZIP)
-
-$(call dist-for-goals, recovery-snapshot, $(SOONG_RECOVERY_SNAPSHOT_ZIP))
-
-else # RECOVERY_SNAPSHOT_VERSION is NOT set to 'current'
-
-.PHONY: recovery-snapshot
-recovery-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
-recovery-snapshot:
-	$(call echo-error,$(PRIVATE_MAKEFILE),\
-		"CANNOT generate Recovery snapshot. RECOVERY_SNAPSHOT_VERSION must be set to 'current'.")
-	exit 1
-
-endif # RECOVERY_SNAPSHOT_VERSION
diff --git a/core/tasks/sts-lite.mk b/core/tasks/sts-sdk.mk
similarity index 61%
rename from core/tasks/sts-lite.mk
rename to core/tasks/sts-sdk.mk
index 65c65c3..b8ce5bf 100644
--- a/core/tasks/sts-lite.mk
+++ b/core/tasks/sts-sdk.mk
@@ -13,26 +13,25 @@
 # limitations under the License.
 
 ifneq ($(wildcard test/sts/README-sts-sdk.md),)
-test_suite_name := sts-lite
+test_suite_name := sts-sdk
 test_suite_tradefed := sts-tradefed
 test_suite_readme := test/sts/README-sts-sdk.md
 sts_sdk_zip := $(HOST_OUT)/$(test_suite_name)/sts-sdk.zip
 
 include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
 
-sts_sdk_samples := $(call intermediates-dir-for,ETC,sts-sdk-samples.zip)/sts-sdk-samples.zip
+sts_sdk_plugin_skel := $(call intermediates-dir-for,ETC,sts-sdk-plugin-skel.zip)/sts-sdk-plugin-skel.zip
 
-$(sts_sdk_zip): STS_LITE_ZIP := $(compatibility_zip)
-$(sts_sdk_zip): STS_SDK_SAMPLES := $(sts_sdk_samples)
-$(sts_sdk_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(sts_sdk_samples)
-	rm -f $@ $(STS_LITE_ZIP)_filtered
-	$(ZIP2ZIP) -i $(STS_LITE_ZIP) -o $(STS_LITE_ZIP)_filtered \
-		-x android-sts-lite/tools/sts-tradefed-tests.jar \
-		'android-sts-lite/tools/*:sts-test/libs/' \
-		'android-sts-lite/testcases/*:sts-test/utils/' \
-		'android-sts-lite/jdk/**/*:sts-test/jdk/'
-	$(MERGE_ZIPS) $@ $(STS_LITE_ZIP)_filtered $(STS_SDK_SAMPLES)
-	rm -f $(STS_LITE_ZIP)_filtered
+$(sts_sdk_zip): STS_SDK_ZIP := $(compatibility_zip)
+$(sts_sdk_zip): STS_SDK_PLUGIN_SKEL := $(sts_sdk_plugin_skel)
+$(sts_sdk_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(sts_sdk_plugin_skel)
+	rm -f $@ $(STS_SDK_ZIP)_filtered
+	$(ZIP2ZIP) -i $(STS_SDK_ZIP) -o $(STS_SDK_ZIP)_filtered \
+		-x android-sts-sdk/tools/sts-tradefed-tests.jar \
+		'android-sts-sdk/tools/*:plugin/src/main/resources/sts-tradefed-tools/' \
+		'android-sts-sdk/jdk/**/*:plugin/src/main/resources/jdk/'
+	$(MERGE_ZIPS) $@ $(STS_SDK_ZIP)_filtered $(STS_SDK_PLUGIN_SKEL)
+	rm -f $(STS_SDK_ZIP)_filtered
 
 .PHONY: sts-sdk
 sts-sdk: $(sts_sdk_zip)
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 4e78d89..86c23f8 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -26,7 +26,15 @@
 # Output variables:
 #   compatibility_zip: the path to the output zip file.
 
-test_suite_subdir := android-$(test_suite_name)
+special_mts_test_suites :=
+special_mts_test_suites += mcts
+special_mts_test_suites += $(mts_modules)
+ifneq ($(filter $(special_mts_test_suites),$(subst -, ,$(test_suite_name))),)
+	test_suite_subdir := android-mts
+else
+	test_suite_subdir := android-$(test_suite_name)
+endif
+
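A shell rendering of the decision above, for intuition only (simplified to check just the 'mcts' word; the real filter also matches every entry of mts_modules, and the suite name here is a placeholder):

    suite_name="mcts-adbd"            # hypothetical mcts-<module> suite
    words=" ${suite_name//-/ } "      # mirrors $(subst -, ,$(test_suite_name))
    case "$words" in
      *" mcts "*) subdir=android-mts ;;           # a special suite word matched
      *)          subdir="android-$suite_name" ;;
    esac
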
 out_dir := $(HOST_OUT)/$(test_suite_name)/$(test_suite_subdir)
 test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
 test_tools := $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
@@ -107,9 +115,9 @@
 compatibility_zip_deps += $(test_suite_notice_txt)
 compatibility_zip_resources += $(test_suite_notice_txt)
 
-compatibility_tests_list_zip := $(out_dir)-tests_list.zip
+compatibility_tests_list_zip := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)-tests_list.zip
 
-compatibility_zip := $(out_dir).zip
+compatibility_zip := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name).zip
 $(compatibility_zip) : .KATI_IMPLICIT_OUTPUTS := $(compatibility_tests_list_zip)
 $(compatibility_zip): PRIVATE_OUT_DIR := $(out_dir)
 $(compatibility_zip): PRIVATE_TOOLS := $(test_tools) $(test_suite_prebuilt_tools)
diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk
index 61bf136..47c360d 100644
--- a/core/tasks/tradefed-tests-list.mk
+++ b/core/tasks/tradefed-tests-list.mk
@@ -15,6 +15,11 @@
 # List all TradeFed tests from COMPATIBILITY.tradefed_tests_dir
 .PHONY: tradefed-tests-list
 
+COMPATIBILITY.tradefed_tests_dir := \
+  $(COMPATIBILITY.tradefed_tests_dir) \
+  tools/tradefederation/core/res/config \
+  tools/tradefederation/core/javatests/res/config
+
 tradefed_tests :=
 $(foreach dir, $(COMPATIBILITY.tradefed_tests_dir), \
   $(eval tradefed_tests += $(shell find $(dir) -type f -name "*.xml")))
diff --git a/core/tasks/vendor_snapshot.mk b/core/tasks/vendor_snapshot.mk
deleted file mode 100644
index 83c1379..0000000
--- a/core/tasks/vendor_snapshot.mk
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-current_makefile := $(lastword $(MAKEFILE_LIST))
-
-# BOARD_VNDK_VERSION must be set to 'current' in order to generate a vendor snapshot.
-ifeq ($(BOARD_VNDK_VERSION),current)
-
-.PHONY: vendor-snapshot
-vendor-snapshot: $(SOONG_VENDOR_SNAPSHOT_ZIP)
-
-$(call dist-for-goals, vendor-snapshot, $(SOONG_VENDOR_SNAPSHOT_ZIP))
-
-.PHONY: vendor-fake-snapshot
-vendor-fake-snapshot: $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP)
-
-$(call dist-for-goals, vendor-fake-snapshot, $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP):fake/$(notdir $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP)))
-
-else # BOARD_VNDK_VERSION is NOT set to 'current'
-
-.PHONY: vendor-snapshot
-vendor-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
-vendor-snapshot:
-	$(call echo-error,$(PRIVATE_MAKEFILE),\
-		"CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.")
-	exit 1
-
-.PHONY: vendor-fake-snapshot
-vendor-fake-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
-vendor-fake-snapshot:
-	$(call echo-error,$(PRIVATE_MAKEFILE),\
-		"CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.")
-	exit 1
-
-endif # BOARD_VNDK_VERSION
diff --git a/core/version_util.mk b/core/version_util.mk
index eb568be..0e34634 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -183,14 +183,17 @@
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH_TIMESTAMP
 
-ifndef PLATFORM_BASE_OS
-  # Used to indicate the base os applied to the device.
-  # Can be an arbitrary string, but must be a single word.
-  #
-  # If there is no $PLATFORM_BASE_OS set, keep it empty.
-  PLATFORM_BASE_OS :=
-endif
-.KATI_READONLY := PLATFORM_BASE_OS
+# PLATFORM_BASE_OS is used to indicate the base os applied
+# to the device. Can be an arbitrary string, but must be a
+# single word.
+#
+# If there is no $PLATFORM_BASE_OS set, keep it empty.
+#
+# PLATFORM_BASE_OS can be set either via an environment
+# variable or via the PRODUCT_BASE_OS product variable.
+PLATFORM_BASE_OS_ENV_INPUT := $(PLATFORM_BASE_OS)
+.KATI_READONLY := PLATFORM_BASE_OS_ENV_INPUT
+PLATFORM_BASE_OS :=
 
 ifndef BUILD_ID
   # Used to signify special builds.  E.g., branches and/or releases,
diff --git a/envsetup.sh b/envsetup.sh
index 647c106..06dadd3 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -385,6 +385,7 @@
         complete -F _bazel__complete -o nospace b
     fi
     complete -F _lunch lunch
+    complete -F _lunch_completion lunch2
 
     complete -F _complete_android_module_names pathmod
     complete -F _complete_android_module_names gomod
@@ -496,9 +497,18 @@
         return 1
     fi
 
+    _lunch_meat $product $release $variant
+}
+
+function _lunch_meat()
+{
+    local product=$1
+    local release=$2
+    local variant=$3
+
     TARGET_PRODUCT=$product \
-    TARGET_BUILD_VARIANT=$variant \
     TARGET_RELEASE=$release \
+    TARGET_BUILD_VARIANT=$variant \
     build_build_var_cache
     if [ $? -ne 0 ]
     then
@@ -519,14 +529,11 @@
     set_stuff_for_environment
     [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || printconfig
 
-    if [ "${TARGET_BUILD_VARIANT}" = "userdebug" ] && [[  -z "${ANDROID_QUIET_BUILD}" ]]; then
-      echo
-      echo "Want FASTER LOCAL BUILDS? Use -eng instead of -userdebug (however for" \
-        "performance benchmarking continue to use userdebug)"
-    fi
-    if [ $used_lunch_menu -eq 1 ]; then
-      echo
-      echo "Hint: next time you can simply run 'lunch $selection'"
+    if [[ -z "${ANDROID_QUIET_BUILD}" ]]; then
+        local spam_for_lunch=$(gettop)/build/make/tools/envsetup/spam_for_lunch
+        if [[ -x $spam_for_lunch ]]; then
+            $spam_for_lunch
+        fi
     fi
 
     destroy_build_var_cache
@@ -553,6 +560,112 @@
     return 0
 }
 
+function _lunch_usage()
+{
+    (
+        echo "The lunch command selects the configuration to use for subsequent"
+        echo "Android builds."
+        echo
+        echo "Usage: lunch TARGET_PRODUCT [TARGET_RELEASE [TARGET_BUILD_VARIANT]]"
+        echo
+        echo "  Choose the product, release and variant to use. If not"
+        echo "  supplied, TARGET_RELEASE will be 'trunk_staging' and"
+        echo "  TARGET_BUILD_VARIANT will be 'eng'"
+        echo
+        echo
+        echo "Usage: lunch TARGET_PRODUCT-TARGET_RELEASE-TARGET_BUILD_VARIANT"
+        echo
+        echo "  Chose the product, release and variant to use. This"
+        echo "  legacy format is maintained for compatibility."
+        echo
+        echo
+        echo "Note that the previous interactive menu and list of hard-coded"
+        echo "list of curated targets has been removed. If you would like the"
+        echo "list of products, release configs for a particular product, or"
+        echo "variants, run list_products, list_release_configs, list_variants"
+        echo "respectively."
+        echo
+    ) 1>&2
+}
+
+function lunch2()
+{
+    if [[ $# -eq 1 && $1 = "--help" ]]; then
+        _lunch_usage
+        return 0
+    fi
+    if [[ $# -eq 0 ]]; then
+        echo "No target specified. See lunch --help" 1>&2
+        return 1
+    fi
+    if [[ $# -gt 3 ]]; then
+        echo "Too many parameters given. See lunch --help" 1>&2
+        return 1
+    fi
+
+    local product release variant
+
+    # Handle the legacy format
+    local legacy=$(echo $1 | grep "-")
+    if [[ $# -eq 1 && -n $legacy ]]; then
+        IFS="-" read -r product release variant <<< "$1"
+        if [[ -z "$product" ]] || [[ -z "$release" ]] || [[ -z "$variant" ]]; then
+            echo "Invalid lunch combo: $1" 1>&2
+            echo "Valid combos must be of the form <product>-<release>-<variant> when using" 1>&2
+            echo "the legacy format.  Run 'lunch --help' for usage." 1>&2
+            return 1
+        fi
+    fi
+
+    # Handle the new format.
+    if [[ -z $legacy ]]; then
+        product=$1
+        release=$2
+        if [[ -z $release ]]; then
+            release=trunk_staging
+        fi
+        variant=$3
+        if [[ -z $variant ]]; then
+            variant=eng
+        fi
+    fi
+
+    # Validate the selection and set all the environment stuff
+    _lunch_meat $product $release $variant
+}
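Both accepted spellings, with an illustrative product name (any product reported by list_products works):

    lunch2 aosp_cf_x86_64_phone                       # release defaults to trunk_staging, variant to eng
    lunch2 aosp_cf_x86_64_phone trunk_staging eng     # positional form
    lunch2 aosp_cf_x86_64_phone-trunk_staging-eng     # legacy dashed form
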
+
+unset ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE
+unset ANDROID_LUNCH_COMPLETION_CHOSEN_PRODUCT
+unset ANDROID_LUNCH_COMPLETION_RELEASE_CACHE
+# Tab completion for lunch.
+function _lunch_completion()
+{
+    # Available products
+    if [[ $COMP_CWORD -eq 1 ]] ; then
+        if [[ -z $ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE ]]; then
+            ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE=$(list_products)
+        fi
+        COMPREPLY=( $(compgen -W "${ANDROID_LUNCH_COMPLETION_PRODUCT_CACHE}" -- "${COMP_WORDS[COMP_CWORD]}") )
+    fi
+
+    # Available release configs
+    if [[ $COMP_CWORD -eq 2 ]] ; then
+        if [[ -z $ANDROID_LUNCH_COMPLETION_RELEASE_CACHE || $ANDROID_LUNCH_COMPLETION_CHOSEN_PRODUCT != ${COMP_WORDS[1]} ]] ; then
+            ANDROID_LUNCH_COMPLETION_RELEASE_CACHE=$(list_releases ${COMP_WORDS[1]})
+            ANDROID_LUNCH_COMPLETION_CHOSEN_PRODUCT=${COMP_WORDS[1]}
+        fi
+        COMPREPLY=( $(compgen -W "${ANDROID_LUNCH_COMPLETION_RELEASE_CACHE}" -- "${COMP_WORDS[COMP_CWORD]}") )
+    fi
+
+    # Available variants
+    if [[ $COMP_CWORD -eq 3 ]] ; then
+        COMPREPLY=(user userdebug eng)
+    fi
+
+    return 0
+}
+
+
 # Configures the build to build unbundled apps.
 # Run tapas with one or more app names (from LOCAL_PACKAGE_NAME)
 function tapas()
@@ -752,47 +865,14 @@
     run_tool_with_logging "ADB" $ADB "${@}"
 }
 
-function run_tool_with_logging() {
-  # Run commands in a subshell for us to handle forced terminations with a trap
-  # handler.
-  (
-  local tool_tag="$1"
-  shift
-  local tool_binary="$1"
-  shift
-
-  # If the logger is not configured, run the original command and return.
-  if [[ -z "${ANDROID_TOOL_LOGGER}" ]]; then
-     "${tool_binary}" "${@}"
-     return $?
-  fi
-
-  # Otherwise, run the original command and call the logger when done.
-  local start_time
-  start_time=$(date +%s.%N)
-  local logger=${ANDROID_TOOL_LOGGER}
-
-  # Install a trap to call the logger even when the process terminates abnormally.
-  # The logger is run in the background and its output suppressed to avoid
-  # interference with the user flow.
-  trap '
-  exit_code=$?;
-  # Remove the trap to prevent duplicate log.
-  trap - EXIT;
-  "${logger}" \
-    --tool_tag="${tool_tag}" \
-    --start_timestamp="${start_time}" \
-    --end_timestamp="$(date +%s.%N)" \
-    --tool_args="$*" \
-    --exit_code="${exit_code}" \
-    ${ANDROID_TOOL_LOGGER_EXTRA_ARGS} \
-    > /dev/null 2>&1 &
-  exit ${exit_code}
-  ' SIGINT SIGTERM SIGQUIT EXIT
-
-  # Run the original command.
-  "${tool_binary}" "${@}"
-  )
+function fastboot() {
+    local FASTBOOT=$(command which fastboot)
+    if [ -z "$FASTBOOT" ]; then
+        echo "Command fastboot not found; try lunch (and building) first?"
+        return 1
+    fi
+    # Support tool event logging for fastboot command.
+    run_tool_with_logging "FASTBOOT" $FASTBOOT "${@}"
 }
 
 # communicate with a running device or emulator, set up necessary state,
@@ -1082,6 +1162,7 @@
 unset refreshmod
 unset resgrep
 unset rsgrep
+unset run_tool_with_logging
 unset sepgrep
 unset sgrep
 unset startviewserver
diff --git a/shell_utils.sh b/shell_utils.sh
index 450bb83..86f3f49 100644
--- a/shell_utils.sh
+++ b/shell_utils.sh
@@ -126,4 +126,27 @@
 }
 
 
+function log_tool_invocation()
+{
+    if [[ -z $ANDROID_TOOL_LOGGER ]]; then
+        return
+    fi
+
+    LOG_TOOL_TAG=$1
+    LOG_START_TIME=$(date +%s.%N)
+    trap '
+        exit_code=$?;
+        # Remove the trap to prevent duplicate log.
+        trap - EXIT;
+        $ANDROID_TOOL_LOGGER \
+                --tool_tag="${LOG_TOOL_TAG}" \
+                --start_timestamp="${LOG_START_TIME}" \
+                --end_timestamp="$(date +%s.%N)" \
+                --tool_args="$*" \
+                --exit_code="${exit_code}" \
+                ${ANDROID_TOOL_LOGGER_EXTRA_ARGS} \
+           > /dev/null 2>&1 &
+        exit ${exit_code}
+    ' SIGINT SIGTERM SIGQUIT EXIT
+}
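A sketch of how a tool wrapper might consume this helper (the wrapper itself, its path, and the tool name are assumptions; only ANDROID_TOOL_LOGGER and log_tool_invocation come from this change):

    #!/bin/bash
    # Hypothetical wrapper script for some host tool, run from the tree root.
    source build/make/shell_utils.sh
    log_tool_invocation "MYTOOL" "$@"   # installs the EXIT/SIGINT/SIGTERM/SIGQUIT trap
    /path/to/real-mytool "$@"           # the trap logs the exit code when the wrapper exits
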
 
diff --git a/target/board/Android.mk b/target/board/Android.mk
index decc345..8133af9 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -67,7 +67,6 @@
 $(GEN): $(DEVICE_MANIFEST_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
 	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
 	PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
-	PRODUCT_SHIPPING_API_LEVEL=$(PRODUCT_SHIPPING_API_LEVEL) \
 	$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \
 		-i $(call normalize-path-list,$(PRIVATE_DEVICE_MANIFEST_FILE))
 
@@ -99,7 +98,6 @@
 $$(GEN): $$(my_fragment_files) $$(HOST_OUT_EXECUTABLES)/assemble_vintf
 	BOARD_SEPOLICY_VERS=$$(BOARD_SEPOLICY_VERS) \
 	PRODUCT_ENFORCE_VINTF_MANIFEST=$$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
-	PRODUCT_SHIPPING_API_LEVEL=$$(PRODUCT_SHIPPING_API_LEVEL) \
 	$$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $$@ \
 		-i $$(call normalize-path-list,$$(PRIVATE_SRC_FILES))
 
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 5446064..acfc653 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -16,6 +16,7 @@
 
 # Base modules and settings for the product partition.
 PRODUCT_PACKAGES += \
+    build_flag_product \
     fs_config_dirs_product \
     fs_config_files_product \
     group_product \
@@ -24,3 +25,8 @@
     product_compatibility_matrix.xml \
     product_manifest.xml \
     selinux_policy_product \
+    product-build.prop \
+
+# Packages included only for eng or userdebug builds, previously debug tagged
+PRODUCT_PACKAGES_DEBUG += \
+    adb_keys \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 634bf66..9e34538 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -45,6 +45,7 @@
     bu \
     bugreport \
     bugreportz \
+    build_flag_system \
     cgroups.json \
     charger \
     cmd \
@@ -82,6 +83,7 @@
     CtsShimPrivPrebuilt \
     debuggerd\
     device_config \
+    DeviceDiagnostics \
     dmctl \
     dnsmasq \
     dmesgd \
@@ -90,7 +92,9 @@
     dump.erofs \
     dumpstate \
     dumpsys \
+    E2eeContactKeysProvider \
     e2fsck \
+    enhanced-confirmation.xml \
     ExtShared \
     flags_health_check \
     framework-graphics \
@@ -234,6 +238,7 @@
     org.apache.http.legacy \
     otacerts \
     PackageInstaller \
+    package-shareduid-allowlist.xml \
     passwd_system \
     perfetto \
     perfetto-extras \
@@ -275,6 +280,7 @@
     storaged \
     surfaceflinger \
     svc \
+    system-build.prop \
     task_profiles.json \
     tc \
     telecom \
@@ -286,6 +292,7 @@
     uiautomator \
     uinput \
     uncrypt \
+    uprobestats \
     usbd \
     vdc \
     vintf \
@@ -327,6 +334,13 @@
         com.android.nfcservices
 endif
 
+# Check if the build supports Profiling module
+ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true)
+    PRODUCT_PACKAGES += \
+       com.android.profiling \
+       trace_redactor
+endif
+
 ifeq ($(RELEASE_USE_WEBVIEW_BOOTSTRAP_MODULE),true)
     PRODUCT_PACKAGES += \
         com.android.webview.bootstrap
@@ -390,7 +404,6 @@
     BugReport \
     adb \
     adevice \
-    art-tools \
     atest \
     bcc \
     bit \
@@ -400,7 +413,7 @@
     flags_health_check \
     fsck.erofs \
     icu-data_host_i18n_apex \
-    icu_tzdata.dat_host_tzdata_apex \
+    tzdata_icu_res_files_host_prebuilts \
     idmap2 \
     incident_report \
     ld.mc \
@@ -421,6 +434,21 @@
     tz_version_host \
     tz_version_host_tzdata_apex \
 
+# For art-tools, if the dependencies have changed, please sync them to art/Android.bp as well.
+PRODUCT_HOST_PACKAGES += \
+    ahat \
+    dexdump \
+    hprof-conv
+# A subset of the tools is disabled when HOST_PREFER_32_BIT is defined, as make reports
+# that they are not supported on the host (b/129323791). This is likely due to art_apex
+# disabling host APEX builds when HOST_PREFER_32_BIT is set (b/120617876).
+ifneq ($(HOST_PREFER_32_BIT),true)
+PRODUCT_HOST_PACKAGES += \
+    dexlist \
+    oatdump
+endif
+
+
 PRODUCT_PACKAGES += init.usb.rc init.usb.configfs.rc
 
 PRODUCT_PACKAGES += etc_hosts
@@ -431,9 +459,13 @@
 PRODUCT_SYSTEM_PROPERTIES += debug.atrace.tags.enableflags=0
 PRODUCT_SYSTEM_PROPERTIES += persist.traced.enable=1
 
+# Include kernel configs.
+PRODUCT_PACKAGES += \
+    approved-ogki-builds.xml \
+    kernel-lifetimes.xml
+
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
-    adb_keys \
     adevice_fingerprint \
     arping \
     dmuserd \
@@ -498,3 +530,6 @@
 
 # Use "image" APEXes always.
 $(call inherit-product,$(SRC_TARGET_DIR)/product/updatable_apex.mk)
+
+$(call soong_config_set, bionic, large_system_property_node, $(RELEASE_LARGE_SYSTEM_PROPERTY_NODE))
+$(call soong_config_set, Aconfig, read_from_new_storage, $(RELEASE_READ_FROM_NEW_STORAGE))
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index 76f008f..febe537 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -16,6 +16,7 @@
 
 # Base modules and settings for the system_ext partition.
 PRODUCT_PACKAGES += \
+    build_flag_system_ext \
     fs_config_dirs_system_ext \
     fs_config_files_system_ext \
     group_system_ext \
@@ -23,6 +24,7 @@
     SatelliteClient \
     selinux_policy_system_ext \
     system_ext_manifest.xml \
+    system_ext-build.prop \
 
 # Base modules when shipping api level is less than or equal to 34
 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index ec3de75..a80e0b3 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -18,6 +18,7 @@
 PRODUCT_PACKAGES += \
     adbd.recovery \
     android.hardware.health@2.0-impl-default.recovery \
+    build_flag_vendor \
     cgroups.recovery.json \
     charger.recovery \
     init_second_stage.recovery \
@@ -72,6 +73,12 @@
     passwd_vendor \
     selinux_policy_nonsystem \
     shell_and_utilities_vendor \
+    odm-build.prop \
+
+# libhealthloop BPF filter. This is in base_vendor.mk because libhealthloop must
+# be a static library and because the Android build system ignores 'required'
+# sections for static libraries.
+PRODUCT_PACKAGES += filterPowerSupplyEvents.o
 
 # Base modules when shipping api level is less than or equal to 34
 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
@@ -104,3 +111,9 @@
 PRODUCT_PACKAGES += \
     adb_debug.prop \
     userdebug_plat_sepolicy.cil
+
+# On eng or userdebug builds, build in perf-setup-sh by default.
+ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
+PRODUCT_PACKAGES += \
+    perf-setup-sh
+endif
diff --git a/target/product/build_variables.mk b/target/product/build_variables.mk
index 5fe5333..9fc9ff9 100644
--- a/target/product/build_variables.mk
+++ b/target/product/build_variables.mk
@@ -17,5 +17,8 @@
 # This file contains the trunk-stable flags that should be exported to all
 # Android targets.
 
+# Control libbinder client caching
+$(call soong_config_set, libbinder, release_libbinder_client_cache, $(RELEASE_LIBBINDER_CLIENT_CACHE))
+
 # Use the configured release of sqlite
 $(call soong_config_set, libsqlite3, release_package_libsqlite3, $(RELEASE_PACKAGE_LIBSQLITE3))
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 4a968d7..1a3f2cf 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -101,8 +101,16 @@
     PRODUCT_BOOT_JARS += \
         framework-nfc
 else
-    PRODUCT_APEX_BOOT_JARS := \
+    PRODUCT_APEX_BOOT_JARS += \
         com.android.nfcservices:framework-nfc
+    $(call soong_config_set,bootclasspath,nfc_apex_bootclasspath_fragment,true)
+endif
+
+# Check if build supports Profiling module.
+ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true)
+    PRODUCT_APEX_BOOT_JARS += \
+        com.android.profiling:framework-profiling \
+
 endif
 
 # List of system_server classpath jars delivered via apex.
@@ -153,6 +161,13 @@
     com.android.uwb:service-uwb \
     com.android.wifi:service-wifi \
 
+# Check if build supports Profiling module.
+ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true)
+    PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS += \
+        com.android.profiling:service-profiling \
+
+endif
+
 # Overrides the (apex, jar) pairs above when determining the on-device location. The format is:
 # <old_apex>:<old_jar>:<new_apex>:<new_jar>
 PRODUCT_CONFIGURED_JAR_LOCATION_OVERRIDES := \
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 4793657..0a09eb1 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -120,6 +120,10 @@
     init.zygote64.rc \
     init.zygote64_32.rc
 
+# Support Credential Manager
+PRODUCT_PACKAGES += \
+    android.software.credentials.prebuilt.xml
+
 # Enable dynamic partition size
 PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
 
diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk
index 7fbc09d..4627fde 100644
--- a/target/product/go_defaults.mk
+++ b/target/product/go_defaults.mk
@@ -17,10 +17,7 @@
 # Inherit common Android Go defaults.
 $(call inherit-product, build/make/target/product/go_defaults_common.mk)
 
-PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/go_devices/release_config_map.mk)
-
-# TODO (b/342265627): Remove v/g/r once all the flags have been moved to v/g_s/b/r
-PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google/release/go_devices/release_config_map.mk)
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/go_devices/release_config_map.textproto)
 
 # Add the system properties.
 TARGET_SYSTEM_PROP += \
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index 5218f29..fd4047a 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -37,9 +37,9 @@
 # leave less information available via JDWP.
 PRODUCT_MINIMIZE_JAVA_DEBUG_INFO := true
 
-# Disable Scudo outside of eng builds to save RAM.
+# Use the low memory allocator outside of eng builds to save RSS.
 ifneq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
-  PRODUCT_DISABLE_SCUDO := true
+  MALLOC_LOW_MEMORY := true
 endif
 
 # Add the system properties.
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 5044a39..39428d2 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -51,16 +51,15 @@
     init.vndk-nodef.rc \
 
 
-# Overlay the GSI specific SystemUI setting
+# Overlay the GSI specific setting for framework and SystemUI
 ifneq ($(PRODUCT_IS_AUTOMOTIVE),true)
-    PRODUCT_PACKAGES += gsi_overlay_systemui
+    PRODUCT_PACKAGES += \
+        gsi_overlay_framework \
+        gsi_overlay_systemui \
     PRODUCT_COPY_FILES += \
         device/generic/common/overlays/overlay-config.xml:$(TARGET_COPY_OUT_SYSTEM_EXT)/overlay/config/config.xml
 endif
 
-# b/308878144 no more VNDK on 24Q1 and beyond
-KEEP_VNDK ?= false
-
 # Support additional VNDK snapshots
 PRODUCT_EXTRA_VNDK_VERSIONS := \
     30 \
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 3c401f3..3f3bd01 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -33,6 +33,7 @@
 $(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
 
 PRODUCT_PACKAGES += \
+    android.software.window_magnification.prebuilt.xml \
     BasicDreams \
     BlockedNumberProvider \
     BluetoothMidiService \
@@ -75,6 +76,10 @@
     vr \
 
 PRODUCT_PACKAGES += $(RELEASE_PACKAGE_VIRTUAL_CAMERA)
+# Set the virtual_camera_service_enabled soong config variable based on the
+# RELEASE_PACKAGE_VIRTUAL_CAMERA build flag. The soong config variable is used
+# to prevent accessing the service when it is not present in the build.
+$(call soong_config_set,vdm,virtual_camera_service_enabled,$(if $(RELEASE_PACKAGE_VIRTUAL_CAMERA),true,false))
 
 PRODUCT_SYSTEM_SERVER_APPS += \
     FusedLocation \
@@ -84,9 +89,6 @@
 
 PRODUCT_PACKAGES += framework-audio_effects.xml
 
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.software.window_magnification.xml:$(TARGET_COPY_OUT_SYSTEM)/etc/permissions/android.software.window_magnification.xml \
-
 PRODUCT_VENDOR_PROPERTIES += \
     ro.carrier?=unknown \
     ro.config.notification_sound?=OnTheHunt.ogg \
diff --git a/target/product/large_screen_common.mk b/target/product/large_screen_common.mk
new file mode 100644
index 0000000..3eb9ff0
--- /dev/null
+++ b/target/product/large_screen_common.mk
@@ -0,0 +1,21 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Window Extensions
+$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk)
+
+# Enable Settings 2-pane optimization for large-screen
+PRODUCT_SYSTEM_PROPERTIES += \
+    persist.settings.large_screen_opt.enabled=true
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 503c9b3..af3857e 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -21,6 +21,7 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base_system.mk)
 
 PRODUCT_PACKAGES += \
+    android.software.webview.prebuilt.xml \
     com.android.future.usb.accessory \
     com.android.mediadrm.signer \
     com.android.media.remotedisplay \
@@ -39,12 +40,9 @@
 PRODUCT_HOST_PACKAGES += \
     fsck.f2fs \
 
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
-
 ifneq (REL,$(PLATFORM_VERSION_CODENAME))
-PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.software.preview_sdk.xml:system/etc/permissions/android.software.preview_sdk.xml
+PRODUCT_PACKAGES += \
+    android.software.preview_sdk.prebuilt.xml
 endif
 
 # The order here is the same order they end up on the classpath, so it matters.
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index d9c3c9a..9e8afa8 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -176,4 +176,10 @@
     dalvik.vm.usap_pool_refill_delay_ms?=3000
 
 PRODUCT_SYSTEM_PROPERTIES += \
-    dalvik.vm.useartservice=true
+    dalvik.vm.useartservice=true \
+    dalvik.vm.enable_pr_dexopt=true
+
+# Copy preopted files from system_b on first boot.
+PRODUCT_SYSTEM_PROPERTIES += ro.cp_system_other_odex=1
+PRODUCT_PACKAGES += \
+  cppreopts.sh
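
One way to sanity-check that these dexopt properties actually land in the image is to scan the generated build.prop; a hedged sketch, with the property list taken from this makefile and the file location left as an assumption:

from pathlib import Path

# The build.prop location is an assumption; it varies by product and partition.
EXPECTED_PROPS = {
    "dalvik.vm.useartservice": "true",
    "dalvik.vm.enable_pr_dexopt": "true",
    "ro.cp_system_other_odex": "1",
}


def check_build_prop(build_prop: Path) -> dict[str, bool]:
    # Parses key=value lines, ignoring comments, and reports whether each
    # expected property ended up with the value this makefile sets.
    props = {}
    for line in build_prop.read_text().splitlines():
        if "=" in line and not line.lstrip().startswith("#"):
            key, _, value = line.partition("=")
            props[key.strip()] = value.strip()
    return {key: props.get(key) == value for key, value in EXPECTED_PROPS.items()}
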
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index 1a07363..3d56a80 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -40,3 +40,6 @@
 ifeq ($(WITHOUT_CHECK_API),true)
   $(error WITHOUT_CHECK_API cannot be set to true for SDK product builds)
 endif
+
+# Include Wear flag values so that Wear-related APIs are built in SDKs.
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/wear/release/release_config_map.textproto)
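
Because the path is wrapped in $(wildcard), the Wear release config map is only pulled in when that vendor project is present in the source tree. A minimal Python sketch of the same optional-include behavior, assuming source_root points at the checkout root:

from pathlib import Path


def optional_wear_release_config_maps(source_root: str) -> list[str]:
    # Mirrors $(wildcard ...): the map is only included when the vendor path
    # exists, so source trees without vendor/google_shared build unchanged.
    candidate = (
        Path(source_root)
        / "vendor/google_shared/wear/release/release_config_map.textproto"
    )
    return [str(candidate)] if candidate.exists() else []
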
diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp
index 1e26d59..0d7b35e 100644
--- a/target/product/security/Android.bp
+++ b/target/product/security/Android.bp
@@ -25,3 +25,15 @@
     sub_dir: "security/fsverity",
     filename_from_src: true,
 }
+
+// otacerts: a keystore containing the authorized keys used to verify the
+// authenticity of downloaded OTA packages.
+// This module zips the certificates defined by PRODUCT_DEFAULT_DEV_CERTIFICATE
+// plus PRODUCT_EXTRA_OTA_KEYS for the system image, or PRODUCT_EXTRA_RECOVERY_KEYS
+// for the recovery image.
+otacerts_zip {
+    name: "otacerts",
+    recovery_available: true,
+    relative_install_path: "security",
+    filename: "otacerts.zip",
+}
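
The Android.mk rules removed in the next diff show what this otacerts_zip module has to produce: a flat zip of the default dev certificate plus any extra OTA or recovery keys. A rough Python sketch of that packaging step, with hypothetical paths:

import zipfile
from pathlib import Path


def build_otacerts_zip(output: Path, default_cert: Path, extra_keys: list[Path]) -> None:
    # Store every certificate flat under its basename (like soong_zip -j); since
    # zipfile.write() archives file contents, symlink entries are not preserved,
    # matching -symlinks=false.
    with zipfile.ZipFile(output, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        for cert in [default_cert, *extra_keys]:
            zf.write(cert, arcname=cert.name)


# Hypothetical paths; the real inputs come from PRODUCT_DEFAULT_DEV_CERTIFICATE
# plus PRODUCT_EXTRA_OTA_KEYS (or PRODUCT_EXTRA_RECOVERY_KEYS for recovery).
# build_otacerts_zip(Path("otacerts.zip"), Path("releasekey.x509.pem"), [])
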
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 4bd8efc..138e5bb 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -10,59 +10,8 @@
     LOCAL_LICENSE_CONDITIONS := notice
     LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
     LOCAL_MODULE_CLASS := ETC
-    LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
+    LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT_ETC)/security
     LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS)
     include $(BUILD_PREBUILT)
   endif
 endif
-
-
-#######################################
-# otacerts: A keystore with the authorized keys in it, which is used to verify the authenticity of
-# downloaded OTA packages.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := otacerts
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_STEM := otacerts.zip
-LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/security
-include $(BUILD_SYSTEM)/base_rules.mk
-
-extra_ota_keys := $(addsuffix .x509.pem,$(PRODUCT_EXTRA_OTA_KEYS))
-
-$(LOCAL_BUILT_MODULE): PRIVATE_CERT := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
-$(LOCAL_BUILT_MODULE): PRIVATE_EXTRA_OTA_KEYS := $(extra_ota_keys)
-$(LOCAL_BUILT_MODULE): \
-	    $(SOONG_ZIP) \
-	    $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem \
-	    $(extra_ota_keys)
-	$(SOONG_ZIP) -o $@ -j -symlinks=false \
-	    $(addprefix -f ,$(PRIVATE_CERT) $(PRIVATE_EXTRA_OTA_KEYS))
-
-
-#######################################
-# otacerts for recovery image.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := otacerts.recovery
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_STEM := otacerts.zip
-LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
-include $(BUILD_SYSTEM)/base_rules.mk
-
-extra_recovery_keys := $(addsuffix .x509.pem,$(PRODUCT_EXTRA_RECOVERY_KEYS))
-
-$(LOCAL_BUILT_MODULE): PRIVATE_CERT := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
-$(LOCAL_BUILT_MODULE): PRIVATE_EXTRA_RECOVERY_KEYS := $(extra_recovery_keys)
-$(LOCAL_BUILT_MODULE): \
-	    $(SOONG_ZIP) \
-	    $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem \
-	    $(extra_recovery_keys)
-	$(SOONG_ZIP) -o $@ -j -symlinks=false \
-	    $(addprefix -f ,$(PRIVATE_CERT) $(PRIVATE_EXTRA_RECOVERY_KEYS))
diff --git a/target/product/userspace_reboot.mk b/target/product/userspace_reboot.mk
index f235d14..51feb07 100644
--- a/target/product/userspace_reboot.mk
+++ b/target/product/userspace_reboot.mk
@@ -14,6 +14,4 @@
 # limitations under the License.
 #
 
-# Inherit this when the target supports userspace reboot
-
-PRODUCT_VENDOR_PROPERTIES := init.userspace_reboot.is_supported=true
+# DEPRECATED! Do not inherit this.
diff --git a/target/product/vboot.mk b/target/product/vboot.mk
deleted file mode 100644
index 48a4883..0000000
--- a/target/product/vboot.mk
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Copyright (C) 2015 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Provides dependencies necessary for verified boot
-
-PRODUCT_SUPPORTS_VBOOT := true
-
-# The dev key is used to sign boot and recovery images.
-# We expect this file to exist with the suffixes ".vbprivk" and ".vbpupk".
-# TODO: find a proper location for this
-PRODUCT_VBOOT_SIGNING_KEY := external/vboot_reference/tests/devkeys/kernel_data_key
-PRODUCT_VBOOT_SIGNING_SUBKEY := external/vboot_reference/tests/devkeys/kernel_subkey
diff --git a/target/product/virtual_ab_ota/OWNERS b/target/product/virtual_ab_ota/OWNERS
new file mode 100644
index 0000000..8eb0686
--- /dev/null
+++ b/target/product/virtual_ab_ota/OWNERS
@@ -0,0 +1,4 @@
+zhangkelvin@google.com
+dvander@google.com
+akailash@google.com
+
diff --git a/target/product/virtual_ab_ota/compression.mk b/target/product/virtual_ab_ota/compression.mk
index c964860..dc1ee3e 100644
--- a/target/product/virtual_ab_ota/compression.mk
+++ b/target/product/virtual_ab_ota/compression.mk
@@ -28,4 +28,5 @@
 PRODUCT_VIRTUAL_AB_COMPRESSION := true
 PRODUCT_PACKAGES += \
     snapuserd.vendor_ramdisk \
-    snapuserd
+    snapuserd \
+    snapuserd.recovery
diff --git a/target/product/virtual_ab_ota/compression_retrofit.mk b/target/product/virtual_ab_ota/compression_retrofit.mk
index 118d3f2..6c29cba 100644
--- a/target/product/virtual_ab_ota/compression_retrofit.mk
+++ b/target/product/virtual_ab_ota/compression_retrofit.mk
@@ -24,4 +24,5 @@
 # as well.
 PRODUCT_PACKAGES += \
     snapuserd.ramdisk \
-    snapuserd
+    snapuserd \
+    snapuserd.recovery
diff --git a/target/product/virtual_ab_ota/vabc_features.mk b/target/product/virtual_ab_ota/vabc_features.mk
index 1219763..e2745a1 100644
--- a/target/product/virtual_ab_ota/vabc_features.mk
+++ b/target/product/virtual_ab_ota/vabc_features.mk
@@ -40,6 +40,7 @@
 # ro.virtual_ab.compression.xor.enabled and ro.virtual_ab.io_uring.enabled
 # is also recommended
 #
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.read_ahead_size=16
 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.o_direct.enabled=true
 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.merge_thread_priority=19
 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.worker_thread_priority=0
diff --git a/teams/Android.bp b/teams/Android.bp
index b3a5752..94585fc 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -4391,3 +4391,52 @@
     // go/trendy/manage/engineers/5955405559201792
     trendy_team_id: "5955405559201792",
 }
+
+team {
+    name: "trendy_team_android_media_better_together",
+
+    // go/trendy/manage/engineers/5617300451721216
+    trendy_team_id: "5617300451721216",
+}
+
+team {
+    name: "trendy_team_attack_tools",
+
+    // go/trendy/manage/engineers/4705629185081344
+    trendy_team_id: "4705629185081344",
+}
+
+team {
+    name: "trendy_team_android_media_solutions_editing",
+
+    // go/trendy/manage/engineers/5350750192762880
+    trendy_team_id: "5350750192762880",
+}
+
+team {
+    name: "trendy_team_android_media_solutions_playback",
+
+    // go/trendy/manage/engineers/6742515252559872
+    trendy_team_id: "6742515252559872",
+}
+
+team {
+    name: "trendy_team_android_telemetry_client_infra",
+
+    // go/trendy/manage/engineers/5403245077430272
+    trendy_team_id: "5403245077430272",
+}
+
+team {
+    name: "trendy_team_pte_sysui",
+
+    // go/trendy/manage/engineers/5185897463382016
+    trendy_team_id: "5185897463382016",
+}
+
+team {
+    name: "trendy_team_pixel_troubleshooting_app",
+
+    // go/trendy/manage/engineers/5097003746426880
+    trendy_team_id: "5097003746426880",
+}
diff --git a/tests/Android.bp b/tests/Android.bp
deleted file mode 100644
index 39debf5..0000000
--- a/tests/Android.bp
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2024 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-    default_team: "trendy_team_build",
-}
-
-python_test_host {
-    name: "run_tool_with_logging_test",
-    main: "run_tool_with_logging_test.py",
-    pkg_path: "testdata",
-    srcs: [
-        "run_tool_with_logging_test.py",
-    ],
-    test_options: {
-        unit_test: true,
-    },
-    data: [
-        ":envsetup_minimum.zip",
-        ":tool_event_logger",
-    ],
-    test_suites: [
-        "general-tests",
-    ],
-    version: {
-        py3: {
-            embedded_launcher: true,
-        },
-    },
-}
diff --git a/tests/run_tool_with_logging_test.py b/tests/run_tool_with_logging_test.py
deleted file mode 100644
index 6f9b59c..0000000
--- a/tests/run_tool_with_logging_test.py
+++ /dev/null
@@ -1,345 +0,0 @@
-# Copyright 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import dataclasses
-import glob
-from importlib import resources
-import logging
-import os
-from pathlib import Path
-import re
-import shutil
-import signal
-import stat
-import subprocess
-import sys
-import tempfile
-import textwrap
-import time
-import unittest
-import zipfile
-
-EXII_RETURN_CODE = 0
-INTERRUPTED_RETURN_CODE = 130
-
-
-class RunToolWithLoggingTest(unittest.TestCase):
-
-  @classmethod
-  def setUpClass(cls):
-    super().setUpClass()
-    # Configure to print logging to stdout.
-    logging.basicConfig(filename=None, level=logging.DEBUG)
-    console = logging.StreamHandler(sys.stdout)
-    logging.getLogger("").addHandler(console)
-
-  def setUp(self):
-    super().setUp()
-    self.working_dir = tempfile.TemporaryDirectory()
-    # Run all the tests from working_dir which is our temp Android build top.
-    os.chdir(self.working_dir.name)
-    # Extract envsetup.zip which contains the envsetup.sh and other dependent
-    # scripts required to set up the build environments.
-    with resources.files("testdata").joinpath("envsetup.zip").open("rb") as p:
-      with zipfile.ZipFile(p, "r") as zip_f:
-        zip_f.extractall()
-
-  def tearDown(self):
-    self.working_dir.cleanup()
-    super().tearDown()
-
-  def test_does_not_log_when_logger_var_empty(self):
-    test_tool = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER=""
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_tool.assert_called_once_with_args("arg1 arg2")
-
-  def test_does_not_log_with_logger_unset(self):
-    test_tool = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      unset ANDROID_TOOL_LOGGER
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_tool.assert_called_once_with_args("arg1 arg2")
-
-  def test_log_success_with_logger_enabled(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_tool.assert_called_once_with_args("arg1 arg2")
-    expected_logger_args = (
-        "--tool_tag=FAKE_TOOL --start_timestamp=\d+\.\d+ --end_timestamp="
-        "\d+\.\d+ --tool_args=arg1 arg2 --exit_code=0"
-    )
-    test_logger.assert_called_once_with_args(expected_logger_args)
-
-  def test_run_tool_output_is_same_with_and_without_logging(self):
-    test_tool = TestScript.create(self.working_dir, "echo 'tool called'")
-    test_logger = TestScript.create(self.working_dir)
-
-    run_tool_with_logging_stdout, run_tool_with_logging_stderr = (
-        self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-    )
-
-    run_tool_without_logging_stdout, run_tool_without_logging_stderr = (
-        self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      {test_tool.executable} arg1 arg2
-    """)
-    )
-
-    self.assertEqual(
-        run_tool_with_logging_stdout, run_tool_without_logging_stdout
-    )
-    self.assertEqual(
-        run_tool_with_logging_stderr, run_tool_without_logging_stderr
-    )
-
-  def test_logger_output_is_suppressed(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir, "echo 'logger called'")
-
-    run_tool_with_logging_output, _ = self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    self.assertNotIn("logger called", run_tool_with_logging_output)
-
-  def test_logger_error_is_suppressed(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(
-        self.working_dir, "echo 'logger failed' > /dev/stderr; exit 1"
-    )
-
-    _, err = self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    self.assertNotIn("logger failed", err)
-
-  def test_log_success_when_tool_interrupted(self):
-    test_tool = TestScript.create(self.working_dir, script_body="sleep 100")
-    test_logger = TestScript.create(self.working_dir)
-
-    process = self._run_script_in_build_env(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    pgid = os.getpgid(process.pid)
-    # Give sometime for the subprocess to start.
-    time.sleep(1)
-    # Kill the subprocess and any processes created in the same group.
-    os.killpg(pgid, signal.SIGINT)
-
-    returncode, _, _ = self._wait_for_process(process)
-    self.assertEqual(returncode, INTERRUPTED_RETURN_CODE)
-
-    expected_logger_args = (
-        "--tool_tag=FAKE_TOOL --start_timestamp=\d+\.\d+ --end_timestamp="
-        "\d+\.\d+ --tool_args=arg1 arg2 --exit_code=130"
-    )
-    test_logger.assert_called_once_with_args(expected_logger_args)
-
-  def test_logger_can_be_toggled_on(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER=""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_logger.assert_called_with_times(1)
-
-  def test_logger_can_be_toggled_off(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      ANDROID_TOOL_LOGGER=""
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_logger.assert_not_called()
-
-  def test_integration_tool_event_logger_dry_run(self):
-    test_tool = TestScript.create(self.working_dir)
-    logger_path = self._import_logger()
-
-    self._run_script_and_wait(f"""
-      TMPDIR="{self.working_dir.name}"
-      ANDROID_TOOL_LOGGER="{logger_path}"
-      ANDROID_TOOL_LOGGER_EXTRA_ARGS="--dry_run"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    self._assert_logger_dry_run()
-
-  def test_tool_args_do_not_fail_logger(self):
-    test_tool = TestScript.create(self.working_dir)
-    logger_path = self._import_logger()
-
-    self._run_script_and_wait(f"""
-      TMPDIR="{self.working_dir.name}"
-      ANDROID_TOOL_LOGGER="{logger_path}"
-      ANDROID_TOOL_LOGGER_EXTRA_ARGS="--dry_run"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} --tool-arg1
-    """)
-
-    self._assert_logger_dry_run()
-
-  def _import_logger(self) -> Path:
-    logger = "tool_event_logger"
-    logger_path = Path(self.working_dir.name).joinpath(logger)
-    with resources.as_file(resources.files("testdata").joinpath(logger)) as p:
-      shutil.copy(p, logger_path)
-    Path.chmod(logger_path, 0o755)
-    return logger_path
-
-  def _assert_logger_dry_run(self):
-    log_files = glob.glob(self.working_dir.name + "/tool_event_logger_*/*.log")
-    self.assertEqual(len(log_files), 1)
-
-    with open(log_files[0], "r") as f:
-      lines = f.readlines()
-      self.assertEqual(len(lines), 1)
-      self.assertIn("dry run", lines[0])
-
-  def _create_build_env_script(self) -> str:
-    return f"""
-      source {Path(self.working_dir.name).joinpath("build/make/envsetup.sh")}
-    """
-
-  def _run_script_and_wait(self, test_script: str) -> tuple[str, str]:
-    process = self._run_script_in_build_env(test_script)
-    returncode, out, err = self._wait_for_process(process)
-    logging.debug("script stdout: %s", out)
-    logging.debug("script stderr: %s", err)
-    self.assertEqual(returncode, EXII_RETURN_CODE)
-    return out, err
-
-  def _run_script_in_build_env(self, test_script: str) -> subprocess.Popen:
-    setup_build_env_script = self._create_build_env_script()
-    return subprocess.Popen(
-        setup_build_env_script + test_script,
-        shell=True,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        text=True,
-        start_new_session=True,
-        executable="/bin/bash",
-        )
-
-  def _wait_for_process(
-      self, process: subprocess.Popen
-  ) -> tuple[int, str, str]:
-    pgid = os.getpgid(process.pid)
-    out, err = process.communicate()
-    # Wait for all process in the same group to complete since the logger runs
-    # as a separate detached process.
-    self._wait_for_process_group(pgid)
-    return (process.returncode, out, err)
-
-  def _wait_for_process_group(self, pgid: int, timeout: int = 5):
-    """Waits for all subprocesses within the process group to complete."""
-    start_time = time.time()
-    while True:
-      if time.time() - start_time > timeout:
-        raise TimeoutError(
-            f"Process group did not complete after {timeout} seconds"
-        )
-      for pid in os.listdir("/proc"):
-        if pid.isdigit():
-          try:
-            if os.getpgid(int(pid)) == pgid:
-              time.sleep(0.1)
-              break
-          except (FileNotFoundError, PermissionError, ProcessLookupError):
-            pass
-      else:
-        # All processes have completed.
-        break
-
-
-@dataclasses.dataclass
-class TestScript:
-  executable: Path
-  output_file: Path
-
-  def create(temp_dir: Path, script_body: str = ""):
-    with tempfile.NamedTemporaryFile(dir=temp_dir.name, delete=False) as f:
-      output_file = f.name
-
-    with tempfile.NamedTemporaryFile(dir=temp_dir.name, delete=False) as f:
-      executable = f.name
-      executable_contents = textwrap.dedent(f"""
-      #!/bin/bash
-
-      echo "${{@}}" >> {output_file}
-      {script_body}
-      """)
-      f.write(executable_contents.encode("utf-8"))
-
-    Path.chmod(f.name, os.stat(f.name).st_mode | stat.S_IEXEC)
-
-    return TestScript(executable, output_file)
-
-  def assert_called_with_times(self, expected_call_times: int):
-    lines = self._read_contents_from_output_file()
-    assert len(lines) == expected_call_times, (
-        f"Expect to call {expected_call_times} times, but actually called"
-        f" {len(lines)} times."
-    )
-
-  def assert_called_with_args(self, expected_args: str):
-    lines = self._read_contents_from_output_file()
-    assert len(lines) > 0
-    assert re.search(expected_args, lines[0]), (
-        f"Expect to call with args {expected_args}, but actually called with"
-        f" args {lines[0]}."
-    )
-
-  def assert_not_called(self):
-    self.assert_called_with_times(0)
-
-  def assert_called_once_with_args(self, expected_args: str):
-    self.assert_called_with_times(1)
-    self.assert_called_with_args(expected_args)
-
-  def _read_contents_from_output_file(self) -> list[str]:
-    with open(self.output_file, "r") as f:
-      return f.readlines()
-
-
-if __name__ == "__main__":
-  unittest.main()
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
index 448d8cf..15e4187 100644
--- a/tools/aconfig/TEST_MAPPING
+++ b/tools/aconfig/TEST_MAPPING
@@ -98,6 +98,10 @@
     {
       // aconfig_storage file cpp integration tests
       "name": "aconfig_storage_file.test.cpp"
+    },
+    {
+      // aconfig_storage file java integration tests
+      "name": "aconfig_storage_file.test.java"
     }
   ],
   "postsubmit": [
diff --git a/tools/aconfig/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs
index e743b2f..2c569da 100644
--- a/tools/aconfig/aconfig/src/codegen/cpp.rs
+++ b/tools/aconfig/aconfig/src/codegen/cpp.rs
@@ -45,6 +45,8 @@
     let header = package.replace('.', "_");
     let package_macro = header.to_uppercase();
     let cpp_namespace = package.replace('.', "::");
+    ensure!(!class_elements.is_empty());
+    let container = class_elements[0].container.clone();
     ensure!(codegen::is_valid_name_ident(&header));
     let context = Context {
         header: &header,
@@ -56,6 +58,7 @@
         readwrite_count,
         is_test_mode: codegen_mode == CodegenMode::Test,
         class_elements,
+        container,
         allow_instrumentation,
     };
 
@@ -100,6 +103,7 @@
     pub readwrite_count: i32,
     pub is_test_mode: bool,
     pub class_elements: Vec<ClassElement>,
+    pub container: String,
     pub allow_instrumentation: bool,
 }
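
The ensure! guard added above exists because the container string is read from the first class element; with zero flags that access would panic. A minimal Python sketch of the same guard-then-take-first pattern (field names are illustrative, not the aconfig data model):

def container_of(class_elements: list[dict]) -> str:
    # The container is taken from the first element, so an empty flag list must
    # be rejected up front, mirroring the ensure!() guard added above.
    if not class_elements:
        raise ValueError("cannot generate code for an empty flag list")
    return class_elements[0]["container"]
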
 
diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
index 3360ddd..dbc4ab5 100644
--- a/tools/aconfig/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -1,18 +1,18 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
+* Copyright (C) 2023 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 use anyhow::Result;
 use serde::Serialize;
@@ -20,22 +20,24 @@
 use std::path::PathBuf;
 use tinytemplate::TinyTemplate;
 
-use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
-
 use crate::codegen;
 use crate::codegen::CodegenMode;
 use crate::commands::OutputFile;
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
+use std::collections::HashMap;
 
 pub fn generate_java_code<I>(
     package: &str,
     parsed_flags_iter: I,
     codegen_mode: CodegenMode,
+    flag_ids: HashMap<String, u16>,
+    allow_instrumentation: bool,
 ) -> Result<Vec<OutputFile>>
 where
     I: Iterator<Item = ProtoParsedFlag>,
 {
     let flag_elements: Vec<FlagElement> =
-        parsed_flags_iter.map(|pf| create_flag_element(package, &pf)).collect();
+        parsed_flags_iter.map(|pf| create_flag_element(package, &pf, flag_ids.clone())).collect();
     let namespace_flags = gen_flags_by_namespace(&flag_elements);
     let properties_set: BTreeSet<String> =
         flag_elements.iter().map(|fe| format_property_name(&fe.device_config_namespace)).collect();
@@ -43,7 +45,7 @@
     let library_exported = codegen_mode == CodegenMode::Exported;
     let runtime_lookup_required =
         flag_elements.iter().any(|elem| elem.is_read_write) || library_exported;
-
+    let container = (flag_elements.first().expect("zero template flags").container).to_string();
     let context = Context {
         flag_elements,
         namespace_flags,
@@ -52,6 +54,8 @@
         properties_set,
         package_name: package.to_string(),
         library_exported,
+        allow_instrumentation,
+        container,
     };
     let mut template = TinyTemplate::new();
     template.add_template("Flags.java", include_str!("../../templates/Flags.java.template"))?;
@@ -117,6 +121,8 @@
     pub properties_set: BTreeSet<String>,
     pub package_name: String,
     pub library_exported: bool,
+    pub allow_instrumentation: bool,
+    pub container: String,
 }
 
 #[derive(Serialize, Debug)]
@@ -127,23 +133,31 @@
 
 #[derive(Serialize, Clone, Debug)]
 struct FlagElement {
+    pub container: String,
     pub default_value: bool,
     pub device_config_namespace: String,
     pub device_config_flag: String,
     pub flag_name_constant_suffix: String,
+    pub flag_offset: u16,
     pub is_read_write: bool,
     pub method_name: String,
     pub properties: String,
 }
 
-fn create_flag_element(package: &str, pf: &ProtoParsedFlag) -> FlagElement {
+fn create_flag_element(
+    package: &str,
+    pf: &ProtoParsedFlag,
+    flag_offsets: HashMap<String, u16>,
+) -> FlagElement {
     let device_config_flag = codegen::create_device_config_ident(package, pf.name())
         .expect("values checked at flag parse time");
     FlagElement {
+        container: pf.container().to_string(),
         default_value: pf.state() == ProtoFlagState::ENABLED,
         device_config_namespace: pf.namespace().to_string(),
         device_config_flag,
         flag_name_constant_suffix: pf.name().to_ascii_uppercase(),
+        flag_offset: *flag_offsets.get(pf.name()).expect("did not find flag offset"),
         is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
         method_name: format_java_method_name(pf.name()),
         properties: format_property_name(pf.namespace()),
@@ -179,6 +193,7 @@
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::commands::assign_flag_ids;
     use std::collections::HashMap;
 
     const EXPECTED_FEATUREFLAGS_COMMON_CONTENT: &str = r#"
@@ -477,70 +492,40 @@
         let mode = CodegenMode::Production;
         let modified_parsed_flags =
             crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
-        let generated_files =
-            generate_java_code(crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), mode)
-                .unwrap();
+        let flag_ids =
+            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
+        let generated_files = generate_java_code(
+            crate::test::TEST_PACKAGE,
+            modified_parsed_flags.into_iter(),
+            mode,
+            flag_ids,
+            false,
+        )
+        .unwrap();
         let expect_flags_content = EXPECTED_FLAG_COMMON_CONTENT.to_string()
             + r#"
             private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
         }"#;
 
-        let expect_featureflagsimpl_content = r#"
+        let expected_featureflagsmpl_content_0 = r#"
         package com.android.aconfig.test;
         // TODO(b/303773055): Remove the annotation after access issue is resolved.
         import android.compat.annotation.UnsupportedAppUsage;
         import android.provider.DeviceConfig;
         import android.provider.DeviceConfig.Properties;
+        "#;
+
+        let expected_featureflagsmpl_content_1 = r#"
         /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
-            private static boolean aconfig_test_is_cached = false;
-            private static boolean other_namespace_is_cached = false;
+            private static volatile boolean aconfig_test_is_cached = false;
+            private static volatile boolean other_namespace_is_cached = false;
             private static boolean disabledRw = false;
             private static boolean disabledRwExported = false;
             private static boolean disabledRwInOtherNamespace = false;
             private static boolean enabledRw = true;
-
-
-            private void load_overrides_aconfig_test() {
-                try {
-                    Properties properties = DeviceConfig.getProperties("aconfig_test");
-                    disabledRw =
-                        properties.getBoolean("com.android.aconfig.test.disabled_rw", false);
-                    disabledRwExported =
-                        properties.getBoolean("com.android.aconfig.test.disabled_rw_exported", false);
-                    enabledRw =
-                        properties.getBoolean("com.android.aconfig.test.enabled_rw", true);
-                } catch (NullPointerException e) {
-                    throw new RuntimeException(
-                        "Cannot read value from namespace aconfig_test "
-                        + "from DeviceConfig. It could be that the code using flag "
-                        + "executed before SettingsProvider initialization. Please use "
-                        + "fixed read-only flag by adding is_fixed_read_only: true in "
-                        + "flag declaration.",
-                        e
-                    );
-                }
-                aconfig_test_is_cached = true;
-            }
-
-            private void load_overrides_other_namespace() {
-                try {
-                    Properties properties = DeviceConfig.getProperties("other_namespace");
-                    disabledRwInOtherNamespace =
-                        properties.getBoolean("com.android.aconfig.test.disabled_rw_in_other_namespace", false);
-                } catch (NullPointerException e) {
-                    throw new RuntimeException(
-                        "Cannot read value from namespace other_namespace "
-                        + "from DeviceConfig. It could be that the code using flag "
-                        + "executed before SettingsProvider initialization. Please use "
-                        + "fixed read-only flag by adding is_fixed_read_only: true in "
-                        + "flag declaration.",
-                        e
-                    );
-                }
-                other_namespace_is_cached = true;
-            }
-
+        "#;
+        let expected_featureflagsmpl_content_2 = r#"
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
             @UnsupportedAppUsage
@@ -609,9 +594,238 @@
             }
         }
         "#;
+
+        let expect_featureflagsimpl_content_old = expected_featureflagsmpl_content_0.to_owned()
+            + expected_featureflagsmpl_content_1
+            + r#"
+            private void load_overrides_aconfig_test() {
+                try {
+                    Properties properties = DeviceConfig.getProperties("aconfig_test");
+                    disabledRw =
+                        properties.getBoolean(Flags.FLAG_DISABLED_RW, false);
+                    disabledRwExported =
+                        properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, false);
+                    enabledRw =
+                        properties.getBoolean(Flags.FLAG_ENABLED_RW, true);
+                } catch (NullPointerException e) {
+                    throw new RuntimeException(
+                        "Cannot read value from namespace aconfig_test "
+                        + "from DeviceConfig. It could be that the code using flag "
+                        + "executed before SettingsProvider initialization. Please use "
+                        + "fixed read-only flag by adding is_fixed_read_only: true in "
+                        + "flag declaration.",
+                        e
+                    );
+                }
+                aconfig_test_is_cached = true;
+            }
+
+            private void load_overrides_other_namespace() {
+                try {
+                    Properties properties = DeviceConfig.getProperties("other_namespace");
+                    disabledRwInOtherNamespace =
+                        properties.getBoolean(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false);
+                } catch (NullPointerException e) {
+                    throw new RuntimeException(
+                        "Cannot read value from namespace other_namespace "
+                        + "from DeviceConfig. It could be that the code using flag "
+                        + "executed before SettingsProvider initialization. Please use "
+                        + "fixed read-only flag by adding is_fixed_read_only: true in "
+                        + "flag declaration.",
+                        e
+                    );
+                }
+                other_namespace_is_cached = true;
+            }"#
+            + expected_featureflagsmpl_content_2;
+
         let mut file_set = HashMap::from([
             ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
-            ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
+            (
+                "com/android/aconfig/test/FeatureFlagsImpl.java",
+                &expect_featureflagsimpl_content_old,
+            ),
+            ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
+            (
+                "com/android/aconfig/test/CustomFeatureFlags.java",
+                EXPECTED_CUSTOMFEATUREFLAGS_CONTENT,
+            ),
+            (
+                "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
+                EXPECTED_FAKEFEATUREFLAGSIMPL_CONTENT,
+            ),
+        ]);
+
+        for file in generated_files {
+            let file_path = file.path.to_str().unwrap();
+            assert!(file_set.contains_key(file_path), "Cannot find {}", file_path);
+            assert_eq!(
+                None,
+                crate::test::first_significant_code_diff(
+                    file_set.get(file_path).unwrap(),
+                    &String::from_utf8(file.contents).unwrap()
+                ),
+                "File {} content is not correct",
+                file_path
+            );
+            file_set.remove(file_path);
+        }
+
+        assert!(file_set.is_empty());
+
+        let parsed_flags = crate::test::parse_test_flags();
+        let mode = CodegenMode::Production;
+        let modified_parsed_flags =
+            crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
+        let flag_ids =
+            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
+        let generated_files = generate_java_code(
+            crate::test::TEST_PACKAGE,
+            modified_parsed_flags.into_iter(),
+            mode,
+            flag_ids,
+            true,
+        )
+        .unwrap();
+
+        let expect_featureflagsimpl_content_new = expected_featureflagsmpl_content_0.to_owned()
+            + r#"
+            import android.aconfig.storage.StorageInternalReader;
+            import android.util.Log;
+            "#
+            + expected_featureflagsmpl_content_1
+            + r#"
+        StorageInternalReader reader;
+        boolean readFromNewStorage;
+
+        boolean useNewStorageValueAndDiscardOld = false;
+
+        private final static String TAG = "AconfigJavaCodegen";
+        private final static String SUCCESS_LOG = "success: %s value matches";
+        private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s";
+        private final static String ERROR_LOG = "error: failed to read flag value";
+
+        private void init() {
+            if (reader != null) return;
+            if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.storage_test_mission_1", false)) {
+                readFromNewStorage = true;
+                try {
+                    reader = new StorageInternalReader("system", "com.android.aconfig.test");
+                } catch (Exception e) {
+                    reader = null;
+                }
+            }
+
+            useNewStorageValueAndDiscardOld =
+                DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false);
+        }
+
+        private void load_overrides_aconfig_test() {
+            try {
+                Properties properties = DeviceConfig.getProperties("aconfig_test");
+                disabledRw =
+                    properties.getBoolean(Flags.FLAG_DISABLED_RW, false);
+                disabledRwExported =
+                    properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, false);
+                enabledRw =
+                    properties.getBoolean(Flags.FLAG_ENABLED_RW, true);
+            } catch (NullPointerException e) {
+                throw new RuntimeException(
+                    "Cannot read value from namespace aconfig_test "
+                    + "from DeviceConfig. It could be that the code using flag "
+                    + "executed before SettingsProvider initialization. Please use "
+                    + "fixed read-only flag by adding is_fixed_read_only: true in "
+                    + "flag declaration.",
+                    e
+                );
+            }
+            aconfig_test_is_cached = true;
+            init();
+            if (readFromNewStorage && reader != null) {
+                boolean val;
+                try {
+                    val = reader.getBooleanFlagValue(1);
+                    if (val == disabledRw) {
+                        Log.i(TAG, String.format(SUCCESS_LOG, "disabledRw"));
+                    } else {
+                        Log.i(TAG, String.format(MISMATCH_LOG, "disabledRw", val, disabledRw));
+                    }
+
+                    if (useNewStorageValueAndDiscardOld) {
+                        disabledRw = val;
+                    }
+
+                    val = reader.getBooleanFlagValue(2);
+                    if (val == disabledRwExported) {
+                        Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwExported"));
+                    } else {
+                        Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwExported", val, disabledRwExported));
+                    }
+
+                    if (useNewStorageValueAndDiscardOld) {
+                        disabledRwExported = val;
+                    }
+
+                    val = reader.getBooleanFlagValue(8);
+                    if (val == enabledRw) {
+                        Log.i(TAG, String.format(SUCCESS_LOG, "enabledRw"));
+                    } else {
+                        Log.i(TAG, String.format(MISMATCH_LOG, "enabledRw", val, enabledRw));
+                    }
+
+                    if (useNewStorageValueAndDiscardOld) {
+                        enabledRw = val;
+                    }
+
+                } catch (Exception e) {
+                    Log.e(TAG, ERROR_LOG, e);
+                }
+            }
+        }
+
+        private void load_overrides_other_namespace() {
+            try {
+                Properties properties = DeviceConfig.getProperties("other_namespace");
+                disabledRwInOtherNamespace =
+                    properties.getBoolean(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false);
+            } catch (NullPointerException e) {
+                throw new RuntimeException(
+                    "Cannot read value from namespace other_namespace "
+                    + "from DeviceConfig. It could be that the code using flag "
+                    + "executed before SettingsProvider initialization. Please use "
+                    + "fixed read-only flag by adding is_fixed_read_only: true in "
+                    + "flag declaration.",
+                    e
+                );
+            }
+            other_namespace_is_cached = true;
+            init();
+            if (readFromNewStorage && reader != null) {
+                boolean val;
+                try {
+                    val = reader.getBooleanFlagValue(3);
+                    if (val == disabledRwInOtherNamespace) {
+                        Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwInOtherNamespace"));
+                    } else {
+                        Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwInOtherNamespace", val, disabledRwInOtherNamespace));
+                    }
+
+                    if (useNewStorageValueAndDiscardOld) {
+                        disabledRwInOtherNamespace = val;
+                    }
+
+                } catch (Exception e) {
+                    Log.e(TAG, ERROR_LOG, e);
+                }
+            }
+        }"# + expected_featureflagsmpl_content_2;
+
+        let mut file_set = HashMap::from([
+            ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
+            (
+                "com/android/aconfig/test/FeatureFlagsImpl.java",
+                &expect_featureflagsimpl_content_new,
+            ),
             ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
             (
                 "com/android/aconfig/test/CustomFeatureFlags.java",
@@ -647,9 +861,16 @@
         let mode = CodegenMode::Exported;
         let modified_parsed_flags =
             crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
-        let generated_files =
-            generate_java_code(crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), mode)
-                .unwrap();
+        let flag_ids =
+            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
+        let generated_files = generate_java_code(
+            crate::test::TEST_PACKAGE,
+            modified_parsed_flags.into_iter(),
+            mode,
+            flag_ids,
+            true,
+        )
+        .unwrap();
 
         let expect_flags_content = r#"
         package com.android.aconfig.test;
@@ -690,7 +911,7 @@
         import android.provider.DeviceConfig.Properties;
         /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
-            private static boolean aconfig_test_is_cached = false;
+            private static volatile boolean aconfig_test_is_cached = false;
             private static boolean disabledRwExported = false;
             private static boolean enabledFixedRoExported = false;
             private static boolean enabledRoExported = false;
@@ -700,11 +921,11 @@
                 try {
                     Properties properties = DeviceConfig.getProperties("aconfig_test");
                     disabledRwExported =
-                        properties.getBoolean("com.android.aconfig.test.disabled_rw_exported", false);
+                        properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, false);
                     enabledFixedRoExported =
-                        properties.getBoolean("com.android.aconfig.test.enabled_fixed_ro_exported", false);
+                        properties.getBoolean(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED, false);
                     enabledRoExported =
-                        properties.getBoolean("com.android.aconfig.test.enabled_ro_exported", false);
+                        properties.getBoolean(Flags.FLAG_ENABLED_RO_EXPORTED, false);
                 } catch (NullPointerException e) {
                     throw new RuntimeException(
                         "Cannot read value from namespace aconfig_test "
@@ -833,9 +1054,16 @@
         let mode = CodegenMode::Test;
         let modified_parsed_flags =
             crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
-        let generated_files =
-            generate_java_code(crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), mode)
-                .unwrap();
+        let flag_ids =
+            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
+        let generated_files = generate_java_code(
+            crate::test::TEST_PACKAGE,
+            modified_parsed_flags.into_iter(),
+            mode,
+            flag_ids,
+            true,
+        )
+        .unwrap();
 
         let expect_flags_content = EXPECTED_FLAG_COMMON_CONTENT.to_string()
             + r#"
@@ -850,69 +1078,58 @@
         "#;
         let expect_featureflagsimpl_content = r#"
         package com.android.aconfig.test;
-        // TODO(b/303773055): Remove the annotation after access issue is resolved.
-        import android.compat.annotation.UnsupportedAppUsage;
         /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean disabledRo() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean disabledRw() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean disabledRwExported() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean disabledRwInOtherNamespace() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean enabledFixedRo() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean enabledFixedRoExported() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean enabledRo() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean enabledRoExported() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
             }
             @Override
             @com.android.aconfig.annotations.AconfigFlagAccessor
-            @UnsupportedAppUsage
             public boolean enabledRw() {
                 throw new UnsupportedOperationException(
                     "Method is not implemented.");
@@ -958,9 +1175,16 @@
         let mode = CodegenMode::ForceReadOnly;
         let modified_parsed_flags =
             crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
-        let generated_files =
-            generate_java_code(crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), mode)
-                .unwrap();
+        let flag_ids =
+            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
+        let generated_files = generate_java_code(
+            crate::test::TEST_PACKAGE,
+            modified_parsed_flags.into_iter(),
+            mode,
+            flag_ids,
+            true,
+        )
+        .unwrap();
         let expect_featureflags_content = r#"
         package com.android.aconfig.test;
         // TODO(b/303773055): Remove the annotation after access issue is resolved.
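
The updated tests above thread assign_flag_ids output into generate_java_code, and the expected FeatureFlagsImpl reads those offsets back with reader.getBooleanFlagValue(1), (2), (3) and (8). A small Python sketch of an offset assignment consistent with those expectations (not the actual aconfig implementation), assuming offsets follow each flag's sorted position within the package:

def assign_flag_offsets(flag_names: list[str]) -> dict[str, int]:
    # Give every flag a stable offset derived from its sorted position within
    # the package; these are the offsets the expected Java output passes to
    # reader.getBooleanFlagValue().
    return {name: offset for offset, name in enumerate(sorted(flag_names))}


offsets = assign_flag_offsets([
    "disabled_ro", "disabled_rw", "disabled_rw_exported",
    "disabled_rw_in_other_namespace", "enabled_fixed_ro",
    "enabled_fixed_ro_exported", "enabled_ro", "enabled_ro_exported",
    "enabled_rw",
])
assert offsets["disabled_rw"] == 1 and offsets["enabled_rw"] == 8
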
diff --git a/tools/aconfig/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs
index 33c3d37..6f3f7bf 100644
--- a/tools/aconfig/aconfig/src/codegen/rust.rs
+++ b/tools/aconfig/aconfig/src/codegen/rust.rs
@@ -20,26 +20,34 @@
 
 use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
 
+use std::collections::HashMap;
+
 use crate::codegen;
 use crate::codegen::CodegenMode;
 use crate::commands::OutputFile;
 
 pub fn generate_rust_code<I>(
     package: &str,
+    flag_ids: HashMap<String, u16>,
     parsed_flags_iter: I,
     codegen_mode: CodegenMode,
+    allow_instrumentation: bool,
 ) -> Result<OutputFile>
 where
     I: Iterator<Item = ProtoParsedFlag>,
 {
-    let template_flags: Vec<TemplateParsedFlag> =
-        parsed_flags_iter.map(|pf| TemplateParsedFlag::new(package, &pf)).collect();
+    let template_flags: Vec<TemplateParsedFlag> = parsed_flags_iter
+        .map(|pf| TemplateParsedFlag::new(package, flag_ids.clone(), &pf))
+        .collect();
     let has_readwrite = template_flags.iter().any(|item| item.readwrite);
+    let container = (template_flags.first().expect("zero template flags").container).to_string();
     let context = TemplateContext {
         package: package.to_string(),
         template_flags,
         modules: package.split('.').map(|s| s.to_string()).collect::<Vec<_>>(),
         has_readwrite,
+        allow_instrumentation,
+        container,
     };
     let mut template = TinyTemplate::new();
     template.add_template(
@@ -62,6 +70,8 @@
     pub template_flags: Vec<TemplateParsedFlag>,
     pub modules: Vec<String>,
     pub has_readwrite: bool,
+    pub allow_instrumentation: bool,
+    pub container: String,
 }
 
 #[derive(Serialize)]
@@ -69,25 +79,28 @@
     pub readwrite: bool,
     pub default_value: String,
     pub name: String,
+    pub container: String,
+    pub flag_offset: u16,
     pub device_config_namespace: String,
     pub device_config_flag: String,
 }
 
 impl TemplateParsedFlag {
     #[allow(clippy::nonminimal_bool)]
-    fn new(package: &str, pf: &ProtoParsedFlag) -> Self {
-        let template = TemplateParsedFlag {
+    fn new(package: &str, flag_offsets: HashMap<String, u16>, pf: &ProtoParsedFlag) -> Self {
+        Self {
             readwrite: pf.permission() == ProtoFlagPermission::READ_WRITE,
             default_value: match pf.state() {
                 ProtoFlagState::ENABLED => "true".to_string(),
                 ProtoFlagState::DISABLED => "false".to_string(),
             },
             name: pf.name().to_string(),
+            container: pf.container().to_string(),
+            flag_offset: *flag_offsets.get(pf.name()).expect("did not find flag offset"),
             device_config_namespace: pf.namespace().to_string(),
             device_config_flag: codegen::create_device_config_ident(package, pf.name())
                 .expect("values checked at flag parse time"),
-        };
-        template
+        }
     }
 }
 
@@ -97,36 +110,42 @@
 
     const PROD_EXPECTED: &str = r#"
 //! codegenerated rust flag lib
+use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
+use std::path::Path;
+use std::io::Write;
+use std::sync::LazyLock;
+use log::{log, LevelFilter, Level};
+
+static STORAGE_MIGRATION_MARKER_FILE: &str =
+    "/metadata/aconfig_test_missions/mission_1";
+static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
 
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
     /// flag value cache for disabled_rw
-    static ref CACHED_disabled_rw: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for disabled_rw_exported
-    static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for disabled_rw_in_other_namespace
-    static ref CACHED_disabled_rw_in_other_namespace: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.other_namespace",
         "com.android.aconfig.test.disabled_rw_in_other_namespace",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_rw
-    static ref CACHED_enabled_rw: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_rw",
-        "true") == "true";
-
-}
+        "true") == "true");
 
 impl FlagProvider {
     /// query flag disabled_ro
@@ -233,6 +252,692 @@
 }
 "#;
 
+    const PROD_INSTRUMENTED_EXPECTED: &str = r#"
+//! codegenerated rust flag lib
+use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
+use std::path::Path;
+use std::io::Write;
+use std::sync::LazyLock;
+use log::{log, LevelFilter, Level};
+
+static STORAGE_MIGRATION_MARKER_FILE: &str =
+    "/metadata/aconfig_test_missions/mission_1";
+static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
+
+/// flag provider
+pub struct FlagProvider;
+
+static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe {
+    get_mapped_storage_file("system", StorageFileType::PackageMap)
+    .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test"))
+    .map(|context| context.map(|c| c.boolean_start_index))
+});
+
+static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe {
+    get_mapped_storage_file("system", StorageFileType::FlagVal)
+});
+
+/// flag value cache for disabled_rw
+static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.aconfig_test",
+        "com.android.aconfig.test.disabled_rw",
+        "false") == "true";
+
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
+
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
+
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 1)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
+
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
+                }
+            }
+        }
+    }
+
+    result
+});
+
+/// flag value cache for disabled_rw_exported
+static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.aconfig_test",
+        "com.android.aconfig.test.disabled_rw_exported",
+        "false") == "true";
+
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
+
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
+
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 2)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
+
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_exported'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
+                }
+            }
+        }
+    }
+
+    result
+});
+
+/// flag value cache for disabled_rw_in_other_namespace
+static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.other_namespace",
+        "com.android.aconfig.test.disabled_rw_in_other_namespace",
+        "false") == "true";
+
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
+
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
+
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 3)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
+
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_in_other_namespace'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
+                }
+            }
+        }
+    }
+
+    result
+});
+
+
+/// flag value cache for enabled_rw
+static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.aconfig_test",
+        "com.android.aconfig.test.enabled_rw",
+        "true") == "true";
+
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
+
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
+
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 8)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
+
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'enabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
+                }
+            }
+        }
+    }
+
+    result
+});
+
+impl FlagProvider {
+
+
+    /// query flag disabled_ro
+    pub fn disabled_ro(&self) -> bool {
+        false
+    }
+
+    /// query flag disabled_rw
+    pub fn disabled_rw(&self) -> bool {
+        *CACHED_disabled_rw
+    }
+
+    /// query flag disabled_rw_exported
+    pub fn disabled_rw_exported(&self) -> bool {
+        *CACHED_disabled_rw_exported
+    }
+
+    /// query flag disabled_rw_in_other_namespace
+    pub fn disabled_rw_in_other_namespace(&self) -> bool {
+        *CACHED_disabled_rw_in_other_namespace
+    }
+
+    /// query flag enabled_fixed_ro
+    pub fn enabled_fixed_ro(&self) -> bool {
+        true
+    }
+
+    /// query flag enabled_fixed_ro_exported
+    pub fn enabled_fixed_ro_exported(&self) -> bool {
+        true
+    }
+
+    /// query flag enabled_ro
+    pub fn enabled_ro(&self) -> bool {
+        true
+    }
+
+    /// query flag enabled_ro_exported
+    pub fn enabled_ro_exported(&self) -> bool {
+        true
+    }
+
+    /// query flag enabled_rw
+    pub fn enabled_rw(&self) -> bool {
+        *CACHED_enabled_rw
+    }
+
+
+}
+
+/// flag provider
+pub static PROVIDER: FlagProvider = FlagProvider;
+
+
+/// query flag disabled_ro
+#[inline(always)]
+pub fn disabled_ro() -> bool {
+
+
+    let result = false;
+    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        return result;
+    }
+
+    // This will be called multiple times. Subsequent calls after the first
+    // are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device(MIGRATION_LOG_TAG)
+            .with_max_level(LevelFilter::Info),
+    );
+
+    unsafe {
+        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
+            Ok(file) => file,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
+                return result;
+            }
+        };
+
+        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
+            Ok(Some(context)) => context,
+            Ok(None) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': did not get context");
+                return result;
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
+                return result;
+            }
+        };
+        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
+            Ok(val_map) => val_map,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
+                return result;
+            }
+        };
+        let value = match get_boolean_flag_value(&flag_val_map, 0 + package_read_context.boolean_start_index) {
+            Ok(val) => val,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
+                return result;
+            }
+        };
+
+        if result != value {
+            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'disabled_ro'. Legacy storage was {result}, new storage was {value}");
+        } else {
+            let default_value = false;
+        }
+    }
+
+    result
+
+}
+
+/// query flag disabled_rw
+#[inline(always)]
+pub fn disabled_rw() -> bool {
+    PROVIDER.disabled_rw()
+}
+
+/// query flag disabled_rw_exported
+#[inline(always)]
+pub fn disabled_rw_exported() -> bool {
+    PROVIDER.disabled_rw_exported()
+}
+
+/// query flag disabled_rw_in_other_namespace
+#[inline(always)]
+pub fn disabled_rw_in_other_namespace() -> bool {
+    PROVIDER.disabled_rw_in_other_namespace()
+}
+
+/// query flag enabled_fixed_ro
+#[inline(always)]
+pub fn enabled_fixed_ro() -> bool {
+
+
+    let result = true;
+    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        return result;
+    }
+
+    // This will be called multiple times. Subsequent calls after the first
+    // are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device(MIGRATION_LOG_TAG)
+            .with_max_level(LevelFilter::Info),
+    );
+
+    unsafe {
+        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
+            Ok(file) => file,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
+                return result;
+            }
+        };
+
+        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
+            Ok(Some(context)) => context,
+            Ok(None) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': did not get context");
+                return result;
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
+                return result;
+            }
+        };
+        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
+            Ok(val_map) => val_map,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
+                return result;
+            }
+        };
+        let value = match get_boolean_flag_value(&flag_val_map, 4 + package_read_context.boolean_start_index) {
+            Ok(val) => val,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
+                return result;
+            }
+        };
+
+        if result != value {
+            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro'. Legacy storage was {result}, new storage was {value}");
+        } else {
+            let default_value = true;
+        }
+    }
+
+    result
+
+}
+
+/// query flag enabled_fixed_ro_exported
+#[inline(always)]
+pub fn enabled_fixed_ro_exported() -> bool {
+
+
+    let result = true;
+    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        return result;
+    }
+
+    // This will be called multiple times. Subsequent calls after the first
+    // are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device(MIGRATION_LOG_TAG)
+            .with_max_level(LevelFilter::Info),
+    );
+
+    unsafe {
+        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
+            Ok(file) => file,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
+                return result;
+            }
+        };
+
+        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
+            Ok(Some(context)) => context,
+            Ok(None) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': did not get context");
+                return result;
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
+                return result;
+            }
+        };
+        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
+            Ok(val_map) => val_map,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
+                return result;
+            }
+        };
+        let value = match get_boolean_flag_value(&flag_val_map, 5 + package_read_context.boolean_start_index) {
+            Ok(val) => val,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
+                return result;
+            }
+        };
+
+        if result != value {
+            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro_exported'. Legacy storage was {result}, new storage was {value}");
+        } else {
+            let default_value = true;
+        }
+    }
+
+    result
+
+}
+
+/// query flag enabled_ro
+#[inline(always)]
+pub fn enabled_ro() -> bool {
+
+
+    let result = true;
+    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        return result;
+    }
+
+    // This will be called multiple times. Subsequent calls after the first
+    // are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device(MIGRATION_LOG_TAG)
+            .with_max_level(LevelFilter::Info),
+    );
+
+    unsafe {
+        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
+            Ok(file) => file,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
+                return result;
+            }
+        };
+
+        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
+            Ok(Some(context)) => context,
+            Ok(None) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': did not get context");
+                return result;
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
+                return result;
+            }
+        };
+        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
+            Ok(val_map) => val_map,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
+                return result;
+            }
+        };
+        let value = match get_boolean_flag_value(&flag_val_map, 6 + package_read_context.boolean_start_index) {
+            Ok(val) => val,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
+                return result;
+            }
+        };
+
+        if result != value {
+            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro'. Legacy storage was {result}, new storage was {value}");
+        } else {
+            let default_value = true;
+        }
+    }
+
+    result
+
+}
+
+/// query flag enabled_ro_exported
+#[inline(always)]
+pub fn enabled_ro_exported() -> bool {
+
+
+    let result = true;
+    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        return result;
+    }
+
+    // This will be called multiple times. Subsequent calls after the first
+    // are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device(MIGRATION_LOG_TAG)
+            .with_max_level(LevelFilter::Info),
+    );
+
+    unsafe {
+        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
+            Ok(file) => file,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
+                return result;
+            }
+        };
+
+        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
+            Ok(Some(context)) => context,
+            Ok(None) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': did not get context");
+                return result;
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
+                return result;
+            }
+        };
+        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
+            Ok(val_map) => val_map,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
+                return result;
+            }
+        };
+        let value = match get_boolean_flag_value(&flag_val_map, 7 + package_read_context.boolean_start_index) {
+            Ok(val) => val,
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
+                return result;
+            }
+        };
+
+        if result != value {
+            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro_exported'. Legacy storage was {result}, new storage was {value}");
+        } else {
+            let default_value = true;
+        }
+    }
+
+    result
+
+}
+
+/// query flag enabled_rw
+#[inline(always)]
+pub fn enabled_rw() -> bool {
+    PROVIDER.enabled_rw()
+}
+"#;
+
     const TEST_EXPECTED: &str = r#"
 //! codegenerated rust flag lib
 
@@ -492,30 +1197,36 @@
 
     const EXPORTED_EXPECTED: &str = r#"
 //! codegenerated rust flag lib
+use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
+use std::path::Path;
+use std::io::Write;
+use std::sync::LazyLock;
+use log::{log, LevelFilter, Level};
+
+static STORAGE_MIGRATION_MARKER_FILE: &str =
+    "/metadata/aconfig_test_missions/mission_1";
+static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
 
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
     /// flag value cache for disabled_rw_exported
-    static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_fixed_ro_exported
-    static ref CACHED_enabled_fixed_ro_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_fixed_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_fixed_ro_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_ro_exported
-    static ref CACHED_enabled_ro_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_ro_exported",
-        "false") == "true";
-
-}
+        "false") == "true");
 
 impl FlagProvider {
     /// query flag disabled_rw_exported
@@ -558,6 +1269,15 @@
 
     const FORCE_READ_ONLY_EXPECTED: &str = r#"
 //! codegenerated rust flag lib
+use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
+use std::path::Path;
+use std::io::Write;
+use std::sync::LazyLock;
+use log::{log, LevelFilter, Level};
+
+static STORAGE_MIGRATION_MARKER_FILE: &str =
+    "/metadata/aconfig_test_missions/mission_1";
+static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
 
 /// flag provider
 pub struct FlagProvider;
@@ -633,24 +1353,27 @@
     true
 }
 "#;
+    use crate::commands::assign_flag_ids;
 
-    fn test_generate_rust_code(mode: CodegenMode) {
+    fn test_generate_rust_code(mode: CodegenMode, allow_instrumentation: bool, expected: &str) {
         let parsed_flags = crate::test::parse_test_flags();
         let modified_parsed_flags =
             crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
-        let generated =
-            generate_rust_code(crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), mode)
-                .unwrap();
+        let flag_ids =
+            assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap();
+        let generated = generate_rust_code(
+            crate::test::TEST_PACKAGE,
+            flag_ids,
+            modified_parsed_flags.into_iter(),
+            mode,
+            allow_instrumentation,
+        )
+        .unwrap();
         assert_eq!("src/lib.rs", format!("{}", generated.path.display()));
         assert_eq!(
             None,
             crate::test::first_significant_code_diff(
-                match mode {
-                    CodegenMode::Production => PROD_EXPECTED,
-                    CodegenMode::Test => TEST_EXPECTED,
-                    CodegenMode::Exported => EXPORTED_EXPECTED,
-                    CodegenMode::ForceReadOnly => FORCE_READ_ONLY_EXPECTED,
-                },
+                expected,
                 &String::from_utf8(generated.contents).unwrap()
             )
         );
@@ -658,21 +1381,26 @@
 
     #[test]
     fn test_generate_rust_code_for_prod() {
-        test_generate_rust_code(CodegenMode::Production);
+        test_generate_rust_code(CodegenMode::Production, false, PROD_EXPECTED);
+    }
+
+    #[test]
+    fn test_generate_rust_code_for_prod_instrumented() {
+        test_generate_rust_code(CodegenMode::Production, true, PROD_INSTRUMENTED_EXPECTED);
     }
 
     #[test]
     fn test_generate_rust_code_for_test() {
-        test_generate_rust_code(CodegenMode::Test);
+        test_generate_rust_code(CodegenMode::Test, false, TEST_EXPECTED);
     }
 
     #[test]
     fn test_generate_rust_code_for_exported() {
-        test_generate_rust_code(CodegenMode::Exported);
+        test_generate_rust_code(CodegenMode::Exported, false, EXPORTED_EXPECTED);
     }
 
     #[test]
     fn test_generate_rust_code_for_force_read_only() {
-        test_generate_rust_code(CodegenMode::ForceReadOnly);
+        test_generate_rust_code(CodegenMode::ForceReadOnly, false, FORCE_READ_ONLY_EXPECTED);
     }
 }
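
The expected outputs above also capture the move from the lazy_static! macro to std::sync::LazyLock for caching server-configurable flag reads. A minimal, standalone sketch of that caching pattern, with a hypothetical flag name and an environment-variable lookup standing in for the generated flags_rust::GetServerConfigurableFlag call:

    use std::sync::LazyLock;

    // Hypothetical flag cache: the closure runs once, on first dereference,
    // and the boolean result is cached for every later read.
    static CACHED_EXAMPLE_FLAG: LazyLock<bool> = LazyLock::new(|| {
        // Stand-in for flags_rust::GetServerConfigurableFlag(...) == "true".
        std::env::var("EXAMPLE_FLAG").map(|v| v == "true").unwrap_or(false)
    });

    fn example_flag() -> bool {
        *CACHED_EXAMPLE_FLAG
    }

    fn main() {
        println!("example_flag = {}", example_flag());
    }
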
diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
index 6945fd4..1a14f64 100644
--- a/tools/aconfig/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -17,7 +17,8 @@
 use anyhow::{bail, ensure, Context, Result};
 use itertools::Itertools;
 use protobuf::Message;
-use std::collections::HashMap;
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hasher;
 use std::io::Read;
 use std::path::PathBuf;
 
@@ -31,6 +32,7 @@
     ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag,
     ProtoParsedFlags, ProtoTracepoint,
 };
+use aconfig_storage_file::sip_hasher13::SipHasher13;
 use aconfig_storage_file::StorageFileType;
 
 pub struct Input {
@@ -191,15 +193,25 @@
     Ok(output)
 }
 
-pub fn create_java_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<Vec<OutputFile>> {
+pub fn create_java_lib(
+    mut input: Input,
+    codegen_mode: CodegenMode,
+    allow_instrumentation: bool,
+) -> Result<Vec<OutputFile>> {
     let parsed_flags = input.try_parse_flags()?;
     let modified_parsed_flags = modify_parsed_flags_based_on_mode(parsed_flags, codegen_mode)?;
     let Some(package) = find_unique_package(&modified_parsed_flags) else {
         bail!("no parsed flags, or the parsed flags use different packages");
     };
     let package = package.to_string();
-    let _flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
-    generate_java_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
+    let flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
+    generate_java_code(
+        &package,
+        modified_parsed_flags.into_iter(),
+        codegen_mode,
+        flag_ids,
+        allow_instrumentation,
+    )
 }
 
 pub fn create_cpp_lib(
@@ -228,7 +240,11 @@
     )
 }
 
-pub fn create_rust_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<OutputFile> {
+pub fn create_rust_lib(
+    mut input: Input,
+    codegen_mode: CodegenMode,
+    allow_instrumentation: bool,
+) -> Result<OutputFile> {
     // // TODO(327420679): Enable export mode for native flag library
     ensure!(
         codegen_mode != CodegenMode::Exported,
@@ -240,8 +256,14 @@
         bail!("no parsed flags, or the parsed flags use different packages");
     };
     let package = package.to_string();
-    let _flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
-    generate_rust_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
+    let flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
+    generate_rust_code(
+        &package,
+        flag_ids,
+        modified_parsed_flags.into_iter(),
+        codegen_mode,
+        allow_instrumentation,
+    )
 }
 
 pub fn create_storage(
@@ -390,12 +412,43 @@
     Ok(flag_ids)
 }
 
+#[allow(dead_code)] // TODO: b/316357686 - Use fingerprint in codegen to
+                    // protect hardcoded offset reads.
+pub fn compute_flag_offsets_fingerprint(flags_map: &HashMap<String, u16>) -> Result<u64> {
+    let mut hasher = SipHasher13::new();
+
+    // Need to sort to ensure the data is added to the hasher in the same order
+    // each run.
+    let sorted_map: BTreeMap<&String, &u16> = flags_map.iter().collect();
+
+    for (flag, offset) in sorted_map {
+        // See https://docs.rs/siphasher/latest/siphasher/#note for use of write
+        // over write_i16. Similarly, use to_be_bytes rather than to_ne_bytes to
+        // ensure consistency.
+        hasher.write(flag.as_bytes());
+        hasher.write(&offset.to_be_bytes());
+    }
+    Ok(hasher.finish())
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
     use aconfig_protos::ProtoFlagPurpose;
 
     #[test]
+    fn test_offset_fingerprint() {
+        let parsed_flags = crate::test::parse_test_flags();
+        let package = find_unique_package(&parsed_flags.parsed_flag).unwrap().to_string();
+        let flag_ids = assign_flag_ids(&package, parsed_flags.parsed_flag.iter()).unwrap();
+        let expected_fingerprint = 10709892481002252132u64;
+
+        let hash_result = compute_flag_offsets_fingerprint(&flag_ids);
+
+        assert_eq!(hash_result.unwrap(), expected_fingerprint);
+    }
+
+    #[test]
     fn test_parse_flags() {
         let parsed_flags = crate::test::parse_test_flags(); // calls parse_flags
         aconfig_protos::parsed_flags::verify_fields(&parsed_flags).unwrap();
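
compute_flag_offsets_fingerprint sorts the flag map into a BTreeMap so the hash does not depend on HashMap iteration order, and writes big-endian offset bytes so the result is stable across architectures. A small standalone sketch of why the sort matters; it uses std's DefaultHasher rather than aconfig's SipHasher13, so its output is not meant to match the value asserted in test_offset_fingerprint:

    use std::collections::{BTreeMap, HashMap};
    use std::hash::Hasher;

    // Hash a flag-name -> offset map deterministically: sort the entries,
    // then feed name bytes and big-endian offset bytes to the hasher.
    fn fingerprint(flags: &HashMap<String, u16>) -> u64 {
        let mut hasher = std::collections::hash_map::DefaultHasher::new();
        let sorted: BTreeMap<&String, &u16> = flags.iter().collect();
        for (name, offset) in sorted {
            hasher.write(name.as_bytes());
            hasher.write(&offset.to_be_bytes());
        }
        hasher.finish()
    }

    fn main() {
        // The same entries inserted in different orders hash to the same value.
        let mut a = HashMap::new();
        a.insert("disabled_rw".to_string(), 1u16);
        a.insert("enabled_rw".to_string(), 8u16);
        let mut b = HashMap::new();
        b.insert("enabled_rw".to_string(), 8u16);
        b.insert("disabled_rw".to_string(), 1u16);
        assert_eq!(fingerprint(&a), fingerprint(&b));
    }
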
diff --git a/tools/aconfig/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs
index 72be1c9..1fb64f9 100644
--- a/tools/aconfig/aconfig/src/main.rs
+++ b/tools/aconfig/aconfig/src/main.rs
@@ -72,6 +72,12 @@
                         .long("mode")
                         .value_parser(EnumValueParser::<CodegenMode>::new())
                         .default_value("production"),
+                )
+                .arg(
+                    Arg::new("allow-instrumentation")
+                        .long("allow-instrumentation")
+                        .value_parser(clap::value_parser!(bool))
+                        .default_value("false"),
                 ),
         )
         .subcommand(
@@ -96,6 +102,12 @@
                 .arg(Arg::new("cache").long("cache").required(true))
                 .arg(Arg::new("out").long("out").required(true))
                 .arg(
+                    Arg::new("allow-instrumentation")
+                        .long("allow-instrumentation")
+                        .value_parser(clap::value_parser!(bool))
+                        .default_value("false"),
+                )
+                .arg(
                     Arg::new("mode")
                         .long("mode")
                         .value_parser(EnumValueParser::<CodegenMode>::new())
@@ -237,8 +249,10 @@
         Some(("create-java-lib", sub_matches)) => {
             let cache = open_single_file(sub_matches, "cache")?;
             let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?;
-            let generated_files =
-                commands::create_java_lib(cache, *mode).context("failed to create java lib")?;
+            let allow_instrumentation =
+                get_required_arg::<bool>(sub_matches, "allow-instrumentation")?;
+            let generated_files = commands::create_java_lib(cache, *mode, *allow_instrumentation)
+                .context("failed to create java lib")?;
             let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
             generated_files
                 .iter()
@@ -259,8 +273,10 @@
         Some(("create-rust-lib", sub_matches)) => {
             let cache = open_single_file(sub_matches, "cache")?;
             let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?;
-            let generated_file =
-                commands::create_rust_lib(cache, *mode).context("failed to create rust lib")?;
+            let allow_instrumentation =
+                get_required_arg::<bool>(sub_matches, "allow-instrumentation")?;
+            let generated_file = commands::create_rust_lib(cache, *mode, *allow_instrumentation)
+                .context("failed to create rust lib")?;
             let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
             write_output_file_realtive_to_dir(&dir, &generated_file)?;
         }
diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
index 6235e69..9970b1f 100644
--- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
@@ -1,18 +1,28 @@
 package {package_name};
+{{ -if not is_test_mode }}
 {{ if not library_exported- }}
 // TODO(b/303773055): Remove the annotation after access issue is resolved.
 import android.compat.annotation.UnsupportedAppUsage;
 {{ -endif }}
-{{ -if not is_test_mode }}
+
 {{ -if runtime_lookup_required }}
 import android.provider.DeviceConfig;
 import android.provider.DeviceConfig.Properties;
-{{ endif }}
+
+
+{{ -if not library_exported }}
+{{ -if allow_instrumentation }}
+import android.aconfig.storage.StorageInternalReader;
+import android.util.Log;
+{{ -endif }}
+{{ -endif }}
+
+{{ -endif }}
 /** @hide */
 public final class FeatureFlagsImpl implements FeatureFlags \{
 {{ -if runtime_lookup_required }}
 {{ -for namespace_with_flags in namespace_flags }}
-    private static boolean {namespace_with_flags.namespace}_is_cached = false;
+    private static volatile boolean {namespace_with_flags.namespace}_is_cached = false;
 {{ -endfor- }}
 
 {{ for flag in flag_elements }}
@@ -20,6 +30,35 @@
     private static boolean {flag.method_name} = {flag.default_value};
 {{ -endif }}
 {{ -endfor }}
+{{ -if not library_exported }}
+{{ -if allow_instrumentation }}
+    StorageInternalReader reader;
+    boolean readFromNewStorage;
+
+    boolean useNewStorageValueAndDiscardOld = false;
+
+    private final static String TAG = "AconfigJavaCodegen";
+    private final static String SUCCESS_LOG = "success: %s value matches";
+    private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s";
+    private final static String ERROR_LOG = "error: failed to read flag value";
+
+    private void init() \{
+        if (reader != null) return;
+        if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.storage_test_mission_1", false)) \{
+            readFromNewStorage = true;
+            try \{
+                reader = new StorageInternalReader("{container}", "{package_name}");
+            } catch (Exception e) \{
+                reader = null;
+            }
+        }
+
+        useNewStorageValueAndDiscardOld =
+            DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false);
+    }
+
+{{ -endif }}
+{{ -endif }}
 {{ for namespace_with_flags in namespace_flags }}
     private void load_overrides_{namespace_with_flags.namespace}() \{
         try \{
@@ -27,7 +66,7 @@
 {{ -for flag in namespace_with_flags.flags }}
 {{ -if flag.is_read_write }}
             {flag.method_name} =
-                properties.getBoolean("{flag.device_config_flag}", {flag.default_value});
+                properties.getBoolean(Flags.FLAG_{flag.flag_name_constant_suffix}, {flag.default_value});
 {{ -endif }}
 {{ -endfor }}
         } catch (NullPointerException e) \{
@@ -41,6 +80,34 @@
             );
         }
         {namespace_with_flags.namespace}_is_cached = true;
+{{ -if not library_exported }}
+{{ -if allow_instrumentation }}
+        init();
+        if (readFromNewStorage && reader != null) \{
+            boolean val;
+            try \{
+{{ -for flag in namespace_with_flags.flags }}
+{{ -if flag.is_read_write }}
+
+                val = reader.getBooleanFlagValue({flag.flag_offset});
+                if (val == {flag.method_name}) \{
+                    Log.i(TAG, String.format(SUCCESS_LOG, "{flag.method_name}"));
+                } else \{
+                    Log.i(TAG, String.format(MISMATCH_LOG, "{flag.method_name}", val, {flag.method_name}));
+                }
+
+                if (useNewStorageValueAndDiscardOld) \{
+                    {flag.method_name} = val;
+                }
+
+{{ -endif }}
+{{ -endfor }}
+            } catch (Exception e) \{
+                    Log.e(TAG, ERROR_LOG, e);
+            }
+        }
+{{ -endif }}
+{{ -endif }}
     }
 {{ endfor- }}
 {{ -endif }}{#- end of runtime_lookup_required #}
@@ -70,7 +137,6 @@
     @Override
 {{ -if not library_exported }}
     @com.android.aconfig.annotations.AconfigFlagAccessor
-    @UnsupportedAppUsage
 {{ -endif }}
     public boolean {flag.method_name}() \{
         throw new UnsupportedOperationException(
diff --git a/tools/aconfig/aconfig/templates/cpp_source_file.template b/tools/aconfig/aconfig/templates/cpp_source_file.template
index 38dda7d..b6012e7 100644
--- a/tools/aconfig/aconfig/templates/cpp_source_file.template
+++ b/tools/aconfig/aconfig/templates/cpp_source_file.template
@@ -1,13 +1,13 @@
 #include "{header}.h"
 
 {{ if allow_instrumentation }}
+{{ if readwrite- }}
 #include <sys/stat.h>
 #include "aconfig_storage/aconfig_storage_read_api.hpp"
 #include <android/log.h>
-
-#define ALOGI(msg, ...)                                                        \
-  __android_log_print(ANDROID_LOG_INFO, "AconfigTestMission1", (msg), __VA_ARGS__)
-
+#define LOG_TAG "aconfig_cpp_codegen"
+#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
+{{ -endif }}
 {{ endif }}
 
 {{ if readwrite- }}
@@ -66,8 +66,68 @@
     class flag_provider : public flag_provider_interface \{
     public:
 
-        {{ -for item in class_elements }}
+        {{ if allow_instrumentation- }}
+        {{ if readwrite- }}
+        flag_provider()
+            {{ if readwrite- }}
+            : cache_({readwrite_count}, -1)
+            , boolean_start_index_()
+            {{ -else- }}
+            : boolean_start_index_()
+            {{ -endif }}
+            , flag_value_file_(nullptr)
+            , read_from_new_storage_(false)
+            , use_new_storage_value(false) \{
 
+            struct stat buffer;
+            if (stat("/metadata/aconfig_test_missions/mission_1", &buffer) == 0) \{
+               read_from_new_storage_ = true;
+            } else \{
+               return;
+            }
+
+            auto package_map_file = aconfig_storage::get_mapped_file(
+                 "{container}",
+                 aconfig_storage::StorageFileType::package_map);
+            if (!package_map_file.ok()) \{
+                ALOGI("error: failed to get package map file: %s", package_map_file.error().c_str());
+                return;
+            }
+
+            auto context = aconfig_storage::get_package_read_context(
+                **package_map_file, "{package}");
+            if (!context.ok()) \{
+                ALOGI("error: failed to get package read context: %s", context.error().c_str());
+                return;
+            }
+
+            // cache package boolean flag start index
+            boolean_start_index_ = context->boolean_start_index;
+
+            // unmap package map file and free memory
+            delete *package_map_file;
+
+            auto flag_value_file = aconfig_storage::get_mapped_file(
+                "{container}",
+                aconfig_storage::StorageFileType::flag_val);
+            if (!flag_value_file.ok()) \{
+                ALOGI("error: failed to get flag value file: %s", flag_value_file.error().c_str());
+                return;
+            }
+
+            // cache flag value file
+            flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>(
+                *flag_value_file);
+
+            use_new_storage_value = server_configurable_flags::GetServerConfigurableFlag(
+                "aconfig_flags.core_experiments_team_internal",
+                "com.android.providers.settings.use_new_storage_value",
+                "false") == "true";
+        }
+        {{ -endif }}
+        {{ -endif }}
+
+        {{ -for item in class_elements }}
         virtual bool {item.flag_name}() override \{
             {{ -if item.readwrite }}
             if (cache_[{item.readwrite_idx}] == -1) \{
@@ -76,6 +136,39 @@
                     "{item.device_config_flag}",
                     "{item.default_value}") == "true";
             }
+
+
+            {{ if allow_instrumentation- }}
+            if (read_from_new_storage_) \{
+                if (!flag_value_file_) \{
+                    ALOGI("error: failed to get flag {item.flag_name}: flag value file is null");
+                    return cache_[{item.readwrite_idx}];
+                }
+
+                auto value = aconfig_storage::get_boolean_flag_value(
+                    *flag_value_file_,
+                    boolean_start_index_ + {item.flag_offset});
+
+                if (!value.ok()) \{
+                    ALOGI("error: failed to read flag value: %s", value.error().c_str());
+                    return cache_[{item.readwrite_idx}];
+                }
+
+                bool expected_value = cache_[{item.readwrite_idx}];
+                if (*value != expected_value) \{
+                    ALOGI("error: {item.flag_name} value mismatch, new storage value is %s, old storage value is %s",
+                    *value ? "true" : "false", expected_value ? "true" : "false");
+                }
+
+                if (use_new_storage_value) \{
+                    return *value;
+                } else \{
+                    return expected_value;
+                }
+            }
+            {{ -endif }}
+
+
             return cache_[{item.readwrite_idx}];
             {{ -else }}
             {{ -if item.is_fixed_read_only }}
@@ -86,12 +179,21 @@
             {{ -endif }}
         }
         {{ -endfor }}
+
     {{ if readwrite- }}
     private:
         std::vector<int8_t> cache_ = std::vector<int8_t>({readwrite_count}, -1);
-    {{ -endif }}
-    };
+    {{ if allow_instrumentation- }}
+        uint32_t boolean_start_index_;
 
+        std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_;
+
+        bool read_from_new_storage_;
+        bool use_new_storage_value;
+    {{ -endif }}
+    {{ -endif }}
+
+    };
 
 {{ -endif }}
 
@@ -107,62 +209,6 @@
     {{ -if item.readwrite }}
     return {cpp_namespace}::{item.flag_name}();
     {{ -else }}
-    {{ if allow_instrumentation }}
-    auto result =
-        {{ if item.is_fixed_read_only }}
-	    {package_macro}_{item.flag_macro}
-	{{ else }}
-	    {item.default_value}
-	{{ endif }};
-
-    struct stat buffer;
-    if (stat("/metadata/aconfig_test_missions/mission_1", &buffer) != 0) \{
-        return result;
-    }
-
-    auto package_map_file = aconfig_storage::get_mapped_file(
-        "{item.container}",
-        aconfig_storage::StorageFileType::package_map);
-    if (!package_map_file.ok()) \{
-        ALOGI("error: failed to get package map file: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    auto package_read_context = aconfig_storage::get_package_read_context(
-        **package_map_file, "{package}");
-    if (!package_read_context.ok()) \{
-        ALOGI("error: failed to get package read context: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    delete *package_map_file;
-
-    auto flag_val_map = aconfig_storage::get_mapped_file(
-        "{item.container}",
-        aconfig_storage::StorageFileType::flag_val);
-    if (!flag_val_map.ok()) \{
-        ALOGI("error: failed to get flag val map: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    auto value = aconfig_storage::get_boolean_flag_value(
-        **flag_val_map,
-        package_read_context->boolean_start_index + {item.flag_offset});
-    if (!value.ok()) \{
-        ALOGI("error: failed to get flag val: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    delete *flag_val_map;
-
-    if (*value != result) \{
-        ALOGI("error: new storage value '%d' does not match current value '%d'", *value, result);
-    } else \{
-        ALOGI("success: new storage value was '%d, legacy storage was '%d'", *value, result);
-    }
-
-    return result;
-    {{ else }}
     {{ -if item.is_fixed_read_only }}
     return {package_macro}_{item.flag_macro};
     {{ -else }}
@@ -170,7 +216,6 @@
     {{ -endif }}
     {{ -endif }}
     {{ -endif }}
-    {{ -endif }}
 }
 
 {{ -if is_test_mode }}
@@ -185,5 +230,3 @@
      {cpp_namespace}::reset_flags();
 }
 {{ -endif }}
-
-
diff --git a/tools/aconfig/aconfig/templates/rust.template b/tools/aconfig/aconfig/templates/rust.template
index f9a2829..ea1c600 100644
--- a/tools/aconfig/aconfig/templates/rust.template
+++ b/tools/aconfig/aconfig/templates/rust.template
@@ -1,20 +1,105 @@
 //! codegenerated rust flag lib
+use aconfig_storage_read_api::\{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
+use std::path::Path;
+use std::io::Write;
+use std::sync::LazyLock;
+use log::\{log, LevelFilter, Level};
+
+static STORAGE_MIGRATION_MARKER_FILE: &str =
+    "/metadata/aconfig_test_missions/mission_1";
+static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
 
 /// flag provider
 pub struct FlagProvider;
 
 {{ if has_readwrite- }}
-lazy_static::lazy_static! \{
+{{ if allow_instrumentation }}
+static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe \{
+    get_mapped_storage_file("{container}", StorageFileType::PackageMap)
+    .and_then(|package_map| get_package_read_context(&package_map, "{package}"))
+    .map(|context| context.map(|c| c.boolean_start_index))
+});
+
+static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe \{
+    get_mapped_storage_file("{container}", StorageFileType::FlagVal)
+});
+{{ -endif }}
 {{ -for flag in template_flags }}
-    {{ -if flag.readwrite }}
-    /// flag value cache for {flag.name}
-    static ref CACHED_{flag.name}: bool = flags_rust::GetServerConfigurableFlag(
+
+{{ -if flag.readwrite }}
+/// flag value cache for {flag.name}
+{{ if allow_instrumentation }}
+static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| \{
+    let result = flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.{flag.device_config_namespace}",
         "{flag.device_config_flag}",
         "{flag.default_value}") == "true";
-    {{ -endif }}
+
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
+
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
+
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: \{err}"))
+            .and_then(|flag_val_map| \{
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: \{err}"))
+                    .and_then(|package_offset| \{
+                        match package_offset \{
+                            Some(offset) => \{
+                                get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset})
+                                    .map_err(|err| format!("failed to get flag: \{err}"))
+                            },
+                            None => Err("no context found for package '{package}'".to_string())
+                        }
+                    })
+                });
+
+        match aconfig_storage_result \{
+            Ok(storage_result) if storage_result == result => \{
+                if use_new_storage_value \{
+                    return storage_result;
+                } else \{
+                    return result;
+                }
+            },
+            Ok(storage_result) => \{
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag '{flag.name}'. Legacy storage was \{result}, new storage was \{storage_result}");
+                if use_new_storage_value \{
+                    return storage_result;
+                } else \{
+                    return result;
+                }
+            },
+            Err(err) => \{
+                log!(Level::Error, "AconfigTestMission1: error: \{err}");
+                if use_new_storage_value \{
+                    panic!("failed to read flag value: \{err}");
+                }
+            }
+        }
+    }
+
+    result
+    });
+{{ else }}
+static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
+    "aconfig_flags.{flag.device_config_namespace}",
+    "{flag.device_config_flag}",
+    "{flag.default_value}") == "true");
+{{ endif }}
+{{ -endif }}
 {{ -endfor }}
-}
 {{ -endif }}
 
 impl FlagProvider \{
@@ -22,11 +107,11 @@
 {{ for flag in template_flags }}
     /// query flag {flag.name}
     pub fn {flag.name}(&self) -> bool \{
-    {{ -if flag.readwrite }}
+        {{ -if flag.readwrite }}
         *CACHED_{flag.name}
-    {{ -else }}
+        {{ -else }}
         {flag.default_value}
-    {{ -endif }}
+        {{ -endif }}
     }
 {{ endfor }}
 
@@ -43,7 +128,67 @@
     PROVIDER.{flag.name}()
 {{ -else }}
 pub fn {flag.name}() -> bool \{
+    {{ if not allow_instrumentation }}
     {flag.default_value}
+    {{ else }}
+
+    let result = {flag.default_value};
+    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
+        return result;
+    }
+
+    // This will be called multiple times. Subsequent calls after the first
+    // are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device(MIGRATION_LOG_TAG)
+            .with_max_level(LevelFilter::Info),
+    );
+
+    unsafe \{
+        let package_map = match get_mapped_storage_file("{flag.container}", StorageFileType::PackageMap) \{
+            Ok(file) => file,
+            Err(err) => \{
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
+                return result;
+            }
+        };
+
+        let package_read_context = match get_package_read_context(&package_map, "{package}") \{
+            Ok(Some(context)) => context,
+            Ok(None) => \{
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': did not get context");
+                return result;
+            },
+            Err(err) => \{
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
+                return result;
+            }
+        };
+        let flag_val_map = match get_mapped_storage_file("{flag.container}", StorageFileType::FlagVal) \{
+            Ok(val_map) => val_map,
+            Err(err) => \{
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
+                return result;
+            }
+        };
+        let value = match get_boolean_flag_value(&flag_val_map, {flag.flag_offset} + package_read_context.boolean_start_index) \{
+            Ok(val) => val,
+            Err(err) => \{
+                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
+                return result;
+            }
+        };
+
+        if result != value \{
+            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for '{flag.name}'. Legacy storage was \{result}, new storage was \{value}");
+        } else \{
+            let default_value = {flag.default_value};
+        }
+    }
+
+    result
+    {{ endif }}
 {{ -endif }}
 }
 {{ endfor }}
diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp
index 2c771e0..95cecf4 100644
--- a/tools/aconfig/aconfig_device_paths/Android.bp
+++ b/tools/aconfig/aconfig_device_paths/Android.bp
@@ -39,13 +39,16 @@
 
 genrule {
     name: "libaconfig_java_device_paths_src",
-    srcs: ["src/DevicePathsTemplate.java"],
-    out: ["DevicePaths.java"],
+    srcs: ["src/DeviceProtosTemplate.java"],
+    out: ["DeviceProtos.java"],
     tool_files: ["partition_aconfig_flags_paths.txt"],
-    cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)"
+    cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)",
 }
 
 java_library {
     name: "aconfig_device_paths_java",
     srcs: [":libaconfig_java_device_paths_src"],
+    static_libs: [
+        "libaconfig_java_proto_nano",
+    ],
 }
diff --git a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
similarity index 62%
rename from tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
rename to tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
index f27b9bd..58c58de 100644
--- a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
+++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
@@ -15,7 +15,12 @@
  */
 package android.aconfig;
 
+import android.aconfig.nano.Aconfig.parsed_flag;
+import android.aconfig.nano.Aconfig.parsed_flags;
+
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -23,7 +28,7 @@
 /**
  * @hide
  */
-public class DevicePaths {
+public class DeviceProtos {
     static final String[] PATHS = {
         TEMPLATE
     };
@@ -31,12 +36,35 @@
     private static final String APEX_DIR = "/apex";
     private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb";
 
+    /**
+     * Returns a list of all on-device aconfig protos.
+     *
+     * May throw an exception if the protos can't be read at the call site. For
+     * example, some of the protos are in the apex/ partition, which is mounted
+     * somewhat late in the boot process.
+     *
+     * @throws IOException if we can't read one of the protos yet
+     * @return a list of all on-device aconfig protos
+     */
+    public static List<parsed_flag> loadAndParseFlagProtos() throws IOException {
+        ArrayList<parsed_flag> result = new ArrayList<>();
+
+        for (String path : parsedFlagsProtoPaths()) {
+            FileInputStream inputStream = new FileInputStream(path);
+            parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
+            for (parsed_flag flag : parsedFlags.parsedFlag) {
+                result.add(flag);
+            }
+        }
+
+        return result;
+    }
 
     /**
      * Returns the list of all on-device aconfig protos paths.
      * @hide
      */
-    public List<String> parsedFlagsProtoPaths() {
+    private static List<String> parsedFlagsProtoPaths() {
         ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS));
 
         File apexDirectory = new File(APEX_DIR);
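For reference, a minimal sketch of how a caller might consume the new DeviceProtos entry point. Only the loadAndParseFlagProtos() call comes from the template above; the wrapper class is illustrative, and the IOException handling reflects the javadoc's warning that the /apex protos may not be readable early in boot.

    import android.aconfig.DeviceProtos;
    import android.aconfig.nano.Aconfig.parsed_flag;

    import java.io.IOException;
    import java.util.List;

    class DeviceProtosUsageSketch {
        static void logFlagCount() {
            try {
                // Parses every proto listed in PATHS plus any
                // /apex/<module>/etc/aconfig_flags.pb files found on device.
                List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
                System.out.println("parsed " + flags.size() + " aconfig flags");
            } catch (IOException e) {
                // Expected if this runs before all partitions are mounted.
                System.err.println("aconfig protos not readable yet: " + e);
            }
        }
    }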
diff --git a/tools/aconfig/aconfig_device_paths/src/lib.rs b/tools/aconfig/aconfig_device_paths/src/lib.rs
index 7480b30..9ab9cea 100644
--- a/tools/aconfig/aconfig_device_paths/src/lib.rs
+++ b/tools/aconfig/aconfig_device_paths/src/lib.rs
@@ -30,9 +30,11 @@
         .collect()
 }
 
-/// Determine all paths that contain an aconfig protobuf file.
+/// Determines all paths that contain an aconfig protobuf file,
+/// filtering out nonexistent partition protobuf files.
 pub fn parsed_flags_proto_paths() -> Result<Vec<PathBuf>> {
-    let mut result: Vec<PathBuf> = read_partition_paths();
+    let mut result: Vec<PathBuf> =
+        read_partition_paths().into_iter().filter(|s| s.exists()).collect();
 
     for dir in fs::read_dir("/apex")? {
         let dir = dir?;
diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp
index e066e31..40b4464 100644
--- a/tools/aconfig/aconfig_storage_file/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/Android.bp
@@ -137,3 +137,29 @@
     min_sdk_version: "29",
     double_loadable: true,
 }
+
+// storage file parse api java library
+java_library {
+    name: "aconfig_storage_file_java",
+    srcs: [
+        "srcs/**/*.java",
+    ],
+    sdk_version: "core_current",
+    min_sdk_version: "29",
+    host_supported: true,
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+}
+
+// storage file parse api java library for core library
+java_library {
+    name: "aconfig_storage_file_java_none",
+    srcs: [
+        "srcs/**/*.java",
+    ],
+    sdk_version: "none",
+    system_modules: "core-all-system-modules",
+    host_supported: true,
+}
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
index 64b90ea..660edac 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
@@ -150,7 +150,7 @@
     /// Calculate node bucket index
     pub fn find_bucket_index(package_id: u32, flag_name: &str, num_buckets: u32) -> u32 {
         let full_flag_name = package_id.to_string() + "/" + flag_name;
-        get_bucket_index(&full_flag_name, num_buckets)
+        get_bucket_index(full_flag_name.as_bytes(), num_buckets)
     }
 }
 
diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs
index 26e9c1a..b6367ff 100644
--- a/tools/aconfig/aconfig_storage_file/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_file/src/lib.rs
@@ -37,19 +37,20 @@
 pub mod flag_value;
 pub mod package_table;
 pub mod protos;
+pub mod sip_hasher13;
 pub mod test_utils;
 
 use anyhow::anyhow;
 use std::cmp::Ordering;
-use std::collections::hash_map::DefaultHasher;
 use std::fs::File;
-use std::hash::{Hash, Hasher};
+use std::hash::Hasher;
 use std::io::Read;
 
 pub use crate::flag_info::{FlagInfoBit, FlagInfoHeader, FlagInfoList, FlagInfoNode};
 pub use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode};
 pub use crate::flag_value::{FlagValueHeader, FlagValueList};
 pub use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode};
+pub use crate::sip_hasher13::SipHasher13;
 
 use crate::AconfigStorageError::{
     BytesParseFail, HashTableSizeLimit, InvalidFlagValueType, InvalidStoredFlagType,
@@ -211,10 +212,12 @@
 }
 
 /// Get the corresponding bucket index given the key and number of buckets
-pub(crate) fn get_bucket_index<T: Hash>(val: &T, num_buckets: u32) -> u32 {
-    let mut s = DefaultHasher::new();
-    val.hash(&mut s);
-    (s.finish() % num_buckets as u64) as u32
+pub(crate) fn get_bucket_index(val: &[u8], num_buckets: u32) -> u32 {
+    let mut s = SipHasher13::new();
+    s.write(val);
+    s.write_u8(0xff);
+    let ret = (s.finish() % num_buckets as u64) as u32;
+    ret
 }
 
 /// Read and parse bytes as u8
diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs
index b734972..007f86e 100644
--- a/tools/aconfig/aconfig_storage_file/src/package_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs
@@ -146,7 +146,7 @@
     /// construction side (aconfig binary) and consumption side (flag read lib)
     /// use the same method of hashing
     pub fn find_bucket_index(package: &str, num_buckets: u32) -> u32 {
-        get_bucket_index(&package, num_buckets)
+        get_bucket_index(package.as_bytes(), num_buckets)
     }
 }
 
diff --git a/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs
new file mode 100644
index 0000000..9be3175
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! An implementation of SipHash13
+
+use std::cmp;
+use std::mem;
+use std::ptr;
+use std::slice;
+
+use std::hash::Hasher;
+
+/// An implementation of SipHash 1-3.
+///
+#[derive(Debug, Clone, Default)]
+pub struct SipHasher13 {
+    k0: u64,
+    k1: u64,
+    length: usize, // how many bytes we've processed
+    state: State,  // hash State
+    tail: u64,     // unprocessed bytes, little-endian
+    ntail: usize,  // how many bytes in tail are valid
+}
+
+#[derive(Debug, Clone, Copy, Default)]
+#[repr(C)]
+struct State {
+    // v0, v2 and v1, v3 show up in pairs in the algorithm,
+    // and simd implementations of SipHash will use vectors
+    // of v02 and v13. By placing them in this order in the struct,
+    // the compiler can pick up on just a few simd optimizations by itself.
+    v0: u64,
+    v2: u64,
+    v1: u64,
+    v3: u64,
+}
+
+macro_rules! compress {
+    ($state:expr) => {{
+        compress!($state.v0, $state.v1, $state.v2, $state.v3)
+    }};
+    ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => {{
+        $v0 = $v0.wrapping_add($v1);
+        $v1 = $v1.rotate_left(13);
+        $v1 ^= $v0;
+        $v0 = $v0.rotate_left(32);
+        $v2 = $v2.wrapping_add($v3);
+        $v3 = $v3.rotate_left(16);
+        $v3 ^= $v2;
+        $v0 = $v0.wrapping_add($v3);
+        $v3 = $v3.rotate_left(21);
+        $v3 ^= $v0;
+        $v2 = $v2.wrapping_add($v1);
+        $v1 = $v1.rotate_left(17);
+        $v1 ^= $v2;
+        $v2 = $v2.rotate_left(32);
+    }};
+}
+
+/// Load an integer of the desired type from a byte stream, in LE order. Uses
+/// `copy_nonoverlapping` to let the compiler generate the most efficient way
+/// to load it from a possibly unaligned address.
+///
+/// Unsafe because: unchecked indexing at i..i+size_of(int_ty)
+macro_rules! load_int_le {
+    ($buf:expr, $i:expr, $int_ty:ident) => {{
+        debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len());
+        let mut data = 0 as $int_ty;
+        ptr::copy_nonoverlapping(
+            $buf.get_unchecked($i),
+            &mut data as *mut _ as *mut u8,
+            mem::size_of::<$int_ty>(),
+        );
+        data.to_le()
+    }};
+}
+
+/// Load an u64 using up to 7 bytes of a byte slice.
+///
+/// Unsafe because: unchecked indexing at start..start+len
+#[inline]
+unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
+    debug_assert!(len < 8);
+    let mut i = 0; // current byte index (from LSB) in the output u64
+    let mut out = 0;
+    if i + 3 < len {
+        out = load_int_le!(buf, start + i, u32) as u64;
+        i += 4;
+    }
+    if i + 1 < len {
+        out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8);
+        i += 2
+    }
+    if i < len {
+        out |= (*buf.get_unchecked(start + i) as u64) << (i * 8);
+        i += 1;
+    }
+    debug_assert_eq!(i, len);
+    out
+}
+
+impl SipHasher13 {
+    /// Creates a new `SipHasher13` with the two initial keys set to 0.
+    #[inline]
+    pub fn new() -> SipHasher13 {
+        SipHasher13::new_with_keys(0, 0)
+    }
+
+    /// Creates a `SipHasher13` that is keyed off the provided keys.
+    #[inline]
+    pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 {
+        let mut sip_hasher = SipHasher13 {
+            k0: key0,
+            k1: key1,
+            length: 0,
+            state: State { v0: 0, v1: 0, v2: 0, v3: 0 },
+            tail: 0,
+            ntail: 0,
+        };
+        sip_hasher.reset();
+        sip_hasher
+    }
+
+    #[inline]
+    fn c_rounds(state: &mut State) {
+        compress!(state);
+    }
+
+    #[inline]
+    fn d_rounds(state: &mut State) {
+        compress!(state);
+        compress!(state);
+        compress!(state);
+    }
+
+    #[inline]
+    fn reset(&mut self) {
+        self.length = 0;
+        self.state.v0 = self.k0 ^ 0x736f6d6570736575;
+        self.state.v1 = self.k1 ^ 0x646f72616e646f6d;
+        self.state.v2 = self.k0 ^ 0x6c7967656e657261;
+        self.state.v3 = self.k1 ^ 0x7465646279746573;
+        self.ntail = 0;
+    }
+
+    // Specialized write function that is only valid for buffers with len <= 8.
+    // It's used to force inlining of write_u8 and write_usize, those would normally be inlined
+    // except for composite types (that includes slices and str hashing because of delimiter).
+    // Without this extra push the compiler is very reluctant to inline delimiter writes,
+    // degrading performance substantially for the most common use cases.
+    #[inline]
+    fn short_write(&mut self, msg: &[u8]) {
+        debug_assert!(msg.len() <= 8);
+        let length = msg.len();
+        self.length += length;
+
+        let needed = 8 - self.ntail;
+        let fill = cmp::min(length, needed);
+        if fill == 8 {
+            // safe: fill == 8 implies msg.len() >= 8, so a full u64 can be read
+            self.tail = unsafe { load_int_le!(msg, 0, u64) };
+        } else {
+            // safe: fill < 8 and fill <= msg.len(), so the read stays in bounds
+            self.tail |= unsafe { u8to64_le(msg, 0, fill) } << (8 * self.ntail);
+            if length < needed {
+                self.ntail += length;
+                return;
+            }
+        }
+        self.state.v3 ^= self.tail;
+        Self::c_rounds(&mut self.state);
+        self.state.v0 ^= self.tail;
+
+        // Buffered tail is now flushed, process new input.
+        self.ntail = length - needed;
+        // safe to call since number of `needed` bytes has been loaded
+        // and self.ntail + needed == msg.len()
+        self.tail = unsafe { u8to64_le(msg, needed, self.ntail) };
+    }
+}
+
+impl Hasher for SipHasher13 {
+    // see short_write comment for explanation
+    #[inline]
+    fn write_usize(&mut self, i: usize) {
+        // safe: views the usize's own bytes as a byte slice of matching length
+        let bytes = unsafe {
+            slice::from_raw_parts(&i as *const usize as *const u8, mem::size_of::<usize>())
+        };
+        self.short_write(bytes);
+    }
+
+    // see short_write comment for explanation
+    #[inline]
+    fn write_u8(&mut self, i: u8) {
+        self.short_write(&[i]);
+    }
+
+    #[inline]
+    fn write(&mut self, msg: &[u8]) {
+        let length = msg.len();
+        self.length += length;
+
+        let mut needed = 0;
+
+        // loading unprocessed byte from last write
+        if self.ntail != 0 {
+            needed = 8 - self.ntail;
+            // safe to call, since msg hasn't been processed
+            // and cmp::min(length, needed) < 8
+            self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail;
+            if length < needed {
+                self.ntail += length;
+                return;
+            } else {
+                self.state.v3 ^= self.tail;
+                Self::c_rounds(&mut self.state);
+                self.state.v0 ^= self.tail;
+                self.ntail = 0;
+            }
+        }
+
+        // Buffered tail is now flushed, process new input.
+        let len = length - needed;
+        let left = len & 0x7;
+
+        let mut i = needed;
+        while i < len - left {
+            // safe to call since i < len - left implies at least 8 more bytes of msg remain
+            let mi = unsafe { load_int_le!(msg, i, u64) };
+
+            self.state.v3 ^= mi;
+            Self::c_rounds(&mut self.state);
+            self.state.v0 ^= mi;
+
+            i += 8;
+        }
+
+        // safe to call: if left == 0 this loads nothing; if left > 0, the final
+        // `left` bytes of msg are still unprocessed and within bounds
+        self.tail = unsafe { u8to64_le(msg, i, left) };
+        self.ntail = left;
+    }
+
+    #[inline]
+    fn finish(&self) -> u64 {
+        let mut state = self.state;
+
+        let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail;
+
+        state.v3 ^= b;
+        Self::c_rounds(&mut state);
+        state.v0 ^= b;
+
+        state.v2 ^= 0xff;
+        Self::d_rounds(&mut state);
+
+        state.v0 ^ state.v1 ^ state.v2 ^ state.v3
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use std::hash::{Hash, Hasher};
+    use std::string::String;
+
+    #[test]
+    // this test locks down the string hash values
+    fn test_sip_hash13_string_hash() {
+        let mut sip_hash13 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        test_str1.hash(&mut sip_hash13);
+        assert_eq!(17898838669067067585, sip_hash13.finish());
+
+        let test_str2 = String::from("adfadfadf adfafadadf 1231241241");
+        test_str2.hash(&mut sip_hash13);
+        assert_eq!(13543518987672889310, sip_hash13.finish());
+    }
+
+    #[test]
+    fn test_sip_hash13_write() {
+        let mut sip_hash13 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        sip_hash13.write(test_str1.as_bytes());
+        sip_hash13.write_u8(0xff);
+        assert_eq!(17898838669067067585, sip_hash13.finish());
+
+        let mut sip_hash132 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        sip_hash132.write(test_str1.as_bytes());
+        assert_eq!(9685440969685209025, sip_hash132.finish());
+        sip_hash132.write(test_str1.as_bytes());
+        assert_eq!(6719694176662736568, sip_hash132.finish());
+
+        let mut sip_hash133 = SipHasher13::new();
+        let test_str2 = String::from("abcdefg");
+        test_str2.hash(&mut sip_hash133);
+        assert_eq!(2492161047327640297, sip_hash133.finish());
+
+        let mut sip_hash134 = SipHasher13::new();
+        let test_str3 = String::from("abcdefgh");
+        test_str3.hash(&mut sip_hash134);
+        assert_eq!(6689927370435554326, sip_hash134.finish());
+    }
+
+    #[test]
+    fn test_sip_hash13_write_short() {
+        let mut sip_hash13 = SipHasher13::new();
+        sip_hash13.write_u8(0x61);
+        assert_eq!(4644417185603328019, sip_hash13.finish());
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/BooleanFlagValue.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java
similarity index 65%
rename from tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/BooleanFlagValue.java
rename to tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java
index 11fe447..86a75f2 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/BooleanFlagValue.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java
@@ -1,4 +1,3 @@
-package android.aconfig.storage;
 /*
  * Copyright (C) 2024 The Android Open Source Project
  *
@@ -15,16 +14,14 @@
  * limitations under the License.
  */
 
-public class BooleanFlagValue {
-    public boolean mQuerySuccess;
-    public String mErrorMessage;
-    public boolean mFlagValue;
+package android.aconfig.storage;
 
-    public BooleanFlagValue(boolean querySuccess,
-            String errorMessage,
-            boolean value) {
-        mQuerySuccess = querySuccess;
-        mErrorMessage = errorMessage;
-        mFlagValue = value;
+public class AconfigStorageException extends RuntimeException {
+    public AconfigStorageException(String msg) {
+        super(msg);
+    }
+
+    public AconfigStorageException(String msg, Throwable cause) {
+        super(msg, cause);
     }
 }
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
new file mode 100644
index 0000000..4bea083
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
+
+public class ByteBufferReader {
+
+    private ByteBuffer mByteBuffer;
+
+    public ByteBufferReader(ByteBuffer byteBuffer) {
+        this.mByteBuffer = byteBuffer;
+        this.mByteBuffer.order(ByteOrder.LITTLE_ENDIAN);
+    }
+
+    public int readByte() {
+        return Byte.toUnsignedInt(mByteBuffer.get());
+    }
+
+    public int readShort() {
+        return Short.toUnsignedInt(mByteBuffer.getShort());
+    }
+
+    public int readInt() {
+        return this.mByteBuffer.getInt();
+    }
+
+    public String readString() {
+        int length = readInt();
+        byte[] bytes = new byte[length];
+        mByteBuffer.get(bytes, 0, length);
+        return new String(bytes, StandardCharsets.UTF_8);
+    }
+
+    public int readByte(int i) {
+        return Byte.toUnsignedInt(mByteBuffer.get(i));
+    }
+
+    public void position(int newPosition) {
+        mByteBuffer.position(newPosition);
+    }
+}
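To make the reader semantics above concrete: the wrapped buffer is forced to little-endian order, readByte() and readShort() widen to non-negative ints, and readString() expects a 4-byte length prefix followed by UTF-8 bytes. A small self-contained sketch (values chosen only for illustration):

    import android.aconfig.storage.ByteBufferReader;

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.nio.charset.StandardCharsets;

    class ByteBufferReaderSketch {
        static void demo() {
            byte[] name = "mockup".getBytes(StandardCharsets.UTF_8);
            ByteBuffer buffer = ByteBuffer.allocate(7 + 4 + name.length);
            buffer.order(ByteOrder.LITTLE_ENDIAN);
            buffer.put((byte) 0xFF);                 // widened to 255, not -1
            buffer.putShort((short) 0xFFFF);         // widened to 65535
            buffer.putInt(name.length).put(name);    // length-prefixed string
            buffer.rewind();

            ByteBufferReader reader = new ByteBufferReader(buffer);
            assert reader.readByte() == 255;
            assert reader.readShort() == 65535;
            assert "mockup".equals(reader.readString());
        }
    }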
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java
new file mode 100644
index 0000000..b0b1b9b
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+public enum FileType {
+    PACKAGE_MAP(0),
+    FLAG_MAP(1),
+    FLAG_VAL(2),
+    FLAG_INFO(3);
+
+    public final int type;
+
+    FileType(int type) {
+        this.type = type;
+    }
+
+    public static FileType fromInt(int index) {
+        switch (index) {
+            case 0:
+                return PACKAGE_MAP;
+            case 1:
+                return FLAG_MAP;
+            case 2:
+                return FLAG_VAL;
+            case 3:
+                return FLAG_INFO;
+            default:
+                return null;
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
new file mode 100644
index 0000000..9838a7c
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
@@ -0,0 +1,183 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import java.nio.ByteBuffer;
+import java.util.Objects;
+
+public class FlagTable {
+
+    private Header mHeader;
+    private ByteBufferReader mReader;
+
+    public static FlagTable fromBytes(ByteBuffer bytes) {
+        FlagTable flagTable = new FlagTable();
+        flagTable.mReader = new ByteBufferReader(bytes);
+        flagTable.mHeader = Header.fromBytes(flagTable.mReader);
+
+        return flagTable;
+    }
+
+    public Node get(int packageId, String flagName) {
+        int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
+        int bucketIndex = TableUtils.getBucketIndex(makeKey(packageId, flagName), numBuckets);
+
+        mReader.position(mHeader.mBucketOffset + bucketIndex * 4);
+        int nodeIndex = mReader.readInt();
+
+        while (nodeIndex != -1) {
+            mReader.position(nodeIndex);
+            Node node = Node.fromBytes(mReader);
+            if (Objects.equals(flagName, node.mFlagName) && packageId == node.mPackageId) {
+                return node;
+            }
+            nodeIndex = node.mNextOffset;
+        }
+
+        throw new AconfigStorageException("get cannot find flag: " + flagName);
+    }
+
+    public Header getHeader() {
+        return mHeader;
+    }
+
+    private static byte[] makeKey(int packageId, String flagName) {
+        StringBuilder ret = new StringBuilder();
+        return ret.append(packageId).append('/').append(flagName).toString().getBytes(UTF_8);
+    }
+
+    public static class Header {
+
+        private int mVersion;
+        private String mContainer;
+        private FileType mFileType;
+        private int mFileSize;
+        private int mNumFlags;
+        private int mBucketOffset;
+        private int mNodeOffset;
+
+        public static Header fromBytes(ByteBufferReader reader) {
+            Header header = new Header();
+            header.mVersion = reader.readInt();
+            header.mContainer = reader.readString();
+            header.mFileType = FileType.fromInt(reader.readByte());
+            header.mFileSize = reader.readInt();
+            header.mNumFlags = reader.readInt();
+            header.mBucketOffset = reader.readInt();
+            header.mNodeOffset = reader.readInt();
+
+            if (header.mFileType != FileType.FLAG_MAP) {
+                throw new AconfigStorageException("binary file is not a flag map");
+            }
+
+            return header;
+        }
+
+        public int getVersion() {
+            return mVersion;
+        }
+
+        public String getContainer() {
+            return mContainer;
+        }
+
+        public FileType getFileType() {
+            return mFileType;
+        }
+
+        public int getFileSize() {
+            return mFileSize;
+        }
+
+        public int getNumFlags() {
+            return mNumFlags;
+        }
+
+        public int getBucketOffset() {
+            return mBucketOffset;
+        }
+
+        public int getNodeOffset() {
+            return mNodeOffset;
+        }
+    }
+
+    public static class Node {
+
+        private String mFlagName;
+        private FlagType mFlagType;
+        private int mPackageId;
+        private int mFlagIndex;
+        private int mNextOffset;
+
+        public static Node fromBytes(ByteBufferReader reader) {
+            Node node = new Node();
+            node.mPackageId = reader.readInt();
+            node.mFlagName = reader.readString();
+            node.mFlagType = FlagType.fromInt(reader.readShort());
+            node.mFlagIndex = reader.readShort();
+            node.mNextOffset = reader.readInt();
+            node.mNextOffset = node.mNextOffset == 0 ? -1 : node.mNextOffset;
+            return node;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(mFlagName, mFlagType, mPackageId, mFlagIndex, mNextOffset);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (this == obj) {
+                return true;
+            }
+
+            if (obj == null || !(obj instanceof Node)) {
+                return false;
+            }
+
+            Node other = (Node) obj;
+            return Objects.equals(mFlagName, other.mFlagName)
+                    && Objects.equals(mFlagType, other.mFlagType)
+                    && mPackageId == other.mPackageId
+                    && mFlagIndex == other.mFlagIndex
+                    && mNextOffset == other.mNextOffset;
+        }
+
+        public String getFlagName() {
+            return mFlagName;
+        }
+
+        public FlagType getFlagType() {
+            return mFlagType;
+        }
+
+        public int getPackageId() {
+            return mPackageId;
+        }
+
+        public int getFlagIndex() {
+            return mFlagIndex;
+        }
+
+        public int getNextOffset() {
+            return mNextOffset;
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagType.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagType.java
new file mode 100644
index 0000000..385e2d9
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagType.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+public enum FlagType {
+    ReadWriteBoolean(0),
+    ReadOnlyBoolean(1),
+    FixedReadOnlyBoolean(2);
+
+    public final int type;
+
+    FlagType(int type) {
+        this.type = type;
+    }
+
+    public static FlagType fromInt(int index) {
+        switch (index) {
+            case 0:
+                return ReadWriteBoolean;
+            case 1:
+                return ReadOnlyBoolean;
+            case 2:
+                return FixedReadOnlyBoolean;
+            default:
+                return null;
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
new file mode 100644
index 0000000..493436d
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+import java.nio.ByteBuffer;
+
+public class FlagValueList {
+
+    private Header mHeader;
+    private ByteBufferReader mReader;
+
+    public static FlagValueList fromBytes(ByteBuffer bytes) {
+        FlagValueList flagValueList = new FlagValueList();
+        flagValueList.mReader = new ByteBufferReader(bytes);
+        flagValueList.mHeader = Header.fromBytes(flagValueList.mReader);
+        return flagValueList;
+    }
+
+    public boolean getBoolean(int index) {
+        return mReader.readByte(mHeader.mBooleanValueOffset + index) == 1;
+    }
+
+    public Header getHeader() {
+        return mHeader;
+    }
+
+    public int size() {
+        return mHeader.mNumFlags;
+    }
+
+    public static class Header {
+
+        private int mVersion;
+        private String mContainer;
+        private FileType mFileType;
+        private int mFileSize;
+        private int mNumFlags;
+        private int mBooleanValueOffset;
+
+        public static Header fromBytes(ByteBufferReader reader) {
+            Header header = new Header();
+            header.mVersion = reader.readInt();
+            header.mContainer = reader.readString();
+            header.mFileType = FileType.fromInt(reader.readByte());
+            header.mFileSize = reader.readInt();
+            header.mNumFlags = reader.readInt();
+            header.mBooleanValueOffset = reader.readInt();
+
+            if (header.mFileType != FileType.FLAG_VAL) {
+                throw new AconfigStorageException("binary file is not a flag value file");
+            }
+
+            return header;
+        }
+
+        public int getVersion() {
+            return mVersion;
+        }
+
+        public String getContainer() {
+            return mContainer;
+        }
+
+        public FileType getFileType() {
+            return mFileType;
+        }
+
+        public int getFileSize() {
+            return mFileSize;
+        }
+
+        public int getNumFlags() {
+            return mNumFlags;
+        }
+
+        public int getBooleanValueOffset() {
+            return mBooleanValueOffset;
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
new file mode 100644
index 0000000..773b882
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import java.nio.ByteBuffer;
+import java.util.Objects;
+
+public class PackageTable {
+
+    private Header mHeader;
+    private ByteBufferReader mReader;
+
+    public static PackageTable fromBytes(ByteBuffer bytes) {
+        PackageTable packageTable = new PackageTable();
+        packageTable.mReader = new ByteBufferReader(bytes);
+        packageTable.mHeader = Header.fromBytes(packageTable.mReader);
+
+        return packageTable;
+    }
+
+    public Node get(String packageName) {
+
+        int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
+        int bucketIndex = TableUtils.getBucketIndex(packageName.getBytes(UTF_8), numBuckets);
+
+        mReader.position(mHeader.mBucketOffset + bucketIndex * 4);
+        int nodeIndex = mReader.readInt();
+
+        while (nodeIndex != -1) {
+            mReader.position(nodeIndex);
+            Node node = Node.fromBytes(mReader);
+            if (Objects.equals(packageName, node.mPackageName)) {
+                return node;
+            }
+            nodeIndex = node.mNextOffset;
+        }
+
+        throw new AconfigStorageException("get cannot find package: " + packageName);
+    }
+
+    public Header getHeader() {
+        return mHeader;
+    }
+
+    public static class Header {
+
+        private int mVersion;
+        private String mContainer;
+        private FileType mFileType;
+        private int mFileSize;
+        private int mNumPackages;
+        private int mBucketOffset;
+        private int mNodeOffset;
+
+        public static Header fromBytes(ByteBufferReader reader) {
+            Header header = new Header();
+            header.mVersion = reader.readInt();
+            header.mContainer = reader.readString();
+            header.mFileType = FileType.fromInt(reader.readByte());
+            header.mFileSize = reader.readInt();
+            header.mNumPackages = reader.readInt();
+            header.mBucketOffset = reader.readInt();
+            header.mNodeOffset = reader.readInt();
+
+            if (header.mFileType != FileType.PACKAGE_MAP) {
+                throw new AconfigStorageException("binary file is not a package map");
+            }
+
+            return header;
+        }
+
+        public int getVersion() {
+            return mVersion;
+        }
+
+        public String getContainer() {
+            return mContainer;
+        }
+
+        public FileType getFileType() {
+            return mFileType;
+        }
+
+        public int getFileSize() {
+            return mFileSize;
+        }
+
+        public int getNumPackages() {
+            return mNumPackages;
+        }
+
+        public int getBucketOffset() {
+            return mBucketOffset;
+        }
+
+        public int getNodeOffset() {
+            return mNodeOffset;
+        }
+    }
+
+    public static class Node {
+
+        private String mPackageName;
+        private int mPackageId;
+        private int mBooleanStartIndex;
+        private int mNextOffset;
+
+        public static Node fromBytes(ByteBufferReader reader) {
+            Node node = new Node();
+            node.mPackageName = reader.readString();
+            node.mPackageId = reader.readInt();
+            node.mBooleanStartIndex = reader.readInt();
+            node.mNextOffset = reader.readInt();
+            node.mNextOffset = node.mNextOffset == 0 ? -1 : node.mNextOffset;
+            return node;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(mPackageName, mPackageId, mBooleanStartIndex, mNextOffset);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (this == obj) {
+                return true;
+            }
+
+            if (obj == null || !(obj instanceof Node)) {
+                return false;
+            }
+
+            Node other = (Node) obj;
+            return Objects.equals(mPackageName, other.mPackageName)
+                    && mPackageId == other.mPackageId
+                    && mBooleanStartIndex == other.mBooleanStartIndex
+                    && mNextOffset == other.mNextOffset;
+        }
+
+        public String getPackageName() {
+            return mPackageName;
+        }
+
+        public int getPackageId() {
+            return mPackageId;
+        }
+
+        public int getBooleanStartIndex() {
+            return mBooleanStartIndex;
+        }
+
+        public int getNextOffset() {
+            return mNextOffset;
+        }
+    }
+}
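Taken together, PackageTable, FlagTable and FlagValueList are enough to resolve a boolean flag the same way the generated Rust accessors above do: the package node's boolean start index plus the per-flag index selects the byte in flag.val. A sketch under assumptions: the load() helper, directory layout, and the package/flag names are hypothetical, and how the buffers are actually mapped on device is out of scope here.

    import android.aconfig.storage.FlagTable;
    import android.aconfig.storage.FlagValueList;
    import android.aconfig.storage.PackageTable;

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;

    class StorageReadSketch {
        // Hypothetical helper: read a storage file fully into a ByteBuffer.
        static ByteBuffer load(String path) throws IOException {
            try (FileInputStream in = new FileInputStream(path)) {
                return ByteBuffer.wrap(in.readAllBytes());
            }
        }

        static boolean readBooleanFlag(String dir, String packageName, String flagName)
                throws IOException {
            PackageTable packages = PackageTable.fromBytes(load(dir + "/package.map"));
            FlagTable flags = FlagTable.fromBytes(load(dir + "/flag.map"));
            FlagValueList values = FlagValueList.fromBytes(load(dir + "/flag.val"));

            PackageTable.Node packageNode = packages.get(packageName);
            FlagTable.Node flagNode = flags.get(packageNode.getPackageId(), flagName);

            // Same index arithmetic as the generated Rust accessors.
            return values.getBoolean(
                    packageNode.getBooleanStartIndex() + flagNode.getFlagIndex());
        }
    }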
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java
new file mode 100644
index 0000000..64714ee
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+public class SipHasher13 {
+    static class State {
+        private long v0;
+        private long v2;
+        private long v1;
+        private long v3;
+
+        public State(long k0, long k1) {
+            v0 = k0 ^ 0x736f6d6570736575L;
+            v1 = k1 ^ 0x646f72616e646f6dL;
+            v2 = k0 ^ 0x6c7967656e657261L;
+            v3 = k1 ^ 0x7465646279746573L;
+        }
+
+        public void compress(long m) {
+            v3 ^= m;
+            cRounds();
+            v0 ^= m;
+        }
+
+        public long finish() {
+            v2 ^= 0xff;
+            dRounds();
+            return v0 ^ v1 ^ v2 ^ v3;
+        }
+
+        private void cRounds() {
+            v0 += v1;
+            v1 = Long.rotateLeft(v1, 13);
+            v1 ^= v0;
+            v0 = Long.rotateLeft(v0, 32);
+            v2 += v3;
+            v3 = Long.rotateLeft(v3, 16);
+            v3 ^= v2;
+            v0 += v3;
+            v3 = Long.rotateLeft(v3, 21);
+            v3 ^= v0;
+            v2 += v1;
+            v1 = Long.rotateLeft(v1, 17);
+            v1 ^= v2;
+            v2 = Long.rotateLeft(v2, 32);
+        }
+
+        private void dRounds() {
+            for (int i = 0; i < 3; i++) {
+                v0 += v1;
+                v1 = Long.rotateLeft(v1, 13);
+                v1 ^= v0;
+                v0 = Long.rotateLeft(v0, 32);
+                v2 += v3;
+                v3 = Long.rotateLeft(v3, 16);
+                v3 ^= v2;
+                v0 += v3;
+                v3 = Long.rotateLeft(v3, 21);
+                v3 ^= v0;
+                v2 += v1;
+                v1 = Long.rotateLeft(v1, 17);
+                v1 ^= v2;
+                v2 = Long.rotateLeft(v2, 32);
+            }
+        }
+    }
+
+    public static long hash(byte[] data) {
+        State state = new State(0, 0);
+        int len = data.length;
+        int left = len & 0x7;
+        int index = 0;
+
+        while (index < len - left) {
+            long mi = loadLe(data, index, 8);
+            index += 8;
+            state.compress(mi);
+        }
+
+        // pad the end with 0xff to stay consistent with the Rust hasher
+        long m = (0xffL << (left * 8)) | loadLe(data, index, left);
+        if (left == 0x7) {
+            // the 0xff pad completed this word; compress it and start a new word
+            state.compress(m);
+            m = 0L;
+        }
+        // the length byte counts the 0xff pad (len + 1), matching the Rust hasher
+        m |= (((len + 1) & 0xffL) << 56);
+        state.compress(m);
+
+        return state.finish();
+    }
+
+    private static long loadLe(byte[] data, int offset, int size) {
+        long m = 0;
+        for (int i = 0; i < size; i++) {
+            m |= (data[i + offset] & 0xffL) << (i * 8);
+        }
+        return m;
+    }
+}
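SipHasher13.hash() pads every key with a trailing 0xff byte and counts that byte in the length, which is what the Rust get_bucket_index() does via write(key) followed by write_u8(0xff). Assuming the two implementations stay in lockstep, the Rust test vector from sip_hasher13.rs should be reproducible from Java; a sketch (the expected value is the one asserted in test_sip_hash13_write, compared through its unsigned decimal form because Java longs are signed):

    import android.aconfig.storage.SipHasher13;

    import java.nio.charset.StandardCharsets;

    class SipHasher13ConsistencySketch {
        static void check() {
            byte[] key = "com.google.android.test".getBytes(StandardCharsets.UTF_8);
            long hash = SipHasher13.hash(key);
            // Value taken from the Rust unit test (write(key) + write_u8(0xff)).
            assert "17898838669067067585".equals(Long.toUnsignedString(hash));
        }
    }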
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
new file mode 100644
index 0000000..81168f5
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+public class TableUtils {
+
+    private static final int[] HASH_PRIMES =
+            new int[] {
+                7,
+                17,
+                29,
+                53,
+                97,
+                193,
+                389,
+                769,
+                1543,
+                3079,
+                6151,
+                12289,
+                24593,
+                49157,
+                98317,
+                196613,
+                393241,
+                786433,
+                1572869,
+                3145739,
+                6291469,
+                12582917,
+                25165843,
+                50331653,
+                100663319,
+                201326611,
+                402653189,
+                805306457,
+                1610612741
+            };
+
+    public static int getTableSize(int numEntries) {
+        for (int i : HASH_PRIMES) {
+            if (i < 2 * numEntries) continue;
+            return i;
+        }
+        throw new AconfigStorageException("Number of items in a hash table exceeds limit");
+    }
+
+    public static int getBucketIndex(byte[] val, int numBuckets) {
+        long hashVal = SipHasher13.hash(val);
+        return (int) Long.remainderUnsigned(hashVal, numBuckets);
+    }
+}
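getTableSize() picks the first prime that is at least twice the entry count, and getBucketIndex() reduces the SipHash13 value of a UTF-8 key into that range with an unsigned remainder. A short sketch; the package name is hypothetical, and the concrete bucket only has to match whatever the Rust writer computed for the same key and bucket count.

    import android.aconfig.storage.TableUtils;

    import java.nio.charset.StandardCharsets;

    class TableUtilsSketch {
        static void demo() {
            int numBuckets = TableUtils.getTableSize(8); // first prime >= 16 is 17
            assert numBuckets == 17;

            byte[] key = "com.example.flags".getBytes(StandardCharsets.UTF_8);
            int bucket = TableUtils.getBucketIndex(key, numBuckets);
            assert 0 <= bucket && bucket < numBuckets; // remainderUnsigned keeps it in range
        }
    }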
diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp
index 26b7800..12e4aca 100644
--- a/tools/aconfig/aconfig_storage_file/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp
@@ -1,4 +1,3 @@
-
 cc_test {
     name: "aconfig_storage_file.test.cpp",
     team: "trendy_team_android_core_experiments",
@@ -21,3 +20,28 @@
         "general-tests",
     ],
 }
+
+android_test {
+    name: "aconfig_storage_file.test.java",
+    team: "trendy_team_android_core_experiments",
+    srcs: [
+        "srcs/**/*.java",
+    ],
+    static_libs: [
+        "androidx.test.runner",
+        "junit",
+        "aconfig_storage_file_java",
+    ],
+    test_config: "AndroidStorageJaveTest.xml",
+    sdk_version: "test_current",
+    data: [
+        "package.map",
+        "flag.map",
+        "flag.val",
+        "flag.info",
+    ],
+    test_suites: [
+        "general-tests",
+    ],
+    jarjar_rules: "jarjar.txt",
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/AndroidManifest.xml b/tools/aconfig/aconfig_storage_file/tests/AndroidManifest.xml
new file mode 100644
index 0000000..5e01879
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/AndroidManifest.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  ~ Copyright (C) 2024 The Android Open Source Project
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="android.aconfig.storage.test">
+    <application>
+        <uses-library android:name="android.test.runner" />
+    </application>
+
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+                     android:targetPackage="android.aconfig.storage.test" />
+
+</manifest>
diff --git a/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml b/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml
new file mode 100644
index 0000000..2d52d44
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  ~ Copyright (C) 2024 The Android Open Source Project
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<configuration description="Runs the aconfig storage file Java tests">
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
+        <option name="test-file-name" value="aconfig_storage_file.test.java.apk" />
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher">
+        <option name="cleanup" value="true" />
+        <option name="push" value="package.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/package.map" />
+        <option name="push" value="flag.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.map" />
+        <option name="push" value="flag.val->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.val" />
+        <option name="push" value="flag.info->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.info" />
+    </target_preparer>
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="package" value="android.aconfig.storage.test" />
+        <option name="runtime-hint" value="1m" />
+    </test>
+</configuration>
\ No newline at end of file
diff --git a/tools/aconfig/aconfig_storage_file/tests/jarjar.txt b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt
new file mode 100644
index 0000000..a6c17fa
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt
@@ -0,0 +1,15 @@
+rule android.aconfig.storage.AconfigStorageException android.aconfig.storage.test.AconfigStorageException
+rule android.aconfig.storage.FlagTable android.aconfig.storage.test.FlagTable
+rule android.aconfig.storage.PackageTable android.aconfig.storage.test.PackageTable
+rule android.aconfig.storage.ByteBufferReader android.aconfig.storage.test.ByteBufferReader
+rule android.aconfig.storage.FlagType android.aconfig.storage.test.FlagType
+rule android.aconfig.storage.SipHasher13 android.aconfig.storage.test.SipHasher13
+rule android.aconfig.storage.FileType android.aconfig.storage.test.FileType
+rule android.aconfig.storage.FlagValueList android.aconfig.storage.test.FlagValueList
+rule android.aconfig.storage.TableUtils android.aconfig.storage.test.TableUtils
+
+
+rule android.aconfig.storage.FlagTable$* android.aconfig.storage.test.FlagTable$@1
+rule android.aconfig.storage.PackageTable$* android.aconfig.storage.test.PackageTable$@1
+rule android.aconfig.storage.FlagValueList$* android.aconfig.storage.test.FlagValueList$@1
+rule android.aconfig.storage.SipHasher13$* android.aconfig.storage.test.SipHasher13$@1
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/ByteBufferReaderTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/ByteBufferReaderTest.java
new file mode 100644
index 0000000..66a8166
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/ByteBufferReaderTest.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+
+import android.aconfig.storage.ByteBufferReader;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
+
+@RunWith(JUnit4.class)
+public class ByteBufferReaderTest {
+
+    @Test
+    public void testReadByte() {
+        ByteBuffer buffer = ByteBuffer.allocate(1);
+        byte expect = 10;
+        buffer.put(expect).rewind();
+
+        ByteBufferReader reader = new ByteBufferReader(buffer);
+        assertEquals(expect, reader.readByte());
+    }
+
+    @Test
+    public void testReadShort() {
+        ByteBuffer buffer = ByteBuffer.allocate(4);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        short expect = Short.MAX_VALUE;
+        buffer.putShort(expect).rewind();
+
+        ByteBufferReader reader = new ByteBufferReader(buffer);
+        assertEquals(expect, reader.readShort());
+    }
+
+    @Test
+    public void testReadInt() {
+        ByteBuffer buffer = ByteBuffer.allocate(4);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        int expect = 10000;
+        buffer.putInt(expect).rewind();
+
+        ByteBufferReader reader = new ByteBufferReader(buffer);
+        assertEquals(expect, reader.readInt());
+    }
+
+    @Test
+    public void testReadString() {
+        String expect = "test read string";
+        byte[] bytes = expect.getBytes(StandardCharsets.UTF_8);
+
+        ByteBuffer buffer = ByteBuffer.allocate(expect.length() * 2 + 4);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        buffer.putInt(expect.length()).put(bytes).rewind();
+
+        ByteBufferReader reader = new ByteBufferReader(buffer);
+
+        assertEquals(expect, reader.readString());
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java
new file mode 100644
index 0000000..fd40d4c
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+
+import android.aconfig.storage.FileType;
+import android.aconfig.storage.FlagTable;
+import android.aconfig.storage.FlagType;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class FlagTableTest {
+
+    @Test
+    public void testFlagTable_rightHeader() throws Exception {
+        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer());
+        FlagTable.Header header = flagTable.getHeader();
+        assertEquals(1, header.getVersion());
+        assertEquals("mockup", header.getContainer());
+        assertEquals(FileType.FLAG_MAP, header.getFileType());
+        assertEquals(321, header.getFileSize());
+        assertEquals(8, header.getNumFlags());
+        assertEquals(31, header.getBucketOffset());
+        assertEquals(99, header.getNodeOffset());
+    }
+
+    @Test
+    public void testFlagTable_rightNode() throws Exception {
+        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer());
+
+        FlagTable.Node node1 = flagTable.get(0, "enabled_ro");
+        FlagTable.Node node2 = flagTable.get(0, "enabled_rw");
+        FlagTable.Node node3 = flagTable.get(2, "enabled_rw");
+        FlagTable.Node node4 = flagTable.get(1, "disabled_rw");
+        FlagTable.Node node5 = flagTable.get(1, "enabled_fixed_ro");
+        FlagTable.Node node6 = flagTable.get(1, "enabled_ro");
+        FlagTable.Node node7 = flagTable.get(2, "enabled_fixed_ro");
+        FlagTable.Node node8 = flagTable.get(0, "disabled_rw");
+
+        assertEquals("enabled_ro", node1.getFlagName());
+        assertEquals("enabled_rw", node2.getFlagName());
+        assertEquals("enabled_rw", node3.getFlagName());
+        assertEquals("disabled_rw", node4.getFlagName());
+        assertEquals("enabled_fixed_ro", node5.getFlagName());
+        assertEquals("enabled_ro", node6.getFlagName());
+        assertEquals("enabled_fixed_ro", node7.getFlagName());
+        assertEquals("disabled_rw", node8.getFlagName());
+
+        assertEquals(0, node1.getPackageId());
+        assertEquals(0, node2.getPackageId());
+        assertEquals(2, node3.getPackageId());
+        assertEquals(1, node4.getPackageId());
+        assertEquals(1, node5.getPackageId());
+        assertEquals(1, node6.getPackageId());
+        assertEquals(2, node7.getPackageId());
+        assertEquals(0, node8.getPackageId());
+
+        assertEquals(FlagType.ReadOnlyBoolean, node1.getFlagType());
+        assertEquals(FlagType.ReadWriteBoolean, node2.getFlagType());
+        assertEquals(FlagType.ReadWriteBoolean, node3.getFlagType());
+        assertEquals(FlagType.ReadWriteBoolean, node4.getFlagType());
+        assertEquals(FlagType.FixedReadOnlyBoolean, node5.getFlagType());
+        assertEquals(FlagType.ReadOnlyBoolean, node6.getFlagType());
+        assertEquals(FlagType.FixedReadOnlyBoolean, node7.getFlagType());
+        assertEquals(FlagType.ReadWriteBoolean, node8.getFlagType());
+
+        assertEquals(1, node1.getFlagIndex());
+        assertEquals(2, node2.getFlagIndex());
+        assertEquals(1, node3.getFlagIndex());
+        assertEquals(0, node4.getFlagIndex());
+        assertEquals(1, node5.getFlagIndex());
+        assertEquals(2, node6.getFlagIndex());
+        assertEquals(0, node7.getFlagIndex());
+        assertEquals(0, node8.getFlagIndex());
+
+        assertEquals(-1, node1.getNextOffset());
+        assertEquals(151, node2.getNextOffset());
+        assertEquals(-1, node3.getNextOffset());
+        assertEquals(-1, node4.getNextOffset());
+        assertEquals(236, node5.getNextOffset());
+        assertEquals(-1, node6.getNextOffset());
+        assertEquals(-1, node7.getNextOffset());
+        assertEquals(-1, node8.getNextOffset());
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
new file mode 100644
index 0000000..1b0de63
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import android.aconfig.storage.FileType;
+import android.aconfig.storage.FlagTable;
+import android.aconfig.storage.FlagValueList;
+import android.aconfig.storage.PackageTable;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class FlagValueListTest {
+
+    @Test
+    public void testFlagValueList_rightHeader() throws Exception {
+        FlagValueList flagValueList =
+                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer());
+        FlagValueList.Header header = flagValueList.getHeader();
+        assertEquals(1, header.getVersion());
+        assertEquals("mockup", header.getContainer());
+        assertEquals(FileType.FLAG_VAL, header.getFileType());
+        assertEquals(35, header.getFileSize());
+        assertEquals(8, header.getNumFlags());
+        assertEquals(27, header.getBooleanValueOffset());
+    }
+
+    @Test
+    public void testFlagValueList_rightNode() throws Exception {
+        FlagValueList flagValueList =
+                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer());
+
+        boolean[] expected = new boolean[] {false, true, true, false, true, true, true, true};
+        assertEquals(expected.length, flagValueList.size());
+
+        for (int i = 0; i < flagValueList.size(); i++) {
+            assertEquals(expected[i], flagValueList.getBoolean(i));
+        }
+    }
+
+    @Test
+    public void testFlagValueList_getValue() throws Exception {
+        PackageTable packageTable =
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer());
+        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer());
+
+        FlagValueList flagValueList =
+                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer());
+
+        PackageTable.Node pNode = packageTable.get("com.android.aconfig.storage.test_1");
+        FlagTable.Node fNode = flagTable.get(pNode.getPackageId(), "enabled_rw");
+        assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
+
+        pNode = packageTable.get("com.android.aconfig.storage.test_4");
+        fNode = flagTable.get(pNode.getPackageId(), "enabled_fixed_ro");
+        assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
+    }
+}
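
testFlagValueList_getValue above shows the full resolution path: the package map yields a package id and a boolean start index, the flag map yields a per-package flag index, and their sum indexes into the value list. The helper below is composed from exactly the calls exercised in that test; it is an illustrative sketch (the wrapping class and lack of error handling are assumptions), not an API introduced by this change.

    import android.aconfig.storage.FlagTable;
    import android.aconfig.storage.FlagValueList;
    import android.aconfig.storage.PackageTable;

    final class FlagResolverSketch {
        // Resolves a boolean flag by combining the three storage files, mirroring
        // the lookups in testFlagValueList_getValue.
        static boolean getBooleanFlagValue(
                PackageTable packageTable,
                FlagTable flagTable,
                FlagValueList flagValueList,
                String packageName,
                String flagName) {
            PackageTable.Node pNode = packageTable.get(packageName);
            FlagTable.Node fNode = flagTable.get(pNode.getPackageId(), flagName);
            // Global index = package boolean start index + per-package flag index.
            return flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex());
        }
    }
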
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java
new file mode 100644
index 0000000..e7e19d8
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+
+import android.aconfig.storage.FileType;
+import android.aconfig.storage.PackageTable;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class PackageTableTest {
+
+    @Test
+    public void testPackageTable_rightHeader() throws Exception {
+        PackageTable packageTable =
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer());
+        PackageTable.Header header = packageTable.getHeader();
+        assertEquals(1, header.getVersion());
+        assertEquals("mockup", header.getContainer());
+        assertEquals(FileType.PACKAGE_MAP, header.getFileType());
+        assertEquals(209, header.getFileSize());
+        assertEquals(3, header.getNumPackages());
+        assertEquals(31, header.getBucketOffset());
+        assertEquals(59, header.getNodeOffset());
+    }
+
+    @Test
+    public void testPackageTable_rightNode() throws Exception {
+        PackageTable packageTable =
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer());
+
+        PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
+        PackageTable.Node node2 = packageTable.get("com.android.aconfig.storage.test_2");
+        PackageTable.Node node4 = packageTable.get("com.android.aconfig.storage.test_4");
+
+        assertEquals("com.android.aconfig.storage.test_1", node1.getPackageName());
+        assertEquals("com.android.aconfig.storage.test_2", node2.getPackageName());
+        assertEquals("com.android.aconfig.storage.test_4", node4.getPackageName());
+
+        assertEquals(0, node1.getPackageId());
+        assertEquals(1, node2.getPackageId());
+        assertEquals(2, node4.getPackageId());
+
+        assertEquals(0, node1.getBooleanStartIndex());
+        assertEquals(3, node2.getBooleanStartIndex());
+        assertEquals(6, node4.getBooleanStartIndex());
+
+        assertEquals(159, node1.getNextOffset());
+        assertEquals(-1, node2.getNextOffset());
+        assertEquals(-1, node4.getNextOffset());
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java
new file mode 100644
index 0000000..10620d2
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import android.aconfig.storage.SipHasher13;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class SipHasher13Test {
+    @Test
+    public void testSipHash_hashString() throws Exception {
+        String testStr = "com.google.android.test";
+        long result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0xF86572EFF9C4A0C1L, result);
+
+        testStr = "abcdefg";
+        result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0x2295EF44BD078AE9L, result);
+
+        testStr = "abcdefgh";
+        result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0x5CD7657FA7F96C16L, result);
+    }
+}
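
SipHasher13.hash takes raw bytes and returns a 64-bit SipHash-1-3 digest, and the JNI hash entry point added further down is expected to produce identical values (testRustJavaEqualHash compares the two). A usage sketch follows; deriving a bucket index from the digest is an assumption about how the map files consume the hash, not something asserted by this change.

    import static java.nio.charset.StandardCharsets.UTF_8;

    import android.aconfig.storage.SipHasher13;

    final class HashUsageSketch {
        static long hashPackage(String packageName) {
            return SipHasher13.hash(packageName.getBytes(UTF_8));
        }

        // Assumption for illustration: a table with numBuckets buckets would place
        // an entry at hash mod numBuckets, treating the digest as unsigned.
        static int bucketIndex(long hash, int numBuckets) {
            return (int) Long.remainderUnsigned(hash, numBuckets);
        }
    }
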
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java b/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java
new file mode 100644
index 0000000..f35952d
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+public final class TestDataUtils {
+    private static final String TEST_PACKAGE_MAP_PATH = "package.map";
+    private static final String TEST_FLAG_MAP_PATH = "flag.map";
+    private static final String TEST_FLAG_VAL_PATH = "flag.val";
+    private static final String TEST_FLAG_INFO_PATH = "flag.info";
+
+    private static final String TESTDATA_PATH =
+            "/data/local/tmp/aconfig_storage_file_test_java/testdata/";
+
+    public static ByteBuffer getTestPackageMapByteBuffer() throws Exception {
+        return readFile(TESTDATA_PATH + TEST_PACKAGE_MAP_PATH);
+    }
+
+    public static ByteBuffer getTestFlagMapByteBuffer() throws Exception {
+        return readFile(TESTDATA_PATH + TEST_FLAG_MAP_PATH);
+    }
+
+    public static ByteBuffer getTestFlagValByteBuffer() throws Exception {
+        return readFile(TESTDATA_PATH + TEST_FLAG_VAL_PATH);
+    }
+
+    public static ByteBuffer getTestFlagInfoByteBuffer() throws Exception {
+        return readFile(TESTDATA_PATH + TEST_FLAG_INFO_PATH);
+    }
+
+    private static ByteBuffer readFile(String fileName) throws Exception {
+        // Use try-with-resources so the stream is always closed after reading.
+        try (InputStream input = new FileInputStream(fileName)) {
+            return ByteBuffer.wrap(input.readAllBytes());
+        }
+    }
+}

diff --git a/tools/aconfig/aconfig_storage_read_api/Android.bp b/tools/aconfig/aconfig_storage_read_api/Android.bp
index 3b124b1..f96b223 100644
--- a/tools/aconfig/aconfig_storage_read_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/Android.bp
@@ -87,6 +87,9 @@
     generated_sources: ["libcxx_aconfig_storage_read_api_bridge_code"],
     whole_static_libs: ["libaconfig_storage_read_api_cxx_bridge"],
     export_include_dirs: ["include"],
+    static_libs: [
+        "libbase",
+    ],
     host_supported: true,
     vendor_available: true,
     product_available: true,
@@ -104,6 +107,33 @@
     afdo: true,
 }
 
+soong_config_module_type {
+    name: "aconfig_lib_cc_shared_link_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "Aconfig",
+    bool_variables: [
+        "read_from_new_storage",
+    ],
+    properties: [
+        "shared_libs",
+    ],
+}
+
+soong_config_bool_variable {
+    name: "read_from_new_storage",
+}
+
+aconfig_lib_cc_shared_link_defaults {
+    name: "aconfig_lib_cc_shared_link.defaults",
+    soong_config_variables: {
+        read_from_new_storage: {
+            shared_libs: [
+                "libaconfig_storage_read_api_cc",
+            ],
+        },
+    },
+}
+
 cc_defaults {
     name: "aconfig_lib_cc_static_link.defaults",
     shared_libs: [
@@ -117,6 +147,7 @@
     crate_name: "aconfig_storage_read_api_rust_jni",
     srcs: ["srcs/lib.rs"],
     rustlibs: [
+        "libaconfig_storage_file",
         "libaconfig_storage_read_api",
         "libanyhow",
         "libjni",
@@ -127,7 +158,9 @@
 java_library {
     name: "libaconfig_storage_read_api_java",
     srcs: [
-        "srcs/**/*.java",
+        "srcs/android/aconfig/storage/AconfigStorageReadAPI.java",
+        "srcs/android/aconfig/storage/FlagReadContext.java",
+        "srcs/android/aconfig/storage/PackageReadContext.java",
     ],
     required: ["libaconfig_storage_read_api_rust_jni"],
     min_sdk_version: "UpsideDownCake",
@@ -136,3 +169,41 @@
         "//apex_available:platform",
     ],
 }
+
+java_library {
+    name: "aconfig_storage_reader_java",
+    srcs: [
+        "srcs/android/aconfig/storage/StorageInternalReader.java",
+    ],
+    libs: [
+        "unsupportedappusage",
+        "strict_mode_stub",
+    ],
+    static_libs: [
+        "aconfig_storage_file_java",
+    ],
+    sdk_version: "core_current",
+    host_supported: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+}
+
+java_library {
+    name: "aconfig_storage_reader_java_none",
+    srcs: [
+        "srcs/android/aconfig/storage/StorageInternalReader.java",
+    ],
+    libs: [
+        "unsupportedappusage-sdk-none",
+        "fake_device_config",
+    ],
+    static_libs: [
+        "aconfig_storage_file_java_none",
+    ],
+    sdk_version: "none",
+    system_modules: "core-all-system-modules",
+    host_supported: true,
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
index 97ada3a..8e0c4e1 100644
--- a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
+++ b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
@@ -1,3 +1,4 @@
+#include <android-base/unique_fd.h>
 #include <sys/mman.h>
 #include <sys/stat.h>
 #include <fcntl.h>
@@ -59,22 +60,22 @@
 
 /// Map a storage file
 Result<MappedStorageFile*> map_storage_file(std::string const& file) {
-  int fd = open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY);
-  if (fd == -1) {
+  android::base::unique_fd ufd(open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY));
+  if (ufd.get() == -1) {
     auto result = Result<MappedStorageFile*>();
     result.errmsg = std::string("failed to open ") + file + ": " + strerror(errno);
     return result;
   };
 
   struct stat fd_stat;
-  if (fstat(fd, &fd_stat) < 0) {
+  if (fstat(ufd.get(), &fd_stat) < 0) {
     auto result = Result<MappedStorageFile*>();
     result.errmsg = std::string("fstat failed: ") + strerror(errno);
     return result;
   }
   size_t file_size = fd_stat.st_size;
 
-  void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, fd, 0);
+  void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, ufd.get(), 0);
   if (map_result == MAP_FAILED) {
     auto result = Result<MappedStorageFile*>();
     result.errmsg = std::string("mmap failed: ") + strerror(errno);
diff --git a/tools/aconfig/aconfig_storage_read_api/src/lib.rs b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
index 61f9e96..d76cf3f 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
@@ -53,7 +53,7 @@
 use package_table_query::find_package_read_context;
 
 use anyhow::anyhow;
-use memmap2::Mmap;
+pub use memmap2::Mmap;
 use std::fs::File;
 use std::io::Read;
 
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
index 7746b58..850c2b8 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
@@ -16,18 +16,14 @@
 
 package android.aconfig.storage;
 
+import dalvik.annotation.optimization.FastNative;
+
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
-import java.nio.channels.FileChannel.MapMode;
-
-import android.aconfig.storage.PackageReadContext;
-import android.aconfig.storage.FlagReadContext;
-import android.aconfig.storage.BooleanFlagValue;
-
-import dalvik.annotation.optimization.FastNative;
 
 public class AconfigStorageReadAPI {
 
@@ -50,9 +46,8 @@
     }
 
     // Map a storage file given container and file type
-    public static MappedByteBuffer getMappedFile(
-        String container,
-        StorageFileType type) throws IOException{
+    public static MappedByteBuffer getMappedFile(String container, StorageFileType type)
+            throws IOException {
         switch (type) {
             case PACKAGE_MAP:
                 return mapStorageFile(STORAGEDIR + "/maps/" + container + ".package.map");
@@ -68,19 +63,58 @@
     }
 
     // JNI interface to get package read context
+    // @param mappedFile: memory mapped package map file
+    // @param packageName: package name
+    // @throws IOException if the passed in file is not a valid package map file
     @FastNative
-    public static native PackageReadContext getPackageReadContext(
-        ByteBuffer mappedFile, String packageName);
+    private static native ByteBuffer getPackageReadContextImpl(
+            ByteBuffer mappedFile, String packageName) throws IOException;
+
+    // API to get package read context
+    // @param mappedFile: memory mapped package map file
+    // @param packageName: package name
+    // @throws IOException if the passed in file is not a valid package map file
+    public static PackageReadContext getPackageReadContext(
+            ByteBuffer mappedFile, String packageName) throws IOException {
+        ByteBuffer buffer = getPackageReadContextImpl(mappedFile, packageName);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        return new PackageReadContext(buffer.getInt(), buffer.getInt(4));
+    }
 
     // JNI interface to get flag read context
+    // @param mappedFile: memory mapped flag map file
+    // @param packageId: package id to represent a specific package, obtained from
+    // package map file
+    // @param flagName: flag name
+    // @throws IOException if the passed in file is not a valid flag map file
     @FastNative
-    public static native FlagReadContext getFlagReadContext(
-        ByteBuffer mappedFile, int packageId, String flagName);
+    private static native ByteBuffer getFlagReadContextImpl(
+            ByteBuffer mappedFile, int packageId, String flagName) throws IOException;
+
+    // API to get flag read context
+    // @param mappedFile: memory mapped flag map file
+    // @param packageId: package id to represent a specific package, obtained from
+    // package map file
+    // @param flagName: flag name
+    // @throws IOException if the passed in file is not a valid flag map file
+    public static FlagReadContext getFlagReadContext(
+            ByteBuffer mappedFile, int packageId, String flagName) throws IOException {
+        ByteBuffer buffer = getFlagReadContextImpl(mappedFile, packageId, flagName);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        return new FlagReadContext(buffer.getInt(), buffer.getInt(4));
+    }
 
     // JNI interface to get boolean flag value
+    // @param mappedFile: memory mapped flag value file
+    // @param flagIndex: flag global index in the flag value array
+    // @throws IOException if the passed in file is not a valid flag value file or the
+    // flag index went over the file boundary.
     @FastNative
-    public static native BooleanFlagValue getBooleanFlagValue(
-        ByteBuffer mappedFile, int flagIndex);
+    public static native boolean getBooleanFlagValue(ByteBuffer mappedFile, int flagIndex)
+            throws IOException;
+
+    @FastNative
+    public static native long hash(String packageName) throws IOException;
 
     static {
         System.loadLibrary("aconfig_storage_read_api_rust_jni");
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/FlagReadContext.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/FlagReadContext.java
index 57a36ca..60559a9 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/FlagReadContext.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/FlagReadContext.java
@@ -16,20 +16,11 @@
  */
 
 public class FlagReadContext {
-    public boolean mQuerySuccess;
-    public String mErrorMessage;
-    public boolean mFlagExists;
     public StoredFlagType mFlagType;
     public int mFlagIndex;
 
-    public FlagReadContext(boolean querySuccess,
-            String errorMessage,
-            boolean flagExists,
-            int flagType,
+    public FlagReadContext(int flagType,
             int flagIndex) {
-        mQuerySuccess = querySuccess;
-        mErrorMessage = errorMessage;
-        mFlagExists = flagExists;
         mFlagType = StoredFlagType.fromInteger(flagType);
         mFlagIndex = flagIndex;
     }
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/PackageReadContext.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/PackageReadContext.java
index 60d6b66..b781d9b 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/PackageReadContext.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/PackageReadContext.java
@@ -16,20 +16,11 @@
  */
 
 public class PackageReadContext {
-    public boolean mQuerySuccess;
-    public String mErrorMessage;
-    public boolean mPackageExists;
     public int mPackageId;
     public int mBooleanStartIndex;
 
-    public PackageReadContext(boolean querySuccess,
-                              String errorMessage,
-                              boolean packageExists,
-                              int packageId,
+    public PackageReadContext(int packageId,
                               int booleanStartIndex) {
-        mQuerySuccess = querySuccess;
-        mErrorMessage = errorMessage;
-        mPackageExists = packageExists;
         mPackageId = packageId;
         mBooleanStartIndex = booleanStartIndex;
     }
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
new file mode 100644
index 0000000..29ebee5
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+import android.compat.annotation.UnsupportedAppUsage;
+import android.os.StrictMode;
+
+import java.io.Closeable;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+
+/** @hide */
+public class StorageInternalReader {
+
+    private static final String MAP_PATH = "/metadata/aconfig/maps/";
+    private static final String BOOT_PATH = "/metadata/aconfig/boot/";
+
+    private PackageTable mPackageTable;
+    private FlagValueList mFlagValueList;
+
+    private int mPackageBooleanStartOffset;
+
+    @UnsupportedAppUsage
+    public StorageInternalReader(String container, String packageName) {
+        this(packageName, MAP_PATH + container + ".package.map", BOOT_PATH + container + ".val");
+    }
+
+    @UnsupportedAppUsage
+    public StorageInternalReader(String packageName, String packageMapFile, String flagValueFile) {
+        StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
+        mPackageTable = PackageTable.fromBytes(mapStorageFile(packageMapFile));
+        mFlagValueList = FlagValueList.fromBytes(mapStorageFile(flagValueFile));
+        StrictMode.setThreadPolicy(oldPolicy);
+        mPackageBooleanStartOffset = getPackageBooleanStartOffset(packageName);
+    }
+
+    @UnsupportedAppUsage
+    public boolean getBooleanFlagValue(int index) {
+        index += mPackageBooleanStartOffset;
+        if (index >= mFlagValueList.size()) {
+            throw new AconfigStorageException("Fail to get boolean flag value");
+        }
+        return mFlagValueList.getBoolean(index);
+    }
+
+    private int getPackageBooleanStartOffset(String packageName) {
+        PackageTable.Node pNode = mPackageTable.get(packageName);
+        if (pNode == null) {
+            PackageTable.Header header = mPackageTable.getHeader();
+            throw new AconfigStorageException(
+                    String.format(
+                            "Fail to get package %s from container %s",
+                            packageName, header.getContainer()));
+        }
+        return pNode.getBooleanStartIndex();
+    }
+
+    // Map a storage file given file path
+    private static MappedByteBuffer mapStorageFile(String file) {
+        FileChannel channel = null;
+        try {
+            channel = FileChannel.open(Paths.get(file), StandardOpenOption.READ);
+            return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
+        } catch (Exception e) {
+            throw new AconfigStorageException(
+                    String.format("Fail to mmap storage file %s", file), e);
+        } finally {
+            quietlyDispose(channel);
+        }
+    }
+
+    private static void quietlyDispose(Closeable closable) {
+        try {
+            if (closable != null) {
+                closable.close();
+            }
+        } catch (Exception e) {
+            // Intentionally ignored: a failed close here is harmless once the mapping exists.
+        }
+    }
+}
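
StorageInternalReader front-loads both mmaps in its constructor (under a temporary allow-disk-reads StrictMode policy) and then serves lookups by index. A usage sketch; the container and package names are placeholders, and note that the argument to getBooleanFlagValue is the flag's per-package index because the reader adds the package's boolean start offset internally.

    import android.aconfig.storage.StorageInternalReader;

    // Hypothetical usage only; names are placeholders.
    final class InternalReaderUsageSketch {
        static boolean isFirstFlagEnabled() {
            StorageInternalReader reader =
                    new StorageInternalReader("system", "com.example.myflags");
            // Per-package flag index; the package's boolean start offset is added internally.
            return reader.getBooleanFlagValue(0);
        }
    }
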
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
index e195eb8..f5f12bb 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
@@ -1,14 +1,16 @@
 //! aconfig storage read api java rust interlop
 
+use aconfig_storage_file::SipHasher13;
 use aconfig_storage_read_api::flag_table_query::find_flag_read_context;
 use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
 use aconfig_storage_read_api::package_table_query::find_package_read_context;
 use aconfig_storage_read_api::{FlagReadContext, PackageReadContext};
 
 use anyhow::Result;
-use jni::objects::{JByteBuffer, JClass, JString, JValue};
-use jni::sys::{jint, jobject};
+use jni::objects::{JByteBuffer, JClass, JString};
+use jni::sys::{jboolean, jint, jlong};
 use jni::JNIEnv;
+use std::hash::Hasher;
 
 /// Call rust find package read context
 fn get_package_read_context_java(
@@ -28,55 +30,42 @@
     Ok(find_package_read_context(buffer, &package_name)?)
 }
 
-/// Create java package read context return
-fn create_java_package_read_context(
-    env: &mut JNIEnv,
-    success_query: bool,
-    error_message: String,
-    pkg_found: bool,
-    pkg_id: u32,
-    start_index: u32,
-) -> jobject {
-    let query_success = JValue::Bool(success_query as u8);
-    let errmsg = env.new_string(error_message).expect("failed to create JString");
-    let package_exists = JValue::Bool(pkg_found as u8);
-    let package_id = JValue::Int(pkg_id as i32);
-    let boolean_start_index = JValue::Int(start_index as i32);
-    let context = env.new_object(
-        "android/aconfig/storage/PackageReadContext",
-        "(ZLjava/lang/String;ZII)V",
-        &[query_success, (&errmsg).into(), package_exists, package_id, boolean_start_index],
-    );
-    context.expect("failed to call PackageReadContext constructor").into_raw()
-}
-
 /// Get package read context JNI
 #[no_mangle]
 #[allow(unused)]
-pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getPackageReadContext<
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getPackageReadContextImpl<
     'local,
 >(
     mut env: JNIEnv<'local>,
     class: JClass<'local>,
     file: JByteBuffer<'local>,
     package: JString<'local>,
-) -> jobject {
+) -> JByteBuffer<'local> {
+    let mut package_id = -1;
+    let mut boolean_start_index = -1;
+
     match get_package_read_context_java(&mut env, file, package) {
-        Ok(context_opt) => match context_opt {
-            Some(context) => create_java_package_read_context(
-                &mut env,
-                true,
-                String::from(""),
-                true,
-                context.package_id,
-                context.boolean_start_index,
-            ),
-            None => create_java_package_read_context(&mut env, true, String::from(""), false, 0, 0),
-        },
+        Ok(context_opt) => {
+            if let Some(context) = context_opt {
+                package_id = context.package_id as i32;
+                boolean_start_index = context.boolean_start_index as i32;
+            }
+        }
         Err(errmsg) => {
-            create_java_package_read_context(&mut env, false, format!("{:?}", errmsg), false, 0, 0)
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
         }
     }
+
+    let mut bytes = Vec::new();
+    bytes.extend_from_slice(&package_id.to_le_bytes());
+    bytes.extend_from_slice(&boolean_start_index.to_le_bytes());
+    let (addr, len) = {
+        let buf = bytes.leak();
+        (buf.as_mut_ptr(), buf.len())
+    };
+    // SAFETY:
+    // The pointer and length come from the Vec leaked above, so the backing
+    // memory remains valid for the lifetime of the returned direct ByteBuffer.
+    unsafe { env.new_direct_byte_buffer(addr, len).expect("failed to create byte buffer") }
 }
 
 /// Call rust find flag read context
@@ -98,32 +87,10 @@
     Ok(find_flag_read_context(buffer, package_id as u32, &flag_name)?)
 }
 
-/// Create java flag read context return
-fn create_java_flag_read_context(
-    env: &mut JNIEnv,
-    success_query: bool,
-    error_message: String,
-    flg_found: bool,
-    flg_type: u32,
-    flg_index: u32,
-) -> jobject {
-    let query_success = JValue::Bool(success_query as u8);
-    let errmsg = env.new_string(error_message).expect("failed to create JString");
-    let flag_exists = JValue::Bool(flg_found as u8);
-    let flag_type = JValue::Int(flg_type as i32);
-    let flag_index = JValue::Int(flg_index as i32);
-    let context = env.new_object(
-        "android/aconfig/storage/FlagReadContext",
-        "(ZLjava/lang/String;ZII)V",
-        &[query_success, (&errmsg).into(), flag_exists, flag_type, flag_index],
-    );
-    context.expect("failed to call FlagReadContext constructor").into_raw()
-}
-
 /// Get flag read context JNI
 #[no_mangle]
 #[allow(unused)]
-pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getFlagReadContext<
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getFlagReadContextImpl<
     'local,
 >(
     mut env: JNIEnv<'local>,
@@ -131,41 +98,32 @@
     file: JByteBuffer<'local>,
     package_id: jint,
     flag: JString<'local>,
-) -> jobject {
+) -> JByteBuffer<'local> {
+    let mut flag_type = -1;
+    let mut flag_index = -1;
+
     match get_flag_read_context_java(&mut env, file, package_id, flag) {
-        Ok(context_opt) => match context_opt {
-            Some(context) => create_java_flag_read_context(
-                &mut env,
-                true,
-                String::from(""),
-                true,
-                context.flag_type as u32,
-                context.flag_index as u32,
-            ),
-            None => create_java_flag_read_context(&mut env, true, String::from(""), false, 9999, 0),
-        },
+        Ok(context_opt) => {
+            if let Some(context) = context_opt {
+                flag_type = context.flag_type as i32;
+                flag_index = context.flag_index as i32;
+            }
+        }
         Err(errmsg) => {
-            create_java_flag_read_context(&mut env, false, format!("{:?}", errmsg), false, 9999, 0)
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
         }
     }
-}
 
-/// Create java boolean flag value return
-fn create_java_boolean_flag_value(
-    env: &mut JNIEnv,
-    success_query: bool,
-    error_message: String,
-    value: bool,
-) -> jobject {
-    let query_success = JValue::Bool(success_query as u8);
-    let errmsg = env.new_string(error_message).expect("failed to create JString");
-    let flag_value = JValue::Bool(value as u8);
-    let context = env.new_object(
-        "android/aconfig/storage/BooleanFlagValue",
-        "(ZLjava/lang/String;Z)V",
-        &[query_success, (&errmsg).into(), flag_value],
-    );
-    context.expect("failed to call BooleanFlagValue constructor").into_raw()
+    let mut bytes = Vec::new();
+    bytes.extend_from_slice(&flag_type.to_le_bytes());
+    bytes.extend_from_slice(&flag_index.to_le_bytes());
+    let (addr, len) = {
+        let buf = bytes.leak();
+        (buf.as_mut_ptr(), buf.len())
+    };
+    // SAFETY:
+    // The pointer and length come from the Vec leaked above, so the backing
+    // memory remains valid for the lifetime of the returned direct ByteBuffer.
+    unsafe { env.new_direct_byte_buffer(addr, len).expect("failed to create byte buffer") }
 }
 
 /// Call rust find boolean flag value
@@ -193,11 +151,39 @@
     class: JClass<'local>,
     file: JByteBuffer<'local>,
     flag_index: jint,
-) -> jobject {
+) -> jboolean {
     match get_boolean_flag_value_java(&mut env, file, flag_index) {
-        Ok(value) => create_java_boolean_flag_value(&mut env, true, String::from(""), value),
+        Ok(value) => value as u8,
         Err(errmsg) => {
-            create_java_boolean_flag_value(&mut env, false, format!("{:?}", errmsg), false)
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
+            0u8
         }
     }
 }
+
+/// Get siphash13 hash of a string JNI
+#[no_mangle]
+#[allow(unused)]
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_hash<'local>(
+    mut env: JNIEnv<'local>,
+    class: JClass<'local>,
+    package_name: JString<'local>,
+) -> jlong {
+    match siphasher13_hash(&mut env, package_name) {
+        Ok(value) => value as jlong,
+        Err(errmsg) => {
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
+            0i64
+        }
+    }
+}
+
+fn siphasher13_hash(env: &mut JNIEnv, package_name: JString) -> Result<u64> {
+    // SAFETY:
+    // get_string_unchecked is sound here because the argument is a valid Java
+    // String reference supplied by the JNI caller.
+    let flag_name: String = unsafe { env.get_string_unchecked(&package_name)?.into() };
+    let mut s = SipHasher13::new();
+    s.write(flag_name.as_bytes());
+    s.write_u8(0xff);
+    Ok(s.finish())
+}
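
The Java side decodes these eight bytes in AconfigStorageReadAPI above with buffer.order(ByteOrder.LITTLE_ENDIAN) and two getInt calls. A standalone round-trip of that handoff convention, for illustration only (the class and method names are placeholders):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    final class ReadContextCodecSketch {
        // Two 32-bit values packed little-endian into 8 bytes, as the JNI layer does.
        static ByteBuffer pack(int first, int second) {
            ByteBuffer buffer = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
            buffer.putInt(first).putInt(second);
            buffer.rewind();
            return buffer;
        }

        // Mirrors the decode in getPackageReadContext/getFlagReadContext.
        static int[] unpack(ByteBuffer buffer) {
            buffer.order(ByteOrder.LITTLE_ENDIAN);
            return new int[] {buffer.getInt(0), buffer.getInt(4)};
        }
    }
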
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
index cf4cfe6..191741e 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
@@ -16,27 +16,29 @@
 
 package android.aconfig.storage.test;
 
-import java.io.IOException;
-import java.nio.MappedByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+
+import android.aconfig.DeviceProtos;
+import android.aconfig.nano.Aconfig.parsed_flag;
+import android.aconfig.storage.AconfigStorageReadAPI;
+import android.aconfig.storage.FlagReadContext;
+import android.aconfig.storage.FlagReadContext.StoredFlagType;
+import android.aconfig.storage.PackageReadContext;
+import android.aconfig.storage.SipHasher13;
+import android.aconfig.storage.StorageInternalReader;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
-import android.aconfig.storage.AconfigStorageReadAPI;
-import android.aconfig.storage.PackageReadContext;
-import android.aconfig.storage.FlagReadContext;
-import android.aconfig.storage.FlagReadContext.StoredFlagType;
-import android.aconfig.storage.BooleanFlagValue;
+import java.io.IOException;
+import java.nio.MappedByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
 
 @RunWith(JUnit4.class)
-public class AconfigStorageReadAPITest{
+public class AconfigStorageReadAPITest {
 
     private String mStorageDir = "/data/local/tmp/aconfig_java_api_test";
 
@@ -44,68 +46,66 @@
     public void testPackageContextQuery() {
         MappedByteBuffer packageMap = null;
         try {
-            packageMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.package.map");
-        } catch(IOException ex){
+            packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
         assertTrue(packageMap != null);
 
-        PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
-            packageMap, "com.android.aconfig.storage.test_1");
-        assertTrue(context.mQuerySuccess);
-        assertTrue(context.mErrorMessage, context.mErrorMessage.equals(""));
-        assertTrue(context.mPackageExists);
-        assertEquals(context.mPackageId, 0);
-        assertEquals(context.mBooleanStartIndex, 0);
+        try {
+            PackageReadContext context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_1");
+            assertEquals(context.mPackageId, 0);
+            assertEquals(context.mBooleanStartIndex, 0);
 
-        context = AconfigStorageReadAPI.getPackageReadContext(
-            packageMap, "com.android.aconfig.storage.test_2");
-        assertTrue(context.mQuerySuccess);
-        assertTrue(context.mErrorMessage, context.mErrorMessage.equals(""));
-        assertTrue(context.mPackageExists);
-        assertEquals(context.mPackageId, 1);
-        assertEquals(context.mBooleanStartIndex, 3);
+            context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_2");
+            assertEquals(context.mPackageId, 1);
+            assertEquals(context.mBooleanStartIndex, 3);
 
-        context = AconfigStorageReadAPI.getPackageReadContext(
-            packageMap, "com.android.aconfig.storage.test_4");
-        assertTrue(context.mQuerySuccess);
-        assertTrue(context.mErrorMessage, context.mErrorMessage.equals(""));
-        assertTrue(context.mPackageExists);
-        assertEquals(context.mPackageId, 2);
-        assertEquals(context.mBooleanStartIndex, 6);
+            context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_4");
+            assertEquals(context.mPackageId, 2);
+            assertEquals(context.mBooleanStartIndex, 6);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
     }
 
     @Test
     public void testNonExistPackageContextQuery() {
         MappedByteBuffer packageMap = null;
         try {
-            packageMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.package.map");
-        } catch(IOException ex){
+            packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
         assertTrue(packageMap != null);
 
-        PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
-            packageMap, "unknown");
-        assertTrue(context.mQuerySuccess);
-        assertTrue(context.mErrorMessage, context.mErrorMessage.equals(""));
-        assertFalse(context.mPackageExists);
-        assertEquals(context.mPackageId, 0);
-        assertEquals(context.mBooleanStartIndex, 0);
+        try {
+            PackageReadContext context =
+                    AconfigStorageReadAPI.getPackageReadContext(packageMap, "unknown");
+            assertEquals(context.mPackageId, -1);
+            assertEquals(context.mBooleanStartIndex, -1);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
     }
 
     @Test
     public void testFlagContextQuery() {
         MappedByteBuffer flagMap = null;
         try {
-            flagMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.flag.map");
-        } catch(IOException ex){
+            flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagMap!= null);
+        assertTrue(flagMap != null);
 
         class Baseline {
             public int mPackageId;
@@ -113,10 +113,8 @@
             public StoredFlagType mFlagType;
             public int mFlagIndex;
 
-            public Baseline(int packageId,
-                    String flagName,
-                    StoredFlagType flagType,
-                    int flagIndex) {
+            public Baseline(
+                    int packageId, String flagName, StoredFlagType flagType, int flagIndex) {
                 mPackageId = packageId;
                 mFlagName = flagName;
                 mFlagType = flagType;
@@ -134,14 +132,16 @@
         baselines.add(new Baseline(2, "enabled_fixed_ro", StoredFlagType.FixedReadOnlyBoolean, 0));
         baselines.add(new Baseline(0, "disabled_rw", StoredFlagType.ReadWriteBoolean, 0));
 
-        for (Baseline baseline : baselines) {
-            FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
-                flagMap, baseline.mPackageId,  baseline.mFlagName);
-            assertTrue(context.mQuerySuccess);
-            assertTrue(context.mErrorMessage, context.mErrorMessage.equals(""));
-            assertTrue(context.mFlagExists);
-            assertEquals(context.mFlagType, baseline.mFlagType);
-            assertEquals(context.mFlagIndex, baseline.mFlagIndex);
+        try {
+            for (Baseline baseline : baselines) {
+                FlagReadContext context =
+                        AconfigStorageReadAPI.getFlagReadContext(
+                                flagMap, baseline.mPackageId, baseline.mFlagName);
+                assertEquals(context.mFlagType, baseline.mFlagType);
+                assertEquals(context.mFlagIndex, baseline.mFlagIndex);
+            }
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
         }
     }
 
@@ -149,47 +149,44 @@
     public void testNonExistFlagContextQuery() {
         MappedByteBuffer flagMap = null;
         try {
-            flagMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.flag.map");
-        } catch(IOException ex){
+            flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagMap!= null);
+        assertTrue(flagMap != null);
 
-        FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
-            flagMap, 0,  "unknown");
-        assertTrue(context.mQuerySuccess);
-        assertTrue(context.mErrorMessage, context.mErrorMessage.equals(""));
-        assertFalse(context.mFlagExists);
-        assertEquals(context.mFlagType, null);
-        assertEquals(context.mFlagIndex, 0);
+        try {
+            FlagReadContext context =
+                    AconfigStorageReadAPI.getFlagReadContext(flagMap, 0, "unknown");
+            assertEquals(context.mFlagType, null);
+            assertEquals(context.mFlagIndex, -1);
 
-        context = AconfigStorageReadAPI.getFlagReadContext(
-            flagMap, 3,  "enabled_ro");
-        assertTrue(context.mQuerySuccess);
-        assertTrue(context.mErrorMessage, context.mErrorMessage.equals(""));
-        assertFalse(context.mFlagExists);
-        assertEquals(context.mFlagType, null);
-        assertEquals(context.mFlagIndex, 0);
+            context = AconfigStorageReadAPI.getFlagReadContext(flagMap, 3, "enabled_ro");
+            assertEquals(context.mFlagType, null);
+            assertEquals(context.mFlagIndex, -1);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
     }
 
     @Test
     public void testBooleanFlagValueQuery() {
         MappedByteBuffer flagVal = null;
         try {
-            flagVal = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/boot/mockup.val");
-        } catch(IOException ex){
+            flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagVal!= null);
+        assertTrue(flagVal != null);
 
         boolean[] baselines = {false, true, true, false, true, true, true, true};
         for (int i = 0; i < 8; ++i) {
-            BooleanFlagValue value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, i);
-            assertTrue(value.mQuerySuccess);
-            assertTrue(value.mErrorMessage, value.mErrorMessage.equals(""));
-            assertEquals(value.mFlagValue, baselines[i]);
+            try {
+                Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, i);
+                assertEquals(value, baselines[i]);
+            } catch (IOException ex) {
+                assertTrue(ex.toString(), false);
+            }
         }
     }
 
@@ -197,16 +194,77 @@
     public void testInvalidBooleanFlagValueQuery() {
         MappedByteBuffer flagVal = null;
         try {
-            flagVal = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/boot/mockup.val");
-        } catch(IOException ex){
+            flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagVal!= null);
+        assertTrue(flagVal != null);
 
-        BooleanFlagValue value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9);
-        String expectedErrmsg = "Flag value offset goes beyond the end of the file";
-        assertFalse(value.mQuerySuccess);
-        assertTrue(value.mErrorMessage, value.mErrorMessage.contains(expectedErrmsg));
+        try {
+            Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9);
+            assertTrue("should throw", false);
+        } catch (IOException ex) {
+            String expectedErrmsg = "invalid storage file byte offset";
+            assertTrue(ex.toString(), ex.toString().contains(expectedErrmsg));
+        }
     }
- }
+
+    @Test
+    public void testRustJavaEqualHash() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+        for (parsed_flag flag : flags) {
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            long rHash = AconfigStorageReadAPI.hash(packageName);
+            long jHash = SipHasher13.hash(packageName.getBytes());
+            assertEquals(rHash, jHash);
+
+            String fullFlagName = packageName + "/" + flagName;
+            rHash = AconfigStorageReadAPI.hash(fullFlagName);
+            jHash = SipHasher13.hash(fullFlagName.getBytes());
+            assertEquals(rHash, jHash);
+        }
+    }
+
+    @Test
+    public void testRustJavaEqualFlag() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+
+        String mapPath = "/metadata/aconfig/maps/";
+        String flagsPath = "/metadata/aconfig/boot/";
+
+        for (parsed_flag flag : flags) {
+
+            String container = flag.container;
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            String fullFlagName = packageName + "/" + flagName;
+
+            MappedByteBuffer packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".package.map");
+            MappedByteBuffer flagMap =
+                    AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".flag.map");
+            MappedByteBuffer flagValList =
+                    AconfigStorageReadAPI.mapStorageFile(flagsPath + container + ".val");
+
+            PackageReadContext packageContext =
+                    AconfigStorageReadAPI.getPackageReadContext(packageMap, packageName);
+
+            FlagReadContext flagContext =
+                    AconfigStorageReadAPI.getFlagReadContext(
+                            flagMap, packageContext.mPackageId, flagName);
+
+            boolean rVal =
+                    AconfigStorageReadAPI.getBooleanFlagValue(
+                            flagValList,
+                            packageContext.mBooleanStartIndex + flagContext.mFlagIndex);
+
+            StorageInternalReader reader = new StorageInternalReader(container, packageName);
+            boolean jVal = reader.getBooleanFlagValue(flagContext.mFlagIndex);
+
+            long rHash = AconfigStorageReadAPI.hash(packageName);
+            long jHash = SipHasher13.hash(packageName.getBytes());
+            assertEquals(rVal, jVal);
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
index d94b2b4..3d4e9ad 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
@@ -1,7 +1,10 @@
 android_test {
     name: "aconfig_storage_read_api.test.java",
-    srcs: ["AconfigStorageReadAPITest.java"],
+    srcs: ["./**/*.java"],
     static_libs: [
+        "aconfig_device_paths_java",
+        "aconfig_storage_file_java",
+        "aconfig_storage_reader_java",
         "androidx.test.rules",
         "libaconfig_storage_read_api_java",
         "junit",
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java b/tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java
new file mode 100644
index 0000000..3a1bba0
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import android.aconfig.storage.StorageInternalReader;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class StorageInternalReaderTest {
+
+    private String mStorageDir = "/data/local/tmp/aconfig_java_api_test";
+
+    @Test
+    public void testStorageInternalReader_getFlag() {
+
+        String packageMapFile = mStorageDir + "/maps/mockup.package.map";
+        String flagValueFile = mStorageDir + "/boot/mockup.val";
+
+        StorageInternalReader reader =
+                new StorageInternalReader(
+                        "com.android.aconfig.storage.test_1", packageMapFile, flagValueFile);
+        assertFalse(reader.getBooleanFlagValue(0));
+        assertTrue(reader.getBooleanFlagValue(1));
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
index cabc65e..7b43574 100644
--- a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
+++ b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
@@ -1,6 +1,7 @@
 
 #include <android-base/file.h>
 #include <android-base/logging.h>
+#include <android-base/unique_fd.h>
 
 #include <sys/mman.h>
 #include <sys/stat.h>
@@ -13,8 +14,8 @@
 namespace aconfig_storage {
 
 /// Map a storage file
-android::base::Result<MutableMappedStorageFile*> map_mutable_storage_file(
-    std::string const& file) {
+android::base::Result<MutableMappedStorageFile *> map_mutable_storage_file(
+    std::string const &file) {
   struct stat file_stat;
   if (stat(file.c_str(), &file_stat) < 0) {
     return android::base::ErrnoError() << "stat failed";
@@ -26,13 +27,13 @@
 
   size_t file_size = file_stat.st_size;
 
-  const int fd = open(file.c_str(), O_RDWR | O_NOFOLLOW | O_CLOEXEC);
-  if (fd == -1) {
+  android::base::unique_fd ufd(open(file.c_str(), O_RDWR | O_NOFOLLOW | O_CLOEXEC));
+  if (ufd.get() == -1) {
     return android::base::ErrnoError() << "failed to open " << file;
   };
 
-  void* const map_result =
-      mmap(nullptr, file_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+  void *const map_result =
+      mmap(nullptr, file_size, PROT_READ | PROT_WRITE, MAP_SHARED, ufd.get(), 0);
   if (map_result == MAP_FAILED) {
     return android::base::ErrnoError() << "mmap failed";
   }
@@ -46,47 +47,56 @@
 
 /// Set boolean flag value
 android::base::Result<void> set_boolean_flag_value(
-    const MutableMappedStorageFile& file,
+    const MutableMappedStorageFile &file,
     uint32_t offset,
     bool value) {
   auto content = rust::Slice<uint8_t>(
-      static_cast<uint8_t*>(file.file_ptr), file.file_size);
+      static_cast<uint8_t *>(file.file_ptr), file.file_size);
   auto update_cxx = update_boolean_flag_value_cxx(content, offset, value);
   if (!update_cxx.update_success) {
     return android::base::Error() << update_cxx.error_message.c_str();
   }
+  if (msync(static_cast<uint8_t *>(file.file_ptr) + update_cxx.offset, 1, MS_SYNC) == -1) {
+    return android::base::ErrnoError() << "msync failed";
+  }
   return {};
 }
 
 /// Set if flag has server override
 android::base::Result<void> set_flag_has_server_override(
-    const MutableMappedStorageFile& file,
+    const MutableMappedStorageFile &file,
     FlagValueType value_type,
     uint32_t offset,
     bool value) {
   auto content = rust::Slice<uint8_t>(
-      static_cast<uint8_t*>(file.file_ptr), file.file_size);
+      static_cast<uint8_t *>(file.file_ptr), file.file_size);
   auto update_cxx = update_flag_has_server_override_cxx(
       content, static_cast<uint16_t>(value_type), offset, value);
   if (!update_cxx.update_success) {
     return android::base::Error() << update_cxx.error_message.c_str();
   }
+  if (msync(static_cast<uint8_t *>(file.file_ptr) + update_cxx.offset, 1, MS_SYNC) == -1) {
+    return android::base::ErrnoError() << "msync failed";
+  }
   return {};
 }
 
 /// Set if flag has local override
 android::base::Result<void> set_flag_has_local_override(
-    const MutableMappedStorageFile& file,
+    const MutableMappedStorageFile &file,
     FlagValueType value_type,
     uint32_t offset,
     bool value) {
   auto content = rust::Slice<uint8_t>(
-      static_cast<uint8_t*>(file.file_ptr), file.file_size);
+      static_cast<uint8_t *>(file.file_ptr), file.file_size);
   auto update_cxx = update_flag_has_local_override_cxx(
       content, static_cast<uint16_t>(value_type), offset, value);
   if (!update_cxx.update_success) {
     return android::base::Error() << update_cxx.error_message.c_str();
   }
+  if (msync(static_cast<uint8_t *>(file.file_ptr) + update_cxx.offset, 1, MS_SYNC) == -1) {
+    return android::base::ErrnoError() << "msync failed";
+  }
   return {};
 }
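Note on the change above: the Rust update functions (see the flag_info_update.rs and flag_value_update.rs diffs below) now report the byte offset they touched, and the C++ wrappers msync only that region instead of the whole mapping. msync operates on page-aligned, page-granular ranges, so in practice the flushed range is the page containing the byte. A minimal Python sketch of the same update-then-flush pattern (illustrative only, not the AOSP implementation; the helper name and file layout are assumptions), with the page alignment made explicit:

    # Illustrative sketch: update one byte of a shared file mapping and flush
    # only the page that contains it. flush()/msync() require the start offset
    # to be page-aligned.
    import mmap
    import os

    def set_byte_and_flush(path, offset, value):
        size = os.path.getsize(path)
        with open(path, "r+b") as f, mmap.mmap(f.fileno(), size, access=mmap.ACCESS_WRITE) as m:
            m[offset] = value
            page_start = (offset // mmap.PAGESIZE) * mmap.PAGESIZE  # align down
            m.flush(page_start, min(mmap.PAGESIZE, size - page_start))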
 
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
index 6f03f12..7e60713 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
@@ -67,13 +67,13 @@
     flag_type: FlagValueType,
     flag_index: u32,
     value: bool,
-) -> Result<(), AconfigStorageError> {
+) -> Result<usize, AconfigStorageError> {
     let (attribute, head) = get_flag_attribute_and_offset(buf, flag_type, flag_index)?;
     let has_override = (attribute & (FlagInfoBit::HasServerOverride as u8)) != 0;
     if has_override != value {
         buf[head] = (attribute ^ FlagInfoBit::HasServerOverride as u8).to_le_bytes()[0];
     }
-    Ok(())
+    Ok(head)
 }
 
 /// Set if flag has local override
@@ -82,13 +82,13 @@
     flag_type: FlagValueType,
     flag_index: u32,
     value: bool,
-) -> Result<(), AconfigStorageError> {
+) -> Result<usize, AconfigStorageError> {
     let (attribute, head) = get_flag_attribute_and_offset(buf, flag_type, flag_index)?;
     let has_override = (attribute & (FlagInfoBit::HasLocalOverride as u8)) != 0;
     if has_override != value {
         buf[head] = (attribute ^ FlagInfoBit::HasLocalOverride as u8).to_le_bytes()[0];
     }
-    Ok(())
+    Ok(head)
 }
 
 #[cfg(test)]
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
index 0938715..dd15c99 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
@@ -24,7 +24,7 @@
     buf: &mut [u8],
     flag_index: u32,
     flag_value: bool,
-) -> Result<(), AconfigStorageError> {
+) -> Result<usize, AconfigStorageError> {
     let interpreted_header = FlagValueHeader::from_bytes(buf)?;
     if interpreted_header.version > FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
@@ -43,7 +43,7 @@
     }
 
     buf[head] = u8::from(flag_value).to_le_bytes()[0];
-    Ok(())
+    Ok(head)
 }
 
 #[cfg(test)]
diff --git a/tools/aconfig/aconfig_storage_write_api/src/lib.rs b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
index aec28de..0396a63 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
@@ -194,18 +194,21 @@
     // Flag value update return for cc interop
     pub struct BooleanFlagValueUpdateCXX {
         pub update_success: bool,
+        pub offset: usize,
         pub error_message: String,
     }
 
     // Flag has server override update return for cc interop
     pub struct FlagHasServerOverrideUpdateCXX {
         pub update_success: bool,
+        pub offset: usize,
         pub error_message: String,
     }
 
     // Flag has local override update return for cc interop
     pub struct FlagHasLocalOverrideUpdateCXX {
         pub update_success: bool,
+        pub offset: usize,
         pub error_message: String,
     }
 
@@ -251,11 +254,14 @@
     value: bool,
 ) -> ffi::BooleanFlagValueUpdateCXX {
     match crate::flag_value_update::update_boolean_flag_value(file, offset, value) {
-        Ok(()) => {
-            ffi::BooleanFlagValueUpdateCXX { update_success: true, error_message: String::from("") }
-        }
+        Ok(head) => ffi::BooleanFlagValueUpdateCXX {
+            update_success: true,
+            offset: head,
+            error_message: String::from(""),
+        },
         Err(errmsg) => ffi::BooleanFlagValueUpdateCXX {
             update_success: false,
+            offset: usize::MAX,
             error_message: format!("{:?}", errmsg),
         },
     }
@@ -272,18 +278,21 @@
             match crate::flag_info_update::update_flag_has_server_override(
                 file, value_type, offset, value,
             ) {
-                Ok(()) => ffi::FlagHasServerOverrideUpdateCXX {
+                Ok(head) => ffi::FlagHasServerOverrideUpdateCXX {
                     update_success: true,
+                    offset: head,
                     error_message: String::from(""),
                 },
                 Err(errmsg) => ffi::FlagHasServerOverrideUpdateCXX {
                     update_success: false,
+                    offset: usize::MAX,
                     error_message: format!("{:?}", errmsg),
                 },
             }
         }
         Err(errmsg) => ffi::FlagHasServerOverrideUpdateCXX {
             update_success: false,
+            offset: usize::MAX,
             error_message: format!("{:?}", errmsg),
         },
     }
@@ -300,18 +309,21 @@
             match crate::flag_info_update::update_flag_has_local_override(
                 file, value_type, offset, value,
             ) {
-                Ok(()) => ffi::FlagHasLocalOverrideUpdateCXX {
+                Ok(head) => ffi::FlagHasLocalOverrideUpdateCXX {
                     update_success: true,
+                    offset: head,
                     error_message: String::from(""),
                 },
                 Err(errmsg) => ffi::FlagHasLocalOverrideUpdateCXX {
                     update_success: false,
+                    offset: usize::MAX,
                     error_message: format!("{:?}", errmsg),
                 },
             }
         }
         Err(errmsg) => ffi::FlagHasLocalOverrideUpdateCXX {
             update_success: false,
+            offset: usize::MAX,
             error_message: format!("{:?}", errmsg),
         },
     }
diff --git a/tools/aconfig/aconfig_storage_write_api/tests/Android.bp b/tools/aconfig/aconfig_storage_write_api/tests/Android.bp
index f6409b7..5508dac 100644
--- a/tools/aconfig/aconfig_storage_write_api/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_write_api/tests/Android.bp
@@ -38,5 +38,10 @@
         "device-tests",
         "general-tests",
     ],
-    ldflags: ["-Wl,--allow-multiple-definition"],
+    generated_headers: [
+        "cxx-bridge-header",
+        "libcxx_aconfig_storage_read_api_bridge_header",
+    ],
+    generated_sources: ["libcxx_aconfig_storage_read_api_bridge_code"],
+    whole_static_libs: ["libaconfig_storage_read_api_cxx_bridge"],
 }
diff --git a/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp b/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp
index 31183fa..133f5a0 100644
--- a/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp
+++ b/tools/aconfig/aconfig_storage_write_api/tests/storage_write_api_test.cpp
@@ -25,6 +25,9 @@
 #include <android-base/file.h>
 #include <android-base/result.h>
 
+#include "rust/cxx.h"
+#include "aconfig_storage/lib.rs.h"
+
 using namespace android::base;
 
 namespace api = aconfig_storage;
@@ -85,6 +88,23 @@
     ASSERT_TRUE(value.ok());
     ASSERT_TRUE(*value);
   }
+
+  // load the file from disk and check that it has been updated
+  std::ifstream file(flag_val, std::ios::binary | std::ios::ate);
+  std::streamsize size = file.tellg();
+  file.seekg(0, std::ios::beg);
+
+  std::vector<uint8_t> buffer(size);
+  file.read(reinterpret_cast<char *>(buffer.data()), size);
+
+  auto content = rust::Slice<const uint8_t>(
+      buffer.data(), mapped_file->file_size);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto value_cxx = get_boolean_flag_value_cxx(content, offset);
+    ASSERT_TRUE(value_cxx.query_success);
+    ASSERT_TRUE(value_cxx.flag_value);
+  }
 }
 
 /// Negative test to lock down the error when querying flag value out of range
@@ -112,15 +132,43 @@
         *mapped_file, api::FlagValueType::Boolean, offset);
     ASSERT_TRUE(attribute.ok());
     ASSERT_TRUE(*attribute & api::FlagInfoBit::HasServerOverride);
+  }
 
-    update_result = api::set_flag_has_server_override(
+  // load the file from disk and check that it has been updated
+  std::ifstream file(flag_info, std::ios::binary | std::ios::ate);
+  std::streamsize size = file.tellg();
+  file.seekg(0, std::ios::beg);
+
+  std::vector<uint8_t> buffer(size);
+  file.read(reinterpret_cast<char *>(buffer.data()), size);
+
+  auto content = rust::Slice<const uint8_t>(
+      buffer.data(), mapped_file->file_size);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_TRUE(attribute.flag_attribute & api::FlagInfoBit::HasServerOverride);
+  }
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto update_result = api::set_flag_has_server_override(
         *mapped_file, api::FlagValueType::Boolean, offset, false);
     ASSERT_TRUE(update_result.ok());
-    attribute = api::get_flag_attribute(
+    auto attribute = api::get_flag_attribute(
         *mapped_file, api::FlagValueType::Boolean, offset);
     ASSERT_TRUE(attribute.ok());
     ASSERT_FALSE(*attribute & api::FlagInfoBit::HasServerOverride);
   }
+
+  std::ifstream file2(flag_info, std::ios::binary);
+  buffer.clear();
+  file2.read(reinterpret_cast<char *>(buffer.data()), size);
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_FALSE(attribute.flag_attribute & api::FlagInfoBit::HasServerOverride);
+  }
 }
 
 /// Test to lock down storage flag has local override update api
@@ -137,13 +185,41 @@
         *mapped_file, api::FlagValueType::Boolean, offset);
     ASSERT_TRUE(attribute.ok());
     ASSERT_TRUE(*attribute & api::FlagInfoBit::HasLocalOverride);
+  }
 
-    update_result = api::set_flag_has_local_override(
+  // load the file from disk and check that it has been updated
+  std::ifstream file(flag_info, std::ios::binary | std::ios::ate);
+  std::streamsize size = file.tellg();
+  file.seekg(0, std::ios::beg);
+
+  std::vector<uint8_t> buffer(size);
+  file.read(reinterpret_cast<char *>(buffer.data()), size);
+
+  auto content = rust::Slice<const uint8_t>(
+      buffer.data(), mapped_file->file_size);
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_TRUE(attribute.flag_attribute & api::FlagInfoBit::HasLocalOverride);
+  }
+
+  for (int offset = 0; offset < 8; ++offset) {
+    auto update_result = api::set_flag_has_local_override(
         *mapped_file, api::FlagValueType::Boolean, offset, false);
     ASSERT_TRUE(update_result.ok());
-    attribute = api::get_flag_attribute(
+    auto attribute = api::get_flag_attribute(
         *mapped_file, api::FlagValueType::Boolean, offset);
     ASSERT_TRUE(attribute.ok());
     ASSERT_FALSE(*attribute & api::FlagInfoBit::HasLocalOverride);
   }
+
+  std::ifstream file2(flag_info, std::ios::binary);
+  buffer.clear();
+  file2.read(reinterpret_cast<char *>(buffer.data()), size);
+  for (int offset = 0; offset < 8; ++offset) {
+    auto attribute = get_flag_attribute_cxx(content, api::FlagValueType::Boolean, offset);
+    ASSERT_TRUE(attribute.query_success);
+    ASSERT_FALSE(attribute.flag_attribute & api::FlagInfoBit::HasLocalOverride);
+  }
 }
diff --git a/tools/aconfig/aflags/Android.bp b/tools/aconfig/aflags/Android.bp
index 2a02379..c48585a 100644
--- a/tools/aconfig/aflags/Android.bp
+++ b/tools/aconfig/aflags/Android.bp
@@ -11,6 +11,7 @@
     rustlibs: [
         "libaconfig_device_paths",
         "libaconfig_protos",
+        "libaconfigd_protos",
         "libaconfig_storage_read_api",
         "libaconfig_storage_file",
         "libanyhow",
diff --git a/tools/aconfig/aflags/Cargo.toml b/tools/aconfig/aflags/Cargo.toml
index eeae295..7dc3436 100644
--- a/tools/aconfig/aflags/Cargo.toml
+++ b/tools/aconfig/aflags/Cargo.toml
@@ -9,6 +9,7 @@
 protobuf = "3.2.0"
 regex = "1.10.3"
 aconfig_protos = { path = "../aconfig_protos" }
+aconfigd_protos = { version = "0.1.0", path = "../../../../../system/server_configurable_flags/aconfigd"}
 nix = { version = "0.28.0", features = ["user"] }
 aconfig_storage_file = { version = "0.1.0", path = "../aconfig_storage_file" }
 aconfig_storage_read_api = { version = "0.1.0", path = "../aconfig_storage_read_api" }
diff --git a/tools/aconfig/aflags/src/aconfig_storage_source.rs b/tools/aconfig/aflags/src/aconfig_storage_source.rs
index 04140c7..b2fd3c9 100644
--- a/tools/aconfig/aflags/src/aconfig_storage_source.rs
+++ b/tools/aconfig/aflags/src/aconfig_storage_source.rs
@@ -1,52 +1,125 @@
-use crate::{Flag, FlagPermission, FlagSource, FlagValue, ValuePickedFrom};
-use anyhow::{anyhow, Result};
-
-use std::fs::File;
-use std::io::Read;
+use crate::{Flag, FlagSource};
+use crate::{FlagPermission, FlagValue, ValuePickedFrom};
+use aconfigd_protos::{
+    ProtoFlagQueryReturnMessage, ProtoListStorageMessage, ProtoListStorageMessageMsg,
+    ProtoStorageRequestMessage, ProtoStorageRequestMessageMsg, ProtoStorageRequestMessages,
+    ProtoStorageReturnMessage, ProtoStorageReturnMessageMsg, ProtoStorageReturnMessages,
+};
+use anyhow::anyhow;
+use anyhow::Result;
+use protobuf::Message;
+use protobuf::SpecialFields;
+use std::io::{Read, Write};
+use std::net::Shutdown;
+use std::os::unix::net::UnixStream;
 
 pub struct AconfigStorageSource {}
 
-use aconfig_storage_file::protos::ProtoStorageFiles;
+fn convert(msg: ProtoFlagQueryReturnMessage) -> Result<Flag> {
+    let (value, value_picked_from) = match (
+        &msg.boot_flag_value,
+        msg.default_flag_value,
+        msg.local_flag_value,
+        msg.has_local_override,
+    ) {
+        (_, _, Some(local), Some(has_local)) if has_local => {
+            (FlagValue::try_from(local.as_str())?, ValuePickedFrom::Local)
+        }
+        (Some(boot), Some(default), _, _) => {
+            let value = FlagValue::try_from(boot.as_str())?;
+            if *boot == default {
+                (value, ValuePickedFrom::Default)
+            } else {
+                (value, ValuePickedFrom::Server)
+            }
+        }
+        _ => return Err(anyhow!("missing override")),
+    };
 
-static STORAGE_INFO_FILE_PATH: &str = "/metadata/aconfig/persistent_storage_file_records.pb";
+    let staged_value = match (msg.boot_flag_value, msg.server_flag_value, msg.has_server_override) {
+        (Some(boot), Some(server), _) if boot == server => None,
+        (Some(boot), Some(server), Some(has_server)) if boot != server && has_server => {
+            Some(FlagValue::try_from(server.as_str())?)
+        }
+        _ => None,
+    };
+
+    let permission = match msg.is_readwrite {
+        Some(is_readwrite) => {
+            if is_readwrite {
+                FlagPermission::ReadWrite
+            } else {
+                FlagPermission::ReadOnly
+            }
+        }
+        None => return Err(anyhow!("missing permission")),
+    };
+
+    Ok(Flag {
+        name: msg.flag_name.ok_or(anyhow!("missing flag name"))?,
+        package: msg.package_name.ok_or(anyhow!("missing package name"))?,
+        value,
+        permission,
+        value_picked_from,
+        staged_value,
+        container: "-".to_string(),
+
+        // TODO: remove once DeviceConfig is not in the CLI.
+        namespace: "-".to_string(),
+    })
+}
+
+fn read_from_socket() -> Result<Vec<ProtoFlagQueryReturnMessage>> {
+    let messages = ProtoStorageRequestMessages {
+        msgs: vec![ProtoStorageRequestMessage {
+            msg: Some(ProtoStorageRequestMessageMsg::ListStorageMessage(ProtoListStorageMessage {
+                msg: Some(ProtoListStorageMessageMsg::All(true)),
+                special_fields: SpecialFields::new(),
+            })),
+            special_fields: SpecialFields::new(),
+        }],
+        special_fields: SpecialFields::new(),
+    };
+
+    let mut socket = UnixStream::connect("/dev/socket/aconfigd")?;
+
+    let message_buffer = messages.write_to_bytes()?;
+    let mut message_length_buffer: [u8; 4] = [0; 4];
+    let message_size = &message_buffer.len();
+    message_length_buffer[0] = (message_size >> 24) as u8;
+    message_length_buffer[1] = (message_size >> 16) as u8;
+    message_length_buffer[2] = (message_size >> 8) as u8;
+    message_length_buffer[3] = *message_size as u8;
+    socket.write_all(&message_length_buffer)?;
+    socket.write_all(&message_buffer)?;
+    socket.shutdown(Shutdown::Write)?;
+
+    let mut response_length_buffer: [u8; 4] = [0; 4];
+    socket.read_exact(&mut response_length_buffer)?;
+    let response_length = u32::from_be_bytes(response_length_buffer) as usize;
+    let mut response_buffer = vec![0; response_length];
+    socket.read_exact(&mut response_buffer)?;
+
+    let response: ProtoStorageReturnMessages =
+        protobuf::Message::parse_from_bytes(&response_buffer)?;
+
+    match response.msgs.as_slice() {
+        [ProtoStorageReturnMessage {
+            msg: Some(ProtoStorageReturnMessageMsg::ListStorageMessage(list_storage_message)),
+            ..
+        }] => Ok(list_storage_message.flags.clone()),
+        _ => Err(anyhow!("unexpected response from aconfigd")),
+    }
+}
 
 impl FlagSource for AconfigStorageSource {
     fn list_flags() -> Result<Vec<Flag>> {
-        let mut result = Vec::new();
-
-        let mut file = File::open(STORAGE_INFO_FILE_PATH)?;
-        let mut bytes = Vec::new();
-        file.read_to_end(&mut bytes)?;
-        let storage_file_info: ProtoStorageFiles = protobuf::Message::parse_from_bytes(&bytes)?;
-
-        for file_info in storage_file_info.files {
-            let package_map =
-                file_info.package_map.ok_or(anyhow!("storage file is missing package map"))?;
-            let flag_map = file_info.flag_map.ok_or(anyhow!("storage file is missing flag map"))?;
-            let flag_val = file_info.flag_val.ok_or(anyhow!("storage file is missing flag val"))?;
-            let container =
-                file_info.container.ok_or(anyhow!("storage file is missing container"))?;
-
-            for listed_flag in aconfig_storage_file::list_flags(&package_map, &flag_map, &flag_val)?
-            {
-                result.push(Flag {
-                    name: listed_flag.flag_name,
-                    package: listed_flag.package_name,
-                    value: FlagValue::try_from(listed_flag.flag_value.as_str())?,
-                    container: container.to_string(),
-
-                    // TODO(b/324436145): delete namespace field once DeviceConfig isn't in CLI.
-                    namespace: "-".to_string(),
-
-                    // TODO(b/324436145): Populate with real values once API is available.
-                    staged_value: None,
-                    permission: FlagPermission::ReadOnly,
-                    value_picked_from: ValuePickedFrom::Default,
-                });
-            }
-        }
-
-        Ok(result)
+        read_from_socket()
+            .map(|query_messages| {
+                query_messages.iter().map(|message| convert(message.clone())).collect::<Vec<_>>()
+            })?
+            .into_iter()
+            .collect()
     }
 
     fn override_flag(_namespace: &str, _qualified_name: &str, _value: &str) -> Result<()> {
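For reference, the wire format used above is a 4-byte big-endian length prefix followed by the serialized ProtoStorageRequestMessages, and the response from aconfigd is framed the same way. A minimal Python sketch of just that framing, assuming an already-serialized request payload (the proto handling is omitted; the socket path comes from the code above):

    # Illustrative sketch of the length-prefixed exchange with aconfigd.
    import socket
    import struct

    def query_aconfigd(payload, path="/dev/socket/aconfigd"):
        """Send serialized request bytes, return serialized response bytes."""
        with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
            sock.connect(path)
            sock.sendall(struct.pack(">I", len(payload)))  # 4-byte big-endian length
            sock.sendall(payload)
            sock.shutdown(socket.SHUT_WR)                  # signal end of request
            (resp_len,) = struct.unpack(">I", _read_exact(sock, 4))
            return _read_exact(sock, resp_len)

    def _read_exact(sock, n):
        buf = b""
        while len(buf) < n:
            chunk = sock.recv(n - len(buf))
            if not chunk:
                raise EOFError("socket closed before full response was read")
            buf += chunk
        return buf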
diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs
index 05c15bb..d8912a9 100644
--- a/tools/aconfig/aflags/src/main.rs
+++ b/tools/aconfig/aflags/src/main.rs
@@ -50,6 +50,7 @@
 enum ValuePickedFrom {
     Default,
     Server,
+    Local,
 }
 
 impl std::fmt::Display for ValuePickedFrom {
@@ -60,6 +61,7 @@
             match &self {
                 Self::Default => "default",
                 Self::Server => "server",
+                Self::Local => "local",
             }
         )
     }
@@ -233,8 +235,6 @@
 }
 
 fn set_flag(qualified_name: &str, value: &str) -> Result<()> {
-    ensure!(nix::unistd::Uid::current().is_root(), "must be root to mutate flags");
-
     let flags_binding = DeviceConfigSource::list_flags()?;
     let flag = flags_binding.iter().find(|f| f.qualified_name() == qualified_name).ok_or(
         anyhow!("no aconfig flag '{qualified_name}'. Does the flag have an .aconfig definition?"),
@@ -282,11 +282,15 @@
     Ok(result)
 }
 
-fn main() {
+fn main() -> Result<()> {
+    ensure!(nix::unistd::Uid::current().is_root(), "must be root");
+
     let cli = Cli::parse();
     let output = match cli.command {
         Command::List { use_new_storage: true, container } => {
-            list(FlagSourceType::AconfigStorage, container).map(Some)
+            list(FlagSourceType::AconfigStorage, container)
+                .map_err(|err| anyhow!("storage may not be enabled: {err}"))
+                .map(Some)
         }
         Command::List { use_new_storage: false, container } => {
             list(FlagSourceType::DeviceConfig, container).map(Some)
@@ -299,6 +303,8 @@
         Ok(None) => (),
         Err(message) => println!("Error: {message}"),
     }
+
+    Ok(())
 }
 
 #[cfg(test)]
diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp
index 4566bf9..7704742 100644
--- a/tools/aconfig/fake_device_config/Android.bp
+++ b/tools/aconfig/fake_device_config/Android.bp
@@ -13,10 +13,24 @@
 // limitations under the License.
 
 java_library {
-	name: "fake_device_config",
-	srcs: ["src/**/*.java"],
-	sdk_version: "none",
-	system_modules: "core-all-system-modules",
-	host_supported: true,
+    name: "fake_device_config",
+    srcs: [
+        "src/android/util/Log.java",
+        "src/android/provider/DeviceConfig.java",
+        "src/android/os/StrictMode.java",
+    ],
+    sdk_version: "none",
+    system_modules: "core-all-system-modules",
+    host_supported: true,
+    is_stubs_module: true,
 }
 
+java_library {
+    name: "strict_mode_stub",
+    srcs: [
+        "src/android/os/StrictMode.java",
+    ],
+    sdk_version: "core_current",
+    host_supported: true,
+    is_stubs_module: true,
+}
diff --git a/tools/ide_query/ide_query_proto/Android.bp b/tools/aconfig/fake_device_config/src/android/os/StrictMode.java
similarity index 62%
copy from tools/ide_query/ide_query_proto/Android.bp
copy to tools/aconfig/fake_device_config/src/android/os/StrictMode.java
index 70f15cd..6416252 100644
--- a/tools/ide_query/ide_query_proto/Android.bp
+++ b/tools/aconfig/fake_device_config/src/android/os/StrictMode.java
@@ -14,20 +14,16 @@
  * limitations under the License.
  */
 
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-}
+package android.os;
 
-cc_library_host_static {
-    name: "ide_query_proto",
-    srcs: [
-        "ide_query.proto",
-    ],
-    proto: {
-        export_proto_headers: true,
-        type: "full",
-        canonical_path_from_root: false,
-    },
-    compile_multilib: "64",
-    shared_libs: ["libprotobuf-cpp-full"],
+public class StrictMode {
+    public static ThreadPolicy allowThreadDiskReads() {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public static void setThreadPolicy(final ThreadPolicy policy) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public static final class ThreadPolicy {}
 }
diff --git a/tools/aconfig/fake_device_config/src/android/util/Log.java b/tools/aconfig/fake_device_config/src/android/util/Log.java
new file mode 100644
index 0000000..3e7fd0f
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/util/Log.java
@@ -0,0 +1,15 @@
+package android.util;
+
+public final class Log {
+    public static int i(String tag, String msg) {
+        return 0;
+    }
+
+    public static int e(String tag, String msg) {
+        return 0;
+    }
+
+    public static int e(String tag, String msg, Throwable tr) {
+        return 0;
+    }
+}
diff --git a/tools/check-flagged-apis/check-flagged-apis.sh b/tools/check-flagged-apis/check-flagged-apis.sh
index d9934a1..8078cd8 100755
--- a/tools/check-flagged-apis/check-flagged-apis.sh
+++ b/tools/check-flagged-apis/check-flagged-apis.sh
@@ -14,8 +14,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# Run check-flagged-apis for public APIs and the three @SystemApi flavours
-# Usage: lunch <your-target> && source <this script>
+# Run check-flagged-apis for public APIs and the three @SystemApi flavours.
+#
+# This script expects an argument to tell it which subcommand of
+# check-flagged-apis to execute. Run the script without any arguments to see
+# the valid options.
+#
+# Remember to lunch to select the relevant release config before running this script.
 
 source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../shell_utils.sh
 require_top
@@ -43,6 +48,10 @@
         $MODULE_LIB_XML_VERSIONS
 }
 
+function noop() {
+    true
+}
+
 function aninja() {
     local T="$(gettop)"
     (\cd "${T}" && prebuilts/build-tools/linux-x86/bin/ninja -f out/combined-${TARGET_PRODUCT}.ninja "$@")
@@ -52,11 +61,11 @@
     aninja -t query device_"$1"_all_targets | grep -A1 -e input: | tail -n1
 }
 
-function run() {
+function run_check() {
     local errors=0
 
     echo "# current"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $PUBLIC_XML_VERSIONS
@@ -64,7 +73,7 @@
 
     echo
     echo "# system-current"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-system-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $SYSTEM_XML_VERSIONS
@@ -72,7 +81,7 @@
 
     echo
     echo "# system-server-current"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-system-server-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $SYSTEM_SERVER_XML_VERSONS
@@ -80,7 +89,7 @@
 
     echo
     echo "# module-lib"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-module-lib-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $MODULE_LIB_XML_VERSIONS
@@ -89,8 +98,39 @@
     return $errors
 }
 
-if [[ "$1" != "--skip-build" ]]; then
-    build && run
-else
-    run
+function run_list() {
+    echo "# current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# system-current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# system-server-current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-server-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# module-lib"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-module-lib-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+}
+
+build_cmd=build
+if [[ "$1" == "--skip-build" ]]; then
+    build_cmd=noop
+    shift 1
 fi
+
+case "$1" in
+    check) $build_cmd && run_check ;;
+    list) $build_cmd && run_list ;;
+    *) echo "usage: $(basename $0): [--skip-build] check|list"; exit 1
+esac
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
index 8e285f6..e07ac1d 100644
--- a/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
@@ -358,4 +358,23 @@
             parseApiVersions(API_VERSIONS.byteInputStream()))
     assertEquals(expected, actual)
   }
+
+  @Test
+  fun testListFlaggedApis() {
+    val expected =
+        listOf(
+            "android.flag.bar DISABLED android/Clazz/Builder",
+            "android.flag.foo ENABLED android/Clazz",
+            "android.flag.foo ENABLED android/Clazz/Clazz()",
+            "android.flag.foo ENABLED android/Clazz/FOO",
+            "android.flag.foo ENABLED android/Clazz/getErrorCode()",
+            "android.flag.foo ENABLED android/Clazz/innerClassArg(Landroid/Clazz/Builder;)",
+            "android.flag.foo ENABLED android/Clazz/setData(I[[ILandroid/util/Utility;)",
+            "android.flag.foo ENABLED android/Clazz/setVariableData(I[Landroid/util/Atom;)")
+    val actual =
+        listFlaggedApis(
+            parseApiSignature("in-memory", API_SIGNATURE.byteInputStream()),
+            parseFlagValues(generateFlagsProto(ENABLED, DISABLED)))
+    assertEquals(expected, actual)
+  }
 }
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
index 1d2440d..d323c20 100644
--- a/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
@@ -19,13 +19,14 @@
 
 import android.aconfig.Aconfig
 import com.android.tools.metalava.model.BaseItemVisitor
+import com.android.tools.metalava.model.CallableItem
 import com.android.tools.metalava.model.ClassItem
 import com.android.tools.metalava.model.FieldItem
 import com.android.tools.metalava.model.Item
-import com.android.tools.metalava.model.MethodItem
 import com.android.tools.metalava.model.text.ApiFile
 import com.github.ajalt.clikt.core.CliktCommand
 import com.github.ajalt.clikt.core.ProgramResult
+import com.github.ajalt.clikt.core.subcommands
 import com.github.ajalt.clikt.parameters.options.help
 import com.github.ajalt.clikt.parameters.options.option
 import com.github.ajalt.clikt.parameters.options.required
@@ -141,6 +142,33 @@
   }
 }
 
+val ARG_API_SIGNATURE = "--api-signature"
+val ARG_API_SIGNATURE_HELP =
+    """
+Path to API signature file.
+Usually named *current.txt.
+Tip: `m frameworks-base-api-current.txt` will generate a file that includes all platform and mainline APIs.
+"""
+
+val ARG_FLAG_VALUES = "--flag-values"
+val ARG_FLAG_VALUES_HELP =
+    """
+Path to aconfig parsed_flags binary proto file.
+Tip: `m all_aconfig_declarations` will generate a file that includes all information about all flags.
+"""
+
+val ARG_API_VERSIONS = "--api-versions"
+val ARG_API_VERSIONS_HELP =
+    """
+Path to API versions XML file.
+Usually named xml-versions.xml.
+Tip: `m sdk dist` will generate a file that includes all platform and mainline APIs.
+"""
+
+class MainCommand : CliktCommand() {
+  override fun run() {}
+}
+
 class CheckCommand :
     CliktCommand(
         help =
@@ -152,32 +180,18 @@
 The tool will exit with a non-zero exit code if any flagged APIs are found to be used in the incorrect way.
 """) {
   private val apiSignaturePath by
-      option("--api-signature")
-          .help(
-              """
-              Path to API signature file.
-              Usually named *current.txt.
-              Tip: `m frameworks-base-api-current.txt` will generate a file that includes all platform and mainline APIs.
-              """)
+      option(ARG_API_SIGNATURE)
+          .help(ARG_API_SIGNATURE_HELP)
           .path(mustExist = true, canBeDir = false, mustBeReadable = true)
           .required()
   private val flagValuesPath by
-      option("--flag-values")
-          .help(
-              """
-            Path to aconfig parsed_flags binary proto file.
-            Tip: `m all_aconfig_declarations` will generate a file that includes all information about all flags.
-            """)
+      option(ARG_FLAG_VALUES)
+          .help(ARG_FLAG_VALUES_HELP)
           .path(mustExist = true, canBeDir = false, mustBeReadable = true)
           .required()
   private val apiVersionsPath by
-      option("--api-versions")
-          .help(
-              """
-            Path to API versions XML file.
-            Usually named xml-versions.xml.
-            Tip: `m sdk dist` will generate a file that includes all platform and mainline APIs.
-            """)
+      option(ARG_API_VERSIONS)
+          .help(ARG_API_VERSIONS_HELP)
           .path(mustExist = true, canBeDir = false, mustBeReadable = true)
           .required()
 
@@ -196,6 +210,40 @@
   }
 }
 
+class ListCommand :
+    CliktCommand(
+        help =
+            """
+List all flagged APIs and corresponding flags.
+
+The output format is "<fully-qualified-name-of-flag> <state-of-flag> <API>", one line per API.
+
+The output can be post-processed by e.g. piping it to grep to filter out only enabled APIs, or all APIs guarded by a given flag.
+""") {
+  private val apiSignaturePath by
+      option(ARG_API_SIGNATURE)
+          .help(ARG_API_SIGNATURE_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+  private val flagValuesPath by
+      option(ARG_FLAG_VALUES)
+          .help(ARG_FLAG_VALUES_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+
+  override fun run() {
+    val flaggedSymbols =
+        apiSignaturePath.toFile().inputStream().use {
+          parseApiSignature(apiSignaturePath.toString(), it)
+        }
+    val flags = flagValuesPath.toFile().inputStream().use { parseFlagValues(it) }
+    val output = listFlaggedApis(flaggedSymbols, flags)
+    if (output.isNotEmpty()) {
+      println(output.joinToString("\n"))
+    }
+  }
+}
+
 internal fun parseApiSignature(path: String, input: InputStream): Set<Pair<Symbol, Flag>> {
   val output = mutableSetOf<Pair<Symbol, Flag>>()
   val visitor =
@@ -226,15 +274,15 @@
           }
         }
 
-        override fun visitMethod(method: MethodItem) {
-          getFlagOrNull(method)?.let { flag ->
-            val methodName = buildString {
-              append(method.name())
+        override fun visitCallable(callable: CallableItem) {
+          getFlagOrNull(callable)?.let { flag ->
+            val callableSignature = buildString {
+              append(callable.name())
               append("(")
-              method.parameters().joinTo(this, separator = "") { it.type().internalName() }
+              callable.parameters().joinTo(this, separator = "") { it.type().internalName() }
               append(")")
             }
-            val symbol = Symbol.createMethod(method.containingClass().qualifiedName(), methodName)
+            val symbol = Symbol.createMethod(callable.containingClass().qualifiedName(), callableSignature)
             output.add(Pair(symbol, flag))
           }
         }
@@ -446,4 +494,35 @@
   return errors
 }
 
-fun main(args: Array<String>) = CheckCommand().main(args)
+/**
+ * Collect all known info about all @FlaggedApi annotated APIs.
+ *
+ * Each API will be represented as a String, in the format
+ * <pre>
+ *   &lt;fully-qualified-name-of-flag&gt; &lt;state-of-flag&gt; &lt;API&gt;
+ * </pre>
+ *
+ * @param flaggedSymbolsInSource the set of symbols that are flagged in the source code
+ * @param flags the set of flags and their values
+ * @return a list of Strings encoding API data using the format described above, sorted
+ *   alphabetically
+ */
+internal fun listFlaggedApis(
+    flaggedSymbolsInSource: Set<Pair<Symbol, Flag>>,
+    flags: Map<Flag, Boolean>
+): List<String> {
+  val output = mutableListOf<String>()
+  for ((symbol, flag) in flaggedSymbolsInSource) {
+    val flagState =
+        when (flags.get(flag)) {
+          true -> "ENABLED"
+          false -> "DISABLED"
+          null -> "UNKNOWN"
+        }
+    output.add("$flag $flagState ${symbol.toPrettyString()}")
+  }
+  output.sort()
+  return output
+}
+
+fun main(args: Array<String>) = MainCommand().subcommands(CheckCommand(), ListCommand()).main(args)
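The `list` subcommand added above prints one line per API in the format "<fully-qualified-name-of-flag> <state-of-flag> <API>", which makes it easy to post-process. A hypothetical Python helper, as one example of the grep-style filtering the help text suggests:

    # Hypothetical helper: keep only APIs guarded by an ENABLED flag in the
    # output of `check-flagged-apis list`.
    def enabled_apis(list_output):
        apis = []
        for line in list_output.splitlines():
            flag, state, api = line.split(" ", 2)  # "<flag> <state> <API>"
            if state == "ENABLED":
                apis.append(api)
        return sorted(apis)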
diff --git a/tools/envsetup/run_envsetup_tests b/tools/envsetup/run_envsetup_tests
new file mode 100755
index 0000000..5977448
--- /dev/null
+++ b/tools/envsetup/run_envsetup_tests
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import pathlib
+import subprocess
+import sys
+
+SOURCE_ENVSETUP="source build/make/envsetup.sh && "
+
+def update_display():
+    sys.stderr.write("passed\n")
+
+def go_to_root():
+    while True:
+        if os.path.exists("build/make/envsetup.sh"):
+            return
+        if os.getcwd() == "/":
+            sys.stderr.write("Can't find root of the source tree\n")
+            print("\nFAILED")
+            sys.exit(1)
+        os.chdir("..")
+
+def is_test(name, thing):
+    if not callable(thing):
+        return False
+    if name == "test":
+        return False
+    return name.startswith("test")
+
+
+def test(shell, command, expected_return, expected_stdout, expected_stderr, expected_env):
+    command += "; _rc=$?"
+    for env in expected_env.keys():
+        command += f"; echo ENV: {env}=\\\"${env}\\\""
+    command += "; exit $_rc"
+
+    cmd = [shell, "-c", command]
+    result = subprocess.run(cmd, capture_output=True, text=True)
+
+    status = True
+
+    if result.returncode != expected_return:
+        print()
+        print(f"Expected return code: {expected_return}")
+        print(f"Actual return code:   {result.returncode}")
+        status = False
+
+    printed_stdout = False
+    if expected_stdout and expected_stdout not in result.stdout:
+        print()
+        print(f"Expected stdout to contain:\n{expected_stdout}")
+        print(f"\nActual stdout:\n{result.stdout}")
+        printed_stdout = True
+        status = False
+
+    if expected_stderr and expected_stderr not in result.stderr:
+        print()
+        print(f"Expected stderr to contain:\n{expected_stderr}")
+        print(f"\nActual stderr:\n{result.stderr}")
+        status = False
+
+    env_failure = False
+    for k, v in expected_env.items():
+        if f"{k}=\"{v}\"" not in result.stdout:
+            print()
+            print(f"Expected environment variable {k} to be: {v} --- {k}=\"{v}\"")
+            env_failure = True
+            status = False
+
+    if env_failure and not printed_stdout:
+        print()
+        print("See stdout:")
+        print(result.stdout)
+
+    if not status:
+        print()
+        print("Command to reproduce:")
+        print(command)
+        print()
+
+    return status
+
+NO_LUNCH = {
+    "TARGET_PRODUCT": "",
+    "TARGET_RELEASE": "",
+    "TARGET_BUILD_VARIANT": "",
+}
+
+def test_invalid_lunch_target(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch invalid-trunk_staging-eng",
+         expected_return=1, expected_stdout=None,
+         expected_stderr="Cannot locate config makefile for product",
+         expected_env=NO_LUNCH)
+
+
+def test_aosp_arm(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch aosp_arm-trunk_staging-eng",
+         expected_return=0, expected_stdout=None, expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "eng",
+        })
+
+
+def test_lunch2_empty(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2",
+         expected_return=1, expected_stdout=None,
+         expected_stderr="No target specified. See lunch --help",
+         expected_env=NO_LUNCH)
+
+def test_lunch2_four_params(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2 a b c d",
+         expected_return=1, expected_stdout=None,
+         expected_stderr="Too many parameters given. See lunch --help",
+         expected_env=NO_LUNCH)
+
+def test_lunch2_aosp_arm(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2 aosp_arm",
+         expected_return=0, expected_stdout="=========", expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "eng",
+        })
+
+def test_lunch2_aosp_arm_trunk_staging(shell):
+    # Somewhat unfortunate because trunk_staging is the only config in
+    # aosp so we can't really test that this isn't just getting the default
+    return test(shell, SOURCE_ENVSETUP + "lunch2 aosp_arm trunk_staging",
+         expected_return=0, expected_stdout="=========", expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "eng",
+        })
+
+def test_lunch2_aosp_arm_trunk_staging_userdebug(shell):
+    return test(shell, SOURCE_ENVSETUP + "lunch2 aosp_arm trunk_staging userdebug",
+         expected_return=0, expected_stdout="=========", expected_stderr=None,
+         expected_env={
+            "TARGET_PRODUCT": "aosp_arm",
+            "TARGET_RELEASE": "trunk_staging",
+            "TARGET_BUILD_VARIANT": "userdebug",
+        })
+
+def test_list_products(shell):
+    return test(shell, "build/soong/bin/list_products",
+         expected_return=0, expected_stdout="aosp_arm", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_releases_param(shell):
+    return test(shell, "build/soong/bin/list_releases aosp_arm",
+         expected_return=0, expected_stdout="trunk_staging", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_releases_env(shell):
+    return test(shell, "TARGET_PRODUCT=aosp_arm build/soong/bin/list_releases",
+         expected_return=0, expected_stdout="trunk_staging", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_releases_no_product(shell):
+    return test(shell, "build/soong/bin/list_releases",
+         expected_return=1, expected_stdout=None, expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+def test_list_variants(shell):
+    return test(shell, "build/soong/bin/list_variants",
+         expected_return=0, expected_stdout="userdebug", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+
+def test_get_build_var_in_path(shell):
+    return test(shell, SOURCE_ENVSETUP + "which get_build_var ",
+         expected_return=0, expected_stdout="soong/bin", expected_stderr=None,
+         expected_env=NO_LUNCH)
+
+
+
+TESTS=sorted([(name, thing) for name, thing in locals().items() if is_test(name, thing)])
+
+def main():
+    if any([x.endswith("/soong/bin") for x in os.getenv("PATH").split(":")]):
+        sys.stderr.write("run_envsetup_tests must be run in a shell that has not sourced"
+                + " envsetup.sh\n\nFAILED\n")
+        return 1
+
+    go_to_root()
+
+    tests = TESTS
+    if len(sys.argv) > 1:
+        tests = [(name, func) for name, func in tests if name in sys.argv]
+
+    shells = ["/usr/bin/bash", "/usr/bin/zsh"]
+    total_count = len(tests) * len(shells)
+    index = 1
+    failed_tests = 0
+
+    for name, func in tests:
+        for shell in shells:
+            sys.stdout.write(f"\33[2K\r{index} of {total_count}: {name} in {shell}")
+            passed = func(shell)
+            if not passed:
+                failed_tests += 1
+            index += 1
+
+    if failed_tests > 0:
+        print(f"\n\nFAILED: {failed_tests} of {total_count}")
+        return 1
+    else:
+        print("\n\nSUCCESS")
+        return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
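The test() helper above verifies environment variables by appending `echo ENV: VAR=\"$VAR\"` commands to the command under test and then checking stdout for `VAR="value"`. A small sketch of the command string it effectively builds (extracted from the helper above; the example values are illustrative):

    # For expected_env={"TARGET_PRODUCT": "aosp_arm"} and the lunch2 test, the
    # command handed to the shell is roughly:
    #   source build/make/envsetup.sh && lunch2 aosp_arm; _rc=$?; \
    #     echo ENV: TARGET_PRODUCT=\"$TARGET_PRODUCT\"; exit $_rc
    # and the test passes if stdout contains: TARGET_PRODUCT="aosp_arm"
    def build_command(base, expected_env):
        cmd = base + "; _rc=$?"
        for var in expected_env:
            cmd += f'; echo ENV: {var}=\\"${var}\\"'
        return cmd + "; exit $_rc"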
diff --git a/tools/envsetup/spam_for_lunch b/tools/envsetup/spam_for_lunch
new file mode 100755
index 0000000..2e150a6
--- /dev/null
+++ b/tools/envsetup/spam_for_lunch
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This ad is kind of big, so only show it if this appears to be a clean build.
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../shell_utils.sh
+if [[ ! -e $(getoutdir)/soong/build.${TARGET_PRODUCT}.ninja ]]; then
+  echo
+  echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+  echo "  Wondering whether to use user, userdebug or eng?"
+  echo
+  echo "  user        The builds that ship to users. Reduced debugability."
+  echo "  userdebug   High fidelity to user builds but with some debugging options"
+  echo "              enabled. Best suited for performance testing or day-to-day use"
+  echo "              with debugging enabled."
+  echo "  eng         More debugging options enabled and faster build times, but"
+  echo "              runtime performance tradeoffs. Best suited for day-to-day"
+  echo "              local development when not doing performance testing."
+  echo "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+  echo
+fi
+
diff --git a/tools/filelistdiff/Android.bp b/tools/filelistdiff/Android.bp
new file mode 100644
index 0000000..632ada3
--- /dev/null
+++ b/tools/filelistdiff/Android.bp
@@ -0,0 +1,27 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_binary_host {
+    name: "file_list_diff",
+    srcs: ["file_list_diff.py"],
+}
+
+prebuilt_etc_host {
+    name: "system_image_diff_allowlist",
+    src: "allowlist",
+}
diff --git a/tools/filelistdiff/allowlist b/tools/filelistdiff/allowlist
new file mode 100644
index 0000000..073a8de
--- /dev/null
+++ b/tools/filelistdiff/allowlist
@@ -0,0 +1,37 @@
+# Known diffs only in the KATI system image
+etc/NOTICE.xml.gz
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex.fsv_meta
+lib/aaudio-aidl-cpp.so
+lib/android.hardware.biometrics.fingerprint@2.1.so
+lib/android.hardware.radio.config@1.0.so
+lib/android.hardware.radio.deprecated@1.0.so
+lib/android.hardware.radio@1.0.so
+lib/android.hardware.radio@1.1.so
+lib/android.hardware.radio@1.2.so
+lib/android.hardware.radio@1.3.so
+lib/android.hardware.radio@1.4.so
+lib/android.hardware.secure_element@1.0.so
+lib/com.android.media.aaudio-aconfig-cc.so
+lib/heapprofd_client.so
+lib/heapprofd_client_api.so
+lib/libaaudio.so
+lib/libaaudio_internal.so
+lib/libalarm_jni.so
+lib/libamidi.so
+lib/libcups.so
+lib/libjni_deviceAsWebcam.so
+lib/libprintspooler_jni.so
+lib/libvendorsupport.so
+lib/libwfds.so
+lib/libyuv.so
+
+# b/351258461
+adb_keys
+init.environ.rc
+
+# Known diffs only in the Soong system image
+lib/libhidcommand_jni.so
+lib/libuinputcommand_jni.so
\ No newline at end of file
diff --git a/tools/filelistdiff/file_list_diff.py b/tools/filelistdiff/file_list_diff.py
new file mode 100644
index 0000000..cdc5b2e
--- /dev/null
+++ b/tools/filelistdiff/file_list_diff.py
@@ -0,0 +1,66 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+
+COLOR_WARNING = '\033[93m'
+COLOR_ERROR = '\033[91m'
+COLOR_NORMAL = '\033[0m'
+
+def find_unique_items(kati_installed_files, soong_installed_files, allowlist, system_module_name):
+    with open(kati_installed_files, 'r') as kati_list_file, \
+            open(soong_installed_files, 'r') as soong_list_file, \
+            open(allowlist, 'r') as allowlist_file:
+        kati_files = set(kati_list_file.read().split())
+        soong_files = set(soong_list_file.read().split())
+        allowed_files = set(filter(lambda x: len(x), map(lambda x: x.lstrip().split('#',1)[0].rstrip() , allowlist_file.read().split('\n'))))
+
+    def is_unknown_diff(filepath):
+        return not filepath in allowed_files
+
+    unique_in_kati = set(filter(is_unknown_diff, kati_files - soong_files))
+    unique_in_soong = set(filter(is_unknown_diff, soong_files - kati_files))
+
+    if unique_in_kati:
+        print(f'{COLOR_ERROR}Please add the following modules to the system image module {system_module_name}.{COLOR_NORMAL}')
+        print(f'{COLOR_WARNING}KATI only module(s):{COLOR_NORMAL}')
+        for item in sorted(unique_in_kati):
+            print(item)
+
+    if unique_in_soong:
+        if unique_in_kati:
+            print('')
+
+        print(f'{COLOR_ERROR}Please add the following modules to build/make/target/product/base_system.mk.{COLOR_NORMAL}')
+        print(f'{COLOR_WARNING}Soong only module(s):{COLOR_NORMAL}')
+        for item in sorted(unique_in_soong):
+            print(item)
+
+    if unique_in_kati or unique_in_soong:
+        print('')
+        print(f'{COLOR_ERROR}FAILED: The KATI and Soong system image file lists differ.{COLOR_NORMAL}')
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('kati_installed_file_list')
+    parser.add_argument('soong_installed_file_list')
+    parser.add_argument('allowlist')
+    parser.add_argument('system_module_name')
+    args = parser.parse_args()
+
+    find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.allowlist, args.system_module_name)
\ No newline at end of file
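The allowlist parsing in find_unique_items above is a fairly dense one-liner. An equivalent (hypothetical) helper spelling out the rules it implements: '#' starts a comment, surrounding whitespace is ignored, and blank lines are dropped:

    # Equivalent, more explicit form of the allowlist parsing above.
    def parse_allowlist(text):
        entries = set()
        for line in text.split('\n'):
            entry = line.split('#', 1)[0].strip()  # drop comment, trim whitespace
            if entry:
                entries.add(entry)
        return entries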
diff --git a/tools/finalization/README.md b/tools/finalization/README.md
index cc97d1f..d0aed69 100644
--- a/tools/finalization/README.md
+++ b/tools/finalization/README.md
@@ -3,18 +3,19 @@
 
 ## Automation:
 1. [Environment setup](./environment.sh). Set values for various finalization constants.
-2. [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh). Prepare the branch for SDK release. SDK contains Android Java APIs and other stable APIs. Commonly referred as a 1st step.
-3. [Finalize Android](./finalize-sdk-rel.sh). Mark branch as "REL", i.e. prepares for Android release. Any signed build containing these changes will be considered an official Android Release. Referred as a 2nd finalization step.
-4. [Finalize SDK and submit](./step-1.sh). Do [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) step, create CLs, organize them into topic and send to Gerrit.
-  a. [Update SDK and submit](./update-step-1.sh). Same as above, but updates the existings CLs.
-5. [Finalize Android and submit](./step-2.sh). Do [Finalize Android](./finalize-sdk-rel.sh) step, create  CLs, organize them into topic and send to Gerrit.
-  a. [Update Android and submit](./update-step-2.sh). Same as above, but updates the existings CLs.
+1. [Finalize VINTF](./finalize-vintf-resources.sh). Prepare the branch for the VINTF release.
+1. [Finalize SDK](./finalize-sdk-resources.sh). Prepare the branch for the SDK release. The SDK contains Android Java APIs and other stable APIs. Commonly referred to as the 1st step.
+1. [Finalize Android](./finalize-sdk-rel.sh). Mark the branch as "REL", i.e. prepare for the Android release. Any signed build containing these changes will be considered an official Android release. Referred to as the 2nd finalization step.
+1. [Finalize VINTF and submit](./step-0.sh). Do the Finalize VINTF step, create CLs, organize them into a topic, and send them to Gerrit.
+1. [Finalize SDK and submit](./step-1.sh). Do the Finalize SDK step, create CLs, organize them into a topic, and send them to Gerrit.
+1. [Finalize Android and submit](./step-2.sh). Do the [Finalize Android](./finalize-sdk-rel.sh) step, create CLs, organize them into a topic, and send them to Gerrit.
 
 ## CI:
 Performed in build targets in Finalization branches.
-1. [Finalization Step 1, git_main-fina-1-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-1-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
-3. [Finalization Step 2, git_main-fina-2-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-2-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
-5. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
+1. [Finalization Step 0, git_main-fina-0-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-0-release). Test Finalize VINTF.
+1. [Finalization Step 1, git_main-fina-1-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-1-release). Test Finalize VINTF and Finalize SDK.
+1. [Finalization Step 2, git_main-fina-2-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-2-release). Test Finalize VINTF, Finalize SDK, and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
+1. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
 
 ## Utility:
 [Full cleanup](./cleanup.sh). Remove all local changes and switch each project into head-less state. This is the best state to sync/rebase/finalize the branch.
diff --git a/tools/finalization/build-step-0.sh b/tools/finalization/build-step-0.sh
new file mode 100755
index 0000000..f81b720
--- /dev/null
+++ b/tools/finalization/build-step-0.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Copyright 2024 Google Inc. All rights reserved.
+
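+# Script to perform the 0th step of Android Finalization locally: VINTF finalization.
+# Unlike step-0.sh, it does not create or upload any CLs.
+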
+set -ex
+
+function finalize_main_step0() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # VINTF finalization
+        source $top/build/make/tools/finalization/finalize-vintf-resources.sh
+    fi;
+}
+
+finalize_main_step0
+
diff --git a/tools/finalization/build-step-1-and-2.sh b/tools/finalization/build-step-1-and-2.sh
index 84e2782..ca22678 100755
--- a/tools/finalization/build-step-1-and-2.sh
+++ b/tools/finalization/build-step-1-and-2.sh
@@ -7,11 +7,16 @@
     source $top/build/make/tools/finalization/environment.sh
 
     if [ "$FINAL_STATE" = "unfinalized" ] ; then
-        # SDK codename -> int
-        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+        # VINTF finalization
+        source $top/build/make/tools/finalization/finalize-vintf-resources.sh
     fi;
 
-    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "sdk" ] ; then
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "vintf" ] ; then
+        # SDK codename -> int
+        source $top/build/make/tools/finalization/finalize-sdk-resources.sh
+    fi;
+
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "vintf" ] || [ "$FINAL_STATE" = "sdk" ] ; then
         # ADB, Platform/Mainline SDKs build and move to prebuilts
         source $top/build/make/tools/finalization/localonly-steps.sh
 
diff --git a/tools/finalization/build-step-1.sh b/tools/finalization/build-step-1.sh
index 3d5eadb..7294698 100755
--- a/tools/finalization/build-step-1.sh
+++ b/tools/finalization/build-step-1.sh
@@ -7,8 +7,13 @@
     source $top/build/make/tools/finalization/environment.sh
 
     if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # VINTF finalization
+        source $top/build/make/tools/finalization/finalize-vintf-resources.sh
+    fi;
+
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "vintf" ] ; then
         # Build finalization artifacts.
-        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+        source $top/build/make/tools/finalization/finalize-sdk-resources.sh
     fi;
 }
 
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
index d9c42c8..7961e8b 100755
--- a/tools/finalization/environment.sh
+++ b/tools/finalization/environment.sh
@@ -19,8 +19,14 @@
 
 # Options:
 # 'unfinalized' - branch is in development state,
-# 'sdk' - SDK/API is finalized
+# 'vintf' - VINTF is finalized
+# 'sdk' - VINTF and SDK/API are finalized
 # 'rel' - branch is finalized, switched to REL
-export FINAL_STATE='unfinalized'
+export FINAL_STATE='vintf'
 
-export BUILD_FROM_SOURCE_STUB=true
\ No newline at end of file
+export BUILD_FROM_SOURCE_STUB=true
+
+# FINAL versions for VINTF
+# TODO(b/323985297): The version must match the one in the release configuration.
+# Instead of hardcoding the version here, read it from a release configuration.
+export FINAL_BOARD_API_LEVEL='202404'
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
index 245305b..59fe28c 100755
--- a/tools/finalization/finalize-sdk-rel.sh
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -40,9 +40,6 @@
     fi
     git -C "$top/cts" mv hostsidetests/theme/assets/${FINAL_PLATFORM_CODENAME} hostsidetests/theme/assets/${FINAL_PLATFORM_SDK_VERSION}
 
-    # system/sepolicy
-    system/sepolicy/tools/finalize-sdk-rel.sh "$top" "$FINAL_PLATFORM_SDK_VERSION"
-
     # prebuilts/abi-dumps/platform
     mkdir -p "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
     cp -r "$top/prebuilts/abi-dumps/platform/current/64/" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION/"
@@ -52,10 +49,6 @@
     # prebuilts/abi-dumps/ndk
     #mkdir -p "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION"
     #cp -r "$top/prebuilts/abi-dumps/ndk/current/64/" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION/"
-    #if [ "$FINAL_STATE" != "sdk" ] || [ "$FINAL_PLATFORM_CODENAME" == "$CURRENT_PLATFORM_CODENAME" ] ; then
-        # prebuilts/abi-dumps/vndk
-        #mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
-    #fi;
 }
 
 finalize_sdk_rel
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-sdk-resources.sh
similarity index 91%
rename from tools/finalization/finalize-aidl-vndk-sdk-resources.sh
rename to tools/finalization/finalize-sdk-resources.sh
index 75379ff..596f803 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-sdk-resources.sh
@@ -96,7 +96,7 @@
         $modules_arg
 }
 
-function finalize_aidl_vndk_sdk_resources() {
+function finalize_sdk_resources() {
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
@@ -111,13 +111,6 @@
     # bionic/NDK
     finalize_bionic_ndk
 
-    # pre-finalization build target (trunk)
-    local aidl_m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_RELEASE=trunk TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
-    AIDL_TRANSITIVE_FREEZE=true $aidl_m aidl-freeze-api
-
-    # TODO(b/309880485)
-    # Add back create_reference_dumps and $top/build/make/target/product/gsi/current.txt
-
     # Finalize SDK
 
     # frameworks/libs/modules-utils
@@ -129,10 +122,6 @@
     local build_tools_source="$top/development/sdk/build_tools_source.prop_template"
     sed -i -e 's/Pkg\.Revision.*/Pkg\.Revision=${PLATFORM_SDK_VERSION}.0.0/g' $build_tools_source
 
-    # build/make
-    sed -i -e "s/sepolicy_major_vers := .*/sepolicy_major_vers := ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/build/make/core/config.mk"
-    cp "$top/build/make/target/product/gsi/current.txt" "$top/build/make/target/product/gsi/$FINAL_PLATFORM_SDK_VERSION.txt"
-
     # build/soong
     local codename_version="\"${FINAL_PLATFORM_CODENAME}\": ${FINAL_PLATFORM_SDK_VERSION}"
     if ! grep -q "$codename_version" "$top/build/soong/android/api_levels.go" ; then
@@ -179,5 +168,5 @@
     $sdk_m update-api
 }
 
-finalize_aidl_vndk_sdk_resources
+finalize_sdk_resources
 
diff --git a/tools/finalization/finalize-vintf-resources.sh b/tools/finalization/finalize-vintf-resources.sh
new file mode 100755
index 0000000..a55d8e1
--- /dev/null
+++ b/tools/finalization/finalize-vintf-resources.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_vintf_resources() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+    # environment needed to build dependencies and run scripts
+    # These should remain the same for all steps here to speed up build time
+    export ANDROID_BUILD_TOP="$top"
+    export ANDROID_HOST_OUT="$ANDROID_BUILD_TOP/out/host/linux-x86"
+    export ANDROID_PRODUCT_OUT="$ANDROID_BUILD_TOP/out/target/product/generic_arm64"
+    export PATH="$PATH:$ANDROID_HOST_OUT/bin/"
+    export TARGET_BUILD_VARIANT=userdebug
+    export DIST_DIR=out/dist
+    export TARGET_RELEASE=fina_0
+    export TARGET_PRODUCT=aosp_arm64
+
+    # TODO(b/314010764): finalize LL_NDK
+
+    # system/sepolicy
+    "$top/system/sepolicy/tools/finalize-vintf-resources.sh" "$top" "$FINAL_BOARD_API_LEVEL"
+
+    create_new_compat_matrix_and_kernel_configs
+
+    # pre-finalization build target (trunk)
+    local aidl_m="$top/build/soong/soong_ui.bash --make-mode"
+    AIDL_TRANSITIVE_FREEZE=true $aidl_m aidl-freeze-api
+}
+
+function create_new_compat_matrix_and_kernel_configs() {
+    # The compatibility matrix versions are bumped during vFRC
+    # These will change every time we have a new vFRC
+    local CURRENT_COMPATIBILITY_MATRIX_LEVEL='202404'
+    local NEXT_COMPATIBILITY_MATRIX_LEVEL='202504'
+    # The kernel configs need the letter of the Android release
+    local CURRENT_RELEASE_LETTER='v'
+    local NEXT_RELEASE_LETTER='w'
+
+
+    # build the targets required before touching the Android.bp/Android.mk files
+    local build_cmd="$top/build/soong/soong_ui.bash --make-mode"
+    $build_cmd bpmodify
+
+    "$top/prebuilts/build-tools/path/linux-x86/python3" "$top/hardware/interfaces/compatibility_matrices/bump.py" "$CURRENT_COMPATIBILITY_MATRIX_LEVEL" "$NEXT_COMPATIBILITY_MATRIX_LEVEL" "$CURRENT_RELEASE_LETTER" "$NEXT_RELEASE_LETTER"
+
+    # Freeze the current framework manifest file. This relies on the
+    # aosp_cf_x86_64-trunk_staging build target to get the right manifest
+    # fragments installed.
+    "$top/system/libhidl/vintfdata/freeze.sh" "$CURRENT_COMPATIBILITY_MATRIX_LEVEL"
+}
+
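+# Manual helper, not invoked by finalize_vintf_resources above: runs freeze.sh
+# against a local workspace. The paths below are examples and need to be
+# adjusted for the local checkout.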
+function freeze_framework_manifest() {
+    ANDROID_PRODUCT_OUT=~/workspace/internal/main/out/target/product/vsoc_x86 ANDROID_BUILD_TOP=~/workspace/internal/main ANDROID_HOST_OUT=~/workspace/internal/main/out/host/linux-x86 ./freeze.sh 202404
+}
+
+
+finalize_vintf_resources
+
diff --git a/tools/finalization/step-0.sh b/tools/finalization/step-0.sh
new file mode 100755
index 0000000..e61c644
--- /dev/null
+++ b/tools/finalization/step-0.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+# Copyright 2024 Google Inc. All rights reserved.
+
+# Script to perform the 0th step of Android Finalization: VINTF finalization; create CLs and upload them to Gerrit.
+
+set -ex
+
+function commit_step_0_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            repo start "VINTF-$FINAL_BOARD_API_LEVEL-Finalization" ;
+            git add -A . ;
+            git commit -m "Vendor API level $FINAL_BOARD_API_LEVEL is now frozen" \
+                       -m "Ignore-AOSP-First: VINTF $FINAL_BOARD_API_LEVEL Finalization
+Bug: $FINAL_BUG_ID
+Test: build";
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function finalize_step_0_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    source $top/build/make/tools/finalization/finalize-vintf-resources.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    commit_step_0_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_step_0_main
diff --git a/tools/finalization/step-1.sh b/tools/finalization/step-1.sh
index 0dd4b3a..0e483d5 100755
--- a/tools/finalization/step-1.sh
+++ b/tools/finalization/step-1.sh
@@ -21,10 +21,9 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
 
-    # vndk etc finalization
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    source $top/build/make/tools/finalization/finalize-sdk-resources.sh
 
     # move all changes to finalization branch/topic and upload to gerrit
     commit_step_1_changes
diff --git a/tools/finalization/step-2.sh b/tools/finalization/step-2.sh
index d0b24ae..356cad0 100755
--- a/tools/finalization/step-2.sh
+++ b/tools/finalization/step-2.sh
@@ -19,7 +19,7 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
 
     # prebuilts etc
     source $top/build/make/tools/finalization/finalize-sdk-rel.sh
diff --git a/tools/finalization/update-step-1.sh b/tools/finalization/update-step-1.sh
deleted file mode 100755
index b469988..0000000
--- a/tools/finalization/update-step-1.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-# Script to perform a 1st step of Android Finalization: API/SDK finalization, update CLs and upload to Gerrit.
-
-# WIP, does not work yet
-exit 10
-
-set -ex
-
-function update_step_1_changes() {
-    set +e
-    repo forall -c '\
-        if [[ $(git status --short) ]]; then
-            git stash -u ;
-            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
-            git stash pop ;
-            git add -A . ;
-            git commit --amend --no-edit ;
-            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
-        fi'
-}
-
-function update_step_1_main() {
-    local top="$(dirname "$0")"/../../../..
-    source $top/build/make/tools/finalization/environment.sh
-
-
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # vndk etc finalization
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
-
-    # update existing CLs and upload to gerrit
-    update_step_1_changes
-
-    # build to confirm everything is OK
-    AIDL_FROZEN_REL=true $m
-}
-
-update_step_1_main
diff --git a/tools/finalization/update-step-2.sh b/tools/finalization/update-step-2.sh
deleted file mode 100755
index d2b8592..0000000
--- a/tools/finalization/update-step-2.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-# Script to perform a 2nd step of Android Finalization: REL finalization, create CLs and upload to Gerrit.
-
-# WIP, does not work yet
-exit 10
-
-set -ex
-
-function update_step_2_changes() {
-    set +e
-    repo forall -c '\
-        if [[ $(git status --short) ]]; then
-            git stash -u ;
-            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ;
-            git stash pop ;
-            git add -A . ;
-            git commit --amend --no-edit ;
-            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
-        fi'
-}
-
-function update_step_2_main() {
-    local top="$(dirname "$0")"/../../../..
-    source $top/build/make/tools/finalization/environment.sh
-
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # prebuilts etc
-    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
-
-    # move all changes to finalization branch/topic and upload to gerrit
-    update_step_2_changes
-
-    # build to confirm everything is OK
-    AIDL_FROZEN_REL=true $m
-}
-
-update_step_2_main
diff --git a/tools/ide_query/OWNERS b/tools/ide_query/OWNERS
new file mode 100644
index 0000000..914a9a2
--- /dev/null
+++ b/tools/ide_query/OWNERS
@@ -0,0 +1,4 @@
+ialiyev@google.com
+ivankirichenko@google.com
+kadircet@google.com
+michaelmerg@google.com
diff --git a/tools/ide_query/cc_analyzer/Android.bp b/tools/ide_query/cc_analyzer/Android.bp
index 3cbbb05..e85d445 100644
--- a/tools/ide_query/cc_analyzer/Android.bp
+++ b/tools/ide_query/cc_analyzer/Android.bp
@@ -58,7 +58,7 @@
     shared_libs: ["libclang-cpp_host"],
     static_libs: [
         "include_scanner",
-        "ide_query_proto",
+        "cc_analyzer_proto",
     ],
     defaults: ["ide_query_cc_analyzer_defaults"],
 }
@@ -72,7 +72,7 @@
         "libprotobuf-cpp-full",
     ],
     static_libs: [
-        "ide_query_proto",
+        "cc_analyzer_proto",
         "builtin_headers",
         "include_scanner",
         "analyzer",
diff --git a/tools/ide_query/cc_analyzer/analyzer.cc b/tools/ide_query/cc_analyzer/analyzer.cc
index bb7ca0b..4ccec54 100644
--- a/tools/ide_query/cc_analyzer/analyzer.cc
+++ b/tools/ide_query/cc_analyzer/analyzer.cc
@@ -20,9 +20,9 @@
 #include <utility>
 #include <vector>
 
+#include "cc_analyzer.pb.h"
 #include "clang/Tooling/CompilationDatabase.h"
 #include "clang/Tooling/JSONCompilationDatabase.h"
-#include "ide_query.pb.h"
 #include "include_scanner.h"
 #include "llvm/ADT/SmallString.h"
 #include "llvm/ADT/StringRef.h"
@@ -48,11 +48,11 @@
 }
 }  // namespace
 
-::ide_query::DepsResponse GetDeps(::ide_query::RepoState state) {
-  ::ide_query::DepsResponse results;
+::cc_analyzer::DepsResponse GetDeps(::cc_analyzer::RepoState state) {
+  ::cc_analyzer::DepsResponse results;
   auto db = LoadCompDB(state.comp_db_path());
   if (!db) {
-    results.mutable_status()->set_code(::ide_query::Status::FAILURE);
+    results.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
     results.mutable_status()->set_message(llvm::toString(db.takeError()));
     return results;
   }
@@ -63,7 +63,7 @@
     llvm::sys::path::append(abs_file, active_file);
     auto cmds = db->get()->getCompileCommands(active_file);
     if (cmds.empty()) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message(
           llvm::Twine("Can't find compile flags for file: ", abs_file).str());
       continue;
@@ -80,11 +80,11 @@
   return results;
 }
 
-::ide_query::IdeAnalysis GetBuildInputs(::ide_query::RepoState state) {
+::cc_analyzer::IdeAnalysis GetBuildInputs(::cc_analyzer::RepoState state) {
   auto db = LoadCompDB(state.comp_db_path());
-  ::ide_query::IdeAnalysis results;
+  ::cc_analyzer::IdeAnalysis results;
   if (!db) {
-    results.mutable_status()->set_code(::ide_query::Status::FAILURE);
+    results.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
     results.mutable_status()->set_message(llvm::toString(db.takeError()));
     return results;
   }
@@ -97,7 +97,6 @@
     genfile_root_abs.push_back('/');
   }
 
-  results.set_build_artifact_root(state.out_dir());
   for (llvm::StringRef active_file : state.active_file_path()) {
     auto& result = *results.add_sources();
     result.set_path(active_file.str());
@@ -106,7 +105,7 @@
     llvm::sys::path::append(abs_file, active_file);
     auto cmds = db->get()->getCompileCommands(abs_file);
     if (cmds.empty()) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message(
           llvm::Twine("Can't find compile flags for file: ", abs_file).str());
       continue;
@@ -114,7 +113,7 @@
     const auto& cmd = cmds.front();
     llvm::StringRef working_dir = cmd.Directory;
     if (!working_dir.consume_front(repo_dir)) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message("Command working dir " +
                                            working_dir.str() +
                                            " outside repository " + repo_dir);
@@ -127,7 +126,7 @@
     auto includes =
         ScanIncludes(cmds.front(), llvm::vfs::createPhysicalFileSystem());
     if (!includes) {
-      result.mutable_status()->set_code(::ide_query::Status::FAILURE);
+      result.mutable_status()->set_code(::cc_analyzer::Status::FAILURE);
       result.mutable_status()->set_message(
           llvm::toString(includes.takeError()));
       continue;
diff --git a/tools/ide_query/cc_analyzer/analyzer.h b/tools/ide_query/cc_analyzer/analyzer.h
index 3133795..fd19082 100644
--- a/tools/ide_query/cc_analyzer/analyzer.h
+++ b/tools/ide_query/cc_analyzer/analyzer.h
@@ -17,17 +17,17 @@
 #ifndef _TOOLS_IDE_QUERY_CC_ANALYZER_ANALYZER_H_
 #define _TOOLS_IDE_QUERY_CC_ANALYZER_ANALYZER_H_
 
-#include "ide_query.pb.h"
+#include "cc_analyzer.pb.h"
 
 namespace tools::ide_query::cc_analyzer {
 
 // Scans the build graph and returns target names from the build graph to
 // generate all the dependencies for the active files.
-::ide_query::DepsResponse GetDeps(::ide_query::RepoState state);
+::cc_analyzer::DepsResponse GetDeps(::cc_analyzer::RepoState state);
 
 // Scans the sources and returns all the source files required for analyzing the
 // active files.
-::ide_query::IdeAnalysis GetBuildInputs(::ide_query::RepoState state);
+::cc_analyzer::IdeAnalysis GetBuildInputs(::cc_analyzer::RepoState state);
 
 }  // namespace tools::ide_query::cc_analyzer
 
diff --git a/tools/ide_query/cc_analyzer/main.cc b/tools/ide_query/cc_analyzer/main.cc
index 8e00c63..d86fc8c 100644
--- a/tools/ide_query/cc_analyzer/main.cc
+++ b/tools/ide_query/cc_analyzer/main.cc
@@ -28,7 +28,7 @@
 
 #include "analyzer.h"
 #include "google/protobuf/message.h"
-#include "ide_query.pb.h"
+#include "cc_analyzer.pb.h"
 #include "llvm/ADT/StringRef.h"
 #include "llvm/Support/CommandLine.h"
 #include "llvm/Support/TargetSelect.h"
@@ -48,9 +48,9 @@
     llvm::cl::desc("Print the list of headers to insert and remove"),
 };
 
-ide_query::IdeAnalysis ReturnError(llvm::StringRef message) {
-  ide_query::IdeAnalysis result;
-  result.mutable_status()->set_code(ide_query::Status::FAILURE);
+cc_analyzer::IdeAnalysis ReturnError(llvm::StringRef message) {
+  cc_analyzer::IdeAnalysis result;
+  result.mutable_status()->set_code(cc_analyzer::Status::FAILURE);
   result.mutable_status()->set_message(message.str());
   return result;
 }
@@ -61,7 +61,7 @@
   llvm::InitializeAllTargetInfos();
   llvm::cl::ParseCommandLineOptions(argc, argv);
 
-  ide_query::RepoState state;
+  cc_analyzer::RepoState state;
   if (!state.ParseFromFileDescriptor(STDIN_FILENO)) {
     llvm::errs() << "Failed to parse input!\n";
     return 1;
@@ -70,12 +70,12 @@
   std::unique_ptr<google::protobuf::Message> result;
   switch (mode) {
     case OpMode::DEPS: {
-      result = std::make_unique<ide_query::DepsResponse>(
+      result = std::make_unique<cc_analyzer::DepsResponse>(
           tools::ide_query::cc_analyzer::GetDeps(std::move(state)));
       break;
     }
     case OpMode::INPUTS: {
-      result = std::make_unique<ide_query::IdeAnalysis>(
+      result = std::make_unique<cc_analyzer::IdeAnalysis>(
           tools::ide_query::cc_analyzer::GetBuildInputs(std::move(state)));
       break;
     }
diff --git a/tools/ide_query/ide_query_proto/Android.bp b/tools/ide_query/cc_analyzer_proto/Android.bp
similarity index 93%
rename from tools/ide_query/ide_query_proto/Android.bp
rename to tools/ide_query/cc_analyzer_proto/Android.bp
index 70f15cd..0ed07b4 100644
--- a/tools/ide_query/ide_query_proto/Android.bp
+++ b/tools/ide_query/cc_analyzer_proto/Android.bp
@@ -19,9 +19,9 @@
 }
 
 cc_library_host_static {
-    name: "ide_query_proto",
+    name: "cc_analyzer_proto",
     srcs: [
-        "ide_query.proto",
+        "cc_analyzer.proto",
     ],
     proto: {
         export_proto_headers: true,
diff --git a/tools/ide_query/cc_analyzer_proto/cc_analyzer.pb.go b/tools/ide_query/cc_analyzer_proto/cc_analyzer.pb.go
new file mode 100644
index 0000000..debe5c0
--- /dev/null
+++ b/tools/ide_query/cc_analyzer_proto/cc_analyzer.pb.go
@@ -0,0 +1,789 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// 	protoc-gen-go v1.30.0
+// 	protoc        v3.21.12
+// source: cc_analyzer.proto
+
+package cc_analyzer_proto
+
+import (
+	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+	reflect "reflect"
+	sync "sync"
+)
+
+const (
+	// Verify that this generated code is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+	// Verify that runtime/protoimpl is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+type Status_Code int32
+
+const (
+	Status_OK      Status_Code = 0
+	Status_FAILURE Status_Code = 1
+)
+
+// Enum value maps for Status_Code.
+var (
+	Status_Code_name = map[int32]string{
+		0: "OK",
+		1: "FAILURE",
+	}
+	Status_Code_value = map[string]int32{
+		"OK":      0,
+		"FAILURE": 1,
+	}
+)
+
+func (x Status_Code) Enum() *Status_Code {
+	p := new(Status_Code)
+	*p = x
+	return p
+}
+
+func (x Status_Code) String() string {
+	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (Status_Code) Descriptor() protoreflect.EnumDescriptor {
+	return file_cc_analyzer_proto_enumTypes[0].Descriptor()
+}
+
+func (Status_Code) Type() protoreflect.EnumType {
+	return &file_cc_analyzer_proto_enumTypes[0]
+}
+
+func (x Status_Code) Number() protoreflect.EnumNumber {
+	return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use Status_Code.Descriptor instead.
+func (Status_Code) EnumDescriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{0, 0}
+}
+
+// Indicates the success/failure for analysis.
+type Status struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Code Status_Code `protobuf:"varint,1,opt,name=code,proto3,enum=cc_analyzer.Status_Code" json:"code,omitempty"`
+	// Details about the status, might be displayed to user.
+	Message *string `protobuf:"bytes,2,opt,name=message,proto3,oneof" json:"message,omitempty"`
+}
+
+func (x *Status) Reset() {
+	*x = Status{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_cc_analyzer_proto_msgTypes[0]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Status) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Status) ProtoMessage() {}
+
+func (x *Status) ProtoReflect() protoreflect.Message {
+	mi := &file_cc_analyzer_proto_msgTypes[0]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Status.ProtoReflect.Descriptor instead.
+func (*Status) Descriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *Status) GetCode() Status_Code {
+	if x != nil {
+		return x.Code
+	}
+	return Status_OK
+}
+
+func (x *Status) GetMessage() string {
+	if x != nil && x.Message != nil {
+		return *x.Message
+	}
+	return ""
+}
+
+// Represents an Android checkout on user's workstation.
+type RepoState struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Absolute path for the checkout in the workstation.
+	// e.g. /home/user/work/android/
+	RepoDir string `protobuf:"bytes,1,opt,name=repo_dir,json=repoDir,proto3" json:"repo_dir,omitempty"`
+	// Relative to repo_dir.
+	ActiveFilePath []string `protobuf:"bytes,2,rep,name=active_file_path,json=activeFilePath,proto3" json:"active_file_path,omitempty"`
+	// Repository relative path to output directory in workstation.
+	OutDir string `protobuf:"bytes,3,opt,name=out_dir,json=outDir,proto3" json:"out_dir,omitempty"`
+	// Repository relative path to compile_commands.json in workstation.
+	CompDbPath string `protobuf:"bytes,4,opt,name=comp_db_path,json=compDbPath,proto3" json:"comp_db_path,omitempty"`
+}
+
+func (x *RepoState) Reset() {
+	*x = RepoState{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_cc_analyzer_proto_msgTypes[1]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *RepoState) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*RepoState) ProtoMessage() {}
+
+func (x *RepoState) ProtoReflect() protoreflect.Message {
+	mi := &file_cc_analyzer_proto_msgTypes[1]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use RepoState.ProtoReflect.Descriptor instead.
+func (*RepoState) Descriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{1}
+}
+
+func (x *RepoState) GetRepoDir() string {
+	if x != nil {
+		return x.RepoDir
+	}
+	return ""
+}
+
+func (x *RepoState) GetActiveFilePath() []string {
+	if x != nil {
+		return x.ActiveFilePath
+	}
+	return nil
+}
+
+func (x *RepoState) GetOutDir() string {
+	if x != nil {
+		return x.OutDir
+	}
+	return ""
+}
+
+func (x *RepoState) GetCompDbPath() string {
+	if x != nil {
+		return x.CompDbPath
+	}
+	return ""
+}
+
+// Provides all the targets that are prerequisites for running language
+// services on active_file_paths.
+type DepsResponse struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Deps   []*DepsResponse_Deps `protobuf:"bytes,1,rep,name=deps,proto3" json:"deps,omitempty"`
+	Status *Status              `protobuf:"bytes,2,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+func (x *DepsResponse) Reset() {
+	*x = DepsResponse{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_cc_analyzer_proto_msgTypes[2]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *DepsResponse) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*DepsResponse) ProtoMessage() {}
+
+func (x *DepsResponse) ProtoReflect() protoreflect.Message {
+	mi := &file_cc_analyzer_proto_msgTypes[2]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use DepsResponse.ProtoReflect.Descriptor instead.
+func (*DepsResponse) Descriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{2}
+}
+
+func (x *DepsResponse) GetDeps() []*DepsResponse_Deps {
+	if x != nil {
+		return x.Deps
+	}
+	return nil
+}
+
+func (x *DepsResponse) GetStatus() *Status {
+	if x != nil {
+		return x.Status
+	}
+	return nil
+}
+
+// Returns all the information necessary for providing language services for the
+// active files.
+type GeneratedFile struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Path to the file relative to repository root.
+	Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
+	// The text of the generated file, if not provided contents will be read
+	// from the path above in user's workstation.
+	Contents []byte `protobuf:"bytes,2,opt,name=contents,proto3,oneof" json:"contents,omitempty"`
+}
+
+func (x *GeneratedFile) Reset() {
+	*x = GeneratedFile{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_cc_analyzer_proto_msgTypes[3]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *GeneratedFile) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*GeneratedFile) ProtoMessage() {}
+
+func (x *GeneratedFile) ProtoReflect() protoreflect.Message {
+	mi := &file_cc_analyzer_proto_msgTypes[3]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use GeneratedFile.ProtoReflect.Descriptor instead.
+func (*GeneratedFile) Descriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{3}
+}
+
+func (x *GeneratedFile) GetPath() string {
+	if x != nil {
+		return x.Path
+	}
+	return ""
+}
+
+func (x *GeneratedFile) GetContents() []byte {
+	if x != nil {
+		return x.Contents
+	}
+	return nil
+}
+
+type SourceFile struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Path to the source file relative to repository root.
+	Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
+	// Working directory used by the build system. All the relative
+	// paths in compiler_arguments should be relative to this path.
+	// Relative to repository root.
+	WorkingDir string `protobuf:"bytes,2,opt,name=working_dir,json=workingDir,proto3" json:"working_dir,omitempty"`
+	// Compiler arguments to compile the source file. If multiple variants
+	// of the module being compiled are possible, the query script will choose
+	// one.
+	CompilerArguments []string `protobuf:"bytes,3,rep,name=compiler_arguments,json=compilerArguments,proto3" json:"compiler_arguments,omitempty"`
+	// Any generated files that are used in compiling the file.
+	Generated []*GeneratedFile `protobuf:"bytes,4,rep,name=generated,proto3" json:"generated,omitempty"`
+	// Paths to all of the sources, like build files, code generators,
+	// proto files etc. that were used during analysis. Used to figure
+	// out when a set of build artifacts are stale and the query tool
+	// must be re-run.
+	// Relative to repository root.
+	Deps []string `protobuf:"bytes,5,rep,name=deps,proto3" json:"deps,omitempty"`
+	// Represents analysis status for this particular file. e.g. not part
+	// of the build graph.
+	Status *Status `protobuf:"bytes,6,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+func (x *SourceFile) Reset() {
+	*x = SourceFile{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_cc_analyzer_proto_msgTypes[4]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *SourceFile) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*SourceFile) ProtoMessage() {}
+
+func (x *SourceFile) ProtoReflect() protoreflect.Message {
+	mi := &file_cc_analyzer_proto_msgTypes[4]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use SourceFile.ProtoReflect.Descriptor instead.
+func (*SourceFile) Descriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{4}
+}
+
+func (x *SourceFile) GetPath() string {
+	if x != nil {
+		return x.Path
+	}
+	return ""
+}
+
+func (x *SourceFile) GetWorkingDir() string {
+	if x != nil {
+		return x.WorkingDir
+	}
+	return ""
+}
+
+func (x *SourceFile) GetCompilerArguments() []string {
+	if x != nil {
+		return x.CompilerArguments
+	}
+	return nil
+}
+
+func (x *SourceFile) GetGenerated() []*GeneratedFile {
+	if x != nil {
+		return x.Generated
+	}
+	return nil
+}
+
+func (x *SourceFile) GetDeps() []string {
+	if x != nil {
+		return x.Deps
+	}
+	return nil
+}
+
+func (x *SourceFile) GetStatus() *Status {
+	if x != nil {
+		return x.Status
+	}
+	return nil
+}
+
+type IdeAnalysis struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Sources []*SourceFile `protobuf:"bytes,2,rep,name=sources,proto3" json:"sources,omitempty"`
+	// Status representing overall analysis.
+	// Should fail only when no analysis can be performed.
+	Status *Status `protobuf:"bytes,3,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+func (x *IdeAnalysis) Reset() {
+	*x = IdeAnalysis{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_cc_analyzer_proto_msgTypes[5]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *IdeAnalysis) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*IdeAnalysis) ProtoMessage() {}
+
+func (x *IdeAnalysis) ProtoReflect() protoreflect.Message {
+	mi := &file_cc_analyzer_proto_msgTypes[5]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use IdeAnalysis.ProtoReflect.Descriptor instead.
+func (*IdeAnalysis) Descriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{5}
+}
+
+func (x *IdeAnalysis) GetSources() []*SourceFile {
+	if x != nil {
+		return x.Sources
+	}
+	return nil
+}
+
+func (x *IdeAnalysis) GetStatus() *Status {
+	if x != nil {
+		return x.Status
+	}
+	return nil
+}
+
+// Build dependencies of a source file for providing language services.
+type DepsResponse_Deps struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Relative to repo_dir.
+	SourceFile string `protobuf:"bytes,1,opt,name=source_file,json=sourceFile,proto3" json:"source_file,omitempty"`
+	// Build target to execute for generating dep.
+	BuildTarget []string `protobuf:"bytes,2,rep,name=build_target,json=buildTarget,proto3" json:"build_target,omitempty"`
+	Status      *Status  `protobuf:"bytes,3,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+func (x *DepsResponse_Deps) Reset() {
+	*x = DepsResponse_Deps{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_cc_analyzer_proto_msgTypes[6]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *DepsResponse_Deps) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*DepsResponse_Deps) ProtoMessage() {}
+
+func (x *DepsResponse_Deps) ProtoReflect() protoreflect.Message {
+	mi := &file_cc_analyzer_proto_msgTypes[6]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use DepsResponse_Deps.ProtoReflect.Descriptor instead.
+func (*DepsResponse_Deps) Descriptor() ([]byte, []int) {
+	return file_cc_analyzer_proto_rawDescGZIP(), []int{2, 0}
+}
+
+func (x *DepsResponse_Deps) GetSourceFile() string {
+	if x != nil {
+		return x.SourceFile
+	}
+	return ""
+}
+
+func (x *DepsResponse_Deps) GetBuildTarget() []string {
+	if x != nil {
+		return x.BuildTarget
+	}
+	return nil
+}
+
+func (x *DepsResponse_Deps) GetStatus() *Status {
+	if x != nil {
+		return x.Status
+	}
+	return nil
+}
+
+var File_cc_analyzer_proto protoreflect.FileDescriptor
+
+var file_cc_analyzer_proto_rawDesc = []byte{
+	0x0a, 0x11, 0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72, 0x2e, 0x70, 0x72,
+	0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72,
+	0x22, 0x7e, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x2c, 0x0a, 0x04, 0x63, 0x6f,
+	0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x63, 0x63, 0x5f, 0x61, 0x6e,
+	0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x43, 0x6f,
+	0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x1d, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73,
+	0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x6d, 0x65, 0x73,
+	0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x22, 0x1b, 0x0a, 0x04, 0x43, 0x6f, 0x64, 0x65, 0x12,
+	0x06, 0x0a, 0x02, 0x4f, 0x4b, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x41, 0x49, 0x4c, 0x55,
+	0x52, 0x45, 0x10, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65,
+	0x22, 0x8b, 0x01, 0x0a, 0x09, 0x52, 0x65, 0x70, 0x6f, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x19,
+	0x0a, 0x08, 0x72, 0x65, 0x70, 0x6f, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+	0x52, 0x07, 0x72, 0x65, 0x70, 0x6f, 0x44, 0x69, 0x72, 0x12, 0x28, 0x0a, 0x10, 0x61, 0x63, 0x74,
+	0x69, 0x76, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20,
+	0x03, 0x28, 0x09, 0x52, 0x0e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x50,
+	0x61, 0x74, 0x68, 0x12, 0x17, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x03,
+	0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x44, 0x69, 0x72, 0x12, 0x20, 0x0a, 0x0c,
+	0x63, 0x6f, 0x6d, 0x70, 0x5f, 0x64, 0x62, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01,
+	0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x44, 0x62, 0x50, 0x61, 0x74, 0x68, 0x22, 0x89,
+	0x02, 0x0a, 0x0c, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
+	0x32, 0x0a, 0x04, 0x64, 0x65, 0x70, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e,
+	0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72, 0x2e, 0x44, 0x65, 0x70, 0x73,
+	0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x65, 0x70, 0x73, 0x52, 0x04, 0x64,
+	0x65, 0x70, 0x73, 0x12, 0x30, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20,
+	0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65,
+	0x72, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74,
+	0x75, 0x73, 0x88, 0x01, 0x01, 0x1a, 0x87, 0x01, 0x0a, 0x04, 0x44, 0x65, 0x70, 0x73, 0x12, 0x1f,
+	0x0a, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20,
+	0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12,
+	0x21, 0x0a, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18,
+	0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x54, 0x61, 0x72, 0x67,
+	0x65, 0x74, 0x12, 0x30, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01,
+	0x28, 0x0b, 0x32, 0x13, 0x2e, 0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72,
+	0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75,
+	0x73, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x42,
+	0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x51, 0x0a, 0x0d, 0x47, 0x65,
+	0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70,
+	0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12,
+	0x1f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28,
+	0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x88, 0x01, 0x01,
+	0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xfb, 0x01,
+	0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04,
+	0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68,
+	0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x69, 0x72, 0x18,
+	0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e, 0x67, 0x44, 0x69,
+	0x72, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5f, 0x61, 0x72,
+	0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x11, 0x63,
+	0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73,
+	0x12, 0x38, 0x0a, 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20,
+	0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65,
+	0x72, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52,
+	0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x65,
+	0x70, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x64, 0x65, 0x70, 0x73, 0x12, 0x30,
+	0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13,
+	0x2e, 0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72, 0x2e, 0x53, 0x74, 0x61,
+	0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01,
+	0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x83, 0x01, 0x0a, 0x0b,
+	0x49, 0x64, 0x65, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x12, 0x31, 0x0a, 0x07, 0x73,
+	0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x63,
+	0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63,
+	0x65, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x30,
+	0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13,
+	0x2e, 0x63, 0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72, 0x2e, 0x53, 0x74, 0x61,
+	0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01,
+	0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x4a, 0x04, 0x08, 0x01, 0x10,
+	0x02, 0x42, 0x1d, 0x5a, 0x1b, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2f, 0x63,
+	0x63, 0x5f, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x7a, 0x65, 0x72, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
+	0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+}
+
+var (
+	file_cc_analyzer_proto_rawDescOnce sync.Once
+	file_cc_analyzer_proto_rawDescData = file_cc_analyzer_proto_rawDesc
+)
+
+func file_cc_analyzer_proto_rawDescGZIP() []byte {
+	file_cc_analyzer_proto_rawDescOnce.Do(func() {
+		file_cc_analyzer_proto_rawDescData = protoimpl.X.CompressGZIP(file_cc_analyzer_proto_rawDescData)
+	})
+	return file_cc_analyzer_proto_rawDescData
+}
+
+var file_cc_analyzer_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
+var file_cc_analyzer_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
+var file_cc_analyzer_proto_goTypes = []interface{}{
+	(Status_Code)(0),          // 0: cc_analyzer.Status.Code
+	(*Status)(nil),            // 1: cc_analyzer.Status
+	(*RepoState)(nil),         // 2: cc_analyzer.RepoState
+	(*DepsResponse)(nil),      // 3: cc_analyzer.DepsResponse
+	(*GeneratedFile)(nil),     // 4: cc_analyzer.GeneratedFile
+	(*SourceFile)(nil),        // 5: cc_analyzer.SourceFile
+	(*IdeAnalysis)(nil),       // 6: cc_analyzer.IdeAnalysis
+	(*DepsResponse_Deps)(nil), // 7: cc_analyzer.DepsResponse.Deps
+}
+var file_cc_analyzer_proto_depIdxs = []int32{
+	0, // 0: cc_analyzer.Status.code:type_name -> cc_analyzer.Status.Code
+	7, // 1: cc_analyzer.DepsResponse.deps:type_name -> cc_analyzer.DepsResponse.Deps
+	1, // 2: cc_analyzer.DepsResponse.status:type_name -> cc_analyzer.Status
+	4, // 3: cc_analyzer.SourceFile.generated:type_name -> cc_analyzer.GeneratedFile
+	1, // 4: cc_analyzer.SourceFile.status:type_name -> cc_analyzer.Status
+	5, // 5: cc_analyzer.IdeAnalysis.sources:type_name -> cc_analyzer.SourceFile
+	1, // 6: cc_analyzer.IdeAnalysis.status:type_name -> cc_analyzer.Status
+	1, // 7: cc_analyzer.DepsResponse.Deps.status:type_name -> cc_analyzer.Status
+	8, // [8:8] is the sub-list for method output_type
+	8, // [8:8] is the sub-list for method input_type
+	8, // [8:8] is the sub-list for extension type_name
+	8, // [8:8] is the sub-list for extension extendee
+	0, // [0:8] is the sub-list for field type_name
+}
+
+func init() { file_cc_analyzer_proto_init() }
+func file_cc_analyzer_proto_init() {
+	if File_cc_analyzer_proto != nil {
+		return
+	}
+	if !protoimpl.UnsafeEnabled {
+		file_cc_analyzer_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Status); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_cc_analyzer_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*RepoState); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_cc_analyzer_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*DepsResponse); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_cc_analyzer_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*GeneratedFile); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_cc_analyzer_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*SourceFile); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_cc_analyzer_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*IdeAnalysis); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_cc_analyzer_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*DepsResponse_Deps); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+	}
+	file_cc_analyzer_proto_msgTypes[0].OneofWrappers = []interface{}{}
+	file_cc_analyzer_proto_msgTypes[2].OneofWrappers = []interface{}{}
+	file_cc_analyzer_proto_msgTypes[3].OneofWrappers = []interface{}{}
+	file_cc_analyzer_proto_msgTypes[4].OneofWrappers = []interface{}{}
+	file_cc_analyzer_proto_msgTypes[5].OneofWrappers = []interface{}{}
+	file_cc_analyzer_proto_msgTypes[6].OneofWrappers = []interface{}{}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_cc_analyzer_proto_rawDesc,
+			NumEnums:      1,
+			NumMessages:   7,
+			NumExtensions: 0,
+			NumServices:   0,
+		},
+		GoTypes:           file_cc_analyzer_proto_goTypes,
+		DependencyIndexes: file_cc_analyzer_proto_depIdxs,
+		EnumInfos:         file_cc_analyzer_proto_enumTypes,
+		MessageInfos:      file_cc_analyzer_proto_msgTypes,
+	}.Build()
+	File_cc_analyzer_proto = out.File
+	file_cc_analyzer_proto_rawDesc = nil
+	file_cc_analyzer_proto_goTypes = nil
+	file_cc_analyzer_proto_depIdxs = nil
+}
diff --git a/tools/ide_query/cc_analyzer_proto/cc_analyzer.proto b/tools/ide_query/cc_analyzer_proto/cc_analyzer.proto
new file mode 100644
index 0000000..094eb49
--- /dev/null
+++ b/tools/ide_query/cc_analyzer_proto/cc_analyzer.proto
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+syntax = "proto3";
+
+package cc_analyzer;
+
+option go_package = "ide_query/cc_analyzer_proto";
+
+// Indicates the success/failure for analysis.
+message Status {
+  enum Code {
+    OK = 0;
+    FAILURE = 1;
+  }
+  Code code = 1;
+  // Details about the status, might be displayed to user.
+  optional string message = 2;
+}
+
+// Represents an Android checkout on user's workstation.
+message RepoState {
+  // Absolute path for the checkout in the workstation.
+  // e.g. /home/user/work/android/
+  string repo_dir = 1;
+  // Relative to repo_dir.
+  repeated string active_file_path = 2;
+  // Repository relative path to output directory in workstation.
+  string out_dir = 3;
+  // Repository relative path to compile_commands.json in workstation.
+  string comp_db_path = 4;
+}
+
+// Provides all the targets that are prerequisites for running language
+// services on active_file_paths.
+message DepsResponse {
+  // Build dependencies of a source file for providing language services.
+  message Deps {
+    // Relative to repo_dir.
+    string source_file = 1;
+    // Build target to execute for generating dep.
+    repeated string build_target = 2;
+    optional Status status = 3;
+  }
+  repeated Deps deps = 1;
+  optional Status status = 2;
+}
+
+// Returns all the information necessary for providing language services for the
+// active files.
+message GeneratedFile {
+  // Path to the file relative to repository root.
+  string path = 1;
+
+  // The text of the generated file, if not provided contents will be read
+  // from the path above in user's workstation.
+  optional bytes contents = 2;
+}
+
+message SourceFile {
+  // Path to the source file relative to repository root.
+  string path = 1;
+
+  // Working directory used by the build system. All the relative
+  // paths in compiler_arguments should be relative to this path.
+  // Relative to repository root.
+  string working_dir = 2;
+
+  // Compiler arguments to compile the source file. If multiple variants
+  // of the module being compiled are possible, the query script will choose
+  // one.
+  repeated string compiler_arguments = 3;
+
+  // Any generated files that are used in compiling the file.
+  repeated GeneratedFile generated = 4;
+
+  // Paths to all of the sources, like build files, code generators,
+  // proto files etc. that were used during analysis. Used to figure
+  // out when a set of build artifacts are stale and the query tool
+  // must be re-run.
+  // Relative to repository root.
+  repeated string deps = 5;
+
+  // Represents analysis status for this particular file. e.g. not part
+  // of the build graph.
+  optional Status status = 6;
+}
+
+message IdeAnalysis {
+  repeated SourceFile sources = 2;
+
+  // Status representing overall analysis.
+  // Should fail only when no analysis can be performed.
+  optional Status status = 3;
+
+  reserved 1;
+}
diff --git a/tools/ide_query/cc_analyzer_proto/regen.sh b/tools/ide_query/cc_analyzer_proto/regen.sh
new file mode 100755
index 0000000..ef44f88
--- /dev/null
+++ b/tools/ide_query/cc_analyzer_proto/regen.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+aprotoc --go_out=paths=source_relative:. cc_analyzer.proto
diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go
index de84fbe..23c7abd 100644
--- a/tools/ide_query/ide_query.go
+++ b/tools/ide_query/ide_query.go
@@ -33,6 +33,7 @@
 	"strings"
 
 	"google.golang.org/protobuf/proto"
+	apb "ide_query/cc_analyzer_proto"
 	pb "ide_query/ide_query_proto"
 )
 
@@ -42,9 +43,6 @@
 	RepoDir        string
 	OutDir         string
 	ClangToolsRoot string
-
-	CcFiles   []string
-	JavaFiles []string
 }
 
 // LunchTarget is a parsed Android lunch target.
@@ -83,7 +81,7 @@
 
 func main() {
 	var env Env
-	env.OutDir = os.Getenv("OUT_DIR")
+	env.OutDir = strings.TrimSuffix(os.Getenv("OUT_DIR"), "/")
 	env.RepoDir = os.Getenv("ANDROID_BUILD_TOP")
 	env.ClangToolsRoot = os.Getenv("PREBUILTS_CLANG_TOOLS_ROOT")
 	flag.Var(&env.LunchTarget, "lunch_target", "The lunch target to query")
@@ -95,12 +93,13 @@
 		return
 	}
 
+	var ccFiles, javaFiles []string
 	for _, f := range files {
 		switch {
 		case strings.HasSuffix(f, ".java") || strings.HasSuffix(f, ".kt"):
-			env.JavaFiles = append(env.JavaFiles, f)
+			javaFiles = append(javaFiles, f)
 		case strings.HasSuffix(f, ".cc") || strings.HasSuffix(f, ".cpp") || strings.HasSuffix(f, ".h"):
-			env.CcFiles = append(env.CcFiles, f)
+			ccFiles = append(ccFiles, f)
 		default:
 			log.Printf("File %q is supported - will be skipped.", f)
 		}
@@ -110,28 +109,54 @@
 	// TODO(michaelmerg): Figure out if module_bp_java_deps.json and compile_commands.json is outdated.
 	runMake(ctx, env, "nothing")
 
-	javaModules, javaFileToModuleMap, err := loadJavaModules(&env)
+	javaModules, err := loadJavaModules(env)
 	if err != nil {
 		log.Printf("Failed to load java modules: %v", err)
 	}
-	toMake := getJavaTargets(javaFileToModuleMap)
 
-	ccTargets, status := getCCTargets(ctx, &env)
-	if status != nil && status.Code != pb.Status_OK {
-		log.Fatalf("Failed to query cc targets: %v", *status.Message)
-	}
-	toMake = append(toMake, ccTargets...)
-	fmt.Fprintf(os.Stderr, "Running make for modules: %v\n", strings.Join(toMake, ", "))
-	if err := runMake(ctx, env, toMake...); err != nil {
-		log.Printf("Building deps failed: %v", err)
+	var targets []string
+	javaTargetsByFile := findJavaModules(javaFiles, javaModules)
+	for _, t := range javaTargetsByFile {
+		targets = append(targets, t)
 	}
 
-	res := getJavaInputs(&env, javaModules, javaFileToModuleMap)
-	ccAnalysis := getCCInputs(ctx, &env)
-	proto.Merge(res, ccAnalysis)
+	ccTargets, err := getCCTargets(ctx, env, ccFiles)
+	if err != nil {
+		log.Fatalf("Failed to query cc targets: %v", err)
+	}
+	targets = append(targets, ccTargets...)
+	if len(targets) == 0 {
+		fmt.Println("No targets found.")
+		os.Exit(1)
+		return
+	}
 
-	res.BuildArtifactRoot = env.OutDir
-	data, err := proto.Marshal(res)
+	fmt.Fprintf(os.Stderr, "Running make for modules: %v\n", strings.Join(targets, ", "))
+	if err := runMake(ctx, env, targets...); err != nil {
+		log.Printf("Building modules failed: %v", err)
+	}
+
+	var analysis pb.IdeAnalysis
+	results, units := getJavaInputs(env, javaTargetsByFile, javaModules)
+	analysis.Results = results
+	analysis.Units = units
+	if err != nil && analysis.Error == nil {
+		analysis.Error = &pb.AnalysisError{
+			ErrorMessage: err.Error(),
+		}
+	}
+
+	results, units, err = getCCInputs(ctx, env, ccFiles)
+	analysis.Results = append(analysis.Results, results...)
+	analysis.Units = append(analysis.Units, units...)
+	if err != nil && analysis.Error == nil {
+		analysis.Error = &pb.AnalysisError{
+			ErrorMessage: err.Error(),
+		}
+	}
+
+	analysis.BuildOutDir = env.OutDir
+	data, err := proto.Marshal(&analysis)
 	if err != nil {
 		log.Fatalf("Failed to marshal result proto: %v", err)
 	}
@@ -141,22 +166,22 @@
 		log.Fatalf("Failed to write result proto: %v", err)
 	}
 
-	for _, s := range res.Sources {
-		fmt.Fprintf(os.Stderr, "%s: %v (Deps: %d, Generated: %d)\n", s.GetPath(), s.GetStatus(), len(s.GetDeps()), len(s.GetGenerated()))
+	for _, r := range analysis.Results {
+		fmt.Fprintf(os.Stderr, "%s: %+v\n", r.GetSourceFilePath(), r.GetStatus())
 	}
 }
 
-func repoState(env *Env) *pb.RepoState {
+func repoState(env Env, filePaths []string) *apb.RepoState {
 	const compDbPath = "soong/development/ide/compdb/compile_commands.json"
-	return &pb.RepoState{
+	return &apb.RepoState{
 		RepoDir:        env.RepoDir,
-		ActiveFilePath: env.CcFiles,
+		ActiveFilePath: filePaths,
 		OutDir:         env.OutDir,
 		CompDbPath:     path.Join(env.OutDir, compDbPath),
 	}
 }
 
-func runCCanalyzer(ctx context.Context, env *Env, mode string, in []byte) ([]byte, error) {
+func runCCanalyzer(ctx context.Context, env Env, mode string, in []byte) ([]byte, error) {
 	ccAnalyzerPath := path.Join(env.ClangToolsRoot, "bin/ide_query_cc_analyzer")
 	outBuffer := new(bytes.Buffer)
 
@@ -176,127 +201,205 @@
 }
 
 // Execute cc_analyzer and get all the targets that need to be built for analyzing files.
-func getCCTargets(ctx context.Context, env *Env) ([]string, *pb.Status) {
-	state := repoState(env)
-	bytes, err := proto.Marshal(state)
+func getCCTargets(ctx context.Context, env Env, filePaths []string) ([]string, error) {
+	state, err := proto.Marshal(repoState(env, filePaths))
 	if err != nil {
 		log.Fatalln("Failed to serialize state:", err)
 	}
 
-	resp := new(pb.DepsResponse)
-	result, err := runCCanalyzer(ctx, env, "deps", bytes)
-	if marshal_err := proto.Unmarshal(result, resp); marshal_err != nil {
-		return nil, &pb.Status{
-			Code:    pb.Status_FAILURE,
-			Message: proto.String("Malformed response from cc_analyzer: " + marshal_err.Error()),
-		}
+	resp := new(apb.DepsResponse)
+	result, err := runCCanalyzer(ctx, env, "deps", state)
+	if err != nil {
+		return nil, err
+	}
+
+	if err := proto.Unmarshal(result, resp); err != nil {
+		return nil, fmt.Errorf("malformed response from cc_analyzer: %v", err)
 	}
 
 	var targets []string
-	if resp.Status != nil && resp.Status.Code != pb.Status_OK {
-		return targets, resp.Status
+	if resp.Status != nil && resp.Status.Code != apb.Status_OK {
+		return targets, fmt.Errorf("cc_analyzer failed: %v", resp.Status.GetMessage())
 	}
+
 	for _, deps := range resp.Deps {
 		targets = append(targets, deps.BuildTarget...)
 	}
-
-	status := &pb.Status{Code: pb.Status_OK}
-	if err != nil {
-		status = &pb.Status{
-			Code:    pb.Status_FAILURE,
-			Message: proto.String(err.Error()),
-		}
-	}
-	return targets, status
+	return targets, nil
 }
 
-func getCCInputs(ctx context.Context, env *Env) *pb.IdeAnalysis {
-	state := repoState(env)
-	bytes, err := proto.Marshal(state)
+func getCCInputs(ctx context.Context, env Env, filePaths []string) ([]*pb.AnalysisResult, []*pb.BuildableUnit, error) {
+	state, err := proto.Marshal(repoState(env, filePaths))
 	if err != nil {
 		log.Fatalln("Failed to serialize state:", err)
 	}
 
-	resp := new(pb.IdeAnalysis)
-	result, err := runCCanalyzer(ctx, env, "inputs", bytes)
-	if marshal_err := proto.Unmarshal(result, resp); marshal_err != nil {
-		resp.Status = &pb.Status{
-			Code:    pb.Status_FAILURE,
-			Message: proto.String("Malformed response from cc_analyzer: " + marshal_err.Error()),
-		}
-		return resp
+	resp := new(apb.IdeAnalysis)
+	result, err := runCCanalyzer(ctx, env, "inputs", state)
+	if err != nil {
+		return nil, nil, fmt.Errorf("cc_analyzer failed: %v", err)
+	}
+	if err := proto.Unmarshal(result, resp); err != nil {
+		return nil, nil, fmt.Errorf("malformed response from cc_analyzer: %v", err)
+	}
+	if resp.Status != nil && resp.Status.Code != apb.Status_OK {
+		return nil, nil, fmt.Errorf("cc_analyzer failed: %v", resp.Status.GetMessage())
 	}
 
-	if err != nil && (resp.Status == nil || resp.Status.Code == pb.Status_OK) {
-		resp.Status = &pb.Status{
-			Code:    pb.Status_FAILURE,
-			Message: proto.String(err.Error()),
+	var results []*pb.AnalysisResult
+	var units []*pb.BuildableUnit
+	for _, s := range resp.Sources {
+		status := &pb.AnalysisResult_Status{
+			Code: pb.AnalysisResult_Status_CODE_OK,
 		}
-	}
-	return resp
-}
-
-func getJavaTargets(javaFileToModuleMap map[string]*javaModule) []string {
-	var targets []string
-	for _, m := range javaFileToModuleMap {
-		targets = append(targets, m.Name)
-	}
-	return targets
-}
-
-func getJavaInputs(env *Env, javaModules map[string]*javaModule, javaFileToModuleMap map[string]*javaModule) *pb.IdeAnalysis {
-	var sources []*pb.SourceFile
-	type depsAndGenerated struct {
-		Deps      []string
-		Generated []*pb.GeneratedFile
-	}
-	moduleToDeps := make(map[string]*depsAndGenerated)
-	for _, f := range env.JavaFiles {
-		file := &pb.SourceFile{
-			Path: f,
-		}
-		sources = append(sources, file)
-
-		m := javaFileToModuleMap[f]
-		if m == nil {
-			file.Status = &pb.Status{
-				Code:    pb.Status_FAILURE,
-				Message: proto.String("File not found in any module."),
-			}
-			continue
+		if s.GetStatus().GetCode() != apb.Status_OK {
+			status.Code = pb.AnalysisResult_Status_CODE_BUILD_FAILED
+			status.StatusMessage = proto.String(s.GetStatus().GetMessage())
 		}
 
-		file.Status = &pb.Status{Code: pb.Status_OK}
-		if moduleToDeps[m.Name] != nil {
-			file.Generated = moduleToDeps[m.Name].Generated
-			file.Deps = moduleToDeps[m.Name].Deps
-			continue
+		result := &pb.AnalysisResult{
+			SourceFilePath: s.GetPath(),
+			UnitId:         s.GetPath(),
+			Status:         status,
 		}
+		results = append(results, result)
 
-		deps := transitiveDeps(m, javaModules)
 		var generated []*pb.GeneratedFile
-		outPrefix := env.OutDir + "/"
-		for _, d := range deps {
-			if relPath, ok := strings.CutPrefix(d, outPrefix); ok {
-				contents, err := os.ReadFile(d)
-				if err != nil {
-					fmt.Printf("Generated file %q not found - will be skipped.\n", d)
-					continue
-				}
+		for _, f := range s.Generated {
+			generated = append(generated, &pb.GeneratedFile{
+				Path:     f.GetPath(),
+				Contents: f.GetContents(),
+			})
+		}
+		genUnit := &pb.BuildableUnit{
+			Id:              "genfiles_for_" + s.GetPath(),
+			SourceFilePaths: s.GetDeps(),
+			GeneratedFiles:  generated,
+		}
 
-				generated = append(generated, &pb.GeneratedFile{
-					Path:     relPath,
-					Contents: contents,
-				})
+		unit := &pb.BuildableUnit{
+			Id:                s.GetPath(),
+			Language:          pb.Language_LANGUAGE_CPP,
+			SourceFilePaths:   []string{s.GetPath()},
+			CompilerArguments: s.GetCompilerArguments(),
+			DependencyIds:     []string{genUnit.GetId()},
+		}
+		units = append(units, unit, genUnit)
+	}
+	return results, units, nil
+}
+
+// findJavaModules tries to find the modules that cover the given file paths.
+// If a file is covered by multiple modules, the first module is returned.
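+//
+// For example (hypothetical module names and paths), given a modules map with
+// "framework-foo" (srcs: pkg/Foo.java, pkg/Bar.java) and "framework-foo.impl",
+// findJavaModules([]string{"pkg/Foo.java"}, modules) returns
+// map[string]string{"pkg/Foo.java": "framework-foo"}; the ".impl" entry is
+// skipped.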
+func findJavaModules(paths []string, modules map[string]*javaModule) map[string]string {
+	ret := make(map[string]string)
+	for name, module := range modules {
+		if strings.HasSuffix(name, ".impl") {
+			continue
+		}
+
+		for i, p := range paths {
+			if slices.Contains(module.Srcs, p) {
+				ret[p] = name
+				paths = append(paths[:i], paths[i+1:]...)
+				break
 			}
 		}
-		moduleToDeps[m.Name] = &depsAndGenerated{deps, generated}
-		file.Generated = generated
-		file.Deps = deps
+		if len(paths) == 0 {
+			break
+		}
 	}
-	return &pb.IdeAnalysis{
-		Sources: sources,
+	return ret
+}
+
+func getJavaInputs(env Env, modulesByPath map[string]string, modules map[string]*javaModule) ([]*pb.AnalysisResult, []*pb.BuildableUnit) {
+	var results []*pb.AnalysisResult
+	unitsById := make(map[string]*pb.BuildableUnit)
+	for p, moduleName := range modulesByPath {
+		r := &pb.AnalysisResult{
+			SourceFilePath: p,
+		}
+		results = append(results, r)
+
+		m := modules[moduleName]
+		if m == nil {
+			r.Status = &pb.AnalysisResult_Status{
+				Code:          pb.AnalysisResult_Status_CODE_NOT_FOUND,
+				StatusMessage: proto.String("File not found in any module."),
+			}
+			continue
+		}
+
+		r.UnitId = moduleName
+		r.Status = &pb.AnalysisResult_Status{Code: pb.AnalysisResult_Status_CODE_OK}
+		if unitsById[r.UnitId] != nil {
+			// File is covered by an already created unit.
+			continue
+		}
+
+		u := &pb.BuildableUnit{
+			Id:              moduleName,
+			Language:        pb.Language_LANGUAGE_JAVA,
+			SourceFilePaths: m.Srcs,
+		}
+		unitsById[u.Id] = u
+
+		q := list.New()
+		for _, d := range m.Deps {
+			q.PushBack(d)
+		}
+		for q.Len() > 0 {
+			name := q.Remove(q.Front()).(string)
+			mod := modules[name]
+			if mod == nil || unitsById[name] != nil {
+				continue
+			}
+
+			var paths []string
+			paths = append(paths, mod.Srcs...)
+			paths = append(paths, mod.SrcJars...)
+			paths = append(paths, mod.Jars...)
+			unitsById[name] = &pb.BuildableUnit{
+				Id:              name,
+				SourceFilePaths: mod.Srcs,
+				GeneratedFiles:  genFiles(env, paths),
+			}
+
+			for _, d := range mod.Deps {
+				q.PushBack(d)
+			}
+		}
 	}
+
+	units := make([]*pb.BuildableUnit, 0, len(unitsById))
+	for _, u := range unitsById {
+		units = append(units, u)
+	}
+	return results, units
+}
+
+// genFiles returns the generated files (paths that start with outDir/) for the
+// given paths. Generated files that do not exist are ignored.
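+//
+// For example (hypothetical paths), with env.OutDir set to "out", the input
+// "out/soong/.intermediates/foo/gen/foo.h" produces a GeneratedFile whose Path
+// is "soong/.intermediates/foo/gen/foo.h" and whose Contents are read from that
+// file under env.RepoDir, while "frameworks/base/Bar.java" is skipped because
+// it is not under the out directory.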
+func genFiles(env Env, paths []string) []*pb.GeneratedFile {
+	prefix := env.OutDir + "/"
+	var ret []*pb.GeneratedFile
+	for _, p := range paths {
+		relPath, ok := strings.CutPrefix(p, prefix)
+		if !ok {
+			continue
+		}
+
+		contents, err := os.ReadFile(path.Join(env.RepoDir, p))
+		if err != nil {
+			continue
+		}
+
+		ret = append(ret, &pb.GeneratedFile{
+			Path:     relPath,
+			Contents: contents,
+		})
+	}
+	return ret
 }
 
 // runMake runs Soong build for the given modules.
@@ -308,6 +411,7 @@
 		"TARGET_PRODUCT=" + env.LunchTarget.Product,
 		"TARGET_RELEASE=" + env.LunchTarget.Release,
 		"TARGET_BUILD_VARIANT=" + env.LunchTarget.Variant,
+		"TARGET_BUILD_TYPE=release",
 		"-k",
 	}
 	args = append(args, modules...)
@@ -319,7 +423,6 @@
 }
 
 type javaModule struct {
-	Name    string
 	Path    []string `json:"path,omitempty"`
 	Deps    []string `json:"dependencies,omitempty"`
 	Srcs    []string `json:"srcs,omitempty"`
@@ -327,66 +430,23 @@
 	SrcJars []string `json:"srcjars,omitempty"`
 }
 
-func loadJavaModules(env *Env) (map[string]*javaModule, map[string]*javaModule, error) {
+func loadJavaModules(env Env) (map[string]*javaModule, error) {
 	javaDepsPath := path.Join(env.RepoDir, env.OutDir, "soong/module_bp_java_deps.json")
 	data, err := os.ReadFile(javaDepsPath)
 	if err != nil {
-		return nil, nil, err
+		return nil, err
 	}
 
-	var moduleMapping map[string]*javaModule // module name -> module
-	if err = json.Unmarshal(data, &moduleMapping); err != nil {
-		return nil, nil, err
+	var ret map[string]*javaModule // module name -> module
+	if err = json.Unmarshal(data, &ret); err != nil {
+		return nil, err
 	}
 
-	javaModules := make(map[string]*javaModule)
-	javaFileToModuleMap := make(map[string]*javaModule)
-	for name, module := range moduleMapping {
-		if strings.HasSuffix(name, "-jarjar") || strings.HasSuffix(name, ".impl") {
-			continue
-		}
-		module.Name = name
-		javaModules[name] = module
-		for _, src := range module.Srcs {
-			if !slices.Contains(env.JavaFiles, src) {
-				// We are only interested in active files.
-				continue
-			}
-			if javaFileToModuleMap[src] != nil {
-				// TODO(michaelmerg): Handle the case where a file is covered by multiple modules.
-				log.Printf("File %q found in module %q but is already covered by module %q", src, module.Name, javaFileToModuleMap[src].Name)
-				continue
-			}
-			javaFileToModuleMap[src] = module
+	// Add a top-level java_sdk_library entry for each .impl module.
+	for name, module := range ret {
+		if stripped := strings.TrimSuffix(name, ".impl"); stripped != name {
+			ret[stripped] = module
 		}
 	}
-	return javaModules, javaFileToModuleMap, nil
-}
-
-func transitiveDeps(m *javaModule, modules map[string]*javaModule) []string {
-	var ret []string
-	q := list.New()
-	q.PushBack(m.Name)
-	seen := make(map[string]bool) // module names -> true
-	for q.Len() > 0 {
-		name := q.Remove(q.Front()).(string)
-		mod := modules[name]
-		if mod == nil {
-			continue
-		}
-
-		ret = append(ret, mod.Srcs...)
-		ret = append(ret, mod.SrcJars...)
-		ret = append(ret, mod.Jars...)
-		for _, d := range mod.Deps {
-			if seen[d] {
-				continue
-			}
-			seen[d] = true
-			q.PushBack(d)
-		}
-	}
-	slices.Sort(ret)
-	ret = slices.Compact(ret)
-	return ret
+	return ret, nil
 }
diff --git a/tools/ide_query/ide_query.sh b/tools/ide_query/ide_query.sh
index 2df48d0..6f9b0c4 100755
--- a/tools/ide_query/ide_query.sh
+++ b/tools/ide_query/ide_query.sh
@@ -32,6 +32,7 @@
       ;;
 esac
 
+export BUILD_ENV_SEQUENCE_NUMBER=13
 export ANDROID_BUILD_TOP=$TOP
 export OUT_DIR=${OUT_DIR}
 exec "${PREBUILTS_GO_ROOT}/bin/go" "run" "ide_query" "$@"
diff --git a/tools/ide_query/ide_query_proto/ide_query.pb.go b/tools/ide_query/ide_query_proto/ide_query.pb.go
index f3a016d..a190223 100644
--- a/tools/ide_query/ide_query_proto/ide_query.pb.go
+++ b/tools/ide_query/ide_query_proto/ide_query.pb.go
@@ -1,6 +1,21 @@
+//
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.25.0-devel
+// 	protoc-gen-go v1.30.0
 // 	protoc        v3.21.12
 // source: ide_query.proto
 
@@ -20,251 +35,113 @@
 	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
 )
 
-type Status_Code int32
+type Language int32
 
 const (
-	Status_OK      Status_Code = 0
-	Status_FAILURE Status_Code = 1
+	Language_LANGUAGE_UNSPECIFIED Language = 0
+	Language_LANGUAGE_JAVA        Language = 1 // also includes Kotlin
+	Language_LANGUAGE_CPP         Language = 2
 )
 
-// Enum value maps for Status_Code.
+// Enum value maps for Language.
 var (
-	Status_Code_name = map[int32]string{
-		0: "OK",
-		1: "FAILURE",
+	Language_name = map[int32]string{
+		0: "LANGUAGE_UNSPECIFIED",
+		1: "LANGUAGE_JAVA",
+		2: "LANGUAGE_CPP",
 	}
-	Status_Code_value = map[string]int32{
-		"OK":      0,
-		"FAILURE": 1,
+	Language_value = map[string]int32{
+		"LANGUAGE_UNSPECIFIED": 0,
+		"LANGUAGE_JAVA":        1,
+		"LANGUAGE_CPP":         2,
 	}
 )
 
-func (x Status_Code) Enum() *Status_Code {
-	p := new(Status_Code)
+func (x Language) Enum() *Language {
+	p := new(Language)
 	*p = x
 	return p
 }
 
-func (x Status_Code) String() string {
+func (x Language) String() string {
 	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
 }
 
-func (Status_Code) Descriptor() protoreflect.EnumDescriptor {
+func (Language) Descriptor() protoreflect.EnumDescriptor {
 	return file_ide_query_proto_enumTypes[0].Descriptor()
 }
 
-func (Status_Code) Type() protoreflect.EnumType {
+func (Language) Type() protoreflect.EnumType {
 	return &file_ide_query_proto_enumTypes[0]
 }
 
-func (x Status_Code) Number() protoreflect.EnumNumber {
+func (x Language) Number() protoreflect.EnumNumber {
 	return protoreflect.EnumNumber(x)
 }
 
-// Deprecated: Use Status_Code.Descriptor instead.
-func (Status_Code) EnumDescriptor() ([]byte, []int) {
-	return file_ide_query_proto_rawDescGZIP(), []int{0, 0}
-}
-
-// Indicates the success/failure for analysis.
-type Status struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	Code Status_Code `protobuf:"varint,1,opt,name=code,proto3,enum=ide_query.Status_Code" json:"code,omitempty"`
-	// Details about the status, might be displayed to user.
-	Message *string `protobuf:"bytes,2,opt,name=message,proto3,oneof" json:"message,omitempty"`
-}
-
-func (x *Status) Reset() {
-	*x = Status{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_ide_query_proto_msgTypes[0]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *Status) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*Status) ProtoMessage() {}
-
-func (x *Status) ProtoReflect() protoreflect.Message {
-	mi := &file_ide_query_proto_msgTypes[0]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use Status.ProtoReflect.Descriptor instead.
-func (*Status) Descriptor() ([]byte, []int) {
+// Deprecated: Use Language.Descriptor instead.
+func (Language) EnumDescriptor() ([]byte, []int) {
 	return file_ide_query_proto_rawDescGZIP(), []int{0}
 }
 
-func (x *Status) GetCode() Status_Code {
-	if x != nil {
-		return x.Code
+type AnalysisResult_Status_Code int32
+
+const (
+	AnalysisResult_Status_CODE_UNSPECIFIED  AnalysisResult_Status_Code = 0
+	AnalysisResult_Status_CODE_OK           AnalysisResult_Status_Code = 1
+	AnalysisResult_Status_CODE_NOT_FOUND    AnalysisResult_Status_Code = 2 // no target or module found for the source file.
+	AnalysisResult_Status_CODE_BUILD_FAILED AnalysisResult_Status_Code = 3
+)
+
+// Enum value maps for AnalysisResult_Status_Code.
+var (
+	AnalysisResult_Status_Code_name = map[int32]string{
+		0: "CODE_UNSPECIFIED",
+		1: "CODE_OK",
+		2: "CODE_NOT_FOUND",
+		3: "CODE_BUILD_FAILED",
 	}
-	return Status_OK
-}
-
-func (x *Status) GetMessage() string {
-	if x != nil && x.Message != nil {
-		return *x.Message
+	AnalysisResult_Status_Code_value = map[string]int32{
+		"CODE_UNSPECIFIED":  0,
+		"CODE_OK":           1,
+		"CODE_NOT_FOUND":    2,
+		"CODE_BUILD_FAILED": 3,
 	}
-	return ""
+)
+
+func (x AnalysisResult_Status_Code) Enum() *AnalysisResult_Status_Code {
+	p := new(AnalysisResult_Status_Code)
+	*p = x
+	return p
 }
 
-// Represents an Android checkout on user's workstation.
-type RepoState struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Absolute path for the checkout in the workstation.
-	// e.g. /home/user/work/android/
-	RepoDir string `protobuf:"bytes,1,opt,name=repo_dir,json=repoDir,proto3" json:"repo_dir,omitempty"`
-	// Relative to repo_dir.
-	ActiveFilePath []string `protobuf:"bytes,2,rep,name=active_file_path,json=activeFilePath,proto3" json:"active_file_path,omitempty"`
-	// Repository relative path to output directory in workstation.
-	OutDir string `protobuf:"bytes,3,opt,name=out_dir,json=outDir,proto3" json:"out_dir,omitempty"`
-	// Repository relative path to compile_commands.json in workstation.
-	CompDbPath string `protobuf:"bytes,4,opt,name=comp_db_path,json=compDbPath,proto3" json:"comp_db_path,omitempty"`
+func (x AnalysisResult_Status_Code) String() string {
+	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
 }
 
-func (x *RepoState) Reset() {
-	*x = RepoState{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_ide_query_proto_msgTypes[1]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
+func (AnalysisResult_Status_Code) Descriptor() protoreflect.EnumDescriptor {
+	return file_ide_query_proto_enumTypes[1].Descriptor()
 }
 
-func (x *RepoState) String() string {
-	return protoimpl.X.MessageStringOf(x)
+func (AnalysisResult_Status_Code) Type() protoreflect.EnumType {
+	return &file_ide_query_proto_enumTypes[1]
 }
 
-func (*RepoState) ProtoMessage() {}
-
-func (x *RepoState) ProtoReflect() protoreflect.Message {
-	mi := &file_ide_query_proto_msgTypes[1]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
+func (x AnalysisResult_Status_Code) Number() protoreflect.EnumNumber {
+	return protoreflect.EnumNumber(x)
 }
 
-// Deprecated: Use RepoState.ProtoReflect.Descriptor instead.
-func (*RepoState) Descriptor() ([]byte, []int) {
-	return file_ide_query_proto_rawDescGZIP(), []int{1}
+// Deprecated: Use AnalysisResult_Status_Code.Descriptor instead.
+func (AnalysisResult_Status_Code) EnumDescriptor() ([]byte, []int) {
+	return file_ide_query_proto_rawDescGZIP(), []int{3, 0, 0}
 }
 
-func (x *RepoState) GetRepoDir() string {
-	if x != nil {
-		return x.RepoDir
-	}
-	return ""
-}
-
-func (x *RepoState) GetActiveFilePath() []string {
-	if x != nil {
-		return x.ActiveFilePath
-	}
-	return nil
-}
-
-func (x *RepoState) GetOutDir() string {
-	if x != nil {
-		return x.OutDir
-	}
-	return ""
-}
-
-func (x *RepoState) GetCompDbPath() string {
-	if x != nil {
-		return x.CompDbPath
-	}
-	return ""
-}
-
-// Provides all the targets that are pre-requisities for running language
-// services on active_file_paths.
-type DepsResponse struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	Deps   []*DepsResponse_Deps `protobuf:"bytes,1,rep,name=deps,proto3" json:"deps,omitempty"`
-	Status *Status              `protobuf:"bytes,2,opt,name=status,proto3,oneof" json:"status,omitempty"`
-}
-
-func (x *DepsResponse) Reset() {
-	*x = DepsResponse{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_ide_query_proto_msgTypes[2]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *DepsResponse) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*DepsResponse) ProtoMessage() {}
-
-func (x *DepsResponse) ProtoReflect() protoreflect.Message {
-	mi := &file_ide_query_proto_msgTypes[2]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use DepsResponse.ProtoReflect.Descriptor instead.
-func (*DepsResponse) Descriptor() ([]byte, []int) {
-	return file_ide_query_proto_rawDescGZIP(), []int{2}
-}
-
-func (x *DepsResponse) GetDeps() []*DepsResponse_Deps {
-	if x != nil {
-		return x.Deps
-	}
-	return nil
-}
-
-func (x *DepsResponse) GetStatus() *Status {
-	if x != nil {
-		return x.Status
-	}
-	return nil
-}
-
-// Returns all the information necessary for providing language services for the
-// active files.
 type GeneratedFile struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields
 
-	// Path to the file relative to IdeAnalysis.build_artifact_root.
+	// Path to the file relative to build_out_dir.
 	Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
 	// The text of the generated file, if not provided contents will be read
 	// from the path above in user's workstation.
@@ -274,7 +151,7 @@
 func (x *GeneratedFile) Reset() {
 	*x = GeneratedFile{}
 	if protoimpl.UnsafeEnabled {
-		mi := &file_ide_query_proto_msgTypes[3]
+		mi := &file_ide_query_proto_msgTypes[0]
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
 		ms.StoreMessageInfo(mi)
 	}
@@ -287,7 +164,7 @@
 func (*GeneratedFile) ProtoMessage() {}
 
 func (x *GeneratedFile) ProtoReflect() protoreflect.Message {
-	mi := &file_ide_query_proto_msgTypes[3]
+	mi := &file_ide_query_proto_msgTypes[0]
 	if protoimpl.UnsafeEnabled && x != nil {
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
 		if ms.LoadMessageInfo() == nil {
@@ -300,7 +177,7 @@
 
 // Deprecated: Use GeneratedFile.ProtoReflect.Descriptor instead.
 func (*GeneratedFile) Descriptor() ([]byte, []int) {
-	return file_ide_query_proto_rawDescGZIP(), []int{3}
+	return file_ide_query_proto_rawDescGZIP(), []int{0}
 }
 
 func (x *GeneratedFile) GetPath() string {
@@ -317,127 +194,29 @@
 	return nil
 }
 
-type SourceFile struct {
-	state         protoimpl.MessageState
-	sizeCache     protoimpl.SizeCache
-	unknownFields protoimpl.UnknownFields
-
-	// Path to the source file relative to repository root.
-	Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
-	// Working directory used by the build system. All the relative
-	// paths in compiler_arguments should be relative to this path.
-	// Relative to repository root.
-	WorkingDir string `protobuf:"bytes,2,opt,name=working_dir,json=workingDir,proto3" json:"working_dir,omitempty"`
-	// Compiler arguments to compile the source file. If multiple variants
-	// of the module being compiled are possible, the query script will choose
-	// one.
-	CompilerArguments []string `protobuf:"bytes,3,rep,name=compiler_arguments,json=compilerArguments,proto3" json:"compiler_arguments,omitempty"`
-	// Any generated files that are used in compiling the file.
-	Generated []*GeneratedFile `protobuf:"bytes,4,rep,name=generated,proto3" json:"generated,omitempty"`
-	// Paths to all of the sources, like build files, code generators,
-	// proto files etc. that were used during analysis. Used to figure
-	// out when a set of build artifacts are stale and the query tool
-	// must be re-run.
-	// Relative to repository root.
-	Deps []string `protobuf:"bytes,5,rep,name=deps,proto3" json:"deps,omitempty"`
-	// Represents analysis status for this particular file. e.g. not part
-	// of the build graph.
-	Status *Status `protobuf:"bytes,6,opt,name=status,proto3,oneof" json:"status,omitempty"`
-}
-
-func (x *SourceFile) Reset() {
-	*x = SourceFile{}
-	if protoimpl.UnsafeEnabled {
-		mi := &file_ide_query_proto_msgTypes[4]
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		ms.StoreMessageInfo(mi)
-	}
-}
-
-func (x *SourceFile) String() string {
-	return protoimpl.X.MessageStringOf(x)
-}
-
-func (*SourceFile) ProtoMessage() {}
-
-func (x *SourceFile) ProtoReflect() protoreflect.Message {
-	mi := &file_ide_query_proto_msgTypes[4]
-	if protoimpl.UnsafeEnabled && x != nil {
-		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
-		if ms.LoadMessageInfo() == nil {
-			ms.StoreMessageInfo(mi)
-		}
-		return ms
-	}
-	return mi.MessageOf(x)
-}
-
-// Deprecated: Use SourceFile.ProtoReflect.Descriptor instead.
-func (*SourceFile) Descriptor() ([]byte, []int) {
-	return file_ide_query_proto_rawDescGZIP(), []int{4}
-}
-
-func (x *SourceFile) GetPath() string {
-	if x != nil {
-		return x.Path
-	}
-	return ""
-}
-
-func (x *SourceFile) GetWorkingDir() string {
-	if x != nil {
-		return x.WorkingDir
-	}
-	return ""
-}
-
-func (x *SourceFile) GetCompilerArguments() []string {
-	if x != nil {
-		return x.CompilerArguments
-	}
-	return nil
-}
-
-func (x *SourceFile) GetGenerated() []*GeneratedFile {
-	if x != nil {
-		return x.Generated
-	}
-	return nil
-}
-
-func (x *SourceFile) GetDeps() []string {
-	if x != nil {
-		return x.Deps
-	}
-	return nil
-}
-
-func (x *SourceFile) GetStatus() *Status {
-	if x != nil {
-		return x.Status
-	}
-	return nil
-}
-
 type IdeAnalysis struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields
 
-	// Path relative to repository root, containing all the artifacts
-	// generated by the build system. GeneratedFile.path are always
-	// relative to this directory.
-	BuildArtifactRoot string        `protobuf:"bytes,1,opt,name=build_artifact_root,json=buildArtifactRoot,proto3" json:"build_artifact_root,omitempty"`
-	Sources           []*SourceFile `protobuf:"bytes,2,rep,name=sources,proto3" json:"sources,omitempty"`
-	// Status representing overall analysis.
-	// Should fail only when no analysis can be performed.
-	Status *Status `protobuf:"bytes,3,opt,name=status,proto3,oneof" json:"status,omitempty"`
+	// Directory that contains build outputs generated by the build system.
+	// Relative to repository root.
+	BuildOutDir string `protobuf:"bytes,1,opt,name=build_out_dir,json=buildOutDir,proto3" json:"build_out_dir,omitempty"`
+	// Working directory used by the build system.
+	// Relative to repository root.
+	WorkingDir string `protobuf:"bytes,4,opt,name=working_dir,json=workingDir,proto3" json:"working_dir,omitempty"`
+	// Only set if the whole query failed.
+	Error *AnalysisError `protobuf:"bytes,5,opt,name=error,proto3,oneof" json:"error,omitempty"`
+	// List of results, one per queried file.
+	Results []*AnalysisResult `protobuf:"bytes,6,rep,name=results,proto3" json:"results,omitempty"`
+	// List of buildable units directly or indirectly referenced by the results.
+	Units []*BuildableUnit `protobuf:"bytes,7,rep,name=units,proto3" json:"units,omitempty"`
 }
 
 func (x *IdeAnalysis) Reset() {
 	*x = IdeAnalysis{}
 	if protoimpl.UnsafeEnabled {
-		mi := &file_ide_query_proto_msgTypes[5]
+		mi := &file_ide_query_proto_msgTypes[1]
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
 		ms.StoreMessageInfo(mi)
 	}
@@ -450,7 +229,7 @@
 func (*IdeAnalysis) ProtoMessage() {}
 
 func (x *IdeAnalysis) ProtoReflect() protoreflect.Message {
-	mi := &file_ide_query_proto_msgTypes[5]
+	mi := &file_ide_query_proto_msgTypes[1]
 	if protoimpl.UnsafeEnabled && x != nil {
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
 		if ms.LoadMessageInfo() == nil {
@@ -463,45 +242,345 @@
 
 // Deprecated: Use IdeAnalysis.ProtoReflect.Descriptor instead.
 func (*IdeAnalysis) Descriptor() ([]byte, []int) {
-	return file_ide_query_proto_rawDescGZIP(), []int{5}
+	return file_ide_query_proto_rawDescGZIP(), []int{1}
 }
 
-func (x *IdeAnalysis) GetBuildArtifactRoot() string {
+func (x *IdeAnalysis) GetBuildOutDir() string {
 	if x != nil {
-		return x.BuildArtifactRoot
+		return x.BuildOutDir
 	}
 	return ""
 }
 
-func (x *IdeAnalysis) GetSources() []*SourceFile {
+func (x *IdeAnalysis) GetWorkingDir() string {
 	if x != nil {
-		return x.Sources
+		return x.WorkingDir
+	}
+	return ""
+}
+
+func (x *IdeAnalysis) GetError() *AnalysisError {
+	if x != nil {
+		return x.Error
 	}
 	return nil
 }
 
-func (x *IdeAnalysis) GetStatus() *Status {
+func (x *IdeAnalysis) GetResults() []*AnalysisResult {
+	if x != nil {
+		return x.Results
+	}
+	return nil
+}
+
+func (x *IdeAnalysis) GetUnits() []*BuildableUnit {
+	if x != nil {
+		return x.Units
+	}
+	return nil
+}
+
+type AnalysisError struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Human readable error message.
+	ErrorMessage string `protobuf:"bytes,1,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"`
+}
+
+func (x *AnalysisError) Reset() {
+	*x = AnalysisError{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_ide_query_proto_msgTypes[2]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *AnalysisError) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*AnalysisError) ProtoMessage() {}
+
+func (x *AnalysisError) ProtoReflect() protoreflect.Message {
+	mi := &file_ide_query_proto_msgTypes[2]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use AnalysisError.ProtoReflect.Descriptor instead.
+func (*AnalysisError) Descriptor() ([]byte, []int) {
+	return file_ide_query_proto_rawDescGZIP(), []int{2}
+}
+
+func (x *AnalysisError) GetErrorMessage() string {
+	if x != nil {
+		return x.ErrorMessage
+	}
+	return ""
+}
+
+type AnalysisResult struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Path to the source file that was queried, relative to repository root.
+	SourceFilePath string `protobuf:"bytes,1,opt,name=source_file_path,json=sourceFilePath,proto3" json:"source_file_path,omitempty"`
+	// Represents status for this result. e.g. not part of the build graph.
+	Status *AnalysisResult_Status `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"`
+	// ID of buildable unit that contains the source file.
+	// The ide_query script can choose the most relevant unit from multiple
+	// options.
+	UnitId string `protobuf:"bytes,3,opt,name=unit_id,json=unitId,proto3" json:"unit_id,omitempty"`
+	// Invalidation rule to check if the result is still valid.
+	Invalidation *Invalidation `protobuf:"bytes,4,opt,name=invalidation,proto3" json:"invalidation,omitempty"`
+}
+
+func (x *AnalysisResult) Reset() {
+	*x = AnalysisResult{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_ide_query_proto_msgTypes[3]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *AnalysisResult) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*AnalysisResult) ProtoMessage() {}
+
+func (x *AnalysisResult) ProtoReflect() protoreflect.Message {
+	mi := &file_ide_query_proto_msgTypes[3]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use AnalysisResult.ProtoReflect.Descriptor instead.
+func (*AnalysisResult) Descriptor() ([]byte, []int) {
+	return file_ide_query_proto_rawDescGZIP(), []int{3}
+}
+
+func (x *AnalysisResult) GetSourceFilePath() string {
+	if x != nil {
+		return x.SourceFilePath
+	}
+	return ""
+}
+
+func (x *AnalysisResult) GetStatus() *AnalysisResult_Status {
 	if x != nil {
 		return x.Status
 	}
 	return nil
 }
 
-// Build dependencies of a source file for providing language services.
-type DepsResponse_Deps struct {
+func (x *AnalysisResult) GetUnitId() string {
+	if x != nil {
+		return x.UnitId
+	}
+	return ""
+}
+
+func (x *AnalysisResult) GetInvalidation() *Invalidation {
+	if x != nil {
+		return x.Invalidation
+	}
+	return nil
+}
+
+type BuildableUnit struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields
 
-	// Relative to repo_dir.
-	SourceFile string `protobuf:"bytes,1,opt,name=source_file,json=sourceFile,proto3" json:"source_file,omitempty"`
-	// Build target to execute for generating dep.
-	BuildTarget []string `protobuf:"bytes,2,rep,name=build_target,json=buildTarget,proto3" json:"build_target,omitempty"`
-	Status      *Status  `protobuf:"bytes,3,opt,name=status,proto3,oneof" json:"status,omitempty"`
+	// Unique identifier of the buildable unit.
+	//
+	// Examples:
+	//   - Java: module or target name, e.g. "framework-bluetooth" or
+	//     "//third_party/hamcrest:hamcrest_java"
+	//   - C++: source file, e.g. "path/to/file.cc"
+	Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
+	// Language of the unit.
+	// Required for buildable units directly referenced by the AnalysisResult,
+	// e.g. the unit associated with the compilation stage for the source file.
+	Language Language `protobuf:"varint,2,opt,name=language,proto3,enum=ide_query.Language" json:"language,omitempty"`
+	// Source files that are part of this unit.
+	// Path to the file relative to working_dir.
+	SourceFilePaths []string `protobuf:"bytes,3,rep,name=source_file_paths,json=sourceFilePaths,proto3" json:"source_file_paths,omitempty"`
+	// Compiler arguments to compile the source files.
+	CompilerArguments []string `protobuf:"bytes,4,rep,name=compiler_arguments,json=compilerArguments,proto3" json:"compiler_arguments,omitempty"`
+	// List of generated files produced by this unit.
+	GeneratedFiles []*GeneratedFile `protobuf:"bytes,5,rep,name=generated_files,json=generatedFiles,proto3" json:"generated_files,omitempty"`
+	// List of other BuildableUnits this unit depends on.
+	DependencyIds []string `protobuf:"bytes,6,rep,name=dependency_ids,json=dependencyIds,proto3" json:"dependency_ids,omitempty"`
 }
 
-func (x *DepsResponse_Deps) Reset() {
-	*x = DepsResponse_Deps{}
+func (x *BuildableUnit) Reset() {
+	*x = BuildableUnit{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_ide_query_proto_msgTypes[4]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *BuildableUnit) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*BuildableUnit) ProtoMessage() {}
+
+func (x *BuildableUnit) ProtoReflect() protoreflect.Message {
+	mi := &file_ide_query_proto_msgTypes[4]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use BuildableUnit.ProtoReflect.Descriptor instead.
+func (*BuildableUnit) Descriptor() ([]byte, []int) {
+	return file_ide_query_proto_rawDescGZIP(), []int{4}
+}
+
+func (x *BuildableUnit) GetId() string {
+	if x != nil {
+		return x.Id
+	}
+	return ""
+}
+
+func (x *BuildableUnit) GetLanguage() Language {
+	if x != nil {
+		return x.Language
+	}
+	return Language_LANGUAGE_UNSPECIFIED
+}
+
+func (x *BuildableUnit) GetSourceFilePaths() []string {
+	if x != nil {
+		return x.SourceFilePaths
+	}
+	return nil
+}
+
+func (x *BuildableUnit) GetCompilerArguments() []string {
+	if x != nil {
+		return x.CompilerArguments
+	}
+	return nil
+}
+
+func (x *BuildableUnit) GetGeneratedFiles() []*GeneratedFile {
+	if x != nil {
+		return x.GeneratedFiles
+	}
+	return nil
+}
+
+func (x *BuildableUnit) GetDependencyIds() []string {
+	if x != nil {
+		return x.DependencyIds
+	}
+	return nil
+}
+
+// Invalidation rule to check if the result is still valid.
+// This should contain files/dirs that are not directly part of the build graph
+// but still affect the result, for example BUILD files, the toolchain
+// directory, or config files.
+type Invalidation struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// If any of these files change the result may become invalid.
+	// Path to the file relative to repository root.
+	FilePaths []string `protobuf:"bytes,1,rep,name=file_paths,json=filePaths,proto3" json:"file_paths,omitempty"`
+	// If any of these rules match a changed file the result may become invalid.
+	Wildcards []*Invalidation_Wildcard `protobuf:"bytes,4,rep,name=wildcards,proto3" json:"wildcards,omitempty"`
+}
+
+func (x *Invalidation) Reset() {
+	*x = Invalidation{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_ide_query_proto_msgTypes[5]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Invalidation) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Invalidation) ProtoMessage() {}
+
+func (x *Invalidation) ProtoReflect() protoreflect.Message {
+	mi := &file_ide_query_proto_msgTypes[5]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Invalidation.ProtoReflect.Descriptor instead.
+func (*Invalidation) Descriptor() ([]byte, []int) {
+	return file_ide_query_proto_rawDescGZIP(), []int{5}
+}
+
+func (x *Invalidation) GetFilePaths() []string {
+	if x != nil {
+		return x.FilePaths
+	}
+	return nil
+}
+
+func (x *Invalidation) GetWildcards() []*Invalidation_Wildcard {
+	if x != nil {
+		return x.Wildcards
+	}
+	return nil
+}
+
+// Indicates the success/failure for the query.
+type AnalysisResult_Status struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Code AnalysisResult_Status_Code `protobuf:"varint,1,opt,name=code,proto3,enum=ide_query.AnalysisResult_Status_Code" json:"code,omitempty"`
+	// Details about the status, might be displayed to user.
+	StatusMessage *string `protobuf:"bytes,2,opt,name=status_message,json=statusMessage,proto3,oneof" json:"status_message,omitempty"`
+}
+
+func (x *AnalysisResult_Status) Reset() {
+	*x = AnalysisResult_Status{}
 	if protoimpl.UnsafeEnabled {
 		mi := &file_ide_query_proto_msgTypes[6]
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
@@ -509,13 +588,13 @@
 	}
 }
 
-func (x *DepsResponse_Deps) String() string {
+func (x *AnalysisResult_Status) String() string {
 	return protoimpl.X.MessageStringOf(x)
 }
 
-func (*DepsResponse_Deps) ProtoMessage() {}
+func (*AnalysisResult_Status) ProtoMessage() {}
 
-func (x *DepsResponse_Deps) ProtoReflect() protoreflect.Message {
+func (x *AnalysisResult_Status) ProtoReflect() protoreflect.Message {
 	mi := &file_ide_query_proto_msgTypes[6]
 	if protoimpl.UnsafeEnabled && x != nil {
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
@@ -527,104 +606,190 @@
 	return mi.MessageOf(x)
 }
 
-// Deprecated: Use DepsResponse_Deps.ProtoReflect.Descriptor instead.
-func (*DepsResponse_Deps) Descriptor() ([]byte, []int) {
-	return file_ide_query_proto_rawDescGZIP(), []int{2, 0}
+// Deprecated: Use AnalysisResult_Status.ProtoReflect.Descriptor instead.
+func (*AnalysisResult_Status) Descriptor() ([]byte, []int) {
+	return file_ide_query_proto_rawDescGZIP(), []int{3, 0}
 }
 
-func (x *DepsResponse_Deps) GetSourceFile() string {
+func (x *AnalysisResult_Status) GetCode() AnalysisResult_Status_Code {
 	if x != nil {
-		return x.SourceFile
+		return x.Code
+	}
+	return AnalysisResult_Status_CODE_UNSPECIFIED
+}
+
+func (x *AnalysisResult_Status) GetStatusMessage() string {
+	if x != nil && x.StatusMessage != nil {
+		return *x.StatusMessage
 	}
 	return ""
 }
 
-func (x *DepsResponse_Deps) GetBuildTarget() []string {
-	if x != nil {
-		return x.BuildTarget
-	}
-	return nil
+type Invalidation_Wildcard struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Prefix of the file path (e.g. "path/to/")
+	Prefix *string `protobuf:"bytes,1,opt,name=prefix,proto3,oneof" json:"prefix,omitempty"`
+	// Suffix of the file path (e.g. "Android.bp")
+	Suffix *string `protobuf:"bytes,2,opt,name=suffix,proto3,oneof" json:"suffix,omitempty"`
+	// If false, the part of the path between the given `prefix` and `suffix`
+	// should not contain directory separators ('/').
+	CanCrossFolder *bool `protobuf:"varint,3,opt,name=can_cross_folder,json=canCrossFolder,proto3,oneof" json:"can_cross_folder,omitempty"`
 }
 
-func (x *DepsResponse_Deps) GetStatus() *Status {
-	if x != nil {
-		return x.Status
+func (x *Invalidation_Wildcard) Reset() {
+	*x = Invalidation_Wildcard{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_ide_query_proto_msgTypes[7]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
 	}
-	return nil
+}
+
+func (x *Invalidation_Wildcard) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Invalidation_Wildcard) ProtoMessage() {}
+
+func (x *Invalidation_Wildcard) ProtoReflect() protoreflect.Message {
+	mi := &file_ide_query_proto_msgTypes[7]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Invalidation_Wildcard.ProtoReflect.Descriptor instead.
+func (*Invalidation_Wildcard) Descriptor() ([]byte, []int) {
+	return file_ide_query_proto_rawDescGZIP(), []int{5, 0}
+}
+
+func (x *Invalidation_Wildcard) GetPrefix() string {
+	if x != nil && x.Prefix != nil {
+		return *x.Prefix
+	}
+	return ""
+}
+
+func (x *Invalidation_Wildcard) GetSuffix() string {
+	if x != nil && x.Suffix != nil {
+		return *x.Suffix
+	}
+	return ""
+}
+
+func (x *Invalidation_Wildcard) GetCanCrossFolder() bool {
+	if x != nil && x.CanCrossFolder != nil {
+		return *x.CanCrossFolder
+	}
+	return false
 }
 
 var File_ide_query_proto protoreflect.FileDescriptor
 
 var file_ide_query_proto_rawDesc = []byte{
 	0x0a, 0x0f, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74,
-	0x6f, 0x12, 0x09, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x22, 0x7c, 0x0a, 0x06,
-	0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x2a, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01,
-	0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79,
-	0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f,
-	0x64, 0x65, 0x12, 0x1d, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20,
-	0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01,
-	0x01, 0x22, 0x1b, 0x0a, 0x04, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x06, 0x0a, 0x02, 0x4f, 0x4b, 0x10,
-	0x00, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x41, 0x49, 0x4c, 0x55, 0x52, 0x45, 0x10, 0x01, 0x42, 0x0a,
-	0x0a, 0x08, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x8b, 0x01, 0x0a, 0x09, 0x52,
-	0x65, 0x70, 0x6f, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x72, 0x65, 0x70, 0x6f,
-	0x5f, 0x64, 0x69, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x72, 0x65, 0x70, 0x6f,
-	0x44, 0x69, 0x72, 0x12, 0x28, 0x0a, 0x10, 0x61, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x66, 0x69,
-	0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0e, 0x61,
-	0x63, 0x74, 0x69, 0x76, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x17, 0x0a,
-	0x07, 0x6f, 0x75, 0x74, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06,
-	0x6f, 0x75, 0x74, 0x44, 0x69, 0x72, 0x12, 0x20, 0x0a, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x5f, 0x64,
-	0x62, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f,
-	0x6d, 0x70, 0x44, 0x62, 0x50, 0x61, 0x74, 0x68, 0x22, 0x83, 0x02, 0x0a, 0x0c, 0x44, 0x65, 0x70,
-	0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x04, 0x64, 0x65, 0x70,
-	0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75,
-	0x65, 0x72, 0x79, 0x2e, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
-	0x2e, 0x44, 0x65, 0x70, 0x73, 0x52, 0x04, 0x64, 0x65, 0x70, 0x73, 0x12, 0x2e, 0x0a, 0x06, 0x73,
-	0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x69, 0x64,
-	0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, 0x00,
-	0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01, 0x1a, 0x85, 0x01, 0x0a, 0x04,
-	0x44, 0x65, 0x70, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x66,
-	0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63,
-	0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x74,
-	0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x62, 0x75, 0x69,
-	0x6c, 0x64, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x12, 0x2e, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74,
-	0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71,
-	0x75, 0x65, 0x72, 0x79, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73,
-	0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61,
-	0x74, 0x75, 0x73, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x51,
-	0x0a, 0x0d, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12,
-	0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70,
-	0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18,
-	0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74,
-	0x73, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74,
-	0x73, 0x22, 0xf7, 0x01, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65,
-	0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
-	0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e, 0x67, 0x5f,
-	0x64, 0x69, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x69,
-	0x6e, 0x67, 0x44, 0x69, 0x72, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65,
-	0x72, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28,
-	0x09, 0x52, 0x11, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x41, 0x72, 0x67, 0x75, 0x6d,
-	0x65, 0x6e, 0x74, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65,
-	0x64, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75,
-	0x65, 0x72, 0x79, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c,
-	0x65, 0x52, 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x12, 0x12, 0x0a, 0x04,
-	0x64, 0x65, 0x70, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x64, 0x65, 0x70, 0x73,
-	0x12, 0x2e, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b,
-	0x32, 0x11, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x53, 0x74, 0x61,
-	0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01,
-	0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0xa9, 0x01, 0x0a, 0x0b,
-	0x49, 0x64, 0x65, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x62,
-	0x75, 0x69, 0x6c, 0x64, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x72, 0x6f,
-	0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x41,
-	0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x2f, 0x0a, 0x07, 0x73,
-	0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x69,
-	0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46,
-	0x69, 0x6c, 0x65, 0x52, 0x07, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x2e, 0x0a, 0x06,
-	0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x69,
-	0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x48,
-	0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07,
-	0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x42, 0x1b, 0x5a, 0x19, 0x69, 0x64, 0x65, 0x5f, 0x71,
-	0x75, 0x65, 0x72, 0x79, 0x2f, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x70,
-	0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+	0x6f, 0x12, 0x09, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x22, 0x51, 0x0a, 0x0d,
+	0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a,
+	0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74,
+	0x68, 0x12, 0x1f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20,
+	0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x88,
+	0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x22,
+	0x82, 0x02, 0x0a, 0x0b, 0x49, 0x64, 0x65, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x12,
+	0x22, 0x0a, 0x0d, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6f, 0x75, 0x74, 0x5f, 0x64, 0x69, 0x72,
+	0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x4f, 0x75, 0x74,
+	0x44, 0x69, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x64,
+	0x69, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e,
+	0x67, 0x44, 0x69, 0x72, 0x12, 0x33, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x05, 0x20,
+	0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e,
+	0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52,
+	0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x07, 0x72, 0x65, 0x73,
+	0x75, 0x6c, 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x69, 0x64, 0x65,
+	0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52,
+	0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x2e,
+	0x0a, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e,
+	0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x61,
+	0x62, 0x6c, 0x65, 0x55, 0x6e, 0x69, 0x74, 0x52, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x42, 0x08,
+	0x0a, 0x06, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x4a, 0x04,
+	0x08, 0x03, 0x10, 0x04, 0x22, 0x34, 0x0a, 0x0d, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73,
+	0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d,
+	0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72,
+	0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0xa5, 0x03, 0x0a, 0x0e, 0x41,
+	0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x28, 0x0a,
+	0x10, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74,
+	0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46,
+	0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x38, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75,
+	0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75,
+	0x65, 0x72, 0x79, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x65, 0x73, 0x75,
+	0x6c, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75,
+	0x73, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x6e, 0x69, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01,
+	0x28, 0x09, 0x52, 0x06, 0x75, 0x6e, 0x69, 0x74, 0x49, 0x64, 0x12, 0x3b, 0x0a, 0x0c, 0x69, 0x6e,
+	0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b,
+	0x32, 0x17, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x49, 0x6e, 0x76,
+	0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x69, 0x6e, 0x76, 0x61, 0x6c,
+	0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xd8, 0x01, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74,
+	0x75, 0x73, 0x12, 0x39, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e,
+	0x32, 0x25, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x41, 0x6e, 0x61,
+	0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74,
+	0x75, 0x73, 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x2a, 0x0a,
+	0x0e, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18,
+	0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x4d,
+	0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x22, 0x54, 0x0a, 0x04, 0x43, 0x6f, 0x64,
+	0x65, 0x12, 0x14, 0x0a, 0x10, 0x43, 0x4f, 0x44, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43,
+	0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x4f, 0x44, 0x45, 0x5f,
+	0x4f, 0x4b, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x43, 0x4f, 0x44, 0x45, 0x5f, 0x4e, 0x4f, 0x54,
+	0x5f, 0x46, 0x4f, 0x55, 0x4e, 0x44, 0x10, 0x02, 0x12, 0x15, 0x0a, 0x11, 0x43, 0x4f, 0x44, 0x45,
+	0x5f, 0x42, 0x55, 0x49, 0x4c, 0x44, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x42,
+	0x11, 0x0a, 0x0f, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61,
+	0x67, 0x65, 0x22, 0x95, 0x02, 0x0a, 0x0d, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x61, 0x62, 0x6c, 0x65,
+	0x55, 0x6e, 0x69, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+	0x52, 0x02, 0x69, 0x64, 0x12, 0x2f, 0x0a, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65,
+	0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65,
+	0x72, 0x79, 0x2e, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x52, 0x08, 0x6c, 0x61, 0x6e,
+	0x67, 0x75, 0x61, 0x67, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
+	0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09,
+	0x52, 0x0f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68,
+	0x73, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5f, 0x61, 0x72,
+	0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x11, 0x63,
+	0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73,
+	0x12, 0x41, 0x0a, 0x0f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x66, 0x69,
+	0x6c, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x69, 0x64, 0x65, 0x5f,
+	0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46,
+	0x69, 0x6c, 0x65, 0x52, 0x0e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46, 0x69,
+	0x6c, 0x65, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63,
+	0x79, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x64, 0x65, 0x70,
+	0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x49, 0x64, 0x73, 0x22, 0x8e, 0x02, 0x0a, 0x0c, 0x49,
+	0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x66,
+	0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52,
+	0x09, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x73, 0x12, 0x3e, 0x0a, 0x09, 0x77, 0x69,
+	0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e,
+	0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x49, 0x6e, 0x76, 0x61, 0x6c, 0x69,
+	0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x57, 0x69, 0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x52,
+	0x09, 0x77, 0x69, 0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x73, 0x1a, 0x9e, 0x01, 0x0a, 0x08, 0x57,
+	0x69, 0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x12, 0x1b, 0x0a, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69,
+	0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69,
+	0x78, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x73, 0x75, 0x66, 0x66, 0x69, 0x78, 0x18, 0x02,
+	0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x73, 0x75, 0x66, 0x66, 0x69, 0x78, 0x88, 0x01,
+	0x01, 0x12, 0x2d, 0x0a, 0x10, 0x63, 0x61, 0x6e, 0x5f, 0x63, 0x72, 0x6f, 0x73, 0x73, 0x5f, 0x66,
+	0x6f, 0x6c, 0x64, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x48, 0x02, 0x52, 0x0e, 0x63,
+	0x61, 0x6e, 0x43, 0x72, 0x6f, 0x73, 0x73, 0x46, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x88, 0x01, 0x01,
+	0x42, 0x09, 0x0a, 0x07, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x42, 0x09, 0x0a, 0x07, 0x5f,
+	0x73, 0x75, 0x66, 0x66, 0x69, 0x78, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x63, 0x61, 0x6e, 0x5f, 0x63,
+	0x72, 0x6f, 0x73, 0x73, 0x5f, 0x66, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x2a, 0x49, 0x0a, 0x08, 0x4c,
+	0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x14, 0x4c, 0x41, 0x4e, 0x47, 0x55,
+	0x41, 0x47, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10,
+	0x00, 0x12, 0x11, 0x0a, 0x0d, 0x4c, 0x41, 0x4e, 0x47, 0x55, 0x41, 0x47, 0x45, 0x5f, 0x4a, 0x41,
+	0x56, 0x41, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x4c, 0x41, 0x4e, 0x47, 0x55, 0x41, 0x47, 0x45,
+	0x5f, 0x43, 0x50, 0x50, 0x10, 0x02, 0x42, 0x1b, 0x5a, 0x19, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75,
+	0x65, 0x72, 0x79, 0x2f, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x70, 0x72,
+	0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
 }
 
 var (
@@ -639,32 +804,35 @@
 	return file_ide_query_proto_rawDescData
 }
 
-var file_ide_query_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
-var file_ide_query_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
+var file_ide_query_proto_enumTypes = make([]protoimpl.EnumInfo, 2)
+var file_ide_query_proto_msgTypes = make([]protoimpl.MessageInfo, 8)
 var file_ide_query_proto_goTypes = []interface{}{
-	(Status_Code)(0),          // 0: ide_query.Status.Code
-	(*Status)(nil),            // 1: ide_query.Status
-	(*RepoState)(nil),         // 2: ide_query.RepoState
-	(*DepsResponse)(nil),      // 3: ide_query.DepsResponse
-	(*GeneratedFile)(nil),     // 4: ide_query.GeneratedFile
-	(*SourceFile)(nil),        // 5: ide_query.SourceFile
-	(*IdeAnalysis)(nil),       // 6: ide_query.IdeAnalysis
-	(*DepsResponse_Deps)(nil), // 7: ide_query.DepsResponse.Deps
+	(Language)(0),                   // 0: ide_query.Language
+	(AnalysisResult_Status_Code)(0), // 1: ide_query.AnalysisResult.Status.Code
+	(*GeneratedFile)(nil),           // 2: ide_query.GeneratedFile
+	(*IdeAnalysis)(nil),             // 3: ide_query.IdeAnalysis
+	(*AnalysisError)(nil),           // 4: ide_query.AnalysisError
+	(*AnalysisResult)(nil),          // 5: ide_query.AnalysisResult
+	(*BuildableUnit)(nil),           // 6: ide_query.BuildableUnit
+	(*Invalidation)(nil),            // 7: ide_query.Invalidation
+	(*AnalysisResult_Status)(nil),   // 8: ide_query.AnalysisResult.Status
+	(*Invalidation_Wildcard)(nil),   // 9: ide_query.Invalidation.Wildcard
 }
 var file_ide_query_proto_depIdxs = []int32{
-	0, // 0: ide_query.Status.code:type_name -> ide_query.Status.Code
-	7, // 1: ide_query.DepsResponse.deps:type_name -> ide_query.DepsResponse.Deps
-	1, // 2: ide_query.DepsResponse.status:type_name -> ide_query.Status
-	4, // 3: ide_query.SourceFile.generated:type_name -> ide_query.GeneratedFile
-	1, // 4: ide_query.SourceFile.status:type_name -> ide_query.Status
-	5, // 5: ide_query.IdeAnalysis.sources:type_name -> ide_query.SourceFile
-	1, // 6: ide_query.IdeAnalysis.status:type_name -> ide_query.Status
-	1, // 7: ide_query.DepsResponse.Deps.status:type_name -> ide_query.Status
-	8, // [8:8] is the sub-list for method output_type
-	8, // [8:8] is the sub-list for method input_type
-	8, // [8:8] is the sub-list for extension type_name
-	8, // [8:8] is the sub-list for extension extendee
-	0, // [0:8] is the sub-list for field type_name
+	4, // 0: ide_query.IdeAnalysis.error:type_name -> ide_query.AnalysisError
+	5, // 1: ide_query.IdeAnalysis.results:type_name -> ide_query.AnalysisResult
+	6, // 2: ide_query.IdeAnalysis.units:type_name -> ide_query.BuildableUnit
+	8, // 3: ide_query.AnalysisResult.status:type_name -> ide_query.AnalysisResult.Status
+	7, // 4: ide_query.AnalysisResult.invalidation:type_name -> ide_query.Invalidation
+	0, // 5: ide_query.BuildableUnit.language:type_name -> ide_query.Language
+	2, // 6: ide_query.BuildableUnit.generated_files:type_name -> ide_query.GeneratedFile
+	9, // 7: ide_query.Invalidation.wildcards:type_name -> ide_query.Invalidation.Wildcard
+	1, // 8: ide_query.AnalysisResult.Status.code:type_name -> ide_query.AnalysisResult.Status.Code
+	9, // [9:9] is the sub-list for method output_type
+	9, // [9:9] is the sub-list for method input_type
+	9, // [9:9] is the sub-list for extension type_name
+	9, // [9:9] is the sub-list for extension extendee
+	0, // [0:9] is the sub-list for field type_name
 }
 
 func init() { file_ide_query_proto_init() }
@@ -674,42 +842,6 @@
 	}
 	if !protoimpl.UnsafeEnabled {
 		file_ide_query_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
-			switch v := v.(*Status); i {
-			case 0:
-				return &v.state
-			case 1:
-				return &v.sizeCache
-			case 2:
-				return &v.unknownFields
-			default:
-				return nil
-			}
-		}
-		file_ide_query_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
-			switch v := v.(*RepoState); i {
-			case 0:
-				return &v.state
-			case 1:
-				return &v.sizeCache
-			case 2:
-				return &v.unknownFields
-			default:
-				return nil
-			}
-		}
-		file_ide_query_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
-			switch v := v.(*DepsResponse); i {
-			case 0:
-				return &v.state
-			case 1:
-				return &v.sizeCache
-			case 2:
-				return &v.unknownFields
-			default:
-				return nil
-			}
-		}
-		file_ide_query_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
 			switch v := v.(*GeneratedFile); i {
 			case 0:
 				return &v.state
@@ -721,19 +853,7 @@
 				return nil
 			}
 		}
-		file_ide_query_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
-			switch v := v.(*SourceFile); i {
-			case 0:
-				return &v.state
-			case 1:
-				return &v.sizeCache
-			case 2:
-				return &v.unknownFields
-			default:
-				return nil
-			}
-		}
-		file_ide_query_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
+		file_ide_query_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
 			switch v := v.(*IdeAnalysis); i {
 			case 0:
 				return &v.state
@@ -745,8 +865,68 @@
 				return nil
 			}
 		}
+		file_ide_query_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*AnalysisError); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_ide_query_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*AnalysisResult); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_ide_query_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*BuildableUnit); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_ide_query_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Invalidation); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
 		file_ide_query_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
-			switch v := v.(*DepsResponse_Deps); i {
+			switch v := v.(*AnalysisResult_Status); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_ide_query_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Invalidation_Wildcard); i {
 			case 0:
 				return &v.state
 			case 1:
@@ -759,18 +939,16 @@
 		}
 	}
 	file_ide_query_proto_msgTypes[0].OneofWrappers = []interface{}{}
-	file_ide_query_proto_msgTypes[2].OneofWrappers = []interface{}{}
-	file_ide_query_proto_msgTypes[3].OneofWrappers = []interface{}{}
-	file_ide_query_proto_msgTypes[4].OneofWrappers = []interface{}{}
-	file_ide_query_proto_msgTypes[5].OneofWrappers = []interface{}{}
+	file_ide_query_proto_msgTypes[1].OneofWrappers = []interface{}{}
 	file_ide_query_proto_msgTypes[6].OneofWrappers = []interface{}{}
+	file_ide_query_proto_msgTypes[7].OneofWrappers = []interface{}{}
 	type x struct{}
 	out := protoimpl.TypeBuilder{
 		File: protoimpl.DescBuilder{
 			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
 			RawDescriptor: file_ide_query_proto_rawDesc,
-			NumEnums:      1,
-			NumMessages:   7,
+			NumEnums:      2,
+			NumMessages:   8,
 			NumExtensions: 0,
 			NumServices:   0,
 		},
diff --git a/tools/ide_query/ide_query_proto/ide_query.proto b/tools/ide_query/ide_query_proto/ide_query.proto
index 3d7a8e7..13f349c 100644
--- a/tools/ide_query/ide_query_proto/ide_query.proto
+++ b/tools/ide_query/ide_query_proto/ide_query.proto
@@ -16,51 +16,11 @@
 syntax = "proto3";
 
 package ide_query;
+
 option go_package = "ide_query/ide_query_proto";
 
-// Indicates the success/failure for analysis.
-message Status {
-  enum Code {
-    OK = 0;
-    FAILURE = 1;
-  }
-  Code code = 1;
-  // Details about the status, might be displayed to user.
-  optional string message = 2;
-}
-
-// Represents an Android checkout on user's workstation.
-message RepoState {
-  // Absolute path for the checkout in the workstation.
-  // e.g. /home/user/work/android/
-  string repo_dir = 1;
-  // Relative to repo_dir.
-  repeated string active_file_path = 2;
-  // Repository relative path to output directory in workstation.
-  string out_dir = 3;
-  // Repository relative path to compile_commands.json in workstation.
-  string comp_db_path = 4;
-}
-
-// Provides all the targets that are pre-requisities for running language
-// services on active_file_paths.
-message DepsResponse {
-  // Build dependencies of a source file for providing language services.
-  message Deps {
-    // Relative to repo_dir.
-    string source_file = 1;
-    // Build target to execute for generating dep.
-    repeated string build_target = 2;
-    optional Status status = 3;
-  }
-  repeated Deps deps = 1;
-  optional Status status = 2;
-}
-
-// Returns all the information necessary for providing language services for the
-// active files.
 message GeneratedFile {
-  // Path to the file relative to IdeAnalysis.build_artifact_root.
+  // Path to the file relative to build_out_dir.
   string path = 1;
 
   // The text of the generated file, if not provided contents will be read
@@ -68,44 +28,100 @@
   optional bytes contents = 2;
 }
 
-message SourceFile {
-  // Path to the source file relative to repository root.
-  string path = 1;
-
-  // Working directory used by the build system. All the relative
-  // paths in compiler_arguments should be relative to this path.
-  // Relative to repository root.
-  string working_dir = 2;
-
-  // Compiler arguments to compile the source file. If multiple variants
-  // of the module being compiled are possible, the query script will choose
-  // one.
-  repeated string compiler_arguments = 3;
-
-  // Any generated files that are used in compiling the file.
-  repeated GeneratedFile generated = 4;
-
-  // Paths to all of the sources, like build files, code generators,
-  // proto files etc. that were used during analysis. Used to figure
-  // out when a set of build artifacts are stale and the query tool
-  // must be re-run.
-  // Relative to repository root.
-  repeated string deps = 5;
-
-  // Represents analysis status for this particular file. e.g. not part
-  // of the build graph.
-  optional Status status = 6;
-}
-
 message IdeAnalysis {
-  // Path relative to repository root, containing all the artifacts
-  // generated by the build system. GeneratedFile.path are always
-  // relative to this directory.
-  string build_artifact_root = 1;
+  // Directory that contains build outputs generated by the build system.
+  // Relative to repository root.
+  string build_out_dir = 1;
+  // Working directory used by the build system.
+  // Relative to repository root.
+  string working_dir = 4;
+  // Only set if the whole query failed.
+  optional AnalysisError error = 5;
+  // List of results, one per queried file.
+  repeated AnalysisResult results = 6;
+  // List of buildable units directly or indirectly referenced by the results.
+  repeated BuildableUnit units = 7;
 
-  repeated SourceFile sources = 2;
-
-  // Status representing overall analysis.
-  // Should fail only when no analysis can be performed.
-  optional Status status = 3;
+  reserved 2, 3;
 }
+
+message AnalysisError {
+  // Human readable error message.
+  string error_message = 1;
+}
+
+message AnalysisResult {
+  // Path to the source file that was queried, relative to repository root.
+  string source_file_path = 1;
+  // Indicates the success/failure for the query.
+  message Status {
+    enum Code {
+      CODE_UNSPECIFIED = 0;
+      CODE_OK = 1;
+      CODE_NOT_FOUND = 2;  // no target or module found for the source file.
+      CODE_BUILD_FAILED = 3;
+    }
+    Code code = 1;
+    // Details about the status, might be displayed to user.
+    optional string status_message = 2;
+  }
+  // Represents the status for this result, e.g. not part of the build graph.
+  Status status = 2;
+  // ID of buildable unit that contains the source file.
+  // The ide_query script can choose the most relevant unit from multiple
+  // options.
+  string unit_id = 3;
+  // Invalidation rule to check if the result is still valid.
+  Invalidation invalidation = 4;
+}
+
+enum Language {
+  LANGUAGE_UNSPECIFIED = 0;
+  LANGUAGE_JAVA = 1;  // also includes Kotlin
+  LANGUAGE_CPP = 2;
+}
+
+message BuildableUnit {
+  // Unique identifier of the buildable unit.
+  //
+  // Examples:
+  //   - Java: module or target name, e.g. "framework-bluetooth" or
+  //   "//third_party/hamcrest:hamcrest_java"
+  //   - C++: source file, e.g. "path/to/file.cc"
+  string id = 1;
+  // Language of the unit.
+  // Required for buildable units directly referenced by the AnalysisResult,
+  // e.g. the unit associated with the compilation stage for the source file.
+  Language language = 2;
+  // Source files that are part of this unit.
+  // Paths are relative to working_dir.
+  repeated string source_file_paths = 3;
+  // Compiler arguments to compile the source files.
+  repeated string compiler_arguments = 4;
+  // List of generated files produced by this unit.
+  repeated GeneratedFile generated_files = 5;
+  // List of other BuildableUnits this unit depends on.
+  repeated string dependency_ids = 6;
+}
+
+// Invalidation rule to check if the result is still valid.
+// This should contain files/dirs that are not directly part of the build graph
+// but still affect the result, e.g. BUILD files, the toolchain directory, or
+// config files.
+message Invalidation {
+  // If any of these files change, the result may become invalid.
+  // Paths are relative to repository root.
+  repeated string file_paths = 1;
+
+  message Wildcard {
+    // Prefix of the file path (e.g. "path/to/")
+    optional string prefix = 1;
+    // Suffix of the file path (e.g. "Android.bp")
+    optional string suffix = 2;
+    // If false, the part of the path between the given `prefix` and `suffix`
+    // should not contain directory separators ('/').
+    optional bool can_cross_folder = 3;
+  }
+  // If any of these rules match a changed file, the result may become invalid.
+  repeated Wildcard wildcards = 4;
+}
\ No newline at end of file
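
Note on the Invalidation.Wildcard semantics above: a rule is essentially a prefix/suffix match with an optional single-folder constraint. The sketch below is illustrative only and not part of this change; the function name and sample paths are made up, and plain values stand in for the generated proto fields.

def wildcard_matches(path, prefix='', suffix='', can_cross_folder=False):
  # An unset prefix or suffix matches anything.
  if not path.startswith(prefix) or not path.endswith(suffix):
    return False
  if can_cross_folder:
    return True
  # Otherwise the part between prefix and suffix must stay within one folder.
  middle = path[len(prefix):len(path) - len(suffix)]
  return '/' not in middle

# A rule {prefix: "path/to/", suffix: "Android.bp"} matches the Android.bp
# directly under path/to/ but not one in a subdirectory:
wildcard_matches('path/to/Android.bp', 'path/to/', 'Android.bp')      # True
wildcard_matches('path/to/sub/Android.bp', 'path/to/', 'Android.bp')  # False
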
diff --git a/tools/ide_query/ide_query_proto/Android.bp b/tools/ide_query/prober_scripts/cpp/Android.bp
similarity index 74%
copy from tools/ide_query/ide_query_proto/Android.bp
copy to tools/ide_query/prober_scripts/cpp/Android.bp
index 70f15cd..5190210 100644
--- a/tools/ide_query/ide_query_proto/Android.bp
+++ b/tools/ide_query/prober_scripts/cpp/Android.bp
@@ -18,16 +18,14 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-cc_library_host_static {
-    name: "ide_query_proto",
+cc_binary {
+    name: "ide_query_proberscript_cc",
     srcs: [
-        "ide_query.proto",
+        "general.cc",
+        "foo.proto",
     ],
+    cflags: ["-Wno-unused-parameter"],
     proto: {
-        export_proto_headers: true,
-        type: "full",
-        canonical_path_from_root: false,
+        type: "lite",
     },
-    compile_multilib: "64",
-    shared_libs: ["libprotobuf-cpp-full"],
 }
diff --git a/tools/ide_query/ide_query_proto/Android.bp b/tools/ide_query/prober_scripts/cpp/foo.proto
similarity index 62%
copy from tools/ide_query/ide_query_proto/Android.bp
copy to tools/ide_query/prober_scripts/cpp/foo.proto
index 70f15cd..5a85147 100644
--- a/tools/ide_query/ide_query_proto/Android.bp
+++ b/tools/ide_query/prober_scripts/cpp/foo.proto
@@ -14,20 +14,12 @@
  * limitations under the License.
  */
 
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-}
+syntax = "proto3";
 
-cc_library_host_static {
-    name: "ide_query_proto",
-    srcs: [
-        "ide_query.proto",
-    ],
-    proto: {
-        export_proto_headers: true,
-        type: "full",
-        canonical_path_from_root: false,
-    },
-    compile_multilib: "64",
-    shared_libs: ["libprotobuf-cpp-full"],
+package ide_query.prober_scripts;
+
+message ProtoMsg {
+  // Test proto field.
+  int64 some_field = 1;
+  //                   ^ some_field
 }
diff --git a/tools/ide_query/prober_scripts/cpp/general.cc b/tools/ide_query/prober_scripts/cpp/general.cc
new file mode 100644
index 0000000..0f0639b
--- /dev/null
+++ b/tools/ide_query/prober_scripts/cpp/general.cc
@@ -0,0 +1,119 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <vector>
+
+#include "foo.pb.h"
+
+using ide_query::prober_scripts::ProtoMsg;
+
+void Foo(int x, double y) {}
+float Foo(float x, float y) { return 0.0f; }
+
+void TestCompletion() {
+  // Test completion on protos and fuzzy matching of completion suggestions.
+
+  ProtoMsg foo;
+
+  // ^
+
+  // step
+  // workspace.waitForReady()
+  // type("f")
+  // completion.trigger()
+  // assert completion.items.filter(label="foo")
+  // delline()
+  // type("foo.sf")
+  // completion.trigger()
+  // assert completion.items.filter(
+  //  label="some_field.*",
+  //  insertText="some_field.*",
+  // )
+  // delline()
+
+  std::vector<int> v;
+
+  // ^
+
+  // step
+  // workspace.waitForReady()
+  // type("v.push")
+  // completion.trigger()
+  // assert completion.items.filter(label="push_back.*")
+  // delline()
+}
+
+void TestNavigation() {
+  std::vector<int> ints;
+  //               |   | ints
+  //      ^
+
+  // step
+  // ; Test navigation to definition on STL types.
+  // workspace.waitForReady()
+  // navigation.trigger()
+  // assert navigation.items.filter(path=".*/vector")
+
+  ints.push_back(0);
+  // ^
+
+  // step
+  // ; Test navigation to definition on local symbols.
+  // workspace.waitForReady()
+  // navigation.trigger()
+  // assert navigation.items.filter(path=".*/general.cc", range=ints)
+
+  ProtoMsg msg;
+  msg.set_some_field(0);
+  //          ^
+
+  // step
+  // ; Test navigation to definition on proto fields. We do not check for a
+  // ; specific target as it can be in generated code.
+  // workspace.waitForReady()
+  // navigation.trigger()
+  // assert navigation.items
+}
+
+void TestParameterInfo() {
+  std::vector<int> v;
+  v.push_back(0);
+  //          ^
+
+  // step
+  // ; Test the signature help for STL functions. We do not check for a specific
+  // ; text as it can be implementation-dependent.
+  // workspace.waitForReady()
+  // paraminfo.trigger()
+  // assert paraminfo.items
+
+  Foo(0, 0.0);
+  //      ^
+
+  // step
+  // ; Test the signature help for the function 'Foo' having two overloads.
+  // workspace.waitForReady()
+  // paraminfo.trigger()
+  // assert paraminfo.items.filter(
+  //  active=true,
+  //  label="Foo\\(int x, double y\\) -> void",
+  //  selection="double y",
+  // )
+  // assert paraminfo.items.filter(
+  //  active=false,
+  //  label="Foo\\(float x, float y\\) -> float",
+  // )
+}
+
+int main() { return 0; }
diff --git a/tools/ide_query/prober_scripts/cpp_suite.textpb b/tools/ide_query/prober_scripts/cpp_suite.textpb
new file mode 100644
index 0000000..8377269
--- /dev/null
+++ b/tools/ide_query/prober_scripts/cpp_suite.textpb
@@ -0,0 +1,5 @@
+tests: {
+  name: "general"
+  scripts: "build/make/tools/ide_query/prober_scripts/cpp/general.cc"
+  scripts: "build/make/tools/ide_query/prober_scripts/cpp/foo.proto"
+}
diff --git a/tools/ide_query/prober_scripts/ide_query.out b/tools/ide_query/prober_scripts/ide_query.out
new file mode 100644
index 0000000..cd7ce6d
--- /dev/null
+++ b/tools/ide_query/prober_scripts/ide_query.out
@@ -0,0 +1,239 @@
+
+out–a
+8build/make/tools/ide_query/prober_scripts/cpp/general.cc8prebuilts/clang/host/linux-x86/clang-r522817/bin/clang++-mthumb-Os-fomit-frame-pointer-mllvm-enable-shrink-wrap=false-O2-Wall-Wextra-Winit-self-Wpointer-arith-Wunguarded-availability-Werror=date-time-Werror=int-conversion-Werror=pragma-pack&-Werror=pragma-pack-suspicious-include-Werror=sizeof-array-div-Werror=string-plus-int'-Werror=unreachable-code-loop-increment"-Wno-error=deprecated-declarations-Wno-c99-designator-Wno-gnu-folding-constant"-Wno-inconsistent-missing-override-Wno-error=reorder-init-list-Wno-reorder-init-list-Wno-sign-compare-Wno-unused	-DANDROID-DNDEBUG-UDEBUG(-D__compiler_offsetof=__builtin_offsetof*-D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__	-faddrsig-fdebug-default-version=5-fcolor-diagnostics-ffp-contract=off-fno-exceptions-fno-strict-aliasing-fmessage-length=0#-fno-relaxed-template-template-args-gsimple-template-names-gz=zstd-no-canonical-prefixes-Wno-error=format"-fdebug-prefix-map=/proc/self/cwd=-ftrivial-auto-var-init=zero-g-ffunction-sections-fdata-sections-fno-short-enums-funwind-tables-fstack-protector-strong-Wa,--noexecstack-D_FORTIFY_SOURCE=2-Wstrict-aliasing=2-Werror=return-type-Werror=non-virtual-dtor-Werror=address-Werror=sequence-point-Werror=format-security-nostdlibinc-fdebug-info-for-profiling-msoft-float-march=armv7-a-mfloat-abi=softfp
+-mfpu=neon/-Ibuild/make/tools/ide_query/prober_scripts/cpp³-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cppÂ…-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto-D__LIBC_API__=10000-D__LIBM_API__=10000-D__LIBDL_API__=10000-Iexternal/protobuf/srcY-Iprebuilts/clang/host/linux-x86/clang-r522817/android_libc++/platform/arm/include/c++/v1=-Iprebuilts/clang/host/linux-x86/clang-r522817/include/c++/v1 -Ibionic/libc/async_safe/include-Isystem/logging/liblog/include'-Ibionic/libc/system_properties/include<-Isystem/core/property_service/libpropertyinfoparser/include-isystembionic/libc/include-isystembionic/libc/kernel/uapi/asm-arm-isystembionic/libc/kernel/uapi-isystembionic/libc/kernel/android/scsi-isystembionic/libc/kernel/android/uapi-targetarmv7a-linux-androideabi10000-DANDROID_STRICT-fPIE-Werror-Wno-unused-parameter-DGOOGLE_PROTOBUF_NO_RTTI-Wimplicit-fallthrough*-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS-Wno-gnu-include-next-fvisibility-inlines-hidden-mllvm-enable-shrink-wrap=false-std=gnu++20	-fno-rtti-Isystem/core/include-Isystem/logging/liblog/include-Isystem/media/audio/include-Ihardware/libhardware/include%-Ihardware/libhardware_legacy/include-Ihardware/ril/include-Iframeworks/native/include"-Iframeworks/native/opengl/include-Iframeworks/av/include-Werror=bool-operation -Werror=format-insufficient-args%-Werror=implicit-int-float-conversion-Werror=int-in-bool-context-Werror=int-to-pointer-cast-Werror=pointer-to-int-cast-Werror=xor-used-as-pow-Wno-void-pointer-to-enum-cast-Wno-void-pointer-to-int-cast-Wno-pointer-to-int-cast-Werror=fortify-source-Wno-unused-variable-Wno-missing-field-initializers-Wno-packed-non-pod-Werror=address-of-temporary+-Werror=incompatible-function-pointer-types-Werror=null-dereference-Werror=return-type"-Wno-tautological-constant-compare$-Wno-tautological-type-limit-compare"-Wno-implicit-int-float-conversion!-Wno-tautological-overlap-compare-Wno-deprecated-copy-Wno-range-loop-construct"-Wno-zero-as-null-pointer-constant)-Wno-deprecated-anon-enum-enum-conversion$-Wno-deprecated-enum-enum-conversion-Wno-pessimizing-move-Wno-non-c-typedef-for-linkage-Wno-align-mismatch"-Wno-error=unused-but-set-variable#-Wno-error=unused-but-set-parameter-Wno-error=deprecated-builtins-Wno-error=deprecated2-Wno-error=single-bit-bitfield-constant-conversion$-Wno-error=enum-constexpr-conversion-Wno-error=invalid-offsetof&-Wno-deprecated-dynamic-exception-spec8build/make/tools/ide_query/prober_scripts/cpp/general.cc"Õ?
+¶soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cpp/foo.pb.h™>// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: build/make/tools/ide_query/prober_scripts/cpp/foo.proto
+
+#ifndef GOOGLE_PROTOBUF_INCLUDED_build_2fmake_2ftools_2fide_5fquery_2fprober_5fscripts_2fcpp_2ffoo_2eproto
+#define GOOGLE_PROTOBUF_INCLUDED_build_2fmake_2ftools_2fide_5fquery_2fprober_5fscripts_2fcpp_2ffoo_2eproto
+
+#include <cstdint>
+#include <limits>
+#include <string>
+
+#include <google/protobuf/port_def.inc>
+#if PROTOBUF_VERSION < 3021000
+#error This file was generated by a newer version of protoc which is
+#error incompatible with your Protocol Buffer headers. Please update
+#error your headers.
+#endif
+#if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
+#error This file was generated by an older version of protoc which is
+#error incompatible with your Protocol Buffer headers. Please
+#error regenerate this file with a newer version of protoc.
+#endif
+
+#include <google/protobuf/port_undef.inc>
+#include <google/protobuf/io/coded_stream.h>
+#include <google/protobuf/arena.h>
+#include <google/protobuf/arenastring.h>
+#include <google/protobuf/generated_message_util.h>
+#include <google/protobuf/metadata_lite.h>
+#include <google/protobuf/message_lite.h>
+#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
+#include <google/protobuf/extension_set.h>  // IWYU pragma: export
+// @@protoc_insertion_point(includes)
+#include <google/protobuf/port_def.inc>
+#define PROTOBUF_INTERNAL_EXPORT_build_2fmake_2ftools_2fide_5fquery_2fprober_5fscripts_2fcpp_2ffoo_2eproto
+PROTOBUF_NAMESPACE_OPEN
+namespace internal {
+class AnyMetadata;
+}  // namespace internal
+PROTOBUF_NAMESPACE_CLOSE
+
+// Internal implementation detail -- do not use these members.
+struct TableStruct_build_2fmake_2ftools_2fide_5fquery_2fprober_5fscripts_2fcpp_2ffoo_2eproto {
+  static const ::uint32_t offsets[];
+};
+namespace ide_query {
+namespace prober_scripts {
+class ProtoMsg;
+struct ProtoMsgDefaultTypeInternal;
+extern ProtoMsgDefaultTypeInternal _ProtoMsg_default_instance_;
+}  // namespace prober_scripts
+}  // namespace ide_query
+PROTOBUF_NAMESPACE_OPEN
+template<> ::ide_query::prober_scripts::ProtoMsg* Arena::CreateMaybeMessage<::ide_query::prober_scripts::ProtoMsg>(Arena*);
+PROTOBUF_NAMESPACE_CLOSE
+namespace ide_query {
+namespace prober_scripts {
+
+// ===================================================================
+
+class ProtoMsg final :
+    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:ide_query.prober_scripts.ProtoMsg) */ {
+ public:
+  inline ProtoMsg() : ProtoMsg(nullptr) {}
+  ~ProtoMsg() override;
+  explicit PROTOBUF_CONSTEXPR ProtoMsg(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
+
+  ProtoMsg(const ProtoMsg& from);
+  ProtoMsg(ProtoMsg&& from) noexcept
+    : ProtoMsg() {
+    *this = ::std::move(from);
+  }
+
+  inline ProtoMsg& operator=(const ProtoMsg& from) {
+    if (this == &from) return *this;
+    CopyFrom(from);
+    return *this;
+  }
+  inline ProtoMsg& operator=(ProtoMsg&& from) noexcept {
+    if (this == &from) return *this;
+    if (GetOwningArena() == from.GetOwningArena()
+  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
+        && GetOwningArena() != nullptr
+  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
+    ) {
+      InternalSwap(&from);
+    } else {
+      CopyFrom(from);
+    }
+    return *this;
+  }
+
+  static const ProtoMsg& default_instance() {
+    return *internal_default_instance();
+  }
+  static inline const ProtoMsg* internal_default_instance() {
+    return reinterpret_cast<const ProtoMsg*>(
+               &_ProtoMsg_default_instance_);
+  }
+  static constexpr int kIndexInFileMessages =
+    0;
+
+  friend void swap(ProtoMsg& a, ProtoMsg& b) {
+    a.Swap(&b);
+  }
+  inline void Swap(ProtoMsg* other) {
+    if (other == this) return;
+  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
+    if (GetOwningArena() != nullptr &&
+        GetOwningArena() == other->GetOwningArena()) {
+   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
+    if (GetOwningArena() == other->GetOwningArena()) {
+  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
+      InternalSwap(other);
+    } else {
+      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
+    }
+  }
+  void UnsafeArenaSwap(ProtoMsg* other) {
+    if (other == this) return;
+    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
+    InternalSwap(other);
+  }
+
+  // implements Message ----------------------------------------------
+
+  ProtoMsg* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
+    return CreateMaybeMessage<ProtoMsg>(arena);
+  }
+  ProtoMsg* New() const {
+    return New(nullptr);
+  }
+  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
+  void CopyFrom(const ProtoMsg& from);
+  void MergeFrom(const ProtoMsg& from);
+  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
+  bool IsInitialized() const final;
+
+  size_t ByteSizeLong() const final;
+  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
+  ::uint8_t* _InternalSerialize(
+      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
+  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
+
+  private:
+  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
+  void SharedDtor();
+  void SetCachedSize(int size) const;
+  void InternalSwap(ProtoMsg* other);
+
+  private:
+  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
+  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
+    return "ide_query.prober_scripts.ProtoMsg";
+  }
+  protected:
+  explicit ProtoMsg(::PROTOBUF_NAMESPACE_ID::Arena* arena,
+                       bool is_message_owned = false);
+  public:
+
+  std::string GetTypeName() const final;
+
+  // nested types ----------------------------------------------------
+
+  // accessors -------------------------------------------------------
+
+  enum : int {
+    kSomeFieldFieldNumber = 1,
+  };
+  // int64 some_field = 1;
+  void clear_some_field();
+  ::int64_t some_field() const;
+  void set_some_field(::int64_t value);
+  private:
+  ::int64_t _internal_some_field() const;
+  void _internal_set_some_field(::int64_t value);
+  public:
+
+  // @@protoc_insertion_point(class_scope:ide_query.prober_scripts.ProtoMsg)
+ private:
+  class _Internal;
+
+  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
+  typedef void InternalArenaConstructable_;
+  typedef void DestructorSkippable_;
+  struct Impl_ {
+    ::int64_t some_field_;
+    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
+  };
+  union { Impl_ _impl_; };
+  friend struct ::TableStruct_build_2fmake_2ftools_2fide_5fquery_2fprober_5fscripts_2fcpp_2ffoo_2eproto;
+};
+// ===================================================================
+
+
+// ===================================================================
+
+#ifdef __GNUC__
+  #pragma GCC diagnostic push
+  #pragma GCC diagnostic ignored "-Wstrict-aliasing"
+#endif  // __GNUC__
+// ProtoMsg
+
+// int64 some_field = 1;
+inline void ProtoMsg::clear_some_field() {
+  _impl_.some_field_ = ::int64_t{0};
+}
+inline ::int64_t ProtoMsg::_internal_some_field() const {
+  return _impl_.some_field_;
+}
+inline ::int64_t ProtoMsg::some_field() const {
+  // @@protoc_insertion_point(field_get:ide_query.prober_scripts.ProtoMsg.some_field)
+  return _internal_some_field();
+}
+inline void ProtoMsg::_internal_set_some_field(::int64_t value) {
+  
+  _impl_.some_field_ = value;
+}
+inline void ProtoMsg::set_some_field(::int64_t value) {
+  _internal_set_some_field(value);
+  // @@protoc_insertion_point(field_set:ide_query.prober_scripts.ProtoMsg.some_field)
+}
+
+#ifdef __GNUC__
+  #pragma GCC diagnostic pop
+#endif  // __GNUC__
+
+// @@protoc_insertion_point(namespace_scope)
+
+}  // namespace prober_scripts
+}  // namespace ide_query
+
+// @@protoc_insertion_point(global_scope)
+
+#include <google/protobuf/port_undef.inc>
+#endif  // GOOGLE_PROTOBUF_INCLUDED_GOOGLE_PROTOBUF_INCLUDED_build_2fmake_2ftools_2fide_5fquery_2fprober_5fscripts_2fcpp_2ffoo_2eproto
diff --git a/tools/ide_query/prober_scripts/jvm/Foo.java b/tools/ide_query/prober_scripts/jvm/Foo.java
new file mode 100644
index 0000000..a043f72
--- /dev/null
+++ b/tools/ide_query/prober_scripts/jvm/Foo.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package jvm;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+
+/** Foo class. */
+public final class Foo {
+
+  void testCompletion() {
+    ArrayList<Integer> list = new ArrayList<>();
+    System.out.println(list);
+
+    // ^
+
+    // step
+    // ; Test completion on the standard types.
+    // type("list.")
+    // completion.trigger()
+    // assert completion.items.filter(label="add.*")
+  }
+}
diff --git a/tools/ide_query/prober_scripts/jvm/suite.textpb b/tools/ide_query/prober_scripts/jvm/suite.textpb
new file mode 100644
index 0000000..460e08c
--- /dev/null
+++ b/tools/ide_query/prober_scripts/jvm/suite.textpb
@@ -0,0 +1,4 @@
+tests: {
+  name: "general"
+  scripts: "build/make/tools/ide_query/prober_scripts/jvm/Foo.java"
+}
diff --git a/tools/ide_query/prober_scripts/regen.sh b/tools/ide_query/prober_scripts/regen.sh
new file mode 100755
index 0000000..2edfe53
--- /dev/null
+++ b/tools/ide_query/prober_scripts/regen.sh
@@ -0,0 +1,33 @@
+#!/bin/bash -e
+
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script is used to generate the ide_query.out file.
+#
+# The ide_query.out file is a pre-computed result of running ide_query.sh
+# on a set of files. This allows the prober to run its tests without running
+# ide_query.sh. The prober doesn't check out the full source code, so it
+# can't run ide_query.sh itself.
+
+cd $(dirname $BASH_SOURCE)
+source $(pwd)/../../../shell_utils.sh
+require_top
+
+files_to_build=(
+  build/make/tools/ide_query/prober_scripts/cpp/general.cc
+)
+
+cd ${TOP}
+build/make/tools/ide_query/ide_query.sh --lunch_target=aosp_arm-trunk_staging-eng ${files_to_build[@]} > build/make/tools/ide_query/prober_scripts/ide_query.out
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 9b134f2..cf7e2ae 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -96,6 +96,7 @@
     ],
     libs: [
         "apex_manifest",
+        "releasetools_apex_utils",
         "releasetools_common",
     ],
     required: [
@@ -107,7 +108,7 @@
 python_library_host {
     name: "ota_metadata_proto",
     srcs: [
-       "ota_metadata.proto",
+        "ota_metadata.proto",
     ],
     proto: {
         canonical_path_from_root: false,
@@ -117,7 +118,7 @@
 cc_library_static {
     name: "ota_metadata_proto_cc",
     srcs: [
-       "ota_metadata.proto",
+        "ota_metadata.proto",
     ],
     host_supported: true,
     recovery_available: true,
@@ -144,7 +145,7 @@
             static_libs: ["libprotobuf-java-nano"],
         },
     },
-    visibility: ["//frameworks/base:__subpackages__"]
+    visibility: ["//frameworks/base:__subpackages__"],
 }
 
 python_defaults {
@@ -436,7 +437,7 @@
     name: "check_target_files_vintf",
     defaults: [
         "releasetools_binary_defaults",
-        "releasetools_check_target_files_vintf_defaults"
+        "releasetools_check_target_files_vintf_defaults",
     ],
 }
 
@@ -546,13 +547,14 @@
     defaults: ["releasetools_binary_defaults"],
     srcs: [
         "sign_target_files_apks.py",
-        "payload_signer.py",
-        "ota_signing_utils.py",
+        "ota_from_raw_img.py",
     ],
     libs: [
         "releasetools_add_img_to_target_files",
         "releasetools_apex_utils",
         "releasetools_common",
+        "ota_metadata_proto",
+        "ota_utils_lib",
     ],
 }
 
@@ -632,7 +634,7 @@
     data: [
         "testdata/**/*",
         ":com.android.apex.compressed.v1",
-        ":com.android.apex.vendor.foo.with_vintf"
+        ":com.android.apex.vendor.foo.with_vintf",
     ],
     target: {
         darwin: {
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3abef3b..54df955 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -36,6 +36,8 @@
 
 APEX_PUBKEY = 'apex_pubkey'
 
+# Partitions supporting APEXes
+PARTITIONS = ['system', 'system_ext', 'product', 'vendor', 'odm']
 
 class ApexInfoError(Exception):
   """An Exception raised during Apex Information command."""
@@ -550,7 +552,7 @@
   if not isinstance(input_file, str):
     raise RuntimeError("must pass filepath to target-files zip or directory")
   apex_infos = []
-  for partition in ['system', 'system_ext', 'product', 'vendor']:
+  for partition in PARTITIONS:
     apex_infos.extend(GetApexInfoForPartition(input_file, partition))
   return apex_infos
 
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index b8dcd84..dc123ef 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -30,6 +30,7 @@
 import sys
 import zipfile
 
+import apex_utils
 import common
 from apex_manifest import ParseApexManifest
 
@@ -229,7 +230,7 @@
   apex_host = os.path.join(OPTIONS.search_path, 'bin', 'apexd_host')
   cmd = [apex_host, '--tool_path', OPTIONS.search_path]
   cmd += ['--apex_path', dirmap['/apex']]
-  for p in ['system', 'system_ext', 'product', 'vendor']:
+  for p in apex_utils.PARTITIONS:
     if '/' + p in dirmap:
       cmd += ['--' + p + '_path', dirmap['/' + p]]
   common.RunAndCheckOutput(cmd)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index d91a713..edd4366 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -898,7 +898,7 @@
       if key.endswith("selinux_fc"):
         fc_basename = os.path.basename(d[key])
         fc_config = os.path.join(input_file, "META", fc_basename)
-        assert os.path.exists(fc_config)
+        assert os.path.exists(fc_config), "{} does not exist".format(fc_config)
 
         d[key] = fc_config
 
@@ -907,9 +907,10 @@
     d["root_fs_config"] = os.path.join(
         input_file, "META", "root_filesystem_config.txt")
 
+    partitions = ["system", "vendor", "system_ext", "product", "odm",
+                  "vendor_dlkm", "odm_dlkm", "system_dlkm"]
     # Redirect {partition}_base_fs_file for each of the named partitions.
-    for part_name in ["system", "vendor", "system_ext", "product", "odm",
-                      "vendor_dlkm", "odm_dlkm", "system_dlkm"]:
+    for part_name in partitions:
       key_name = part_name + "_base_fs_file"
       if key_name not in d:
         continue
@@ -922,6 +923,25 @@
             "Failed to find %s base fs file: %s", part_name, base_fs_file)
         del d[key_name]
 
+    # Redirecting helper for optional properties like erofs_compress_hints
+    def redirect_file(prop, filename):
+      if prop not in d:
+        return
+      config_file = os.path.join(input_file, "META/" + filename)
+      if os.path.exists(config_file):
+        d[prop] = config_file
+      else:
+        logger.warning(
+            "Failed to find %s fro %s", filename, prop)
+        del d[prop]
+
+    # Redirect erofs_[default_]compress_hints files
+    redirect_file("erofs_default_compress_hints",
+                  "erofs_default_compress_hints.txt")
+    for part in partitions:
+      redirect_file(part + "_erofs_compress_hints",
+                    part + "_erofs_compress_hints.txt")
+
   def makeint(key):
     if key in d:
       d[key] = int(d[key], 0)
@@ -1784,12 +1804,7 @@
   if has_ramdisk:
     cmd.extend(["--ramdisk", ramdisk_img.name])
 
-  img_unsigned = None
-  if info_dict.get("vboot"):
-    img_unsigned = tempfile.NamedTemporaryFile()
-    cmd.extend(["--output", img_unsigned.name])
-  else:
-    cmd.extend(["--output", img.name])
+  cmd.extend(["--output", img.name])
 
   if partition_name == "recovery":
     if info_dict.get("include_recovery_dtbo") == "true":
@@ -1801,28 +1816,6 @@
 
   RunAndCheckOutput(cmd)
 
-  # Sign the image if vboot is non-empty.
-  if info_dict.get("vboot"):
-    path = "/" + partition_name
-    img_keyblock = tempfile.NamedTemporaryFile()
-    # We have switched from the prebuilt futility binary to using the tool
-    # (futility-host) built from the source. Override the setting in the old
-    # TF.zip.
-    futility = info_dict["futility"]
-    if futility.startswith("prebuilts/"):
-      futility = "futility-host"
-    cmd = [info_dict["vboot_signer_cmd"], futility,
-           img_unsigned.name, info_dict["vboot_key"] + ".vbpubk",
-           info_dict["vboot_key"] + ".vbprivk",
-           info_dict["vboot_subkey"] + ".vbprivk",
-           img_keyblock.name,
-           img.name]
-    RunAndCheckOutput(cmd)
-
-    # Clean up the temp files.
-    img_unsigned.close()
-    img_keyblock.close()
-
   # AVB: if enabled, calculate and add hash to boot.img or recovery.img.
   if info_dict.get("avb_enable") == "true":
     avbtool = info_dict["avb_avbtool"]
@@ -2461,12 +2454,23 @@
         "Failed to obtain minSdkVersion for {}: aapt2 return code {}:\n{}\n{}".format(
             apk_name, proc.returncode, stdoutdata, stderrdata))
 
+  is_split_apk = False
   for line in stdoutdata.split("\n"):
+    # See b/353837347 , split APKs do not have sdk version defined,
+    # so we default to 21 as split APKs are only supported since SDK
+    # 21.
+    if (re.search(r"split=[\"'].*[\"']", line)):
+      is_split_apk = True
     # Due to ag/24161708, looking for lines such as minSdkVersion:'23',minSdkVersion:'M'
     # or sdkVersion:'23', sdkVersion:'M'.
     m = re.match(r'(?:minSdkVersion|sdkVersion):\'([^\']*)\'', line)
     if m:
       return m.group(1)
+  if is_split_apk:
+    logger.info("%s is a split APK, it does not have minimum SDK version"
+                " defined. Defaulting to 21 because split APK isn't supported"
+                " before that.", apk_name)
+    return 21
   raise ExternalError("No minSdkVersion returned by aapt2 for apk: {}".format(apk_name))
 
 
@@ -2815,6 +2819,7 @@
             break
         elif handler(o, a):
           success = True
+          break
       if not success:
         raise ValueError("unknown option \"%s\"" % (o,))
 
@@ -3003,7 +3008,7 @@
     zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
-def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
+def ZipWriteStr(zip_file: zipfile.ZipFile, zinfo_or_arcname, data, perms=None,
                 compress_type=None):
   """Wrap zipfile.writestr() function to work around the zip64 limit.
 
diff --git a/tools/releasetools/ota_from_raw_img.py b/tools/releasetools/ota_from_raw_img.py
index c186940..03b44f1 100644
--- a/tools/releasetools/ota_from_raw_img.py
+++ b/tools/releasetools/ota_from_raw_img.py
@@ -54,7 +54,7 @@
       prog=argv[0], description="Given a series of .img files, produces a full OTA package that installs those images")
   parser.add_argument("images", nargs="+", type=str,
                       help="List of images to generate OTA")
-  parser.add_argument("--partition_names", nargs='+', type=str,
+  parser.add_argument("--partition_names", nargs='?', type=str,
                       help="Partition names to install the images, default to basename of the image(no file name extension)")
   parser.add_argument('--output', type=str,
                       help='Paths to output merged ota', required=True)
@@ -74,18 +74,20 @@
       old_imgs[i], args.images[i] = img.split(":", maxsplit=1)
 
   if not args.partition_names:
-    args.partition_names = [os.path.os.path.splitext(os.path.basename(path))[
+    args.partition_names = [os.path.splitext(os.path.basename(path))[
         0] for path in args.images]
+  else:
+    args.partition_names = args.partition_names.split(",")
   with tempfile.NamedTemporaryFile() as unsigned_payload, tempfile.NamedTemporaryFile() as dynamic_partition_info_file:
     dynamic_partition_info_file.writelines(
         [b"virtual_ab=true\n", b"super_partition_groups=\n"])
     dynamic_partition_info_file.flush()
     cmd = [ResolveBinaryPath("delta_generator", args.search_path)]
-    cmd.append("--partition_names=" + ",".join(args.partition_names))
+    cmd.append("--partition_names=" + ":".join(args.partition_names))
     cmd.append("--dynamic_partition_info_file=" +
                dynamic_partition_info_file.name)
-    cmd.append("--old_partitions=" + ",".join(old_imgs))
-    cmd.append("--new_partitions=" + ",".join(args.images))
+    cmd.append("--old_partitions=" + ":".join(old_imgs))
+    cmd.append("--new_partitions=" + ":".join(args.images))
     cmd.append("--out_file=" + unsigned_payload.name)
     cmd.append("--is_partial_update")
     if args.max_timestamp:
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index b8f848f..b485440 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -189,6 +189,8 @@
 from xml.etree import ElementTree
 
 import add_img_to_target_files
+import ota_from_raw_img
+import ota_utils
 import apex_utils
 import common
 import payload_signer
@@ -579,7 +581,61 @@
         filename.endswith("/prop.default")
 
 
-def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info,
+def RegenerateKernelPartitions(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info):
+  """Re-generate boot and dtbo partitions using new signing configuration"""
+  if OPTIONS.input_tmp is None:
+    OPTIONS.input_tmp = common.UnzipTemp(input_tf_zip.filename, [
+                                "*/boot.img", "*/dtbo.img"])
+  else:
+    common.UnzipToDir(input_tf_zip, OPTIONS.input_tmp, [
+                                "*/boot.img", "*/dtbo.img"])
+  unzip_dir = OPTIONS.input_tmp
+  image_dir = os.path.join(unzip_dir, "IMAGES")
+  shutil.rmtree(image_dir)
+  os.makedirs(image_dir, exist_ok=True)
+
+  boot_image = common.GetBootableImage(
+      "IMAGES/boot.img", "boot.img", unzip_dir, "BOOT", misc_info)
+  if boot_image:
+    boot_image.WriteToDir(unzip_dir)
+    boot_image = os.path.join(unzip_dir, boot_image.name)
+    common.ZipWrite(output_tf_zip, boot_image, "IMAGES/boot.img",
+                    compress_type=zipfile.ZIP_STORED)
+  add_img_to_target_files.AddDtbo(output_tf_zip)
+  return unzip_dir
+
+
+def RegenerateBootOTA(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info, filename, input_ota):
+  if filename not in ["VENDOR/boot_otas/boot_ota_4k.zip", "SYSTEM/boot_otas/boot_ota_4k.zip"]:
+    # We only need to re-generate the 4K boot OTA; other OTA packages are
+    # simply copied as is.
+    with input_tf_zip.open(filename, "r") as in_fp:
+      shutil.copyfileobj(in_fp, input_ota)
+      input_ota.flush()
+    return
+  timestamp = misc_info["build.prop"].GetProp(
+      "ro.system.build.date.utc")
+  unzip_dir = RegenerateKernelPartitions(
+      input_tf_zip, output_tf_zip, misc_info)
+  signed_boot_image = os.path.join(unzip_dir, "IMAGES/boot.img")
+  signed_dtbo_image = os.path.join(unzip_dir, "IMAGES/dtbo.img")
+
+  if not os.path.exists(signed_boot_image):
+    logger.warn("Need to re-generate boot OTA {} but failed to get signed boot image. 16K dev option will be impacted, after rolling back to 4K user would need to sideload/flash their device to continue receiving OTAs.")
+    return
+  logger.info(
+      "Re-generating boot OTA {} with timestamp {}".format(filename, timestamp))
+  args = ["ota_from_raw_img", "--package_key", OPTIONS.package_key,
+          "--max_timestamp", timestamp, "--output", input_ota.name]
+  if os.path.exists(signed_dtbo_image):
+    args.extend(["--partition_name", "boot,dtbo",
+                signed_boot_image, signed_dtbo_image])
+  else:
+    args.extend(["--partition_name", "boot", signed_boot_image])
+  ota_from_raw_img.main(args)
+
+
+def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
                        compressed_extension):
@@ -593,6 +649,14 @@
     # Sets this to zero for targets without APK files.
     maxsize = 0
 
+  # Replace the AVB signing keys, if any.
+  ReplaceAvbSigningKeys(misc_info)
+  OPTIONS.info_dict = misc_info
+
+  # Rewrite the props in AVB signing args.
+  if misc_info.get('avb_enable') == 'true':
+    RewriteAvbProps(misc_info)
+
   for info in input_tf_zip.infolist():
     filename = info.filename
     if filename.startswith("IMAGES/"):
@@ -670,9 +734,9 @@
     elif filename.endswith(".zip") and IsEntryOtaPackage(input_tf_zip, filename):
       logger.info("Re-signing OTA package {}".format(filename))
       with tempfile.NamedTemporaryFile() as input_ota, tempfile.NamedTemporaryFile() as output_ota:
-        with input_tf_zip.open(filename, "r") as in_fp:
-          shutil.copyfileobj(in_fp, input_ota)
-          input_ota.flush()
+        RegenerateBootOTA(input_tf_zip, output_tf_zip,
+                          misc_info, filename, input_ota)
+
         SignOtaPackage(input_ota.name, output_ota.name)
         common.ZipWrite(output_tf_zip, output_ota.name, filename,
                         compress_type=zipfile.ZIP_STORED)
@@ -811,17 +875,18 @@
         common.ZipWrite(output_tf_zip, image.name, filename)
     # A non-APK file; copy it verbatim.
     else:
-      common.ZipWriteStr(output_tf_zip, out_info, data)
+      try:
+        entry = output_tf_zip.getinfo(filename)
+        if output_tf_zip.read(entry) != data:
+        logger.warning(
+            "Output zip contains duplicate entries for %s with different contents", filename)
+        continue
+      except KeyError:
+        common.ZipWriteStr(output_tf_zip, out_info, data)
 
   if OPTIONS.replace_ota_keys:
     ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
 
-  # Replace the AVB signing keys, if any.
-  ReplaceAvbSigningKeys(misc_info)
-
-  # Rewrite the props in AVB signing args.
-  if misc_info.get('avb_enable') == 'true':
-    RewriteAvbProps(misc_info)
 
   # Write back misc_info with the latest values.
   ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
index 2b2b573..6901b06 100644
--- a/tools/sbom/Android.bp
+++ b/tools/sbom/Android.bp
@@ -33,6 +33,23 @@
     ],
 }
 
+python_binary_host {
+    name: "gen_sbom",
+    srcs: [
+        "gen_sbom.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "metadata_file_proto_py",
+        "libprotobuf-python",
+        "sbom_lib",
+    ],
+}
+
 python_library_host {
     name: "sbom_lib",
     srcs: [
@@ -91,4 +108,4 @@
     libs: [
         "sbom_lib",
     ],
-}
\ No newline at end of file
+}
diff --git a/tools/sbom/gen_sbom.py b/tools/sbom/gen_sbom.py
new file mode 100644
index 0000000..a203258
--- /dev/null
+++ b/tools/sbom/gen_sbom.py
@@ -0,0 +1,926 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+  gen_sbom.py --output_file out/soong/sbom/aosp_cf_x86_64_phone/sbom.spdx \
+              --metadata out/soong/metadata/aosp_cf_x86_64_phone/metadata.db \
+              --product_out out/target/product/vsoc_x86_64 \
+              --soong_out out/soong \
+              --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+              --product_mfr=Google
+"""
+
+import argparse
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import os
+import pathlib
+import queue
+import metadata_file_pb2
+import sbom_data
+import sbom_writers
+import sqlite3
+import sys
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-existent installed files:'
+ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP = 'No module found for static dependency files:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+SOONG_PREBUILT_MODULE_TYPES = [
+    'android_app_import',
+    'android_library_import',
+    'cc_prebuilt_binary',
+    'cc_prebuilt_library',
+    'cc_prebuilt_library_headers',
+    'cc_prebuilt_library_shared',
+    'cc_prebuilt_library_static',
+    'cc_prebuilt_object',
+    'dex_import',
+    'java_import',
+    'java_sdk_library_import',
+    'java_system_modules_import',
+    'libclang_rt_prebuilt_library_static',
+    'libclang_rt_prebuilt_library_shared',
+    'llvm_prebuilt_library_static',
+    'ndk_prebuilt_object',
+    'ndk_prebuilt_shared_stl',
+    'ndk_prebuilt_static_stl',
+    'prebuilt_apex',
+    'prebuilt_bootclasspath_fragment',
+    'prebuilt_dsp',
+    'prebuilt_firmware',
+    'prebuilt_kernel_modules',
+    'prebuilt_rfsa',
+    'prebuilt_root',
+    'rust_prebuilt_dylib',
+    'rust_prebuilt_library',
+    'rust_prebuilt_rlib',
+    'vndk_prebuilt_shared',
+]
+
+THIRD_PARTY_IDENTIFIER_TYPES = [
+    # Types defined in metadata_file.proto
+    'Git',
+    'SVN',
+    'Hg',
+    'Darcs',
+    'VCS',
+    'Archive',
+    'PrebuiltByAlphabet',
+    'LocalSource',
+    'Other',
+    # OSV ecosystems defined at https://ossf.github.io/osv-schema/#affectedpackage-field.
+    'Go',
+    'npm',
+    'OSS-Fuzz',
+    'PyPI',
+    'RubyGems',
+    'crates.io',
+    'Hackage',
+    'GHC',
+    'Packagist',
+    'Maven',
+    'NuGet',
+    'Linux',
+    'Debian',
+    'Alpine',
+    'Hex',
+    'Android',
+    'GitHub Actions',
+    'Pub',
+    'ConanCenter',
+    'Rocky Linux',
+    'AlmaLinux',
+    'Bitnami',
+    'Photon OS',
+    'CRAN',
+    'Bioconductor',
+    'SwiftURL'
+]
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
+  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+  parser.add_argument('--metadata', required=True, help='The metadata DB file path.')
+  parser.add_argument('--product_out', required=True, help='The path of PRODUCT_OUT, e.g. out/target/product/vsoc_x86_64.')
+  parser.add_argument('--soong_out', required=True, help='The path of Soong output directory, e.g. out/soong')
+  parser.add_argument('--build_version', required=True, help='The build version.')
+  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+  parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def new_package_id(package_name, type):
+  return f'SPDXRef-{type}-{sbom_data.encode_for_spdxid(package_name)}'
+
+
+def new_file_id(file_path):
+  return f'SPDXRef-{sbom_data.encode_for_spdxid(file_path)}'
+
+
+def new_license_id(license_name):
+  return f'LicenseRef-{sbom_data.encode_for_spdxid(license_name)}'
+
+
+def checksum(file_path):
+  h = hashlib.sha1()
+  if os.path.islink(file_path):
+    h.update(os.readlink(file_path).encode('utf-8'))
+  else:
+    with open(file_path, 'rb') as f:
+      h.update(f.read())
+  return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+  return (file_metadata['soong_module_type'] and
+          file_metadata['soong_module_type'] in SOONG_PREBUILT_MODULE_TYPES)
+
+
+def is_source_package(file_metadata):
+  module_path = file_metadata['module_path']
+  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+  module_path = file_metadata['module_path']
+  if module_path:
+    return (module_path.startswith('prebuilts/') or
+            is_soong_prebuilt_module(file_metadata) or
+            file_metadata['is_prebuilt_make_module'])
+
+  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+    return True
+
+  return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+  """Return source package info exists in its METADATA file, currently including name, security tag
+  and external SBOM reference.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  if not metadata_file_path:
+    return file_metadata['module_path'], []
+
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  external_refs = []
+  for tag in metadata_proto.third_party.security.tag:
+    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+      external_refs.append(
+          sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                       type=sbom_data.PackageExternalRefType.cpe23Type,
+                                       locator=tag.removeprefix(NVD_CPE23)))
+    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+      external_refs.append(
+          sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                       type=sbom_data.PackageExternalRefType.cpe22Type,
+                                       locator=tag.removeprefix(NVD_CPE23)))
+
+  if metadata_proto.name:
+    return metadata_proto.name, external_refs
+  else:
+    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+  """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
+  module path or kernel module's source path if the installed file is a kernel module.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  name = None
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.name:
+      name = metadata_proto.name
+    else:
+      name = metadata_file_path
+  elif file_metadata['module_path']:
+    name = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+    name = os.path.dirname(src_path)
+
+  return name.removeprefix('prebuilts/').replace('/', '-')
+
+
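A tiny worked example of the name derivation above; the module path is made up for illustration.

module_path = 'prebuilts/module_sdk/art'
name = module_path.removeprefix('prebuilts/').replace('/', '-')
assert name == 'module_sdk-art'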
+def get_metadata_file_path(file_metadata):
+  """Search for METADATA file of a package and return its path."""
+  metadata_path = ''
+  if file_metadata['module_path']:
+    metadata_path = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+    metadata_path = os.path.dirname(metadata_path)
+
+  return metadata_path
+
+
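A small self-contained sketch of the upward search above, assuming a made-up layout in which only external/foo carries a METADATA file.

import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
  os.makedirs(os.path.join(tmp, 'external/foo/bar/baz'))
  open(os.path.join(tmp, 'external/foo/METADATA'), 'w').close()

  # Same loop as get_metadata_file_path: walk up until a METADATA file is found.
  metadata_path = os.path.join(tmp, 'external/foo/bar/baz')
  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
    metadata_path = os.path.dirname(metadata_path)
  assert metadata_path == os.path.join(tmp, 'external/foo')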
+def get_package_version(metadata_file_path):
+  """Return a package's version in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+  """Return a package's homepage URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.homepage:
+    return metadata_proto.third_party.homepage
+  for url in metadata_proto.third_party.url:
+    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+      return url.value
+
+  return None
+
+
+def get_package_download_location(metadata_file_path):
+  """Return a package's code repository URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.url:
+    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+      return urls[0].value
+    elif len(urls) > 1:
+      return urls[1].value
+
+  return None
+
+
+def get_license_text(license_files):
+  license_text = ''
+  for license_file in license_files:
+    if args.debug:
+      license_text += '#### Content from ' + license_file + '\n'
+    else:
+      license_text += pathlib.Path(license_file).read_text(errors='replace') + '\n\n'
+  return license_text
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+  """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
+  package, a UPSTREAM package and an external SBOM document reference if sbom_ref defined in its
+  METADATA file.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  external_doc_ref = None
+  packages = []
+  relationships = []
+  licenses = []
+
+  # Info from METADATA file
+  homepage = get_package_homepage(metadata_file_path)
+  version = get_package_version(metadata_file_path)
+  download_location = get_package_download_location(metadata_file_path)
+
+  lics = db.get_package_licenses(installed_file_metadata['module_path'])
+  if not lics:
+    lics = db.get_package_licenses(metadata_file_path)
+
+  if lics:
+    for license_name, license_files in lics.items():
+      if not license_files:
+        continue
+      license_id = new_license_id(license_name)
+      if license_name not in licenses_text:
+        licenses_text[license_name] = get_license_text(license_files.split(' '))
+      licenses.append(sbom_data.License(id=license_id, name=license_name, text=licenses_text[license_name]))
+
+  if is_source_package(installed_file_metadata):
+    # Source fork packages
+    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+    source_package_id = new_package_id(name, PKG_SOURCE)
+    source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
+                                       download_location=sbom_data.VALUE_NONE,
+                                       supplier='Organization: ' + args.product_mfr,
+                                       external_refs=external_refs)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+                                         supplier=(
+                                               'Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+                                         download_location=download_location)
+    packages += [source_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=source_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+
+    for license in licenses:
+      source_package.declared_license_ids.append(license.id)
+      upstream_package.declared_license_ids.append(license.id)
+
+  elif is_prebuilt_package(installed_file_metadata):
+    # Prebuilt fork packages
+    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+    prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
+                                         name=name,
+                                         download_location=sbom_data.VALUE_NONE,
+                                         version=version if version else args.build_version,
+                                         supplier='Organization: ' + args.product_mfr)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+                                         supplier=(
+                                               'Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+                                         download_location=download_location)
+    packages += [prebuilt_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=prebuilt_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+    for license in licenses:
+      prebuilt_package.declared_license_ids.append(license.id)
+      upstream_package.declared_license_ids.append(license.id)
+
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+      sbom_url = metadata_proto.third_party.sbom_ref.url
+      sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+      upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+      if sbom_url and sbom_checksum and upstream_element_id:
+        doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{sbom_data.encode_for_spdxid(name)}'
+        external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
+                                                               uri=sbom_url,
+                                                               checksum=sbom_checksum)
+        relationships.append(
+            sbom_data.Relationship(id1=upstream_package_id,
+                                   relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                   id2=doc_ref_id + ':' + upstream_element_id))
+
+  return external_doc_ref, packages, relationships, licenses
+
+
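As an illustration of the fragment shape produced by the SOURCE branch above, a hedged sketch with made-up ids, names and URLs; it assumes tools/sbom is on PYTHONPATH so sbom_data can be imported.

import sbom_data

source_pkg = sbom_data.Package(id='SPDXRef-SOURCE-libfoo', name='libfoo',
                               version='<build fingerprint>',
                               download_location=sbom_data.VALUE_NONE,
                               supplier='Organization: Google')
upstream_pkg = sbom_data.Package(id='SPDXRef-UPSTREAM-libfoo', name='libfoo',
                                 version='1.2.3',
                                 supplier=sbom_data.VALUE_NOASSERTION,
                                 download_location='https://example.org/libfoo.git')
# The source fork is recorded as a VARIANT_OF its upstream package.
rel = sbom_data.Relationship(id1=source_pkg.id,
                             relationship=sbom_data.RelationshipType.VARIANT_OF,
                             id2=upstream_pkg.id)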
+def save_report(report_file_path, report):
+  with open(report_file_path, 'w', encoding='utf-8') as report_file:
+    for type, issues in report.items():
+      report_file.write(type + '\n')
+      for issue in issues:
+        report_file.write('\t' + issue + '\n')
+      report_file.write('\n')
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+  installed_file = installed_file_metadata['installed_file']
+  module_path = installed_file_metadata['module_path']
+  product_copy_files = installed_file_metadata['product_copy_files']
+  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+  is_platform_generated = installed_file_metadata['is_platform_generated']
+
+  if (not module_path and
+      not product_copy_files and
+      not kernel_module_copy_files and
+      not is_platform_generated and
+      not installed_file.endswith('.fsv_meta')):
+    report[ISSUE_NO_METADATA].append(installed_file)
+    return False
+
+  return True
+
+
+# Validate identifiers in a package's METADATA.
+# 1) Only known identifier type is allowed
+# 2) Only one identifier's primary_source can be true
+def validate_package_metadata(metadata_file_path, package_metadata):
+  primary_source_found = False
+  for identifier in package_metadata.third_party.identifier:
+    if identifier.type not in THIRD_PARTY_IDENTIFIER_TYPES:
+      sys.exit(f'Unknown value of third_party.identifier.type in {metadata_file_path}/METADATA: {identifier.type}.')
+    if primary_source_found and identifier.primary_source:
+      sys.exit(
+          f'Field "primary_source" is set to true in multiple third_party.identifier in {metadata_file_path}/METADATA.')
+    primary_source_found = identifier.primary_source
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+  if metadata_file_path:
+    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+            installed_file_metadata['installed_file'],
+            installed_file_metadata['module_path'],
+            metadata_file_path + '/METADATA'))
+
+    package_metadata = metadata_file_pb2.Metadata()
+    with open(metadata_file_path + '/METADATA', 'rt') as f:
+      text_format.Parse(f.read(), package_metadata)
+
+    validate_package_metadata(metadata_file_path, package_metadata)
+
+    if metadata_file_path not in metadata_file_protos:
+      metadata_file_protos[metadata_file_path] = package_metadata
+      if not package_metadata.name:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not have "name"')
+
+      if not package_metadata.third_party.version:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+            f'{metadata_file_path}/METADATA does not have "third_party.version"')
+
+      for tag in package_metadata.third_party.security.tag:
+        if not tag.startswith(NVD_CPE23):
+          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+  else:
+    report[ISSUE_NO_METADATA_FILE].append(
+        "installed_file: {}, module_path: {}".format(
+            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+# If a file is from a source fork or prebuilt fork package, add its package information to SBOM
+def add_package_of_file(file_id, file_metadata, doc, report):
+  metadata_file_path = get_metadata_file_path(file_metadata)
+  report_metadata_file(metadata_file_path, file_metadata, report)
+
+  external_doc_ref, pkgs, rels, licenses = get_sbom_fragments(file_metadata, metadata_file_path)
+  if len(pkgs) > 0:
+    if external_doc_ref:
+      doc.add_external_ref(external_doc_ref)
+    for p in pkgs:
+      doc.add_package(p)
+    for rel in rels:
+      doc.add_relationship(rel)
+    fork_package_id = pkgs[0].id  # The first package should be the source/prebuilt fork package
+    doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                id2=fork_package_id))
+    for license in licenses:
+      doc.add_license(license)
+
+
+# Add STATIC_LINK relationship for static dependencies of a file
+def add_static_deps_of_file(file_id, file_metadata, doc):
+  if not file_metadata['static_dep_files'] and not file_metadata['whole_static_dep_files']:
+    return
+  static_dep_files = []
+  if file_metadata['static_dep_files']:
+    static_dep_files += file_metadata['static_dep_files'].split(' ')
+  if file_metadata['whole_static_dep_files']:
+    static_dep_files += file_metadata['whole_static_dep_files'].split(' ')
+
+  for dep_file in static_dep_files:
+    # Static libs are not shipped on devices, so names are derived from .intermediates paths.
+    doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                relationship=sbom_data.RelationshipType.STATIC_LINK,
+                                                id2=new_file_id(
+                                                  dep_file.removeprefix(args.soong_out + '/.intermediates/'))))
+
+
+def add_licenses_of_file(file_id, file_metadata, doc):
+  lics = db.get_module_licenses(file_metadata.get('name', ''), file_metadata['module_path'])
+  if lics:
+    file = next(f for f in doc.files if file_id == f.id)
+    for license_name, license_files in lics.items():
+      if not license_files:
+        continue
+      license_id = new_license_id(license_name)
+      file.concluded_license_ids.append(license_id)
+      if license_name not in licenses_text:
+        license_text = get_license_text(license_files.split(' '))
+        licenses_text[license_name] = license_text
+
+      doc.add_license(sbom_data.License(id=license_id, name=license_name, text=licenses_text[license_name]))
+
+
+def get_all_transitive_static_dep_files_of_installed_files(installed_files_metadata, db, report):
+  # Find all transitive static dep files of all installed files
+  q = queue.Queue()
+  for installed_file_metadata in installed_files_metadata:
+    if installed_file_metadata['static_dep_files']:
+      for f in installed_file_metadata['static_dep_files'].split(' '):
+        q.put(f)
+    if installed_file_metadata['whole_static_dep_files']:
+      for f in installed_file_metadata['whole_static_dep_files'].split(' '):
+        q.put(f)
+
+  all_static_dep_files = {}
+  while not q.empty():
+    dep_file = q.get()
+    if dep_file in all_static_dep_files:
+      # It has been processed
+      continue
+
+    all_static_dep_files[dep_file] = True
+    soong_module = db.get_soong_module_of_built_file(dep_file)
+    if not soong_module:
+      # This should not happen, add to report[ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP]
+      report[ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP].append(dep_file)
+      continue
+
+    if soong_module['static_dep_files']:
+      for f in soong_module['static_dep_files'].split(' '):
+        if f not in all_static_dep_files:
+          q.put(f)
+    if soong_module['whole_static_dep_files']:
+      for f in soong_module['whole_static_dep_files'].split(' '):
+        if f not in all_static_dep_files:
+          q.put(f)
+
+  return sorted(all_static_dep_files.keys())
+
+
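The queue-based walk above is a plain breadth-first traversal with de-duplication; a minimal sketch over a made-up dependency map (standing in for db.get_soong_module_of_built_file) looks like this.

import queue

deps = {'liba.a': ['libb.a'], 'libb.a': ['libc.a'], 'libc.a': []}

q = queue.Queue()
q.put('liba.a')
seen = {}
while not q.empty():
  f = q.get()
  if f in seen:
    continue  # already processed
  seen[f] = True
  for dep in deps.get(f, []):
    q.put(dep)

assert sorted(seen) == ['liba.a', 'libb.a', 'libc.a']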
+class MetadataDb:
+  def __init__(self, db):
+    self.conn = sqlite3.connect(':memory:')
+    self.conn.row_factory = sqlite3.Row
+    with sqlite3.connect(db) as c:
+      c.backup(self.conn)
+    self.reorg()
+
+  def reorg(self):
+    # package_license table
+    self.conn.execute("create table package_license as "
+                      "select name as package, pkg_default_applicable_licenses as license "
+                      "from modules "
+                      "where module_type = 'package' ")
+    cursor = self.conn.execute("select package,license from package_license where license like '% %'")
+    multi_licenses_packages = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for p in multi_licenses_packages:
+      licenses = p['license'].strip().split(' ')
+      for lic in licenses:
+        rows.append((p['package'], lic))
+    self.conn.executemany('insert into package_license values (?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from package_license where license like '% %'")
+    self.conn.commit()
+
+    # module_license table
+    self.conn.execute("create table module_license as "
+                      "select distinct name as module, package, licenses as license "
+                      "from modules "
+                      "where licenses != '' ")
+    cursor = self.conn.execute("select module,package,license from module_license where license like '% %'")
+    multi_licenses_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_licenses_modules:
+      licenses = m['license'].strip().split(' ')
+      for lic in licenses:
+        rows.append((m['module'], m['package'],lic))
+    self.conn.executemany('insert into module_license values (?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_license where license like '% %'")
+    self.conn.commit()
+
+    # module_installed_file table
+    self.conn.execute("create table module_installed_file as "
+                      "select id as module_id, name as module_name, package, installed_files as installed_file "
+                      "from modules "
+                      "where installed_files != '' ")
+    cursor = self.conn.execute("select module_id, module_name, package, installed_file "
+                               "from module_installed_file where installed_file like '% %'")
+    multi_installed_file_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_installed_file_modules:
+      installed_files = m['installed_file'].strip().split(' ')
+      for f in installed_files:
+        rows.append((m['module_id'], m['module_name'], m['package'], f))
+    self.conn.executemany('insert into module_installed_file values (?, ?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_installed_file where installed_file like '% %'")
+    self.conn.commit()
+
+    # module_built_file table
+    self.conn.execute("create table module_built_file as "
+                      "select id as module_id, name as module_name, package, built_files as built_file "
+                      "from modules "
+                      "where built_files != '' ")
+    cursor = self.conn.execute("select module_id, module_name, package, built_file "
+                               "from module_built_file where built_file like '% %'")
+    multi_built_file_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_built_file_modules:
+      built_files = m['built_file'].strip().split(' ')
+      for f in built_files:
+        rows.append((m['module_id'], m['module_name'], m['package'], f))
+    self.conn.executemany('insert into module_built_file values (?, ?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_built_file where built_file like '% %'")
+    self.conn.commit()
+
+
+    # Indexes
+    self.conn.execute('create index idx_modules_id on modules (id)')
+    self.conn.execute('create index idx_modules_name on modules (name)')
+    self.conn.execute('create index idx_package_license_package on package_license (package)')
+    self.conn.execute('create index idx_package_license_license on package_license (license)')
+    self.conn.execute('create index idx_module_license_module on module_license (module)')
+    self.conn.execute('create index idx_module_license_license on module_license (license)')
+    self.conn.execute('create index idx_module_installed_file_module_id on module_installed_file (module_id)')
+    self.conn.execute('create index idx_module_installed_file_installed_file on module_installed_file (installed_file)')
+    self.conn.execute('create index idx_module_built_file_module_id on module_built_file (module_id)')
+    self.conn.execute('create index idx_module_built_file_built_file on module_built_file (built_file)')
+    self.conn.commit()
+
+    if args.debug:
+      with sqlite3.connect(os.path.dirname(args.metadata) + '/compliance-metadata-debug.db') as c:
+        self.conn.backup(c)
+
+
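The reorg() pattern above (explode a space-separated multi-value column into one row per value, then drop the multi-value rows) can be seen in isolation with an in-memory table; all names and values here are made up.

import sqlite3

conn = sqlite3.connect(':memory:')
conn.row_factory = sqlite3.Row
conn.execute('create table module_license (module text, package text, license text)')
conn.execute("insert into module_license values ('libfoo', 'external/foo', 'lic_a lic_b')")

rows = []
for m in conn.execute("select * from module_license where license like '% %'").fetchall():
  for lic in m['license'].strip().split(' '):
    rows.append((m['module'], m['package'], lic))
conn.executemany('insert into module_license values (?, ?, ?)', rows)
conn.execute("delete from module_license where license like '% %'")

# Remaining rows: ('libfoo', 'external/foo', 'lic_a') and ('libfoo', 'external/foo', 'lic_b')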
+  def get_installed_files(self):
+    # Get all records from table make_metadata, which contains all installed files and corresponding make modules' metadata
+    cursor = self.conn.execute('select installed_file, module_path, is_prebuilt_make_module, product_copy_files, kernel_module_copy_files, is_platform_generated, license_text from make_metadata')
+    rows = cursor.fetchall()
+    cursor.close()
+    installed_files_metadata = []
+    for row in rows:
+      metadata = dict(zip(row.keys(), row))
+      installed_files_metadata.append(metadata)
+    return installed_files_metadata
+
+  def get_soong_modules(self):
+    # Get all records from table modules, which contains metadata of all soong modules
+    cursor = self.conn.execute('select name, package, package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files from modules')
+    rows = cursor.fetchall()
+    cursor.close()
+    soong_modules = []
+    for row in rows:
+      soong_module = dict(zip(row.keys(), row))
+      soong_modules.append(soong_module)
+    return soong_modules
+
+  def get_package_licenses(self, package):
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from package_license pl join modules m on pl.license = m.name '
+                               'where pl.package = ?',
+                               ('//' + package,))
+    rows = cursor.fetchall()
+    licenses = {}
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    return licenses
+
+  def get_module_licenses(self, module_name, package):
+    licenses = {}
+    # If property "licenses" is defined on module
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from module_license ml join modules m on ml.license = m.name '
+                               'where ml.module = ? and ml.package = ?',
+                               (module_name, package))
+    rows = cursor.fetchall()
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    if len(licenses) > 0:
+      return licenses
+
+    # Use default package license
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from package_license pl join modules m on pl.license = m.name '
+                               'where pl.package = ?',
+                               ('//' + package,))
+    rows = cursor.fetchall()
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    return licenses
+
+  def get_soong_module_of_installed_file(self, installed_file):
+    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
+                               'from modules m join module_installed_file mif on m.id = mif.module_id '
+                               'where mif.installed_file = ?',
+                               (installed_file,))
+    rows = cursor.fetchall()
+    cursor.close()
+    if rows:
+      soong_module = dict(zip(rows[0].keys(), rows[0]))
+      return soong_module
+
+    return None
+
+  def get_soong_module_of_built_file(self, built_file):
+    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
+                               'from modules m join module_built_file mbf on m.id = mbf.module_id '
+                               'where mbf.built_file = ?',
+                               (built_file,))
+    rows = cursor.fetchall()
+    cursor.close()
+    if rows:
+      soong_module = dict(zip(rows[0].keys(), rows[0]))
+      return soong_module
+
+    return None
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  global db
+  db = MetadataDb(args.metadata)
+  global metadata_file_protos
+  metadata_file_protos = {}
+  global licenses_text
+  licenses_text = {}
+
+  product_package_id = sbom_data.SPDXID_PRODUCT
+  product_package_name = sbom_data.PACKAGE_NAME_PRODUCT
+  product_package = sbom_data.Package(id=product_package_id,
+                                      name=product_package_name,
+                                      download_location=sbom_data.VALUE_NONE,
+                                      version=args.build_version,
+                                      supplier='Organization: ' + args.product_mfr,
+                                      files_analyzed=True)
+  doc_name = args.build_version
+  doc = sbom_data.Document(name=doc_name,
+                           namespace=f'https://www.google.com/sbom/spdx/android/{doc_name}',
+                           creators=['Organization: ' + args.product_mfr],
+                           describes=product_package_id)
+
+  doc.packages.append(product_package)
+  doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                                        download_location=sbom_data.VALUE_NONE,
+                                        version=args.build_version,
+                                        supplier='Organization: ' + args.product_mfr,
+                                        declared_license_ids=[sbom_data.SPDXID_LICENSE_APACHE]))
+
+  # Report on some issues and information
+  report = {
+      ISSUE_NO_METADATA: [],
+      ISSUE_NO_METADATA_FILE: [],
+      ISSUE_METADATA_FILE_INCOMPLETE: [],
+      ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
+      ISSUE_INSTALLED_FILE_NOT_EXIST: [],
+      ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP: [],
+      INFO_METADATA_FOUND_FOR_PACKAGE: [],
+  }
+
+  # Get installed files and corresponding make modules' metadata if an installed file is from a make module.
+  installed_files_metadata = db.get_installed_files()
+
+  # Find which Soong module an installed file is from and merge metadata from Make and Soong
+  for installed_file_metadata in installed_files_metadata:
+    soong_module = db.get_soong_module_of_installed_file(installed_file_metadata['installed_file'])
+    if soong_module:
+      # Merge soong metadata to make metadata
+      installed_file_metadata.update(soong_module)
+    else:
+      # For make modules soong_module_type should be empty
+      installed_file_metadata['soong_module_type'] = ''
+      installed_file_metadata['static_dep_files'] = ''
+      installed_file_metadata['whole_static_dep_files'] = ''
+
+  # Scan the metadata and create the corresponding package and file records in SPDX
+  for installed_file_metadata in installed_files_metadata:
+    installed_file = installed_file_metadata['installed_file']
+    module_path = installed_file_metadata['module_path']
+    product_copy_files = installed_file_metadata['product_copy_files']
+    kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+    build_output_path = installed_file
+    installed_file = installed_file.removeprefix(args.product_out)
+
+    if not installed_file_has_metadata(installed_file_metadata, report):
+      continue
+    if not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+      report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
+      continue
+
+    file_id = new_file_id(installed_file)
+    sha1 = checksum(build_output_path)
+    f = sbom_data.File(id=file_id, name=installed_file, checksum=sha1)
+    doc.files.append(f)
+    product_package.file_ids.append(file_id)
+
+    if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
+      add_package_of_file(file_id, installed_file_metadata, doc, report)
+
+    elif module_path or installed_file_metadata['is_platform_generated']:
+      # File from PLATFORM package
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+      if installed_file_metadata['is_platform_generated']:
+        f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]
+
+    elif product_copy_files:
+      # Format of product_copy_files: <source path>:<dest path>
+      src_path = product_copy_files.split(':')[0]
+      # So far product_copy_files are copied from the system, kernel, hardware, frameworks and
+      # device directories, so process them as files from the PLATFORM package
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+      if installed_file_metadata['license_text']:
+        if installed_file_metadata['license_text'] == 'build/soong/licenses/LICENSE':
+          f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]
+
+    elif installed_file.endswith('.fsv_meta'):
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+      f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]
+
+    elif kernel_module_copy_files.startswith('ANDROID-GEN'):
+      # For the four files generated for _dlkm, _ramdisk partitions
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+
+    # Process static dependencies of the installed file
+    add_static_deps_of_file(file_id, installed_file_metadata, doc)
+
+    # Add licenses of the installed file
+    add_licenses_of_file(file_id, installed_file_metadata, doc)
+
+  # Add all static library files to SBOM
+  for dep_file in get_all_transitive_static_dep_files_of_installed_files(installed_files_metadata, db, report):
+    filepath = dep_file.removeprefix(args.soong_out + '/.intermediates/')
+    file_id = new_file_id(filepath)
+    # SHA1 of empty string. Sometimes .a files might not be built.
+    sha1 = 'SHA1: da39a3ee5e6b4b0d3255bfef95601890afd80709'
+    if os.path.islink(dep_file) or os.path.isfile(dep_file):
+      sha1 = checksum(dep_file)
+    doc.files.append(sbom_data.File(id=file_id,
+                                    name=filepath,
+                                    checksum=sha1))
+    file_metadata = {
+        'installed_file': dep_file,
+        'is_prebuilt_make_module': False
+    }
+    file_metadata.update(db.get_soong_module_of_built_file(dep_file))
+    add_package_of_file(file_id, file_metadata, doc, report)
+
+    # Add relationships for static deps of static libraries
+    add_static_deps_of_file(file_id, file_metadata, doc)
+
+    # Add licenses of the static lib
+    add_licenses_of_file(file_id, file_metadata, doc)
+
+  # Save SBOM records to output file
+  doc.generate_packages_verification_code()
+  doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+  prefix = args.output_file
+  if prefix.endswith('.spdx'):
+    prefix = prefix.removesuffix('.spdx')
+  elif prefix.endswith('.spdx.json'):
+    prefix = prefix.removesuffix('.spdx.json')
+
+  output_file = prefix + '.spdx'
+  with open(output_file, 'w', encoding="utf-8") as file:
+    sbom_writers.TagValueWriter.write(doc, file)
+  if args.json:
+    with open(prefix + '.spdx.json', 'w', encoding="utf-8") as file:
+      sbom_writers.JSONWriter.write(doc, file)
+
+  save_report(prefix + '-gen-report.txt', report)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/sbom/generate-sbom-framework_res.py b/tools/sbom/generate-sbom-framework_res.py
index d0d232d..27f3d2e 100644
--- a/tools/sbom/generate-sbom-framework_res.py
+++ b/tools/sbom/generate-sbom-framework_res.py
@@ -80,7 +80,8 @@
 
   resource_file_spdxids = []
   for file in layoutlib_sbom[sbom_writers.PropNames.FILES]:
-    if file[sbom_writers.PropNames.FILE_NAME].startswith('data/res/'):
+    file_path = file[sbom_writers.PropNames.FILE_NAME]
+    if file_path.startswith('data/res/') or file_path.startswith('data/overlays/'):
       resource_file_spdxids.append(file[sbom_writers.PropNames.SPDXID])
 
   doc.relationships = [
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
index b5ac8a5..fc5c704 100644
--- a/tools/sbom/sbom_data.py
+++ b/tools/sbom/sbom_data.py
@@ -30,6 +30,7 @@
 SPDXID_DOC = 'SPDXRef-DOCUMENT'
 SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
 SPDXID_PLATFORM = 'SPDXRef-PLATFORM'
+SPDXID_LICENSE_APACHE = 'LicenseRef-Android-Apache-2.0'
 
 PACKAGE_NAME_PRODUCT = 'PRODUCT'
 PACKAGE_NAME_PLATFORM = 'PLATFORM'
@@ -50,7 +51,7 @@
   cpe23Type = 'cpe23Type'
 
 
-@dataclass
+@dataclass(frozen=True)
 class PackageExternalRef:
   category: PackageExternalRefCategory
   type: PackageExternalRefType
@@ -68,6 +69,7 @@
   verification_code: str = None
   file_ids: List[str] = field(default_factory=list)
   external_refs: List[PackageExternalRef] = field(default_factory=list)
+  declared_license_ids: List[str] = field(default_factory=list)
 
 
 @dataclass
@@ -75,6 +77,7 @@
   id: str
   name: str
   checksum: str
+  concluded_license_ids: List[str] = field(default_factory=list)
 
 
 class RelationshipType:
@@ -85,20 +88,27 @@
   STATIC_LINK = 'STATIC_LINK'
 
 
-@dataclass
+@dataclass(frozen=True)
 class Relationship:
   id1: str
   relationship: RelationshipType
   id2: str
 
 
-@dataclass
+@dataclass(frozen=True)
 class DocumentExternalReference:
   id: str
   uri: str
   checksum: str
 
 
+@dataclass(frozen=True)
+class License:
+  id: str
+  text: str
+  name: str
+
+
 @dataclass
 class Document:
   name: str
@@ -111,20 +121,30 @@
   packages: List[Package] = field(default_factory=list)
   files: List[File] = field(default_factory=list)
   relationships: List[Relationship] = field(default_factory=list)
+  licenses: List[License] = field(default_factory=list)
 
   def add_external_ref(self, external_ref):
     if not any(external_ref.uri == ref.uri for ref in self.external_refs):
       self.external_refs.append(external_ref)
 
   def add_package(self, package):
-    if not any(package.id == p.id for p in self.packages):
+    p = next((p for p in self.packages if package.id == p.id), None)
+    if not p:
       self.packages.append(package)
+    else:
+      for license_id in package.declared_license_ids:
+        if license_id not in p.declared_license_ids:
+          p.declared_license_ids.append(license_id)
 
   def add_relationship(self, rel):
     if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
                for r in self.relationships):
       self.relationships.append(rel)
 
+  def add_license(self, license):
+    if not any(license.id == l.id for l in self.licenses):
+      self.licenses.append(license)
+
   def generate_packages_verification_code(self):
     for package in self.packages:
       if not package.file_ids:
diff --git a/tools/sbom/sbom_data_test.py b/tools/sbom/sbom_data_test.py
index 69bc9d2..9d987c4 100644
--- a/tools/sbom/sbom_data_test.py
+++ b/tools/sbom/sbom_data_test.py
@@ -23,6 +23,7 @@
 SUPPLIER_UPSTREAM = 'Organization: upstream'
 
 SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
+SPDXID_PREBUILT_PACKAGE2 = 'SPDXRef-PREBUILT-package2'
 SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
 SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'
 
@@ -31,6 +32,9 @@
 SPDXID_FILE3 = 'SPDXRef-file3'
 SPDXID_FILE4 = 'SPDXRef-file4'
 
+SPDXID_LICENSE1 = "SPDXRef-License-1"
+SPDXID_LICENSE2 = "SPDXRef-License-2"
+
 
 class SBOMDataTest(unittest.TestCase):
 
@@ -134,6 +138,47 @@
     self.sbom_doc.generate_packages_verification_code()
     self.assertEqual(expected_package_verification_code, self.sbom_doc.packages[0].verification_code)
 
+  def test_add_package_declared_licenses(self):
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                ))
+    p = next((p for p in self.sbom_doc.packages if p.id == SPDXID_PREBUILT_PACKAGE2), None)
+    self.assertNotEqual(p, None)
+    self.assertEqual(p.declared_license_ids, [])
+
+    # Add same package with license 1
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                declared_license_ids=[SPDXID_LICENSE1]
+                                                ))
+    self.assertEqual(p.declared_license_ids, [SPDXID_LICENSE1])
+
+    # Add same package with license 2
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                declared_license_ids=[SPDXID_LICENSE2]
+                                                ))
+    self.assertEqual(p.declared_license_ids, [SPDXID_LICENSE1, SPDXID_LICENSE2])
+
+    # Add same package with license 2 again
+    self.sbom_doc.add_package(sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE2,
+                                                name='Prebuilt package2',
+                                                download_location=sbom_data.VALUE_NONE,
+                                                supplier=SUPPLIER_GOOGLE,
+                                                version=BUILD_FINGER_PRINT,
+                                                declared_license_ids=[SPDXID_LICENSE2]
+                                                ))
+    self.assertEqual(p.declared_license_ids, [SPDXID_LICENSE1, SPDXID_LICENSE2])
+
 
 if __name__ == '__main__':
   unittest.main(verbosity=2)
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
index 1cb864d..26b3c57 100644
--- a/tools/sbom/sbom_writers.py
+++ b/tools/sbom/sbom_writers.py
@@ -64,6 +64,11 @@
   # Relationship
   RELATIONSHIP = 'Relationship'
 
+  # License
+  LICENSE_ID = 'LicenseID'
+  LICENSE_NAME = 'LicenseName'
+  LICENSE_EXTRACTED_TEXT = 'ExtractedText'
+
 
 class TagValueWriter:
   @staticmethod
@@ -99,6 +104,12 @@
       tagvalues.append(f'{Tags.PACKAGE_VERSION}: {package.version}')
     if package.supplier:
       tagvalues.append(f'{Tags.PACKAGE_SUPPLIER}: {package.supplier}')
+
+    license = sbom_data.VALUE_NOASSERTION
+    if package.declared_license_ids:
+      license = ' OR '.join(package.declared_license_ids)
+    tagvalues.append(f'{Tags.PACKAGE_LICENSE_DECLARED}: {license}')
+
     if package.verification_code:
       tagvalues.append(f'{Tags.PACKAGE_VERIFICATION_CODE}: {package.verification_code}')
     if package.external_refs:
@@ -155,8 +166,12 @@
       f'{Tags.FILE_NAME}: {file.name}',
       f'{Tags.SPDXID}: {file.id}',
       f'{Tags.FILE_CHECKSUM}: {file.checksum}',
-      '',
     ]
+    license = sbom_data.VALUE_NOASSERTION
+    if file.concluded_license_ids:
+      license = ' OR '.join(file.concluded_license_ids)
+    tagvalues.append(f'{Tags.FILE_LICENSE_CONCLUDED}: {license}')
+    tagvalues.append('')
 
     return tagvalues
 
@@ -194,6 +209,22 @@
     return tagvalues
 
   @staticmethod
+  def marshal_license(license):
+    tagvalues = []
+    tagvalues.append(f'{Tags.LICENSE_ID}: {license.id}')
+    tagvalues.append(f'{Tags.LICENSE_NAME}: {license.name}')
+    tagvalues.append(f'{Tags.LICENSE_EXTRACTED_TEXT}: <text>{license.text}</text>')
+    return tagvalues
+
+  @staticmethod
+  def marshal_licenses(sbom_doc):
+    tagvalues = []
+    for license in sbom_doc.licenses:
+      tagvalues += TagValueWriter.marshal_license(license)
+      tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
   def write(sbom_doc, file, fragment=False):
     content = []
     if not fragment:
@@ -202,6 +233,7 @@
     tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc, fragment)
     content += tagvalues
     content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
+    content += TagValueWriter.marshal_licenses(sbom_doc)
     file.write('\n'.join(content))
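A quick sketch of the tag-value lines marshal_license emits for one License record; it assumes tools/sbom is on PYTHONPATH, and the id, name and text values are made up.

import sbom_data
import sbom_writers

lic = sbom_data.License(id='LicenseRef-Android-Apache-2.0',
                        name='Android-Apache-2.0',
                        text='Full Apache-2.0 license text here')
print('\n'.join(sbom_writers.TagValueWriter.marshal_license(lic)))
# LicenseID: LicenseRef-Android-Apache-2.0
# LicenseName: Android-Apache-2.0
# ExtractedText: <text>Full Apache-2.0 license text here</text>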
 
 
@@ -236,11 +268,13 @@
   PACKAGE_EXTERNAL_REF_TYPE = 'referenceType'
   PACKAGE_EXTERNAL_REF_LOCATOR = 'referenceLocator'
   PACKAGE_HAS_FILES = 'hasFiles'
+  PACKAGE_LICENSE_DECLARED = 'licenseDeclared'
 
   # File
   FILES = 'files'
   FILE_NAME = 'fileName'
   FILE_CHECKSUMS = 'checksums'
+  FILE_LICENSE_CONCLUDED = 'licenseConcluded'
 
   # Relationship
   RELATIONSHIPS = 'relationships'
@@ -248,6 +282,12 @@
   REL_RELATED_ELEMENT_ID = 'relatedSpdxElement'
   REL_TYPE = 'relationshipType'
 
+  # License
+  LICENSES = 'hasExtractedLicensingInfos'
+  LICENSE_ID = 'licenseId'
+  LICENSE_NAME = 'name'
+  LICENSE_EXTRACTED_TEXT = 'extractedText'
+
 
 class JSONWriter:
   @staticmethod
@@ -294,6 +334,9 @@
         package[PropNames.PACKAGE_VERSION] = p.version
       if p.supplier:
         package[PropNames.PACKAGE_SUPPLIER] = p.supplier
+      package[PropNames.PACKAGE_LICENSE_DECLARED] = sbom_data.VALUE_NOASSERTION
+      if p.declared_license_ids:
+        package[PropNames.PACKAGE_LICENSE_DECLARED] = ' OR '.join(p.declared_license_ids)
       if p.verification_code:
         package[PropNames.PACKAGE_VERIFICATION_CODE] = {
           PropNames.PACKAGE_VERIFICATION_CODE_VALUE: p.verification_code
@@ -329,6 +372,9 @@
         PropNames.ALGORITHM: checksum[0],
         PropNames.CHECKSUM_VALUE: checksum[1],
       }]
+      file[PropNames.FILE_LICENSE_CONCLUDED] = sbom_data.VALUE_NOASSERTION
+      if f.concluded_license_ids:
+        file[PropNames.FILE_LICENSE_CONCLUDED] = ' OR '.join(f.concluded_license_ids)
       files.append(file)
     return {PropNames.FILES: files}
 
@@ -347,10 +393,22 @@
     return {PropNames.RELATIONSHIPS: relationships}
 
   @staticmethod
+  def marshal_licenses(sbom_doc):
+    licenses = []
+    for l in sbom_doc.licenses:
+      licenses.append({
+          PropNames.LICENSE_ID: l.id,
+          PropNames.LICENSE_NAME: l.name,
+          PropNames.LICENSE_EXTRACTED_TEXT: f'<text>{l.text}</text>'
+      })
+    return {PropNames.LICENSES: licenses}
+
+  @staticmethod
   def write(sbom_doc, file):
     doc = {}
     doc.update(JSONWriter.marshal_doc_headers(sbom_doc))
     doc.update(JSONWriter.marshal_packages(sbom_doc))
     doc.update(JSONWriter.marshal_files(sbom_doc))
     doc.update(JSONWriter.marshal_relationships(sbom_doc))
+    doc.update(JSONWriter.marshal_licenses(sbom_doc))
     file.write(json.dumps(doc, indent=4))
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
index cf85e01..f9f5230 100644
--- a/tools/sbom/sbom_writers_test.py
+++ b/tools/sbom/sbom_writers_test.py
@@ -33,6 +33,14 @@
 SPDXID_FILE3 = 'SPDXRef-file3'
 SPDXID_FILE4 = 'SPDXRef-file4'
 
+SPDXID_LICENSE_1 = 'LicenseRef-Android-License-1'
+SPDXID_LICENSE_2 = 'LicenseRef-Android-License-2'
+SPDXID_LICENSE_3 = 'LicenseRef-Android-License-3'
+
+LICENSE_APACHE_TEXT = "LICENSE_APACHE"
+LICENSE1_TEXT = 'LICENSE 1'
+LICENSE2_TEXT = 'LICENSE 2'
+LICENSE3_TEXT = 'LICENSE 3'
 
 class SBOMWritersTest(unittest.TestCase):
 
@@ -63,6 +71,7 @@
                         download_location=sbom_data.VALUE_NONE,
                         supplier=SUPPLIER_GOOGLE,
                         version=BUILD_FINGER_PRINT,
+                        declared_license_ids=[sbom_data.SPDXID_LICENSE_APACHE]
                         ))
 
     self.sbom_doc.add_package(
@@ -71,6 +80,7 @@
                         download_location=sbom_data.VALUE_NONE,
                         supplier=SUPPLIER_GOOGLE,
                         version=BUILD_FINGER_PRINT,
+                        declared_license_ids=[SPDXID_LICENSE_1],
                         ))
 
     self.sbom_doc.add_package(
@@ -79,6 +89,7 @@
                         download_location=sbom_data.VALUE_NONE,
                         supplier=SUPPLIER_GOOGLE,
                         version=BUILD_FINGER_PRINT,
+                        declared_license_ids=[SPDXID_LICENSE_2, SPDXID_LICENSE_3],
                         external_refs=[sbom_data.PackageExternalRef(
                           category=sbom_data.PackageExternalRefCategory.SECURITY,
                           type=sbom_data.PackageExternalRefType.cpe22Type,
@@ -90,6 +101,7 @@
                         name='Upstream package1',
                         supplier=SUPPLIER_UPSTREAM,
                         version='1.1',
+                        declared_license_ids=[SPDXID_LICENSE_2, SPDXID_LICENSE_3],
                         ))
 
     self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
@@ -97,11 +109,11 @@
                                                           id2=SPDXID_UPSTREAM_PACKAGE1))
 
     self.sbom_doc.files.append(
-      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111'))
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111', concluded_license_ids=[sbom_data.SPDXID_LICENSE_APACHE]))
     self.sbom_doc.files.append(
-      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
+      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222', concluded_license_ids=[SPDXID_LICENSE_1]))
     self.sbom_doc.files.append(
-      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333', concluded_license_ids=[SPDXID_LICENSE_2, SPDXID_LICENSE_3]))
     self.sbom_doc.files.append(
       sbom_data.File(id=SPDXID_FILE4, name='file4.a', checksum='SHA1: 44444'))
 
@@ -120,6 +132,11 @@
                                                           id2=SPDXID_FILE4
                                                           ))
 
+    self.sbom_doc.add_license(sbom_data.License(sbom_data.SPDXID_LICENSE_APACHE, LICENSE_APACHE_TEXT, 'License-Apache'))
+    self.sbom_doc.add_license(sbom_data.License(SPDXID_LICENSE_1, LICENSE1_TEXT, 'License-1'))
+    self.sbom_doc.add_license(sbom_data.License(SPDXID_LICENSE_2, LICENSE2_TEXT, 'License-2'))
+    self.sbom_doc.add_license(sbom_data.License(SPDXID_LICENSE_3, LICENSE3_TEXT, 'License-3'))
+
     # SBOM fragment of an APK
     self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
                                                  namespace='http://www.google.com/sbom/spdx/android',
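
The setUp additions above wire licenses in three places: declared_license_ids on packages, concluded_license_ids on files, and the License objects themselves via add_license. A condensed sketch of that wiring under the sbom_data API as used in this test (field names follow the surrounding setUp and the expected output; any additional required constructor arguments are elided or assumed):

import sbom_data

# Sketch only: argument lists are abbreviated relative to the real setUp.
doc = sbom_data.Document(name='test doc',
                         namespace='http://www.google.com/sbom/spdx/android')

# Register the extracted license text once under a LicenseRef ID...
doc.add_license(sbom_data.License('LicenseRef-Android-License-1', 'LICENSE 1', 'License-1'))

# ...then reference that ID from packages (declared) and files (concluded).
doc.add_package(sbom_data.Package(id='SPDXRef-PREBUILT-package1',
                                  name='Prebuilt package1',
                                  download_location=sbom_data.VALUE_NONE,
                                  supplier='Organization: Google',
                                  version='build_finger_print',
                                  declared_license_ids=['LicenseRef-Android-License-1']))
doc.files.append(sbom_data.File(id='SPDXRef-file2', name='/bin/file2',
                                checksum='SHA1: 22222',
                                concluded_license_ids=['LicenseRef-Android-License-1']))
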
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
index 53936c5..a877810 100644
--- a/tools/sbom/testdata/expected_json_sbom.spdx.json
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -31,6 +31,7 @@
             "filesAnalyzed": true,
             "versionInfo": "build_finger_print",
             "supplier": "Organization: Google",
+            "licenseDeclared": "NOASSERTION",
             "packageVerificationCode": {
                 "packageVerificationCodeValue": "123456"
             },
@@ -46,7 +47,8 @@
             "downloadLocation": "NONE",
             "filesAnalyzed": false,
             "versionInfo": "build_finger_print",
-            "supplier": "Organization: Google"
+            "supplier": "Organization: Google",
+            "licenseDeclared": "LicenseRef-Android-Apache-2.0"
         },
         {
             "name": "Prebuilt package1",
@@ -54,7 +56,8 @@
             "downloadLocation": "NONE",
             "filesAnalyzed": false,
             "versionInfo": "build_finger_print",
-            "supplier": "Organization: Google"
+            "supplier": "Organization: Google",
+            "licenseDeclared": "LicenseRef-Android-License-1"
         },
         {
             "name": "Source package1",
@@ -63,6 +66,7 @@
             "filesAnalyzed": false,
             "versionInfo": "build_finger_print",
             "supplier": "Organization: Google",
+            "licenseDeclared": "LicenseRef-Android-License-2 OR LicenseRef-Android-License-3",
             "externalRefs": [
                 {
                     "referenceCategory": "SECURITY",
@@ -77,7 +81,8 @@
             "downloadLocation": "NOASSERTION",
             "filesAnalyzed": false,
             "versionInfo": "1.1",
-            "supplier": "Organization: upstream"
+            "supplier": "Organization: upstream",
+            "licenseDeclared": "LicenseRef-Android-License-2 OR LicenseRef-Android-License-3"
         }
     ],
     "files": [
@@ -89,7 +94,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "11111"
                 }
-            ]
+            ],
+            "licenseConcluded": "LicenseRef-Android-Apache-2.0"
         },
         {
             "fileName": "/bin/file2",
@@ -99,7 +105,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "22222"
                 }
-            ]
+            ],
+            "licenseConcluded": "LicenseRef-Android-License-1"
         },
         {
             "fileName": "/bin/file3",
@@ -109,7 +116,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "33333"
                 }
-            ]
+            ],
+            "licenseConcluded": "LicenseRef-Android-License-2 OR LicenseRef-Android-License-3"
         },
         {
             "fileName": "file4.a",
@@ -119,7 +127,8 @@
                     "algorithm": "SHA1",
                     "checksumValue": "44444"
                 }
-            ]
+            ],
+            "licenseConcluded": "NOASSERTION"
         }
     ],
     "relationships": [
@@ -148,5 +157,27 @@
             "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
             "relationshipType": "VARIANT_OF"
         }
+    ],
+    "hasExtractedLicensingInfos": [
+        {
+            "licenseId": "LicenseRef-Android-Apache-2.0",
+            "name": "License-Apache",
+            "extractedText": "<text>LICENSE_APACHE</text>"
+        },
+        {
+            "licenseId": "LicenseRef-Android-License-1",
+            "name": "License-1",
+            "extractedText": "<text>LICENSE 1</text>"
+        },
+        {
+            "licenseId": "LicenseRef-Android-License-2",
+            "name": "License-2",
+            "extractedText": "<text>LICENSE 2</text>"
+        },
+        {
+            "licenseId": "LicenseRef-Android-License-3",
+            "name": "License-3",
+            "extractedText": "<text>LICENSE 3</text>"
+        }
     ]
 }
\ No newline at end of file
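
The expected JSON above pins down how the ID lists from the test setUp become SPDX expression strings: no attached IDs yields NOASSERTION, and multiple IDs are joined with ' OR '. A tiny sketch of that rule (the helper name is illustrative; the real conversion lives in sbom_writers.py):

def license_expression(license_ids):
  # Empty list -> NOASSERTION; otherwise join with ' OR ', matching the
  # 'licenseDeclared' / 'licenseConcluded' values in the expected output.
  return ' OR '.join(license_ids) if license_ids else 'NOASSERTION'

assert license_expression([]) == 'NOASSERTION'
assert (license_expression(['LicenseRef-Android-License-2',
                            'LicenseRef-Android-License-3'])
        == 'LicenseRef-Android-License-2 OR LicenseRef-Android-License-3')
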
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
index e6fd17e..1c54410 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -10,6 +10,7 @@
 FileName: file4.a
 SPDXID: SPDXRef-file4
 FileChecksum: SHA1: 44444
+LicenseConcluded: NOASSERTION
 
 PackageName: PRODUCT
 SPDXID: SPDXRef-PRODUCT
@@ -17,6 +18,7 @@
 FilesAnalyzed: true
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: NOASSERTION
 PackageVerificationCode: 123456
 
 Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-PRODUCT
@@ -24,14 +26,17 @@
 FileName: /bin/file1
 SPDXID: SPDXRef-file1
 FileChecksum: SHA1: 11111
+LicenseConcluded: LicenseRef-Android-Apache-2.0
 
 FileName: /bin/file2
 SPDXID: SPDXRef-file2
 FileChecksum: SHA1: 22222
+LicenseConcluded: LicenseRef-Android-License-1
 
 FileName: /bin/file3
 SPDXID: SPDXRef-file3
 FileChecksum: SHA1: 33333
+LicenseConcluded: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 PackageName: PLATFORM
 SPDXID: SPDXRef-PLATFORM
@@ -39,6 +44,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-Apache-2.0
 
 PackageName: Prebuilt package1
 SPDXID: SPDXRef-PREBUILT-package1
@@ -46,6 +52,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-1
 
 PackageName: Source package1
 SPDXID: SPDXRef-SOURCE-package1
@@ -53,6 +60,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
 
 PackageName: Upstream package1
@@ -61,6 +69,7 @@
 FilesAnalyzed: false
 PackageVersion: 1.1
 PackageSupplier: Organization: upstream
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
 
@@ -68,3 +77,19 @@
 Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
 Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
 Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4
+
+LicenseID: LicenseRef-Android-Apache-2.0
+LicenseName: License-Apache
+ExtractedText: <text>LICENSE_APACHE</text>
+
+LicenseID: LicenseRef-Android-License-1
+LicenseName: License-1
+ExtractedText: <text>LICENSE 1</text>
+
+LicenseID: LicenseRef-Android-License-2
+LicenseName: License-2
+ExtractedText: <text>LICENSE 2</text>
+
+LicenseID: LicenseRef-Android-License-3
+LicenseName: License-3
+ExtractedText: <text>LICENSE 3</text>
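
The tag-value expectation above carries the same information in tag form: LicenseConcluded on each file, PackageLicenseDeclared on each package, and one LicenseID/LicenseName/ExtractedText block per registered license at the end of the document. A small sketch of one such trailing block, matching the lines above (the function name is illustrative; the real emitter is not shown here):

def license_tagvalue_block(license_id, name, text):
  # One block per license, followed by a blank separator line, exactly as in
  # the expected .spdx file above.
  return [
      f'LicenseID: {license_id}',
      f'LicenseName: {name}',
      f'ExtractedText: <text>{text}</text>',
      '',
  ]

print('\n'.join(license_tagvalue_block('LicenseRef-Android-License-1',
                                       'License-1', 'LICENSE 1')))
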
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
index 428d7e3..36afc8b 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
@@ -10,6 +10,7 @@
 FileName: file4.a
 SPDXID: SPDXRef-file4
 FileChecksum: SHA1: 44444
+LicenseConcluded: NOASSERTION
 
 Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-file4
 
@@ -19,19 +20,23 @@
 FilesAnalyzed: true
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: NOASSERTION
 PackageVerificationCode: 123456
 
 FileName: /bin/file1
 SPDXID: SPDXRef-file1
 FileChecksum: SHA1: 11111
+LicenseConcluded: LicenseRef-Android-Apache-2.0
 
 FileName: /bin/file2
 SPDXID: SPDXRef-file2
 FileChecksum: SHA1: 22222
+LicenseConcluded: LicenseRef-Android-License-1
 
 FileName: /bin/file3
 SPDXID: SPDXRef-file3
 FileChecksum: SHA1: 33333
+LicenseConcluded: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 PackageName: PLATFORM
 SPDXID: SPDXRef-PLATFORM
@@ -39,6 +44,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-Apache-2.0
 
 PackageName: Prebuilt package1
 SPDXID: SPDXRef-PREBUILT-package1
@@ -46,6 +52,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-1
 
 PackageName: Source package1
 SPDXID: SPDXRef-SOURCE-package1
@@ -53,6 +60,7 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
 
 PackageName: Upstream package1
@@ -61,6 +69,7 @@
 FilesAnalyzed: false
 PackageVersion: 1.1
 PackageSupplier: Organization: upstream
+PackageLicenseDeclared: LicenseRef-Android-License-2 OR LicenseRef-Android-License-3
 
 Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
 
@@ -68,3 +77,19 @@
 Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
 Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
 Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4
+
+LicenseID: LicenseRef-Android-Apache-2.0
+LicenseName: License-Apache
+ExtractedText: <text>LICENSE_APACHE</text>
+
+LicenseID: LicenseRef-Android-License-1
+LicenseName: License-1
+ExtractedText: <text>LICENSE 1</text>
+
+LicenseID: LicenseRef-Android-License-2
+LicenseName: License-2
+ExtractedText: <text>LICENSE 2</text>
+
+LicenseID: LicenseRef-Android-License-3
+LicenseName: License-3
+ExtractedText: <text>LICENSE 3</text>
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
index a00c291..4b14a4b 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
@@ -1,6 +1,7 @@
 FileName: /bin/file1.apk
 SPDXID: SPDXRef-file1
 FileChecksum: SHA1: 11111
+LicenseConcluded: NOASSERTION
 
 PackageName: Unbundled apk package
 SPDXID: SPDXRef-SOURCE-package1
@@ -8,5 +9,6 @@
 FilesAnalyzed: false
 PackageVersion: build_finger_print
 PackageSupplier: Organization: Google
+PackageLicenseDeclared: NOASSERTION
 
 Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/tool_event_logger/proto/tool_event.proto b/tools/tool_event_logger/proto/tool_event.proto
index 61e28a2..ef71eac 100644
--- a/tools/tool_event_logger/proto/tool_event.proto
+++ b/tools/tool_event_logger/proto/tool_event.proto
@@ -27,6 +27,8 @@
   string source_root = 3;
   // Name of the tool used.
   string tool_tag = 6;
+  // Name of the host workstation.
+  string host_name = 7;
 
   oneof event {
     InvocationStarted invocation_started = 4;
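
With field number 7 assigned to host_name, regenerated Python bindings pick the field up automatically. A minimal usage sketch, assuming a protoc-generated module named tool_event_pb2 as referenced by the logger tests (the exact import path is an assumption):

import platform

import tool_event_pb2  # generated from tool_event.proto; import path assumed

event = tool_event_pb2.ToolEvent(
    tool_tag='example_tool',    # example value
    host_name=platform.node(),  # new field, tag 7
)
round_trip = tool_event_pb2.ToolEvent.FromString(event.SerializeToString())
assert round_trip.host_name == platform.node()
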
diff --git a/tools/tool_event_logger/tool_event_logger.py b/tools/tool_event_logger/tool_event_logger.py
index 65a9696..b249d91 100644
--- a/tools/tool_event_logger/tool_event_logger.py
+++ b/tools/tool_event_logger/tool_event_logger.py
@@ -38,6 +38,7 @@
       tool_tag: str,
       invocation_id: str,
       user_name: str,
+      host_name: str,
       source_root: str,
       platform_version: str,
       python_version: str,
@@ -46,6 +47,7 @@
     self.tool_tag = tool_tag
     self.invocation_id = invocation_id
     self.user_name = user_name
+    self.host_name = host_name
     self.source_root = source_root
     self.platform_version = platform_version
     self.python_version = python_version
@@ -57,6 +59,7 @@
         tool_tag=tool_tag,
         invocation_id=str(uuid.uuid4()),
         user_name=getpass.getuser(),
+        host_name=platform.node(),
         source_root=os.environ.get('ANDROID_BUILD_TOP', ''),
         platform_version=platform.platform(),
         python_version=platform.python_version(),
@@ -110,6 +113,7 @@
         tool_tag=self.tool_tag,
         invocation_id=self.invocation_id,
         user_name=self.user_name,
+        host_name=self.host_name,
         source_root=self.source_root,
     )
 
diff --git a/tools/tool_event_logger/tool_event_logger_test.py b/tools/tool_event_logger/tool_event_logger_test.py
index 34b6c35..788812a 100644
--- a/tools/tool_event_logger/tool_event_logger_test.py
+++ b/tools/tool_event_logger/tool_event_logger_test.py
@@ -25,6 +25,7 @@
 
 TEST_INVOCATION_ID = 'test_invocation_id'
 TEST_USER_NAME = 'test_user'
+TEST_HOST_NAME = 'test_host_name'
 TEST_TOOL_TAG = 'test_tool'
 TEST_SOURCE_ROOT = 'test_source_root'
 TEST_PLATFORM_VERSION = 'test_platform_version'
@@ -41,6 +42,7 @@
         TEST_TOOL_TAG,
         TEST_INVOCATION_ID,
         TEST_USER_NAME,
+        TEST_HOST_NAME,
         TEST_SOURCE_ROOT,
         TEST_PLATFORM_VERSION,
         TEST_PYTHON_VERSION,
@@ -65,6 +67,7 @@
     log_event = tool_event_pb2.ToolEvent.FromString(sent_event.source_extension)
     self.assertEqual(log_event.invocation_id, TEST_INVOCATION_ID)
     self.assertEqual(log_event.user_name, TEST_USER_NAME)
+    self.assertEqual(log_event.host_name, TEST_HOST_NAME)
     self.assertEqual(log_event.tool_tag, TEST_TOOL_TAG)
     self.assertEqual(log_event.source_root, TEST_SOURCE_ROOT)
 
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index f32f90b..3d65bc0 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -200,7 +200,7 @@
     }
 
     if (verbose)
-        printf("Verification %s\n", foundBad ? "FAILED" : "succesful");
+        printf("Verification %s\n", foundBad ? "FAILED" : "successful");
 
     return foundBad ? 1 : 0;
 }