Merge "Move checkbuild targets of Soong modules into Soong" into main
diff --git a/ci/Android.bp b/ci/Android.bp
index 104f517..6d4ac35 100644
--- a/ci/Android.bp
+++ b/ci/Android.bp
@@ -71,11 +71,37 @@
     },
 }
 
+python_test_host {
+    name: "optimized_targets_test",
+    main: "optimized_targets_test.py",
+    pkg_path: "testdata",
+    srcs: [
+        "optimized_targets_test.py",
+    ],
+    libs: [
+        "build_test_suites",
+        "pyfakefs",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    data: [
+        ":py3-cmd",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
 python_library_host {
     name: "build_test_suites",
     srcs: [
         "build_test_suites.py",
         "optimized_targets.py",
+        "test_mapping_module_retriever.py",
+        "build_context.py",
     ],
 }
 
diff --git a/ci/build_context.py b/ci/build_context.py
new file mode 100644
index 0000000..cc48d53
--- /dev/null
+++ b/ci/build_context.py
@@ -0,0 +1,64 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Container class for build context with utility functions."""
+
+import re
+
+
+class BuildContext:
+
+  def __init__(self, build_context_dict: dict[str, any]):
+    self.enabled_build_features = set()
+    for opt in build_context_dict.get('enabledBuildFeatures', []):
+      self.enabled_build_features.add(opt.get('name'))
+    self.test_infos = set()
+    for test_info_dict in build_context_dict.get('testContext', dict()).get(
+        'testInfos', []
+    ):
+      self.test_infos.add(self.TestInfo(test_info_dict))
+
+  def build_target_used(self, target: str) -> bool:
+    return any(test.build_target_used(target) for test in self.test_infos)
+
+  class TestInfo:
+
+    _DOWNLOAD_OPTS = {
+        'test-config-only-zip',
+        'test-zip-file-filter',
+        'extra-host-shared-lib-zip',
+        'sandbox-tests-zips',
+        'additional-files-filter',
+        'cts-package-name',
+    }
+
+    def __init__(self, test_info_dict: dict[str, any]):
+      self.is_test_mapping = False
+      self.test_mapping_test_groups = set()
+      self.file_download_options = set()
+      for opt in test_info_dict.get('extraOptions', []):
+        key = opt.get('key')
+        if key == 'test-mapping-test-group':
+          self.is_test_mapping = True
+          self.test_mapping_test_groups.update(opt.get('values', set()))
+
+        if key in self._DOWNLOAD_OPTS:
+          self.file_download_options.update(opt.get('values', set()))
+
+    def build_target_used(self, target: str) -> bool:
+      # Check whether any of the regexes tests use to download artifacts
+      # reference this target by name. If any of them do, the target's
+      # outputs are needed and it must be built.
+      regex = r'\b(%s)\b' % re.escape(target)
+      return any(re.search(regex, opt) for opt in self.file_download_options)
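For reference, a minimal sketch (hand-written sample dict; the keys mirror what `BuildContext` parses above, and the values are hypothetical) of how the new wrapper is constructed and queried:

```python
# Illustrative only: exercises BuildContext with a made-up build-context dict.
from build_context import BuildContext

sample = {
    'enabledBuildFeatures': [{'name': 'optimized_build'}],
    'testContext': {
        'testInfos': [{
            'extraOptions': [{
                'key': 'additional-files-filter',
                'values': ['.*general-tests.*\\.zip'],
            }],
        }],
    },
}

context = BuildContext(sample)
print(context.enabled_build_features)               # {'optimized_build'}
# True: one of the download filters mentions the target name.
print(context.build_target_used('general-tests'))
```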
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 75dd9f2..402880c 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -24,6 +24,7 @@
 import subprocess
 import sys
 from typing import Callable
+from build_context import BuildContext
 import optimized_targets
 
 
@@ -53,18 +54,9 @@
   any output zip files needed by the build.
   """
 
-  _DOWNLOAD_OPTS = {
-      'test-config-only-zip',
-      'test-zip-file-filter',
-      'extra-host-shared-lib-zip',
-      'sandbox-tests-zips',
-      'additional-files-filter',
-      'cts-package-name',
-  }
-
   def __init__(
       self,
-      build_context: dict[str, any],
+      build_context: BuildContext,
       args: argparse.Namespace,
       target_optimizations: dict[str, optimized_targets.OptimizedBuildTarget],
   ):
@@ -74,9 +66,7 @@
 
   def create_build_plan(self):
 
-    if 'optimized_build' not in self.build_context.get(
-        'enabledBuildFeatures', []
-    ):
+    if 'optimized_build' not in self.build_context.enabled_build_features:
       return BuildPlan(set(self.args.extra_targets), set())
 
     build_targets = set()
@@ -84,7 +74,7 @@
     for target in self.args.extra_targets:
       if self._unused_target_exclusion_enabled(
           target
-      ) and not self._build_target_used(target):
+      ) and not self.build_context.build_target_used(target):
         continue
 
       target_optimizer_getter = self.target_optimizations.get(target, None)
@@ -101,59 +91,11 @@
     return BuildPlan(build_targets, packaging_functions)
 
   def _unused_target_exclusion_enabled(self, target: str) -> bool:
-    return f'{target}_unused_exclusion' in self.build_context.get(
-        'enabledBuildFeatures', []
+    return (
+        f'{target}_unused_exclusion'
+        in self.build_context.enabled_build_features
     )
 
-  def _build_target_used(self, target: str) -> bool:
-    """Determines whether this target's outputs are used by the test configurations listed in the build context."""
-    file_download_regexes = self._aggregate_file_download_regexes()
-    # For all of a targets' outputs, check if any of the regexes used by tests
-    # to download artifacts would match it. If any of them do then this target
-    # is necessary.
-    for artifact in self._get_target_potential_outputs(target):
-      for regex in file_download_regexes:
-        if re.match(regex, artifact):
-          return True
-    return False
-
-  def _get_target_potential_outputs(self, target: str) -> set[str]:
-    tests_suffix = '-tests'
-    if target.endswith('tests'):
-      tests_suffix = ''
-    # This is a list of all the potential zips output by the test suite targets.
-    # If the test downloads artifacts from any of these zips, we will be
-    # conservative and avoid skipping the tests.
-    return {
-        f'{target}.zip',
-        f'android-{target}.zip',
-        f'android-{target}-verifier.zip',
-        f'{target}{tests_suffix}_list.zip',
-        f'android-{target}{tests_suffix}_list.zip',
-        f'{target}{tests_suffix}_host-shared-libs.zip',
-        f'android-{target}{tests_suffix}_host-shared-libs.zip',
-        f'{target}{tests_suffix}_configs.zip',
-        f'android-{target}{tests_suffix}_configs.zip',
-    }
-
-  def _aggregate_file_download_regexes(self) -> set[re.Pattern]:
-    """Lists out all test config options to specify targets to download.
-
-    These come in the form of regexes.
-    """
-    all_regexes = set()
-    for test_info in self._get_test_infos():
-      for opt in test_info.get('extraOptions', []):
-        # check the known list of options for downloading files.
-        if opt.get('key') in self._DOWNLOAD_OPTS:
-          all_regexes.update(
-              re.compile(value) for value in opt.get('values', [])
-          )
-    return all_regexes
-
-  def _get_test_infos(self):
-    return self.build_context.get('testContext', dict()).get('testInfos', [])
-
 
 @dataclass(frozen=True)
 class BuildPlan:
@@ -172,7 +114,7 @@
   """
   args = parse_args(argv)
   check_required_env()
-  build_context = load_build_context()
+  build_context = BuildContext(load_build_context())
   build_planner = BuildPlanner(
       build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS
   )
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
index 25c072e..f3ff6f4 100644
--- a/ci/build_test_suites_test.py
+++ b/ci/build_test_suites_test.py
@@ -32,6 +32,7 @@
 from typing import Callable
 import unittest
 from unittest import mock
+from build_context import BuildContext
 import build_test_suites
 import ci_test_lib
 import optimized_targets
@@ -282,7 +283,7 @@
     build_planner = self.create_build_planner(
         build_targets=build_targets,
         build_context=self.create_build_context(
-            enabled_build_features={self.get_target_flag('target_1')}
+            enabled_build_features=[{'name': self.get_target_flag('target_1')}]
         ),
     )
 
@@ -297,7 +298,7 @@
     build_planner = self.create_build_planner(
         build_targets=build_targets,
         build_context=self.create_build_context(
-            enabled_build_features={self.get_target_flag('target_1')},
+            enabled_build_features=[{'name': self.get_target_flag('target_1')}]
         ),
         packaging_outputs=packaging_outputs,
     )
@@ -337,7 +338,7 @@
         build_targets={build_target},
         build_context=self.create_build_context(
             test_context=self.get_test_context(build_target),
-            enabled_build_features={'test_target_unused_exclusion'},
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
         ),
     )
 
@@ -356,7 +357,7 @@
         build_targets={build_target},
         build_context=self.create_build_context(
             test_context=test_context,
-            enabled_build_features={'test_target_unused_exclusion'},
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
         ),
     )
 
@@ -372,7 +373,26 @@
         build_targets={build_target},
         build_context=self.create_build_context(
             test_context=test_context,
-            enabled_build_features={'test_target_unused_exclusion'},
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, set())
+
+  def test_target_regex_matching_not_too_broad(self):
+    build_target = 'test_target'
+    test_context = self.get_test_context(build_target)
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': [rf'.*a{build_target}.*\.zip'],
+    }]
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=test_context,
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
         ),
     )
 
@@ -383,7 +403,7 @@
   def create_build_planner(
       self,
       build_targets: set[str],
-      build_context: dict[str, any] = None,
+      build_context: BuildContext = None,
       args: argparse.Namespace = None,
       target_optimizations: dict[
           str, optimized_targets.OptimizedBuildTarget
@@ -407,15 +427,17 @@
   def create_build_context(
       self,
       optimized_build_enabled: bool = True,
-      enabled_build_features: set[str] = set(),
+      enabled_build_features: list[dict[str, str]] = [],
       test_context: dict[str, any] = {},
-  ) -> dict[str, any]:
-    build_context = {}
-    build_context['enabledBuildFeatures'] = enabled_build_features
+  ) -> BuildContext:
+    build_context_dict = {}
+    build_context_dict['enabledBuildFeatures'] = enabled_build_features
     if optimized_build_enabled:
-      build_context['enabledBuildFeatures'].add('optimized_build')
-    build_context['testContext'] = test_context
-    return build_context
+      build_context_dict['enabledBuildFeatures'].append(
+          {'name': 'optimized_build'}
+      )
+    build_context_dict['testContext'] = test_context
+    return BuildContext(build_context_dict)
 
   def create_args(
       self, extra_build_targets: set[str] = set()
@@ -426,7 +448,7 @@
 
   def create_target_optimizations(
       self,
-      build_context: dict[str, any],
+      build_context: BuildContext,
       build_targets: set[str],
       packaging_outputs: set[str] = set(),
   ):
diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py
index 8a529c7..fddde17 100644
--- a/ci/optimized_targets.py
+++ b/ci/optimized_targets.py
@@ -14,9 +14,15 @@
 # limitations under the License.
 
 from abc import ABC
-from typing import Self
 import argparse
 import functools
+from build_context import BuildContext
+import json
+import logging
+import os
+from typing import Self
+
+import test_mapping_module_retriever
 
 
 class OptimizedBuildTarget(ABC):
@@ -30,7 +36,7 @@
   def __init__(
       self,
       target: str,
-      build_context: dict[str, any],
+      build_context: BuildContext,
       args: argparse.Namespace,
   ):
     self.target = target
@@ -38,13 +44,16 @@
     self.args = args
 
   def get_build_targets(self) -> set[str]:
-    features = self.build_context.get('enabledBuildFeatures', [])
+    features = self.build_context.enabled_build_features
     if self.get_enabled_flag() in features:
-      return self.get_build_targets_impl()
+      self.modules_to_build = self.get_build_targets_impl()
+      return self.modules_to_build
+
+    self.modules_to_build = {self.target}
     return {self.target}
 
   def package_outputs(self):
-    features = self.build_context.get('enabledBuildFeatures', [])
+    features = self.build_context.enabled_build_features
     if self.get_enabled_flag() in features:
       return self.package_outputs_impl()
 
@@ -81,6 +90,30 @@
     pass
 
 
+class ChangeInfo:
+
+  def __init__(self, change_info_file_path):
+    try:
+      with open(change_info_file_path) as change_info_file:
+        change_info_contents = json.load(change_info_file)
+    except json.decoder.JSONDecodeError:
+      logging.error(f'Failed to load CHANGE_INFO: {change_info_file_path}')
+      raise
+
+    self._change_info_contents = change_info_contents
+
+  def find_changed_files(self) -> set[str]:
+    changed_files = set()
+
+    for change in self._change_info_contents['changes']:
+      project_path = change.get('projectPath') + '/'
+
+      for revision in change.get('revisions'):
+        for file_info in revision.get('fileInfos'):
+          changed_files.add(project_path + file_info.get('path'))
+
+    return changed_files
+
 class GeneralTestsOptimizer(OptimizedBuildTarget):
   """general-tests optimizer
 
@@ -93,8 +126,55 @@
   normally built.
   """
 
+  # List of modules that are always required to be in general-tests.zip.
+  _REQUIRED_MODULES = frozenset(
+      ['cts-tradefed', 'vts-tradefed', 'compatibility-host-util']
+  )
+
+  def get_build_targets_impl(self) -> set[str]:
+    change_info_file_path = os.environ.get('CHANGE_INFO')
+    if not change_info_file_path:
+      logging.info(
+          'No CHANGE_INFO env var found, general-tests optimization disabled.'
+      )
+      return {'general-tests'}
+
+    test_infos = self.build_context.test_infos
+    test_mapping_test_groups = set()
+    for test_info in test_infos:
+      is_test_mapping = test_info.is_test_mapping
+      current_test_mapping_test_groups = test_info.test_mapping_test_groups
+      uses_general_tests = test_info.build_target_used('general-tests')
+
+      if uses_general_tests and not is_test_mapping:
+        logging.info(
+            'Test uses general-tests.zip but is not test-mapping, general-tests'
+            ' optimization disabled.'
+        )
+        return {'general-tests'}
+
+      if is_test_mapping:
+        test_mapping_test_groups.update(current_test_mapping_test_groups)
+
+    change_info = ChangeInfo(change_info_file_path)
+    changed_files = change_info.find_changed_files()
+
+    test_mappings = test_mapping_module_retriever.GetTestMappings(
+        changed_files, set()
+    )
+
+    modules_to_build = set(self._REQUIRED_MODULES)
+
+    modules_to_build.update(
+        test_mapping_module_retriever.FindAffectedModules(
+            test_mappings, changed_files, test_mapping_test_groups
+        )
+    )
+
+    return modules_to_build
+
   def get_enabled_flag(self):
-    return 'general-tests-optimized'
+    return 'general_tests_optimized'
 
   @classmethod
   def get_optimized_targets(cls) -> dict[str, OptimizedBuildTarget]:
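The `CHANGE_INFO` file read by `ChangeInfo` is expected to follow roughly the structure below; this is a hypothetical round-trip that mirrors the keys the class accesses (`changes` -> `projectPath`, `revisions` -> `fileInfos` -> `path`), not a documented schema:

```python
# Illustrative only: writes a minimal change-info JSON and reads it back.
import json
import tempfile

from optimized_targets import ChangeInfo  # assumes the ci/ module layout

change_info = {
    'changes': [{
        'projectPath': 'build/make',
        'revisions': [{
            'fileInfos': [{'path': 'ci/optimized_targets.py'}],
        }],
    }]
}

with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as f:
  json.dump(change_info, f)
  path = f.name

print(ChangeInfo(path).find_changed_files())
# {'build/make/ci/optimized_targets.py'}
```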
diff --git a/ci/optimized_targets_test.py b/ci/optimized_targets_test.py
new file mode 100644
index 0000000..919c193
--- /dev/null
+++ b/ci/optimized_targets_test.py
@@ -0,0 +1,206 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for optimized_targets.py"""
+
+import json
+import logging
+import os
+import pathlib
+import re
+import unittest
+from unittest import mock
+import optimized_targets
+from build_context import BuildContext
+from pyfakefs import fake_filesystem_unittest
+
+
+class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase):
+
+  def setUp(self):
+    self.setUpPyfakefs()
+
+    os_environ_patcher = mock.patch.dict('os.environ', {})
+    self.addCleanup(os_environ_patcher.stop)
+    self.mock_os_environ = os_environ_patcher.start()
+
+    self._setup_working_build_env()
+    self._write_change_info_file()
+    test_mapping_dir = pathlib.Path('/project/path/file/path')
+    test_mapping_dir.mkdir(parents=True)
+    self._write_test_mapping_file()
+
+  def _setup_working_build_env(self):
+    self.change_info_file = pathlib.Path('/tmp/change_info')
+
+    self.mock_os_environ.update({
+        'CHANGE_INFO': str(self.change_info_file),
+    })
+
+  def test_general_tests_optimized(self):
+    optimizer = self._create_general_tests_optimizer()
+
+    build_targets = optimizer.get_build_targets()
+
+    expected_build_targets = set(
+        optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES
+    )
+    expected_build_targets.add('test_mapping_module')
+
+    self.assertSetEqual(build_targets, expected_build_targets)
+
+  def test_no_change_info_no_optimization(self):
+    del os.environ['CHANGE_INFO']
+
+    optimizer = self._create_general_tests_optimizer()
+
+    build_targets = optimizer.get_build_targets()
+
+    self.assertSetEqual(build_targets, {'general-tests'})
+
+  def test_mapping_groups_unused_module_not_built(self):
+    test_context = self._create_test_context()
+    test_context['testInfos'][0]['extraOptions'] = [
+        {
+            'key': 'additional-files-filter',
+            'values': ['general-tests.zip'],
+        },
+        {
+            'key': 'test-mapping-test-group',
+            'values': ['unused-test-mapping-group'],
+        },
+    ]
+    optimizer = self._create_general_tests_optimizer(
+        build_context=self._create_build_context(test_context=test_context)
+    )
+
+    build_targets = optimizer.get_build_targets()
+
+    expected_build_targets = set(
+        optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES
+    )
+    self.assertSetEqual(build_targets, expected_build_targets)
+
+  def test_general_tests_used_by_non_test_mapping_test_no_optimization(self):
+    test_context = self._create_test_context()
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': ['general-tests.zip'],
+    }]
+    optimizer = self._create_general_tests_optimizer(
+        build_context=self._create_build_context(test_context=test_context)
+    )
+
+    build_targets = optimizer.get_build_targets()
+
+    self.assertSetEqual(build_targets, {'general-tests'})
+
+  def test_malformed_change_info_raises(self):
+    with open(self.change_info_file, 'w') as f:
+      f.write('not change info')
+
+    optimizer = self._create_general_tests_optimizer()
+
+    with self.assertRaises(json.decoder.JSONDecodeError):
+      build_targets = optimizer.get_build_targets()
+
+  def test_malformed_test_mapping_raises(self):
+    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+      f.write('not test mapping')
+
+    optimizer = self._create_general_tests_optimizer()
+
+    with self.assertRaises(json.decoder.JSONDecodeError):
+      build_targets = optimizer.get_build_targets()
+
+  def _write_change_info_file(self):
+    change_info_contents = {
+        'changes': [{
+            'projectPath': '/project/path',
+            'revisions': [{
+                'fileInfos': [{
+                    'path': 'file/path/file_name',
+                }],
+            }],
+        }]
+    }
+
+    with open(self.change_info_file, 'w') as f:
+      json.dump(change_info_contents, f)
+
+  def _write_test_mapping_file(self):
+    test_mapping_contents = {
+        'test-mapping-group': [
+            {
+                'name': 'test_mapping_module',
+            },
+        ],
+    }
+
+    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+      json.dump(test_mapping_contents, f)
+
+  def _create_general_tests_optimizer(
+      self, build_context: BuildContext = None
+  ):
+    if not build_context:
+      build_context = self._create_build_context()
+    return optimized_targets.GeneralTestsOptimizer(
+        'general-tests', build_context, None
+    )
+
+  def _create_build_context(
+      self,
+      general_tests_optimized: bool = True,
+      test_context: dict[str, any] = None,
+  ) -> BuildContext:
+    if not test_context:
+      test_context = self._create_test_context()
+    build_context_dict = {}
+    build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}]
+    if general_tests_optimized:
+      build_context_dict['enabledBuildFeatures'].append({'name': 'general_tests_optimized'})
+    build_context_dict['testContext'] = test_context
+    return BuildContext(build_context_dict)
+
+  def _create_test_context(self):
+    return {
+        'testInfos': [
+            {
+                'name': 'atp_test',
+                'target': 'test_target',
+                'branch': 'branch',
+                'extraOptions': [
+                    {
+                        'key': 'additional-files-filter',
+                        'values': ['general-tests.zip'],
+                    },
+                    {
+                        'key': 'test-mapping-test-group',
+                        'values': ['test-mapping-group'],
+                    },
+                ],
+                'command': '/tf/command',
+                'extraBuildTargets': [
+                    'extra_build_target',
+                ],
+            },
+        ],
+    }
+
+
+if __name__ == '__main__':
+  # Setup logging to be silent so unit tests can pass through TF.
+  logging.disable(logging.ERROR)
+  unittest.main()
diff --git a/ci/test_mapping_module_retriever.py b/ci/test_mapping_module_retriever.py
index d2c13c0..c93cdd5 100644
--- a/ci/test_mapping_module_retriever.py
+++ b/ci/test_mapping_module_retriever.py
@@ -17,11 +17,13 @@
 modules are needed to build for the given list of changed files.
 TODO(lucafarsi): Deduplicate from artifact_helper.py
 """
+# TODO(lucafarsi): Share this logic with the original logic in
+# test_mapping_test_retriever.py
 
-from typing import Any, Dict, Set, Text
 import json
 import os
 import re
+from typing import Any
 
 # Regex to extra test name from the path of test config file.
 TEST_NAME_REGEX = r'(?:^|.*/)([^/]+)\.config'
@@ -39,7 +41,7 @@
 _COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?')
 
 
-def FilterComments(test_mapping_file: Text) -> Text:
+def FilterComments(test_mapping_file: str) -> str:
   """Remove comments in TEST_MAPPING file to valid format.
 
   Only '//' is regarded as comments.
@@ -52,8 +54,8 @@
   """
   return re.sub(_COMMENTS_RE, r'\1', test_mapping_file)
 
-def GetTestMappings(paths: Set[Text],
-                    checked_paths: Set[Text]) -> Dict[Text, Dict[Text, Any]]:
+def GetTestMappings(paths: set[str],
+                    checked_paths: set[str]) -> dict[str, dict[str, Any]]:
   """Get the affected TEST_MAPPING files.
 
   TEST_MAPPING files in source code are packaged into a build artifact
@@ -123,3 +125,68 @@
       pass
 
   return test_mappings
+
+
+def FindAffectedModules(
+    test_mappings: dict[str, Any],
+    changed_files: set[str],
+    test_mapping_test_groups: set[str],
+) -> set[str]:
+  """Find affected test modules.
+
+  Find the set of test modules that a test mapping run would execute based on the given test mappings, changed files, and test mapping test groups.
+
+  Args:
+    test_mappings: A dict of test mappings returned by GetTestMappings, in the following format:
+      {
+        'test_mapping_file_path': {
+          'group_name': [
+            {'name': 'module_name'},
+          ],
+        }
+      }
+    changed_files: A set of files changed for the given run.
+    test_mapping_test_groups: A set of test mapping test groups that are being considered for the given run.
+
+  Returns:
+    A set of test module names which would run for a test mapping test run with the given parameters.
+  """
+
+  modules = set()
+
+  for test_mapping in test_mappings.values():
+    for group_name, group in test_mapping.items():
+      # If a module is not in any of the test mapping groups being tested,
+      # skip it.
+      if group_name not in test_mapping_test_groups:
+        continue
+
+      for entry in group:
+        module_name = entry.get('name')
+
+        if not module_name:
+          continue
+
+        file_patterns = entry.get('file_patterns')
+        if not file_patterns:
+          modules.add(module_name)
+          continue
+
+        if MatchesFilePatterns(file_patterns, changed_files):
+          modules.add(module_name)
+
+  return modules
+
+def MatchesFilePatterns(
+    file_patterns: list[str], changed_files: set[str]
+) -> bool:
+  """Checks if any of the changed files match any of the file patterns.
+
+  Args:
+    file_patterns: A list of file patterns to match against.
+    changed_files: A set of files to check against the file patterns.
+
+  Returns:
+    True if any of the changed files match any of the file patterns.
+  """
+  return any(re.search(pattern, "|".join(changed_files)) for pattern in file_patterns)
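A minimal usage sketch of the new retriever helpers, with hand-written TEST_MAPPING data rather than the zip that `GetTestMappings` normally reads (and assuming `MatchesFilePatterns` is the helper invoked by `FindAffectedModules`):

```python
# Illustrative only: hypothetical test-mapping data and changed files.
import test_mapping_module_retriever as retriever

test_mappings = {
    'build/make/TEST_MAPPING': {
        'presubmit': [
            {'name': 'always_run_module'},
            {'name': 'ci_only_module', 'file_patterns': ['ci/.*\\.py']},
        ],
        'postsubmit': [
            {'name': 'ignored_module'},
        ],
    },
}
changed_files = {'build/make/ci/optimized_targets.py'}

modules = retriever.FindAffectedModules(
    test_mappings, changed_files, {'presubmit'}
)
print(modules)  # {'always_run_module', 'ci_only_module'} (set order may vary)
```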
diff --git a/cogsetup.sh b/cogsetup.sh
index ef1485d..5c64a06 100644
--- a/cogsetup.sh
+++ b/cogsetup.sh
@@ -57,7 +57,7 @@
   fi
   function repo {
     if [[ "${PWD}" == /google/cog/* ]]; then
-      echo "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces."
+      echo -e "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces."
       return 1
     fi
     ${ORIG_REPO_PATH} "$@"
diff --git a/core/Makefile b/core/Makefile
index 96588e3..bf2d48a 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -717,7 +717,7 @@
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
-  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT)),system,modules.load,,$(kmd))) \
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,$(GENERIC_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd)))))
@@ -1267,9 +1267,8 @@
 
 endif
 
-
+# The value of RAMDISK_NODE_LIST is defined in system/core/rootdir/Android.bp.
 # This file contains /dev nodes description added to the generic ramdisk
-RAMDISK_NODE_LIST := $(PRODUCT_OUT)/ramdisk_node_list
 
 # We just build this directly to the install location.
 INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
@@ -3417,8 +3416,10 @@
 # system image
 
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
+ifdef BUILDING_SYSTEM_IMAGE
 INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
     $(ALL_DEFAULT_INSTALLED_MODULES)))
+endif
 
 # Create symlink /system/vendor to /vendor if necessary.
 ifdef BOARD_USES_VENDORIMAGE
@@ -3675,10 +3676,10 @@
 # -----------------------------------------------------------------
 # data partition image
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT_DATA)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
+ifdef BUILDING_USERDATA_IMAGE
 INTERNAL_USERDATAIMAGE_FILES := \
     $(filter $(TARGET_OUT_DATA)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
 
-ifdef BUILDING_USERDATA_IMAGE
 userdataimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,userdata)
 BUILT_USERDATAIMAGE_TARGET := $(PRODUCT_OUT)/userdata.img
@@ -5136,6 +5137,7 @@
   $(TARGET_OUT)/apex/% \
   $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_ODM)/apex/% \
   $(TARGET_OUT_PRODUCT)/apex/% \
 
 apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
@@ -5188,6 +5190,7 @@
   $(TARGET_OUT_PRODUCT)/apex/% \
   $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_ODM)/apex/% \
 
 apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
 
@@ -5206,6 +5209,7 @@
 	   --system_ext_path $(TARGET_OUT_SYSTEM_EXT) \
 	   --product_path $(TARGET_OUT_PRODUCT) \
 	   --vendor_path $(TARGET_OUT_VENDOR) \
+	   --odm_path $(TARGET_OUT_ODM) \
 	   --apex_path $(APEX_OUT)
 
 apex_files :=
@@ -6808,14 +6812,22 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif # BOARD_PREBUILT_DTBOIMAGE
-ifdef BUILT_KERNEL_16K_TARGET
+ifdef BOARD_KERNEL_PATH_16K
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(BUILT_KERNEL_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
-endif # BUILT_KERNEL_16K_TARGET
-ifdef BUILT_RAMDISK_16K_TARGET
+endif # BOARD_KERNEL_PATH_16K
+ifdef BOARD_KERNEL_MODULES_16K
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(BUILT_RAMDISK_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
-endif # BUILT_RAMDISK_16K_TARGET
+endif # BOARD_KERNEL_MODULES_16K
+ifdef BUILT_BOOTIMAGE_16K_TARGET
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(BUILT_BOOTIMAGE_16K_TARGET) $(zip_root)/IMAGES/
+endif # BUILT_BOOTIMAGE_16K_TARGET
+ifdef INSTALLED_DTBOIMAGE_16KB_TARGET
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_DTBOIMAGE_16KB_TARGET) $(zip_root)/IMAGES/
+endif # INSTALLED_DTBOIMAGE_16KB_TARGET
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
@@ -6883,6 +6895,33 @@
 	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/INIT_BOOT/pagesize
 endif # BOARD_KERNEL_PAGESIZE
 endif # BUILDING_INIT_BOOT_IMAGE
+ifdef BOARD_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_EROFS_COMPRESS_HINTS) $(zip_root)/META/erofs_default_compress_hints.txt
+endif
+ifdef BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_erofs_compress_hints.txt
+endif
+ifdef BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_ext_erofs_compress_hints.txt
+endif
+ifdef BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/product_erofs_compress_hints.txt
+endif
+ifdef BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_erofs_compress_hints.txt
+endif
+ifdef BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_erofs_compress_hints.txt
+endif
+ifdef BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_dlkm_erofs_compress_hints.txt
+endif
+ifdef BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_dlkm_erofs_compress_hints.txt
+endif
+ifdef BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_dlkm_erofs_compress_hints.txt
+endif
 ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
 	$(call fs_config,$(zip_root)/VENDOR_BOOT/RAMDISK,) > $(zip_root)/META/vendor_boot_filesystem_config.txt
 endif
@@ -7859,7 +7898,7 @@
 $(call dist-for-goals,haiku-presubmit,$(SOONG_PRESUBMIT_FUZZ_PACKAGING_ARCH_MODULES))
 
 # -----------------------------------------------------------------
-# Extract platform fonts used in Layoutlib
+# Extract additional data files used in Layoutlib
 include $(BUILD_SYSTEM)/layoutlib_data.mk
 
 # -----------------------------------------------------------------
@@ -7885,10 +7924,11 @@
 $(PACKED_IMAGE_ARCHIVE_TARGET): $(PACK_IMAGE_TARGET) | $(GZIP)
 	$(GZIP) -fk $(PACK_IMAGE_TARGET)
 
-droidcore-unbundled: $(PACKED_IMAGE_ARCHIVE_TARGET)
-
 $(call dist-for-goals,dist_files,$(PACKED_IMAGE_ARCHIVE_TARGET))
 
+.PHONY: pack-image
+pack-image: $(PACK_IMAGE_TARGET)
+
 endif # PACK_DESKTOP_FILESYSTEM_IMAGES
 
 # -----------------------------------------------------------------
@@ -7904,13 +7944,34 @@
 $(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET): $(PACK_RECOVERY_IMAGE_TARGET) | $(GZIP)
 	$(GZIP) -fk $(PACK_RECOVERY_IMAGE_TARGET)
 
-droidcore-unbundled: $(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET)
-
 $(call dist-for-goals,dist_files,$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET))
 
+.PHONY: pack-recovery-image
+pack-recovery-image: $(PACK_RECOVERY_IMAGE_TARGET)
+
 endif # PACK_DESKTOP_RECOVERY_IMAGE
 
 # -----------------------------------------------------------------
+# Desktop pack update image hook.
+ifneq (,$(strip $(PACK_DESKTOP_UPDATE_IMAGE)))
+PACK_UPDATE_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_update_image.bin
+
+$(PACK_UPDATE_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive --update
+
+PACKED_UPDATE_IMAGE_ARCHIVE_TARGET := $(PACK_UPDATE_IMAGE_TARGET).gz
+
+$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET): $(PACK_UPDATE_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_UPDATE_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-update-image
+pack-update-image: $(PACK_UPDATE_IMAGE_TARGET)
+
+endif # PACK_DESKTOP_UPDATE_IMAGE
+
+# -----------------------------------------------------------------
 # OS Licensing
 
 include $(BUILD_SYSTEM)/os_licensing.mk
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 915f55f..5fc8fd4 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -71,11 +71,6 @@
 endif
 endif
 
-# TODO(b/308187800): some internal modules set `prefer` to true on the prebuilt apex module,
-# and set that to false when `ANDROID.module_build_from_source` is true.
-# Set this soong config variable to true for now, and cleanup `prefer` as part of b/308187800
-$(call add_soong_config_var_value,ANDROID,module_build_from_source,true)
-
 # Enable SystemUI optimizations by default unless explicitly set.
 SYSTEMUI_OPTIMIZE_JAVA ?= true
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 1135003..5363e0f 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -340,7 +340,7 @@
 
 ifneq (,$(LOCAL_SOONG_INSTALLED_MODULE))
   ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-    $(call pretty-error, LOCAL_SOONG_INSTALLED_MODULE can only be used from $(SOONG_ANDROID_MK))
+    $(call pretty-error, LOCAL_MODULE_MAKEFILE can only be used from $(SOONG_ANDROID_MK))
   endif
   # Use the install path requested by Soong.
   LOCAL_INSTALLED_MODULE := $(LOCAL_SOONG_INSTALLED_MODULE)
diff --git a/core/board_config.mk b/core/board_config.mk
index d3f0493..5606964 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -237,6 +237,7 @@
   .KATI_READONLY := TARGET_DEVICE_DIR
 endif
 
+$(call dump-phase-start,BOARD,,,, build/make/core/board_config.mk)
 ifndef RBC_PRODUCT_CONFIG
 include $(board_config_mk)
 else
@@ -261,6 +262,7 @@
 
   include $(OUT_DIR)/rbc/rbc_board_config_results.mk
 endif
+$(call dump-phase-end, build/make/core/board_config.mk)
 
 ifneq (,$(and $(TARGET_ARCH),$(TARGET_ARCH_SUITE)))
   $(error $(board_config_mk) erroneously sets both TARGET_ARCH and TARGET_ARCH_SUITE)
diff --git a/core/config.mk b/core/config.mk
index bd905dc..a294223 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -811,6 +811,12 @@
   BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
 endif
 
+ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),)
+  ifneq ($(NEED_AIDL_NDK_PLATFORM_BACKEND),)
+    $(error Must not set NEED_AIDL_NDK_PLATFORM_BACKEND, but it is set to: $(NEED_AIDL_NDK_PLATFORM_BACKEND). Support will be removed.)
+  endif
+endif
+
 # Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
 # devices if unset.
 ifndef BOARD_SYSTEMSDK_VERSIONS
@@ -1254,7 +1260,19 @@
 TARGET_SYSTEM_EXT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop)
 endif
 
-.KATI_READONLY += TARGET_SYSTEM_PROP TARGET_SYSTEM_EXT_PROP
+ifeq ($(TARGET_PRODUCT_PROP),)
+TARGET_PRODUCT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/product.prop)
+endif
+
+ifeq ($(TARGET_ODM_PROP),)
+TARGET_ODM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/odm.prop)
+endif
+
+.KATI_READONLY := \
+    TARGET_SYSTEM_PROP \
+    TARGET_SYSTEM_EXT_PROP \
+    TARGET_PRODUCT_PROP \
+    TARGET_ODM_PROP \
 
 include $(BUILD_SYSTEM)/sysprop_config.mk
 
@@ -1262,8 +1280,15 @@
 # consistency with those defined in BoardConfig.mk files.
 include $(BUILD_SYSTEM)/android_soong_config_vars.mk
 
-SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables
-SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).extra.variables
+# EMMA_INSTRUMENT is set to true when coverage is enabled. Create a suffix to
+# differentiate the coverage version of the ninja files. This saves about 5
+# minutes of build time otherwise spent regenerating ninja.
+ifeq (true,$(EMMA_INSTRUMENT))
+COVERAGE_SUFFIX := .coverage
+endif
+
+SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).variables
+SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).extra.variables
 
 ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ninja_config.mk
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index d51de33..f1e9fb5 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -1,4 +1,4 @@
-DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config
+DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt${COVERAGE_SUFFIX}.config
 
 ENABLE_PREOPT := true
 ENABLE_PREOPT_BOOT_IMAGES := true
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
index 640fe10..eb4c822 100644
--- a/core/dumpconfig.mk
+++ b/core/dumpconfig.mk
@@ -56,7 +56,7 @@
 
 # Escape quotation marks for CSV, and wraps in quotation marks.
 define escape-for-csv
-"$(subst ","",$1)"
+"$(subst ","",$(subst $(newline), ,$1))"
 endef
 
 # Args:
@@ -68,7 +68,7 @@
 # Args:
 #   $(1): include stack
 define dump-import-done
-$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1))))
+$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1)),$(filter-out $(1),$(MAKEFILE_LIST))))
 endef
 
 # Args:
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
index 46393ac..4b6eea7 100644
--- a/core/java_prebuilt_internal.mk
+++ b/core/java_prebuilt_internal.mk
@@ -172,6 +172,12 @@
 endif
 endif
 
+# transitive-res-packages is only populated for Soong modules for now, but needs
+# to exist so that other Make modules can depend on it.  Create an empty file.
+my_transitive_res_packages := $(intermediates.COMMON)/transitive-res-packages
+$(my_transitive_res_packages):
+	touch $@
+
 my_res_package := $(intermediates.COMMON)/package-res.apk
 
 # We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
diff --git a/core/layoutlib_data.mk b/core/layoutlib_data.mk
index e45f7ef..e420a00 100644
--- a/core/layoutlib_data.mk
+++ b/core/layoutlib_data.mk
@@ -66,11 +66,19 @@
 # Resource files from frameworks/base/core/res/res
 LAYOUTLIB_RES := $(call intermediates-dir-for,PACKAGING,layoutlib-res,HOST,COMMON)
 LAYOUTLIB_RES_FILES := $(shell find frameworks/base/core/res/res -type f -not -path 'frameworks/base/core/res/res/values-m[nc]c*' | sort)
-$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES)
+EMULATED_OVERLAYS_FILES := $(shell find frameworks/base/packages/overlays/*/res/ | sort)
+DEVICE_OVERLAYS_FILES := $(shell find device/generic/goldfish/phone/overlay/frameworks/base/packages/overlays/*/AndroidOverlay/res/ | sort)
+$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES)
 	rm -rf $@
-	echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist.txt
-	$(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist.txt -o $(LAYOUTLIB_RES)/temp.zip
-	rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp.zip
+	echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist_res.txt
+	$(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist_res.txt -o $(LAYOUTLIB_RES)/temp_res.zip
+	echo $(EMULATED_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt
+	$(SOONG_ZIP) -C frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt -o $(LAYOUTLIB_RES)/temp_emulated_overlays.zip
+	echo $(DEVICE_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_device_overlays.txt
+	$(SOONG_ZIP) -C device/generic/goldfish/phone/overlay/frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_device_overlays.txt -o $(LAYOUTLIB_RES)/temp_device_overlays.zip
+	rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_res.zip
+	unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_emulated_overlays.zip
+	unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_device_overlays.zip
 	rm -rf $(LAYOUTLIB_RES)/compiled && mkdir $(LAYOUTLIB_RES)/compiled && $(HOST_OUT_EXECUTABLES)/aapt2 compile $(LAYOUTLIB_RES)/data/res/**/*.9.png -o $(LAYOUTLIB_RES)/compiled
 	printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.google.android.layoutlib" />' > $(LAYOUTLIB_RES)/AndroidManifest.xml
 	$(HOST_OUT_EXECUTABLES)/aapt2 link -R $(LAYOUTLIB_RES)/compiled/* -o $(LAYOUTLIB_RES)/compiled.apk --manifest $(LAYOUTLIB_RES)/AndroidManifest.xml
@@ -78,7 +86,7 @@
 	for f in $(LAYOUTLIB_RES)/compiled_apk/res/*; do mv "$$f" "$${f/-v4/}";done
 	for f in $(LAYOUTLIB_RES)/compiled_apk/res/**/*.9.png; do mv "$$f" "$${f/.9.png/.compiled.9.png}";done
 	cp -r $(LAYOUTLIB_RES)/compiled_apk/res $(LAYOUTLIB_RES)/data
-	$(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/res -o $@
+	$(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/ -o $@
 
 $(call dist-for-goals,layoutlib,$(LAYOUTLIB_RES)/layoutlib-res.zip:layoutlib_native/res.zip)
 
@@ -132,16 +140,26 @@
 	  echo $(_path),,,,,,Y,$f,,, >> $@; \
 	)
 
+	$(foreach f,$(EMULATED_OVERLAYS_FILES), \
+	  $(eval _path := $(subst frameworks/base/packages,data,$f)) \
+	  echo $(_path),,,,,,Y,$f,,, >> $@; \
+	)
+
+	$(foreach f,$(DEVICE_OVERLAYS_FILES), \
+	  $(eval _path := $(subst device/generic/goldfish/phone/overlay/frameworks/base/packages,data,$f)) \
+	  echo $(_path),,,,,,Y,$f,,, >> $@; \
+	)
+
 .PHONY: layoutlib-sbom
 layoutlib-sbom: $(LAYOUTLIB_SBOM)/layoutlib.spdx.json
-$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES)
+$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES)
 	rm -rf $@
 	$(GEN_SBOM) --output_file $@ --metadata $(LAYOUTLIB_SBOM)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --module_name "layoutlib" --json
 
 $(call dist-for-goals,layoutlib,$(LAYOUTLIB_SBOM)/layoutlib.spdx.json:layoutlib_native/sbom/layoutlib.spdx.json)
 
 # Generate SBOM of framework_res.jar that is created in release_layoutlib.sh.
-# The generated SBOM contains placeholders for release_layotlib.sh to substitute, and the placeholders include:
+# The generated SBOM contains placeholders for release_layoutlib.sh to substitute, and the placeholders include:
 # document name, document namespace, document creation info, organization and SHA1 value of framework_res.jar.
 GEN_SBOM_FRAMEWORK_RES := $(HOST_OUT_EXECUTABLES)/generate-sbom-framework_res
 .PHONY: layoutlib-framework_res-sbom
diff --git a/core/main.mk b/core/main.mk
index 27ba526..5c280da 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -31,8 +31,7 @@
 .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),SOONG_CONFIG_$(n))
 .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),$(foreach k,$(SOONG_CONFIG_$(n)),SOONG_CONFIG_$(n)_$(k)))
 
-include $(SOONG_MAKEVARS_MK)
-
+include $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
 YACC :=$= $(BISON) -d
 
 include $(BUILD_SYSTEM)/clang/config.mk
@@ -276,12 +275,15 @@
 # Include all of the makefiles in the system
 #
 
-subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT).mk $(SOONG_ANDROID_MK)
+subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk $(SOONG_ANDROID_MK)
+
 # Android.mk files are only used on Linux builds, Mac only supports Android.bp
 ifeq ($(HOST_OS),linux)
   subdir_makefiles += $(file <$(OUT_DIR)/.module_paths/Android.mk.list)
 endif
-subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT).mk
+
+subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
+
 subdir_makefiles_total := $(words int $(subdir_makefiles) post finish)
 .KATI_READONLY := subdir_makefiles_total
 
@@ -1858,6 +1860,11 @@
 filter_out_files += $(PRODUCT_OUT)/recovery/%
 endif
 
+# userdata.img
+ifndef BUILDING_USERDATA_IMAGE
+filter_out_files += $(PRODUCT_OUT)/data/%
+endif
+
 installed_files := $(sort $(filter-out $(filter_out_files),$(filter $(PRODUCT_OUT)/%,$(modules_to_install))))
 else
 installed_files := $(apps_only_installed_files)
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 59e2c95..20344f4 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -382,6 +382,11 @@
     _soong_config_namespace(g, nsname)
     g[_soong_config_namespaces_key][nsname][var]=_mkstrip(value)
 
+def _soong_config_set_bool(g, nsname, var, value):
+    """Assigns the value to the variable in the namespace, and marks it as a boolean."""
+    _soong_config_set(g, nsname, var, _filter("true", value))
+    g["SOONG_CONFIG_TYPE_%s_%s" % (nsname, var)] = "bool"
+
 def _soong_config_append(g, nsname, var, value):
     """Appends to the value of the variable in the namespace."""
     _soong_config_namespace(g, nsname)
@@ -861,6 +866,7 @@
     soong_config_namespace = _soong_config_namespace,
     soong_config_append = _soong_config_append,
     soong_config_set = _soong_config_set,
+    soong_config_set_bool = _soong_config_set_bool,
     soong_config_get = _soong_config_get,
     abspath = _abspath,
     add_product_dex_preopt_module_config = _add_product_dex_preopt_module_config,
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 5fca203..2e7cd9f 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -1,5 +1,5 @@
-SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk
-SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
+SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
+SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
 
 include $(BUILD_SYSTEM)/art_config.mk
 include $(BUILD_SYSTEM)/dex_preopt_config.mk
@@ -26,7 +26,7 @@
 $(shell mkdir -p $(dir $(SOONG_VARIABLES)))
 $(call json_start)
 
-$(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT))
+$(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT)$(COVERAGE_SUFFIX))
 
 $(call add_json_str,  BuildId,                           $(BUILD_ID))
 $(call add_json_str,  BuildFingerprintFile,              build_fingerprint.txt)
@@ -206,6 +206,7 @@
 $(call add_json_str,  BoardSepolicyVers,                 $(BOARD_SEPOLICY_VERS))
 $(call add_json_str,  SystemExtSepolicyPrebuiltApiDir,   $(BOARD_SYSTEM_EXT_PREBUILT_DIR))
 $(call add_json_str,  ProductSepolicyPrebuiltApiDir,     $(BOARD_PRODUCT_PREBUILT_DIR))
+$(call add_json_str,  BoardPlatform,                     $(TARGET_BOARD_PLATFORM))
 
 $(call add_json_str,  PlatformSepolicyVersion,           $(PLATFORM_SEPOLICY_VERSION))
 $(call add_json_list, PlatformSepolicyCompatVersions,    $(PLATFORM_SEPOLICY_COMPAT_VERSIONS))
@@ -342,6 +343,8 @@
 
 $(call add_json_list, SystemPropFiles, $(TARGET_SYSTEM_PROP))
 $(call add_json_list, SystemExtPropFiles, $(TARGET_SYSTEM_EXT_PROP))
+$(call add_json_list, ProductPropFiles, $(TARGET_PRODUCT_PROP))
+$(call add_json_list, OdmPropFiles, $(TARGET_ODM_PROP))
 
 # Do not set ArtTargetIncludeDebugBuild into any value if PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD is not set,
 # to have the same behavior from runtime_libart.mk.
diff --git a/core/soong_extra_config.mk b/core/soong_extra_config.mk
index 76da0d7..00b5c0f 100644
--- a/core/soong_extra_config.mk
+++ b/core/soong_extra_config.mk
@@ -90,6 +90,10 @@
 
 $(call add_json_bool, ProductNotDebuggableInUserdebug, $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG))
 
+$(call add_json_bool, UsesProductImage, $(filter true,$(BOARD_USES_PRODUCTIMAGE)))
+
+$(call add_json_bool, TargetBoots16K, $(filter true,$(TARGET_BOOTS_16K)))
+
 $(call json_end)
 
 $(shell mkdir -p $(dir $(SOONG_EXTRA_VARIABLES)))
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 6d65e19..dc6f2c4 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -266,83 +266,18 @@
 # -----------------------------------------------------------------
 # product/etc/build.prop
 #
-
-_prop_files_ := $(if $(TARGET_PRODUCT_PROP),\
-    $(TARGET_PRODUCT_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/product.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_PRODUCT_PROPERTIES \
-    PRODUCT_PRODUCT_PROPERTIES
+# product/etc/build.prop is built by Soong. See product-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/etc/build.prop
 
-ifdef PRODUCT_OEM_PROPERTIES
-import_oem_prop := $(call intermediates-dir-for,ETC,import_oem_prop)/oem.prop
-
-$(import_oem_prop):
-	$(hide) echo "####################################" >> $@; \
-	        echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
-	        echo "####################################" >> $@;
-	$(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \
-	    echo "import /oem/oem.prop $(prop)" >> $@;)
-
-_footers_ := $(import_oem_prop)
-else
-_footers_ :=
-endif
-
-# Skip common /product properties generation if device released before R and
-# has no product partition. This is the first part of the check.
-ifeq ($(call math_lt,$(if $(PRODUCT_SHIPPING_API_LEVEL),$(PRODUCT_SHIPPING_API_LEVEL),30),30), true)
-  _skip_common_properties := true
-endif
-
-# The second part of the check - always generate common properties for the
-# devices with product partition regardless of shipping level.
-ifneq ($(BOARD_USES_PRODUCTIMAGE),)
-  _skip_common_properties :=
-endif
-
-$(eval $(call build-properties,\
-    product,\
-    $(INSTALLED_PRODUCT_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(_footers_),\
-    $(_skip_common_properties)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_PRODUCT_BUILD_PROP_TARGET)))
-
-_skip_common_properties :=
-
 # ----------------------------------------------------------------
 # odm/etc/build.prop
 #
-_prop_files_ := $(if $(TARGET_ODM_PROP),\
-    $(TARGET_ODM_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/odm.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_ODM_PROPERTIES \
-    PRODUCT_ODM_PROPERTIES
+# odm/etc/build.prop is built by Soong. See odm-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_ODM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM)/etc/build.prop
-$(eval $(call build-properties,\
-    odm,\
-    $(INSTALLED_ODM_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_ODM_BUILD_PROP_TARGET)))
 
 # ----------------------------------------------------------------
 # vendor_dlkm/etc/build.prop
@@ -395,7 +330,7 @@
 # -----------------------------------------------------------------
 # system_ext/etc/build.prop
 #
-# system_ext/build.prop is built by Soong. See system-build.prop module in
+# system_ext/etc/build.prop is built by Soong. See system-build.prop module in
 # build/soong/Android.bp.
 
 INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_EXT)/etc/build.prop
diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk
index 543b86b..6906611 100644
--- a/core/sysprop_config.mk
+++ b/core/sysprop_config.mk
@@ -16,24 +16,8 @@
 _additional_prop_var_names :=
 
 $(KATI_obsolete_var ADDITIONAL_SYSTEM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
-
-# Add the system server compiler filter if they are specified for the product.
-ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
-ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
-endif
-
-# Add the 16K developer option if it is defined for the product.
-ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true
-else
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false
-endif
-
-ifeq ($(TARGET_BOOTS_16K),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=16384
-else
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=4096
-endif
+$(KATI_obsolete_var ADDITIONAL_ODM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
+$(KATI_obsolete_var ADDITIONAL_PRODUCT_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
 
 # Add cpu properties for bionic and ART.
 ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH)
@@ -146,35 +130,16 @@
     ro.build.ab_update=$(AB_OTA_UPDATER)
 endif
 
-ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)
-
 ifeq ($(AB_OTA_UPDATER),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 endif
 
-# Set this property for VTS to skip large page size tests on unsupported devices.
-ADDITIONAL_PRODUCT_PROPERTIES += \
-    ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED)
-
-ifeq ($(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.no_bionic_page_size_macro=true
-endif
-
 user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
 
 config_enable_uffd_gc := \
   $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
 
-# This is a temporary system property that controls the ART module. The plan is
-# to remove it by Aug 2025, at which time Mainline updates of the ART module
-# will ignore it as well.
-# If the value is "default", it will be mangled by post_process_props.py.
-ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(config_enable_uffd_gc)
-
-ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES))
 ADDITIONAL_VENDOR_PROPERTIES := $(strip $(ADDITIONAL_VENDOR_PROPERTIES))
 
 .KATI_READONLY += \
-    ADDITIONAL_PRODUCT_PROPERTIES \
     ADDITIONAL_VENDOR_PROPERTIES
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index c95f6e7..eb54fae 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -47,21 +47,16 @@
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \
 	done
-	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
 	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list \
 	  -sha256
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
-	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \
-	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list
 	grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
-	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 
 art-host-tests: $(art_host_tests_zip)
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 7593668..0ca27d8 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -13,7 +13,7 @@
 $(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"')
 endef
 
-SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT).json
+SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT)${COVERAGE_SUFFIX}.json
 
 $(MODULE_INFO_JSON): PRIVATE_SOONG_MODULE_INFO := $(SOONG_MODULE_INFO)
 $(MODULE_INFO_JSON): PRIVATE_MERGE_JSON_OBJECTS := $(HOST_OUT_EXECUTABLES)/merge_module_info_json
diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk
index 61bf136..47c360d 100644
--- a/core/tasks/tradefed-tests-list.mk
+++ b/core/tasks/tradefed-tests-list.mk
@@ -15,6 +15,11 @@
 # List all TradeFed tests from COMPATIBILITY.tradefed_tests_dir
 .PHONY: tradefed-tests-list
 
+COMPATIBILITY.tradefed_tests_dir := \
+  $(COMPATIBILITY.tradefed_tests_dir) \
+  tools/tradefederation/core/res/config \
+  tools/tradefederation/core/javatests/res/config
+
 tradefed_tests :=
 $(foreach dir, $(COMPATIBILITY.tradefed_tests_dir), \
   $(eval tradefed_tests += $(shell find $(dir) -type f -name "*.xml")))
diff --git a/target/product/aosp_product.mk b/target/product/aosp_product.mk
index 3a5b622..c426afd 100644
--- a/target/product/aosp_product.mk
+++ b/target/product/aosp_product.mk
@@ -30,7 +30,6 @@
 # More AOSP packages
 PRODUCT_PACKAGES += \
     initial-package-stopped-states-aosp.xml \
-    messaging \
     PhotoTable \
     preinstalled-packages-platform-aosp-product.xml \
     ThemePicker \
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 0ac220b..acfc653 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -25,3 +25,8 @@
     product_compatibility_matrix.xml \
     product_manifest.xml \
     selinux_policy_product \
+    product-build.prop \
+
+# Packages included only for eng or userdebug builds, previously debug tagged
+PRODUCT_PACKAGES_DEBUG += \
+    adb_keys \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 5b54051..9e34538 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -466,7 +466,6 @@
 
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
-    adb_keys \
     adevice_fingerprint \
     arping \
     dmuserd \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 5b1cae5..a80e0b3 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -73,6 +73,7 @@
     passwd_vendor \
     selinux_policy_nonsystem \
     shell_and_utilities_vendor \
+    odm-build.prop \
 
 # libhealthloop BPF filter. This is in base_vendor.mk because libhealthloop must
 # be a static library and because the Android build system ignores 'required'
diff --git a/target/product/large_screen_common.mk b/target/product/large_screen_common.mk
new file mode 100644
index 0000000..3eb9ff0
--- /dev/null
+++ b/target/product/large_screen_common.mk
@@ -0,0 +1,21 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Window Extensions
+$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk)
+
+# Enable Settings 2-pane optimization for large-screen
+PRODUCT_SYSTEM_PROPERTIES += \
+    persist.settings.large_screen_opt.enabled=true
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 91b272c..138e5bb 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -10,7 +10,7 @@
     LOCAL_LICENSE_CONDITIONS := notice
     LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
     LOCAL_MODULE_CLASS := ETC
-    LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
+    LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT_ETC)/security
     LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS)
     include $(BUILD_PREBUILT)
   endif
diff --git a/target/product/userspace_reboot.mk b/target/product/userspace_reboot.mk
index f235d14..51feb07 100644
--- a/target/product/userspace_reboot.mk
+++ b/target/product/userspace_reboot.mk
@@ -14,6 +14,4 @@
 # limitations under the License.
 #
 
-# Inherit this when the target supports userspace reboot
-
-PRODUCT_VENDOR_PROPERTIES := init.userspace_reboot.is_supported=true
+# DEPRECATED! Do not inherit this.
diff --git a/teams/Android.bp b/teams/Android.bp
index a9699d2..94585fc 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -4414,8 +4414,29 @@
 }
 
 team {
+    name: "trendy_team_android_media_solutions_playback",
+
+    // go/trendy/manage/engineers/6742515252559872
+    trendy_team_id: "6742515252559872",
+}
+
+team {
     name: "trendy_team_android_telemetry_client_infra",
 
     // go/trendy/manage/engineers/5403245077430272
     trendy_team_id: "5403245077430272",
 }
+
+team {
+    name: "trendy_team_pte_sysui",
+
+    // go/trendy/manage/engineers/5185897463382016
+    trendy_team_id: "5185897463382016",
+}
+
+team {
+    name: "trendy_team_pixel_troubleshooting_app",
+
+    // go/trendy/manage/engineers/5097003746426880
+    trendy_team_id: "5097003746426880",
+}
diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
index a74ef85..dbc4ab5 100644
--- a/tools/aconfig/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -698,6 +698,8 @@
         StorageInternalReader reader;
         boolean readFromNewStorage;
 
+        boolean useNewStorageValueAndDiscardOld = false;
+
         private final static String TAG = "AconfigJavaCodegen";
         private final static String SUCCESS_LOG = "success: %s value matches";
         private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s";
@@ -713,6 +715,9 @@
                     reader = null;
                 }
             }
+
+            useNewStorageValueAndDiscardOld =
+                DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false);
         }
 
         private void load_overrides_aconfig_test() {
@@ -746,7 +751,7 @@
                         Log.i(TAG, String.format(MISMATCH_LOG, "disabledRw", val, disabledRw));
                     }
 
-                    if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) {
+                    if (useNewStorageValueAndDiscardOld) {
                         disabledRw = val;
                     }
 
@@ -757,7 +762,7 @@
                         Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwExported", val, disabledRwExported));
                     }
 
-                    if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) {
+                    if (useNewStorageValueAndDiscardOld) {
                         disabledRwExported = val;
                     }
 
@@ -768,7 +773,7 @@
                         Log.i(TAG, String.format(MISMATCH_LOG, "enabledRw", val, enabledRw));
                     }
 
-                    if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) {
+                    if (useNewStorageValueAndDiscardOld) {
                         enabledRw = val;
                     }
 
@@ -805,7 +810,7 @@
                         Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwInOtherNamespace", val, disabledRwInOtherNamespace));
                     }
 
-                    if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) {
+                    if (useNewStorageValueAndDiscardOld) {
                         disabledRwInOtherNamespace = val;
                     }
 
diff --git a/tools/aconfig/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs
index 1292e0a..6f3f7bf 100644
--- a/tools/aconfig/aconfig/src/codegen/rust.rs
+++ b/tools/aconfig/aconfig/src/codegen/rust.rs
@@ -113,6 +113,7 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
 static STORAGE_MIGRATION_MARKER_FILE: &str =
@@ -122,32 +123,29 @@
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
     /// flag value cache for disabled_rw
-    static ref CACHED_disabled_rw: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for disabled_rw_exported
-    static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for disabled_rw_in_other_namespace
-    static ref CACHED_disabled_rw_in_other_namespace: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.other_namespace",
         "com.android.aconfig.test.disabled_rw_in_other_namespace",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_rw
-    static ref CACHED_enabled_rw: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_rw",
-        "true") == "true";
-
-}
+        "true") == "true");
 
 impl FlagProvider {
     /// query flag disabled_ro
@@ -259,6 +257,7 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
 static STORAGE_MIGRATION_MARKER_FILE: &str =
@@ -268,282 +267,276 @@
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
+static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe {
+    get_mapped_storage_file("system", StorageFileType::PackageMap)
+    .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test"))
+    .map(|context| context.map(|c| c.boolean_start_index))
+});
 
-    static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe {
-        get_mapped_storage_file("system", StorageFileType::PackageMap)
-        .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test"))
-        .map(|context| context.map(|c| c.boolean_start_index))
-    };
+static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe {
+    get_mapped_storage_file("system", StorageFileType::FlagVal)
+});
 
-    static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe {
-        get_mapped_storage_file("system", StorageFileType::FlagVal)
-    };
-    /// flag value cache for disabled_rw
+/// flag value cache for disabled_rw
+static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.aconfig_test",
+        "com.android.aconfig.test.disabled_rw",
+        "false") == "true";
 
-    static ref CACHED_disabled_rw: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.aconfig_test",
-            "com.android.aconfig.test.disabled_rw",
-            "false") == "true";
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
 
-        let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.core_experiments_team_internal",
-            "com.android.providers.settings.use_new_storage_value",
-            "false") == "true";
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
 
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 1)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
 
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 1)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
-                            }
-                        })
-                    });
-
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}");
-                    if use_new_storage_value {
-                        panic!("failed to read flag value: {err}");
-                    }
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
                 }
             }
         }
+    }
 
-        result
-        };
+    result
+});
 
-    /// flag value cache for disabled_rw_exported
+/// flag value cache for disabled_rw_exported
+static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.aconfig_test",
+        "com.android.aconfig.test.disabled_rw_exported",
+        "false") == "true";
 
-    static ref CACHED_disabled_rw_exported: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.aconfig_test",
-            "com.android.aconfig.test.disabled_rw_exported",
-            "false") == "true";
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
 
-        let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.core_experiments_team_internal",
-            "com.android.providers.settings.use_new_storage_value",
-            "false") == "true";
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
 
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 2)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
 
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 2)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
-                            }
-                        })
-                    });
-
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_exported'. Legacy storage was {result}, new storage was {storage_result}");
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}");
-                    if use_new_storage_value {
-                        panic!("failed to read flag value: {err}");
-                    }
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_exported'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
                 }
             }
         }
+    }
 
-        result
-        };
+    result
+});
 
-    /// flag value cache for disabled_rw_in_other_namespace
+/// flag value cache for disabled_rw_in_other_namespace
+static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.other_namespace",
+        "com.android.aconfig.test.disabled_rw_in_other_namespace",
+        "false") == "true";
 
-    static ref CACHED_disabled_rw_in_other_namespace: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.other_namespace",
-            "com.android.aconfig.test.disabled_rw_in_other_namespace",
-            "false") == "true";
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
 
-        let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.core_experiments_team_internal",
-            "com.android.providers.settings.use_new_storage_value",
-            "false") == "true";
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
 
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 3)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
 
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 3)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
-                            }
-                        })
-                    });
-
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_in_other_namespace'. Legacy storage was {result}, new storage was {storage_result}");
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}");
-                    if use_new_storage_value {
-                        panic!("failed to read flag value: {err}");
-                    }
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_in_other_namespace'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
                 }
             }
         }
+    }
 
-        result
-        };
+    result
+});
 
-    /// flag value cache for enabled_rw
 
-    static ref CACHED_enabled_rw: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.aconfig_test",
-            "com.android.aconfig.test.enabled_rw",
-            "true") == "true";
+/// flag value cache for enabled_rw
+static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| {
+    let result = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.aconfig_test",
+        "com.android.aconfig.test.enabled_rw",
+        "true") == "true";
 
-        let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.core_experiments_team_internal",
-            "com.android.providers.settings.use_new_storage_value",
-            "false") == "true";
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
 
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
 
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 8)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
-                            }
-                        })
-                    });
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 8)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+                        }
+                    })
+                });
 
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'enabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
-                    if use_new_storage_value {
-                        return storage_result;
-                    } else {
-                        return result;
-                    }
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}");
-                    if use_new_storage_value {
-                        panic!("failed to read flag value: {err}");
-                    }
+        match aconfig_storage_result {
+            Ok(storage_result) if storage_result == result => {
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Ok(storage_result) => {
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'enabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
+                if use_new_storage_value {
+                    return storage_result;
+                } else {
+                    return result;
+                }
+            },
+            Err(err) => {
+                log!(Level::Error, "AconfigTestMission1: error: {err}");
+                if use_new_storage_value {
+                    panic!("failed to read flag value: {err}");
                 }
             }
         }
+    }
 
-        result
-        };
-
-}
+    result
+});
 
 impl FlagProvider {
 
@@ -1207,6 +1200,7 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
 static STORAGE_MIGRATION_MARKER_FILE: &str =
@@ -1216,26 +1210,23 @@
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
     /// flag value cache for disabled_rw_exported
-    static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_fixed_ro_exported
-    static ref CACHED_enabled_fixed_ro_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_fixed_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_fixed_ro_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_ro_exported
-    static ref CACHED_enabled_ro_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_ro_exported",
-        "false") == "true";
-
-}
+        "false") == "true");
 
 impl FlagProvider {
     /// query flag disabled_rw_exported
@@ -1281,6 +1272,7 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
 static STORAGE_MIGRATION_MARKER_FILE: &str =
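
Note on the hunks above: the generated Rust now initializes its flag caches with
std::sync::LazyLock (stable since Rust 1.80) instead of the lazy_static! macro, so the
external lazy_static dependency is no longer needed. A minimal sketch of the pattern,
with a hypothetical flag name and a stand-in for flags_rust::GetServerConfigurableFlag
(the helper below is assumed for illustration only, not the real API):

    use std::sync::LazyLock;

    // Stand-in for flags_rust::GetServerConfigurableFlag; signature assumed for this sketch.
    fn get_server_configurable_flag(namespace: &str, flag: &str, default: &str) -> String {
        let _ = (namespace, flag);
        default.to_string()
    }

    // Before: `static ref CACHED_example: bool = ... == "true";` inside lazy_static!.
    // After: a plain static whose initializer closure runs once, on first access.
    static CACHED_EXAMPLE: LazyLock<bool> = LazyLock::new(|| {
        get_server_configurable_flag(
            "aconfig_flags.example_namespace",
            "com.example.flag",
            "false",
        ) == "true"
    });

    fn main() {
        // Dereferencing runs the closure the first time and caches the bool after that.
        println!("cached flag value: {}", *CACHED_EXAMPLE);
    }
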
diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
index 59f0662..1a14f64 100644
--- a/tools/aconfig/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -17,7 +17,8 @@
 use anyhow::{bail, ensure, Context, Result};
 use itertools::Itertools;
 use protobuf::Message;
-use std::collections::HashMap;
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hasher;
 use std::io::Read;
 use std::path::PathBuf;
 
@@ -31,6 +32,7 @@
     ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag,
     ProtoParsedFlags, ProtoTracepoint,
 };
+use aconfig_storage_file::sip_hasher13::SipHasher13;
 use aconfig_storage_file::StorageFileType;
 
 pub struct Input {
@@ -410,12 +412,43 @@
     Ok(flag_ids)
 }
 
+#[allow(dead_code)] // TODO: b/316357686 - Use fingerprint in codegen to
+                    // protect hardcoded offset reads.
+pub fn compute_flag_offsets_fingerprint(flags_map: &HashMap<String, u16>) -> Result<u64> {
+    let mut hasher = SipHasher13::new();
+
+    // Need to sort to ensure the data is added to the hasher in the same order
+    // each run.
+    let sorted_map: BTreeMap<&String, &u16> = flags_map.iter().collect();
+
+    for (flag, offset) in sorted_map {
+        // See https://docs.rs/siphasher/latest/siphasher/#note for use of write
+        // over write_u16. Similarly, use to_be_bytes rather than to_ne_bytes to
+        // ensure consistency.
+        hasher.write(flag.as_bytes());
+        hasher.write(&offset.to_be_bytes());
+    }
+    Ok(hasher.finish())
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
     use aconfig_protos::ProtoFlagPurpose;
 
     #[test]
+    fn test_offset_fingerprint() {
+        let parsed_flags = crate::test::parse_test_flags();
+        let package = find_unique_package(&parsed_flags.parsed_flag).unwrap().to_string();
+        let flag_ids = assign_flag_ids(&package, parsed_flags.parsed_flag.iter()).unwrap();
+        let expected_fingerprint = 10709892481002252132u64;
+
+        let hash_result = compute_flag_offsets_fingerprint(&flag_ids);
+
+        assert_eq!(hash_result.unwrap(), expected_fingerprint);
+    }
+
+    #[test]
     fn test_parse_flags() {
         let parsed_flags = crate::test::parse_test_flags(); // calls parse_flags
         aconfig_protos::parsed_flags::verify_fields(&parsed_flags).unwrap();
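
For context on compute_flag_offsets_fingerprint above: the map is first copied into a
BTreeMap so the (flag, offset) pairs are hashed in a fixed order, and each offset is fed
in as big-endian bytes so the result does not depend on host endianness. A self-contained
sketch of that scheme, using std's DefaultHasher purely as an illustrative stand-in for
SipHasher13 (so the actual fingerprint values will differ):

    use std::collections::hash_map::DefaultHasher;
    use std::collections::{BTreeMap, HashMap};
    use std::hash::Hasher;

    // Same ordering and byte-feeding scheme as the change, different hasher.
    fn fingerprint(flags_map: &HashMap<String, u16>) -> u64 {
        let mut hasher = DefaultHasher::new();
        // BTreeMap iterates in key order, so HashMap iteration order cannot leak
        // into the result.
        let sorted: BTreeMap<&String, &u16> = flags_map.iter().collect();
        for (flag, offset) in sorted {
            hasher.write(flag.as_bytes());
            hasher.write(&offset.to_be_bytes());
        }
        hasher.finish()
    }

    fn main() {
        let mut flags = HashMap::new();
        flags.insert("com.example.flag_a".to_string(), 1u16);
        flags.insert("com.example.flag_b".to_string(), 2u16);
        // The same contents always yield the same value, regardless of insertion order.
        println!("fingerprint: {}", fingerprint(&flags));
    }
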
diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
index 96e7623..9970b1f 100644
--- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
@@ -35,6 +35,8 @@
     StorageInternalReader reader;
     boolean readFromNewStorage;
 
+    boolean useNewStorageValueAndDiscardOld = false;
+
     private final static String TAG = "AconfigJavaCodegen";
     private final static String SUCCESS_LOG = "success: %s value matches";
     private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s";
@@ -50,6 +52,9 @@
                 reader = null;
             }
         }
+
+        useNewStorageValueAndDiscardOld =
+            DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false);
     }
 
 {{ -endif }}
@@ -91,7 +96,7 @@
                     Log.i(TAG, String.format(MISMATCH_LOG, "{flag.method_name}", val, {flag.method_name}));
                 }
 
-                if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) \{
+                if (useNewStorageValueAndDiscardOld) \{
                     {flag.method_name} = val;
                 }
 
diff --git a/tools/aconfig/aconfig/templates/rust.template b/tools/aconfig/aconfig/templates/rust.template
index 77a9984..ea1c600 100644
--- a/tools/aconfig/aconfig/templates/rust.template
+++ b/tools/aconfig/aconfig/templates/rust.template
@@ -2,6 +2,7 @@
 use aconfig_storage_read_api::\{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::\{log, LevelFilter, Level};
 
 static STORAGE_MIGRATION_MARKER_FILE: &str =
@@ -12,95 +13,93 @@
 pub struct FlagProvider;
 
 {{ if has_readwrite- }}
-lazy_static::lazy_static! \{
-    {{ if allow_instrumentation }}
-    static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe \{
-        get_mapped_storage_file("{container}", StorageFileType::PackageMap)
-        .and_then(|package_map| get_package_read_context(&package_map, "{package}"))
-        .map(|context| context.map(|c| c.boolean_start_index))
-    };
+{{ if allow_instrumentation }}
+static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe \{
+    get_mapped_storage_file("{container}", StorageFileType::PackageMap)
+    .and_then(|package_map| get_package_read_context(&package_map, "{package}"))
+    .map(|context| context.map(|c| c.boolean_start_index))
+});
 
-    static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe \{
-        get_mapped_storage_file("{container}", StorageFileType::FlagVal)
-    };
-    {{ -endif }}
-
+static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe \{
+    get_mapped_storage_file("{container}", StorageFileType::FlagVal)
+});
+{{ -endif }}
 {{ -for flag in template_flags }}
-    {{ -if flag.readwrite }}
-    /// flag value cache for {flag.name}
-    {{ if allow_instrumentation }}
-    static ref CACHED_{flag.name}: bool = \{
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.{flag.device_config_namespace}",
-            "{flag.device_config_flag}",
-            "{flag.default_value}") == "true";
 
-        let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.core_experiments_team_internal",
-            "com.android.providers.settings.use_new_storage_value",
-            "false") == "true";
-
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
-
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: \{err}"))
-                .and_then(|flag_val_map| \{
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: \{err}"))
-                        .and_then(|package_offset| \{
-                            match package_offset \{
-                                Some(offset) => \{
-                                    get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset})
-                                        .map_err(|err| format!("failed to get flag: \{err}"))
-                                },
-                                None => Err("no context found for package '{package}'".to_string())
-                            }
-                        })
-                    });
-
-            match aconfig_storage_result \{
-                Ok(storage_result) if storage_result == result => \{
-                    if use_new_storage_value \{
-                        return storage_result;
-                    } else \{
-                        return result;
-                    }
-                },
-                Ok(storage_result) => \{
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag '{flag.name}'. Legacy storage was \{result}, new storage was \{storage_result}");
-                    if use_new_storage_value \{
-                        return storage_result;
-                    } else \{
-                        return result;
-                    }
-                },
-                Err(err) => \{
-                    log!(Level::Error, "AconfigTestMission1: error: \{err}");
-                    if use_new_storage_value \{
-                        panic!("failed to read flag value: \{err}");
-                    }
-                }
-            }
-        }
-
-        result
-        };
-    {{ else }}
-    static ref CACHED_{flag.name}: bool = flags_rust::GetServerConfigurableFlag(
+{{ -if flag.readwrite }}
+/// flag value cache for {flag.name}
+{{ if allow_instrumentation }}
+static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| \{
+    let result = flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.{flag.device_config_namespace}",
         "{flag.device_config_flag}",
         "{flag.default_value}") == "true";
-    {{ endif }}
-    {{ -endif }}
+
+    let use_new_storage_value = flags_rust::GetServerConfigurableFlag(
+        "aconfig_flags.core_experiments_team_internal",
+        "com.android.providers.settings.use_new_storage_value",
+        "false") == "true";
+
+    if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device(MIGRATION_LOG_TAG)
+                .with_max_level(LevelFilter::Info));
+
+        let aconfig_storage_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: \{err}"))
+            .and_then(|flag_val_map| \{
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: \{err}"))
+                    .and_then(|package_offset| \{
+                        match package_offset \{
+                            Some(offset) => \{
+                                get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset})
+                                    .map_err(|err| format!("failed to get flag: \{err}"))
+                            },
+                            None => Err("no context found for package '{package}'".to_string())
+                        }
+                    })
+                });
+
+        match aconfig_storage_result \{
+            Ok(storage_result) if storage_result == result => \{
+                if use_new_storage_value \{
+                    return storage_result;
+                } else \{
+                    return result;
+                }
+            },
+            Ok(storage_result) => \{
+                log!(Level::Error, "AconfigTestMission1: error: mismatch for flag '{flag.name}'. Legacy storage was \{result}, new storage was \{storage_result}");
+                if use_new_storage_value \{
+                    return storage_result;
+                } else \{
+                    return result;
+                }
+            },
+            Err(err) => \{
+                log!(Level::Error, "AconfigTestMission1: error: \{err}");
+                if use_new_storage_value \{
+                    panic!("failed to read flag value: \{err}");
+                }
+            }
+        }
+    }
+
+    result
+    });
+{{ else }}
+static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
+    "aconfig_flags.{flag.device_config_namespace}",
+    "{flag.device_config_flag}",
+    "{flag.default_value}") == "true");
+{{ endif }}
+{{ -endif }}
 {{ -endfor }}
-}
 {{ -endif }}
 
 impl FlagProvider \{
diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp
index 2d943de..95cecf4 100644
--- a/tools/aconfig/aconfig_device_paths/Android.bp
+++ b/tools/aconfig/aconfig_device_paths/Android.bp
@@ -39,8 +39,8 @@
 
 genrule {
     name: "libaconfig_java_device_paths_src",
-    srcs: ["src/DevicePathsTemplate.java"],
-    out: ["DevicePaths.java"],
+    srcs: ["src/DeviceProtosTemplate.java"],
+    out: ["DeviceProtos.java"],
     tool_files: ["partition_aconfig_flags_paths.txt"],
     cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)",
 }
@@ -48,5 +48,7 @@
 java_library {
     name: "aconfig_device_paths_java",
     srcs: [":libaconfig_java_device_paths_src"],
-    sdk_version: "core_current",
+    static_libs: [
+        "libaconfig_java_proto_nano",
+    ],
 }
diff --git a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
similarity index 62%
rename from tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
rename to tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
index 16355a3..58c58de 100644
--- a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
+++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
@@ -15,7 +15,12 @@
  */
 package android.aconfig;
 
+import android.aconfig.nano.Aconfig.parsed_flag;
+import android.aconfig.nano.Aconfig.parsed_flags;
+
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -23,7 +28,7 @@
 /**
  * @hide
  */
-public class DevicePaths {
+public class DeviceProtos {
     static final String[] PATHS = {
         TEMPLATE
     };
@@ -31,12 +36,35 @@
     private static final String APEX_DIR = "/apex";
     private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb";
 
+    /**
+     * Returns a list of all on-device aconfig protos.
+     *
+     * May throw an exception if the protos can't be read at the time of the
+     * call. For example, some of the protos live under /apex, which is mounted
+     * somewhat late in the boot process.
+     *
+     * @throws IOException if we can't read one of the protos yet
+     * @return a list of all on-device aconfig protos
+     */
+    public static List<parsed_flag> loadAndParseFlagProtos() throws IOException {
+        ArrayList<parsed_flag> result = new ArrayList<>();
+
+        for (String path : parsedFlagsProtoPaths()) {
+            FileInputStream inputStream = new FileInputStream(path);
+            parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
+            for (parsed_flag flag : parsedFlags.parsedFlag) {
+                result.add(flag);
+            }
+        }
+
+        return result;
+    }
 
     /**
      * Returns the list of all on-device aconfig protos paths.
      * @hide
      */
-    public static List<String> parsedFlagsProtoPaths() {
+    private static List<String> parsedFlagsProtoPaths() {
         ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS));
 
         File apexDirectory = new File(APEX_DIR);
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
index 64b90ea..660edac 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
@@ -150,7 +150,7 @@
     /// Calculate node bucket index
     pub fn find_bucket_index(package_id: u32, flag_name: &str, num_buckets: u32) -> u32 {
         let full_flag_name = package_id.to_string() + "/" + flag_name;
-        get_bucket_index(&full_flag_name, num_buckets)
+        get_bucket_index(full_flag_name.as_bytes(), num_buckets)
     }
 }
 
diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs
index 26e9c1a..b6367ff 100644
--- a/tools/aconfig/aconfig_storage_file/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_file/src/lib.rs
@@ -37,19 +37,20 @@
 pub mod flag_value;
 pub mod package_table;
 pub mod protos;
+pub mod sip_hasher13;
 pub mod test_utils;
 
 use anyhow::anyhow;
 use std::cmp::Ordering;
-use std::collections::hash_map::DefaultHasher;
 use std::fs::File;
-use std::hash::{Hash, Hasher};
+use std::hash::Hasher;
 use std::io::Read;
 
 pub use crate::flag_info::{FlagInfoBit, FlagInfoHeader, FlagInfoList, FlagInfoNode};
 pub use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode};
 pub use crate::flag_value::{FlagValueHeader, FlagValueList};
 pub use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode};
+pub use crate::sip_hasher13::SipHasher13;
 
 use crate::AconfigStorageError::{
     BytesParseFail, HashTableSizeLimit, InvalidFlagValueType, InvalidStoredFlagType,
@@ -211,10 +212,12 @@
 }
 
 /// Get the corresponding bucket index given the key and number of buckets
-pub(crate) fn get_bucket_index<T: Hash>(val: &T, num_buckets: u32) -> u32 {
-    let mut s = DefaultHasher::new();
-    val.hash(&mut s);
-    (s.finish() % num_buckets as u64) as u32
+pub(crate) fn get_bucket_index(val: &[u8], num_buckets: u32) -> u32 {
+    let mut s = SipHasher13::new();
+    s.write(val);
+    s.write_u8(0xff);
+    (s.finish() % num_buckets as u64) as u32
 }
 
 /// Read and parse bytes as u8
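
For context on the get_bucket_index change above: DefaultHasher's algorithm is explicitly
unspecified and may change between Rust releases, while the bucket index must match
between the writer (the aconfig binary that builds the storage files) and the on-device
readers, so the hash is pinned to the crate's own SipHasher13 over the raw key bytes plus
a 0xff terminator. A minimal sketch of the call shape, with a hypothetical deterministic
hasher standing in for SipHasher13:

    use std::hash::Hasher;

    // Stand-in for aconfig_storage_file::SipHasher13; any fixed, stable Hasher
    // demonstrates the point. FNV-1a-style mixing is used only because it is short.
    #[derive(Default)]
    struct FixedHasher(u64);

    impl Hasher for FixedHasher {
        fn finish(&self) -> u64 {
            self.0
        }
        fn write(&mut self, bytes: &[u8]) {
            for &b in bytes {
                self.0 = (self.0 ^ b as u64).wrapping_mul(0x0100_0000_01b3);
            }
        }
    }

    // Mirrors the shape of get_bucket_index: hash the key bytes, append a 0xff
    // terminator, then reduce modulo the bucket count.
    fn bucket_index(key: &[u8], num_buckets: u32) -> u32 {
        let mut s = FixedHasher::default();
        s.write(key);
        s.write_u8(0xff); // Hasher's default write_u8 forwards to write.
        (s.finish() % num_buckets as u64) as u32
    }

    fn main() {
        // Writer and reader compute the same index for the same key.
        println!("bucket index: {}", bucket_index(b"com.android.aconfig.test", 31));
    }
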
diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs
index b734972..007f86e 100644
--- a/tools/aconfig/aconfig_storage_file/src/package_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs
@@ -146,7 +146,7 @@
     /// construction side (aconfig binary) and consumption side (flag read lib)
     /// use the same method of hashing
     pub fn find_bucket_index(package: &str, num_buckets: u32) -> u32 {
-        get_bucket_index(&package, num_buckets)
+        get_bucket_index(package.as_bytes(), num_buckets)
     }
 }
 
diff --git a/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs
new file mode 100644
index 0000000..9be3175
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! An implementation of SipHash13
+
+use std::cmp;
+use std::mem;
+use std::ptr;
+use std::slice;
+
+use std::hash::Hasher;
+
+/// An implementation of SipHash 1-3.
+///
+#[derive(Debug, Clone, Default)]
+pub struct SipHasher13 {
+    k0: u64,
+    k1: u64,
+    length: usize, // how many bytes we've processed
+    state: State,  // hash State
+    tail: u64,     // unprocessed bytes, stored little-endian
+    ntail: usize,  // how many bytes in tail are valid
+}
+
+#[derive(Debug, Clone, Copy, Default)]
+#[repr(C)]
+struct State {
+    // v0, v2 and v1, v3 show up in pairs in the algorithm,
+    // and simd implementations of SipHash will use vectors
+    // of v02 and v13. By placing them in this order in the struct,
+    // the compiler can pick up on just a few simd optimizations by itself.
+    v0: u64,
+    v2: u64,
+    v1: u64,
+    v3: u64,
+}
+
+macro_rules! compress {
+    ($state:expr) => {{
+        compress!($state.v0, $state.v1, $state.v2, $state.v3)
+    }};
+    ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => {{
+        $v0 = $v0.wrapping_add($v1);
+        $v1 = $v1.rotate_left(13);
+        $v1 ^= $v0;
+        $v0 = $v0.rotate_left(32);
+        $v2 = $v2.wrapping_add($v3);
+        $v3 = $v3.rotate_left(16);
+        $v3 ^= $v2;
+        $v0 = $v0.wrapping_add($v3);
+        $v3 = $v3.rotate_left(21);
+        $v3 ^= $v0;
+        $v2 = $v2.wrapping_add($v1);
+        $v1 = $v1.rotate_left(17);
+        $v1 ^= $v2;
+        $v2 = $v2.rotate_left(32);
+    }};
+}
+
+/// Load an integer of the desired type from a byte stream, in LE order. Uses
+/// `copy_nonoverlapping` to let the compiler generate the most efficient way
+/// to load it from a possibly unaligned address.
+///
+/// Unsafe because: unchecked indexing at i..i+size_of(int_ty)
+macro_rules! load_int_le {
+    ($buf:expr, $i:expr, $int_ty:ident) => {{
+        debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len());
+        let mut data = 0 as $int_ty;
+        ptr::copy_nonoverlapping(
+            $buf.get_unchecked($i),
+            &mut data as *mut _ as *mut u8,
+            mem::size_of::<$int_ty>(),
+        );
+        data.to_le()
+    }};
+}
+
+/// Load an u64 using up to 7 bytes of a byte slice.
+///
+/// Unsafe because: unchecked indexing at start..start+len
+#[inline]
+unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
+    debug_assert!(len < 8);
+    let mut i = 0; // current byte index (from LSB) in the output u64
+    let mut out = 0;
+    if i + 3 < len {
+        out = load_int_le!(buf, start + i, u32) as u64;
+        i += 4;
+    }
+    if i + 1 < len {
+        out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8);
+        i += 2
+    }
+    if i < len {
+        out |= (*buf.get_unchecked(start + i) as u64) << (i * 8);
+        i += 1;
+    }
+    debug_assert_eq!(i, len);
+    out
+}
+
+impl SipHasher13 {
+    /// Creates a new `SipHasher13` with the two initial keys set to 0.
+    #[inline]
+    pub fn new() -> SipHasher13 {
+        SipHasher13::new_with_keys(0, 0)
+    }
+
+    /// Creates a `SipHasher13` that is keyed off the provided keys.
+    #[inline]
+    pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 {
+        let mut sip_hasher = SipHasher13 {
+            k0: key0,
+            k1: key1,
+            length: 0,
+            state: State { v0: 0, v1: 0, v2: 0, v3: 0 },
+            tail: 0,
+            ntail: 0,
+        };
+        sip_hasher.reset();
+        sip_hasher
+    }
+
+    #[inline]
+    fn c_rounds(state: &mut State) {
+        compress!(state);
+    }
+
+    #[inline]
+    fn d_rounds(state: &mut State) {
+        compress!(state);
+        compress!(state);
+        compress!(state);
+    }
+
+    #[inline]
+    fn reset(&mut self) {
+        self.length = 0;
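+        // Standard SipHash initialization constants ("somepseudorandomlygeneratedbytes")
+        // XORed with the keys.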
+        self.state.v0 = self.k0 ^ 0x736f6d6570736575;
+        self.state.v1 = self.k1 ^ 0x646f72616e646f6d;
+        self.state.v2 = self.k0 ^ 0x6c7967656e657261;
+        self.state.v3 = self.k1 ^ 0x7465646279746573;
+        self.ntail = 0;
+    }
+
+    // Specialized write function that is only valid for buffers with len <= 8.
+    // It's used to force inlining of write_u8 and write_usize; those would normally be inlined
+    // except for composite types (which includes slices and str hashing because of the delimiter).
+    // Without this extra push the compiler is very reluctant to inline delimiter writes,
+    // degrading performance substantially for the most common use cases.
+    #[inline]
+    fn short_write(&mut self, msg: &[u8]) {
+        debug_assert!(msg.len() <= 8);
+        let length = msg.len();
+        self.length += length;
+
+        let needed = 8 - self.ntail;
+        let fill = cmp::min(length, needed);
+        if fill == 8 {
+            // safe: fill == 8 implies msg.len() == 8, so the full u64 load is in bounds
+            self.tail = unsafe { load_int_le!(msg, 0, u64) };
+        } else {
+            // safe: fill < 8 in this branch and fill <= msg.len()
+            self.tail |= unsafe { u8to64_le(msg, 0, fill) } << (8 * self.ntail);
+            if length < needed {
+                self.ntail += length;
+                return;
+            }
+        }
+        self.state.v3 ^= self.tail;
+        Self::c_rounds(&mut self.state);
+        self.state.v0 ^= self.tail;
+
+        // Buffered tail is now flushed, process new input.
+        self.ntail = length - needed;
+        // safe to call since number of `needed` bytes has been loaded
+        // and self.ntail + needed == msg.len()
+        self.tail = unsafe { u8to64_le(msg, needed, self.ntail) };
+    }
+}
+
+impl Hasher for SipHasher13 {
+    // see short_write comment for explanation
+    #[inline]
+    fn write_usize(&mut self, i: usize) {
+        // safe: reinterprets the bytes of a valid usize on the stack as a byte slice of the correct length
+        let bytes = unsafe {
+            slice::from_raw_parts(&i as *const usize as *const u8, mem::size_of::<usize>())
+        };
+        self.short_write(bytes);
+    }
+
+    // see short_write comment for explanation
+    #[inline]
+    fn write_u8(&mut self, i: u8) {
+        self.short_write(&[i]);
+    }
+
+    #[inline]
+    fn write(&mut self, msg: &[u8]) {
+        let length = msg.len();
+        self.length += length;
+
+        let mut needed = 0;
+
+        // loading unprocessed byte from last write
+        if self.ntail != 0 {
+            needed = 8 - self.ntail;
+            // safe: cmp::min(length, needed) < 8 and does not exceed msg.len()
+            self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail;
+            if length < needed {
+                self.ntail += length;
+                return;
+            } else {
+                self.state.v3 ^= self.tail;
+                Self::c_rounds(&mut self.state);
+                self.state.v0 ^= self.tail;
+                self.ntail = 0;
+            }
+        }
+
+        // Buffered tail is now flushed, process new input.
+        let len = length - needed;
+        let left = len & 0x7;
+
+        let mut i = needed;
+        while i < len - left {
+            // safe: the loop bound guarantees at least 8 bytes remain in msg at index i
+            let mi = unsafe { load_int_le!(msg, i, u64) };
+
+            self.state.v3 ^= mi;
+            Self::c_rounds(&mut self.state);
+            self.state.v0 ^= mi;
+
+            i += 8;
+        }
+
+        // safe: if left == 0 this loads nothing; if left > 0, exactly `left` bytes remain in msg at index i
+        self.tail = unsafe { u8to64_le(msg, i, left) };
+        self.ntail = left;
+    }
+
+    #[inline]
+    fn finish(&self) -> u64 {
+        let mut state = self.state;
+
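+        // Final word: message length (mod 256) in the top byte, unprocessed tail bytes below it.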
+        let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail;
+
+        state.v3 ^= b;
+        Self::c_rounds(&mut state);
+        state.v0 ^= b;
+
+        state.v2 ^= 0xff;
+        Self::d_rounds(&mut state);
+
+        state.v0 ^ state.v1 ^ state.v2 ^ state.v3
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use std::hash::{Hash, Hasher};
+    use std::string::String;
+
+    #[test]
+    // this test point locks down the SipHash13 hash values for strings
+    fn test_sip_hash13_string_hash() {
+        let mut sip_hash13 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        test_str1.hash(&mut sip_hash13);
+        assert_eq!(17898838669067067585, sip_hash13.finish());
+
+        let test_str2 = String::from("adfadfadf adfafadadf 1231241241");
+        test_str2.hash(&mut sip_hash13);
+        assert_eq!(13543518987672889310, sip_hash13.finish());
+    }
+
+    #[test]
+    fn test_sip_hash13_write() {
+        let mut sip_hash13 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        sip_hash13.write(test_str1.as_bytes());
+        sip_hash13.write_u8(0xff);
+        assert_eq!(17898838669067067585, sip_hash13.finish());
+
+        let mut sip_hash132 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        sip_hash132.write(test_str1.as_bytes());
+        assert_eq!(9685440969685209025, sip_hash132.finish());
+        sip_hash132.write(test_str1.as_bytes());
+        assert_eq!(6719694176662736568, sip_hash132.finish());
+
+        let mut sip_hash133 = SipHasher13::new();
+        let test_str2 = String::from("abcdefg");
+        test_str2.hash(&mut sip_hash133);
+        assert_eq!(2492161047327640297, sip_hash133.finish());
+
+        let mut sip_hash134 = SipHasher13::new();
+        let test_str3 = String::from("abcdefgh");
+        test_str3.hash(&mut sip_hash134);
+        assert_eq!(6689927370435554326, sip_hash134.finish());
+    }
+
+    #[test]
+    fn test_sip_hash13_write_short() {
+        let mut sip_hash13 = SipHasher13::new();
+        sip_hash13.write_u8(0x61);
+        assert_eq!(4644417185603328019, sip_hash13.finish());
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
index 1c72364..4bea083 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
@@ -48,6 +48,10 @@
         return new String(bytes, StandardCharsets.UTF_8);
     }
 
+    public int readByte(int i) {
+        return Byte.toUnsignedInt(mByteBuffer.get(i));
+    }
+
     public void position(int newPosition) {
         mByteBuffer.position(newPosition);
     }
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
index e85fdee..9838a7c 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
@@ -16,41 +16,50 @@
 
 package android.aconfig.storage;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Objects;
 
 public class FlagTable {
 
     private Header mHeader;
-    private Map<String, Node> mNodeMap;
+    private ByteBufferReader mReader;
 
     public static FlagTable fromBytes(ByteBuffer bytes) {
         FlagTable flagTable = new FlagTable();
-        ByteBufferReader reader = new ByteBufferReader(bytes);
-        Header header = Header.fromBytes(reader);
-        flagTable.mHeader = header;
-        flagTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumFlags));
-        reader.position(header.mNodeOffset);
-        for (int i = 0; i < header.mNumFlags; i++) {
-            Node node = Node.fromBytes(reader);
-            flagTable.mNodeMap.put(makeKey(node.mPackageId, node.mFlagName), node);
-        }
+        flagTable.mReader = new ByteBufferReader(bytes);
+        flagTable.mHeader = Header.fromBytes(flagTable.mReader);
+
         return flagTable;
     }
 
     public Node get(int packageId, String flagName) {
-        return mNodeMap.get(makeKey(packageId, flagName));
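+        // Each bucket entry is a 4-byte node offset, so the bucket count is the span
+        // between the bucket and node sections divided by 4.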
+        int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
+        int bucketIndex = TableUtils.getBucketIndex(makeKey(packageId, flagName), numBuckets);
+
+        mReader.position(mHeader.mBucketOffset + bucketIndex * 4);
+        int nodeIndex = mReader.readInt();
+
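+        // Walk the collision chain for this bucket until a node with the matching
+        // package id and flag name is found.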
+        while (nodeIndex != -1) {
+            mReader.position(nodeIndex);
+            Node node = Node.fromBytes(mReader);
+            if (Objects.equals(flagName, node.mFlagName) && packageId == node.mPackageId) {
+                return node;
+            }
+            nodeIndex = node.mNextOffset;
+        }
+
+        throw new AconfigStorageException("get cannot find flag: " + flagName);
     }
 
     public Header getHeader() {
         return mHeader;
     }
 
-    private static String makeKey(int packageId, String flagName) {
+    private static byte[] makeKey(int packageId, String flagName) {
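+        // Key format is "<packageId>/<flagName>" encoded as UTF-8, matching the full
+        // flag name used by the Rust flag table.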
         StringBuilder ret = new StringBuilder();
-        return ret.append(packageId).append('/').append(flagName).toString();
+        return ret.append(packageId).append('/').append(flagName).toString().getBytes(UTF_8);
     }
 
     public static class Header {
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
index 0ddc147..493436d 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
@@ -17,33 +17,21 @@
 package android.aconfig.storage;
 
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
 
 public class FlagValueList {
 
     private Header mHeader;
-    private List<Boolean> mList;
-
-    private int mSize;
+    private ByteBufferReader mReader;
 
     public static FlagValueList fromBytes(ByteBuffer bytes) {
         FlagValueList flagValueList = new FlagValueList();
-        ByteBufferReader reader = new ByteBufferReader(bytes);
-        Header header = Header.fromBytes(reader);
-        flagValueList.mHeader = header;
-        flagValueList.mList = new ArrayList(header.mNumFlags);
-        reader.position(header.mBooleanValueOffset);
-        for (int i = 0; i < header.mNumFlags; i++) {
-            boolean val = reader.readByte() == 1;
-            flagValueList.mList.add(val);
-        }
-        flagValueList.mSize = flagValueList.mList.size();
+        flagValueList.mReader = new ByteBufferReader(bytes);
+        flagValueList.mHeader = Header.fromBytes(flagValueList.mReader);
         return flagValueList;
     }
 
-    public boolean get(int index) {
-        return mList.get(index);
+    public boolean getBoolean(int index) {
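+        // Boolean values are stored one byte per flag starting at mBooleanValueOffset;
+        // read the byte directly instead of materializing a list.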
+        return mReader.readByte(mHeader.mBooleanValueOffset + index) == 1;
     }
 
     public Header getHeader() {
@@ -51,7 +39,7 @@
     }
 
     public int size() {
-        return mSize;
+        return mHeader.mNumFlags;
     }
 
     public static class Header {
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
index d04e1ac..773b882 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
@@ -16,32 +16,42 @@
 
 package android.aconfig.storage;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Objects;
 
 public class PackageTable {
 
     private Header mHeader;
-    private Map<String, Node> mNodeMap;
+    private ByteBufferReader mReader;
 
     public static PackageTable fromBytes(ByteBuffer bytes) {
         PackageTable packageTable = new PackageTable();
-        ByteBufferReader reader = new ByteBufferReader(bytes);
-        Header header = Header.fromBytes(reader);
-        packageTable.mHeader = header;
-        packageTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumPackages));
-        reader.position(header.mNodeOffset);
-        for (int i = 0; i < header.mNumPackages; i++) {
-            Node node = Node.fromBytes(reader);
-            packageTable.mNodeMap.put(node.mPackageName, node);
-        }
+        packageTable.mReader = new ByteBufferReader(bytes);
+        packageTable.mHeader = Header.fromBytes(packageTable.mReader);
+
         return packageTable;
     }
 
     public Node get(String packageName) {
-        return mNodeMap.get(packageName);
+
+        int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
+        int bucketIndex = TableUtils.getBucketIndex(packageName.getBytes(UTF_8), numBuckets);
+
+        mReader.position(mHeader.mBucketOffset + bucketIndex * 4);
+        int nodeIndex = mReader.readInt();
+
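+        // Walk the collision chain for this bucket until a node with the matching
+        // package name is found.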
+        while (nodeIndex != -1) {
+            mReader.position(nodeIndex);
+            Node node = Node.fromBytes(mReader);
+            if (Objects.equals(packageName, node.mPackageName)) {
+                return node;
+            }
+            nodeIndex = node.mNextOffset;
+        }
+
+        throw new AconfigStorageException("get cannot find package: " + packageName);
     }
 
     public Header getHeader() {
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java
new file mode 100644
index 0000000..64714ee
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+public class SipHasher13 {
+    static class State {
+        private long v0;
+        private long v2;
+        private long v1;
+        private long v3;
+
+        public State(long k0, long k1) {
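+            // Keys XORed with the standard SipHash initialization constants,
+            // matching the Rust implementation.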
+            v0 = k0 ^ 0x736f6d6570736575L;
+            v1 = k1 ^ 0x646f72616e646f6dL;
+            v2 = k0 ^ 0x6c7967656e657261L;
+            v3 = k1 ^ 0x7465646279746573L;
+        }
+
+        public void compress(long m) {
+            v3 ^= m;
+            cRounds();
+            v0 ^= m;
+        }
+
+        public long finish() {
+            v2 ^= 0xff;
+            dRounds();
+            return v0 ^ v1 ^ v2 ^ v3;
+        }
+
+        private void cRounds() {
+            v0 += v1;
+            v1 = Long.rotateLeft(v1, 13);
+            v1 ^= v0;
+            v0 = Long.rotateLeft(v0, 32);
+            v2 += v3;
+            v3 = Long.rotateLeft(v3, 16);
+            v3 ^= v2;
+            v0 += v3;
+            v3 = Long.rotateLeft(v3, 21);
+            v3 ^= v0;
+            v2 += v1;
+            v1 = Long.rotateLeft(v1, 17);
+            v1 ^= v2;
+            v2 = Long.rotateLeft(v2, 32);
+        }
+
+        private void dRounds() {
+            for (int i = 0; i < 3; i++) {
+                v0 += v1;
+                v1 = Long.rotateLeft(v1, 13);
+                v1 ^= v0;
+                v0 = Long.rotateLeft(v0, 32);
+                v2 += v3;
+                v3 = Long.rotateLeft(v3, 16);
+                v3 ^= v2;
+                v0 += v3;
+                v3 = Long.rotateLeft(v3, 21);
+                v3 ^= v0;
+                v2 += v1;
+                v1 = Long.rotateLeft(v1, 17);
+                v1 ^= v2;
+                v2 = Long.rotateLeft(v2, 32);
+            }
+        }
+    }
+
+    public static long hash(byte[] data) {
+        State state = new State(0, 0);
+        int len = data.length;
+        int left = len & 0x7;
+        int index = 0;
+
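+        // Process all complete 8-byte little-endian words of the input.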
+        while (index < len - left) {
+            long mi = loadLe(data, index, 8);
+            index += 8;
+            state.compress(mi);
+        }
+
+        // padding the end with 0xff to be consistent with rust
+        long m = (0xffL << (left * 8)) | loadLe(data, index, left);
+        if (left == 0x7) {
+            // 7 remaining bytes plus the 0xff pad already fill a full word;
+            // compress it and start a fresh final word
+            state.compress(m);
+            m = 0L;
+        }
+        // the length byte counts the extra 0xff pad, hence len + 1
+        m |= (((len + 1) & 0xffL) << 56);
+        state.compress(m);
+
+        return state.finish();
+    }
+
+    private static long loadLe(byte[] data, int offset, int size) {
+        long m = 0;
+        for (int i = 0; i < size; i++) {
+            m |= (data[i + offset] & 0xffL) << (i * 8);
+        }
+        return m;
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
index 714b53b..81168f5 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
@@ -58,4 +58,9 @@
         }
         throw new AconfigStorageException("Number of items in a hash table exceeds limit");
     }
+
+    public static int getBucketIndex(byte[] val, int numBuckets) {
+        long hashVal = SipHasher13.hash(val);
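+        // Treat the 64-bit hash as unsigned so the result matches Rust's u64 modulo.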
+        return (int) Long.remainderUnsigned(hashVal, numBuckets);
+    }
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp
index e2e225d..12e4aca 100644
--- a/tools/aconfig/aconfig_storage_file/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp
@@ -30,9 +30,10 @@
     static_libs: [
         "androidx.test.runner",
         "junit",
+        "aconfig_storage_file_java",
     ],
     test_config: "AndroidStorageJaveTest.xml",
-    certificate: "platform",
+    sdk_version: "test_current",
     data: [
         "package.map",
         "flag.map",
@@ -42,4 +43,5 @@
     test_suites: [
         "general-tests",
     ],
+    jarjar_rules: "jarjar.txt",
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/jarjar.txt b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt
new file mode 100644
index 0000000..a6c17fa
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt
@@ -0,0 +1,15 @@
+rule android.aconfig.storage.AconfigStorageException android.aconfig.storage.test.AconfigStorageException
+rule android.aconfig.storage.FlagTable android.aconfig.storage.test.FlagTable
+rule android.aconfig.storage.PackageTable android.aconfig.storage.test.PackageTable
+rule android.aconfig.storage.ByteBufferReader android.aconfig.storage.test.ByteBufferReader
+rule android.aconfig.storage.FlagType android.aconfig.storage.test.FlagType
+rule android.aconfig.storage.SipHasher13 android.aconfig.storage.test.SipHasher13
+rule android.aconfig.storage.FileType android.aconfig.storage.test.FileType
+rule android.aconfig.storage.FlagValueList android.aconfig.storage.test.FlagValueList
+rule android.aconfig.storage.TableUtils android.aconfig.storage.test.TableUtils
+
+
+rule android.aconfig.storage.FlagTable$* android.aconfig.storage.test.FlagTable$@1
+rule android.aconfig.storage.PackageTable$* android.aconfig.storage.test.PackageTable$@1
+rule android.aconfig.storage.FlagValueList$* android.aconfig.storage.test.FlagValueList$@1
+rule android.aconfig.storage.SipHasher13$* android.aconfig.storage.test.SipHasher13$@1
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
index c18590a..1b0de63 100644
--- a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
@@ -53,7 +53,7 @@
         assertEquals(expected.length, flagValueList.size());
 
         for (int i = 0; i < flagValueList.size(); i++) {
-            assertEquals(expected[i], flagValueList.get(i));
+            assertEquals(expected[i], flagValueList.getBoolean(i));
         }
     }
 
@@ -68,10 +68,10 @@
 
         PackageTable.Node pNode = packageTable.get("com.android.aconfig.storage.test_1");
         FlagTable.Node fNode = flagTable.get(pNode.getPackageId(), "enabled_rw");
-        assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
+        assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
 
         pNode = packageTable.get("com.android.aconfig.storage.test_4");
         fNode = flagTable.get(pNode.getPackageId(), "enabled_fixed_ro");
-        assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
+        assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
     }
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java
new file mode 100644
index 0000000..10620d2
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import android.aconfig.storage.SipHasher13;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class SipHasher13Test {
+    @Test
+    public void testSipHash_hashString() throws Exception {
+        String testStr = "com.google.android.test";
+        long result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0xF86572EFF9C4A0C1L, result);
+
+        testStr = "abcdefg";
+        result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0x2295EF44BD078AE9L, result);
+
+        testStr = "abcdefgh";
+        result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0x5CD7657FA7F96C16L, result);
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/Android.bp b/tools/aconfig/aconfig_storage_read_api/Android.bp
index 9e950a6..f96b223 100644
--- a/tools/aconfig/aconfig_storage_read_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/Android.bp
@@ -147,6 +147,7 @@
     crate_name: "aconfig_storage_read_api_rust_jni",
     srcs: ["srcs/lib.rs"],
     rustlibs: [
+        "libaconfig_storage_file",
         "libaconfig_storage_read_api",
         "libanyhow",
         "libjni",
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
index 406ff24..850c2b8 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
@@ -16,18 +16,14 @@
 
 package android.aconfig.storage;
 
+import dalvik.annotation.optimization.FastNative;
+
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
-import java.nio.channels.FileChannel.MapMode;
-
-import android.aconfig.storage.PackageReadContext;
-import android.aconfig.storage.FlagReadContext;
-
-import dalvik.annotation.optimization.FastNative;
 
 public class AconfigStorageReadAPI {
 
@@ -50,9 +46,8 @@
     }
 
     // Map a storage file given container and file type
-    public static MappedByteBuffer getMappedFile(
-        String container,
-        StorageFileType type) throws IOException{
+    public static MappedByteBuffer getMappedFile(String container, StorageFileType type)
+            throws IOException {
         switch (type) {
             case PACKAGE_MAP:
                 return mapStorageFile(STORAGEDIR + "/maps/" + container + ".package.map");
@@ -73,14 +68,14 @@
     // @throws IOException if the passed in file is not a valid package map file
     @FastNative
     private static native ByteBuffer getPackageReadContextImpl(
-        ByteBuffer mappedFile, String packageName) throws IOException;
+            ByteBuffer mappedFile, String packageName) throws IOException;
 
     // API to get package read context
     // @param mappedFile: memory mapped package map file
     // @param packageName: package name
     // @throws IOException if the passed in file is not a valid package map file
-    static public PackageReadContext getPackageReadContext (
-        ByteBuffer mappedFile, String packageName) throws IOException {
+    public static PackageReadContext getPackageReadContext(
+            ByteBuffer mappedFile, String packageName) throws IOException {
         ByteBuffer buffer = getPackageReadContextImpl(mappedFile, packageName);
         buffer.order(ByteOrder.LITTLE_ENDIAN);
         return new PackageReadContext(buffer.getInt(), buffer.getInt(4));
@@ -94,7 +89,7 @@
     // @throws IOException if the passed in file is not a valid flag map file
     @FastNative
     private static native ByteBuffer getFlagReadContextImpl(
-        ByteBuffer mappedFile, int packageId, String flagName) throws IOException;
+            ByteBuffer mappedFile, int packageId, String flagName) throws IOException;
 
     // API to get flag read context
     // @param mappedFile: memory mapped flag map file
@@ -103,7 +98,7 @@
     // @param flagName: flag name
     // @throws IOException if the passed in file is not a valid flag map file
     public static FlagReadContext getFlagReadContext(
-        ByteBuffer mappedFile, int packageId, String flagName) throws IOException {
+            ByteBuffer mappedFile, int packageId, String flagName) throws IOException {
         ByteBuffer buffer = getFlagReadContextImpl(mappedFile, packageId, flagName);
         buffer.order(ByteOrder.LITTLE_ENDIAN);
         return new FlagReadContext(buffer.getInt(), buffer.getInt(4));
@@ -115,8 +110,11 @@
     // @throws IOException if the passed in file is not a valid flag value file or the
     // flag index went over the file boundary.
     @FastNative
-    public static native boolean getBooleanFlagValue(
-        ByteBuffer mappedFile, int flagIndex) throws IOException;
+    public static native boolean getBooleanFlagValue(ByteBuffer mappedFile, int flagIndex)
+            throws IOException;
+
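+    // API to get the SipHash13 hash of a package name or full flag name; the result
+    // matches the hashing used to compute storage file bucket indexes.
+    // @param packageName: the string to hash
+    // @throws IOException if the string cannot be read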
+    @FastNative
+    public static native long hash(String packageName) throws IOException;
 
     static {
         System.loadLibrary("aconfig_storage_read_api_rust_jni");
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
index 71c1c0d..29ebee5 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
@@ -56,7 +56,7 @@
         if (index >= mFlagValueList.size()) {
             throw new AconfigStorageException("Fail to get boolean flag value");
         }
-        return mFlagValueList.get(index);
+        return mFlagValueList.getBoolean(index);
     }
 
     private int getPackageBooleanStartOffset(String packageName) {
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
index 304a059..f5f12bb 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
@@ -1,5 +1,6 @@
 //! aconfig storage read api java rust interlop
 
+use aconfig_storage_file::SipHasher13;
 use aconfig_storage_read_api::flag_table_query::find_flag_read_context;
 use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
 use aconfig_storage_read_api::package_table_query::find_package_read_context;
@@ -7,8 +8,9 @@
 
 use anyhow::Result;
 use jni::objects::{JByteBuffer, JClass, JString};
-use jni::sys::{jboolean, jint};
+use jni::sys::{jboolean, jint, jlong};
 use jni::JNIEnv;
+use std::hash::Hasher;
 
 /// Call rust find package read context
 fn get_package_read_context_java(
@@ -158,3 +160,30 @@
         }
     }
 }
+
+/// Get flag value JNI
+#[no_mangle]
+#[allow(unused)]
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_hash<'local>(
+    mut env: JNIEnv<'local>,
+    class: JClass<'local>,
+    package_name: JString<'local>,
+) -> jlong {
+    match siphasher13_hash(&mut env, package_name) {
+        Ok(value) => value as jlong,
+        Err(errmsg) => {
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
+            0i64
+        }
+    }
+}
+
+fn siphasher13_hash(env: &mut JNIEnv, package_name: JString) -> Result<u64> {
+    // SAFETY:
+    // The safety here is ensured as the package name is guaranteed to be a valid Java string
+    let flag_name: String = unsafe { env.get_string_unchecked(&package_name)?.into() };
+    let mut s = SipHasher13::new();
+    s.write(flag_name.as_bytes());
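+    // Append the same 0xff terminator used by get_bucket_index so this hash matches
+    // the storage file hashing.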
+    s.write_u8(0xff);
+    Ok(s.finish())
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
index a26b257..191741e 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
@@ -16,28 +16,29 @@
 
 package android.aconfig.storage.test;
 
-import java.io.IOException;
-import java.nio.MappedByteBuffer;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+
+import android.aconfig.DeviceProtos;
+import android.aconfig.nano.Aconfig.parsed_flag;
+import android.aconfig.storage.AconfigStorageReadAPI;
+import android.aconfig.storage.FlagReadContext;
+import android.aconfig.storage.FlagReadContext.StoredFlagType;
+import android.aconfig.storage.PackageReadContext;
+import android.aconfig.storage.SipHasher13;
+import android.aconfig.storage.StorageInternalReader;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 
-import android.aconfig.storage.AconfigStorageReadAPI;
-import android.aconfig.storage.PackageReadContext;
-import android.aconfig.storage.FlagReadContext;
-import android.aconfig.storage.FlagReadContext.StoredFlagType;
+import java.io.IOException;
+import java.nio.MappedByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
 
 @RunWith(JUnit4.class)
-public class AconfigStorageReadAPITest{
+public class AconfigStorageReadAPITest {
 
     private String mStorageDir = "/data/local/tmp/aconfig_java_api_test";
 
@@ -45,26 +46,29 @@
     public void testPackageContextQuery() {
         MappedByteBuffer packageMap = null;
         try {
-            packageMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.package.map");
-        } catch(IOException ex){
+            packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
         assertTrue(packageMap != null);
 
         try {
-            PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "com.android.aconfig.storage.test_1");
+            PackageReadContext context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_1");
             assertEquals(context.mPackageId, 0);
             assertEquals(context.mBooleanStartIndex, 0);
 
-            context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "com.android.aconfig.storage.test_2");
+            context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_2");
             assertEquals(context.mPackageId, 1);
             assertEquals(context.mBooleanStartIndex, 3);
 
-            context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "com.android.aconfig.storage.test_4");
+            context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_4");
             assertEquals(context.mPackageId, 2);
             assertEquals(context.mBooleanStartIndex, 6);
         } catch (IOException ex) {
@@ -76,19 +80,19 @@
     public void testNonExistPackageContextQuery() {
         MappedByteBuffer packageMap = null;
         try {
-            packageMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.package.map");
-        } catch(IOException ex){
+            packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
         assertTrue(packageMap != null);
 
         try {
-            PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "unknown");
+            PackageReadContext context =
+                    AconfigStorageReadAPI.getPackageReadContext(packageMap, "unknown");
             assertEquals(context.mPackageId, -1);
             assertEquals(context.mBooleanStartIndex, -1);
-        } catch(IOException ex){
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
     }
@@ -97,12 +101,11 @@
     public void testFlagContextQuery() {
         MappedByteBuffer flagMap = null;
         try {
-            flagMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.flag.map");
-        } catch(IOException ex){
+            flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagMap!= null);
+        assertTrue(flagMap != null);
 
         class Baseline {
             public int mPackageId;
@@ -110,10 +113,8 @@
             public StoredFlagType mFlagType;
             public int mFlagIndex;
 
-            public Baseline(int packageId,
-                    String flagName,
-                    StoredFlagType flagType,
-                    int flagIndex) {
+            public Baseline(
+                    int packageId, String flagName, StoredFlagType flagType, int flagIndex) {
                 mPackageId = packageId;
                 mFlagName = flagName;
                 mFlagType = flagType;
@@ -133,8 +134,9 @@
 
         try {
             for (Baseline baseline : baselines) {
-                FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
-                    flagMap, baseline.mPackageId,  baseline.mFlagName);
+                FlagReadContext context =
+                        AconfigStorageReadAPI.getFlagReadContext(
+                                flagMap, baseline.mPackageId, baseline.mFlagName);
                 assertEquals(context.mFlagType, baseline.mFlagType);
                 assertEquals(context.mFlagIndex, baseline.mFlagIndex);
             }
@@ -147,21 +149,19 @@
     public void testNonExistFlagContextQuery() {
         MappedByteBuffer flagMap = null;
         try {
-            flagMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.flag.map");
-        } catch(IOException ex){
+            flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map");
+        } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagMap!= null);
+        assertTrue(flagMap != null);
 
         try {
-            FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
-                flagMap, 0,  "unknown");
+            FlagReadContext context =
+                    AconfigStorageReadAPI.getFlagReadContext(flagMap, 0, "unknown");
             assertEquals(context.mFlagType, null);
             assertEquals(context.mFlagIndex, -1);
 
-            context = AconfigStorageReadAPI.getFlagReadContext(
-                flagMap, 3,  "enabled_ro");
+            context = AconfigStorageReadAPI.getFlagReadContext(flagMap, 3, "enabled_ro");
             assertEquals(context.mFlagType, null);
             assertEquals(context.mFlagIndex, -1);
         } catch (IOException ex) {
@@ -173,12 +173,11 @@
     public void testBooleanFlagValueQuery() {
         MappedByteBuffer flagVal = null;
         try {
-            flagVal = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/boot/mockup.val");
+            flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val");
         } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagVal!= null);
+        assertTrue(flagVal != null);
 
         boolean[] baselines = {false, true, true, false, true, true, true, true};
         for (int i = 0; i < 8; ++i) {
@@ -195,12 +194,11 @@
     public void testInvalidBooleanFlagValueQuery() {
         MappedByteBuffer flagVal = null;
         try {
-            flagVal = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/boot/mockup.val");
+            flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val");
         } catch (IOException ex) {
             assertTrue(ex.toString(), false);
         }
-        assertTrue(flagVal!= null);
+        assertTrue(flagVal != null);
 
         try {
             Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9);
@@ -210,4 +208,63 @@
             assertTrue(ex.toString(), ex.toString().contains(expectedErrmsg));
         }
     }
- }
+
+    @Test
+    public void testRustJavaEqualHash() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+        for (parsed_flag flag : flags) {
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            long rHash = AconfigStorageReadAPI.hash(packageName);
+            long jHash = SipHasher13.hash(packageName.getBytes());
+            assertEquals(rHash, jHash);
+
+            String fullFlagName = packageName + "/" + flagName;
+            rHash = AconfigStorageReadAPI.hash(fullFlagName);
+            jHash = SipHasher13.hash(fullFlagName.getBytes());
+            assertEquals(rHash, jHash);
+        }
+    }
+
+    @Test
+    public void testRustJavaEqualFlag() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+
+        String mapPath = "/metadata/aconfig/maps/";
+        String flagsPath = "/metadata/aconfig/boot/";
+
+        for (parsed_flag flag : flags) {
+
+            String container = flag.container;
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            String fullFlagName = packageName + "/" + flagName;
+
+            MappedByteBuffer packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".package.map");
+            MappedByteBuffer flagMap =
+                    AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".flag.map");
+            MappedByteBuffer flagValList =
+                    AconfigStorageReadAPI.mapStorageFile(flagsPath + container + ".val");
+
+            PackageReadContext packageContext =
+                    AconfigStorageReadAPI.getPackageReadContext(packageMap, packageName);
+
+            FlagReadContext flagContext =
+                    AconfigStorageReadAPI.getFlagReadContext(
+                            flagMap, packageContext.mPackageId, flagName);
+
+            boolean rVal =
+                    AconfigStorageReadAPI.getBooleanFlagValue(
+                            flagValList,
+                            packageContext.mBooleanStartIndex + flagContext.mFlagIndex);
+
+            StorageInternalReader reader = new StorageInternalReader(container, packageName);
+            boolean jVal = reader.getBooleanFlagValue(flagContext.mFlagIndex);
+
+            long rHash = AconfigStorageReadAPI.hash(packageName);
+            long jHash = SipHasher13.hash(packageName.getBytes());
+            assertEquals(rVal, jVal);
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
index 11b3824..3d4e9ad 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
@@ -2,6 +2,8 @@
     name: "aconfig_storage_read_api.test.java",
     srcs: ["./**/*.java"],
     static_libs: [
+        "aconfig_device_paths_java",
+        "aconfig_storage_file_java",
         "aconfig_storage_reader_java",
         "androidx.test.rules",
         "libaconfig_storage_read_api_java",
diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp
index d6a1f22..7704742 100644
--- a/tools/aconfig/fake_device_config/Android.bp
+++ b/tools/aconfig/fake_device_config/Android.bp
@@ -22,6 +22,7 @@
     sdk_version: "none",
     system_modules: "core-all-system-modules",
     host_supported: true,
+    is_stubs_module: true,
 }
 
 java_library {
@@ -31,4 +32,5 @@
     ],
     sdk_version: "core_current",
     host_supported: true,
+    is_stubs_module: true,
 }
diff --git a/tools/filelistdiff/allowlist b/tools/filelistdiff/allowlist
index 943f955..073a8de 100644
--- a/tools/filelistdiff/allowlist
+++ b/tools/filelistdiff/allowlist
@@ -1,55 +1,5 @@
 # Known diffs only in the KATI system image
 etc/NOTICE.xml.gz
-etc/compatconfig/TeleService-platform-compat-config.xml
-etc/compatconfig/calendar-provider-compat-config.xml
-etc/compatconfig/contacts-provider-platform-compat-config.xml
-etc/compatconfig/documents-ui-compat-config.xml
-etc/compatconfig/framework-location-compat-config.xml
-etc/compatconfig/framework-platform-compat-config.xml
-etc/compatconfig/icu4j-platform-compat-config.xml
-etc/compatconfig/services-platform-compat-config.xml
-etc/permissions/android.software.credentials.xml
-etc/permissions/android.software.preview_sdk.xml
-etc/permissions/android.software.webview.xml
-etc/permissions/android.software.window_magnification.xml
-etc/permissions/com.android.adservices.sdksandbox.xml
-etc/security/otacerts.zip
-etc/vintf/compatibility_matrix.202404.xml
-etc/vintf/compatibility_matrix.202504.xml
-etc/vintf/compatibility_matrix.5.xml
-etc/vintf/compatibility_matrix.6.xml
-etc/vintf/compatibility_matrix.7.xml
-etc/vintf/compatibility_matrix.8.xml
-etc/vintf/compatibility_matrix.device.xml
-etc/vintf/manifest.xml
-framework/boot-apache-xml.vdex
-framework/boot-apache-xml.vdex.fsv_meta
-framework/boot-bouncycastle.vdex
-framework/boot-bouncycastle.vdex.fsv_meta
-framework/boot-core-icu4j.vdex
-framework/boot-core-icu4j.vdex.fsv_meta
-framework/boot-core-libart.vdex
-framework/boot-core-libart.vdex.fsv_meta
-framework/boot-ext.vdex
-framework/boot-ext.vdex.fsv_meta
-framework/boot-framework-adservices.vdex
-framework/boot-framework-adservices.vdex.fsv_meta
-framework/boot-framework-graphics.vdex
-framework/boot-framework-graphics.vdex.fsv_meta
-framework/boot-framework-location.vdex
-framework/boot-framework-location.vdex.fsv_meta
-framework/boot-framework.vdex
-framework/boot-framework.vdex.fsv_meta
-framework/boot-ims-common.vdex
-framework/boot-ims-common.vdex.fsv_meta
-framework/boot-okhttp.vdex
-framework/boot-okhttp.vdex.fsv_meta
-framework/boot-telephony-common.vdex
-framework/boot-telephony-common.vdex.fsv_meta
-framework/boot-voip-common.vdex
-framework/boot-voip-common.vdex.fsv_meta
-framework/boot.vdex
-framework/boot.vdex.fsv_meta
 framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex
 framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta
 framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 9b134f2..cf7e2ae 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -96,6 +96,7 @@
     ],
     libs: [
         "apex_manifest",
+        "releasetools_apex_utils",
         "releasetools_common",
     ],
     required: [
@@ -107,7 +108,7 @@
 python_library_host {
     name: "ota_metadata_proto",
     srcs: [
-       "ota_metadata.proto",
+        "ota_metadata.proto",
     ],
     proto: {
         canonical_path_from_root: false,
@@ -117,7 +118,7 @@
 cc_library_static {
     name: "ota_metadata_proto_cc",
     srcs: [
-       "ota_metadata.proto",
+        "ota_metadata.proto",
     ],
     host_supported: true,
     recovery_available: true,
@@ -144,7 +145,7 @@
             static_libs: ["libprotobuf-java-nano"],
         },
     },
-    visibility: ["//frameworks/base:__subpackages__"]
+    visibility: ["//frameworks/base:__subpackages__"],
 }
 
 python_defaults {
@@ -436,7 +437,7 @@
     name: "check_target_files_vintf",
     defaults: [
         "releasetools_binary_defaults",
-        "releasetools_check_target_files_vintf_defaults"
+        "releasetools_check_target_files_vintf_defaults",
     ],
 }
 
@@ -546,13 +547,14 @@
     defaults: ["releasetools_binary_defaults"],
     srcs: [
         "sign_target_files_apks.py",
-        "payload_signer.py",
-        "ota_signing_utils.py",
+        "ota_from_raw_img.py",
     ],
     libs: [
         "releasetools_add_img_to_target_files",
         "releasetools_apex_utils",
         "releasetools_common",
+        "ota_metadata_proto",
+        "ota_utils_lib",
     ],
 }
 
@@ -632,7 +634,7 @@
     data: [
         "testdata/**/*",
         ":com.android.apex.compressed.v1",
-        ":com.android.apex.vendor.foo.with_vintf"
+        ":com.android.apex.vendor.foo.with_vintf",
     ],
     target: {
         darwin: {
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3abef3b..54df955 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -36,6 +36,8 @@
 
 APEX_PUBKEY = 'apex_pubkey'
 
+# Partitions supporting APEXes
+PARTITIONS = ['system', 'system_ext', 'product', 'vendor', 'odm']
 
 class ApexInfoError(Exception):
   """An Exception raised during Apex Information command."""
@@ -550,7 +552,7 @@
   if not isinstance(input_file, str):
     raise RuntimeError("must pass filepath to target-files zip or directory")
   apex_infos = []
-  for partition in ['system', 'system_ext', 'product', 'vendor']:
+  for partition in PARTITIONS:
     apex_infos.extend(GetApexInfoForPartition(input_file, partition))
   return apex_infos
 
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index b8dcd84..dc123ef 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -30,6 +30,7 @@
 import sys
 import zipfile
 
+import apex_utils
 import common
 from apex_manifest import ParseApexManifest
 
@@ -229,7 +230,7 @@
   apex_host = os.path.join(OPTIONS.search_path, 'bin', 'apexd_host')
   cmd = [apex_host, '--tool_path', OPTIONS.search_path]
   cmd += ['--apex_path', dirmap['/apex']]
-  for p in ['system', 'system_ext', 'product', 'vendor']:
+  for p in apex_utils.PARTITIONS:
     if '/' + p in dirmap:
       cmd += ['--' + p + '_path', dirmap['/' + p]]
   common.RunAndCheckOutput(cmd)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index f6f6944..edd4366 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -898,7 +898,7 @@
       if key.endswith("selinux_fc"):
         fc_basename = os.path.basename(d[key])
         fc_config = os.path.join(input_file, "META", fc_basename)
-        assert os.path.exists(fc_config)
+        assert os.path.exists(fc_config), "{} does not exist".format(fc_config)
 
         d[key] = fc_config
 
@@ -907,9 +907,10 @@
     d["root_fs_config"] = os.path.join(
         input_file, "META", "root_filesystem_config.txt")
 
+    partitions = ["system", "vendor", "system_ext", "product", "odm",
+                  "vendor_dlkm", "odm_dlkm", "system_dlkm"]
     # Redirect {partition}_base_fs_file for each of the named partitions.
-    for part_name in ["system", "vendor", "system_ext", "product", "odm",
-                      "vendor_dlkm", "odm_dlkm", "system_dlkm"]:
+    for part_name in partitions:
       key_name = part_name + "_base_fs_file"
       if key_name not in d:
         continue
@@ -922,6 +923,25 @@
             "Failed to find %s base fs file: %s", part_name, base_fs_file)
         del d[key_name]
 
+    # Redirecting helper for optional properties like erofs_compress_hints
+    def redirect_file(prop, filename):
+      if prop not in d:
+        return
+      config_file = os.path.join(input_file, "META/" + filename)
+      if os.path.exists(config_file):
+        d[prop] = config_file
+      else:
+        logger.warning(
+            "Failed to find %s fro %s", filename, prop)
+        del d[prop]
+
+    # Redirect erofs_[default_]compress_hints files
+    redirect_file("erofs_default_compress_hints",
+                  "erofs_default_compress_hints.txt")
+    for part in partitions:
+      redirect_file(part + "_erofs_compress_hints",
+                    part + "_erofs_compress_hints.txt")
+
   def makeint(key):
     if key in d:
       d[key] = int(d[key], 0)
@@ -2988,7 +3008,7 @@
     zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
-def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
+def ZipWriteStr(zip_file: zipfile.ZipFile, zinfo_or_arcname, data, perms=None,
                 compress_type=None):
   """Wrap zipfile.writestr() function to work around the zip64 limit.
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index b8f848f..b485440 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -189,6 +189,8 @@
 from xml.etree import ElementTree
 
 import add_img_to_target_files
+import ota_from_raw_img
+import ota_utils
 import apex_utils
 import common
 import payload_signer
@@ -579,7 +581,61 @@
         filename.endswith("/prop.default")
 
 
-def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info,
+def RegenerateKernelPartitions(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info):
+  """Re-generate boot and dtbo partitions using new signing configuration"""
+  if OPTIONS.input_tmp is None:
+    OPTIONS.input_tmp = common.UnzipTemp(input_tf_zip.filename, [
+                                "*/boot.img", "*/dtbo.img"])
+  else:
+    common.UnzipToDir(input_tf_zip, OPTIONS.input_tmp, [
+                                "*/boot.img", "*/dtbo.img"])
+  unzip_dir = OPTIONS.input_tmp
+  image_dir = os.path.join(unzip_dir, "IMAGES")
+  shutil.rmtree(image_dir)
+  os.makedirs(image_dir, exist_ok=True)
+
+  boot_image = common.GetBootableImage(
+      "IMAGES/boot.img", "boot.img", unzip_dir, "BOOT", misc_info)
+  if boot_image:
+    boot_image.WriteToDir(unzip_dir)
+    boot_image = os.path.join(unzip_dir, boot_image.name)
+    common.ZipWrite(output_tf_zip, boot_image, "IMAGES/boot.img",
+                    compress_type=zipfile.ZIP_STORED)
+  add_img_to_target_files.AddDtbo(output_tf_zip)
+  return unzip_dir
+
+
+def RegenerateBootOTA(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info, filename, input_ota):
+  if filename not in ["VENDOR/boot_otas/boot_ota_4k.zip", "SYSTEM/boot_otas/boot_ota_4k.zip"]:
+    # Only the 4K boot OTA needs to be re-generated; other OTA packages
+    # are simply copied as-is.
+    with input_tf_zip.open(filename, "r") as in_fp:
+      shutil.copyfileobj(in_fp, input_ota)
+      input_ota.flush()
+    return
+  timestamp = misc_info["build.prop"].GetProp(
+      "ro.system.build.date.utc")
+  unzip_dir = RegenerateKernelPartitions(
+      input_tf_zip, output_tf_zip, misc_info)
+  signed_boot_image = os.path.join(unzip_dir, "IMAGES/boot.img")
+  signed_dtbo_image = os.path.join(unzip_dir, "IMAGES/dtbo.img")
+
+  if not os.path.exists(signed_boot_image):
+    logger.warn("Need to re-generate boot OTA {} but failed to get signed boot image. 16K dev option will be impacted, after rolling back to 4K user would need to sideload/flash their device to continue receiving OTAs.")
+    return
+  logger.info(
+      "Re-generating boot OTA {} with timestamp {}".format(filename, timestamp))
+  args = ["ota_from_raw_img", "--package_key", OPTIONS.package_key,
+          "--max_timestamp", timestamp, "--output", input_ota.name]
+  if os.path.exists(signed_dtbo_image):
+    args.extend(["--partition_name", "boot,dtbo",
+                signed_boot_image, signed_dtbo_image])
+  else:
+    args.extend(["--partition_name", "boot", signed_boot_image])
+  ota_from_raw_img.main(args)
+
+
+def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
                        compressed_extension):
@@ -593,6 +649,14 @@
     # Sets this to zero for targets without APK files.
     maxsize = 0
 
+  # Replace the AVB signing keys, if any.
+  ReplaceAvbSigningKeys(misc_info)
+  OPTIONS.info_dict = misc_info
+
+  # Rewrite the props in AVB signing args.
+  if misc_info.get('avb_enable') == 'true':
+    RewriteAvbProps(misc_info)
+
   for info in input_tf_zip.infolist():
     filename = info.filename
     if filename.startswith("IMAGES/"):
@@ -670,9 +734,9 @@
     elif filename.endswith(".zip") and IsEntryOtaPackage(input_tf_zip, filename):
       logger.info("Re-signing OTA package {}".format(filename))
       with tempfile.NamedTemporaryFile() as input_ota, tempfile.NamedTemporaryFile() as output_ota:
-        with input_tf_zip.open(filename, "r") as in_fp:
-          shutil.copyfileobj(in_fp, input_ota)
-          input_ota.flush()
+        RegenerateBootOTA(input_tf_zip, output_tf_zip,
+                          misc_info, filename, input_ota)
+
         SignOtaPackage(input_ota.name, output_ota.name)
         common.ZipWrite(output_tf_zip, output_ota.name, filename,
                         compress_type=zipfile.ZIP_STORED)
@@ -811,17 +875,18 @@
         common.ZipWrite(output_tf_zip, image.name, filename)
     # A non-APK file; copy it verbatim.
     else:
-      common.ZipWriteStr(output_tf_zip, out_info, data)
+      try:
+        entry = output_tf_zip.getinfo(filename)
+        if output_tf_zip.read(entry) != data:
+          logger.warning(
+              "Output zip contains duplicate entries for %s with different contents", filename)
+        continue
+      except KeyError:
+        common.ZipWriteStr(output_tf_zip, out_info, data)
 
   if OPTIONS.replace_ota_keys:
     ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
 
-  # Replace the AVB signing keys, if any.
-  ReplaceAvbSigningKeys(misc_info)
-
-  # Rewrite the props in AVB signing args.
-  if misc_info.get('avb_enable') == 'true':
-    RewriteAvbProps(misc_info)
 
   # Write back misc_info with the latest values.
   ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
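
To make the non-APK duplicate-entry guard above easier to follow, here is a standalone sketch using only the standard zipfile module. write_once and the in-memory archive are hypothetical; the patch inlines this logic in ProcessTargetFiles:

# Hypothetical sketch of the "skip duplicate zip entries" guard.
import io
import logging
import zipfile

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def write_once(zf: zipfile.ZipFile, arcname: str, data: bytes):
  """Writes arcname unless it already exists; warns if contents differ."""
  try:
    entry = zf.getinfo(arcname)
    if zf.read(entry) != data:
      logger.warning(
          "Output zip contains duplicate entries for %s with different "
          "contents", arcname)
    return
  except KeyError:
    zf.writestr(arcname, data)


buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
  write_once(zf, "META/foo.txt", b"hello")
  write_once(zf, "META/foo.txt", b"hello")      # silently skipped
  write_once(zf, "META/foo.txt", b"different")  # triggers the warning
with zipfile.ZipFile(buf, "r") as zf:
  print(zf.namelist())  # ['META/foo.txt'] -- written exactly once
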
diff --git a/tools/sbom/generate-sbom-framework_res.py b/tools/sbom/generate-sbom-framework_res.py
index d0d232d..27f3d2e 100644
--- a/tools/sbom/generate-sbom-framework_res.py
+++ b/tools/sbom/generate-sbom-framework_res.py
@@ -80,7 +80,8 @@
 
   resource_file_spdxids = []
   for file in layoutlib_sbom[sbom_writers.PropNames.FILES]:
-    if file[sbom_writers.PropNames.FILE_NAME].startswith('data/res/'):
+    file_path = file[sbom_writers.PropNames.FILE_NAME]
+    if file_path.startswith('data/res/') or file_path.startswith('data/overlays/'):
       resource_file_spdxids.append(file[sbom_writers.PropNames.SPDXID])
 
   doc.relationships = [