Merge "Move license metadata declaration of bramble/Android.mk into meta-lic.mk" into main
diff --git a/CleanSpec.mk b/CleanSpec.mk
index f8c96ff..f562279 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -787,6 +787,10 @@
 $(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/)
 $(call add-clean-step, find $(OUT_DIR) -type f -name "*.jar" -print0 | xargs -0 rm -f)
 
+# Remove obsolete dexpreopt_config artifacts
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/dexpreopt_config/dexpreopt.config)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/dexpreopt_config/dexpreopt_soong.config)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/ci/Android.bp b/ci/Android.bp
index 066b83f..104f517 100644
--- a/ci/Android.bp
+++ b/ci/Android.bp
@@ -14,6 +14,7 @@
 
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
+    default_team: "trendy_team_adte",
 }
 
 python_test_host {
@@ -74,6 +75,7 @@
     name: "build_test_suites",
     srcs: [
         "build_test_suites.py",
+        "optimized_targets.py",
     ],
 }
 
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 29ed50e..6e1f88c 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -15,11 +15,19 @@
 """Build script for the CI `test_suites` target."""
 
 import argparse
+from dataclasses import dataclass
+import json
 import logging
 import os
 import pathlib
 import subprocess
 import sys
+from typing import Callable
+import optimized_targets
+
+
+REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
+SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
 
 
 class Error(Exception):
@@ -35,16 +43,54 @@
     self.return_code = return_code
 
 
-REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
-SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
+class BuildPlanner:
+  """Class in charge of determining how to optimize build targets.
+
+  Given the build context and the targets to build, it determines the final list
+  of targets to build along with a set of packaging functions that package up
+  any output zip files needed by the build.
+  """
+
+  def __init__(
+      self,
+      build_context: dict[str, any],
+      args: argparse.Namespace,
+      target_optimizations: dict[str, optimized_targets.OptimizedBuildTarget],
+  ):
+    self.build_context = build_context
+    self.args = args
+    self.target_optimizations = target_optimizations
+
+  def create_build_plan(self):
+
+    if 'optimized_build' not in self.build_context['enabled_build_features']:
+      return BuildPlan(set(self.args.extra_targets), set())
+
+    build_targets = set()
+    packaging_functions = set()
+    for target in self.args.extra_targets:
+      target_optimizer_getter = self.target_optimizations.get(target, None)
+      if not target_optimizer_getter:
+        build_targets.add(target)
+        continue
+
+      target_optimizer = target_optimizer_getter(
+          target, self.build_context, self.args
+      )
+      build_targets.update(target_optimizer.get_build_targets())
+      packaging_functions.add(target_optimizer.package_outputs)
+
+    return BuildPlan(build_targets, packaging_functions)
 
 
-def get_top() -> pathlib.Path:
-  return pathlib.Path(os.environ['TOP'])
+@dataclass(frozen=True)
+class BuildPlan:
+  build_targets: set[str]
+  packaging_functions: set[Callable[..., None]]
 
 
 def build_test_suites(argv: list[str]) -> int:
-  """Builds the general-tests and any other test suites passed in.
+  """Builds all test suites passed in, optimizing based on the build_context content.
 
   Args:
     argv: The command line arguments passed in.
@@ -54,9 +100,14 @@
   """
   args = parse_args(argv)
   check_required_env()
+  build_context = load_build_context()
+  build_planner = BuildPlanner(
+      build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS
+  )
+  build_plan = build_planner.create_build_plan()
 
   try:
-    build_everything(args)
+    execute_build_plan(build_plan)
   except BuildFailureError as e:
     logging.error('Build command failed! Check build_log for details.')
     return e.return_code
@@ -64,6 +115,16 @@
   return 0
 
 
+def parse_args(argv: list[str]) -> argparse.Namespace:
+  argparser = argparse.ArgumentParser()
+
+  argparser.add_argument(
+      'extra_targets', nargs='*', help='Extra test suites to build.'
+  )
+
+  return argparser.parse_args(argv)
+
+
 def check_required_env():
   """Check for required env vars.
 
@@ -79,43 +140,40 @@
   raise Error(f'Missing required environment variables: {t}')
 
 
-def parse_args(argv):
-  argparser = argparse.ArgumentParser()
+def load_build_context():
+  build_context_path = pathlib.Path(os.environ.get('BUILD_CONTEXT', ''))
+  if build_context_path.is_file():
+    try:
+      with open(build_context_path, 'r') as f:
+        return json.load(f)
+    except json.decoder.JSONDecodeError as e:
+      raise Error(f'Failed to load JSON file: {build_context_path}')
 
-  argparser.add_argument(
-      'extra_targets', nargs='*', help='Extra test suites to build.'
-  )
-
-  return argparser.parse_args(argv)
+  logging.info('No BUILD_CONTEXT found, skipping optimizations.')
+  return empty_build_context()
 
 
-def build_everything(args: argparse.Namespace):
-  """Builds all tests (regardless of whether they are needed).
+def empty_build_context():
+  return {'enabled_build_features': []}
 
-  Args:
-    args: The parsed arguments.
 
-  Raises:
-    BuildFailure: If the build command fails.
-  """
-  build_command = base_build_command(args, args.extra_targets)
+def execute_build_plan(build_plan: BuildPlan):
+  build_command = []
+  build_command.append(get_top().joinpath(SOONG_UI_EXE_REL_PATH))
+  build_command.append('--make-mode')
+  build_command.extend(build_plan.build_targets)
 
   try:
     run_command(build_command)
   except subprocess.CalledProcessError as e:
     raise BuildFailureError(e.returncode) from e
 
+  for packaging_function in build_plan.packaging_functions:
+    packaging_function()
 
-def base_build_command(
-    args: argparse.Namespace, extra_targets: set[str]
-) -> list[str]:
 
-  build_command = []
-  build_command.append(get_top().joinpath(SOONG_UI_EXE_REL_PATH))
-  build_command.append('--make-mode')
-  build_command.extend(extra_targets)
-
-  return build_command
+def get_top() -> pathlib.Path:
+  return pathlib.Path(os.environ['TOP'])
 
 
 def run_command(args: list[str], stdout=None):
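
For reviewers, a minimal sketch of how the new planner resolves a plan; the target name and feature list are hypothetical, and OPTIMIZED_BUILD_TARGETS is assumed to still be the empty dict it is in this change:

```python
# Illustrative sketch only: BuildPlanner with optimized_build enabled but no
# per-target optimizer registered.
import argparse

import build_test_suites
import optimized_targets

args = argparse.Namespace(extra_targets=['general-tests'])  # hypothetical target
build_context = {'enabled_build_features': ['optimized_build']}

planner = build_test_suites.BuildPlanner(
    build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS
)
plan = planner.create_build_plan()

# With OPTIMIZED_BUILD_TARGETS empty, every extra target falls through
# unchanged and no packaging functions are collected.
assert plan.build_targets == {'general-tests'}
assert not plan.packaging_functions
```
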
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
index 08a79a3..a9ff3fb 100644
--- a/ci/build_test_suites_test.py
+++ b/ci/build_test_suites_test.py
@@ -14,7 +14,9 @@
 
 """Tests for build_test_suites.py"""
 
+import argparse
 from importlib import resources
+import json
 import multiprocessing
 import os
 import pathlib
@@ -27,9 +29,11 @@
 import textwrap
 import time
 from typing import Callable
+import unittest
 from unittest import mock
 import build_test_suites
 import ci_test_lib
+import optimized_targets
 from pyfakefs import fake_filesystem_unittest
 
 
@@ -80,12 +84,20 @@
     with self.assertRaisesRegex(SystemExit, '42'):
       build_test_suites.main([])
 
+  def test_incorrectly_formatted_build_context_raises(self):
+    build_context = self.fake_top.joinpath('build_context')
+    build_context.touch()
+    os.environ['BUILD_CONTEXT'] = str(build_context)
+
+    with self.assert_raises_word(build_test_suites.Error, 'JSON'):
+      build_test_suites.main([])
+
   def test_build_success_returns(self):
     with self.assertRaisesRegex(SystemExit, '0'):
       build_test_suites.main([])
 
   def assert_raises_word(self, cls, word):
-    return self.assertRaisesRegex(build_test_suites.Error, rf'\b{word}\b')
+    return self.assertRaisesRegex(cls, rf'\b{word}\b')
 
   def _setup_working_build_env(self):
     self.fake_top = pathlib.Path('/fake/top')
@@ -222,6 +234,171 @@
       os.kill(p.pid, signal.SIGINT)
 
 
+class BuildPlannerTest(unittest.TestCase):
+
+  class TestOptimizedBuildTarget(optimized_targets.OptimizedBuildTarget):
+
+    def __init__(self, output_targets):
+      self.output_targets = output_targets
+
+    def get_build_targets(self):
+      return self.output_targets
+
+    def package_outputs(self):
+      return f'packaging {" ".join(self.output_targets)}'
+
+  def test_build_optimization_off_builds_everything(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_context=self.create_build_context(optimized_build_enabled=False),
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_targets, build_plan.build_targets)
+
+  def test_build_optimization_off_doesnt_package(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_context=self.create_build_context(optimized_build_enabled=False),
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertEqual(len(build_plan.packaging_functions), 0)
+
+  def test_build_optimization_on_optimizes_target(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+        build_context=self.create_build_context(
+            enabled_build_features={self.get_target_flag('target_1')}
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    expected_targets = {self.get_optimized_target_name('target_1'), 'target_2'}
+    self.assertSetEqual(expected_targets, build_plan.build_targets)
+
+  def test_build_optimization_on_packages_target(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+        build_context=self.create_build_context(
+            enabled_build_features={self.get_target_flag('target_1')}
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    optimized_target_name = self.get_optimized_target_name('target_1')
+    self.assertIn(
+        f'packaging {optimized_target_name}',
+        self.run_packaging_functions(build_plan),
+    )
+
+  def test_individual_build_optimization_off_doesnt_optimize(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_targets, build_plan.build_targets)
+
+  def test_individual_build_optimization_off_doesnt_package(self):
+    build_targets = {'target_1', 'target_2'}
+    build_planner = self.create_build_planner(
+        build_targets=build_targets,
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    expected_packaging_function_outputs = {None, None}
+    self.assertSetEqual(
+        expected_packaging_function_outputs,
+        self.run_packaging_functions(build_plan),
+    )
+
+  def create_build_planner(
+      self,
+      build_targets: set[str],
+      build_context: dict[str, any] = None,
+      args: argparse.Namespace = None,
+      target_optimizations: dict[
+          str, optimized_targets.OptimizedBuildTarget
+      ] = None,
+  ) -> build_test_suites.BuildPlanner:
+    if not build_context:
+      build_context = self.create_build_context()
+    if not args:
+      args = self.create_args(extra_build_targets=build_targets)
+    if not target_optimizations:
+      target_optimizations = self.create_target_optimizations(
+          build_context, build_targets
+      )
+    return build_test_suites.BuildPlanner(
+        build_context, args, target_optimizations
+    )
+
+  def create_build_context(
+      self,
+      optimized_build_enabled: bool = True,
+      enabled_build_features: set[str] = set(),
+      test_context: dict[str, any] = {},
+  ) -> dict[str, any]:
+    build_context = {}
+    build_context['enabled_build_features'] = enabled_build_features
+    if optimized_build_enabled:
+      build_context['enabled_build_features'].add('optimized_build')
+    build_context['test_context'] = test_context
+    return build_context
+
+  def create_args(
+      self, extra_build_targets: set[str] = set()
+  ) -> argparse.Namespace:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('extra_targets', nargs='*')
+    return parser.parse_args(extra_build_targets)
+
+  def create_target_optimizations(
+      self, build_context: dict[str, any], build_targets: set[str]
+  ):
+    target_optimizations = dict()
+    for target in build_targets:
+      target_optimizations[target] = (
+          lambda target, build_context, args: optimized_targets.get_target_optimizer(
+              target,
+              self.get_target_flag(target),
+              build_context,
+              self.TestOptimizedBuildTarget(
+                  {self.get_optimized_target_name(target)}
+              ),
+          )
+      )
+
+    return target_optimizations
+
+  def get_target_flag(self, target: str):
+    return f'{target}_enabled'
+
+  def get_optimized_target_name(self, target: str):
+    return f'{target}_optimized'
+
+  def run_packaging_functions(
+      self, build_plan: build_test_suites.BuildPlan
+  ) -> set[str]:
+    output = set()
+    for packaging_function in build_plan.packaging_functions:
+      output.add(packaging_function())
+
+    return output
+
+
 def wait_until(
     condition_function: Callable[[], bool],
     timeout_secs: float = 3.0,
diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py
new file mode 100644
index 0000000..224c8c0
--- /dev/null
+++ b/ci/optimized_targets.py
@@ -0,0 +1,69 @@
+#
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC
+
+
+class OptimizedBuildTarget(ABC):
+  """A representation of an optimized build target.
+
+  This class determines what targets to build given a build_context and
+  provides a packaging function to generate any necessary output zips for the
+  build.
+  """
+
+  def __init__(self, build_context, args):
+    self.build_context = build_context
+    self.args = args
+
+  def get_build_targets(self):
+    pass
+
+  def package_outputs(self):
+    pass
+
+
+class NullOptimizer(OptimizedBuildTarget):
+  """No-op target optimizer.
+
+  This will simply build the same target it was given and do nothing for the
+  packaging step.
+  """
+
+  def __init__(self, target):
+    self.target = target
+
+  def get_build_targets(self):
+    return {self.target}
+
+  def package_outputs(self):
+    pass
+
+
+def get_target_optimizer(target, enabled_flag, build_context, optimizer):
+  if enabled_flag in build_context['enabled_build_features']:
+    return optimizer
+
+  return NullOptimizer(target)
+
+
+# To be written as:
+#    'target': lambda target, build_context, args: get_target_optimizer(
+#        target,
+#        'target_enabled_flag',
+#        build_context,
+#        TargetOptimizer(build_context, args),
+#    )
+OPTIMIZED_BUILD_TARGETS = dict()
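
To make the registration pattern documented at the bottom of optimized_targets.py concrete, here is a hedged sketch of what a future entry could look like; the GeneralTestsOptimizer class, the 'general-tests' target, and the 'general_tests_optimized' flag are all hypothetical and nothing is actually registered by this change:

```python
# Hypothetical example only; follows the lambda template in the comment above.
class GeneralTestsOptimizer(OptimizedBuildTarget):
  """Illustrative optimizer that would narrow the general-tests build."""

  def get_build_targets(self):
    # A real optimizer would consult self.build_context to trim this set.
    return {'general-tests'}

  def package_outputs(self):
    # A real optimizer would zip up the outputs of the targets it built.
    pass


OPTIMIZED_BUILD_TARGETS = {
    'general-tests': lambda target, build_context, args: get_target_optimizer(
        target,
        'general_tests_optimized',
        build_context,
        GeneralTestsOptimizer(build_context, args),
    ),
}
```
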
diff --git a/core/Makefile b/core/Makefile
index 9845437..a16365d 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -7055,18 +7055,19 @@
 
 endif
 
-$(DEXPREOPT_CONFIG_ZIP): $(SOONG_ZIP)
-	$(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/dexpreopt_config
-
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS :=
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 ifneq (,$(DEX_PREOPT_CONFIG_FOR_MAKE))
-	$(hide) cp $(DEX_PREOPT_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS += -e $(notdir $(DEX_PREOPT_CONFIG_FOR_MAKE)) -f $(DEX_PREOPT_CONFIG_FOR_MAKE)
 endif
 ifneq (,$(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE))
-	$(hide) cp $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
+$(DEXPREOPT_CONFIG_ZIP): PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS += -e $(notdir $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)) -f $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
 endif
 endif #!TARGET_BUILD_UNBUNDLED
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/dexpreopt_config -D $(PRODUCT_OUT)/dexpreopt_config
+
+$(DEXPREOPT_CONFIG_ZIP): $(SOONG_ZIP)
+	$(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/dexpreopt_config
+	$(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/dexpreopt_config -D $(PRODUCT_OUT)/dexpreopt_config $(PRIVATE_DEXPREOPT_CONFIG_ZIP_PARAMS)
 
 .PHONY: dexpreopt_config_zip
 dexpreopt_config_zip: $(DEXPREOPT_CONFIG_ZIP)
diff --git a/core/definitions.mk b/core/definitions.mk
index dde0aa9..51def29 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2925,19 +2925,15 @@
   echo "Install path: $(patsubst $(PRODUCT_OUT)/%,%,$(PRIVATE_INSTALLED_MODULE))" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
-ART_VERIDEX_APPCOMPAT_SCRIPT:=$(HOST_OUT)/bin/appcompat.sh
+ART_VERIDEX_APPCOMPAT:=$(HOST_OUT)/bin/appcompat
 define run-appcompat
 $(hide) \
-  echo "appcompat.sh output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
-  PACKAGING=$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING ANDROID_LOG_TAGS="*:e" $(ART_VERIDEX_APPCOMPAT_SCRIPT) --dex-file=$@ --api-flags=$(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
+  echo "appcompat output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+  ANDROID_LOG_TAGS="*:e" $(ART_VERIDEX_APPCOMPAT) --dex-file=$@ 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
 appcompat-files = \
   $(AAPT2) \
-  $(ART_VERIDEX_APPCOMPAT_SCRIPT) \
-  $(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) \
-  $(HOST_OUT_EXECUTABLES)/veridex \
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/core_dex_intermediates/classes.dex \
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/oahl_dex_intermediates/classes.dex
+  $(ART_VERIDEX_APPCOMPAT) \
 else
 appcompat-header =
 run-appcompat =
diff --git a/core/node_fns.mk b/core/node_fns.mk
index 144eb8b..d2cee9e 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -203,7 +203,7 @@
   $(call _expand-inherited-values,$(1),$(2),$(3),$(4))
 
   $(eval $(1).$(2).inherited :=)
-  $(eval _include_stack := $(wordlist 2,9999,$$(_include_stack)))
+  $(eval _include_stack := $(wordlist 2,9999,$(_include_stack)))
 endef
 
 #
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index e715fd1..a96ea8f 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -22,50 +22,6 @@
 
 
 # -----------------------------------------------------------------
-# Release Config Flags
-
-# Create a summary file of build flags for each partition
-# $(1): built build flags json file
-# $(2): installed build flags json file
-# $(3): flag names
-define generate-partition-build-flag-file
-$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
-$(eval $(strip $(1)): PRIVATE_FLAG_NAMES := $(strip $(3)))
-$(strip $(1)):
-	mkdir -p $$(dir $$(PRIVATE_OUT))
-	echo '{' > $$(PRIVATE_OUT)
-	echo '"flags": [' >> $$(PRIVATE_OUT)
-	$$(foreach flag, $$(PRIVATE_FLAG_NAMES), \
-		( \
-			printf '  { "name": "%s", "value": "%s", ' \
-					'$$(flag)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).VALUE)' \
-					; \
-			printf '"set": "%s", "default": "%s", "declared": "%s" }' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).SET_IN)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).DEFAULT)' \
-					'$$(_ALL_RELEASE_FLAGS.$$(flag).DECLARED_IN)' \
-					; \
-			printf '$$(if $$(filter $$(lastword $$(PRIVATE_FLAG_NAMES)),$$(flag)),,$$(comma))\n' ; \
-		) >> $$(PRIVATE_OUT) ; \
-	)
-	echo "]" >> $$(PRIVATE_OUT)
-	echo "}" >> $$(PRIVATE_OUT)
-$(call copy-one-file, $(1), $(2))
-endef
-
-$(foreach partition, $(_FLAG_PARTITIONS), \
-	$(eval build_flag_summaries.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/build_flags.json) \
-	$(eval $(call generate-partition-build-flag-file, \
-				$(TARGET_OUT_FLAGS)/$(partition)/build_flags.json, \
-				$(build_flag_summaries.$(partition)), \
-				$(_ALL_RELEASE_FLAGS.PARTITIONS.$(partition)) \
-			) \
-	) \
-)
-
-
-# -----------------------------------------------------------------
 # Aconfig Flags
 
 # Create a summary file of build flags for each partition
diff --git a/core/product.mk b/core/product.mk
index 68d7721..7908e1d 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -304,9 +304,6 @@
 # This flag implies PRODUCT_USE_DYNAMIC_PARTITIONS.
 _product_single_value_vars += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
 
-# List of tags that will be used to gate blueprint modules from the build graph
-_product_list_vars += PRODUCT_INCLUDE_TAGS
-
 # List of directories that will be used to gate blueprint modules from the build graph
 _product_list_vars += PRODUCT_SOURCE_ROOT_DIRS
 
@@ -491,6 +488,11 @@
 # Enables 16KB developer option for device if set.
 _product_single_value_vars += PRODUCT_16K_DEVELOPER_OPTION
 
+# If set, adb root will be disabled (really ro.debuggable=0) in userdebug
+# builds. It is already disabled in user builds. Eng builds are unaffected
+# by this flag.
+_product_single_value_vars += PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
diff --git a/core/product_config.mk b/core/product_config.mk
index f21c1c4..f939690 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -280,27 +280,6 @@
 
 current_product_makefile :=
 
-#############################################################################
-# Check product include tag allowlist
-BLUEPRINT_INCLUDE_TAGS_ALLOWLIST := \
-  com.android.mainline_go \
-  com.android.mainline \
-  mainline_module_prebuilt_nightly \
-  mainline_module_prebuilt_monthly_release
-.KATI_READONLY := BLUEPRINT_INCLUDE_TAGS_ALLOWLIST
-$(foreach include_tag,$(PRODUCT_INCLUDE_TAGS), \
-	$(if $(filter $(include_tag),$(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST)),,\
-	$(call pretty-error, $(include_tag) is not in BLUEPRINT_INCLUDE_TAGS_ALLOWLIST: $(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST))))
-# Create default PRODUCT_INCLUDE_TAGS
-ifeq (, $(PRODUCT_INCLUDE_TAGS))
-# Soong analysis is global: even though a module might not be relevant to a specific product (e.g. build_tools for aosp_arm),
-# we still analyse it.
-# This means that in setups where we two have two prebuilts of module_sdk, we need a "default" to use in analysis
-# This should be a no-op in aosp and internal since no Android.bp file contains blueprint_package_includes
-# Use the big android one and main-based prebuilts by default
-PRODUCT_INCLUDE_TAGS += com.android.mainline mainline_module_prebuilt_nightly
-endif
-
 # AOSP and Google products currently share the same `apex_contributions` in next.
 # This causes issues when building <aosp_product>-next-userdebug in main.
 # Create a temporary allowlist to ignore the google apexes listed in `contents` of apex_contributions of `next`
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 4fbff0a..dd7e4e6 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -313,7 +313,6 @@
 
 $(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
 
-$(call add_json_list, IncludeTags,                $(PRODUCT_INCLUDE_TAGS))
 $(call add_json_list, SourceRootDirs,             $(PRODUCT_SOURCE_ROOT_DIRS))
 
 $(call add_json_list, AfdoProfiles,                $(ALL_AFDO_PROFILES))
diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk
index e8428c8..6e3da72 100644
--- a/core/sysprop_config.mk
+++ b/core/sysprop_config.mk
@@ -195,6 +195,7 @@
 
 user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
 enable_target_debugging := true
+enable_dalvik_lock_contention_logging := true
 ifneq (,$(user_variant))
   # Target is secure in user builds.
   ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=1
@@ -207,6 +208,13 @@
   ifneq ($(user_variant),userdebug)
     # Disable debugging in plain user builds.
     enable_target_debugging :=
+    enable_dalvik_lock_contention_logging :=
+  else
+    # Disable debugging in userdebug builds if PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG
+    # is set.
+    ifneq (,$(strip $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG)))
+      enable_target_debugging :=
+    endif
   endif
 
   # Disallow mock locations by default for user builds
@@ -221,16 +229,22 @@
   ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=1
 endif # !user_variant
 
+ifeq (true,$(strip $(enable_dalvik_lock_contention_logging)))
+  # Enable Dalvik lock contention logging.
+  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.lockprof.threshold=500
+endif # !enable_dalvik_lock_contention_logging
+
 ifeq (true,$(strip $(enable_target_debugging)))
   # Target is more debuggable and adbd is on by default
   ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=1
-  # Enable Dalvik lock contention logging.
-  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.lockprof.threshold=500
 else # !enable_target_debugging
   # Target is less debuggable and adbd is off by default
   ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=0
 endif # !enable_target_debugging
 
+enable_target_debugging:=
+enable_dalvik_lock_contention_logging:=
+
 ifneq ($(filter sdk sdk_addon,$(MAKECMDGOALS)),)
 _is_sdk_build := true
 endif
diff --git a/core/tasks/meta-lic.mk b/core/tasks/meta-lic.mk
index 9b006b3..a94a016 100644
--- a/core/tasks/meta-lic.mk
+++ b/core/tasks/meta-lic.mk
@@ -164,3 +164,24 @@
 # Moved here from hardware/interfaces/tv/Android.mk
 $(eval $(call declare-1p-copy-files,hardware/interfaces/tv,tuner_vts_config_1_0.xml))
 $(eval $(call declare-1p-copy-files,hardware/interfaces/tv,tuner_vts_config_1_1.xml))
+
+# Moved here from device/generic/goldfish/Android.mk
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/data,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/input,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/wifi,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish/camera,))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,hals.conf))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.qemu-adb-keys.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.system_ext.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,.json))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,ueventd.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,wpa_supplicant.conf))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,media_profiles_V1_0.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu.rc))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,fstab.ranchu))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,display_settings.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,display_settings_freeform.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,device_state_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu-core.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,init.ranchu-net.sh))
+$(eval $(call declare-1p-copy-files,device/generic/goldfish,audio_policy_configuration.xml))
diff --git a/envsetup.sh b/envsetup.sh
index 352d664..06dadd3 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -865,47 +865,14 @@
     run_tool_with_logging "ADB" $ADB "${@}"
 }
 
-function run_tool_with_logging() {
-  # Run commands in a subshell for us to handle forced terminations with a trap
-  # handler.
-  (
-  local tool_tag="$1"
-  shift
-  local tool_binary="$1"
-  shift
-
-  # If the logger is not configured, run the original command and return.
-  if [[ -z "${ANDROID_TOOL_LOGGER}" ]]; then
-     "${tool_binary}" "${@}"
-     return $?
-  fi
-
-  # Otherwise, run the original command and call the logger when done.
-  local start_time
-  start_time=$(date +%s.%N)
-  local logger=${ANDROID_TOOL_LOGGER}
-
-  # Install a trap to call the logger even when the process terminates abnormally.
-  # The logger is run in the background and its output suppressed to avoid
-  # interference with the user flow.
-  trap '
-  exit_code=$?;
-  # Remove the trap to prevent duplicate log.
-  trap - EXIT;
-  "${logger}" \
-    --tool_tag="${tool_tag}" \
-    --start_timestamp="${start_time}" \
-    --end_timestamp="$(date +%s.%N)" \
-    --tool_args="$*" \
-    --exit_code="${exit_code}" \
-    ${ANDROID_TOOL_LOGGER_EXTRA_ARGS} \
-    > /dev/null 2>&1 &
-  exit ${exit_code}
-  ' SIGINT SIGTERM SIGQUIT EXIT
-
-  # Run the original command.
-  "${tool_binary}" "${@}"
-  )
+function fastboot() {
+    local FASTBOOT=$(command which fastboot)
+    if [ -z "$FASTBOOT" ]; then
+        echo "Command fastboot not found; try lunch (and building) first?"
+        return 1
+    fi
+    # Support tool event logging for fastboot command.
+    run_tool_with_logging "FASTBOOT" $FASTBOOT "${@}"
 }
 
 # communicate with a running device or emulator, set up necessary state,
@@ -1195,6 +1162,7 @@
 unset refreshmod
 unset resgrep
 unset rsgrep
+unset run_tool_with_logging
 unset sepgrep
 unset sgrep
 unset startviewserver
diff --git a/shell_utils.sh b/shell_utils.sh
index 450bb83..86f3f49 100644
--- a/shell_utils.sh
+++ b/shell_utils.sh
@@ -126,4 +126,27 @@
 }
 
 
+function log_tool_invocation()
+{
+    if [[ -z $ANDROID_TOOL_LOGGER ]]; then
+        return
+    fi
+
+    LOG_TOOL_TAG=$1
+    LOG_START_TIME=$(date +%s.%N)
+    trap '
+        exit_code=$?;
+        # Remove the trap to prevent duplicate log.
+        trap - EXIT;
+        $ANDROID_TOOL_LOGGER \
+                --tool_tag="${LOG_TOOL_TAG}" \
+                --start_timestamp="${LOG_START_TIME}" \
+                --end_timestamp="$(date +%s.%N)" \
+                --tool_args="$*" \
+                --exit_code="${exit_code}" \
+                ${ANDROID_TOOL_LOGGER_EXTRA_ARGS} \
+           > /dev/null 2>&1 &
+        exit ${exit_code}
+    ' SIGINT SIGTERM SIGQUIT EXIT
+}
 
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index 7a9325d..783ed3b 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -58,6 +58,9 @@
 AB_OTA_UPDATER := true
 AB_OTA_PARTITIONS ?= system
 
+# Sign the widevine apex with the dev key
+$(call soong_config_set,widevine,use_devkey,true)
+
 #
 # Special settings for GSI releasing
 #
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 595940d..e9ca482 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -60,6 +60,9 @@
 AB_OTA_UPDATER := true
 AB_OTA_PARTITIONS ?= system
 
+# Sign the widevine apex with the dev key
+$(call soong_config_set,widevine,use_devkey,true)
+
 #
 # Special settings for GSI releasing
 #
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 5446064..0ac220b 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -16,6 +16,7 @@
 
 # Base modules and settings for the product partition.
 PRODUCT_PACKAGES += \
+    build_flag_product \
     fs_config_dirs_product \
     fs_config_files_product \
     group_product \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 634bf66..98adba5 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -45,6 +45,7 @@
     bu \
     bugreport \
     bugreportz \
+    build_flag_system \
     cgroups.json \
     charger \
     cmd \
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index 76f008f..92ca227 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -16,6 +16,7 @@
 
 # Base modules and settings for the system_ext partition.
 PRODUCT_PACKAGES += \
+    build_flag_system_ext \
     fs_config_dirs_system_ext \
     fs_config_files_system_ext \
     group_system_ext \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index ec3de75..1854f97 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -18,6 +18,7 @@
 PRODUCT_PACKAGES += \
     adbd.recovery \
     android.hardware.health@2.0-impl-default.recovery \
+    build_flag_vendor \
     cgroups.recovery.json \
     charger.recovery \
     init_second_stage.recovery \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index d9c3c9a..dc78368 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -176,4 +176,5 @@
     dalvik.vm.usap_pool_refill_delay_ms?=3000
 
 PRODUCT_SYSTEM_PROPERTIES += \
-    dalvik.vm.useartservice=true
+    dalvik.vm.useartservice=true \
+    dalvik.vm.enable_pr_dexopt=true
diff --git a/teams/Android.bp b/teams/Android.bp
index b3a5752..084dad1 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -4391,3 +4391,10 @@
     // go/trendy/manage/engineers/5955405559201792
     trendy_team_id: "5955405559201792",
 }
+
+team {
+    name: "trendy_team_android_media_better_together",
+
+    // go/trendy/manage/engineers/5617300451721216
+    trendy_team_id: "5617300451721216",
+}
\ No newline at end of file
diff --git a/tests/Android.bp b/tests/Android.bp
deleted file mode 100644
index 39debf5..0000000
--- a/tests/Android.bp
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2024 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-    default_team: "trendy_team_build",
-}
-
-python_test_host {
-    name: "run_tool_with_logging_test",
-    main: "run_tool_with_logging_test.py",
-    pkg_path: "testdata",
-    srcs: [
-        "run_tool_with_logging_test.py",
-    ],
-    test_options: {
-        unit_test: true,
-    },
-    data: [
-        ":envsetup_minimum.zip",
-        ":tool_event_logger",
-    ],
-    test_suites: [
-        "general-tests",
-    ],
-    version: {
-        py3: {
-            embedded_launcher: true,
-        },
-    },
-}
diff --git a/tests/run_tool_with_logging_test.py b/tests/run_tool_with_logging_test.py
deleted file mode 100644
index 6f9b59c..0000000
--- a/tests/run_tool_with_logging_test.py
+++ /dev/null
@@ -1,345 +0,0 @@
-# Copyright 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import dataclasses
-import glob
-from importlib import resources
-import logging
-import os
-from pathlib import Path
-import re
-import shutil
-import signal
-import stat
-import subprocess
-import sys
-import tempfile
-import textwrap
-import time
-import unittest
-import zipfile
-
-EXII_RETURN_CODE = 0
-INTERRUPTED_RETURN_CODE = 130
-
-
-class RunToolWithLoggingTest(unittest.TestCase):
-
-  @classmethod
-  def setUpClass(cls):
-    super().setUpClass()
-    # Configure to print logging to stdout.
-    logging.basicConfig(filename=None, level=logging.DEBUG)
-    console = logging.StreamHandler(sys.stdout)
-    logging.getLogger("").addHandler(console)
-
-  def setUp(self):
-    super().setUp()
-    self.working_dir = tempfile.TemporaryDirectory()
-    # Run all the tests from working_dir which is our temp Android build top.
-    os.chdir(self.working_dir.name)
-    # Extract envsetup.zip which contains the envsetup.sh and other dependent
-    # scripts required to set up the build environments.
-    with resources.files("testdata").joinpath("envsetup.zip").open("rb") as p:
-      with zipfile.ZipFile(p, "r") as zip_f:
-        zip_f.extractall()
-
-  def tearDown(self):
-    self.working_dir.cleanup()
-    super().tearDown()
-
-  def test_does_not_log_when_logger_var_empty(self):
-    test_tool = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER=""
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_tool.assert_called_once_with_args("arg1 arg2")
-
-  def test_does_not_log_with_logger_unset(self):
-    test_tool = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      unset ANDROID_TOOL_LOGGER
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_tool.assert_called_once_with_args("arg1 arg2")
-
-  def test_log_success_with_logger_enabled(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_tool.assert_called_once_with_args("arg1 arg2")
-    expected_logger_args = (
-        "--tool_tag=FAKE_TOOL --start_timestamp=\d+\.\d+ --end_timestamp="
-        "\d+\.\d+ --tool_args=arg1 arg2 --exit_code=0"
-    )
-    test_logger.assert_called_once_with_args(expected_logger_args)
-
-  def test_run_tool_output_is_same_with_and_without_logging(self):
-    test_tool = TestScript.create(self.working_dir, "echo 'tool called'")
-    test_logger = TestScript.create(self.working_dir)
-
-    run_tool_with_logging_stdout, run_tool_with_logging_stderr = (
-        self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-    )
-
-    run_tool_without_logging_stdout, run_tool_without_logging_stderr = (
-        self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      {test_tool.executable} arg1 arg2
-    """)
-    )
-
-    self.assertEqual(
-        run_tool_with_logging_stdout, run_tool_without_logging_stdout
-    )
-    self.assertEqual(
-        run_tool_with_logging_stderr, run_tool_without_logging_stderr
-    )
-
-  def test_logger_output_is_suppressed(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir, "echo 'logger called'")
-
-    run_tool_with_logging_output, _ = self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    self.assertNotIn("logger called", run_tool_with_logging_output)
-
-  def test_logger_error_is_suppressed(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(
-        self.working_dir, "echo 'logger failed' > /dev/stderr; exit 1"
-    )
-
-    _, err = self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    self.assertNotIn("logger failed", err)
-
-  def test_log_success_when_tool_interrupted(self):
-    test_tool = TestScript.create(self.working_dir, script_body="sleep 100")
-    test_logger = TestScript.create(self.working_dir)
-
-    process = self._run_script_in_build_env(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    pgid = os.getpgid(process.pid)
-    # Give sometime for the subprocess to start.
-    time.sleep(1)
-    # Kill the subprocess and any processes created in the same group.
-    os.killpg(pgid, signal.SIGINT)
-
-    returncode, _, _ = self._wait_for_process(process)
-    self.assertEqual(returncode, INTERRUPTED_RETURN_CODE)
-
-    expected_logger_args = (
-        "--tool_tag=FAKE_TOOL --start_timestamp=\d+\.\d+ --end_timestamp="
-        "\d+\.\d+ --tool_args=arg1 arg2 --exit_code=130"
-    )
-    test_logger.assert_called_once_with_args(expected_logger_args)
-
-  def test_logger_can_be_toggled_on(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER=""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_logger.assert_called_with_times(1)
-
-  def test_logger_can_be_toggled_off(self):
-    test_tool = TestScript.create(self.working_dir)
-    test_logger = TestScript.create(self.working_dir)
-
-    self._run_script_and_wait(f"""
-      ANDROID_TOOL_LOGGER="{test_logger.executable}"
-      ANDROID_TOOL_LOGGER=""
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    test_logger.assert_not_called()
-
-  def test_integration_tool_event_logger_dry_run(self):
-    test_tool = TestScript.create(self.working_dir)
-    logger_path = self._import_logger()
-
-    self._run_script_and_wait(f"""
-      TMPDIR="{self.working_dir.name}"
-      ANDROID_TOOL_LOGGER="{logger_path}"
-      ANDROID_TOOL_LOGGER_EXTRA_ARGS="--dry_run"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
-    """)
-
-    self._assert_logger_dry_run()
-
-  def test_tool_args_do_not_fail_logger(self):
-    test_tool = TestScript.create(self.working_dir)
-    logger_path = self._import_logger()
-
-    self._run_script_and_wait(f"""
-      TMPDIR="{self.working_dir.name}"
-      ANDROID_TOOL_LOGGER="{logger_path}"
-      ANDROID_TOOL_LOGGER_EXTRA_ARGS="--dry_run"
-      run_tool_with_logging "FAKE_TOOL" {test_tool.executable} --tool-arg1
-    """)
-
-    self._assert_logger_dry_run()
-
-  def _import_logger(self) -> Path:
-    logger = "tool_event_logger"
-    logger_path = Path(self.working_dir.name).joinpath(logger)
-    with resources.as_file(resources.files("testdata").joinpath(logger)) as p:
-      shutil.copy(p, logger_path)
-    Path.chmod(logger_path, 0o755)
-    return logger_path
-
-  def _assert_logger_dry_run(self):
-    log_files = glob.glob(self.working_dir.name + "/tool_event_logger_*/*.log")
-    self.assertEqual(len(log_files), 1)
-
-    with open(log_files[0], "r") as f:
-      lines = f.readlines()
-      self.assertEqual(len(lines), 1)
-      self.assertIn("dry run", lines[0])
-
-  def _create_build_env_script(self) -> str:
-    return f"""
-      source {Path(self.working_dir.name).joinpath("build/make/envsetup.sh")}
-    """
-
-  def _run_script_and_wait(self, test_script: str) -> tuple[str, str]:
-    process = self._run_script_in_build_env(test_script)
-    returncode, out, err = self._wait_for_process(process)
-    logging.debug("script stdout: %s", out)
-    logging.debug("script stderr: %s", err)
-    self.assertEqual(returncode, EXII_RETURN_CODE)
-    return out, err
-
-  def _run_script_in_build_env(self, test_script: str) -> subprocess.Popen:
-    setup_build_env_script = self._create_build_env_script()
-    return subprocess.Popen(
-        setup_build_env_script + test_script,
-        shell=True,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        text=True,
-        start_new_session=True,
-        executable="/bin/bash",
-        )
-
-  def _wait_for_process(
-      self, process: subprocess.Popen
-  ) -> tuple[int, str, str]:
-    pgid = os.getpgid(process.pid)
-    out, err = process.communicate()
-    # Wait for all process in the same group to complete since the logger runs
-    # as a separate detached process.
-    self._wait_for_process_group(pgid)
-    return (process.returncode, out, err)
-
-  def _wait_for_process_group(self, pgid: int, timeout: int = 5):
-    """Waits for all subprocesses within the process group to complete."""
-    start_time = time.time()
-    while True:
-      if time.time() - start_time > timeout:
-        raise TimeoutError(
-            f"Process group did not complete after {timeout} seconds"
-        )
-      for pid in os.listdir("/proc"):
-        if pid.isdigit():
-          try:
-            if os.getpgid(int(pid)) == pgid:
-              time.sleep(0.1)
-              break
-          except (FileNotFoundError, PermissionError, ProcessLookupError):
-            pass
-      else:
-        # All processes have completed.
-        break
-
-
-@dataclasses.dataclass
-class TestScript:
-  executable: Path
-  output_file: Path
-
-  def create(temp_dir: Path, script_body: str = ""):
-    with tempfile.NamedTemporaryFile(dir=temp_dir.name, delete=False) as f:
-      output_file = f.name
-
-    with tempfile.NamedTemporaryFile(dir=temp_dir.name, delete=False) as f:
-      executable = f.name
-      executable_contents = textwrap.dedent(f"""
-      #!/bin/bash
-
-      echo "${{@}}" >> {output_file}
-      {script_body}
-      """)
-      f.write(executable_contents.encode("utf-8"))
-
-    Path.chmod(f.name, os.stat(f.name).st_mode | stat.S_IEXEC)
-
-    return TestScript(executable, output_file)
-
-  def assert_called_with_times(self, expected_call_times: int):
-    lines = self._read_contents_from_output_file()
-    assert len(lines) == expected_call_times, (
-        f"Expect to call {expected_call_times} times, but actually called"
-        f" {len(lines)} times."
-    )
-
-  def assert_called_with_args(self, expected_args: str):
-    lines = self._read_contents_from_output_file()
-    assert len(lines) > 0
-    assert re.search(expected_args, lines[0]), (
-        f"Expect to call with args {expected_args}, but actually called with"
-        f" args {lines[0]}."
-    )
-
-  def assert_not_called(self):
-    self.assert_called_with_times(0)
-
-  def assert_called_once_with_args(self, expected_args: str):
-    self.assert_called_with_times(1)
-    self.assert_called_with_args(expected_args)
-
-  def _read_contents_from_output_file(self) -> list[str]:
-    with open(self.output_file, "r") as f:
-      return f.readlines()
-
-
-if __name__ == "__main__":
-  unittest.main()
diff --git a/tools/check-flagged-apis/check-flagged-apis.sh b/tools/check-flagged-apis/check-flagged-apis.sh
index d9934a1..8078cd8 100755
--- a/tools/check-flagged-apis/check-flagged-apis.sh
+++ b/tools/check-flagged-apis/check-flagged-apis.sh
@@ -14,8 +14,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# Run check-flagged-apis for public APIs and the three @SystemApi flavours
-# Usage: lunch <your-target> && source <this script>
+# Run check-flagged-apis for public APIs and the three @SystemApi flavours.
+#
+# This script expects an argument to tell it which subcommand of
+# check-flagged-apis to execute. Run the script without any arguments to see
+# the valid options.
+#
+# Remember to lunch to select the relevant release config before running this script.
 
 source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../shell_utils.sh
 require_top
@@ -43,6 +48,10 @@
         $MODULE_LIB_XML_VERSIONS
 }
 
+function noop() {
+    true
+}
+
 function aninja() {
     local T="$(gettop)"
     (\cd "${T}" && prebuilts/build-tools/linux-x86/bin/ninja -f out/combined-${TARGET_PRODUCT}.ninja "$@")
@@ -52,11 +61,11 @@
     aninja -t query device_"$1"_all_targets | grep -A1 -e input: | tail -n1
 }
 
-function run() {
+function run_check() {
     local errors=0
 
     echo "# current"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $PUBLIC_XML_VERSIONS
@@ -64,7 +73,7 @@
 
     echo
     echo "# system-current"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-system-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $SYSTEM_XML_VERSIONS
@@ -72,7 +81,7 @@
 
     echo
     echo "# system-server-current"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-system-server-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $SYSTEM_SERVER_XML_VERSONS
@@ -80,7 +89,7 @@
 
     echo
     echo "# module-lib"
-    check-flagged-apis \
+    check-flagged-apis check \
         --api-signature $(path_to_api_signature_file "frameworks-base-api-module-lib-current.txt") \
         --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb \
         --api-versions $MODULE_LIB_XML_VERSIONS
@@ -89,8 +98,39 @@
     return $errors
 }
 
-if [[ "$1" != "--skip-build" ]]; then
-    build && run
-else
-    run
+function run_list() {
+    echo "# current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# system-current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# system-server-current"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-system-server-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+
+    echo
+    echo "# module-lib"
+    check-flagged-apis list \
+        --api-signature $(path_to_api_signature_file "frameworks-base-api-module-lib-current.txt") \
+        --flag-values $(gettop)/out/soong/.intermediates/all_aconfig_declarations.pb
+}
+
+build_cmd=build
+if [[ "$1" == "--skip-build" ]]; then
+    build_cmd=noop
+    shift 1
 fi
+
+case "$1" in
+    check) $build_cmd && run_check ;;
+    list) $build_cmd && run_list ;;
+    *) echo "usage: $(basename $0) [--skip-build] check|list"; exit 1
+esac
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
index 8e285f6..e07ac1d 100644
--- a/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/CheckFlaggedApisTest.kt
@@ -358,4 +358,23 @@
             parseApiVersions(API_VERSIONS.byteInputStream()))
     assertEquals(expected, actual)
   }
+
+  @Test
+  fun testListFlaggedApis() {
+    val expected =
+        listOf(
+            "android.flag.bar DISABLED android/Clazz/Builder",
+            "android.flag.foo ENABLED android/Clazz",
+            "android.flag.foo ENABLED android/Clazz/Clazz()",
+            "android.flag.foo ENABLED android/Clazz/FOO",
+            "android.flag.foo ENABLED android/Clazz/getErrorCode()",
+            "android.flag.foo ENABLED android/Clazz/innerClassArg(Landroid/Clazz/Builder;)",
+            "android.flag.foo ENABLED android/Clazz/setData(I[[ILandroid/util/Utility;)",
+            "android.flag.foo ENABLED android/Clazz/setVariableData(I[Landroid/util/Atom;)")
+    val actual =
+        listFlaggedApis(
+            parseApiSignature("in-memory", API_SIGNATURE.byteInputStream()),
+            parseFlagValues(generateFlagsProto(ENABLED, DISABLED)))
+    assertEquals(expected, actual)
+  }
 }
diff --git a/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
index 1d2440d..1125d39 100644
--- a/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
+++ b/tools/check-flagged-apis/src/com/android/checkflaggedapis/Main.kt
@@ -26,6 +26,7 @@
 import com.android.tools.metalava.model.text.ApiFile
 import com.github.ajalt.clikt.core.CliktCommand
 import com.github.ajalt.clikt.core.ProgramResult
+import com.github.ajalt.clikt.core.subcommands
 import com.github.ajalt.clikt.parameters.options.help
 import com.github.ajalt.clikt.parameters.options.option
 import com.github.ajalt.clikt.parameters.options.required
@@ -141,6 +142,33 @@
   }
 }
 
+val ARG_API_SIGNATURE = "--api-signature"
+val ARG_API_SIGNATURE_HELP =
+    """
+Path to API signature file.
+Usually named *current.txt.
+Tip: `m frameworks-base-api-current.txt` will generate a file that includes all platform and mainline APIs.
+"""
+
+val ARG_FLAG_VALUES = "--flag-values"
+val ARG_FLAG_VALUES_HELP =
+    """
+Path to aconfig parsed_flags binary proto file.
+Tip: `m all_aconfig_declarations` will generate a file that includes all information about all flags.
+"""
+
+val ARG_API_VERSIONS = "--api-versions"
+val ARG_API_VERSIONS_HELP =
+    """
+Path to API versions XML file.
+Usually named xml-versions.xml.
+Tip: `m sdk dist` will generate a file that includes all platform and mainline APIs.
+"""
+
+class MainCommand : CliktCommand() {
+  override fun run() {}
+}
+
 class CheckCommand :
     CliktCommand(
         help =
@@ -152,32 +180,18 @@
 The tool will exit with a non-zero exit code if any flagged APIs are found to be used in the incorrect way.
 """) {
   private val apiSignaturePath by
-      option("--api-signature")
-          .help(
-              """
-              Path to API signature file.
-              Usually named *current.txt.
-              Tip: `m frameworks-base-api-current.txt` will generate a file that includes all platform and mainline APIs.
-              """)
+      option(ARG_API_SIGNATURE)
+          .help(ARG_API_SIGNATURE_HELP)
           .path(mustExist = true, canBeDir = false, mustBeReadable = true)
           .required()
   private val flagValuesPath by
-      option("--flag-values")
-          .help(
-              """
-            Path to aconfig parsed_flags binary proto file.
-            Tip: `m all_aconfig_declarations` will generate a file that includes all information about all flags.
-            """)
+      option(ARG_FLAG_VALUES)
+          .help(ARG_FLAG_VALUES_HELP)
           .path(mustExist = true, canBeDir = false, mustBeReadable = true)
           .required()
   private val apiVersionsPath by
-      option("--api-versions")
-          .help(
-              """
-            Path to API versions XML file.
-            Usually named xml-versions.xml.
-            Tip: `m sdk dist` will generate a file that includes all platform and mainline APIs.
-            """)
+      option(ARG_API_VERSIONS)
+          .help(ARG_API_VERSIONS_HELP)
           .path(mustExist = true, canBeDir = false, mustBeReadable = true)
           .required()
 
@@ -196,6 +210,40 @@
   }
 }
 
+class ListCommand :
+    CliktCommand(
+        help =
+            """
+List all flagged APIs and corresponding flags.
+
+The output format is "<fully-qualified-name-of-flag> <state-of-flag> <API>", one line per API.
+
+The output can be post-processed by e.g. piping it to grep to filter out only enabled APIs, or all APIs guarded by a given flag.
+""") {
+  private val apiSignaturePath by
+      option(ARG_API_SIGNATURE)
+          .help(ARG_API_SIGNATURE_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+  private val flagValuesPath by
+      option(ARG_FLAG_VALUES)
+          .help(ARG_FLAG_VALUES_HELP)
+          .path(mustExist = true, canBeDir = false, mustBeReadable = true)
+          .required()
+
+  override fun run() {
+    val flaggedSymbols =
+        apiSignaturePath.toFile().inputStream().use {
+          parseApiSignature(apiSignaturePath.toString(), it)
+        }
+    val flags = flagValuesPath.toFile().inputStream().use { parseFlagValues(it) }
+    val output = listFlaggedApis(flaggedSymbols, flags)
+    if (output.isNotEmpty()) {
+      println(output.joinToString("\n"))
+    }
+  }
+}
+
 internal fun parseApiSignature(path: String, input: InputStream): Set<Pair<Symbol, Flag>> {
   val output = mutableSetOf<Pair<Symbol, Flag>>()
   val visitor =
@@ -446,4 +494,35 @@
   return errors
 }
 
-fun main(args: Array<String>) = CheckCommand().main(args)
+/**
+ * Collect all known info about all @FlaggedApi annotated APIs.
+ *
+ * Each API will be represented as a String, in the format
+ * <pre>
+ *   &lt;fully-qualified-name-of-flag&gt; &lt;state-of-flag&gt; &lt;API&gt;
+ * </pre>
+ *
+ * @param flaggedSymbolsInSource the set of symbols that are flagged in the source code
+ * @param flags the set of flags and their values
+ * @return a list of Strings encoding API data using the format described above, sorted
+ *   alphabetically
+ */
+internal fun listFlaggedApis(
+    flaggedSymbolsInSource: Set<Pair<Symbol, Flag>>,
+    flags: Map<Flag, Boolean>
+): List<String> {
+  val output = mutableListOf<String>()
+  for ((symbol, flag) in flaggedSymbolsInSource) {
+    val flagState =
+        when (flags.get(flag)) {
+          true -> "ENABLED"
+          false -> "DISABLED"
+          null -> "UNKNOWN"
+        }
+    output.add("$flag $flagState ${symbol.toPrettyString()}")
+  }
+  output.sort()
+  return output
+}
+
+fun main(args: Array<String>) = MainCommand().subcommands(CheckCommand(), ListCommand()).main(args)
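
As a usage note, the `list` subcommand's output ("<fully-qualified-name-of-flag> <state-of-flag> <API>", one line per API) is designed for post-processing. A minimal Python sketch of such a filter, based only on the documented output format (the script name is hypothetical):

```python
# filter_enabled_apis.py (hypothetical): keep only APIs guarded by enabled
# flags in the output of `check-flagged-apis list ...` read from stdin.
import sys

for line in sys.stdin:
    flag, state, api = line.strip().split(' ', 2)
    if state == 'ENABLED':
        print(f'{api}  # guarded by {flag}')
```
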