Merge "check-flagged-apis.sh: use correct path to generated API signature files" into main
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 56da574..18d245c 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -59,17 +59,6 @@
MODULE_BUILD_FROM_SOURCE := $(BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE)
endif
-ifneq (,$(ART_MODULE_BUILD_FROM_SOURCE))
- # Keep an explicit setting.
-else ifneq (,$(findstring .android.art,$(TARGET_BUILD_APPS)))
- # Build ART modules from source if they are listed in TARGET_BUILD_APPS.
- ART_MODULE_BUILD_FROM_SOURCE := true
-else
- # Do the same as other modules by default.
- ART_MODULE_BUILD_FROM_SOURCE := $(MODULE_BUILD_FROM_SOURCE)
-endif
-
-$(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
ifdef ART_DEBUG_OPT_FLAG
$(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG))
endif
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 4c92814..b594193 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -393,8 +393,8 @@
logtags_sources := $(filter %.logtags,$(LOCAL_SRC_FILES)) $(LOCAL_LOGTAGS_FILES)
-ifneq ($(strip $(logtags_sources)),)
-event_log_tags := $(foreach f,$(addprefix $(LOCAL_PATH)/,$(logtags_sources)),$(call clean-path,$(f)))
+ifneq ($(strip $(logtags_sources) $(LOCAL_SOONG_LOGTAGS_FILES)),)
+event_log_tags := $(foreach f,$(LOCAL_SOONG_LOGTAGS_FILES) $(addprefix $(LOCAL_PATH)/,$(logtags_sources)),$(call clean-path,$(f)))
else
event_log_tags :=
endif
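
The base_rules.mk hunk above folds Soong-exported logtags files into event_log_tags: LOCAL_SOONG_LOGTAGS_FILES entries are already build-root-relative, while the Android.mk logtags_sources still get LOCAL_PATH prepended, and both go through clean-path. A minimal Python sketch of that combination step, assuming clean-path behaves roughly like os.path.normpath; the paths used are hypothetical.

import os

def collect_event_log_tags(soong_logtags, local_path, logtags_sources):
    """Sketch of the Make logic: Soong logtags arrive as build-root-relative
    paths, Android.mk sources get LOCAL_PATH prepended, then everything is
    normalized much like clean-path would do."""
    paths = list(soong_logtags) + [os.path.join(local_path, f) for f in logtags_sources]
    return [os.path.normpath(p) for p in paths]

print(collect_event_log_tags(
    ["out/soong/.intermediates/foo/foo.logtags"],  # hypothetical Soong output
    "frameworks/base",                             # hypothetical LOCAL_PATH
    ["core/java/android/foo.logtags"]))            # hypothetical logtags source
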
diff --git a/core/binary.mk b/core/binary.mk
index b17ab00..f86b5a4 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1196,6 +1196,17 @@
endif
###################################################################
+## When compiling a memtag_stack enabled target, use the .memtag_stack variant
+## of any static dependencies (where they exist).
+##################################################################
+ifneq ($(filter memtag_stack,$(my_sanitize)),)
+ my_whole_static_libraries := $(call use_soong_sanitized_static_libraries,\
+ $(my_whole_static_libraries),memtag_stack)
+ my_static_libraries := $(call use_soong_sanitized_static_libraries,\
+ $(my_static_libraries),memtag_stack)
+endif
+
+###################################################################
## When compiling against API imported module, use API import stub
## libraries.
##################################################################
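
The new memtag_stack block in binary.mk switches static dependencies to their .memtag_stack Soong variants when the module itself enables that sanitizer. A rough Python illustration of the substitution that use_soong_sanitized_static_libraries is asked to perform; the library names and the set of available variants are invented for the example.

def use_sanitized_variants(libs, sanitizer, available_variants):
    """Replace each library with its <lib>.<sanitizer> variant when such a
    variant exists, otherwise keep the plain library."""
    return [f"{lib}.{sanitizer}" if f"{lib}.{sanitizer}" in available_variants else lib
            for lib in libs]

# Hypothetical: only libfoo was built with a memtag_stack variant.
print(use_sanitized_variants(["libfoo", "libbar"], "memtag_stack", {"libfoo.memtag_stack"}))
# ['libfoo.memtag_stack', 'libbar']
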
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 5481d50..fb42878 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -264,6 +264,7 @@
LOCAL_SOONG_LICENSE_METADATA :=
LOCAL_SOONG_LINK_TYPE :=
LOCAL_SOONG_LINT_REPORTS :=
+LOCAL_SOONG_LOGTAGS_FILES :=
LOCAL_SOONG_MODULE_INFO_JSON :=
LOCAL_SOONG_MODULE_TYPE :=
LOCAL_SOONG_PROGUARD_DICT :=
diff --git a/core/release_config.mk b/core/release_config.mk
index 3e51af5..bb51980 100644
--- a/core/release_config.mk
+++ b/core/release_config.mk
@@ -41,6 +41,7 @@
# which has OWNERS control. If it isn't let others define their own.
# TODO: Remove wildcard for build/release one when all branch manifests
# have updated.
+_must_protobuf :=
config_map_files := $(wildcard build/release/release_config_map.mk) \
$(wildcard vendor/google_shared/build/release/release_config_map.mk) \
$(if $(wildcard vendor/google/release/release_config_map.mk), \
@@ -53,13 +54,85 @@
) \
)
+protobuf_map_files := $(wildcard build/release/release_config_map.textproto) \
+ $(wildcard vendor/google_shared/build/release/release_config_map.textproto) \
+ $(if $(wildcard vendor/google/release/release_config_map.textproto), \
+ vendor/google/release/release_config_map.textproto, \
+ $(sort \
+ $(wildcard device/*/release/release_config_map.textproto) \
+ $(wildcard device/*/*/release/release_config_map.textproto) \
+ $(wildcard vendor/*/release/release_config_map.textproto) \
+ $(wildcard vendor/*/*/release/release_config_map.textproto) \
+ ) \
+ )
+
# PRODUCT_RELEASE_CONFIG_MAPS is set by Soong using an initial run of product
# config to capture only the list of config maps needed by the build.
# Keep them in the order provided, but remove duplicates.
+# Treat .mk and .textproto as equal for duplicate elimination, but force
+# protobuf if any PRODUCT_RELEASE_CONFIG_MAPS specify .textproto.
$(foreach map,$(PRODUCT_RELEASE_CONFIG_MAPS), \
- $(if $(filter $(map),$(config_map_files)),,$(eval config_map_files += $(map))) \
+ $(if $(filter $(basename $(map)),$(basename $(config_map_files))),, \
+ $(eval config_map_files += $(map))) \
+ $(if $(filter $(basename $(map)).textproto,$(map)),$(eval _must_protobuf := true)) \
)
+
+# If we are missing the textproto version of any of $(config_map_files), we cannot use protobuf.
+_can_protobuf := true
+$(foreach map,$(config_map_files), \
+ $(if $(wildcard $(basename $(map)).textproto),,$(eval _can_protobuf :=)) \
+)
+# If we are missing the mk version of any of $(protobuf_map_files), we must use protobuf.
+$(foreach map,$(protobuf_map_files), \
+ $(if $(wildcard $(basename $(map)).mk),,$(eval _must_protobuf := true)) \
+)
+
+ifneq (,$(_must_protobuf))
+ ifeq (,$(_can_protobuf))
+ # We must use protobuf, but we cannot use protobuf.
+ $(error release config is a mixture of .scl and .textproto)
+ endif
+endif
+
+_use_protobuf :=
+ifneq (,$(_must_protobuf))
+ _use_protobuf := true
+else
+ ifneq ($(_can_protobuf),)
+ # Determine the default
+ $(foreach map,$(config_map_files), \
+ $(if $(wildcard $(dir $(map))/build_config/DEFAULT=proto),$(eval _use_protobuf := true)) \
+ $(if $(wildcard $(dir $(map))/build_config/DEFAULT=make),$(eval _use_protobuf := )) \
+ )
+ # Update for this specific release config only (no inheritance).
+ $(foreach map,$(config_map_files), \
+ $(if $(wildcard $(dir $(map))/build_config/$(TARGET_RELEASE)=proto),$(eval _use_protobuf := true)) \
+ $(if $(wildcard $(dir $(map))/build_config/$(TARGET_RELEASE)=make),$(eval _use_protobuf := )) \
+ )
+ endif
+endif
+
+ifneq (,$(_use_protobuf))
+ # The .textproto files are the canonical source of truth.
+ _args := $(foreach map,$(config_map_files), --map $(map) )
+ ifneq (,$(_must_protobuf))
+ # Disable the build flag in release-config.
+ _args += --guard=false
+ endif
+ $(KATI_shell_no_rerun $(OUT_DIR)/release-config $(_args) >$(OUT_DIR)/release-config.out && touch -t 200001010000 $(OUT_DIR)/release-config.out)
+ $(if $(filter-out 0,$(.SHELLSTATUS)),$(error release-config failed to run))
+ # This will also set _all_release_configs for us.
+ $(eval include $(OUT_DIR)/soong/release-config/release_config-$(TARGET_PRODUCT)-$(TARGET_RELEASE).mk)
+ $(KATI_extra_file_deps $(OUT_DIR)/release-config $(config_map_files))
+ ifeq (,$(_must_protobuf)$(RELEASE_BUILD_FLAGS_IN_PROTOBUF))
+ _use_protobuf :=
+ endif
+endif
+ifeq (,$(_use_protobuf))
+ # The .mk files are the canonical source of truth.
+
+
# Declare an alias release-config
#
# This should be used to declare a release as an alias of another, meaning no
@@ -144,6 +217,9 @@
$(error Alias release config "$(r)" may not specify release config files $(_all_release_configs.$(r).FILES))\
)))
+# Use makefiles
+endif
+
ifeq ($(TARGET_RELEASE),)
# We allow some internal paths to explicitly set TARGET_RELEASE to the
# empty string. For the most part, 'make' treats unset and empty string as
@@ -167,6 +243,7 @@
endif
endif
+ifeq (,$(_use_protobuf))
# Choose flag files
# Don't sort this, use it in the order they gave us.
# Do allow duplicate entries, retaining only the first usage.
@@ -196,6 +273,9 @@
$(error invalid use of apply-release-config-overrides)
endef
+# use makefiles
+endif
+
# TODO: Remove this check after enough people have sourced lunch that we don't
# need to worry about it trying to do get_build_vars TARGET_RELEASE. Maybe after ~9/2023
ifneq ($(CALLED_FROM_SETUP),true)
@@ -207,15 +287,20 @@
endif
.KATI_READONLY := TARGET_RELEASE
+ifeq (,$(_use_protobuf))
$(foreach config, $(_all_release_configs), \
$(eval _all_release_configs.$(config).DECLARED_IN:= ) \
$(eval _all_release_configs.$(config).FILES:= ) \
)
+applied_releases:=
+# use makefiles
+endif
_all_release_configs:=
config_map_files:=
-applied_releases:=
+protobuf_map_files:=
+ifeq (,$(_use_protobuf))
# -----------------------------------------------------------------
# Flag declarations and values
# -----------------------------------------------------------------
@@ -252,3 +337,8 @@
# outside of the source tree.
$(call run-starlark,$(OUT_DIR)/release_config_entrypoint.scl,$(OUT_DIR)/release_config_entrypoint.scl,--allow_external_entrypoint)
+# use makefiles
+endif
+_can_protobuf :=
+_must_protobuf :=
+_use_protobuf :=
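
The release_config.mk changes above pick between the .mk and .textproto release config maps: protobuf is forced when a map (or a PRODUCT_RELEASE_CONFIG_MAPS entry) exists only as .textproto, it is impossible when any .mk map lacks a .textproto twin, and otherwise build_config/DEFAULT=proto|make and build_config/$(TARGET_RELEASE)=proto|make marker files decide. The Python sketch below condenses that decision tree under the same assumptions; it only illustrates the Make logic and is not the real release-config tool.

import os

def choose_protobuf(map_files, force_protobuf, target_release):
    """force_protobuf stands in for _must_protobuf; every .mk map needs a
    .textproto twin for protobuf to be possible; marker files set the default."""
    can = all(os.path.exists(os.path.splitext(m)[0] + ".textproto") for m in map_files)
    if force_protobuf and not can:
        raise RuntimeError("release config maps mix formats")
    if force_protobuf:
        return True
    if not can:
        return False
    use = False
    # Directory-wide defaults first, then per-release overrides (no inheritance).
    for marker in ("DEFAULT", target_release):
        for m in map_files:
            build_config = os.path.join(os.path.dirname(m), "build_config")
            if os.path.exists(os.path.join(build_config, marker + "=proto")):
                use = True
            if os.path.exists(os.path.join(build_config, marker + "=make")):
                use = False
    return use

print(choose_protobuf([], force_protobuf=True, target_release="trunk_staging"))  # True
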
diff --git a/core/tasks/meta-lic.mk b/core/tasks/meta-lic.mk
index c630bcc..9a8dfff 100644
--- a/core/tasks/meta-lic.mk
+++ b/core/tasks/meta-lic.mk
@@ -87,6 +87,9 @@
# Moved here from device/sample/Android.mk
$(eval $(call declare-1p-copy-files,device/sample,))
+# Moved here from device/google/trout/Android.mk
+$(eval $(call declare-1p-copy-files,device/google/trout,))
+
# Moved here from frameworks/av/media/Android.mk
$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.conf))
$(eval $(call declare-1p-copy-files,frameworks/av/media/libeffects,audio_effects.xml))
diff --git a/envsetup.sh b/envsetup.sh
index ab43ada..1ef9a54 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1134,8 +1134,9 @@
--tool_tag "${tool_tag}" \
--start_timestamp "${start_time}" \
--end_timestamp "$(date +%s.%N)" \
- --tool_args \""${@}"\" \
+ --tool_args "$*" \
--exit_code "${exit_code}" \
+ ${ANDROID_TOOL_LOGGER_EXTRA_ARGS} \
> /dev/null 2>&1 &
exit ${exit_code}
' SIGINT SIGTERM SIGQUIT EXIT
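
run_tool_with_logging now passes the wrapped tool's arguments to the logger as a single "$*" word instead of an escaped, quoted "${@}", and appends whatever is in ANDROID_TOOL_LOGGER_EXTRA_ARGS (the new dry-run test below injects --dry_run this way). Below is a hedged argparse model of the invocation in the trap handler; the option names mirror that command line, but the real tool_event_logger interface may differ.

import argparse

def parse_logger_args(argv):
    """Minimal model of the flags the trap handler passes to the logger."""
    p = argparse.ArgumentParser()
    p.add_argument("--tool_tag")
    p.add_argument("--start_timestamp")
    p.add_argument("--end_timestamp")
    p.add_argument("--tool_args")  # a single word, thanks to "$*"
    p.add_argument("--exit_code")
    p.add_argument("--dry_run", action="store_true")  # via ANDROID_TOOL_LOGGER_EXTRA_ARGS
    return p.parse_args(argv)

args = parse_logger_args([
    "--tool_tag", "FAKE_TOOL", "--start_timestamp", "1.0", "--end_timestamp", "2.0",
    "--tool_args", "arg1 arg2", "--exit_code", "0", "--dry_run"])
print(args.tool_args)  # 'arg1 arg2' arrives as one string
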
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 57e8275..22284b1 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -486,6 +486,11 @@
# Enable dirty image object binning to reduce dirty pages in the image.
PRODUCT_PACKAGES += dirty-image-objects
+# Enable go/perfetto-persistent-tracing for eng builds
+ifneq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
+ PRODUCT_PRODUCT_PROPERTIES += persist.debug.perfetto.persistent_sysui_tracing_for_bugreport=1
+endif
+
$(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
# Ensure all trunk-stable flags are available.
diff --git a/teams/Android.bp b/teams/Android.bp
index 78efa61..e5886bd 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -4351,6 +4351,13 @@
}
team {
+ name: "trendy_team_pixel_pearl",
+
+ // go/trendy/manage/engineers/6326219602231296
+ trendy_team_id: "6326219602231296",
+}
+
+team {
name: "trendy_team_ar_sensors_context_hub",
// go/trendy/manage/engineers/4776371090259968
diff --git a/tests/Android.bp b/tests/Android.bp
index d3964e5..39debf5 100644
--- a/tests/Android.bp
+++ b/tests/Android.bp
@@ -29,6 +29,7 @@
},
data: [
":envsetup_minimum.zip",
+ ":tool_event_logger",
],
test_suites: [
"general-tests",
diff --git a/tests/run_tool_with_logging_test.py b/tests/run_tool_with_logging_test.py
index 1eb78f1..215d992 100644
--- a/tests/run_tool_with_logging_test.py
+++ b/tests/run_tool_with_logging_test.py
@@ -13,20 +13,22 @@
# limitations under the License.
import dataclasses
+import glob
from importlib import resources
import logging
import os
from pathlib import Path
import re
+import shutil
import signal
import stat
import subprocess
+import sys
import tempfile
import textwrap
import time
import unittest
import zipfile
-import sys
EXII_RETURN_CODE = 0
INTERRUPTED_RETURN_CODE = 130
@@ -40,7 +42,7 @@
# Configure to print logging to stdout.
logging.basicConfig(filename=None, level=logging.DEBUG)
console = logging.StreamHandler(sys.stdout)
- logging.getLogger('').addHandler(console)
+ logging.getLogger("").addHandler(console)
def setUp(self):
super().setUp()
@@ -49,7 +51,7 @@
os.chdir(self.working_dir.name)
# Extract envsetup.zip which contains the envsetup.sh and other dependent
# scripts required to set up the build environments.
- with resources.files("testdata").joinpath("envsetup.zip").open('rb') as p:
+ with resources.files("testdata").joinpath("envsetup.zip").open("rb") as p:
with zipfile.ZipFile(p, "r") as zip_f:
zip_f.extractall()
@@ -118,7 +120,7 @@
test_tool.assert_called_once_with_args("arg1 arg2")
expected_logger_args = (
"--tool_tag FAKE_TOOL --start_timestamp \d+\.\d+ --end_timestamp"
- ' \d+\.\d+ --tool_args "arg1 arg2" --exit_code 0'
+ " \d+\.\d+ --tool_args arg1 arg2 --exit_code 0"
)
test_logger.assert_called_once_with_args(expected_logger_args)
@@ -196,7 +198,7 @@
expected_logger_args = (
"--tool_tag FAKE_TOOL --start_timestamp \d+\.\d+ --end_timestamp"
- ' \d+\.\d+ --tool_args "arg1 arg2" --exit_code 130'
+ " \d+\.\d+ --tool_args arg1 arg2 --exit_code 130"
)
test_logger.assert_called_once_with_args(expected_logger_args)
@@ -226,6 +228,37 @@
test_logger.assert_not_called()
+ def test_integration_tool_event_logger_dry_run(self):
+ test_tool = TestScript.create(self.working_dir)
+ logger_path = self._import_logger()
+
+ self._run_script_and_wait(f"""
+ TMPDIR="{self.working_dir.name}"
+ ANDROID_ENABLE_TOOL_LOGGING=true
+ ANDROID_TOOL_LOGGER="{logger_path}"
+ ANDROID_TOOL_LOGGER_EXTRA_ARGS="--dry_run"
+ run_tool_with_logging "FAKE_TOOL" {test_tool.executable} arg1 arg2
+ """)
+
+ self._assert_logger_dry_run()
+
+ def _import_logger(self) -> Path:
+ logger = "tool_event_logger"
+ logger_path = Path(self.working_dir.name).joinpath(logger)
+ with resources.as_file(resources.files("testdata").joinpath(logger)) as p:
+ shutil.copy(p, logger_path)
+ Path.chmod(logger_path, 0o755)
+ return logger_path
+
+ def _assert_logger_dry_run(self):
+ log_files = glob.glob(self.working_dir.name + "/tool_event_logger_*/*.log")
+ self.assertEqual(len(log_files), 1)
+
+ with open(log_files[0], "r") as f:
+ lines = f.readlines()
+ self.assertEqual(len(lines), 1)
+ self.assertIn("dry run", lines[0])
+
def _create_build_env_script(self) -> str:
return f"""
source {Path(self.working_dir.name).joinpath("build/make/envsetup.sh")}
@@ -248,7 +281,7 @@
stderr=subprocess.PIPE,
text=True,
start_new_session=True,
- executable='/bin/bash'
+ executable="/bin/bash",
)
def _wait_for_process(
@@ -301,7 +334,7 @@
""")
f.write(executable_contents.encode("utf-8"))
- os.chmod(f.name, os.stat(f.name).st_mode | stat.S_IEXEC)
+ Path.chmod(f.name, os.stat(f.name).st_mode | stat.S_IEXEC)
return TestScript(executable, output_file)
diff --git a/tools/Android.bp b/tools/Android.bp
index 0a55ed4..59831a6 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -115,3 +115,11 @@
},
},
}
+
+python_binary_host {
+ name: "merge-event-log-tags",
+ srcs: [
+ "event_log_tags.py",
+ "merge-event-log-tags.py",
+ ],
+}
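
merge-event-log-tags becomes directly buildable as a python_binary_host from event_log_tags.py and merge-event-log-tags.py. For orientation only, the toy merger below shows the general idea of combining .logtags files, collecting numeric tag/name pairs and rejecting conflicting tag numbers; it is not the actual merge-event-log-tags.py implementation.

def merge_logtags(files_contents):
    """Toy merge of '<number> <name> ...' lines from several .logtags files."""
    merged = {}
    for text in files_contents:
        for line in text.splitlines():
            line = line.split("#", 1)[0].strip()  # drop comments and blanks
            parts = line.split()
            if len(parts) < 2:
                continue
            num, name = parts[0], parts[1]
            if num in merged and merged[num] != name:
                raise ValueError(f"tag {num} defined as both {merged[num]} and {name}")
            merged[num] = name
    return merged

# Hypothetical inputs.
print(merge_logtags(["1000 auditd (avc|3)\n", "1001 chatty (dropped|3)\n"]))
# {'1000': 'auditd', '1001': 'chatty'}
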
diff --git a/tools/releasetools/create_brick_ota.py b/tools/releasetools/create_brick_ota.py
index 9e040a5..bf50f71 100644
--- a/tools/releasetools/create_brick_ota.py
+++ b/tools/releasetools/create_brick_ota.py
@@ -45,10 +45,10 @@
partitions_to_wipe = PARTITIONS_TO_WIPE
if extra_wipe_partitions is not None:
partitions_to_wipe = PARTITIONS_TO_WIPE + extra_wipe_partitions.split(",")
- ota_metadata = ["ota-type=BRICK", "post-timestamp=9999999999",
- "pre-device=" + product_name]
- if serialno is not None:
- ota_metadata.append("serialno=" + serialno)
+ ota_metadata = ["ota-type=BRICK", "post-timestamp=9999999999",
+ "pre-device=" + product_name]
+ if serialno is not None:
+ ota_metadata.append("serialno=" + serialno)
# recovery requires product name to be a | separated list
product_name = product_name.replace(",", "|")
with zipfile.ZipFile(output_path, "w") as zfp:
diff --git a/tools/whichgit b/tools/whichgit
index 8cf84f5..55c8c6f 100755
--- a/tools/whichgit
+++ b/tools/whichgit
@@ -1,6 +1,7 @@
#!/usr/bin/env python3
import argparse
+import itertools
import os
import subprocess
import sys
@@ -10,15 +11,34 @@
check=True, capture_output=True, text=True).stdout.strip()
+def get_all_modules():
+ product_out = subprocess.run(["build/soong/soong_ui.bash", "--dumpvar-mode", "--abs", "PRODUCT_OUT"],
+ check=True, capture_output=True, text=True).stdout.strip()
+ result = subprocess.run(["cat", product_out + "/all_modules.txt"], check=True, capture_output=True, text=True)
+ return result.stdout.strip().split("\n")
+
+
+def batched(iterable, n):
+  # itertools.batched was added in Python 3.12; delete this backport once that is universally available
+ if n < 1:
+ raise ValueError('n must be at least one')
+ it = iter(iterable)
+ while batch := tuple(itertools.islice(it, n)):
+ yield batch
+
+
def get_sources(modules):
- result = subprocess.run(["./prebuilts/build-tools/linux-x86/bin/ninja", "-f",
- "out/combined-" + os.environ["TARGET_PRODUCT"] + ".ninja",
- "-t", "inputs", "-d", ] + modules,
- stderr=subprocess.STDOUT, stdout=subprocess.PIPE, check=False, text=True)
- if result.returncode != 0:
- sys.stderr.write(result.stdout)
- sys.exit(1)
- return set([f for f in result.stdout.split("\n") if not f.startswith("out/")])
+ sources = set()
+ for module_group in batched(modules, 40_000):
+ result = subprocess.run(["./prebuilts/build-tools/linux-x86/bin/ninja", "-f",
+ "out/combined-" + os.environ["TARGET_PRODUCT"] + ".ninja",
+ "-t", "inputs", "-d", ] + list(module_group),
+ stderr=subprocess.STDOUT, stdout=subprocess.PIPE, check=False, text=True)
+ if result.returncode != 0:
+ sys.stderr.write(result.stdout)
+ sys.exit(1)
+ sources.update(set([f for f in result.stdout.split("\n") if not f.startswith("out/")]))
+ return sources
def m_nothing():
@@ -57,13 +77,13 @@
# Argument parsing
ap = argparse.ArgumentParser(description="List the required git projects for the given modules")
ap.add_argument("--products", nargs="*",
- help="The TARGET_PRODUCT to check. If not provided just uses whatever has"
- + " already been built")
+ help="One or more TARGET_PRODUCT to check, or \"*\" for all. If not provided"
+                     + " just uses whatever has already been built")
ap.add_argument("--variants", nargs="*",
help="The TARGET_BUILD_VARIANTS to check. If not provided just uses whatever has"
+ " already been built, or eng if --products is supplied")
ap.add_argument("--modules", nargs="*",
- help="The build modules to check, or droid if not supplied")
+ help="The build modules to check, or \"*\" for all, or droid if not supplied")
ap.add_argument("--why", nargs="*",
help="Also print the input files used in these projects, or \"*\" for all")
ap.add_argument("--unused", help="List the unused git projects for the given modules rather than"
@@ -72,22 +92,33 @@
modules = args.modules if args.modules else ["droid"]
+ match args.products:
+ case ["*"]:
+ products = get_build_var("all_named_products").split(" ")
+ case _:
+ products = args.products
+
# Get the list of sources for all of the requested build combos
- if not args.products and not args.variants:
+ if not products and not args.variants:
+ m_nothing()
+ if args.modules == ["*"]:
+ modules = get_all_modules()
sources = get_sources(modules)
else:
- if not args.products:
+ if not products:
sys.stderr.write("Error: --products must be supplied if --variants is supplied")
sys.exit(1)
sources = set()
build_num = 1
- for product in args.products:
+ for product in products:
os.environ["TARGET_PRODUCT"] = product
variants = args.variants if args.variants else ["user", "userdebug", "eng"]
for variant in variants:
- sys.stderr.write(f"Analyzing build {build_num} of {len(args.products)*len(variants)}\r")
+ sys.stderr.write(f"Analyzing build {build_num} of {len(products)*len(variants)}\r")
os.environ["TARGET_BUILD_VARIANT"] = variant
m_nothing()
+ if args.modules == ["*"]:
+ modules = get_all_modules()
sources.update(get_sources(modules))
build_num += 1
sys.stderr.write("\n\n")
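
whichgit now batches the module list before handing it to ninja -t inputs so a product-wide module set (--modules "*") does not overflow the argument-length limit; batched() is a local backport of itertools.batched, which exists natively from Python 3.12. A quick usage check with made-up module names:

import itertools

def batched(iterable, n):
    # Same backport as in whichgit above.
    if n < 1:
        raise ValueError("n must be at least one")
    it = iter(iterable)
    while batch := tuple(itertools.islice(it, n)):
        yield batch

modules = [f"module{i}" for i in range(7)]
print(list(batched(modules, 3)))
# [('module0', 'module1', 'module2'), ('module3', 'module4', 'module5'), ('module6',)]
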