Merge "Export libacryl related flags to soong" into main
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 5171781..acf81e4 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -42,6 +42,12 @@
$(call add_soong_config_var,ANDROID,ADDITIONAL_M4DEFS,$(if $(BOARD_SEPOLICY_M4DEFS),$(addprefix -D,$(BOARD_SEPOLICY_M4DEFS))))
+# For bootable/recovery
+RECOVERY_API_VERSION := 3
+RECOVERY_FSTAB_VERSION := 2
+$(call soong_config_set, recovery, recovery_api_version, $(RECOVERY_API_VERSION))
+$(call soong_config_set, recovery, recovery_fstab_version, $(RECOVERY_FSTAB_VERSION))
+
# For Sanitizers
$(call soong_config_set_bool,ANDROID,ASAN_ENABLED,$(if $(filter address,$(SANITIZE_TARGET)),true,false))
$(call soong_config_set_bool,ANDROID,HWASAN_ENABLED,$(if $(filter hwaddress,$(SANITIZE_TARGET)),true,false))
@@ -219,11 +225,13 @@
$(call soong_config_set_bool,video_codec,board_use_codec2_hidl_1_2,$(if $(filter true,$(BOARD_USE_CODEC2_HIDL_1_2)),true,false))
$(call soong_config_set_bool,video_codec,board_support_mfc_enc_bt2020,$(if $(filter true,$(BOARD_SUPPORT_MFC_ENC_BT2020)),true,false))
$(call soong_config_set_bool,video_codec,board_support_flexible_p010,$(if $(filter true,$(BOARD_SUPPORT_FLEXIBLE_P010)),true,false))
-$(call soong_config_set,video_codec,board_support_mfc_version,$(BOARD_SUPPORT_MFC_VERSION))
$(call soong_config_set_bool,video_codec,board_use_codec2_aidl,$(if $(BOARD_USE_CODEC2_AIDL),true,false))
$(call soong_config_set,video_codec,board_gpu_type,$(BOARD_GPU_TYPE))
$(call soong_config_set_bool,video_codec,board_use_small_secure_memory,$(if $(filter true,$(BOARD_USE_SMALL_SECURE_MEMORY)),true,false))
-ifneq ($(BOARD_USE_MAX_SECURE_RESOURCE),)
+ifdef BOARD_SUPPORT_MFC_VERSION
+ $(call soong_config_set,video_codec,board_support_mfc_version,$(BOARD_SUPPORT_MFC_VERSION))
+endif
+ifdef BOARD_USE_MAX_SECURE_RESOURCE
$(call soong_config_set,video_codec,board_use_max_secure_resource,$(BOARD_USE_MAX_SECURE_RESOURCE))
endif
diff --git a/core/config.mk b/core/config.mk
index 2df9a2d..f9ba38c 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -844,12 +844,6 @@
.KATI_READONLY := BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
ifdef PRODUCT_SHIPPING_API_LEVEL
- board_api_level := $(firstword $(BOARD_API_LEVEL) $(BOARD_SHIPPING_API_LEVEL))
- ifneq (,$(board_api_level))
- min_systemsdk_version := $(call math_min,$(board_api_level),$(PRODUCT_SHIPPING_API_LEVEL))
- else
- min_systemsdk_version := $(PRODUCT_SHIPPING_API_LEVEL)
- endif
ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
$(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
diff --git a/core/product_config.mk b/core/product_config.mk
index 738d4cf..3e1f120 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -602,7 +602,12 @@
# Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
# In this case, the VSR API level is the minimum of the PRODUCT_SHIPPING_API_LEVEL
# and RELEASE_BOARD_API_LEVEL
- VSR_VENDOR_API_LEVEL := $(call math_min,$(VSR_VENDOR_API_LEVEL),$(RELEASE_BOARD_API_LEVEL))
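+  # Prefer BOARD_API_LEVEL_PROP_OVERRIDE over RELEASE_BOARD_API_LEVEL when it is defined.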
+ board_api_level := $(RELEASE_BOARD_API_LEVEL)
+ ifdef BOARD_API_LEVEL_PROP_OVERRIDE
+ board_api_level := $(BOARD_API_LEVEL_PROP_OVERRIDE)
+ endif
+ VSR_VENDOR_API_LEVEL := $(call math_min,$(VSR_VENDOR_API_LEVEL),$(board_api_level))
+ board_api_level :=
endif
endif
.KATI_READONLY := VSR_VENDOR_API_LEVEL
diff --git a/core/soong_config.mk b/core/soong_config.mk
index a511d5c..a5cbfc9 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -150,6 +150,7 @@
$(call add_json_str, BtConfigIncludeDir, $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
$(call add_json_list, DeviceKernelHeaders, $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
$(call add_json_str, VendorApiLevel, $(BOARD_API_LEVEL))
+$(call add_json_str, VendorApiLevelPropOverride, $(BOARD_API_LEVEL_PROP_OVERRIDE))
$(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VERSIONS))
$(call add_json_list, DeviceSystemSdkVersions, $(BOARD_SYSTEMSDK_VERSIONS))
$(call add_json_list, Platform_systemsdk_versions, $(PLATFORM_SYSTEMSDK_VERSIONS))
diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk
index 6906611..1991503 100644
--- a/core/sysprop_config.mk
+++ b/core/sysprop_config.mk
@@ -91,8 +91,12 @@
# Build system set BOARD_API_LEVEL to show the api level of the vendor API surface.
# This must not be altered outside of build system.
ifdef BOARD_API_LEVEL
-ADDITIONAL_VENDOR_PROPERTIES += \
- ro.board.api_level=$(BOARD_API_LEVEL)
+ ADDITIONAL_VENDOR_PROPERTIES += \
+ ro.board.api_level?=$(BOARD_API_LEVEL)
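+  # BOARD_API_LEVEL_PROP_OVERRIDE, if defined, overrides the default ro.board.api_level set above.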
+ ifdef BOARD_API_LEVEL_PROP_OVERRIDE
+ ADDITIONAL_VENDOR_PROPERTIES += \
+ ro.board.api_level=$(BOARD_API_LEVEL_PROP_OVERRIDE)
+ endif
endif
# RELEASE_BOARD_API_LEVEL_FROZEN is true when the vendor API surface is frozen.
ifdef RELEASE_BOARD_API_LEVEL_FROZEN
diff --git a/tools/aconfig/aflags/src/load_protos.rs b/tools/aconfig/aflags/src/load_protos.rs
index 90d8599..f201d8f 100644
--- a/tools/aconfig/aflags/src/load_protos.rs
+++ b/tools/aconfig/aflags/src/load_protos.rs
@@ -60,3 +60,10 @@
}
Ok(result)
}
+
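+/// Returns the containers inferred from the parsed-flags proto files found on the device.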
+pub(crate) fn list_containers() -> Result<Vec<String>> {
+ Ok(aconfig_device_paths::parsed_flags_proto_paths()?
+ .into_iter()
+ .map(|p| infer_container(&p))
+ .collect())
+}
diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs
index 07b7243..8173bc2 100644
--- a/tools/aconfig/aflags/src/main.rs
+++ b/tools/aconfig/aflags/src/main.rs
@@ -253,6 +253,14 @@
FlagSourceType::DeviceConfig => DeviceConfigSource::list_flags()?,
FlagSourceType::AconfigStorage => AconfigStorageSource::list_flags()?,
};
+
+ if let Some(ref c) = container {
+ ensure!(
+ load_protos::list_containers()?.contains(c),
+ format!("container '{}' not found", &c)
+ );
+ }
+
let flags = (Filter { container }).apply(&flags_unfiltered);
let padding_info = PaddingInfo {
longest_flag_col: flags.iter().map(|f| f.qualified_name().len()).max().unwrap_or(0),
@@ -298,7 +306,7 @@
Command::List { container } => {
if aconfig_flags::auto_generated::enable_only_new_storage() {
list(FlagSourceType::AconfigStorage, container)
- .map_err(|err| anyhow!("storage may not be enabled: {err}"))
+ .map_err(|err| anyhow!("could not list flags: {err}"))
.map(Some)
} else {
list(FlagSourceType::DeviceConfig, container).map(Some)
diff --git a/tools/edit_monitor/Android.bp b/tools/edit_monitor/Android.bp
index fe4f213..e613563 100644
--- a/tools/edit_monitor/Android.bp
+++ b/tools/edit_monitor/Android.bp
@@ -74,6 +74,21 @@
},
}
+python_test_host {
+ name: "edit_monitor_integration_test",
+ main: "edit_monitor_integration_test.py",
+ pkg_path: "testdata",
+ srcs: [
+ "edit_monitor_integration_test.py",
+ ],
+ test_options: {
+ unit_test: true,
+ },
+ data: [
+ ":edit_monitor",
+ ],
+}
+
python_binary_host {
name: "edit_monitor",
pkg_path: "edit_monitor",
diff --git a/tools/edit_monitor/daemon_manager.py b/tools/edit_monitor/daemon_manager.py
index 892c292..c0a57ab 100644
--- a/tools/edit_monitor/daemon_manager.py
+++ b/tools/edit_monitor/daemon_manager.py
@@ -13,24 +13,32 @@
# limitations under the License.
+import getpass
import hashlib
import logging
import multiprocessing
import os
import pathlib
+import platform
import signal
import subprocess
import sys
import tempfile
import time
+from atest.metrics import clearcut_client
+from atest.proto import clientanalytics_pb2
+from proto import edit_event_pb2
-DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS = 1
+DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS = 5
DEFAULT_MONITOR_INTERVAL_SECONDS = 5
-DEFAULT_MEMORY_USAGE_THRESHOLD = 2000
+DEFAULT_MEMORY_USAGE_THRESHOLD = 2 * 1024 # 2GB
DEFAULT_CPU_USAGE_THRESHOLD = 200
DEFAULT_REBOOT_TIMEOUT_SECONDS = 60 * 60 * 24
BLOCK_SIGN_FILE = "edit_monitor_block_sign"
+# Enum of the Clearcut log source defined under
+# /google3/wireless/android/play/playlog/proto/log_source_enum.proto
+LOG_SOURCE = 2524
def default_daemon_target():
@@ -46,11 +54,16 @@
binary_path: str,
daemon_target: callable = default_daemon_target,
daemon_args: tuple = (),
+ cclient: clearcut_client.Clearcut | None = None,
):
self.binary_path = binary_path
self.daemon_target = daemon_target
self.daemon_args = daemon_args
+ self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE)
+ self.user_name = getpass.getuser()
+ self.host_name = platform.node()
+ self.source_root = os.environ.get("ANDROID_BUILD_TOP", "")
self.pid = os.getpid()
self.daemon_process = None
@@ -70,13 +83,20 @@
logging.warning("Block sign found, exiting...")
return
- if self.binary_path.startswith('/google/cog/'):
+ if self.binary_path.startswith("/google/cog/"):
logging.warning("Edit monitor for cog is not supported, exiting...")
return
- self._stop_any_existing_instance()
- self._write_pid_to_pidfile()
- self._start_daemon_process()
+ try:
+ self._stop_any_existing_instance()
+ self._write_pid_to_pidfile()
+ self._start_daemon_process()
+ except Exception as e:
+ logging.exception("Failed to start daemon manager with error %s", e)
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
+ raise e
def monitor_daemon(
self,
@@ -118,6 +138,9 @@
logging.error(
"Daemon process is consuming too much resource, killing..."
),
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE
+ )
self._terminate_process(self.daemon_process.pid)
logging.info(
@@ -131,14 +154,24 @@
def stop(self):
"""Stops the daemon process and removes the pidfile."""
- logging.debug("in daemon manager cleanup.")
+ logging.info("in daemon manager cleanup.")
try:
- if self.daemon_process and self.daemon_process.is_alive():
- self._terminate_process(self.daemon_process.pid)
+ if self.daemon_process:
+ # The daemon process might already be terminating;
+ # wait some time before killing it explicitly.
+ self._wait_for_process_terminate(self.daemon_process.pid, 1)
+ if self.daemon_process.is_alive():
+ self._terminate_process(self.daemon_process.pid)
self._remove_pidfile()
- logging.debug("Successfully stopped daemon manager.")
+ logging.info("Successfully stopped daemon manager.")
except Exception as e:
logging.exception("Failed to stop daemon manager with error %s", e)
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+ )
+ sys.exit(1)
+ finally:
+ self.cclient.flush_events()
def reboot(self):
"""Reboots the current process.
@@ -146,7 +179,7 @@
Stops the current daemon manager and reboots the entire process based on
the binary file. Exits directly If the binary file no longer exists.
"""
- logging.debug("Rebooting process based on binary %s.", self.binary_path)
+ logging.info("Rebooting process based on binary %s.", self.binary_path)
# Stop the current daemon manager first.
self.stop()
@@ -160,6 +193,9 @@
os.execv(self.binary_path, sys.argv)
except OSError as e:
logging.exception("Failed to reboot process with error: %s.", e)
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
+ )
sys.exit(1) # Indicate an error occurred
def cleanup(self):
@@ -171,6 +207,7 @@
that requires immediate cleanup to prevent damanger to the system.
"""
logging.debug("Start cleaning up all existing instances.")
+ self._send_error_event_to_clearcut(edit_event_pb2.EditEvent.FORCE_CLEANUP)
try:
# First places a block sign to prevent any edit monitor process to start.
@@ -227,6 +264,7 @@
p = multiprocessing.Process(
target=self.daemon_target, args=self.daemon_args
)
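+ # Run the daemon target in a daemonic subprocess so it is terminated automatically when the parent exits.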
+ p.daemon = True
p.start()
logging.info("Start subprocess with PID %d", p.pid)
@@ -299,36 +337,28 @@
return pid_file_path
def _get_process_memory_percent(self, pid: int) -> float:
- try:
- with open(f"/proc/{pid}/stat", "r") as f:
- stat_data = f.readline().split()
- # RSS is the 24th field in /proc/[pid]/stat
- rss_pages = int(stat_data[23])
- return rss_pages * 4 / 1024 # Covert to MB
- except (FileNotFoundError, IndexError, ValueError, IOError) as e:
- logging.exception("Failed to get memory usage.")
- raise e
+ with open(f"/proc/{pid}/stat", "r") as f:
+ stat_data = f.readline().split()
+ # RSS is the 24th field in /proc/[pid]/stat
+ rss_pages = int(stat_data[23])
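+ # A page is assumed to be 4 KB when converting the resident set size to MB.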
+ return rss_pages * 4 / 1024 # Convert to MB
def _get_process_cpu_percent(self, pid: int, interval: int = 1) -> float:
- try:
- total_start_time = self._get_total_cpu_time(pid)
- with open("/proc/uptime", "r") as f:
- uptime_start = float(f.readline().split()[0])
+ total_start_time = self._get_total_cpu_time(pid)
+ with open("/proc/uptime", "r") as f:
+ uptime_start = float(f.readline().split()[0])
- time.sleep(interval)
+ time.sleep(interval)
- total_end_time = self._get_total_cpu_time(pid)
- with open("/proc/uptime", "r") as f:
- uptime_end = float(f.readline().split()[0])
+ total_end_time = self._get_total_cpu_time(pid)
+ with open("/proc/uptime", "r") as f:
+ uptime_end = float(f.readline().split()[0])
- return (
- (total_end_time - total_start_time)
- / (uptime_end - uptime_start)
- * 100
- )
- except (FileNotFoundError, IndexError, ValueError, IOError) as e:
- logging.exception("Failed to get CPU usage.")
- raise e
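+ # CPU usage is the process CPU time delta divided by the elapsed wall-clock time (from /proc/uptime), as a percentage.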
+ return (
+ (total_end_time - total_start_time)
+ / (uptime_end - uptime_start)
+ * 100
+ )
def _get_total_cpu_time(self, pid: int) -> float:
with open(f"/proc/{str(pid)}/stat", "r") as f:
@@ -350,4 +380,19 @@
except (FileNotFoundError, IOError, ValueError, TypeError):
logging.exception("Failed to get pid from file path: %s", file)
- return pids
\ No newline at end of file
+ return pids
+
+ def _send_error_event_to_clearcut(self, error_type):
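+ # Wraps the error type in an EditEvent proto and queues it on the Clearcut client; queued events are flushed when the manager stops.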
+ edit_monitor_error_event_proto = edit_event_pb2.EditEvent(
+ user_name=self.user_name,
+ host_name=self.host_name,
+ source_root=self.source_root,
+ )
+ edit_monitor_error_event_proto.edit_monitor_error_event.CopyFrom(
+ edit_event_pb2.EditEvent.EditMonitorErrorEvent(error_type=error_type)
+ )
+ log_event = clientanalytics_pb2.LogEvent(
+ event_time_ms=int(time.time() * 1000),
+ source_extension=edit_monitor_error_event_proto.SerializeToString(),
+ )
+ self.cclient.log(log_event)
diff --git a/tools/edit_monitor/daemon_manager_test.py b/tools/edit_monitor/daemon_manager_test.py
index 72442c6..e132000 100644
--- a/tools/edit_monitor/daemon_manager_test.py
+++ b/tools/edit_monitor/daemon_manager_test.py
@@ -26,6 +26,7 @@
import unittest
from unittest import mock
from edit_monitor import daemon_manager
+from proto import edit_event_pb2
TEST_BINARY_FILE = '/path/to/test_binary'
@@ -133,7 +134,8 @@
def test_start_return_directly_if_in_cog_env(self):
dm = daemon_manager.DaemonManager(
- '/google/cog/cloud/user/workspace/edit_monitor')
+ '/google/cog/cloud/user/workspace/edit_monitor'
+ )
dm.start()
# Verify no daemon process is started.
self.assertIsNone(dm.daemon_process)
@@ -148,9 +150,13 @@
with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
f.write('123456')
- with self.assertRaises(OSError) as error:
- dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+ fake_cclient = FakeClearcutClient()
+ with self.assertRaises(OSError):
+ dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient)
dm.start()
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
def test_start_failed_to_write_pidfile(self):
pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
@@ -160,40 +166,63 @@
# Makes the directory read-only so write pidfile will fail.
os.chmod(pid_file_path_dir, 0o555)
- with self.assertRaises(PermissionError) as error:
- dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+ fake_cclient = FakeClearcutClient()
+ with self.assertRaises(PermissionError):
+ dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient)
dm.start()
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
def test_start_failed_to_start_daemon_process(self):
- with self.assertRaises(TypeError) as error:
+ fake_cclient = FakeClearcutClient()
+ with self.assertRaises(TypeError):
dm = daemon_manager.DaemonManager(
- TEST_BINARY_FILE, daemon_target='wrong_target', daemon_args=(1)
+ TEST_BINARY_FILE,
+ daemon_target='wrong_target',
+ daemon_args=(1),
+ cclient=fake_cclient,
)
dm.start()
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
def test_monitor_daemon_subprocess_killed_high_memory_usage(self):
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
TEST_BINARY_FILE,
daemon_target=memory_consume_daemon_target,
daemon_args=(2,),
+ cclient=fake_cclient,
)
dm.start()
dm.monitor_daemon(interval=1, memory_threshold=2)
self.assertTrue(dm.max_memory_usage >= 2)
self.assert_no_subprocess_running()
+ self._assert_error_event_logged(
+ fake_cclient,
+ edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE,
+ )
def test_monitor_daemon_subprocess_killed_high_cpu_usage(self):
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
TEST_BINARY_FILE,
daemon_target=cpu_consume_daemon_target,
daemon_args=(20,),
+ cclient=fake_cclient,
)
dm.start()
dm.monitor_daemon(interval=1, cpu_threshold=20)
self.assertTrue(dm.max_cpu_usage >= 20)
self.assert_no_subprocess_running()
+ self._assert_error_event_logged(
+ fake_cclient,
+ edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE,
+ )
@mock.patch('subprocess.check_output')
def test_monitor_daemon_failed_does_not_matter(self, mock_output):
@@ -207,7 +236,8 @@
)
dm = daemon_manager.DaemonManager(
- binary_file.name, daemon_target=long_running_daemon
+ binary_file.name,
+ daemon_target=long_running_daemon,
)
dm.start()
dm.monitor_daemon(reboot_timeout=0.5)
@@ -226,27 +256,42 @@
@mock.patch('os.kill')
def test_stop_failed_to_kill_daemon_process(self, mock_kill):
mock_kill.side_effect = OSError('Unknown OSError')
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
- TEST_BINARY_FILE, daemon_target=long_running_daemon
+ TEST_BINARY_FILE,
+ daemon_target=long_running_daemon,
+ cclient=fake_cclient,
)
- dm.start()
- dm.stop()
- self.assertTrue(dm.daemon_process.is_alive())
- self.assertTrue(dm.pid_file_path.exists())
+ with self.assertRaises(SystemExit):
+ dm.start()
+ dm.stop()
+ self.assertTrue(dm.daemon_process.is_alive())
+ self.assertTrue(dm.pid_file_path.exists())
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+ )
@mock.patch('os.remove')
def test_stop_failed_to_remove_pidfile(self, mock_remove):
mock_remove.side_effect = OSError('Unknown OSError')
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
- TEST_BINARY_FILE, daemon_target=long_running_daemon
+ TEST_BINARY_FILE,
+ daemon_target=long_running_daemon,
+ cclient=fake_cclient,
)
- dm.start()
- dm.stop()
- self.assert_no_subprocess_running()
- self.assertTrue(dm.pid_file_path.exists())
+ with self.assertRaises(SystemExit):
+ dm.start()
+ dm.stop()
+ self.assert_no_subprocess_running()
+ self.assertTrue(dm.pid_file_path.exists())
+
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+ )
@mock.patch('os.execv')
def test_reboot_success(self, mock_execv):
@@ -273,7 +318,7 @@
)
dm.start()
with self.assertRaises(SystemExit) as cm:
dm.reboot()
mock_execv.assert_not_called()
self.assertEqual(cm.exception.code, 0)
@@ -281,18 +326,24 @@
@mock.patch('os.execv')
def test_reboot_failed(self, mock_execv):
mock_execv.side_effect = OSError('Unknown OSError')
+ fake_cclient = FakeClearcutClient()
binary_file = tempfile.NamedTemporaryFile(
dir=self.working_dir.name, delete=False
)
dm = daemon_manager.DaemonManager(
- binary_file.name, daemon_target=long_running_daemon
+ binary_file.name,
+ daemon_target=long_running_daemon,
+ cclient=fake_cclient,
)
dm.start()
with self.assertRaises(SystemExit) as cm:
dm.reboot()
self.assertEqual(cm.exception.code, 1)
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
+ )
def assert_run_simple_daemon_success(self):
damone_output_file = tempfile.NamedTemporaryFile(
@@ -374,6 +425,33 @@
f.write(str(p.pid))
return p
+ def _assert_error_event_logged(self, fake_cclient, error_type):
+ error_events = fake_cclient.get_sent_events()
+ self.assertEqual(len(error_events), 1)
+ self.assertEqual(
+ edit_event_pb2.EditEvent.FromString(
+ error_events[0].source_extension
+ ).edit_monitor_error_event.error_type,
+ error_type,
+ )
+
+
+class FakeClearcutClient:
+
+ def __init__(self):
+ self.pending_log_events = []
+ self.sent_log_event = []
+
+ def log(self, log_event):
+ self.pending_log_events.append(log_event)
+
+ def flush_events(self):
+ self.sent_log_event.extend(self.pending_log_events)
+ self.pending_log_events.clear()
+
+ def get_sent_events(self):
+ return self.sent_log_event + self.pending_log_events
+
if __name__ == '__main__':
unittest.main()
diff --git a/tools/edit_monitor/edit_monitor.py b/tools/edit_monitor/edit_monitor.py
index 386daf7..ab528e8 100644
--- a/tools/edit_monitor/edit_monitor.py
+++ b/tools/edit_monitor/edit_monitor.py
@@ -17,7 +17,9 @@
import logging
import multiprocessing.connection
import os
+import pathlib
import platform
+import threading
import time
from atest.metrics import clearcut_client
@@ -30,22 +32,36 @@
# Enum of the Clearcut log source defined under
# /google3/wireless/android/play/playlog/proto/log_source_enum.proto
LOG_SOURCE = 2524
+DEFAULT_FLUSH_INTERVAL_SECONDS = 5
+DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD = 100
class ClearcutEventHandler(PatternMatchingEventHandler):
def __init__(
- self, path: str, cclient: clearcut_client.Clearcut | None = None
+ self,
+ path: str,
+ flush_interval_sec: int,
+ single_events_size_threshold: int,
+ is_dry_run: bool = False,
+ cclient: clearcut_client.Clearcut | None = None,
):
super().__init__(patterns=["*"], ignore_directories=True)
self.root_monitoring_path = path
+ self.flush_interval_sec = flush_interval_sec
+ self.single_events_size_threshold = single_events_size_threshold
+ self.is_dry_run = is_dry_run
self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE)
self.user_name = getpass.getuser()
self.host_name = platform.node()
self.source_root = os.environ.get("ANDROID_BUILD_TOP", "")
+ self.pending_events = []
+ self._scheduled_log_thread = None
+ self._pending_events_lock = threading.Lock()
+
def on_moved(self, event: FileSystemEvent):
self._log_edit_event(event, edit_event_pb2.EditEvent.MOVE)
@@ -60,15 +76,33 @@
def flushall(self):
logging.info("flushing all pending events.")
+ if self._scheduled_log_thread:
+ logging.info("canceling log thread")
+ self._scheduled_log_thread.cancel()
+ self._scheduled_log_thread = None
+
+ self._log_clearcut_events()
self.cclient.flush_events()
def _log_edit_event(
self, event: FileSystemEvent, edit_type: edit_event_pb2.EditEvent.EditType
):
- event_time = time.time()
-
- logging.info("%s: %s", event.event_type, event.src_path)
try:
+ event_time = time.time()
+
+ if self._is_hidden_file(pathlib.Path(event.src_path)):
+ logging.debug("ignore hidden file: %s.", event.src_path)
+ return
+
+ if not self._is_under_git_project(pathlib.Path(event.src_path)):
+ logging.debug(
+ "ignore file %s which does not belong to a git project",
+ event.src_path,
+ )
+ return
+
+ logging.info("%s: %s", event.event_type, event.src_path)
+
event_proto = edit_event_pb2.EditEvent(
user_name=self.user_name,
host_name=self.host_name,
@@ -79,18 +113,78 @@
file_path=event.src_path, edit_type=edit_type
)
)
- clearcut_log_event = clientanalytics_pb2.LogEvent(
+ with self._pending_events_lock:
+ self.pending_events.append((event_proto, event_time))
+ if not self._scheduled_log_thread:
+ logging.debug(
+ "Scheduling thread to run in %d seconds", self.flush_interval_sec
+ )
+ self._scheduled_log_thread = threading.Timer(
+ self.flush_interval_sec, self._log_clearcut_events
+ )
+ self._scheduled_log_thread.start()
+
+ except Exception:
+ logging.exception("Failed to log edit event.")
+
+ def _is_hidden_file(self, file_path: pathlib.Path) -> bool:
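+ # A file is treated as hidden if any path component under the monitored root starts with ".".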
+ return any(
+ part.startswith(".")
+ for part in file_path.relative_to(self.root_monitoring_path).parts
+ )
+
+ def _is_under_git_project(self, file_path: pathlib.Path) -> bool:
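+ # A file is considered part of a git project if any ancestor directory, up to and including the monitored root, contains a .git entry.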
+ root_path = pathlib.Path(self.root_monitoring_path).resolve()
+ return any(
+ root_path.joinpath(dir).joinpath('.git').exists()
+ for dir in file_path.relative_to(root_path).parents
+ )
+
+ def _log_clearcut_events(self):
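+ # Drain the pending events under the lock; if more than single_events_size_threshold accumulated, send one aggregated event instead of individual ones.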
+ with self._pending_events_lock:
+ self._scheduled_log_thread = None
+ edit_events = self.pending_events
+ self.pending_events = []
+
+ pending_events_size = len(edit_events)
+ if pending_events_size > self.single_events_size_threshold:
+ logging.info(
+ "got %d events in %d seconds, sending aggregated events instead",
+ pending_events_size,
+ self.flush_interval_sec,
+ )
+ aggregated_event_time = edit_events[0][1]
+ aggregated_event_proto = edit_event_pb2.EditEvent(
+ user_name=self.user_name,
+ host_name=self.host_name,
+ source_root=self.source_root,
+ )
+ aggregated_event_proto.aggregated_edit_event.CopyFrom(
+ edit_event_pb2.EditEvent.AggregatedEditEvent(
+ num_edits=pending_events_size
+ )
+ )
+ edit_events = [(aggregated_event_proto, aggregated_event_time)]
+
+ if self.is_dry_run:
+ logging.info("Sent %d edit events in dry run.", len(edit_events))
+ return
+
+ for event_proto, event_time in edit_events:
+ log_event = clientanalytics_pb2.LogEvent(
event_time_ms=int(event_time * 1000),
source_extension=event_proto.SerializeToString(),
)
+ self.cclient.log(log_event)
- self.cclient.log(clearcut_log_event)
- except Exception:
- logging.exception("Failed to log edit event.")
+ logging.info("sent %d edit events", len(edit_events))
def start(
path: str,
+ is_dry_run: bool = False,
+ flush_interval_sec: int = DEFAULT_FLUSH_INTERVAL_SECONDS,
+ single_events_size_threshold: int = DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD,
cclient: clearcut_client.Clearcut | None = None,
pipe_sender: multiprocessing.connection.Connection | None = None,
):
@@ -104,7 +198,8 @@
cclient: The clearcut client to send the edit logs.
conn: the sender of the pipe to communicate with the deamon manager.
"""
- event_handler = ClearcutEventHandler(path, cclient)
+ event_handler = ClearcutEventHandler(
+ path, flush_interval_sec, single_events_size_threshold, is_dry_run, cclient)
observer = Observer()
logging.info("Starting observer on path %s.", path)
diff --git a/tools/edit_monitor/edit_monitor_integration_test.py b/tools/edit_monitor/edit_monitor_integration_test.py
new file mode 100644
index 0000000..d7dc7f1
--- /dev/null
+++ b/tools/edit_monitor/edit_monitor_integration_test.py
@@ -0,0 +1,135 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Integration tests for Edit Monitor."""
+
+import glob
+from importlib import resources
+import logging
+import os
+import pathlib
+import shutil
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import unittest
+
+
+class EditMonitorIntegrationTest(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ # Configure to print logging to stdout.
+ logging.basicConfig(filename=None, level=logging.DEBUG)
+ console = logging.StreamHandler(sys.stdout)
+ logging.getLogger("").addHandler(console)
+
+ def setUp(self):
+ super().setUp()
+ self.working_dir = tempfile.TemporaryDirectory()
+ self.root_monitoring_path = pathlib.Path(self.working_dir.name).joinpath(
+ "files"
+ )
+ self.root_monitoring_path.mkdir()
+ self.edit_monitor_binary_path = self._import_executable("edit_monitor")
+
+ def tearDown(self):
+ self.working_dir.cleanup()
+ super().tearDown()
+
+ def test_log_single_edit_event_success(self):
+ p = self._start_edit_monitor_process()
+
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath(".git").touch()
+
+ # Create and modify a file.
+ test_file = self.root_monitoring_path.joinpath("test.txt")
+ with open(test_file, "w") as f:
+ f.write("something")
+
+ # Move the file.
+ test_file_moved = self.root_monitoring_path.joinpath("new_test.txt")
+ test_file.rename(test_file_moved)
+
+ # Delete the file.
+ test_file_moved.unlink()
+
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.communicate()
+
+ self.assertEqual(self._get_logged_events_num(), 4)
+
+ def _start_edit_monitor_process(self):
+ command = f"""
+ export TMPDIR="{self.working_dir.name}"
+ {self.edit_monitor_binary_path} --path={self.root_monitoring_path} --dry_run"""
+ p = subprocess.Popen(
+ command,
+ shell=True,
+ text=True,
+ start_new_session=True,
+ executable="/bin/bash",
+ )
+ self._wait_for_observer_start(time_out=5)
+ return p
+
+ def _wait_for_observer_start(self, time_out):
+ start_time = time.time()
+
+ while time.time() < start_time + time_out:
+ log_files = glob.glob(self.working_dir.name + "/edit_monitor_*/*.log")
+ if log_files:
+ with open(log_files[0], "r") as f:
+ for line in f:
+ logging.debug("initial log: %s", line)
+ if line.rstrip("\n").endswith("Observer started."):
+ return
+ else:
+ time.sleep(1)
+
+ self.fail(f"Observer not started in {time_out} seconds.")
+
+ def _get_logged_events_num(self):
+ log_files = glob.glob(self.working_dir.name + "/edit_monitor_*/*.log")
+ self.assertEqual(len(log_files), 1)
+
+ with open(log_files[0], "r") as f:
+ for line in f:
+ logging.debug("complete log: %s", line)
+ if line.rstrip("\n").endswith("in dry run."):
+ return int(line.split(":")[-1].split(" ")[2])
+
+ return 0
+
+ def _import_executable(self, executable_name: str) -> pathlib.Path:
+ binary_dir = pathlib.Path(self.working_dir.name).joinpath("binary")
+ binary_dir.mkdir()
+ executable_path = binary_dir.joinpath(executable_name)
+ with resources.as_file(
+ resources.files("testdata").joinpath(executable_name)
+ ) as binary:
+ shutil.copy(binary, executable_path)
+ executable_path.chmod(0o755)
+ return executable_path
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tools/edit_monitor/edit_monitor_test.py b/tools/edit_monitor/edit_monitor_test.py
index fdccd44..64a3871 100644
--- a/tools/edit_monitor/edit_monitor_test.py
+++ b/tools/edit_monitor/edit_monitor_test.py
@@ -53,7 +53,9 @@
self.working_dir.cleanup()
super().tearDown()
- def test_log_edit_event_success(self):
+ def test_log_single_edit_event_success(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
fake_cclient = FakeClearcutClient(
log_output_file=self.log_event_dir.joinpath('logs.output')
)
@@ -125,7 +127,108 @@
).single_edit_event,
)
+
+ def test_log_aggregated_edit_event_success(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create 6 test files
+ for i in range(6):
+ test_file = self.root_monitoring_path.joinpath('test_' + str(i))
+ test_file.touch()
+
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 1)
+
+ expected_aggregated_edit_event = (
+ edit_event_pb2.EditEvent.AggregatedEditEvent(
+ num_edits=6,
+ )
+ )
+
+ self.assertEqual(
+ expected_aggregated_edit_event,
+ edit_event_pb2.EditEvent.FromString(
+ logged_events[0].source_extension
+ ).aggregated_edit_event,
+ )
+
+ def test_do_not_log_edit_event_for_directory_change(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create a sub directory
+ self.root_monitoring_path.joinpath('test_dir').mkdir()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 0)
+
+ def test_do_not_log_edit_event_for_hidden_file(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create a hidden file.
+ self.root_monitoring_path.joinpath('.test.txt').touch()
+ # Create a hidden dir.
+ hidden_dir = self.root_monitoring_path.joinpath('.test')
+ hidden_dir.mkdir()
+ hidden_dir.joinpath('test.txt').touch()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 0)
+
+ def test_do_not_log_edit_event_for_non_git_project_file(self):
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create a file.
+ self.root_monitoring_path.joinpath('test.txt').touch()
+ # Create a file under a sub dir.
+ sub_dir = self.root_monitoring_path.joinpath('.test')
+ sub_dir.mkdir()
+ sub_dir.joinpath('test.txt').touch()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 0)
+
def test_log_edit_event_fail(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
fake_cclient = FakeClearcutClient(
log_output_file=self.log_event_dir.joinpath('logs.output'),
raise_log_exception=True,
@@ -150,7 +253,7 @@
# Start edit monitor in a subprocess.
p = multiprocessing.Process(
target=edit_monitor.start,
- args=(str(self.root_monitoring_path.resolve()), cclient, sender),
+ args=(str(self.root_monitoring_path.resolve()), False, 0.5, 5, cclient, sender),
)
p.daemon = True
p.start()
diff --git a/tools/edit_monitor/main.py b/tools/edit_monitor/main.py
index 40574ad..49385f1 100644
--- a/tools/edit_monitor/main.py
+++ b/tools/edit_monitor/main.py
@@ -42,6 +42,12 @@
)
parser.add_argument(
+ '--dry_run',
+ action='store_true',
+ help='Dry run the edit monitor. This starts the edit monitor process without actually sending the edit logs to clearcut.',
+ )
+
+ parser.add_argument(
'--force_cleanup',
action='store_true',
help=(
@@ -51,18 +57,31 @@
),
)
+ parser.add_argument(
+ '--verbose',
+ action='store_true',
+ help=(
+ 'Log verbose info in the log file for debugging purposes.'
+ ),
+ )
+
return parser
-def configure_logging():
+def configure_logging(verbose=False):
root_logging_dir = tempfile.mkdtemp(prefix='edit_monitor_')
_, log_path = tempfile.mkstemp(dir=root_logging_dir, suffix='.log')
log_fmt = '%(asctime)s %(filename)s:%(lineno)s:%(levelname)s: %(message)s'
date_fmt = '%Y-%m-%d %H:%M:%S'
+ log_level = logging.DEBUG if verbose else logging.INFO
+
logging.basicConfig(
- filename=log_path, level=logging.DEBUG, format=log_fmt, datefmt=date_fmt
+ filename=log_path, level=log_level, format=log_fmt, datefmt=date_fmt
)
+ # Filter out logs from inotify_buffer to prevent log pollution.
+ logging.getLogger('watchdog.observers.inotify_buffer').addFilter(
+ lambda record: record.filename != 'inotify_buffer.py')
print(f'logging to file {log_path}')
@@ -73,11 +92,15 @@
def main(argv: list[str]):
args = create_arg_parser().parse_args(argv[1:])
+ configure_logging(args.verbose)
+ if args.dry_run:
+ logging.info('This is a dry run.')
dm = daemon_manager.DaemonManager(
binary_path=argv[0],
daemon_target=edit_monitor.start,
- daemon_args=(args.path,),
+ daemon_args=(args.path, args.dry_run),
)
+
if args.force_cleanup:
dm.cleanup()
@@ -92,5 +115,4 @@
if __name__ == '__main__':
signal.signal(signal.SIGTERM, term_signal_handler)
- configure_logging()
main(sys.argv)
diff --git a/tools/edit_monitor/proto/edit_event.proto b/tools/edit_monitor/proto/edit_event.proto
index b3630bc..dc3d3f6 100644
--- a/tools/edit_monitor/proto/edit_event.proto
+++ b/tools/edit_monitor/proto/edit_event.proto
@@ -36,8 +36,6 @@
// Event that logs errors happened in the edit monitor.
message EditMonitorErrorEvent {
ErrorType error_type = 1;
- string error_msg = 2;
- string stack_trace = 3;
}
// ------------------------
diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go
index ec937fe..c7cf5ed 100644
--- a/tools/ide_query/ide_query.go
+++ b/tools/ide_query/ide_query.go
@@ -293,11 +293,19 @@
// If a file is covered by multiple modules, the first module is returned.
func findJavaModules(paths []string, modules map[string]*javaModule) map[string]string {
ret := make(map[string]string)
- for name, module := range modules {
+ // A file may be part of multiple modules. To make the result deterministic,
+ // check the modules in sorted order.
+ keys := make([]string, 0, len(modules))
+ for name := range modules {
+ keys = append(keys, name)
+ }
+ slices.Sort(keys)
+ for _, name := range keys {
if strings.HasSuffix(name, ".impl") {
continue
}
+ module := modules[name]
for i, p := range paths {
if slices.Contains(module.Srcs, p) {
ret[p] = name
diff --git a/tools/metadata/Android.bp b/tools/metadata/Android.bp
deleted file mode 100644
index 77d106d..0000000
--- a/tools/metadata/Android.bp
+++ /dev/null
@@ -1,16 +0,0 @@
-package {
- default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-blueprint_go_binary {
- name: "metadata",
- deps: [
- "soong-testing-test_spec_proto",
- "soong-testing-code_metadata_proto",
- "soong-testing-code_metadata_internal_proto",
- "golang-protobuf-proto",
- ],
- srcs: [
- "generator.go",
- ]
-}
\ No newline at end of file
diff --git a/tools/metadata/OWNERS b/tools/metadata/OWNERS
deleted file mode 100644
index 03bcdf1..0000000
--- a/tools/metadata/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-dariofreni@google.com
-joeo@google.com
-ronish@google.com
-caditya@google.com
diff --git a/tools/metadata/generator.go b/tools/metadata/generator.go
deleted file mode 100644
index b7668be..0000000
--- a/tools/metadata/generator.go
+++ /dev/null
@@ -1,328 +0,0 @@
-package main
-
-import (
- "flag"
- "fmt"
- "io"
- "log"
- "os"
- "sort"
- "strings"
- "sync"
-
- "android/soong/testing/code_metadata_internal_proto"
- "android/soong/testing/code_metadata_proto"
- "android/soong/testing/test_spec_proto"
- "google.golang.org/protobuf/proto"
-)
-
-type keyToLocksMap struct {
- locks sync.Map
-}
-
-func (kl *keyToLocksMap) GetLockForKey(key string) *sync.Mutex {
- mutex, _ := kl.locks.LoadOrStore(key, &sync.Mutex{})
- return mutex.(*sync.Mutex)
-}
-
-// Define a struct to hold the combination of team ID and multi-ownership flag for validation
-type sourceFileAttributes struct {
- TeamID string
- MultiOwnership bool
- Path string
-}
-
-func getSortedKeys(syncMap *sync.Map) []string {
- var allKeys []string
- syncMap.Range(
- func(key, _ interface{}) bool {
- allKeys = append(allKeys, key.(string))
- return true
- },
- )
-
- sort.Strings(allKeys)
- return allKeys
-}
-
-// writeProtoToFile marshals a protobuf message and writes it to a file
-func writeProtoToFile(outputFile string, message proto.Message) {
- data, err := proto.Marshal(message)
- if err != nil {
- log.Fatal(err)
- }
- file, err := os.Create(outputFile)
- if err != nil {
- log.Fatal(err)
- }
- defer file.Close()
-
- _, err = file.Write(data)
- if err != nil {
- log.Fatal(err)
- }
-}
-
-func readFileToString(filePath string) string {
- file, err := os.Open(filePath)
- if err != nil {
- log.Fatal(err)
- }
- defer file.Close()
-
- data, err := io.ReadAll(file)
- if err != nil {
- log.Fatal(err)
- }
- return string(data)
-}
-
-func writeEmptyOutputProto(outputFile string, metadataRule string) {
- file, err := os.Create(outputFile)
- if err != nil {
- log.Fatal(err)
- }
- var message proto.Message
- if metadataRule == "test_spec" {
- message = &test_spec_proto.TestSpec{}
- } else if metadataRule == "code_metadata" {
- message = &code_metadata_proto.CodeMetadata{}
- }
- data, err := proto.Marshal(message)
- if err != nil {
- log.Fatal(err)
- }
- defer file.Close()
-
- _, err = file.Write([]byte(data))
- if err != nil {
- log.Fatal(err)
- }
-}
-
-func processTestSpecProtobuf(
- filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
- errCh chan error, wg *sync.WaitGroup,
-) {
- defer wg.Done()
-
- fileContent := strings.TrimRight(readFileToString(filePath), "\n")
- testData := test_spec_proto.TestSpec{}
- err := proto.Unmarshal([]byte(fileContent), &testData)
- if err != nil {
- errCh <- err
- return
- }
-
- ownershipMetadata := testData.GetOwnershipMetadataList()
- for _, metadata := range ownershipMetadata {
- key := metadata.GetTargetName()
- lock := keyLocks.GetLockForKey(key)
- lock.Lock()
-
- value, loaded := ownershipMetadataMap.LoadOrStore(
- key, []*test_spec_proto.TestSpec_OwnershipMetadata{metadata},
- )
- if loaded {
- existingMetadata := value.([]*test_spec_proto.TestSpec_OwnershipMetadata)
- isDuplicate := false
- for _, existing := range existingMetadata {
- if metadata.GetTrendyTeamId() != existing.GetTrendyTeamId() {
- errCh <- fmt.Errorf(
- "Conflicting trendy team IDs found for %s at:\n%s with teamId"+
- ": %s,\n%s with teamId: %s",
- key,
- metadata.GetPath(), metadata.GetTrendyTeamId(), existing.GetPath(),
- existing.GetTrendyTeamId(),
- )
-
- lock.Unlock()
- return
- }
- if metadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && metadata.GetPath() == existing.GetPath() {
- isDuplicate = true
- break
- }
- }
- if !isDuplicate {
- existingMetadata = append(existingMetadata, metadata)
- ownershipMetadataMap.Store(key, existingMetadata)
- }
- }
-
- lock.Unlock()
- }
-}
-
-// processCodeMetadataProtobuf processes CodeMetadata protobuf files
-func processCodeMetadataProtobuf(
- filePath string, ownershipMetadataMap *sync.Map, sourceFileMetadataMap *sync.Map, keyLocks *keyToLocksMap,
- errCh chan error, wg *sync.WaitGroup,
-) {
- defer wg.Done()
-
- fileContent := strings.TrimRight(readFileToString(filePath), "\n")
- internalCodeData := code_metadata_internal_proto.CodeMetadataInternal{}
- err := proto.Unmarshal([]byte(fileContent), &internalCodeData)
- if err != nil {
- errCh <- err
- return
- }
-
- // Process each TargetOwnership entry
- for _, internalMetadata := range internalCodeData.GetTargetOwnershipList() {
- key := internalMetadata.GetTargetName()
- lock := keyLocks.GetLockForKey(key)
- lock.Lock()
-
- for _, srcFile := range internalMetadata.GetSourceFiles() {
- srcFileKey := srcFile
- srcFileLock := keyLocks.GetLockForKey(srcFileKey)
- srcFileLock.Lock()
- attributes := sourceFileAttributes{
- TeamID: internalMetadata.GetTrendyTeamId(),
- MultiOwnership: internalMetadata.GetMultiOwnership(),
- Path: internalMetadata.GetPath(),
- }
-
- existingAttributes, exists := sourceFileMetadataMap.Load(srcFileKey)
- if exists {
- existing := existingAttributes.(sourceFileAttributes)
- if attributes.TeamID != existing.TeamID && (!attributes.MultiOwnership || !existing.MultiOwnership) {
- errCh <- fmt.Errorf(
- "Conflict found for source file %s covered at %s with team ID: %s. Existing team ID: %s and path: %s."+
- " If multi-ownership is required, multiOwnership should be set to true in all test_spec modules using this target. "+
- "Multiple-ownership in general is discouraged though as it make infrastructure around android relying on this information pick up a random value when it needs only one.",
- srcFile, internalMetadata.GetPath(), attributes.TeamID, existing.TeamID, existing.Path,
- )
- srcFileLock.Unlock()
- lock.Unlock()
- return
- }
- } else {
- // Store the metadata if no conflict
- sourceFileMetadataMap.Store(srcFileKey, attributes)
- }
- srcFileLock.Unlock()
- }
-
- value, loaded := ownershipMetadataMap.LoadOrStore(
- key, []*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership{internalMetadata},
- )
- if loaded {
- existingMetadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
- isDuplicate := false
- for _, existing := range existingMetadata {
- if internalMetadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && internalMetadata.GetPath() == existing.GetPath() {
- isDuplicate = true
- break
- }
- }
- if !isDuplicate {
- existingMetadata = append(existingMetadata, internalMetadata)
- ownershipMetadataMap.Store(key, existingMetadata)
- }
- }
-
- lock.Unlock()
- }
-}
-
-func main() {
- inputFile := flag.String("inputFile", "", "Input file path")
- outputFile := flag.String("outputFile", "", "Output file path")
- rule := flag.String(
- "rule", "", "Metadata rule (Hint: test_spec or code_metadata)",
- )
- flag.Parse()
-
- if *inputFile == "" || *outputFile == "" || *rule == "" {
- fmt.Println("Usage: metadata -rule <rule> -inputFile <input file path> -outputFile <output file path>")
- os.Exit(1)
- }
-
- inputFileData := strings.TrimRight(readFileToString(*inputFile), "\n")
- filePaths := strings.Split(inputFileData, " ")
- if len(filePaths) == 1 && filePaths[0] == "" {
- writeEmptyOutputProto(*outputFile, *rule)
- return
- }
- ownershipMetadataMap := &sync.Map{}
- keyLocks := &keyToLocksMap{}
- errCh := make(chan error, len(filePaths))
- var wg sync.WaitGroup
-
- switch *rule {
- case "test_spec":
- for _, filePath := range filePaths {
- wg.Add(1)
- go processTestSpecProtobuf(
- filePath, ownershipMetadataMap, keyLocks, errCh, &wg,
- )
- }
-
- wg.Wait()
- close(errCh)
-
- for err := range errCh {
- log.Fatal(err)
- }
-
- allKeys := getSortedKeys(ownershipMetadataMap)
- var allMetadata []*test_spec_proto.TestSpec_OwnershipMetadata
-
- for _, key := range allKeys {
- value, _ := ownershipMetadataMap.Load(key)
- metadataList := value.([]*test_spec_proto.TestSpec_OwnershipMetadata)
- allMetadata = append(allMetadata, metadataList...)
- }
-
- testSpec := &test_spec_proto.TestSpec{
- OwnershipMetadataList: allMetadata,
- }
- writeProtoToFile(*outputFile, testSpec)
- break
- case "code_metadata":
- sourceFileMetadataMap := &sync.Map{}
- for _, filePath := range filePaths {
- wg.Add(1)
- go processCodeMetadataProtobuf(
- filePath, ownershipMetadataMap, sourceFileMetadataMap, keyLocks, errCh, &wg,
- )
- }
-
- wg.Wait()
- close(errCh)
-
- for err := range errCh {
- log.Fatal(err)
- }
-
- sortedKeys := getSortedKeys(ownershipMetadataMap)
- allMetadata := make([]*code_metadata_proto.CodeMetadata_TargetOwnership, 0)
- for _, key := range sortedKeys {
- value, _ := ownershipMetadataMap.Load(key)
- metadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
- for _, m := range metadata {
- targetName := m.GetTargetName()
- path := m.GetPath()
- trendyTeamId := m.GetTrendyTeamId()
-
- allMetadata = append(allMetadata, &code_metadata_proto.CodeMetadata_TargetOwnership{
- TargetName: &targetName,
- Path: &path,
- TrendyTeamId: &trendyTeamId,
- SourceFiles: m.GetSourceFiles(),
- })
- }
- }
-
- finalMetadata := &code_metadata_proto.CodeMetadata{
- TargetOwnershipList: allMetadata,
- }
- writeProtoToFile(*outputFile, finalMetadata)
- break
- default:
- log.Fatalf("No specific processing implemented for rule '%s'.\n", *rule)
- }
-}
diff --git a/tools/metadata/go.mod b/tools/metadata/go.mod
deleted file mode 100644
index e9d04b1..0000000
--- a/tools/metadata/go.mod
+++ /dev/null
@@ -1,7 +0,0 @@
-module android/soong/tools/metadata
-
-require google.golang.org/protobuf v0.0.0
-
-replace google.golang.org/protobuf v0.0.0 => ../../../external/golang-protobuf
-
-go 1.18
\ No newline at end of file
diff --git a/tools/metadata/go.work b/tools/metadata/go.work
deleted file mode 100644
index f2cdf8e..0000000
--- a/tools/metadata/go.work
+++ /dev/null
@@ -1,11 +0,0 @@
-go 1.18
-
-use (
- .
- ../../../../external/golang-protobuf
- ../../../soong/testing/test_spec_proto
- ../../../soong/testing/code_metadata_proto
- ../../../soong/testing/code_metadata_proto_internal
-)
-
-replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
diff --git a/tools/metadata/testdata/emptyInputFile.txt b/tools/metadata/testdata/emptyInputFile.txt
deleted file mode 100644
index 8b13789..0000000
--- a/tools/metadata/testdata/emptyInputFile.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/tools/metadata/testdata/expectedCodeMetadataOutput.txt b/tools/metadata/testdata/expectedCodeMetadataOutput.txt
deleted file mode 100644
index 755cf40..0000000
--- a/tools/metadata/testdata/expectedCodeMetadataOutput.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-bar
-Android.bp12346"b.java
-
-foo
-Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/expectedOutputFile.txt b/tools/metadata/testdata/expectedOutputFile.txt
deleted file mode 100644
index b0d382f..0000000
--- a/tools/metadata/testdata/expectedOutputFile.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
\ No newline at end of file
diff --git a/tools/metadata/testdata/file1.txt b/tools/metadata/testdata/file1.txt
deleted file mode 100644
index 81beed0..0000000
--- a/tools/metadata/testdata/file1.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
diff --git a/tools/metadata/testdata/file2.txt b/tools/metadata/testdata/file2.txt
deleted file mode 100644
index 32a753f..0000000
--- a/tools/metadata/testdata/file2.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
diff --git a/tools/metadata/testdata/file3.txt b/tools/metadata/testdata/file3.txt
deleted file mode 100644
index 81beed0..0000000
--- a/tools/metadata/testdata/file3.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
diff --git a/tools/metadata/testdata/file4.txt b/tools/metadata/testdata/file4.txt
deleted file mode 100644
index 6a75900..0000000
--- a/tools/metadata/testdata/file4.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-one
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
diff --git a/tools/metadata/testdata/file5.txt b/tools/metadata/testdata/file5.txt
deleted file mode 100644
index d8de064..0000000
--- a/tools/metadata/testdata/file5.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-foo
-Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file6.txt b/tools/metadata/testdata/file6.txt
deleted file mode 100644
index 9c7cdcd..0000000
--- a/tools/metadata/testdata/file6.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-bar
-Android.bp12346"b.java
diff --git a/tools/metadata/testdata/file7.txt b/tools/metadata/testdata/file7.txt
deleted file mode 100644
index d8de064..0000000
--- a/tools/metadata/testdata/file7.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-foo
-Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file8.txt b/tools/metadata/testdata/file8.txt
deleted file mode 100644
index a931690..0000000
--- a/tools/metadata/testdata/file8.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-foo
-Android.gp12346"a.java
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutput.txt b/tools/metadata/testdata/generatedCodeMetadataOutput.txt
deleted file mode 100644
index 755cf40..0000000
--- a/tools/metadata/testdata/generatedCodeMetadataOutput.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-bar
-Android.bp12346"b.java
-
-foo
-Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt b/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
deleted file mode 100644
index 755cf40..0000000
--- a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-bar
-Android.bp12346"b.java
-
-foo
-Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/generatedEmptyOutputFile.txt b/tools/metadata/testdata/generatedEmptyOutputFile.txt
deleted file mode 100644
index e69de29..0000000
--- a/tools/metadata/testdata/generatedEmptyOutputFile.txt
+++ /dev/null
diff --git a/tools/metadata/testdata/generatedOutputFile.txt b/tools/metadata/testdata/generatedOutputFile.txt
deleted file mode 100644
index b0d382f..0000000
--- a/tools/metadata/testdata/generatedOutputFile.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadata.txt b/tools/metadata/testdata/inputCodeMetadata.txt
deleted file mode 100644
index 7a81b7d..0000000
--- a/tools/metadata/testdata/inputCodeMetadata.txt
+++ /dev/null
@@ -1 +0,0 @@
-file5.txt file6.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadataNegative.txt b/tools/metadata/testdata/inputCodeMetadataNegative.txt
deleted file mode 100644
index 26668e4..0000000
--- a/tools/metadata/testdata/inputCodeMetadataNegative.txt
+++ /dev/null
@@ -1 +0,0 @@
-file7.txt file8.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputFiles.txt b/tools/metadata/testdata/inputFiles.txt
deleted file mode 100644
index e44bc94..0000000
--- a/tools/metadata/testdata/inputFiles.txt
+++ /dev/null
@@ -1 +0,0 @@
-file1.txt file2.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputFilesNegativeCase.txt b/tools/metadata/testdata/inputFilesNegativeCase.txt
deleted file mode 100644
index a37aa3f..0000000
--- a/tools/metadata/testdata/inputFilesNegativeCase.txt
+++ /dev/null
@@ -1 +0,0 @@
-file3.txt file4.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/metadata_test.go b/tools/metadata/testdata/metadata_test.go
deleted file mode 100644
index 314add3..0000000
--- a/tools/metadata/testdata/metadata_test.go
+++ /dev/null
@@ -1,119 +0,0 @@
-package main
-
-import (
- "fmt"
- "io/ioutil"
- "os/exec"
- "strings"
- "testing"
-)
-
-func TestMetadata(t *testing.T) {
- cmd := exec.Command(
- "metadata", "-rule", "test_spec", "-inputFile", "./inputFiles.txt", "-outputFile",
- "./generatedOutputFile.txt",
- )
- stderr, err := cmd.CombinedOutput()
- if err != nil {
- t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
- }
-
- // Read the contents of the expected output file
- expectedOutput, err := ioutil.ReadFile("./expectedOutputFile.txt")
- if err != nil {
- t.Fatalf("Error reading expected output file: %s", err)
- }
-
- // Read the contents of the generated output file
- generatedOutput, err := ioutil.ReadFile("./generatedOutputFile.txt")
- if err != nil {
- t.Fatalf("Error reading generated output file: %s", err)
- }
-
- fmt.Println()
-
- // Compare the contents
- if string(expectedOutput) != string(generatedOutput) {
- t.Errorf("Generated file contents do not match the expected output")
- }
-}
-
-func TestMetadataNegativeCase(t *testing.T) {
- cmd := exec.Command(
- "metadata", "-rule", "test_spec", "-inputFile", "./inputFilesNegativeCase.txt", "-outputFile",
- "./generatedOutputFileNegativeCase.txt",
- )
- stderr, err := cmd.CombinedOutput()
- if err == nil {
- t.Fatalf(
- "Expected an error, but the metadata command executed successfully. Output: %s",
- stderr,
- )
- }
-
- expectedError := "Conflicting trendy team IDs found for java-test-module" +
- "-name-one at:\nAndroid.bp with teamId: 12346," +
- "\nAndroid.bp with teamId: 12345"
- if !strings.Contains(
- strings.TrimSpace(string(stderr)), strings.TrimSpace(expectedError),
- ) {
- t.Errorf(
- "Unexpected error message. Expected to contain: %s, Got: %s",
- expectedError, stderr,
- )
- }
-}
-
-func TestEmptyInputFile(t *testing.T) {
- cmd := exec.Command(
- "metadata", "-rule", "test_spec", "-inputFile", "./emptyInputFile.txt", "-outputFile",
- "./generatedEmptyOutputFile.txt",
- )
- stderr, err := cmd.CombinedOutput()
- if err != nil {
- t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
- }
-
- // Read the contents of the generated output file
- generatedOutput, err := ioutil.ReadFile("./generatedEmptyOutputFile.txt")
- if err != nil {
- t.Fatalf("Error reading generated output file: %s", err)
- }
-
- fmt.Println()
-
- // Compare the contents
- if string(generatedOutput) != "\n" {
- t.Errorf("Generated file contents do not match the expected output")
- }
-}
-
-func TestCodeMetadata(t *testing.T) {
- cmd := exec.Command(
- "metadata", "-rule", "code_metadata", "-inputFile", "./inputCodeMetadata.txt", "-outputFile",
- "./generatedCodeMetadataOutputFile.txt",
- )
- stderr, err := cmd.CombinedOutput()
- if err != nil {
- t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
- }
-
- // Read the contents of the expected output file
- expectedOutput, err := ioutil.ReadFile("./expectedCodeMetadataOutput.txt")
- if err != nil {
- t.Fatalf("Error reading expected output file: %s", err)
- }
-
- // Read the contents of the generated output file
- generatedOutput, err := ioutil.ReadFile("./generatedCodeMetadataOutputFile.txt")
- if err != nil {
- t.Fatalf("Error reading generated output file: %s", err)
- }
-
- fmt.Println()
-
- // Compare the contents
- if string(expectedOutput) != string(generatedOutput) {
- t.Errorf("Generated file contents do not match the expected output")
- }
-}
diff --git a/tools/metadata/testdata/outputFile.txt b/tools/metadata/testdata/outputFile.txt
deleted file mode 100644
index b0d382f..0000000
--- a/tools/metadata/testdata/outputFile.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
\ No newline at end of file