Merge "Remove the "metadata" module" into main
diff --git a/core/Makefile b/core/Makefile
index 81ae6f7..638d2b9 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1676,12 +1676,13 @@
INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_cmdline "$(INTERNAL_KERNEL_CMDLINE)"
endif
-ifdef INTERNAL_BOOTCONFIG
+ifneq (, $(INTERNAL_BOOTCONFIG)$(INTERNAL_BOOTCONFIG_FILE))
INTERNAL_VENDOR_BOOTCONFIG_TARGET := $(PRODUCT_OUT)/vendor-bootconfig.img
$(INTERNAL_VENDOR_BOOTCONFIG_TARGET):
rm -f $@
$(foreach param,$(INTERNAL_BOOTCONFIG), \
printf "%s\n" $(param) >> $@;)
+ cat $(INTERNAL_BOOTCONFIG_FILE) >> $@
INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_bootconfig $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
endif
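
For reference, the recipe above now concatenates both sources into vendor-bootconfig.img: each parameter in INTERNAL_BOOTCONFIG goes on its own line, followed by the raw contents of INTERNAL_BOOTCONFIG_FILE when one is set. A minimal Python sketch of the equivalent assembly, with placeholder inputs that are not part of the build system:

def build_vendor_bootconfig(params, bootconfig_file, out_path):
    """Sketch: mirror the recipe's printf loop plus the trailing cat."""
    with open(out_path, "w") as out:
        for param in params:            # $(INTERNAL_BOOTCONFIG)
            out.write(f"{param}\n")
        if bootconfig_file:             # $(INTERNAL_BOOTCONFIG_FILE), if provided
            with open(bootconfig_file) as extra:
                out.write(extra.read())

# e.g. build_vendor_bootconfig(["androidboot.x=1"], "device/acme/bootconfig.txt",
#                              "vendor-bootconfig.img")  # paths are hypothetical
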
@@ -3488,12 +3489,14 @@
# $(2): The partition's staging directory
# $(3): Files to include in the partition
define write-partition-file-list
+$(1): PRIVATE_FILES := $(subst $(2)/,,$(filter $(2)/%,$(3)))
+$(1): PRIVATE_EXTRA_INSTALL_ZIPS := $(call relevant-extra-install-zips,$(filter $(2)/%,$(3)))
$(1): $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(foreach p,$(call relevant-extra-install-zips,$(filter $(2)/%,$(3))),$(call word-colon,3,$(p)))
@echo Writing $$@
rm -f $$@
echo -n > $$@
- $$(foreach f,$(subst $(2)/,,$(filter $(2)/%,$(3))),echo "$$(f)" >> $$@$$(newline))
- $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(2) $(call relevant-extra-install-zips,$(filter $(2)/%,$(3))) >> $$@
+ $$(foreach f,$$(PRIVATE_FILES),echo "$$(f)" >> $$@$$(newline))
+ $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(2) $$(PRIVATE_EXTRA_INSTALL_ZIPS) >> $$@
endef
# -----------------------------------------------------------------
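
The write-partition-file-list change above moves the two computed lists into target-specific PRIVATE_ variables, so the recipe expands precomputed values instead of re-evaluating the filter/subst calls at recipe time. A rough Python sketch of what the generated recipe writes; the extra_install_zips_file_list output is appended separately, and all names here are illustrative:

def write_partition_file_list(out_path, staging_dir, all_files):
    """Sketch: list installed files under one partition, relative to its staging dir."""
    prefix = staging_dir.rstrip("/") + "/"
    partition_files = [f[len(prefix):] for f in all_files if f.startswith(prefix)]
    with open(out_path, "w") as out:
        for f in partition_files:       # one relative path per line, like the foreach/echo loop
            out.write(f + "\n")
    # The real rule then appends entries reported by extra_install_zips_file_list.
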
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index e5a8e44..70991c6 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -221,3 +221,8 @@
$(call soong_config_set_bool,video_codec,board_support_flexible_p010,$(if $(filter true,$(BOARD_SUPPORT_FLEXIBLE_P010)),true,false))
$(call soong_config_set,video_codec,board_support_mfc_version,$(BOARD_SUPPORT_MFC_VERSION))
$(call soong_config_set_bool,video_codec,board_use_codec2_aidl,$(if $(BOARD_USE_CODEC2_AIDL),true,false))
+$(call soong_config_set,video_codec,board_gpu_type,$(BOARD_GPU_TYPE))
+$(call soong_config_set_bool,video_codec,board_use_small_secure_memory,$(if $(filter true,$(BOARD_USE_SMALL_SECURE_MEMORY)),true,false))
+ifneq ($(BOARD_USE_MAX_SECURE_RESOURCE),)
+ $(call soong_config_set,video_codec,board_use_max_secure_resource,$(BOARD_USE_MAX_SECURE_RESOURCE))
+endif
diff --git a/core/binary.mk b/core/binary.mk
index 3481144..ea862be 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -174,7 +174,7 @@
endif
endif
-my_ndk_sysroot_include :=
+my_ndk_sysroot :=
my_ndk_sysroot_lib :=
my_api_level := 10000
@@ -207,11 +207,9 @@
my_built_ndk := $(SOONG_OUT_DIR)/ndk
my_ndk_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_TRIPLE)
- my_ndk_sysroot_include := \
- $(my_built_ndk)/sysroot/usr/include \
- $(my_built_ndk)/sysroot/usr/include/$(my_ndk_triple) \
+ my_ndk_sysroot := $(my_built_ndk)/sysroot
- my_ndk_sysroot_lib := $(my_built_ndk)/sysroot/usr/lib/$(my_ndk_triple)/$(my_ndk_api)
+ my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/lib/$(my_ndk_triple)/$(my_ndk_api)
# The bionic linker now has support for packed relocations and gnu style
# hashes (which are much faster!), but shipping to older devices requires
@@ -1628,19 +1626,6 @@
###########################################################
ifndef LOCAL_IS_HOST_MODULE
-ifeq ($(call module-in-vendor-or-product),true)
- my_target_global_c_includes :=
- my_target_global_c_system_includes := $(TARGET_OUT_HEADERS)
-else ifdef LOCAL_SDK_VERSION
- my_target_global_c_includes :=
- my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include)
-else
- my_target_global_c_includes := $(SRC_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
- my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
- $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
-endif
-
my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS)
my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
my_target_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
@@ -1656,6 +1641,22 @@
my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS)
endif # my_use_clang_lld
+ifeq ($(call module-in-vendor-or-product),true)
+ my_target_global_c_includes :=
+ my_target_global_c_system_includes := $(TARGET_OUT_HEADERS)
+ my_target_global_cflags += -nostdlibinc
+else ifdef LOCAL_SDK_VERSION
+ my_target_global_c_includes :=
+ my_target_global_c_system_includes := $(my_ndk_stl_include_path)
+ my_target_global_cflags += --sysroot $(my_ndk_sysroot)
+else
+ my_target_global_c_includes := $(SRC_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
+ my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
+ $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
+ my_target_global_cflags += -nostdlibinc
+endif
+
my_target_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)TRIPLE)
ifndef LOCAL_IS_HOST_MODULE
my_target_triple_flag := -target $(my_target_triple)$(my_api_level)
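
The relocated block above selects the C include roots and the matching compiler flag in one place, after my_target_global_cflags is initialized, so the -nostdlibinc / --sysroot choice can be appended to it. An illustrative Python restatement of the three branches, with variable names standing in for the make variables:

def target_include_config(in_vendor_or_product, sdk_version, ndk_sysroot,
                          ndk_stl_includes, src_headers, src_system_headers):
    """Sketch of the vendor/SDK/platform branch added to binary.mk."""
    extra_cflags = []
    if in_vendor_or_product:
        includes, system_includes = [], ["$(TARGET_OUT_HEADERS)"]
        extra_cflags.append("-nostdlibinc")
    elif sdk_version:                       # LOCAL_SDK_VERSION set: build against the NDK
        includes, system_includes = [], list(ndk_stl_includes)
        extra_cflags += ["--sysroot", ndk_sysroot]
    else:                                   # platform module
        includes, system_includes = list(src_headers), list(src_system_headers)
        extra_cflags.append("-nostdlibinc")
    return includes, system_includes, extra_cflags
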
diff --git a/core/board_config.mk b/core/board_config.mk
index 5606964..38baa0a 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -27,6 +27,7 @@
_board_strip_readonly_list += BOARD_KERNEL_CMDLINE
_board_strip_readonly_list += BOARD_BOOT_HEADER_VERSION
_board_strip_readonly_list += BOARD_BOOTCONFIG
+_board_strip_readonly_list += BOARD_BOOTCONFIG_FILE
_board_strip_readonly_list += BOARD_KERNEL_BASE
_board_strip_readonly_list += BOARD_USES_GENERIC_AUDIO
_board_strip_readonly_list += BOARD_USES_RECOVERY_AS_BOOT
@@ -311,9 +312,10 @@
.KATI_READONLY := $(_board_strip_readonly_list)
INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
-ifneq (,$(BOARD_BOOTCONFIG))
+ifneq (,$(BOARD_BOOTCONFIG)$(BOARD_BOOTCONFIG_FILE))
INTERNAL_KERNEL_CMDLINE += bootconfig
INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG)
+ INTERNAL_BOOTCONFIG_FILE := $(BOARD_BOOTCONFIG_FILE)
endif
ifneq ($(filter %64,$(TARGET_ARCH)),)
diff --git a/core/config.mk b/core/config.mk
index 192c8b2..2df9a2d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -432,13 +432,6 @@
endif
.KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED
-ifdef PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE
- TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := $(PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE)
-else
- TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false
-endif
-.KATI_READONLY := TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE
-
# Boolean variable determining if AOSP relies on bionic's PAGE_SIZE macro.
ifdef PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
TARGET_NO_BIONIC_PAGE_SIZE_MACRO := $(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)
@@ -817,6 +810,18 @@
endif
endif
+ifdef PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE
+ TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := $(PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE)
+else ifeq (true,$(TARGET_BUILD_UNBUNDLED))
+ # unbundled builds may not have updated build sources
+ TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false
+else ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),)
+ TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := true
+else
+ TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false
+endif
+.KATI_READONLY := TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE
+
# Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
# devices if unset.
ifndef BOARD_SYSTEMSDK_VERSIONS
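
The relocated TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE block now also defaults the check to true for devices shipping with API level 36 or later, while still letting PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE override it and keeping it off for unbundled builds. A small Python sketch of that precedence, with inputs standing in for the make variables:

def check_prebuilt_max_page_size(product_override, unbundled, shipping_api_level):
    """Sketch: same precedence as the config.mk branch above."""
    if product_override is not None:         # PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE wins
        return product_override
    if unbundled:                            # unbundled builds may lack updated sources
        return False
    if shipping_api_level is not None and shipping_api_level >= 36:
        return True
    return False

assert check_prebuilt_max_page_size(None, False, 36) is True
assert check_prebuilt_max_page_size(None, True, 36) is False
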
diff --git a/core/main.mk b/core/main.mk
index 6095587..24055e8 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -290,7 +290,7 @@
$(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
# Build bootloader.img/radio.img, and unpack the partitions.
-include $(BUILD_SYSTEM)/tasks/tools/update_bootloader_radio_image.mk
+-include vendor/google/build/tasks/tools/update_bootloader_radio_image.mk
# For an unbundled image, we can skip blueprint_tools because unbundled image
# aims to remove a large number framework projects from the manifest, the
diff --git a/core/ravenwood_test_config_template.xml b/core/ravenwood_test_config_template.xml
index 2f21bae..9e9dd76 100644
--- a/core/ravenwood_test_config_template.xml
+++ b/core/ravenwood_test_config_template.xml
@@ -22,6 +22,7 @@
<option name="use-ravenwood-resources" value="true" />
<option name="exclude-paths" value="java" />
<option name="null-device" value="true" />
+ <option name="do-not-swallow-runner-errors" value="true" />
{EXTRA_CONFIGS}
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 8c57ce6..a511d5c 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -182,8 +182,10 @@
$(call add_json_bool, Uml, $(filter true,$(TARGET_USER_MODE_LINUX)))
$(call add_json_str, VendorPath, $(TARGET_COPY_OUT_VENDOR))
+$(call add_json_bool, BuildingVendorImage, $(BUILDING_VENDOR_IMAGE))
$(call add_json_str, OdmPath, $(TARGET_COPY_OUT_ODM))
$(call add_json_str, ProductPath, $(TARGET_COPY_OUT_PRODUCT))
+$(call add_json_bool, BuildingProductImage, $(BUILDING_PRODUCT_IMAGE))
$(call add_json_str, SystemExtPath, $(TARGET_COPY_OUT_SYSTEM_EXT))
$(call add_json_bool, MinimizeJavaDebugInfo, $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
@@ -424,6 +426,11 @@
$(call add_json_list, ProductPackages, $(PRODUCT_PACKAGES))
$(call add_json_list, ProductPackagesDebug, $(PRODUCT_PACKAGES_DEBUG))
+ $(call add_json_map, ProductCopyFiles)
+ $(foreach pair,$(PRODUCT_COPY_FILES),\
+ $(call add_json_str,$(word 1,$(subst :, ,$(pair))),$(word 2,$(subst :, ,$(pair)))))
+ $(call end_json_map)
+
$(call end_json_map)
$(call json_end)
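
The new ProductCopyFiles map above serializes each PRODUCT_COPY_FILES entry as a source-to-destination pair in the Soong variables JSON. A hedged Python sketch of the transformation the foreach performs; entries may carry extra colon-separated fields, which are ignored here just as the make code reads only the first two words:

def product_copy_files_to_map(entries):
    """Sketch: 'src:dest[:owner]' entries -> {src: dest}, mirroring the add_json_str loop."""
    copy_map = {}
    for entry in entries:
        fields = entry.split(":")
        copy_map[fields[0]] = fields[1]
    return copy_map

# Hypothetical entry; real values come from the product makefiles.
print(product_copy_files_to_map(["device/acme/fstab:vendor/etc/fstab"]))
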
diff --git a/core/tasks/tools/update_bootloader_radio_image.mk b/core/tasks/tools/update_bootloader_radio_image.mk
deleted file mode 100644
index adb86ea..0000000
--- a/core/tasks/tools/update_bootloader_radio_image.mk
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (C) 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http:#www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-ifeq ($(USES_DEVICE_GOOGLE_ZUMA),true)
- -include vendor/google_devices/zuma/prebuilts/misc_bins/update_bootloader_radio_image.mk
-endif
-ifeq ($(USES_DEVICE_GOOGLE_ZUMAPRO),true)
- -include vendor/google_devices/zumapro/prebuilts/misc_bins/update_bootloader_radio_image.mk
-endif
-ifeq ($(USES_DEVICE_GOOGLE_LAGUNA),true)
- -include vendor/google_devices/laguna/prebuilts/misc_bins/update_bootloader_radio_image.mk
-endif
-ifeq ($(USES_DEVICE_GOOGLE_MALIBU),true)
- -include vendor/google_devices/malibu/prebuilts/misc_bins/update_bootloader_radio_image.mk
-endif
diff --git a/envsetup.sh b/envsetup.sh
index 3fed5ae..554a220 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -362,7 +362,6 @@
packages/modules/adb/adb.bash
system/core/fastboot/fastboot.bash
tools/asuite/asuite.sh
- prebuilts/bazel/common/bazel-complete.bash
)
# Completion can be disabled selectively to allow users to use non-standard completion.
# e.g.
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 74ed82d..4a27b7d 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -287,6 +287,7 @@
tombstoned \
traced \
traced_probes \
+ tradeinmode \
tune2fs \
uiautomator \
uinput \
diff --git a/target/product/generic/Android.bp b/target/product/generic/Android.bp
index f86774b..c980959 100644
--- a/target/product/generic/Android.bp
+++ b/target/product/generic/Android.bp
@@ -536,6 +536,7 @@
"tombstoned", // base_system
"traced", // base_system
"traced_probes", // base_system
+ "tradeinmode", // base_system
"tune2fs", // base_system
"uiautomator", // base_system
"uinput", // base_system
@@ -785,6 +786,7 @@
"libbinder_ndk",
"libbinder_rpc_unstable",
"libcamera2ndk",
+ "libcgrouprc", // llndk library
"libclang_rt.asan",
"libcompiler_rt",
"libcutils", // used by many libs
diff --git a/teams/Android.bp b/teams/Android.bp
index 4c40287..21f5222 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -13,6 +13,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+// DON'T ADD NEW RULES HERE. For more details refer to
+// go/new-android-ownership-model
+
package {
default_applicable_licenses: ["Android-Apache-2.0"],
}
@@ -4475,3 +4478,13 @@
// go/trendy/manage/engineers/6303298703949824
trendy_team_id: "6303298703949824",
}
+
+team {
+ name: "trendy_team_desktop_stats",
+
+ // go/trendy/manage/engineers/5440764114206720
+ trendy_team_id: "5440764114206720",
+}
+
+// DON'T ADD NEW RULES HERE. For more details refer to
+// go/new-android-ownership-model
diff --git a/tools/aconfig/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs
index edb4fd3..e184efe 100644
--- a/tools/aconfig/aconfig/src/main.rs
+++ b/tools/aconfig/aconfig/src/main.rs
@@ -51,8 +51,7 @@
.subcommand(
Command::new("create-cache")
.arg(Arg::new("package").long("package").required(true))
- // TODO(b/312769710): Make this argument required.
- .arg(Arg::new("container").long("container"))
+ .arg(Arg::new("container").long("container").required(true))
.arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
.arg(Arg::new("values").long("values").action(ArgAction::Append))
.arg(
diff --git a/tools/aconfig/aconfig_storage_read_api/src/lib.rs b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
index 884f148..5104cd0 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
@@ -44,6 +44,7 @@
pub use aconfig_storage_file::{AconfigStorageError, FlagValueType, StorageFileType};
pub use flag_table_query::FlagReadContext;
+pub use mapped_file::map_file;
pub use package_table_query::PackageReadContext;
use aconfig_storage_file::read_u32_from_bytes;
@@ -114,13 +115,13 @@
/// Get the boolean flag value.
///
-/// \input file: mapped flag file
+/// \input file: a byte slice, which can be either &Mmap or &MmapMut
/// \input index: boolean flag offset
///
/// \return
/// If the provided offset is valid, it returns the boolean flag value, otherwise it
/// returns the error message.
-pub fn get_boolean_flag_value(file: &Mmap, index: u32) -> Result<bool, AconfigStorageError> {
+pub fn get_boolean_flag_value(file: &[u8], index: u32) -> Result<bool, AconfigStorageError> {
find_boolean_flag_value(file, index)
}
@@ -148,7 +149,7 @@
/// Get the flag attribute.
///
-/// \input file: mapped flag info file
+/// \input file: a byte slice, which can be either &Mmap or &MmapMut
/// \input flag_type: flag value type
/// \input flag_index: flag index
///
@@ -156,7 +157,7 @@
/// If the provided offset is valid, it returns the flag attribute bitfield, otherwise it
/// returns the error message.
pub fn get_flag_attribute(
- file: &Mmap,
+ file: &[u8],
flag_type: FlagValueType,
flag_index: u32,
) -> Result<u8, AconfigStorageError> {
diff --git a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
index 32dbed8..2c1884a 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
@@ -28,7 +28,7 @@
/// The memory mapped file may have undefined behavior if there are writes to this
/// file after being mapped. Ensure no writes can happen to this file while this
/// mapping stays alive.
-unsafe fn map_file(file_path: &str) -> Result<Mmap, AconfigStorageError> {
+pub unsafe fn map_file(file_path: &str) -> Result<Mmap, AconfigStorageError> {
let file = File::open(file_path)
.map_err(|errmsg| FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg)))?;
unsafe {
diff --git a/tools/aconfig/aconfig_storage_write_api/Android.bp b/tools/aconfig/aconfig_storage_write_api/Android.bp
index 0f1962c..4c882b4 100644
--- a/tools/aconfig/aconfig_storage_write_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_write_api/Android.bp
@@ -16,6 +16,11 @@
"libaconfig_storage_file",
"libaconfig_storage_read_api",
],
+ min_sdk_version: "34",
+ apex_available: [
+ "//apex_available:anyapex",
+ "//apex_available:platform",
+ ],
}
rust_library {
diff --git a/tools/edit_monitor/Android.bp b/tools/edit_monitor/Android.bp
index 3497821..fe4f213 100644
--- a/tools/edit_monitor/Android.bp
+++ b/tools/edit_monitor/Android.bp
@@ -35,6 +35,12 @@
pkg_path: "edit_monitor",
srcs: [
"daemon_manager.py",
+ "edit_monitor.py",
+ ],
+ libs: [
+ "asuite_cc_client",
+ "edit_event_proto",
+ "watchdog",
],
}
@@ -53,6 +59,21 @@
},
}
+python_test_host {
+ name: "edit_monitor_test",
+ main: "edit_monitor_test.py",
+ pkg_path: "edit_monitor",
+ srcs: [
+ "edit_monitor_test.py",
+ ],
+ libs: [
+ "edit_monitor_lib",
+ ],
+ test_options: {
+ unit_test: true,
+ },
+}
+
python_binary_host {
name: "edit_monitor",
pkg_path: "edit_monitor",
diff --git a/tools/edit_monitor/daemon_manager.py b/tools/edit_monitor/daemon_manager.py
index 892c292..29746e4 100644
--- a/tools/edit_monitor/daemon_manager.py
+++ b/tools/edit_monitor/daemon_manager.py
@@ -13,17 +13,22 @@
# limitations under the License.
+import getpass
import hashlib
import logging
import multiprocessing
import os
import pathlib
+import platform
import signal
import subprocess
import sys
import tempfile
import time
+from atest.metrics import clearcut_client
+from atest.proto import clientanalytics_pb2
+from proto import edit_event_pb2
DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS = 1
DEFAULT_MONITOR_INTERVAL_SECONDS = 5
@@ -31,6 +36,9 @@
DEFAULT_CPU_USAGE_THRESHOLD = 200
DEFAULT_REBOOT_TIMEOUT_SECONDS = 60 * 60 * 24
BLOCK_SIGN_FILE = "edit_monitor_block_sign"
+# Enum of the Clearcut log source defined under
+# /google3/wireless/android/play/playlog/proto/log_source_enum.proto
+LOG_SOURCE = 2524
def default_daemon_target():
@@ -46,11 +54,16 @@
binary_path: str,
daemon_target: callable = default_daemon_target,
daemon_args: tuple = (),
+ cclient: clearcut_client.Clearcut | None = None,
):
self.binary_path = binary_path
self.daemon_target = daemon_target
self.daemon_args = daemon_args
+ self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE)
+ self.user_name = getpass.getuser()
+ self.host_name = platform.node()
+ self.source_root = os.environ.get("ANDROID_BUILD_TOP", "")
self.pid = os.getpid()
self.daemon_process = None
@@ -70,13 +83,20 @@
logging.warning("Block sign found, exiting...")
return
- if self.binary_path.startswith('/google/cog/'):
+ if self.binary_path.startswith("/google/cog/"):
logging.warning("Edit monitor for cog is not supported, exiting...")
return
- self._stop_any_existing_instance()
- self._write_pid_to_pidfile()
- self._start_daemon_process()
+ try:
+ self._stop_any_existing_instance()
+ self._write_pid_to_pidfile()
+ self._start_daemon_process()
+ except Exception as e:
+ logging.exception("Failed to start daemon manager with error %s", e)
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
+ raise e
def monitor_daemon(
self,
@@ -118,6 +138,9 @@
logging.error(
"Daemon process is consuming too much resource, killing..."
),
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE
+ )
self._terminate_process(self.daemon_process.pid)
logging.info(
@@ -133,12 +156,22 @@
logging.debug("in daemon manager cleanup.")
try:
- if self.daemon_process and self.daemon_process.is_alive():
- self._terminate_process(self.daemon_process.pid)
+ if self.daemon_process:
+ # The daemon process might already be terminating;
+ # wait some time before killing it explicitly.
+ self._wait_for_process_terminate(self.daemon_process.pid, 1)
+ if self.daemon_process.is_alive():
+ self._terminate_process(self.daemon_process.pid)
self._remove_pidfile()
logging.debug("Successfully stopped daemon manager.")
except Exception as e:
logging.exception("Failed to stop daemon manager with error %s", e)
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+ )
+ sys.exit(1)
+ finally:
+ self.cclient.flush_events()
def reboot(self):
"""Reboots the current process.
@@ -160,6 +193,9 @@
os.execv(self.binary_path, sys.argv)
except OSError as e:
logging.exception("Failed to reboot process with error: %s.", e)
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
+ )
sys.exit(1) # Indicate an error occurred
def cleanup(self):
@@ -171,6 +207,7 @@
that requires immediate cleanup to prevent damage to the system.
"""
logging.debug("Start cleaning up all existing instances.")
+ self._send_error_event_to_clearcut(edit_event_pb2.EditEvent.FORCE_CLEANUP)
try:
# First place a block sign to prevent any edit monitor process from starting.
@@ -227,6 +264,7 @@
p = multiprocessing.Process(
target=self.daemon_target, args=self.daemon_args
)
+ p.daemon = True
p.start()
logging.info("Start subprocess with PID %d", p.pid)
@@ -350,4 +388,19 @@
except (FileNotFoundError, IOError, ValueError, TypeError):
logging.exception("Failed to get pid from file path: %s", file)
- return pids
\ No newline at end of file
+ return pids
+
+ def _send_error_event_to_clearcut(self, error_type):
+ edit_monitor_error_event_proto = edit_event_pb2.EditEvent(
+ user_name=self.user_name,
+ host_name=self.host_name,
+ source_root=self.source_root,
+ )
+ edit_monitor_error_event_proto.edit_monitor_error_event.CopyFrom(
+ edit_event_pb2.EditEvent.EditMonitorErrorEvent(error_type=error_type)
+ )
+ log_event = clientanalytics_pb2.LogEvent(
+ event_time_ms=int(time.time() * 1000),
+ source_extension=edit_monitor_error_event_proto.SerializeToString(),
+ )
+ self.cclient.log(log_event)
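
With the additions above, DaemonManager reports start/stop/reboot failures and resource-limit kills to Clearcut through the injected cclient, and stop() flushes pending events. A hedged usage sketch mirroring how main.py wires it up; the optional fake client from the tests can stand in for a real one:

from edit_monitor import daemon_manager, edit_monitor

def run_monitor(binary_path, path, cclient=None):
    """Sketch: start the daemon manager and always stop (flushing events) on exit."""
    dm = daemon_manager.DaemonManager(
        binary_path=binary_path,
        daemon_target=edit_monitor.start,
        daemon_args=(path, True),      # dry run; no events actually sent
        cclient=cclient,               # None -> a real Clearcut(LOG_SOURCE) client
    )
    try:
        dm.start()                     # failures log FAILED_TO_START_EDIT_MONITOR and re-raise
        dm.monitor_daemon()
    finally:
        dm.stop()                      # cleanup also flushes any pending Clearcut events
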
diff --git a/tools/edit_monitor/daemon_manager_test.py b/tools/edit_monitor/daemon_manager_test.py
index 72442c6..e132000 100644
--- a/tools/edit_monitor/daemon_manager_test.py
+++ b/tools/edit_monitor/daemon_manager_test.py
@@ -26,6 +26,7 @@
import unittest
from unittest import mock
from edit_monitor import daemon_manager
+from proto import edit_event_pb2
TEST_BINARY_FILE = '/path/to/test_binary'
@@ -133,7 +134,8 @@
def test_start_return_directly_if_in_cog_env(self):
dm = daemon_manager.DaemonManager(
- '/google/cog/cloud/user/workspace/edit_monitor')
+ '/google/cog/cloud/user/workspace/edit_monitor'
+ )
dm.start()
# Verify no daemon process is started.
self.assertIsNone(dm.daemon_process)
@@ -148,9 +150,13 @@
with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
f.write('123456')
- with self.assertRaises(OSError) as error:
- dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+ fake_cclient = FakeClearcutClient()
+ with self.assertRaises(OSError):
+ dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient)
dm.start()
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
def test_start_failed_to_write_pidfile(self):
pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
@@ -160,40 +166,63 @@
# Makes the directory read-only so write pidfile will fail.
os.chmod(pid_file_path_dir, 0o555)
- with self.assertRaises(PermissionError) as error:
- dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+ fake_cclient = FakeClearcutClient()
+ with self.assertRaises(PermissionError):
+ dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient)
dm.start()
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
def test_start_failed_to_start_daemon_process(self):
- with self.assertRaises(TypeError) as error:
+ fake_cclient = FakeClearcutClient()
+ with self.assertRaises(TypeError):
dm = daemon_manager.DaemonManager(
- TEST_BINARY_FILE, daemon_target='wrong_target', daemon_args=(1)
+ TEST_BINARY_FILE,
+ daemon_target='wrong_target',
+ daemon_args=(1),
+ cclient=fake_cclient,
)
dm.start()
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
def test_monitor_daemon_subprocess_killed_high_memory_usage(self):
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
TEST_BINARY_FILE,
daemon_target=memory_consume_daemon_target,
daemon_args=(2,),
+ cclient=fake_cclient,
)
dm.start()
dm.monitor_daemon(interval=1, memory_threshold=2)
self.assertTrue(dm.max_memory_usage >= 2)
self.assert_no_subprocess_running()
+ self._assert_error_event_logged(
+ fake_cclient,
+ edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE,
+ )
def test_monitor_daemon_subprocess_killed_high_cpu_usage(self):
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
TEST_BINARY_FILE,
daemon_target=cpu_consume_daemon_target,
daemon_args=(20,),
+ cclient=fake_cclient,
)
dm.start()
dm.monitor_daemon(interval=1, cpu_threshold=20)
self.assertTrue(dm.max_cpu_usage >= 20)
self.assert_no_subprocess_running()
+ self._assert_error_event_logged(
+ fake_cclient,
+ edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE,
+ )
@mock.patch('subprocess.check_output')
def test_monitor_daemon_failed_does_not_matter(self, mock_output):
@@ -207,7 +236,8 @@
)
dm = daemon_manager.DaemonManager(
- binary_file.name, daemon_target=long_running_daemon
+ binary_file.name,
+ daemon_target=long_running_daemon,
)
dm.start()
dm.monitor_daemon(reboot_timeout=0.5)
@@ -226,27 +256,42 @@
@mock.patch('os.kill')
def test_stop_failed_to_kill_daemon_process(self, mock_kill):
mock_kill.side_effect = OSError('Unknown OSError')
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
- TEST_BINARY_FILE, daemon_target=long_running_daemon
+ TEST_BINARY_FILE,
+ daemon_target=long_running_daemon,
+ cclient=fake_cclient,
)
- dm.start()
- dm.stop()
- self.assertTrue(dm.daemon_process.is_alive())
- self.assertTrue(dm.pid_file_path.exists())
+ with self.assertRaises(SystemExit):
+ dm.start()
+ dm.stop()
+ self.assertTrue(dm.daemon_process.is_alive())
+ self.assertTrue(dm.pid_file_path.exists())
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+ )
@mock.patch('os.remove')
def test_stop_failed_to_remove_pidfile(self, mock_remove):
mock_remove.side_effect = OSError('Unknown OSError')
+ fake_cclient = FakeClearcutClient()
dm = daemon_manager.DaemonManager(
- TEST_BINARY_FILE, daemon_target=long_running_daemon
+ TEST_BINARY_FILE,
+ daemon_target=long_running_daemon,
+ cclient=fake_cclient,
)
- dm.start()
- dm.stop()
- self.assert_no_subprocess_running()
- self.assertTrue(dm.pid_file_path.exists())
+ with self.assertRaises(SystemExit):
+ dm.start()
+ dm.stop()
+ self.assert_no_subprocess_running()
+ self.assertTrue(dm.pid_file_path.exists())
+
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+ )
@mock.patch('os.execv')
def test_reboot_success(self, mock_execv):
@@ -273,7 +318,7 @@
)
dm.start()
- with self.assertRaises(SystemExit) as cm:
+ with self.assertRaises(SystemExit):
dm.reboot()
mock_execv.assert_not_called()
self.assertEqual(cm.exception.code, 0)
@@ -281,18 +326,24 @@
@mock.patch('os.execv')
def test_reboot_failed(self, mock_execv):
mock_execv.side_effect = OSError('Unknown OSError')
+ fake_cclient = FakeClearcutClient()
binary_file = tempfile.NamedTemporaryFile(
dir=self.working_dir.name, delete=False
)
dm = daemon_manager.DaemonManager(
- binary_file.name, daemon_target=long_running_daemon
+ binary_file.name,
+ daemon_target=long_running_daemon,
+ cclient=fake_cclient,
)
dm.start()
- with self.assertRaises(SystemExit) as cm:
+ with self.assertRaises(SystemExit):
dm.reboot()
self.assertEqual(cm.exception.code, 1)
+ self._assert_error_event_logged(
+ fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
+ )
def assert_run_simple_daemon_success(self):
damone_output_file = tempfile.NamedTemporaryFile(
@@ -374,6 +425,33 @@
f.write(str(p.pid))
return p
+ def _assert_error_event_logged(self, fake_cclient, error_type):
+ error_events = fake_cclient.get_sent_events()
+ self.assertEquals(len(error_events), 1)
+ self.assertEquals(
+ edit_event_pb2.EditEvent.FromString(
+ error_events[0].source_extension
+ ).edit_monitor_error_event.error_type,
+ error_type,
+ )
+
+
+class FakeClearcutClient:
+
+ def __init__(self):
+ self.pending_log_events = []
+ self.sent_log_event = []
+
+ def log(self, log_event):
+ self.pending_log_events.append(log_event)
+
+ def flush_events(self):
+ self.sent_log_event.extend(self.pending_log_events)
+ self.pending_log_events.clear()
+
+ def get_sent_events(self):
+ return self.sent_log_event + self.pending_log_events
+
if __name__ == '__main__':
unittest.main()
diff --git a/tools/edit_monitor/edit_monitor.py b/tools/edit_monitor/edit_monitor.py
new file mode 100644
index 0000000..ab528e8
--- /dev/null
+++ b/tools/edit_monitor/edit_monitor.py
@@ -0,0 +1,220 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import getpass
+import logging
+import multiprocessing.connection
+import os
+import pathlib
+import platform
+import threading
+import time
+
+from atest.metrics import clearcut_client
+from atest.proto import clientanalytics_pb2
+from proto import edit_event_pb2
+from watchdog.events import FileSystemEvent
+from watchdog.events import PatternMatchingEventHandler
+from watchdog.observers import Observer
+
+# Enum of the Clearcut log source defined under
+# /google3/wireless/android/play/playlog/proto/log_source_enum.proto
+LOG_SOURCE = 2524
+DEFAULT_FLUSH_INTERVAL_SECONDS = 5
+DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD = 100
+
+
+class ClearcutEventHandler(PatternMatchingEventHandler):
+
+ def __init__(
+ self,
+ path: str,
+ flush_interval_sec: int,
+ single_events_size_threshold: int,
+ is_dry_run: bool = False,
+ cclient: clearcut_client.Clearcut | None = None,
+ ):
+
+ super().__init__(patterns=["*"], ignore_directories=True)
+ self.root_monitoring_path = path
+ self.flush_interval_sec = flush_interval_sec
+ self.single_events_size_threshold = single_events_size_threshold
+ self.is_dry_run = is_dry_run
+ self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE)
+
+ self.user_name = getpass.getuser()
+ self.host_name = platform.node()
+ self.source_root = os.environ.get("ANDROID_BUILD_TOP", "")
+
+ self.pending_events = []
+ self._scheduled_log_thread = None
+ self._pending_events_lock = threading.Lock()
+
+ def on_moved(self, event: FileSystemEvent):
+ self._log_edit_event(event, edit_event_pb2.EditEvent.MOVE)
+
+ def on_created(self, event: FileSystemEvent):
+ self._log_edit_event(event, edit_event_pb2.EditEvent.CREATE)
+
+ def on_deleted(self, event: FileSystemEvent):
+ self._log_edit_event(event, edit_event_pb2.EditEvent.DELETE)
+
+ def on_modified(self, event: FileSystemEvent):
+ self._log_edit_event(event, edit_event_pb2.EditEvent.MODIFY)
+
+ def flushall(self):
+ logging.info("flushing all pending events.")
+ if self._scheduled_log_thread:
+ logging.info("canceling log thread")
+ self._scheduled_log_thread.cancel()
+ self._scheduled_log_thread = None
+
+ self._log_clearcut_events()
+ self.cclient.flush_events()
+
+ def _log_edit_event(
+ self, event: FileSystemEvent, edit_type: edit_event_pb2.EditEvent.EditType
+ ):
+ try:
+ event_time = time.time()
+
+ if self._is_hidden_file(pathlib.Path(event.src_path)):
+ logging.debug("ignore hidden file: %s.", event.src_path)
+ return
+
+ if not self._is_under_git_project(pathlib.Path(event.src_path)):
+ logging.debug(
+ "ignore file %s which does not belong to a git project",
+ event.src_path,
+ )
+ return
+
+ logging.info("%s: %s", event.event_type, event.src_path)
+
+ event_proto = edit_event_pb2.EditEvent(
+ user_name=self.user_name,
+ host_name=self.host_name,
+ source_root=self.source_root,
+ )
+ event_proto.single_edit_event.CopyFrom(
+ edit_event_pb2.EditEvent.SingleEditEvent(
+ file_path=event.src_path, edit_type=edit_type
+ )
+ )
+ with self._pending_events_lock:
+ self.pending_events.append((event_proto, event_time))
+ if not self._scheduled_log_thread:
+ logging.debug(
+ "Scheduling thread to run in %d seconds", self.flush_interval_sec
+ )
+ self._scheduled_log_thread = threading.Timer(
+ self.flush_interval_sec, self._log_clearcut_events
+ )
+ self._scheduled_log_thread.start()
+
+ except Exception:
+ logging.exception("Failed to log edit event.")
+
+ def _is_hidden_file(self, file_path: pathlib.Path) -> bool:
+ return any(
+ part.startswith(".")
+ for part in file_path.relative_to(self.root_monitoring_path).parts
+ )
+
+ def _is_under_git_project(self, file_path: pathlib.Path) -> bool:
+ root_path = pathlib.Path(self.root_monitoring_path).resolve()
+ return any(
+ root_path.joinpath(dir).joinpath('.git').exists()
+ for dir in file_path.relative_to(root_path).parents
+ )
+
+ def _log_clearcut_events(self):
+ with self._pending_events_lock:
+ self._scheduled_log_thread = None
+ edit_events = self.pending_events
+ self.pending_events = []
+
+ pending_events_size = len(edit_events)
+ if pending_events_size > self.single_events_size_threshold:
+ logging.info(
+ "got %d events in %d seconds, sending aggregated events instead",
+ pending_events_size,
+ self.flush_interval_sec,
+ )
+ aggregated_event_time = edit_events[0][1]
+ aggregated_event_proto = edit_event_pb2.EditEvent(
+ user_name=self.user_name,
+ host_name=self.host_name,
+ source_root=self.source_root,
+ )
+ aggregated_event_proto.aggregated_edit_event.CopyFrom(
+ edit_event_pb2.EditEvent.AggregatedEditEvent(
+ num_edits=pending_events_size
+ )
+ )
+ edit_events = [(aggregated_event_proto, aggregated_event_time)]
+
+ if self.is_dry_run:
+ logging.info("Sent %d edit events in dry run.", len(edit_events))
+ return
+
+ for event_proto, event_time in edit_events:
+ log_event = clientanalytics_pb2.LogEvent(
+ event_time_ms=int(event_time * 1000),
+ source_extension=event_proto.SerializeToString(),
+ )
+ self.cclient.log(log_event)
+
+ logging.info("sent %d edit events", len(edit_events))
+
+
+def start(
+ path: str,
+ is_dry_run: bool = False,
+ flush_interval_sec: int = DEFAULT_FLUSH_INTERVAL_SECONDS,
+ single_events_size_threshold: int = DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD,
+ cclient: clearcut_client.Clearcut | None = None,
+ pipe_sender: multiprocessing.connection.Connection | None = None,
+):
+ """Method to start the edit monitor.
+
+ This is the entry point to start the edit monitor as a subprocess of
+ the daemon manager.
+
+ params:
+ path: The root path to monitor.
+ cclient: The Clearcut client used to send the edit logs.
+ pipe_sender: The sender end of the pipe used to communicate with the daemon manager.
+ """
+ event_handler = ClearcutEventHandler(
+ path, flush_interval_sec, single_events_size_threshold, is_dry_run, cclient)
+ observer = Observer()
+
+ logging.info("Starting observer on path %s.", path)
+ observer.schedule(event_handler, path, recursive=True)
+ observer.start()
+ logging.info("Observer started.")
+ if pipe_sender:
+ pipe_sender.send("Observer started.")
+
+ try:
+ while True:
+ time.sleep(1)
+ finally:
+ event_handler.flushall()
+ observer.stop()
+ observer.join()
+ if pipe_sender:
+ pipe_sender.close()
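
edit_monitor.start() is designed to run as the daemon manager's subprocess, but it can also be exercised directly, which is what the tests in the next file do: spawn it in a process, wait for the "Observer started." handshake on the pipe, then signal it to flush and exit. A minimal sketch under those assumptions; the monitored path is a placeholder and cclient=None falls back to a real Clearcut client:

import multiprocessing, os, signal, time
from edit_monitor import edit_monitor

receiver, sender = multiprocessing.Pipe()
p = multiprocessing.Process(
    target=edit_monitor.start,
    # (path, is_dry_run, flush_interval_sec, single_events_size_threshold, cclient, pipe_sender)
    args=("/path/to/android/checkout", True, 5, 100, None, sender),
)
p.daemon = True
p.start()
assert receiver.recv() == "Observer started."   # handshake sent once the observer is running
time.sleep(1)                                    # ...edit files under the monitored path here
os.kill(p.pid, signal.SIGINT)                    # KeyboardInterrupt triggers flushall() and stop
p.join()
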
diff --git a/tools/edit_monitor/edit_monitor_test.py b/tools/edit_monitor/edit_monitor_test.py
new file mode 100644
index 0000000..64a3871
--- /dev/null
+++ b/tools/edit_monitor/edit_monitor_test.py
@@ -0,0 +1,301 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for Edit Monitor."""
+
+import logging
+import multiprocessing
+import os
+import pathlib
+import signal
+import sys
+import tempfile
+import time
+import unittest
+
+from atest.proto import clientanalytics_pb2
+from edit_monitor import edit_monitor
+from proto import edit_event_pb2
+
+
+class EditMonitorTest(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ # Configure to print logging to stdout.
+ logging.basicConfig(filename=None, level=logging.DEBUG)
+ console = logging.StreamHandler(sys.stdout)
+ logging.getLogger('').addHandler(console)
+
+ def setUp(self):
+ super().setUp()
+ self.working_dir = tempfile.TemporaryDirectory()
+ self.root_monitoring_path = pathlib.Path(self.working_dir.name).joinpath(
+ 'files'
+ )
+ self.root_monitoring_path.mkdir()
+ self.log_event_dir = pathlib.Path(self.working_dir.name).joinpath('logs')
+ self.log_event_dir.mkdir()
+
+ def tearDown(self):
+ self.working_dir.cleanup()
+ super().tearDown()
+
+ def test_log_single_edit_event_success(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create and modify a file.
+ test_file = self.root_monitoring_path.joinpath('test.txt')
+ with open(test_file, 'w') as f:
+ f.write('something')
+ # Move the file.
+ test_file_moved = self.root_monitoring_path.joinpath('new_test.txt')
+ test_file.rename(test_file_moved)
+ # Delete the file.
+ test_file_moved.unlink()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 4)
+ expected_create_event = edit_event_pb2.EditEvent.SingleEditEvent(
+ file_path=str(
+ self.root_monitoring_path.joinpath('test.txt').resolve()
+ ),
+ edit_type=edit_event_pb2.EditEvent.CREATE,
+ )
+ expected_modify_event = edit_event_pb2.EditEvent.SingleEditEvent(
+ file_path=str(
+ self.root_monitoring_path.joinpath('test.txt').resolve()
+ ),
+ edit_type=edit_event_pb2.EditEvent.MODIFY,
+ )
+ expected_move_event = edit_event_pb2.EditEvent.SingleEditEvent(
+ file_path=str(
+ self.root_monitoring_path.joinpath('test.txt').resolve()
+ ),
+ edit_type=edit_event_pb2.EditEvent.MOVE,
+ )
+ expected_delete_event = edit_event_pb2.EditEvent.SingleEditEvent(
+ file_path=str(
+ self.root_monitoring_path.joinpath('new_test.txt').resolve()
+ ),
+ edit_type=edit_event_pb2.EditEvent.DELETE,
+ )
+ self.assertEqual(
+ expected_create_event,
+ edit_event_pb2.EditEvent.FromString(
+ logged_events[0].source_extension
+ ).single_edit_event,
+ )
+ self.assertEqual(
+ expected_modify_event,
+ edit_event_pb2.EditEvent.FromString(
+ logged_events[1].source_extension
+ ).single_edit_event,
+ )
+ self.assertEqual(
+ expected_move_event,
+ edit_event_pb2.EditEvent.FromString(
+ logged_events[2].source_extension
+ ).single_edit_event,
+ )
+ self.assertEqual(
+ expected_delete_event,
+ edit_event_pb2.EditEvent.FromString(
+ logged_events[3].source_extension
+ ).single_edit_event,
+ )
+
+
+ def test_log_aggregated_edit_event_success(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create 6 test files
+ for i in range(6):
+ test_file = self.root_monitoring_path.joinpath('test_' + str(i))
+ test_file.touch()
+
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 1)
+
+ expected_aggregated_edit_event = (
+ edit_event_pb2.EditEvent.AggregatedEditEvent(
+ num_edits=6,
+ )
+ )
+
+ self.assertEqual(
+ expected_aggregated_edit_event,
+ edit_event_pb2.EditEvent.FromString(
+ logged_events[0].source_extension
+ ).aggregated_edit_event,
+ )
+
+ def test_do_not_log_edit_event_for_directory_change(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create a sub directory
+ self.root_monitoring_path.joinpath('test_dir').mkdir()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 0)
+
+ def test_do_not_log_edit_event_for_hidden_file(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create a hidden file.
+ self.root_monitoring_path.joinpath('.test.txt').touch()
+ # Create a hidden dir.
+ hidden_dir = self.root_monitoring_path.joinpath('.test')
+ hidden_dir.mkdir()
+ hidden_dir.joinpath('test.txt').touch()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 0)
+
+ def test_do_not_log_edit_event_for_non_git_project_file(self):
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output')
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create a file.
+ self.root_monitoring_path.joinpath('test.txt').touch()
+ # Create a file under a sub dir.
+ sub_dir = self.root_monitoring_path.joinpath('.test')
+ sub_dir.mkdir()
+ sub_dir.joinpath('test.txt').touch()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 0)
+
+ def test_log_edit_event_fail(self):
+ # Create the .git file under the monitoring dir.
+ self.root_monitoring_path.joinpath('.git').touch()
+ fake_cclient = FakeClearcutClient(
+ log_output_file=self.log_event_dir.joinpath('logs.output'),
+ raise_log_exception=True,
+ )
+ p = self._start_test_edit_monitor_process(fake_cclient)
+
+ # Create a file.
+ self.root_monitoring_path.joinpath('test.txt').touch()
+ # Give some time for the edit monitor to receive the edit event.
+ time.sleep(1)
+ # Stop the edit monitor and flush all events.
+ os.kill(p.pid, signal.SIGINT)
+ p.join()
+
+ logged_events = self._get_logged_events()
+ self.assertEqual(len(logged_events), 0)
+
+ def _start_test_edit_monitor_process(
+ self, cclient
+ ) -> multiprocessing.Process:
+ receiver, sender = multiprocessing.Pipe()
+ # Start edit monitor in a subprocess.
+ p = multiprocessing.Process(
+ target=edit_monitor.start,
+ args=(str(self.root_monitoring_path.resolve()), False, 0.5, 5, cclient, sender),
+ )
+ p.daemon = True
+ p.start()
+
+ # Wait until observer started.
+ received_data = receiver.recv()
+ self.assertEquals(received_data, 'Observer started.')
+
+ receiver.close()
+ return p
+
+ def _get_logged_events(self):
+ with open(self.log_event_dir.joinpath('logs.output'), 'rb') as f:
+ data = f.read()
+
+ return [
+ clientanalytics_pb2.LogEvent.FromString(record)
+ for record in data.split(b'\x00')
+ if record
+ ]
+
+
+class FakeClearcutClient:
+
+ def __init__(self, log_output_file, raise_log_exception=False):
+ self.pending_log_events = []
+ self.raise_log_exception = raise_log_exception
+ self.log_output_file = log_output_file
+
+ def log(self, log_event):
+ if self.raise_log_exception:
+ raise Exception('unknown exception')
+ self.pending_log_events.append(log_event)
+
+ def flush_events(self):
+ delimiter = b'\x00' # Use a null byte as the delimiter
+ with open(self.log_output_file, 'wb') as f:
+ for log_event in self.pending_log_events:
+ f.write(log_event.SerializeToString() + delimiter)
+
+ self.pending_log_events.clear()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/edit_monitor/main.py b/tools/edit_monitor/main.py
index e69de29..6af421b 100644
--- a/tools/edit_monitor/main.py
+++ b/tools/edit_monitor/main.py
@@ -0,0 +1,108 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import os
+import signal
+import sys
+import tempfile
+
+from edit_monitor import daemon_manager
+from edit_monitor import edit_monitor
+
+
+def create_arg_parser():
+ """Creates an instance of the default arg parser."""
+
+ parser = argparse.ArgumentParser(
+ description=(
+ 'Monitors edits in Android source code and uploads the edit logs.'
+ ),
+ add_help=True,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+
+ parser.add_argument(
+ '--path',
+ type=str,
+ required=True,
+ help='Root path to monitor the edit events.',
+ )
+
+ parser.add_argument(
+ '--dry_run',
+ action='store_true',
+ help='Dry run the edit monitor. This starts the edit monitor process without actually sending the edit logs to clearcut.',
+ )
+
+ parser.add_argument(
+ '--force_cleanup',
+ action='store_true',
+ help=(
+ 'Instead of starting a new edit monitor, force stop all existing edit'
+ ' monitors on the system. This option is only used in emergency cases'
+ ' when we need to prevent damage caused by the edit monitor.'
+ ),
+ )
+
+ return parser
+
+
+def configure_logging():
+ root_logging_dir = tempfile.mkdtemp(prefix='edit_monitor_')
+ _, log_path = tempfile.mkstemp(dir=root_logging_dir, suffix='.log')
+
+ log_fmt = '%(asctime)s %(filename)s:%(lineno)s:%(levelname)s: %(message)s'
+ date_fmt = '%Y-%m-%d %H:%M:%S'
+ logging.basicConfig(
+ filename=log_path, level=logging.DEBUG, format=log_fmt, datefmt=date_fmt
+ )
+ # Filter out logs from inotify_buffer to prevent log pollution.
+ logging.getLogger('watchdog.observers.inotify_buffer').addFilter(
+ lambda record: record.filename != 'inotify_buffer.py')
+ print(f'logging to file {log_path}')
+
+
+def term_signal_handler(_signal_number, _frame):
+ logging.info('Process %d received SIGTERM, Terminating...', os.getpid())
+ sys.exit(0)
+
+
+def main(argv: list[str]):
+ args = create_arg_parser().parse_args(argv[1:])
+ if args.dry_run:
+ logging.info('This is a dry run.')
+ dm = daemon_manager.DaemonManager(
+ binary_path=argv[0],
+ daemon_target=edit_monitor.start,
+ daemon_args=(args.path, args.dry_run),
+ )
+
+ if args.force_cleanup:
+ dm.cleanup()
+
+ try:
+ dm.start()
+ dm.monitor_daemon()
+ except Exception:
+ logging.exception('Unexpected exception raised when running the daemon.')
+ finally:
+ dm.stop()
+
+
+if __name__ == '__main__':
+ signal.signal(signal.SIGTERM, term_signal_handler)
+ configure_logging()
+ main(sys.argv)
diff --git a/tools/edit_monitor/proto/edit_event.proto b/tools/edit_monitor/proto/edit_event.proto
index b3630bc..dc3d3f6 100644
--- a/tools/edit_monitor/proto/edit_event.proto
+++ b/tools/edit_monitor/proto/edit_event.proto
@@ -36,8 +36,6 @@
// Event that logs errors that happened in the edit monitor.
message EditMonitorErrorEvent {
ErrorType error_type = 1;
- string error_msg = 2;
- string stack_trace = 3;
}
// ------------------------
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 8c71044..e371b23 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -637,6 +637,8 @@
],
data: [
"testdata/**/*",
+ ],
+ device_common_data: [
":com.android.apex.compressed.v1",
":com.android.apex.vendor.foo.with_vintf",
],