Merge "wifi: Add 11BE feature support for hostapd" into main
diff --git a/ci/build_device_and_tests b/ci/build_device_and_tests
index 9d11268..63d3ce3 100755
--- a/ci/build_device_and_tests
+++ b/ci/build_device_and_tests
@@ -13,6 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+set -euo pipefail
-build/soong/soong_ui.bash --make-mode build_test_suites || exit $?
-$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites $@ || exit $?
+build/soong/soong_ui.bash --make-mode build_test_suites
+$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites --device-build $@
diff --git a/ci/build_test_suites b/ci/build_test_suites
index 9d11268..74470a8 100755
--- a/ci/build_test_suites
+++ b/ci/build_test_suites
@@ -13,6 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+set -euo pipefail
-build/soong/soong_ui.bash --make-mode build_test_suites || exit $?
-$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites $@ || exit $?
+build/soong/soong_ui.bash --make-mode build_test_suites
+$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites $@
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 4d3b546..b67ecec 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -30,9 +30,10 @@
import test_discovery_agent
-REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
+REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP', 'DIST_DIR'])
SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
LOG_PATH = 'logs/build_test_suites.log'
+REQUIRED_BUILD_TARGETS = frozenset(['dist'])
class Error(Exception):
@@ -73,31 +74,46 @@
build_targets = set()
packaging_commands_getters = []
- test_discovery_zip_regexes = set()
- optimization_rationale = ''
- try:
- # Do not use these regexes for now, only run this to collect data on what
- # would be optimized.
- test_discovery_zip_regexes = self._get_test_discovery_zip_regexes()
- logging.info(f'Discovered test discovery regexes: {test_discovery_zip_regexes}')
- except test_discovery_agent.TestDiscoveryError as e:
- optimization_rationale = e.message
- logging.warning(f'Unable to perform test discovery: {optimization_rationale}')
- for target in self.args.extra_targets:
- if optimization_rationale:
- get_metrics_agent().report_unoptimized_target(target, optimization_rationale)
- else:
- regex = r'\b(%s)\b' % re.escape(target)
- if any(re.search(regex, opt) for opt in test_discovery_zip_regexes):
- get_metrics_agent().report_unoptimized_target(target, 'Test artifact used.')
- else:
+ # In order to roll optimizations out differently between test suites and
+ # device builds, we have separate flags.
+ if (
+ 'test_suites_zip_test_discovery'
+ in self.build_context.enabled_build_features
+ and not self.args.device_build
+ ) or (
+ 'device_zip_test_discovery'
+ in self.build_context.enabled_build_features
+ and self.args.device_build
+ ):
+ preliminary_build_targets = self._collect_preliminary_build_targets()
+ else:
+ preliminary_build_targets = self._legacy_collect_preliminary_build_targets()
+
+ # Keep reporting metrics when test discovery is disabled.
+ # To be removed once test discovery is fully rolled out.
+ optimization_rationale = ''
+ test_discovery_zip_regexes = set()
+ try:
+ test_discovery_zip_regexes = self._get_test_discovery_zip_regexes()
+ logging.info(f'Discovered test discovery regexes: {test_discovery_zip_regexes}')
+ except test_discovery_agent.TestDiscoveryError as e:
+ optimization_rationale = e.message
+ logging.warning(f'Unable to perform test discovery: {optimization_rationale}')
+
+ for target in self.args.extra_targets:
+ if optimization_rationale:
+ get_metrics_agent().report_unoptimized_target(target, optimization_rationale)
+ continue
+ try:
+ regex = r'\b(%s.*)\b' % re.escape(target)
+ if any(re.search(regex, opt) for opt in test_discovery_zip_regexes):
+ get_metrics_agent().report_unoptimized_target(target, 'Test artifact used.')
+ continue
get_metrics_agent().report_optimized_target(target)
+ except Exception as e:
+ logging.error(f'unable to parse test discovery output: {repr(e)}')
- if self._unused_target_exclusion_enabled(
- target
- ) and not self.build_context.build_target_used(target):
- continue
-
+ for target in preliminary_build_targets:
target_optimizer_getter = self.target_optimizations.get(target, None)
if not target_optimizer_getter:
build_targets.add(target)
@@ -113,6 +129,51 @@
return BuildPlan(build_targets, packaging_commands_getters)
+ def _collect_preliminary_build_targets(self):
+ build_targets = set()
+ try:
+ test_discovery_zip_regexes = self._get_test_discovery_zip_regexes()
+ logging.info(f'Discovered test discovery regexes: {test_discovery_zip_regexes}')
+ except test_discovery_agent.TestDiscoveryError as e:
+ optimization_rationale = e.message
+ logging.warning(f'Unable to perform test discovery: {optimization_rationale}')
+
+      for target in self.args.extra_targets:
+        get_metrics_agent().report_unoptimized_target(target, optimization_rationale)
+      return self._legacy_collect_preliminary_build_targets()
+
+ for target in self.args.extra_targets:
+ if target in REQUIRED_BUILD_TARGETS:
+ build_targets.add(target)
+ continue
+
+      regex = r'\b(%s.*)\b' % re.escape(target)
+      try:
+        if any(re.search(regex, opt) for opt in test_discovery_zip_regexes):
+          get_metrics_agent().report_unoptimized_target(target, 'Test artifact used.')
+          build_targets.add(target)
+          continue
+        get_metrics_agent().report_optimized_target(target)
+      except Exception as e:
+        # In case of an exception, report the target as unoptimized and keep it.
+        build_targets.add(target)
+        get_metrics_agent().report_unoptimized_target(target, f'Error in parsing test discovery output for {target}: {repr(e)}')
+        logging.error(f'unable to parse test discovery output: {repr(e)}')
+
+ return build_targets
+
+ def _legacy_collect_preliminary_build_targets(self):
+ build_targets = set()
+ for target in self.args.extra_targets:
+ if self._unused_target_exclusion_enabled(
+ target
+ ) and not self.build_context.build_target_used(target):
+ continue
+
+ build_targets.add(target)
+ return build_targets
+
def _unused_target_exclusion_enabled(self, target: str) -> bool:
return (
f'{target}_unused_exclusion'
@@ -194,6 +255,11 @@
argparser.add_argument(
'extra_targets', nargs='*', help='Extra test suites to build.'
)
+ argparser.add_argument(
+ '--device-build',
+ action='store_true',
+ help='Flag to indicate running a device build.',
+ )
return argparser.parse_args(argv)
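
The gating added to create_build_plan enables the discovery-based target collection through two separate build features, so device builds (selected via the new --device-build flag) and test-suite builds can be ramped independently. A standalone sketch of that decision; the helper name is hypothetical, the feature names come from the change:

```python
# Minimal sketch of the rollout gating; `enabled_build_features` mirrors the
# BuildContext field used in the real code, the helper itself is made up.
def use_test_discovery(enabled_build_features: set, device_build: bool) -> bool:
    """Return True when the new discovery-based target collection should run."""
    if device_build:
        # Device builds are ramped with their own feature flag.
        return 'device_zip_test_discovery' in enabled_build_features
    # Test-suite builds are ramped independently.
    return 'test_suites_zip_test_discovery' in enabled_build_features


assert use_test_discovery({'device_zip_test_discovery'}, device_build=True)
assert not use_test_discovery({'device_zip_test_discovery'}, device_build=False)
assert use_test_discovery({'test_suites_zip_test_discovery'}, device_build=False)
```
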
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
index 26f4316..29d268e 100644
--- a/ci/build_test_suites_test.py
+++ b/ci/build_test_suites_test.py
@@ -78,6 +78,12 @@
with self.assert_raises_word(build_test_suites.Error, 'TOP'):
build_test_suites.main([])
+ def test_missing_dist_dir_env_var_raises(self):
+ del os.environ['DIST_DIR']
+
+ with self.assert_raises_word(build_test_suites.Error, 'DIST_DIR'):
+ build_test_suites.main([])
+
def test_invalid_arg_raises(self):
invalid_args = ['--invalid_arg']
@@ -114,6 +120,9 @@
self.soong_ui_dir = self.fake_top.joinpath('build/soong')
self.soong_ui_dir.mkdir(parents=True, exist_ok=True)
+ self.logs_dir = self.fake_top.joinpath('dist/logs')
+ self.logs_dir.mkdir(parents=True, exist_ok=True)
+
self.soong_ui = self.soong_ui_dir.joinpath('soong_ui.bash')
self.soong_ui.touch()
@@ -121,6 +130,7 @@
'TARGET_RELEASE': 'release',
'TARGET_PRODUCT': 'product',
'TOP': str(self.fake_top),
+ 'DIST_DIR': str(self.fake_top.joinpath('dist')),
})
self.mock_subprocess_run.return_value = 0
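
The new test cases pin down the added DIST_DIR requirement (the build log is presumably written under $DIST_DIR/logs, matching the dist/logs directory the fixture now creates). A sketch of the kind of environment check build_test_suites.main performs; the required-variable set comes from the change, the exact message format is assumed:

```python
import os

# Hedged sketch: Error mirrors the module's exception class, the message
# wording here is illustrative only.
REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP', 'DIST_DIR'])


class Error(Exception):
    pass


def check_required_env(environ=os.environ):
    missing = sorted(v for v in REQUIRED_ENV_VARS if v not in environ)
    if missing:
        raise Error('missing required environment variables: ' + ', '.join(missing))
```
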
diff --git a/core/Makefile b/core/Makefile
index 35a8a89..a7ab442 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -872,6 +872,7 @@
SOONG_CONV_DATA := $(call intermediates-dir-for,PACKAGING,soong_conversion)/soong_conv_data
$(SOONG_CONV_DATA):
@rm -f $@
+ @touch $@ # This file must be present even if SOONG_CONV is empty.
@$(foreach s,$(SOONG_CONV),echo "$(s),$(SOONG_CONV.$(s).TYPE),$(sort $(SOONG_CONV.$(s).PROBLEMS)),$(sort $(filter-out $(SOONG_ALREADY_CONV),$(SOONG_CONV.$(s).DEPS))),$(sort $(SOONG_CONV.$(s).MAKEFILES)),$(sort $(SOONG_CONV.$(s).INSTALLED))" >>$@;)
$(call declare-1p-target,$(SOONG_CONV_DATA),build)
@@ -2936,6 +2937,9 @@
$(error MTD device is no longer supported and thus BOARD_NAND_SPARE_SIZE is deprecated.)
endif
+recovery_intermediates := $(call intermediates-dir-for,PACKAGING,recovery)
+$(eval $(call write-partition-file-list,$(recovery_intermediates)/file_list.txt,$(TARGET_RECOVERY_OUT),$(INTERNAL_RECOVERYIMAGE_FILES)))
+
# -----------------------------------------------------------------
# Build debug ramdisk and debug boot image.
@@ -5173,6 +5177,8 @@
# Run apex_sepolicy_tests for all installed APEXes
ifeq (,$(TARGET_BUILD_UNBUNDLED))
+# TODO(b/353896817) apex_sepolicy_tests supports only ext4
+ifeq (ext4,$(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE))
intermediate := $(call intermediates-dir-for,PACKAGING,apex_sepolicy_tests)
apex_dirs := \
$(TARGET_OUT)/apex/% \
@@ -5212,6 +5218,7 @@
apex_files :=
intermediate :=
+endif # PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE
endif # TARGET_BUILD_UNBUNDLED
# -----------------------------------------------------------------
@@ -5680,6 +5687,7 @@
brotli \
bsdiff \
build_image \
+ build_mixed_kernels_ramdisk_host \
build_super_image \
build_verity_metadata \
build_verity_tree \
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 7c6b74c..44e2398 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -48,9 +48,11 @@
RECOVERY_FSTAB_VERSION := 2
$(call soong_config_set, recovery, recovery_api_version, $(RECOVERY_API_VERSION))
$(call soong_config_set, recovery, recovery_fstab_version, $(RECOVERY_FSTAB_VERSION))
-$(call soong_config_set, recovery, target_recovery_ui_lib, $(TARGET_RECOVERY_UI_LIB))
$(call soong_config_set_bool, recovery, target_userimages_use_f2fs, $(if $(TARGET_USERIMAGES_USE_F2FS),true,false))
$(call soong_config_set_bool, recovery, has_board_cacheimage_partition_size, $(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),true,false))
+ifdef TARGET_RECOVERY_UI_LIB
+ $(call soong_config_set_string_list, recovery, target_recovery_ui_lib, $(TARGET_RECOVERY_UI_LIB))
+endif
# For Sanitizers
$(call soong_config_set_bool,ANDROID,ASAN_ENABLED,$(if $(filter address,$(SANITIZE_TARGET)),true,false))
@@ -202,6 +204,19 @@
# Required as platform_bootclasspath is using this namespace
$(call soong_config_set,bootclasspath,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+
+# Add ondeviceintelligence module build flag to soong
+ifeq (true,$(RELEASE_ONDEVICE_INTELLIGENCE_MODULE))
+ $(call soong_config_set,ANDROID,release_ondevice_intelligence_module,true)
+ # Required as platform_bootclasspath is using this namespace
+ $(call soong_config_set,bootclasspath,release_ondevice_intelligence_module,true)
+
+else
+ $(call soong_config_set,ANDROID,release_ondevice_intelligence_platform,true)
+ $(call soong_config_set,bootclasspath,release_ondevice_intelligence_platform,true)
+
+endif
+
# Add uprobestats build flag to soong
$(call soong_config_set,ANDROID,release_uprobestats_module,$(RELEASE_UPROBESTATS_MODULE))
# Add uprobestats file move flags to soong, for both platform and module
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index ccb502c..fd9dc9b 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -24,10 +24,11 @@
# -----------------------------------------------------------------
# Aconfig Flags
-# Create a summary file of build flags for each partition
+# Create a summary file of build flags for a single partition
# $(1): built aconfig flags file (out)
# $(2): installed aconfig flags file (out)
# $(3): the partition (in)
+# $(4): input aconfig files for the partition (in)
define generate-partition-aconfig-flag-file
$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
$(eval $(strip $(1)): PRIVATE_IN := $(strip $(4)))
@@ -35,7 +36,8 @@
mkdir -p $$(dir $$(PRIVATE_OUT))
$$(if $$(PRIVATE_IN), \
$$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
- --filter container:$(strip $(3)) \
+ --filter container:$(strip $(3))+state:ENABLED \
+ --filter container:$(strip $(3))+permission:READ_WRITE \
$$(addprefix --cache ,$$(PRIVATE_IN)), \
echo -n > $$(PRIVATE_OUT) \
)
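
The two --filter arguments above form a union: aconfig dump keeps a flag that matches either filter, and each filter AND's its own conditions, so a partition summary now retains a flag only if it is ENABLED or READ_WRITE in that container (see test_dump_multiple_filters later in this change). A sketch of the resulting predicate, with a hypothetical flag type standing in for the protobuf message:

```python
from dataclasses import dataclass


@dataclass
class Flag:
    container: str
    state: str       # 'ENABLED' or 'DISABLED'
    permission: str  # 'READ_ONLY' or 'READ_WRITE'


def keep_for_partition(flag: Flag, partition: str) -> bool:
    # Each --filter AND's its conditions; multiple --filter flags are OR'ed.
    if flag.container != partition:
        return False
    return flag.state == 'ENABLED' or flag.permission == 'READ_WRITE'


assert keep_for_partition(Flag('system', 'DISABLED', 'READ_WRITE'), 'system')
assert not keep_for_partition(Flag('system', 'DISABLED', 'READ_ONLY'), 'system')
```
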
diff --git a/core/product_config.mk b/core/product_config.mk
index 692e375..f93b63c 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -468,17 +468,26 @@
$(eval SANITIZER.$(TARGET_PRODUCT).$(m).CONFIG := $(cf))))
_psmc_modules :=
-# Reset ADB keys for non-debuggable builds
-ifeq (,$(filter eng userdebug,$(TARGET_BUILD_VARIANT)))
+# Reset ADB keys. If RELEASE_BUILD_USE_VARIANT_FLAGS is set look for
+# the value of a dedicated flag. Otherwise check if build variant is
+# non-debuggable.
+ifneq (,$(RELEASE_BUILD_USE_VARIANT_FLAGS))
+ifneq (,$(RELEASE_BUILD_PURGE_PRODUCT_ADB_KEYS))
PRODUCT_ADB_KEYS :=
endif
+else ifeq (,$(filter eng userdebug,$(TARGET_BUILD_VARIANT)))
+ PRODUCT_ADB_KEYS :=
+endif
+
ifneq ($(filter-out 0 1,$(words $(PRODUCT_ADB_KEYS))),)
$(error Only one file may be in PRODUCT_ADB_KEYS: $(PRODUCT_ADB_KEYS))
endif
# Show a warning wall of text if non-compliance-GSI products set this option.
ifdef PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT
- ifeq (,$(filter gsi_arm gsi_arm64 gsi_x86 gsi_x86_64 gsi_car_arm64 gsi_car_x86_64 gsi_tv_arm gsi_tv_arm64,$(PRODUCT_NAME)))
+ ifeq (,$(filter gsi_arm gsi_arm64 gsi_arm64_soong_system gsi_x86 gsi_x86_64 \
+ gsi_x86_64_soong_system gsi_car_arm64 gsi_car_x86_64 \
+ gsi_tv_arm gsi_tv_arm64,$(PRODUCT_NAME)))
$(warning PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT is set but \
PRODUCT_NAME ($(PRODUCT_NAME)) doesn't look like a GSI for compliance \
testing. This is a special configuration for compliance GSI, so do make \
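
The ADB-key conditional above keeps the old variant-based behaviour as a fallback while letting release configurations opt into a dedicated purge flag. A sketch of the decision table, assuming an empty string stands for an unset make variable:

```python
# Inputs model the make variables of the same names.
def purge_product_adb_keys(release_build_use_variant_flags,
                           release_build_purge_product_adb_keys,
                           target_build_variant):
    if release_build_use_variant_flags:
        # Variant-flag mode: only the dedicated purge flag matters.
        return bool(release_build_purge_product_adb_keys)
    # Legacy mode: purge for any variant that is not eng or userdebug.
    return target_build_variant not in ('eng', 'userdebug')


assert purge_product_adb_keys('', '', 'user')
assert not purge_product_adb_keys('', '', 'userdebug')
assert purge_product_adb_keys('true', 'true', 'userdebug')
assert not purge_product_adb_keys('true', '', 'user')
```
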
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index df1cf2d..ab9227f 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -224,30 +224,6 @@
include $(BUILD_SYSTEM)/link_type.mk
endif # !LOCAL_IS_HOST_MODULE
-ifeq (,$(filter tests,$(LOCAL_MODULE_TAGS)))
- ifdef LOCAL_SOONG_DEVICE_RRO_DIRS
- $(call append_enforce_rro_sources, \
- $(my_register_name), \
- false, \
- $(LOCAL_FULL_MANIFEST_FILE), \
- $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
- $(LOCAL_SOONG_DEVICE_RRO_DIRS), \
- vendor \
- )
- endif
-
- ifdef LOCAL_SOONG_PRODUCT_RRO_DIRS
- $(call append_enforce_rro_sources, \
- $(my_register_name), \
- false, \
- $(LOCAL_FULL_MANIFEST_FILE), \
- $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
- $(LOCAL_SOONG_PRODUCT_RRO_DIRS), \
- product \
- )
- endif
-endif
-
ifdef LOCAL_PREBUILT_COVERAGE_ARCHIVE
my_coverage_dir := $(TARGET_OUT_COVERAGE)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path))
my_coverage_copy_pairs := $(foreach f,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(f):$(my_coverage_dir)/$(notdir $(f)))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index f3d1606..a007888 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -194,6 +194,8 @@
$(call add_json_str, SystemDlkmPath, $(TARGET_COPY_OUT_SYSTEM_DLKM))
$(call add_json_str, OemPath, $(TARGET_COPY_OUT_OEM))
$(call add_json_bool, MinimizeJavaDebugInfo, $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
+$(call add_json_str, RecoveryPath, $(TARGET_COPY_OUT_RECOVERY))
+$(call add_json_bool, BuildingRecoveryImage, $(BUILDING_RECOVERY_IMAGE))
$(call add_json_bool, UseGoma, $(filter-out false,$(USE_GOMA)))
$(call add_json_bool, UseRBE, $(filter-out false,$(USE_RBE)))
@@ -402,6 +404,7 @@
$(call add_json_str, BoardAvbAlgorithm, $(BOARD_AVB_$(image_type)_ALGORITHM)) \
$(call add_json_str, BoardAvbRollbackIndex, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX)) \
$(call add_json_str, BoardAvbRollbackIndexLocation, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX_LOCATION)) \
+ $(call add_json_str, BoardAvbAddHashtreeFooterArgs, $(BOARD_AVB_$(image_type)_ADD_HASHTREE_FOOTER_ARGS)) \
$(call add_json_str, ProductBaseFsPath, $(PRODUCT_$(image_type)_BASE_FS_PATH)) \
$(call add_json_str, ProductHeadroom, $(PRODUCT_$(image_type)_HEADROOM)) \
$(call add_json_str, ProductVerityPartition, $(PRODUCT_$(image_type)_VERITY_PARTITION)) \
@@ -449,6 +452,25 @@
$(call add_json_list, InternalBootconfig, $(INTERNAL_BOOTCONFIG))
$(call add_json_str, InternalBootconfigFile, $(INTERNAL_BOOTCONFIG_FILE))
+ # super image stuff
+ $(call add_json_bool, ProductUseDynamicPartitions, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITIONS)))
+ $(call add_json_bool, ProductRetrofitDynamicPartitions, $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)))
+ $(call add_json_bool, ProductBuildSuperPartition, $(filter true,$(PRODUCT_BUILD_SUPER_PARTITION)))
+ $(call add_json_str, BoardSuperPartitionSize, $(BOARD_SUPER_PARTITION_SIZE))
+ $(call add_json_str, BoardSuperPartitionMetadataDevice, $(BOARD_SUPER_PARTITION_METADATA_DEVICE))
+ $(call add_json_list, BoardSuperPartitionBlockDevices, $(BOARD_SUPER_PARTITION_BLOCK_DEVICES))
+ $(call add_json_map, BoardSuperPartitionGroups)
+ $(foreach group, $(BOARD_SUPER_PARTITION_GROUPS), \
+ $(call add_json_map, $(group)) \
+ $(call add_json_str, GroupSize, $(BOARD_$(call to-upper,$(group))_SIZE)) \
+ $(if $(BOARD_$(call to-upper,$(group))_PARTITION_LIST), \
+ $(call add_json_list, PartitionList, $(BOARD_$(call to-upper,$(group))_PARTITION_LIST))) \
+ $(call end_json_map))
+ $(call end_json_map)
+ $(call add_json_bool, ProductVirtualAbOta, $(filter true,$(PRODUCT_VIRTUAL_AB_OTA)))
+ $(call add_json_bool, ProductVirtualAbOtaRetrofit, $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT)))
+ $(call add_json_bool, AbOtaUpdater, $(filter true,$(AB_OTA_UPDATER)))
+
# Avb (android verified boot) stuff
$(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE)))
$(call add_json_str, BoardAvbAlgorithm, $(BOARD_AVB_ALGORITHM))
@@ -486,6 +508,10 @@
$(call add_json_bool, BuildingOdmDlkmImage, $(BUILDING_ODM_DLKM_IMAGE))
$(call add_json_list, OdmKernelModules, $(BOARD_ODM_KERNEL_MODULES))
$(call add_json_str, OdmKernelBlocklistFile, $(BOARD_ODM_KERNEL_MODULES_BLOCKLIST_FILE))
+ $(call add_json_list, VendorRamdiskKernelModules, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES))
+ $(call add_json_str, VendorRamdiskKernelBlocklistFile, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES_BLOCKLIST_FILE))
+ $(call add_json_list, VendorRamdiskKernelLoadModules, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES_LOAD))
+ $(call add_json_str, VendorRamdiskKernelOptionsFile, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES_OPTIONS_FILE))
# Used to generate /vendor/build.prop
$(call add_json_list, BoardInfoFiles, $(if $(TARGET_BOARD_INFO_FILES),$(TARGET_BOARD_INFO_FILES),$(firstword $(TARGET_BOARD_INFO_FILE) $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt))))
@@ -493,6 +519,12 @@
$(call add_json_list, ProductCopyFiles, $(PRODUCT_COPY_FILES))
+ # Used to generate fsv meta
+ $(call add_json_bool, ProductFsverityGenerateMetadata, $(PRODUCT_FSVERITY_GENERATE_METADATA))
+
+ # Used to generate recovery partition
+ $(call add_json_str, TargetScreenDensity, $(TARGET_SCREEN_DENSITY))
+
$(call end_json_map)
# For converting vintf_data
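
The new super-image block serializes the board's dynamic-partition configuration into the soong config JSON, one nested map per BOARD_SUPER_PARTITION_GROUPS entry. Roughly the shape it produces; the key names come from the add_json_* calls above, the group name and values are made up for illustration:

```python
# Illustrative fragment only; a real board defines its own group and sizes.
soong_config_fragment = {
    "ProductUseDynamicPartitions": True,
    "ProductBuildSuperPartition": True,
    "BoardSuperPartitionSize": "9126805504",
    "BoardSuperPartitionGroups": {
        "example_dynamic_partitions": {
            "GroupSize": "9122611200",
            "PartitionList": ["system", "system_ext", "product", "vendor"],
        },
    },
    "ProductVirtualAbOta": True,
    "AbOtaUpdater": True,
}
```
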
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 473296e..dcde71b 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -184,7 +184,7 @@
endif
BUILD_FINGERPRINT_FILE := $(PRODUCT_OUT)/build_fingerprint.txt
-ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE) && grep " " $(BUILD_FINGERPRINT_FILE)))
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE).tmp && (if ! cmp -s $(BUILD_FINGERPRINT_FILE).tmp $(BUILD_FINGERPRINT_FILE); then mv $(BUILD_FINGERPRINT_FILE).tmp $(BUILD_FINGERPRINT_FILE); else rm $(BUILD_FINGERPRINT_FILE).tmp; fi) && grep " " $(BUILD_FINGERPRINT_FILE)))
$(error BUILD_FINGERPRINT cannot contain spaces: "$(file <$(BUILD_FINGERPRINT_FILE))")
endif
BUILD_FINGERPRINT_FROM_FILE := $$(cat $(BUILD_FINGERPRINT_FILE))
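
The reworked shell fragment writes the fingerprint to a temporary file and only moves it over the real file when the contents differ, presumably so an unchanged fingerprint does not get a fresh timestamp; the space check is unchanged. The same write-if-changed pattern as a small sketch:

```python
import filecmp
import os


def write_if_changed(path, contents):
    # Write to a temp file, then install it only when the contents changed.
    tmp = path + '.tmp'
    with open(tmp, 'w') as f:
        f.write(contents)
    if not os.path.exists(path) or not filecmp.cmp(tmp, path, shallow=False):
        os.replace(tmp, path)  # new or changed: install the fresh copy
    else:
        os.remove(tmp)         # unchanged: keep the old file and its mtime
```
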
diff --git a/core/version_util.mk b/core/version_util.mk
index 1d8c643..ddcbda2 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -23,6 +23,7 @@
# PLATFORM_DISPLAY_VERSION
# PLATFORM_SDK_VERSION
# PLATFORM_SDK_EXTENSION_VERSION
+# PLATFORM_BASE_SDK_EXTENSION_VERSION
# PLATFORM_VERSION_CODENAME
# DEFAULT_APP_TARGET_SDK
# BUILD_ID
@@ -73,8 +74,16 @@
PLATFORM_SDK_EXTENSION_VERSION := $(RELEASE_PLATFORM_SDK_EXTENSION_VERSION)
.KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION
-# This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
-PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION)
+ifdef PLATFORM_BASE_SDK_EXTENSION_VERSION
+ $(error Do not set PLATFORM_BASE_SDK_EXTENSION_VERSION directly. Use RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION. value: $(PLATFORM_BASE_SDK_EXTENSION_VERSION))
+endif
+ifdef RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION
+ # This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
+ PLATFORM_BASE_SDK_EXTENSION_VERSION := $(RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION)
+else
+  # Fall back to PLATFORM_SDK_EXTENSION_VERSION if RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION is undefined.
+ PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION)
+endif
.KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
ifdef PLATFORM_VERSION_CODENAME
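
The new guard forbids setting PLATFORM_BASE_SDK_EXTENSION_VERSION directly and sources it from the release flag when present, falling back to PLATFORM_SDK_EXTENSION_VERSION otherwise. The same resolution as a sketch, with make variables modelled as a plain dict (absent key == undefined variable):

```python
def resolve_base_sdk_extension_version(variables):
    if 'PLATFORM_BASE_SDK_EXTENSION_VERSION' in variables:
        raise ValueError('Do not set PLATFORM_BASE_SDK_EXTENSION_VERSION directly. '
                         'Use RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION.')
    # Prefer the release flag; fall back to the current extension version.
    return variables.get('RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION',
                         variables['PLATFORM_SDK_EXTENSION_VERSION'])


assert resolve_base_sdk_extension_version(
    {'PLATFORM_SDK_EXTENSION_VERSION': '12'}) == '12'
assert resolve_base_sdk_extension_version(
    {'PLATFORM_SDK_EXTENSION_VERSION': '12',
     'RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION': '11'}) == '11'
```
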
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 90a6c2c..a78c023 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -316,6 +316,14 @@
endif
+# When we release ondeviceintelligence in neuralnetworks module
+ifneq ($(RELEASE_ONDEVICE_INTELLIGENCE_MODULE),true)
+ PRODUCT_PACKAGES += \
+ framework-ondeviceintelligence-platform
+
+endif
+
+
# When we release uprobestats module
ifeq ($(RELEASE_UPROBESTATS_MODULE),true)
PRODUCT_PACKAGES += \
diff --git a/target/product/build_variables.mk b/target/product/build_variables.mk
index 697a604..c936911 100644
--- a/target/product/build_variables.mk
+++ b/target/product/build_variables.mk
@@ -20,6 +20,12 @@
# Control libbinder client caching
$(call soong_config_set, libbinder, release_libbinder_client_cache, $(RELEASE_LIBBINDER_CLIENT_CACHE))
+# Control caching while adding service in libbinder cache
+$(call soong_config_set, libbinder, release_libbinder_addservice_cache, $(RELEASE_LIBBINDER_ADDSERVICE_CACHE))
+
+# Remove static list in libbinder cache
+$(call soong_config_set, libbinder, release_libbinder_remove_cache_static_list, $(RELEASE_LIBBINDER_REMOVE_CACHE_STATIC_LIST))
+
# Use the configured release of sqlite
$(call soong_config_set, libsqlite3, release_package_libsqlite3, $(RELEASE_PACKAGE_LIBSQLITE3))
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 8665243..33891d7 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -102,6 +102,18 @@
framework-platformcrashrecovery \
endif
+
+# When we release ondeviceintelligence in NeuralNetworks module
+ifeq ($(RELEASE_ONDEVICE_INTELLIGENCE_MODULE),true)
+ PRODUCT_APEX_BOOT_JARS += \
+ com.android.neuralnetworks:framework-ondeviceintelligence \
+
+else
+ PRODUCT_BOOT_JARS += \
+ framework-ondeviceintelligence-platform \
+
+endif
+
# Check if the build supports NFC apex or not
ifeq ($(RELEASE_PACKAGE_NFC_STACK),NfcNci)
PRODUCT_BOOT_JARS += \
@@ -147,6 +159,13 @@
endif
+# When we release ondeviceintelligence in NeuralNetworks module
+ifeq ($(RELEASE_ONDEVICE_INTELLIGENCE_MODULE),true)
+ PRODUCT_APEX_SYSTEM_SERVER_JARS += \
+ com.android.neuralnetworks:service-ondeviceintelligence
+
+endif
+
ifeq ($(RELEASE_AVF_ENABLE_LLPVM_CHANGES),true)
PRODUCT_APEX_SYSTEM_SERVER_JARS += com.android.virt:service-virtualization
endif
diff --git a/target/product/generic/Android.bp b/target/product/generic/Android.bp
index efa1639..a4a20b4 100644
--- a/target/product/generic/Android.bp
+++ b/target/product/generic/Android.bp
@@ -726,6 +726,13 @@
default: [
"framework-platformcrashrecovery", // base_system
],
+ }) + select(release_flag("RELEASE_ONDEVICE_INTELLIGENCE_MODULE"), {
+ true: [
+ "com.android.neuralnetworks", // base_system (RELEASE_ONDEVICE_INTELLIGENCE_MODULE)
+ ],
+ default: [
+ "framework-ondeviceintelligence-platform", // base_system
+ ],
}) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), {
"true": [
"com.android.profiling", // base_system (RELEASE_PACKAGE_PROFILING_MODULE)
@@ -885,6 +892,14 @@
}),
},
},
+ arch: {
+ arm64: {
+ deps: [
+ "libclang_rt.hwasan",
+ "libc_hwasan",
+ ],
+ },
+ },
}
android_system_image {
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 39428d2..f00c38c 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -82,6 +82,7 @@
# Additional settings used in all GSI builds
PRODUCT_PRODUCT_PROPERTIES += \
ro.crypto.metadata_init_delete_all_keys.enabled=false \
+ debug.codec2.bqpool_dealloc_after_stop=1 \
# Window Extensions
ifneq ($(PRODUCT_IS_ATV),true)
diff --git a/teams/Android.bp b/teams/Android.bp
index 6307f5c..a2b0d14 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -3367,6 +3367,13 @@
}
team {
+ name: "trendy_team_aaos_display_safety_triage",
+
+ // go/trendy/manage/engineers/6522093663780864
+ trendy_team_id: "6522093663780864",
+}
+
+team {
name: "trendy_team_camera_htc_lg_qualcomm",
// go/trendy/manage/engineers/6332099480911872
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
index 043a956..a7f0a4f 100644
--- a/tools/aconfig/TEST_MAPPING
+++ b/tools/aconfig/TEST_MAPPING
@@ -102,9 +102,7 @@
{
// aconfig_storage file java integration tests
"name": "aconfig_storage_file.test.java"
- }
- ],
- "postsubmit": [
+ },
{
// aconfig_storage read functional test
"name": "aconfig_storage_read_functional"
diff --git a/tools/aconfig/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs
index 7a9c382..ae18679 100644
--- a/tools/aconfig/aconfig/src/codegen/cpp.rs
+++ b/tools/aconfig/aconfig/src/codegen/cpp.rs
@@ -127,6 +127,26 @@
flag_ids: HashMap<String, u16>,
rw_count: &mut i32,
) -> ClassElement {
+ let no_assigned_offset =
+ (pf.container() == "system" || pf.container() == "vendor" || pf.container() == "product")
+ && pf.permission() == ProtoFlagPermission::READ_ONLY
+ && pf.state() == ProtoFlagState::DISABLED;
+
+ let flag_offset = match flag_ids.get(pf.name()) {
+ Some(offset) => offset,
+ None => {
+ // System/vendor/product RO+disabled flags have no offset in storage files.
+ // Assign placeholder value.
+ if no_assigned_offset {
+ &0
+ }
+ // All other flags _must_ have an offset.
+ else {
+                panic!("missing flag offset for {}", pf.name());
+ }
+ }
+ };
+
ClassElement {
readwrite_idx: if pf.permission() == ProtoFlagPermission::READ_WRITE {
let index = *rw_count;
@@ -144,7 +164,7 @@
},
flag_name: pf.name().to_string(),
flag_macro: pf.name().to_uppercase(),
- flag_offset: *flag_ids.get(pf.name()).expect("values checked at flag parse time"),
+ flag_offset: *flag_offset,
device_config_namespace: pf.namespace().to_string(),
device_config_flag: codegen::create_device_config_ident(package, pf.name())
.expect("values checked at flag parse time"),
diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
index 2f2889c..7aff4e9 100644
--- a/tools/aconfig/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -158,6 +158,27 @@
) -> FlagElement {
let device_config_flag = codegen::create_device_config_ident(package, pf.name())
.expect("values checked at flag parse time");
+
+ let no_assigned_offset =
+ (pf.container() == "system" || pf.container() == "vendor" || pf.container() == "product")
+ && pf.permission() == ProtoFlagPermission::READ_ONLY
+ && pf.state() == ProtoFlagState::DISABLED;
+
+ let flag_offset = match flag_offsets.get(pf.name()) {
+ Some(offset) => offset,
+ None => {
+ // System/vendor/product RO+disabled flags have no offset in storage files.
+ // Assign placeholder value.
+ if no_assigned_offset {
+ &0
+ }
+ // All other flags _must_ have an offset.
+ else {
+                panic!("missing flag offset for {}", pf.name());
+ }
+ }
+ };
+
FlagElement {
container: pf.container().to_string(),
default_value: pf.state() == ProtoFlagState::ENABLED,
@@ -165,7 +186,7 @@
device_config_flag,
flag_name: pf.name().to_string(),
flag_name_constant_suffix: pf.name().to_ascii_uppercase(),
- flag_offset: *flag_offsets.get(pf.name()).expect("didnt find package offset :("),
+ flag_offset: *flag_offset,
is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
method_name: format_java_method_name(pf.name()),
properties: format_property_name(pf.namespace()),
@@ -522,7 +543,6 @@
import android.compat.annotation.UnsupportedAppUsage;
import android.os.Build;
import android.os.flagging.PlatformAconfigPackageInternal;
- import android.os.flagging.AconfigStorageReadException;
import android.util.Log;
/** @hide */
public final class FeatureFlagsImpl implements FeatureFlags {
@@ -535,38 +555,16 @@
private void init() {
try {
PlatformAconfigPackageInternal reader = PlatformAconfigPackageInternal.load("system", "com.android.aconfig.test", 0x5081CE7221C77064L);
- AconfigStorageReadException error = reader.getException();
- if (error == null) {
- disabledRw = reader.getBooleanFlagValue(1);
- disabledRwExported = reader.getBooleanFlagValue(2);
- enabledRw = reader.getBooleanFlagValue(8);
- disabledRwInOtherNamespace = reader.getBooleanFlagValue(3);
- } else if (Build.VERSION.SDK_INT > 35 && error.getErrorCode() == 5 /* fingerprint doesn't match*/) {
- disabledRw = reader.getBooleanFlagValue("disabled_rw", false);
- disabledRwExported = reader.getBooleanFlagValue("disabled_rw_exported", false);
- enabledRw = reader.getBooleanFlagValue("enabled_rw", true);
- disabledRwInOtherNamespace = reader.getBooleanFlagValue("disabled_rw_in_other_namespace", false);
- } else {
- if (error.getMessage() != null) {
- Log.e(TAG, error.getMessage());
- } else {
- Log.e(TAG, "Encountered a null AconfigStorageReadException");
- }
- }
+ disabledRw = reader.getBooleanFlagValue(0);
+ disabledRwExported = reader.getBooleanFlagValue(1);
+ enabledRw = reader.getBooleanFlagValue(7);
+ disabledRwInOtherNamespace = reader.getBooleanFlagValue(2);
} catch (Exception e) {
- if (e.getMessage() != null) {
- Log.e(TAG, e.getMessage());
- } else {
- Log.e(TAG, "Encountered a null Exception");
- }
+ Log.e(TAG, e.toString());
} catch (NoClassDefFoundError e) {
// for mainline module running on older devices.
            // This should be replaced with a version check, after the version bump.
- if (e.getMessage() != null) {
- Log.e(TAG, e.getMessage());
- } else {
- Log.e(TAG, "Encountered a null NoClassDefFoundError");
- }
+ Log.e(TAG, e.toString());
}
isCached = true;
}
diff --git a/tools/aconfig/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs
index 82a6ebc..2bf565a 100644
--- a/tools/aconfig/aconfig/src/codegen/rust.rs
+++ b/tools/aconfig/aconfig/src/codegen/rust.rs
@@ -88,6 +88,27 @@
impl TemplateParsedFlag {
#[allow(clippy::nonminimal_bool)]
fn new(package: &str, flag_offsets: HashMap<String, u16>, pf: &ProtoParsedFlag) -> Self {
+ let no_assigned_offset = (pf.container() == "system"
+ || pf.container() == "vendor"
+ || pf.container() == "product")
+ && pf.permission() == ProtoFlagPermission::READ_ONLY
+ && pf.state() == ProtoFlagState::DISABLED;
+
+ let flag_offset = match flag_offsets.get(pf.name()) {
+ Some(offset) => offset,
+ None => {
+ // System/vendor/product RO+disabled flags have no offset in storage files.
+ // Assign placeholder value.
+ if no_assigned_offset {
+ &0
+ }
+ // All other flags _must_ have an offset.
+ else {
+                panic!("missing flag offset for {}", pf.name());
+ }
+ }
+ };
+
Self {
readwrite: pf.permission() == ProtoFlagPermission::READ_WRITE,
default_value: match pf.state() {
@@ -96,7 +117,7 @@
},
name: pf.name().to_string(),
container: pf.container().to_string(),
- flag_offset: *flag_offsets.get(pf.name()).expect("didnt find package offset :("),
+ flag_offset: *flag_offset,
device_config_namespace: pf.namespace().to_string(),
device_config_flag: codegen::create_device_config_ident(package, pf.name())
.expect("values checked at flag parse time"),
@@ -287,7 +308,7 @@
.and_then(|package_offset| {
match package_offset {
Some(offset) => {
- get_boolean_flag_value(&flag_val_map, offset + 1)
+ get_boolean_flag_value(&flag_val_map, offset + 0)
.map_err(|err| format!("failed to get flag: {err}"))
},
None => {
@@ -327,7 +348,7 @@
.and_then(|package_offset| {
match package_offset {
Some(offset) => {
- get_boolean_flag_value(&flag_val_map, offset + 2)
+ get_boolean_flag_value(&flag_val_map, offset + 1)
.map_err(|err| format!("failed to get flag: {err}"))
},
None => {
@@ -367,7 +388,7 @@
.and_then(|package_offset| {
match package_offset {
Some(offset) => {
- get_boolean_flag_value(&flag_val_map, offset + 3)
+ get_boolean_flag_value(&flag_val_map, offset + 2)
.map_err(|err| format!("failed to get flag: {err}"))
},
None => {
@@ -408,7 +429,7 @@
.and_then(|package_offset| {
match package_offset {
Some(offset) => {
- get_boolean_flag_value(&flag_val_map, offset + 8)
+ get_boolean_flag_value(&flag_val_map, offset + 7)
.map_err(|err| format!("failed to get flag: {err}"))
},
None => {
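
The same placeholder logic is added to the C++, Java, and Rust generators: a system/vendor/product flag that is read-only and disabled has no entry in the flag-id map, so it gets a placeholder offset of 0 (its generated accessor is hard-coded to the default and never reads storage), while a missing offset for any other flag is still fatal. A compact sketch of that lookup:

```python
def flag_offset(flag_ids, name, container, state, permission):
    # flag_ids: dict of name -> assigned storage offset.
    if name in flag_ids:
        return flag_ids[name]
    if (container in ('system', 'vendor', 'product')
            and permission == 'READ_ONLY' and state == 'DISABLED'):
        return 0  # placeholder, never used at runtime
    raise AssertionError('missing flag offset for ' + name)
```
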
diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
index c1989a2..5036bc1 100644
--- a/tools/aconfig/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -423,17 +423,28 @@
{
assert!(parsed_flags_iter.clone().tuple_windows().all(|(a, b)| a.name() <= b.name()));
let mut flag_ids = HashMap::new();
- for (id_to_assign, pf) in (0_u32..).zip(parsed_flags_iter) {
+ let mut flag_idx = 0;
+ for pf in parsed_flags_iter {
if package != pf.package() {
return Err(anyhow::anyhow!("encountered a flag not in current package"));
}
// put a cap on how many flags a package can contain to 65535
- if id_to_assign > u16::MAX as u32 {
+ if flag_idx > u16::MAX as u32 {
return Err(anyhow::anyhow!("the number of flags in a package cannot exceed 65535"));
}
- flag_ids.insert(pf.name().to_string(), id_to_assign as u16);
+ // Exclude system/vendor/product flags that are RO+disabled.
+ let should_filter_container = pf.container == Some("vendor".to_string())
+ || pf.container == Some("system".to_string())
+ || pf.container == Some("product".to_string());
+ if !(should_filter_container
+ && pf.state == Some(ProtoFlagState::DISABLED.into())
+ && pf.permission == Some(ProtoFlagPermission::READ_ONLY.into()))
+ {
+ flag_ids.insert(pf.name().to_string(), flag_idx as u16);
+ flag_idx += 1;
+ }
}
Ok(flag_ids)
}
@@ -891,6 +902,30 @@
}
#[test]
+ fn test_dump_multiple_filters() {
+ let input = parse_test_flags_as_input();
+ let bytes = dump_parsed_flags(
+ vec![input],
+ DumpFormat::Custom("{fully_qualified_name}".to_string()),
+ &["container:system+state:ENABLED", "container:system+permission:READ_WRITE"],
+ false,
+ )
+ .unwrap();
+ let text = std::str::from_utf8(&bytes).unwrap();
+ let expected_flag_list = &[
+ "com.android.aconfig.test.disabled_rw",
+ "com.android.aconfig.test.disabled_rw_exported",
+ "com.android.aconfig.test.disabled_rw_in_other_namespace",
+ "com.android.aconfig.test.enabled_fixed_ro",
+ "com.android.aconfig.test.enabled_fixed_ro_exported",
+ "com.android.aconfig.test.enabled_ro",
+ "com.android.aconfig.test.enabled_ro_exported",
+ "com.android.aconfig.test.enabled_rw",
+ ];
+ assert_eq!(expected_flag_list.map(|s| format!("{}\n", s)).join(""), text);
+ }
+
+ #[test]
fn test_dump_textproto_format_dedup() {
let input = parse_test_flags_as_input();
let input2 = parse_test_flags_as_input();
@@ -952,15 +987,14 @@
let package = find_unique_package(&parsed_flags.parsed_flag).unwrap().to_string();
let flag_ids = assign_flag_ids(&package, parsed_flags.parsed_flag.iter()).unwrap();
let expected_flag_ids = HashMap::from([
- (String::from("disabled_ro"), 0_u16),
- (String::from("disabled_rw"), 1_u16),
- (String::from("disabled_rw_exported"), 2_u16),
- (String::from("disabled_rw_in_other_namespace"), 3_u16),
- (String::from("enabled_fixed_ro"), 4_u16),
- (String::from("enabled_fixed_ro_exported"), 5_u16),
- (String::from("enabled_ro"), 6_u16),
- (String::from("enabled_ro_exported"), 7_u16),
- (String::from("enabled_rw"), 8_u16),
+ (String::from("disabled_rw"), 0_u16),
+ (String::from("disabled_rw_exported"), 1_u16),
+ (String::from("disabled_rw_in_other_namespace"), 2_u16),
+ (String::from("enabled_fixed_ro"), 3_u16),
+ (String::from("enabled_fixed_ro_exported"), 4_u16),
+ (String::from("enabled_ro"), 5_u16),
+ (String::from("enabled_ro_exported"), 6_u16),
+ (String::from("enabled_rw"), 7_u16),
]);
assert_eq!(flag_ids, expected_flag_ids);
}
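
assign_flag_ids now skips system/vendor/product flags that are read-only and disabled, so those flags never consume an index and every remaining flag's offset shifts down, which is also why the generated-code offsets in the Rust, Java, and C++ templates above change. A sketch of the numbering, with hypothetical tuples standing in for the protobuf messages:

```python
def assign_flag_ids(flags):
    """flags: iterable of (name, container, state, permission), sorted by name."""
    ids = {}
    next_id = 0
    for name, container, state, permission in flags:
        if next_id > 0xFFFF:
            raise ValueError('the number of flags in a package cannot exceed 65535')
        skip = (container in ('system', 'vendor', 'product')
                and state == 'DISABLED' and permission == 'READ_ONLY')
        if not skip:
            ids[name] = next_id
            next_id += 1
    return ids


# Mirrors the updated expectation in test_assign_flag_ids: disabled_ro gets no
# id, so every later flag shifts down by one.
ids = assign_flag_ids([
    ('disabled_ro', 'system', 'DISABLED', 'READ_ONLY'),
    ('disabled_rw', 'system', 'DISABLED', 'READ_WRITE'),
    ('enabled_ro', 'system', 'ENABLED', 'READ_ONLY'),
])
assert ids == {'disabled_rw': 0, 'enabled_ro': 1}
```
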
diff --git a/tools/aconfig/aconfig/src/storage/flag_info.rs b/tools/aconfig/aconfig/src/storage/flag_info.rs
index 0b5a67b..0943daa 100644
--- a/tools/aconfig/aconfig/src/storage/flag_info.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_info.rs
@@ -16,7 +16,7 @@
use crate::commands::assign_flag_ids;
use crate::storage::FlagPackage;
-use aconfig_protos::ProtoFlagPermission;
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState};
use aconfig_storage_file::{FlagInfoHeader, FlagInfoList, FlagInfoNode, StorageFileType};
use anyhow::{anyhow, Result};
@@ -36,14 +36,24 @@
packages: &[FlagPackage],
version: u32,
) -> Result<FlagInfoList> {
- // create list
- let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
+ // Exclude system/vendor/product flags that are RO+disabled.
+ let mut filtered_packages = packages.to_vec();
+ if container == "system" || container == "vendor" || container == "product" {
+ for package in filtered_packages.iter_mut() {
+ package.boolean_flags.retain(|b| {
+ !(b.state == Some(ProtoFlagState::DISABLED.into())
+ && b.permission == Some(ProtoFlagPermission::READ_ONLY.into()))
+ });
+ }
+ }
+
+ let num_flags = filtered_packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
let mut is_flag_rw = vec![false; num_flags as usize];
- for pkg in packages.iter() {
+ for pkg in filtered_packages {
let start_index = pkg.boolean_start_index as usize;
let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?;
- for pf in pkg.boolean_flags.iter() {
+ for pf in pkg.boolean_flags {
let fid = flag_ids
.get(pf.name())
.ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?;
diff --git a/tools/aconfig/aconfig/src/storage/flag_table.rs b/tools/aconfig/aconfig/src/storage/flag_table.rs
index ae5a16c..3b245a7 100644
--- a/tools/aconfig/aconfig/src/storage/flag_table.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_table.rs
@@ -16,7 +16,7 @@
use crate::commands::assign_flag_ids;
use crate::storage::FlagPackage;
-use aconfig_protos::ProtoFlagPermission;
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState};
use aconfig_storage_file::{
get_table_size, FlagTable, FlagTableHeader, FlagTableNode, StorageFileType, StoredFlagType,
};
@@ -62,9 +62,19 @@
}
fn create_nodes(package: &FlagPackage, num_buckets: u32) -> Result<Vec<Self>> {
+ // Exclude system/vendor/product flags that are RO+disabled.
+ let mut filtered_package = package.clone();
+ filtered_package.boolean_flags.retain(|f| {
+ !((f.container == Some("system".to_string())
+ || f.container == Some("vendor".to_string())
+ || f.container == Some("product".to_string()))
+ && f.permission == Some(ProtoFlagPermission::READ_ONLY.into())
+ && f.state == Some(ProtoFlagState::DISABLED.into()))
+ });
+
let flag_ids =
- assign_flag_ids(package.package_name, package.boolean_flags.iter().copied())?;
- package
+ assign_flag_ids(package.package_name, filtered_package.boolean_flags.iter().copied())?;
+ filtered_package
.boolean_flags
.iter()
.map(|&pf| {
diff --git a/tools/aconfig/aconfig/src/storage/flag_value.rs b/tools/aconfig/aconfig/src/storage/flag_value.rs
index 065b7e3..3cfa447 100644
--- a/tools/aconfig/aconfig/src/storage/flag_value.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_value.rs
@@ -16,7 +16,7 @@
use crate::commands::assign_flag_ids;
use crate::storage::FlagPackage;
-use aconfig_protos::ProtoFlagState;
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState};
use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType};
use anyhow::{anyhow, Result};
@@ -36,15 +36,22 @@
packages: &[FlagPackage],
version: u32,
) -> Result<FlagValueList> {
- // create list
- let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
-
+ // Exclude system/vendor/product flags that are RO+disabled.
+ let mut filtered_packages = packages.to_vec();
+ if container == "system" || container == "vendor" || container == "product" {
+ for package in filtered_packages.iter_mut() {
+ package.boolean_flags.retain(|b| {
+ !(b.state == Some(ProtoFlagState::DISABLED.into())
+ && b.permission == Some(ProtoFlagPermission::READ_ONLY.into()))
+ });
+ }
+ }
+ let num_flags = filtered_packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
let mut list = FlagValueList {
header: new_header(container, num_flags, version),
booleans: vec![false; num_flags as usize],
};
-
- for pkg in packages.iter() {
+ for pkg in filtered_packages {
let start_index = pkg.boolean_start_index as usize;
let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?;
for pf in pkg.boolean_flags.iter() {
diff --git a/tools/aconfig/aconfig/src/storage/mod.rs b/tools/aconfig/aconfig/src/storage/mod.rs
index 4bc72f7..61e65d1 100644
--- a/tools/aconfig/aconfig/src/storage/mod.rs
+++ b/tools/aconfig/aconfig/src/storage/mod.rs
@@ -27,9 +27,10 @@
flag_info::create_flag_info, flag_table::create_flag_table, flag_value::create_flag_value,
package_table::create_package_table,
};
-use aconfig_protos::{ProtoParsedFlag, ProtoParsedFlags};
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags};
use aconfig_storage_file::StorageFileType;
+#[derive(Clone)]
pub struct FlagPackage<'a> {
pub package_name: &'a str,
pub package_id: u32,
@@ -73,6 +74,17 @@
if index == packages.len() {
packages.push(FlagPackage::new(parsed_flag.package(), index as u32));
}
+
+ // Exclude system/vendor/product flags that are RO+disabled.
+ if (parsed_flag.container == Some("system".to_string())
+ || parsed_flag.container == Some("vendor".to_string())
+ || parsed_flag.container == Some("product".to_string()))
+ && parsed_flag.permission == Some(ProtoFlagPermission::READ_ONLY.into())
+ && parsed_flag.state == Some(ProtoFlagState::DISABLED.into())
+ {
+ continue;
+ }
+
packages[index].insert(parsed_flag);
}
}
diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
index 3fc444a..b605e72 100644
--- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
@@ -11,7 +11,6 @@
{{ -else }}
import android.os.flagging.AconfigPackageInternal;
{{ -endif }}
-import android.os.flagging.AconfigStorageReadException;
import android.util.Log;
{{ -endif }}
/** @hide */
@@ -32,44 +31,19 @@
{{ -else }}
AconfigPackageInternal reader = AconfigPackageInternal.load("{container}", "{package_name}", {package_fingerprint});
{{ -endif }}
- AconfigStorageReadException error = reader.getException();
- if (error == null) \{
- {{ for namespace_with_flags in namespace_flags }}
- {{ -for flag in namespace_with_flags.flags }}
- {{ -if flag.is_read_write }}
- {flag.method_name} = reader.getBooleanFlagValue({flag.flag_offset});
- {{ endif }}
- {{ -endfor }}
- {{ -endfor }}
- } else if (Build.VERSION.SDK_INT > 35 && error.getErrorCode() == 5 /* fingerprint doesn't match*/) \{
- {{ for namespace_with_flags in namespace_flags }}
- {{ -for flag in namespace_with_flags.flags }}
- {{ -if flag.is_read_write }}
- {flag.method_name} = reader.getBooleanFlagValue("{flag.flag_name}", {flag.default_value});
- {{ -endif }}
- {{ -endfor }}
- {{ -endfor }}
- } else \{
- if (error.getMessage() != null) \{
- Log.e(TAG, error.getMessage());
- } else \{
- Log.e(TAG, "Encountered a null AconfigStorageReadException");
- }
- }
+ {{ -for namespace_with_flags in namespace_flags }}
+ {{ -for flag in namespace_with_flags.flags }}
+ {{ -if flag.is_read_write }}
+ {flag.method_name} = reader.getBooleanFlagValue({flag.flag_offset});
+ {{ -endif }}
+ {{ -endfor }}
+ {{ -endfor }}
} catch (Exception e) \{
- if (e.getMessage() != null) \{
- Log.e(TAG, e.getMessage());
- } else \{
- Log.e(TAG, "Encountered a null Exception");
- }
+ Log.e(TAG, e.toString());
} catch (NoClassDefFoundError e) \{
// for mainline module running on older devices.
        // This should be replaced with a version check, after the version bump.
- if (e.getMessage() != null) \{
- Log.e(TAG, e.getMessage());
- } else \{
- Log.e(TAG, "Encountered a null NoClassDefFoundError");
- }
+ Log.e(TAG, e.toString());
}
isCached = true;
}
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java
index a820970..c2720f9 100644
--- a/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java
@@ -58,10 +58,6 @@
new StorageFileProvider(TestDataUtils.TESTDATA_PATH, TestDataUtils.TESTDATA_PATH);
PackageTable pt = p.getPackageTable("mock.v1");
assertNotNull(pt);
- pt =
- StorageFileProvider.getPackageTable(
- Paths.get(TestDataUtils.TESTDATA_PATH, "mock.v1.package.map"));
- assertNotNull(pt);
FlagTable f = p.getFlagTable("mock.v1");
assertNotNull(f);
FlagValueList v = p.getFlagValueList("mock.v1");
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java
index e6b6db4..d73d9eb 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java
@@ -17,7 +17,6 @@
package android.os.flagging;
import android.aconfig.storage.AconfigStorageException;
-import android.aconfig.storage.FlagTable;
import android.aconfig.storage.FlagValueList;
import android.aconfig.storage.PackageTable;
import android.aconfig.storage.StorageFileProvider;
@@ -41,75 +40,13 @@
*/
public class PlatformAconfigPackageInternal {
- private final FlagTable mFlagTable;
private final FlagValueList mFlagValueList;
- private final int mPackageId;
private final int mPackageBooleanStartOffset;
- private final AconfigStorageReadException mException;
private PlatformAconfigPackageInternal(
- FlagValueList flagValueList,
- FlagTable flagTable,
- int packageBooleanStartOffset,
- int packageId,
- AconfigStorageReadException exception) {
+ FlagValueList flagValueList, int packageBooleanStartOffset) {
this.mFlagValueList = flagValueList;
- this.mFlagTable = flagTable;
this.mPackageBooleanStartOffset = packageBooleanStartOffset;
- this.mPackageId = packageId;
- this.mException = exception;
- }
-
- /**
- * Loads an Aconfig Package from platform Aconfig Storage.
- *
- * <p>This method is intended for internal use only and may be changed or removed without
- * notice.
- *
- * <p>This method loads the specified Aconfig Package from the given container.
- *
- * <p>AconfigStorageException will be stored if there is an error reading from Aconfig Storage.
- * The specific error code can be got using {@link #getException()}.
- *
- * @param container The name of the container.
- * @param packageName The name of the Aconfig package to load.
- * @return An instance of {@link PlatformAconfigPackageInternal}
- * @hide
- */
- @UnsupportedAppUsage
- public static PlatformAconfigPackageInternal load(String container, String packageName) {
- return load(container, packageName, StorageFileProvider.getDefaultProvider());
- }
-
- /** @hide */
- public static PlatformAconfigPackageInternal load(
- String container, String packageName, StorageFileProvider fileProvider) {
- StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
- try {
- PackageTable.Node pNode = fileProvider.getPackageTable(container).get(packageName);
-
- if (pNode == null) {
- return createExceptionInstance(
- AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
- "package "
- + packageName
- + " in container "
- + container
- + " cannot be found on the device");
- }
-
- return new PlatformAconfigPackageInternal(
- fileProvider.getFlagValueList(container),
- fileProvider.getFlagTable(container),
- pNode.getBooleanStartIndex(),
- pNode.getPackageId(),
- null);
-
- } catch (AconfigStorageException e) {
- return createExceptionInstance(e.getErrorCode(), e.getMessage());
- } finally {
- StrictMode.setThreadPolicy(oldPolicy);
- }
}
/**
@@ -118,9 +55,6 @@
* <p>This method is intended for internal use only and may be changed or removed without
* notice.
*
- * <p>AconfigStorageException will be stored if there is an error reading from Aconfig Storage.
- * The specific error code can be got using {@link #getException()}.
- *
* @param container The name of the container.
* @param packageName The name of the Aconfig package.
* @param packageFingerprint The expected fingerprint of the package.
@@ -145,48 +79,40 @@
long packageFingerprint,
StorageFileProvider fileProvider) {
StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
+ PackageTable.Node pNode = null;
+ FlagValueList vList = null;
try {
- PackageTable.Node pNode = fileProvider.getPackageTable(container).get(packageName);
-
- if (pNode == null) {
- return createExceptionInstance(
- AconfigStorageReadException.ERROR_PACKAGE_NOT_FOUND,
- "package "
- + packageName
- + " in container "
- + container
- + " cannot be found on the device");
- }
-
- if (pNode.hasPackageFingerprint()
- && packageFingerprint != pNode.getPackageFingerprint()) {
- return new PlatformAconfigPackageInternal(
- fileProvider.getFlagValueList(container),
- fileProvider.getFlagTable(container),
- pNode.getBooleanStartIndex(),
- pNode.getPackageId(),
- new AconfigStorageReadException(
- AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH,
- "The fingerprint provided for the Aconfig package "
- + packageName
- + " in container "
- + container
- + " does not match"
- + " the fingerprint of the package found on the device."));
- }
-
- return new PlatformAconfigPackageInternal(
- fileProvider.getFlagValueList(container),
- null,
- pNode.getBooleanStartIndex(),
- 0,
- null);
-
+ pNode = fileProvider.getPackageTable(container).get(packageName);
+ vList = fileProvider.getFlagValueList(container);
} catch (AconfigStorageException e) {
- return createExceptionInstance(e.getErrorCode(), e.getMessage());
+ throw new AconfigStorageReadException(e.getErrorCode(), e.toString());
} finally {
StrictMode.setThreadPolicy(oldPolicy);
}
+
+ if (pNode == null || vList == null) {
+ throw new AconfigStorageReadException(
+ AconfigStorageReadException.ERROR_PACKAGE_NOT_FOUND,
+ String.format(
+ "package "
+ + packageName
+ + " in container "
+ + container
+ + " cannot be found on the device"));
+ }
+
+ if (pNode.hasPackageFingerprint() && packageFingerprint != pNode.getPackageFingerprint()) {
+ throw new AconfigStorageReadException(
+ 5, // AconfigStorageReadException.ERROR_FILE_FINGERPRINT_MISMATCH,
+                    String.format(
+                            "The fingerprint provided for the Aconfig package "
+                                    + packageName
+                                    + " in container "
+                                    + container
+                                    + " does not match the fingerprint of the"
+                                    + " package found on the device"));
+ }
+
+ return new PlatformAconfigPackageInternal(vList, pNode.getBooleanStartIndex());
}
/**
@@ -198,10 +124,6 @@
* <p>This method retrieves the value of a flag within the loaded Aconfig package using its
* index. The index is generated at build time and may vary between builds.
*
- * <p>To ensure you are using the correct index, verify that the package's fingerprint matches
- * the expected fingerprint before calling this method. If the fingerprints do not match, use
- * {@link #getBooleanFlagValue(String, boolean)} instead.
- *
* @param index The index of the flag within the package.
* @return The boolean value of the flag.
* @hide
@@ -210,55 +132,4 @@
public boolean getBooleanFlagValue(int index) {
return mFlagValueList.getBoolean(index + mPackageBooleanStartOffset);
}
-
- /**
- * Retrieves the value of a boolean flag using its name.
- *
- * <p>This method is intended for internal use only and may be changed or removed without
- * notice.
- *
- * <p>This method retrieves the value of a flag within the loaded Aconfig package using its
- * name.
- *
- * @param flagName The name of the flag.
- * @param defaultValue The default value to return if the flag is not found.
- * @return The boolean value of the flag.
- * @hide
- */
- @UnsupportedAppUsage
- public boolean getBooleanFlagValue(String flagName, boolean defaultValue) {
- FlagTable.Node fNode = mFlagTable.get(mPackageId, flagName);
- if (fNode == null) {
- return defaultValue;
- }
- return mFlagValueList.getBoolean(fNode.getFlagIndex() + mPackageBooleanStartOffset);
- }
-
- /**
- * Returns any exception that occurred during the loading of the Aconfig package.
- *
- * <p>This method is intended for internal use only and may be changed or removed without
- * notice.
- *
- * @return The exception that occurred, or {@code null} if no exception occurred.
- * @hide
- */
- @UnsupportedAppUsage
- public AconfigStorageReadException getException() {
- return mException;
- }
-
- /**
- * Creates a new {@link PlatformAconfigPackageInternal} instance with an {@link
- * AconfigStorageException}.
- *
- * @param errorCode The error code for the exception.
- * @param message The error message for the exception.
- * @return A new {@link PlatformAconfigPackageInternal} instance with the specified exception.
- */
- private static PlatformAconfigPackageInternal createExceptionInstance(
- int errorCode, String message) {
- return new PlatformAconfigPackageInternal(
- null, null, 0, 0, new AconfigStorageReadException(errorCode, message));
- }
}
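
After this change PlatformAconfigPackageInternal.load() no longer returns an instance that carries a stored exception; failures (unreadable storage, missing package, fingerprint mismatch) are thrown as AconfigStorageReadException, and a successful instance only exposes index-based reads. A behavioural sketch of the new control flow, written in Python with stand-in types; only the error code 5 matches a literal in the Java code, the other names and values are illustrative:

```python
ERROR_PACKAGE_NOT_FOUND = 2          # assumed value, for illustration only
ERROR_FILE_FINGERPRINT_MISMATCH = 5  # matches the hard-coded literal above


class AconfigStorageReadException(Exception):
    def __init__(self, error_code, message):
        super().__init__(message)
        self.error_code = error_code


def load(package_table, flag_values, package_name, expected_fingerprint):
    node = package_table.get(package_name)
    if node is None:
        raise AconfigStorageReadException(
            ERROR_PACKAGE_NOT_FOUND,
            'package %s cannot be found on the device' % package_name)
    if node.has_fingerprint and node.fingerprint != expected_fingerprint:
        raise AconfigStorageReadException(
            ERROR_FILE_FINGERPRINT_MISMATCH,
            'fingerprint mismatch for package %s' % package_name)
    start = node.boolean_start_index
    # Successful loads expose only index-based reads relative to the package start.
    return lambda index: flag_values[start + index]
```
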
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java
index c4a5560..69e224b 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java
+++ b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java
@@ -16,18 +16,17 @@
package android.aconfig.storage.test;
-import static android.aconfig.nano.Aconfig.ENABLED;
-
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
import android.aconfig.DeviceProtos;
+import android.aconfig.nano.Aconfig;
import android.aconfig.nano.Aconfig.parsed_flag;
-import android.aconfig.storage.AconfigStorageException;
import android.aconfig.storage.FlagTable;
import android.aconfig.storage.FlagValueList;
import android.aconfig.storage.PackageTable;
import android.aconfig.storage.StorageFileProvider;
+import android.os.flagging.AconfigStorageReadException;
import android.os.flagging.PlatformAconfigPackageInternal;
import org.junit.Test;
@@ -52,41 +51,9 @@
StorageFileProvider fp = StorageFileProvider.getDefaultProvider();
for (parsed_flag flag : flags) {
-
- String container = flag.container;
- String packageName = flag.package_;
- String flagName = flag.name;
- if (!PLATFORM_CONTAINERS.contains(container)) continue;
-
- PackageTable pTable = fp.getPackageTable(container);
- PackageTable.Node pNode = pTable.get(packageName);
- FlagTable fTable = fp.getFlagTable(container);
- FlagTable.Node fNode = fTable.get(pNode.getPackageId(), flagName);
- FlagValueList fList = fp.getFlagValueList(container);
-
- int index = pNode.getBooleanStartIndex() + fNode.getFlagIndex();
- boolean rVal = fList.getBoolean(index);
-
- PlatformAconfigPackageInternal reader = readerMap.get(packageName);
- if (reader == null) {
- reader = PlatformAconfigPackageInternal.load(container, packageName);
- assertNull(reader.getException());
- readerMap.put(packageName, reader);
+ if (flag.permission == Aconfig.READ_ONLY && flag.state == Aconfig.DISABLED) {
+ continue;
}
- boolean jVal = reader.getBooleanFlagValue(flagName, !rVal);
-
- assertEquals(rVal, jVal);
- }
- }
-
- @Test
- public void testPlatformAconfigPackageInternal_load_with_fingerprint() throws IOException {
- List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
- Map<String, PlatformAconfigPackageInternal> readerMap = new HashMap<>();
- StorageFileProvider fp = StorageFileProvider.getDefaultProvider();
-
- for (parsed_flag flag : flags) {
-
String container = flag.container;
String packageName = flag.package_;
String flagName = flag.name;
@@ -106,7 +73,6 @@
PlatformAconfigPackageInternal reader = readerMap.get(packageName);
if (reader == null) {
reader = PlatformAconfigPackageInternal.load(container, packageName, fingerprint);
- assertNull(reader.getException());
readerMap.put(packageName, reader);
}
boolean jVal = reader.getBooleanFlagValue(fNode.getFlagIndex());
@@ -118,17 +84,20 @@
@Test
public void testAconfigPackage_load_withError() throws IOException {
// container not found fake_container
- PlatformAconfigPackageInternal aPackage =
- PlatformAconfigPackageInternal.load("fake_container", "fake_package", 0);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- aPackage.getException().getErrorCode());
+ AconfigStorageReadException e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ "fake_container", "fake_package", 0));
+ assertEquals(AconfigStorageReadException.ERROR_CANNOT_READ_STORAGE_FILE, e.getErrorCode());
// package not found
- aPackage = PlatformAconfigPackageInternal.load("system", "fake_container", 0);
- assertEquals(
- AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
- aPackage.getException().getErrorCode());
+ e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () -> PlatformAconfigPackageInternal.load("system", "fake_container", 0));
+ assertEquals(AconfigStorageReadException.ERROR_PACKAGE_NOT_FOUND, e.getErrorCode());
// fingerprint doesn't match
List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
@@ -138,18 +107,22 @@
String container = flag.container;
String packageName = flag.package_;
- boolean value = flag.state == ENABLED;
+ boolean value = flag.state == Aconfig.ENABLED;
PackageTable pTable = fp.getPackageTable(container);
PackageTable.Node pNode = pTable.get(packageName);
if (pNode.hasPackageFingerprint()) {
long fingerprint = pNode.getPackageFingerprint();
- aPackage = PlatformAconfigPackageInternal.load(container, packageName, fingerprint + 1);
+ e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ container, packageName, fingerprint + 1));
assertEquals(
// AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH,
- 5, aPackage.getException().getErrorCode());
- assertEquals(aPackage.getBooleanFlagValue(flag.name, !value), value);
+ 5, e.getErrorCode());
}
}
}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/unit/srcs/PlatformAconfigPackageInternalTest.java b/tools/aconfig/aconfig_storage_read_api/tests/unit/srcs/PlatformAconfigPackageInternalTest.java
index ce3786a..961f0ea 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/unit/srcs/PlatformAconfigPackageInternalTest.java
+++ b/tools/aconfig/aconfig_storage_read_api/tests/unit/srcs/PlatformAconfigPackageInternalTest.java
@@ -18,12 +18,12 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
-import android.aconfig.storage.AconfigStorageException;
import android.aconfig.storage.PackageTable;
import android.aconfig.storage.StorageFileProvider;
+import android.os.flagging.AconfigStorageReadException;
import android.os.flagging.PlatformAconfigPackageInternal;
import org.junit.Before;
@@ -46,137 +46,101 @@
@Test
public void testLoad_container_package() throws Exception {
+ PackageTable packageTable = pr.getPackageTable("mockup");
+
+ PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
+
+ long fingerprint = node1.getPackageFingerprint();
PlatformAconfigPackageInternal p =
PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_1", pr);
- assertNull(p.getException());
+ "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
}
@Test
public void testLoad_container_package_error() throws Exception {
+ PackageTable packageTable = pr.getPackageTable("mockup");
+ PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
+ long fingerprint = node1.getPackageFingerprint();
// cannot find package
- PlatformAconfigPackageInternal p =
- PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_10", pr);
-
- assertEquals(
- AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
- p.getException().getErrorCode());
+ AconfigStorageReadException e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ "mockup",
+ "com.android.aconfig.storage.test_10",
+ fingerprint,
+ pr));
+ assertEquals(AconfigStorageReadException.ERROR_PACKAGE_NOT_FOUND, e.getErrorCode());
// cannot find container
- p = PlatformAconfigPackageInternal.load(null, "com.android.aconfig.storage.test_1", pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
- p = PlatformAconfigPackageInternal.load("test", "com.android.aconfig.storage.test_1", pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
+ e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ null,
+ "com.android.aconfig.storage.test_1",
+ fingerprint,
+ pr));
+ assertEquals(AconfigStorageReadException.ERROR_CANNOT_READ_STORAGE_FILE, e.getErrorCode());
- // new storage doesn't exist
- pr = new StorageFileProvider("fake/path/", "fake/path/");
- p = PlatformAconfigPackageInternal.load("mockup", "com.android.aconfig.storage.test_1", pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
+ e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ "test",
+ "com.android.aconfig.storage.test_1",
+ fingerprint,
+ pr));
+ assertEquals(AconfigStorageReadException.ERROR_CANNOT_READ_STORAGE_FILE, e.getErrorCode());
- // file read issue
- pr = new StorageFileProvider(TESTDATA_PATH, "fake/path/");
- p = PlatformAconfigPackageInternal.load("mockup", "com.android.aconfig.storage.test_1", pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
- }
-
- @Test
- public void testLoad_container_package_fingerprint() throws Exception {
- PackageTable packageTable = pr.getPackageTable("mockup");
-
- PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
-
- long fingerprint = node1.getPackageFingerprint();
- PlatformAconfigPackageInternal p =
- PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
- assertNull(p.getException());
- }
-
- @Test
- public void testLoad_container_package_fingerprint_error() throws Exception {
-
- PackageTable packageTable = pr.getPackageTable("mockup");
-
- PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
-
- long fingerprint = node1.getPackageFingerprint();
-
- // cannot find package
- PlatformAconfigPackageInternal p =
- PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_10", fingerprint, pr);
-
- assertEquals(
- AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
- p.getException().getErrorCode());
-
- // cannot find container
- p =
- PlatformAconfigPackageInternal.load(
- null, "com.android.aconfig.storage.test_1", fingerprint, pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
- p =
- PlatformAconfigPackageInternal.load(
- "test", "com.android.aconfig.storage.test_1", fingerprint, pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
// fingerprint doesn't match
- p =
- PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_1", fingerprint + 1, pr);
+ e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ "mockup",
+ "com.android.aconfig.storage.test_1",
+ fingerprint + 1,
+ pr));
assertEquals(
// AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH,
- 5, p.getException().getErrorCode());
+ 5, e.getErrorCode());
// new storage doesn't exist
pr = new StorageFileProvider("fake/path/", "fake/path/");
- p =
- PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
+ e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ "mockup",
+ "com.android.aconfig.storage.test_1",
+ fingerprint,
+ pr));
+ assertEquals(AconfigStorageReadException.ERROR_CANNOT_READ_STORAGE_FILE, e.getErrorCode());
// file read issue
pr = new StorageFileProvider(TESTDATA_PATH, "fake/path/");
- p =
- PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
- assertEquals(
- AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
- p.getException().getErrorCode());
- }
-
- @Test
- public void testGetBooleanFlagValue_flagName() throws Exception {
- PlatformAconfigPackageInternal p =
- PlatformAconfigPackageInternal.load(
- "mockup", "com.android.aconfig.storage.test_1", pr);
- assertFalse(p.getBooleanFlagValue("disabled_rw", true));
- assertTrue(p.getBooleanFlagValue("enabled_ro", false));
- assertTrue(p.getBooleanFlagValue("enabled_rw", false));
- assertFalse(p.getBooleanFlagValue("fake", false));
+ e =
+ assertThrows(
+ AconfigStorageReadException.class,
+ () ->
+ PlatformAconfigPackageInternal.load(
+ "mockup",
+ "com.android.aconfig.storage.test_1",
+ fingerprint,
+ pr));
+ assertEquals(AconfigStorageReadException.ERROR_CANNOT_READ_STORAGE_FILE, e.getErrorCode());
}
@Test
public void testGetBooleanFlagValue_index() throws Exception {
-
PackageTable packageTable = pr.getPackageTable("mockup");
-
PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
-
long fingerprint = node1.getPackageFingerprint();
PlatformAconfigPackageInternal p =
PlatformAconfigPackageInternal.load(
diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java
index 5f066a8..d084048 100644
--- a/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java
+++ b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java
@@ -21,10 +21,6 @@
*/
public class AconfigPackageInternal {
- public static AconfigPackageInternal load(String container, String packageName) {
- throw new UnsupportedOperationException("Stub!");
- }
-
public static AconfigPackageInternal load(
String container, String packageName, long packageFingerprint) {
throw new UnsupportedOperationException("Stub!");
@@ -33,12 +29,4 @@
public boolean getBooleanFlagValue(int index) {
throw new UnsupportedOperationException("Stub!");
}
-
- public boolean getBooleanFlagValue(String flagName, boolean defaultValue) {
- throw new UnsupportedOperationException("Stub!");
- }
-
- public AconfigStorageReadException getException() {
- throw new UnsupportedOperationException("Stub!");
- }
}
diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java
index c1bc19b..283b251 100644
--- a/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java
+++ b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java
@@ -21,10 +21,6 @@
*/
public class PlatformAconfigPackageInternal {
- public static PlatformAconfigPackageInternal load(String container, String packageName) {
- throw new UnsupportedOperationException("Stub!");
- }
-
public static PlatformAconfigPackageInternal load(
String container, String packageName, long packageFingerprint) {
throw new UnsupportedOperationException("Stub!");
@@ -33,12 +29,4 @@
public boolean getBooleanFlagValue(int index) {
throw new UnsupportedOperationException("Stub!");
}
-
- public boolean getBooleanFlagValue(String flagName, boolean defaultValue) {
- throw new UnsupportedOperationException("Stub!");
- }
-
- public AconfigStorageReadException getException() {
- throw new UnsupportedOperationException("Stub!");
- }
}
diff --git a/tools/edit_monitor/daemon_manager.py b/tools/edit_monitor/daemon_manager.py
index 0c31ab8..7d666fe 100644
--- a/tools/edit_monitor/daemon_manager.py
+++ b/tools/edit_monitor/daemon_manager.py
@@ -13,6 +13,8 @@
# limitations under the License.
+import errno
+import fcntl
import getpass
import hashlib
import logging
@@ -100,16 +102,32 @@
logging.warning("Edit monitor for cog is not supported, exiting...")
return
- try:
- self._stop_any_existing_instance()
- self._write_pid_to_pidfile()
- self._start_daemon_process()
- except Exception as e:
- logging.exception("Failed to start daemon manager with error %s", e)
- self._send_error_event_to_clearcut(
- edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
- )
- raise e
+ setup_lock_file = pathlib.Path(tempfile.gettempdir()).joinpath(
+ self.pid_file_path.name + ".setup"
+ )
+ logging.info("setup lock file: %s", setup_lock_file)
+ with open(setup_lock_file, "w") as f:
+ try:
+ # Acquire an exclusive lock
+ fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ self._stop_any_existing_instance()
+ self._write_pid_to_pidfile()
+ self._start_daemon_process()
+ except Exception as e:
+ if (
+ isinstance(e, IOError) and e.errno == errno.EAGAIN
+ ): # Failed to acquire the file lock.
+ logging.warning("Another edit monitor is starting, exitinng...")
+ return
+ else:
+ logging.exception("Failed to start daemon manager with error %s", e)
+ self._send_error_event_to_clearcut(
+ edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+ )
+ raise e
+ finally:
+ # Release the lock
+ fcntl.flock(f, fcntl.LOCK_UN)
def monitor_daemon(
self,
@@ -149,15 +167,15 @@
edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_MEMORY_USAGE
)
logging.error(
- "Daemon process is consuming too much memory, rebooting...")
+ "Daemon process is consuming too much memory, rebooting..."
+ )
self.reboot()
if self.max_cpu_usage >= cpu_threshold:
self._send_error_event_to_clearcut(
edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_CPU_USAGE
)
- logging.error(
- "Daemon process is consuming too much cpu, killing...")
+ logging.error("Daemon process is consuming too much cpu, killing...")
self._terminate_process(self.daemon_process.pid)
logging.info(
@@ -179,7 +197,7 @@
self._wait_for_process_terminate(self.daemon_process.pid, 1)
if self.daemon_process.is_alive():
self._terminate_process(self.daemon_process.pid)
- self._remove_pidfile()
+ self._remove_pidfile(self.pid)
logging.info("Successfully stopped daemon manager.")
except Exception as e:
logging.exception("Failed to stop daemon manager with error %s", e)
@@ -253,11 +271,15 @@
if ex_pid:
logging.info("Found another instance with pid %d.", ex_pid)
self._terminate_process(ex_pid)
- self._remove_pidfile()
+ self._remove_pidfile(ex_pid)
- def _read_pid_from_pidfile(self):
- with open(self.pid_file_path, "r") as f:
- return int(f.read().strip())
+ def _read_pid_from_pidfile(self) -> int | None:
+ try:
+ with open(self.pid_file_path, "r") as f:
+ return int(f.read().strip())
+ except FileNotFoundError as e:
+ logging.warning("pidfile %s does not exist.", self.pid_file_path)
+ return None
def _write_pid_to_pidfile(self):
"""Creates a pidfile and writes the current pid to the file.
@@ -333,7 +355,23 @@
)
return True
- def _remove_pidfile(self):
+ def _remove_pidfile(self, expected_pid: int):
+ recorded_pid = self._read_pid_from_pidfile()
+
+ if recorded_pid is None:
+ logging.info("pid file %s already removed.", self.pid_file_path)
+ return
+
+ if recorded_pid != expected_pid:
+ logging.warning(
+ "pid file contains pid from a different process, expected pid: %d,"
+ " actual pid: %d.",
+ expected_pid,
+ recorded_pid,
+ )
+ return
+
+ logging.debug("removing pidfile written by process %s", expected_pid)
try:
os.remove(self.pid_file_path)
except FileNotFoundError:
@@ -378,9 +416,7 @@
uptime_end = float(f.readline().split()[0])
return (
- (total_end_time - total_start_time)
- / (uptime_end - uptime_start)
- * 100
+ (total_end_time - total_start_time) / (uptime_end - uptime_start) * 100
)
def _get_total_cpu_time(self, pid: int) -> float:
@@ -395,13 +431,19 @@
def _find_all_instances_pids(self) -> list[int]:
pids = []
- for file in os.listdir(self.pid_file_path.parent):
- if file.endswith(".lock"):
- try:
- with open(self.pid_file_path.parent.joinpath(file), "r") as f:
- pids.append(int(f.read().strip()))
- except (FileNotFoundError, IOError, ValueError, TypeError):
- logging.exception("Failed to get pid from file path: %s", file)
+ try:
+ output = subprocess.check_output(["ps", "-ef", "--no-headers"], text=True)
+ for line in output.splitlines():
+ parts = line.split()
+ process_path = parts[7]
+ if pathlib.Path(process_path).name == "edit_monitor":
+ pid = int(parts[1])
+ if pid != self.pid: # exclude the current process
+ pids.append(pid)
+ except Exception:
+ logging.exception(
+ "Failed to get pids of existing edit monitors from ps command."
+ )
return pids
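
Note: the non-blocking flock guard added to DaemonManager.start() above is what keeps two edit monitors from racing through setup at the same time. A minimal standalone sketch of the same pattern (the file names and the work callback here are illustrative, not the ones used by edit_monitor):

    import errno
    import fcntl
    import pathlib
    import tempfile

    def run_exclusively(setup_lock_file, work):
      """Runs work() only if no other process holds the setup lock.

      Returns True if work ran, False if another lock holder was detected.
      """
      with open(setup_lock_file, "w") as f:
        try:
          # LOCK_NB makes flock fail immediately with EAGAIN instead of blocking.
          fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError as e:
          if e.errno == errno.EAGAIN:
            return False  # Another process is already in its setup phase.
          raise
        try:
          work()
          return True
        finally:
          fcntl.flock(f, fcntl.LOCK_UN)

    if __name__ == "__main__":
      lock = pathlib.Path(tempfile.gettempdir()) / "example_daemon.setup"
      print(run_exclusively(lock, lambda: print("setting up daemon")))
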
diff --git a/tools/edit_monitor/daemon_manager_test.py b/tools/edit_monitor/daemon_manager_test.py
index 350739d..be28965 100644
--- a/tools/edit_monitor/daemon_manager_test.py
+++ b/tools/edit_monitor/daemon_manager_test.py
@@ -14,6 +14,7 @@
"""Unittests for DaemonManager."""
+import fcntl
import logging
import multiprocessing
import os
@@ -82,7 +83,8 @@
# tests will be cleaned.
tempfile.tempdir = self.working_dir.name
self.patch = mock.patch.dict(
- os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'true'})
+ os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'true'}
+ )
self.patch.start()
def tearDown(self):
@@ -102,6 +104,7 @@
p = self._create_fake_deamon_process()
self.assert_run_simple_daemon_success()
+ self.assert_no_subprocess_running()
def test_start_success_with_existing_instance_already_dead(self):
# Create a pidfile with pid that does not exist.
@@ -137,7 +140,9 @@
# Verify no daemon process is started.
self.assertIsNone(dm.daemon_process)
- @mock.patch.dict(os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'false'}, clear=True)
+ @mock.patch.dict(
+ os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'false'}, clear=True
+ )
def test_start_return_directly_if_disabled(self):
dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
dm.start()
@@ -154,6 +159,25 @@
# Verify no daemon process is started.
self.assertIsNone(dm.daemon_process)
+ def test_start_failed_other_instance_is_starting(self):
+ f = open(
+ pathlib.Path(self.working_dir.name).joinpath(
+ TEST_PID_FILE_PATH + '.setup'
+ ),
+ 'w',
+ )
+ # Acquire an exclusive lock
+ fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
+
+ dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+ dm.start()
+
+ # Release the lock
+ fcntl.flock(f, fcntl.LOCK_UN)
+ f.close()
+ # Verify no daemon process is started.
+ self.assertIsNone(dm.daemon_process)
+
@mock.patch('os.kill')
def test_start_failed_to_kill_existing_instance(self, mock_kill):
mock_kill.side_effect = OSError('Unknown OSError')
@@ -177,6 +201,7 @@
'edit_monitor'
)
pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+
# Makes the directory read-only so write pidfile will fail.
os.chmod(pid_file_path_dir, 0o555)
@@ -216,7 +241,7 @@
cclient=fake_cclient,
)
# set the fake total_memory_size
- dm.total_memory_size = 100 * 1024 *1024
+ dm.total_memory_size = 100 * 1024 * 1024
dm.start()
dm.monitor_daemon(interval=1)
@@ -367,6 +392,26 @@
fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
)
+ @mock.patch('subprocess.check_output')
+ def test_cleanup_success(self, mock_check_output):
+ p = self._create_fake_deamon_process()
+ fake_cclient = FakeClearcutClient()
+ mock_check_output.return_value = f'user {p.pid} 1 1 1 1 1 edit_monitor arg'
+
+ dm = daemon_manager.DaemonManager(
+ TEST_BINARY_FILE,
+ daemon_target=long_running_daemon,
+ cclient=fake_cclient,
+ )
+ dm.cleanup()
+
+ self.assertFalse(p.is_alive())
+ self.assertTrue(
+ pathlib.Path(self.working_dir.name)
+ .joinpath(daemon_manager.BLOCK_SIGN_FILE)
+ .exists()
+ )
+
def assert_run_simple_daemon_success(self):
damone_output_file = tempfile.NamedTemporaryFile(
dir=self.working_dir.name, delete=False
@@ -432,7 +477,7 @@
pass
def _create_fake_deamon_process(
- self, name: str = ''
+ self, name: str = TEST_PID_FILE_PATH
) -> multiprocessing.Process:
# Create a long running subprocess
p = multiprocessing.Process(target=long_running_daemon)
@@ -443,7 +488,7 @@
'edit_monitor'
)
pid_file_path_dir.mkdir(parents=True, exist_ok=True)
- with open(pid_file_path_dir.joinpath(name + 'pid.lock'), 'w') as f:
+ with open(pid_file_path_dir.joinpath(name), 'w') as f:
f.write(str(p.pid))
return p
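
Note: test_cleanup_success above feeds a canned line into the mocked subprocess.check_output, relying on the fixed column layout of `ps -ef --no-headers` (pid in column 2, command in column 8). A small sketch of the parsing that _find_all_instances_pids performs, exercised against such a line (the values are illustrative):

    import pathlib

    def parse_edit_monitor_pids(ps_output, own_pid):
      """Returns pids whose command basename is edit_monitor, excluding own_pid."""
      pids = []
      for line in ps_output.splitlines():
        parts = line.split()
        if len(parts) < 8:
          continue
        if pathlib.Path(parts[7]).name == 'edit_monitor':
          pid = int(parts[1])
          if pid != own_pid:
            pids.append(pid)
      return pids

    fake_ps = 'user 1234 1 1 1 1 1 /some/path/edit_monitor --path=/tmp'
    assert parse_edit_monitor_pids(fake_ps, own_pid=999) == [1234]
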
diff --git a/tools/edit_monitor/edit_monitor_integration_test.py b/tools/edit_monitor/edit_monitor_integration_test.py
index 3d28274..f39b936 100644
--- a/tools/edit_monitor/edit_monitor_integration_test.py
+++ b/tools/edit_monitor/edit_monitor_integration_test.py
@@ -15,6 +15,7 @@
"""Integration tests for Edit Monitor."""
import glob
+from importlib import resources
import logging
import os
import pathlib
@@ -25,8 +26,6 @@
import tempfile
import time
import unittest
-
-from importlib import resources
from unittest import mock
@@ -49,7 +48,8 @@
self.root_monitoring_path.mkdir()
self.edit_monitor_binary_path = self._import_executable("edit_monitor")
self.patch = mock.patch.dict(
- os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'true'})
+ os.environ, {"ENABLE_ANDROID_EDIT_MONITOR": "true"}
+ )
self.patch.start()
def tearDown(self):
@@ -83,7 +83,21 @@
self.assertEqual(self._get_logged_events_num(), 4)
- def _start_edit_monitor_process(self):
+ def test_start_multiple_edit_monitor_only_one_started(self):
+ p1 = self._start_edit_monitor_process(wait_for_observer_start=False)
+ p2 = self._start_edit_monitor_process(wait_for_observer_start=False)
+ p3 = self._start_edit_monitor_process(wait_for_observer_start=False)
+
+ live_processes = self._get_live_processes([p1, p2, p3])
+
+ # Cleanup all live processes.
+ for p in live_processes:
+ os.kill(p.pid, signal.SIGINT)
+ p.communicate()
+
+ self.assertEqual(len(live_processes), 1)
+
+ def _start_edit_monitor_process(self, wait_for_observer_start=True):
command = f"""
export TMPDIR="{self.working_dir.name}"
{self.edit_monitor_binary_path} --path={self.root_monitoring_path} --dry_run"""
@@ -94,7 +108,9 @@
start_new_session=True,
executable="/bin/bash",
)
- self._wait_for_observer_start(time_out=5)
+ if wait_for_observer_start:
+ self._wait_for_observer_start(time_out=5)
+
return p
def _wait_for_observer_start(self, time_out):
@@ -125,6 +141,18 @@
return 0
+ def _get_live_processes(self, processes):
+ live_processes = []
+ for p in processes:
+ try:
+ p.wait(timeout=5)
+ except subprocess.TimeoutExpired as e:
+ live_processes.append(p)
+ logging.info("process: %d still alive.", p.pid)
+ else:
+ logging.info("process: %d stopped.", p.pid)
+ return live_processes
+
def _import_executable(self, executable_name: str) -> pathlib.Path:
binary_dir = pathlib.Path(self.working_dir.name).joinpath("binary")
binary_dir.mkdir()
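
Note: _get_live_processes above decides liveness purely by whether Popen.wait() times out. The same pattern in isolation (the sleep commands are just stand-ins for edit_monitor processes):

    import subprocess

    def live_processes(processes, timeout=5):
      """Returns the Popen objects still running after waiting up to timeout seconds each."""
      alive = []
      for p in processes:
        try:
          p.wait(timeout=timeout)  # Returns promptly if the process already exited.
        except subprocess.TimeoutExpired:
          alive.append(p)  # Still running after the timeout.
      return alive

    if __name__ == '__main__':
      short = subprocess.Popen(['sleep', '0'])
      long_lived = subprocess.Popen(['sleep', '30'])
      still_running = live_processes([short, long_lived], timeout=2)
      print([p.pid for p in still_running])  # Only the long-lived process.
      for p in still_running:
        p.terminate()
        p.wait()
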
diff --git a/tools/edit_monitor/main.py b/tools/edit_monitor/main.py
index 49385f1..3c2d183 100644
--- a/tools/edit_monitor/main.py
+++ b/tools/edit_monitor/main.py
@@ -72,7 +72,8 @@
root_logging_dir = tempfile.mkdtemp(prefix='edit_monitor_')
_, log_path = tempfile.mkstemp(dir=root_logging_dir, suffix='.log')
- log_fmt = '%(asctime)s %(filename)s:%(lineno)s:%(levelname)s: %(message)s'
+
+ log_fmt = '%(asctime)s.%(msecs)03d %(filename)s:%(lineno)s:%(levelname)s: %(message)s'
date_fmt = '%Y-%m-%d %H:%M:%S'
log_level = logging.DEBUG if verbose else logging.INFO
@@ -101,12 +102,12 @@
daemon_args=(args.path, args.dry_run),
)
- if args.force_cleanup:
- dm.cleanup()
-
try:
- dm.start()
- dm.monitor_daemon()
+ if args.force_cleanup:
+ dm.cleanup()
+ else:
+ dm.start()
+ dm.monitor_daemon()
except Exception:
logging.exception('Unexpected exception raised when run daemon.')
finally:
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 54df955..08f2b83 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -79,15 +79,10 @@
Returns:
The repacked apex file containing the signed apk files.
"""
- if not os.path.exists(self.debugfs_path):
- raise ApexSigningError(
- "Couldn't find location of debugfs_static: " +
- "Path {} does not exist. ".format(self.debugfs_path) +
- "Make sure bin/debugfs_static can be found in -p <path>")
- list_cmd = ['deapexer', '--debugfs_path', self.debugfs_path,
- 'list', self.apex_path]
- entries_names = common.RunAndCheckOutput(list_cmd).split()
- apk_entries = [name for name in entries_names if name.endswith('.apk')]
+ payload_dir = self.ExtractApexPayload(self.apex_path)
+ apk_entries = []
+ for base_dir, _, files in os.walk(payload_dir):
+ apk_entries.extend(os.path.join(base_dir, file) for file in files if file.endswith('.apk'))
# No need to sign and repack, return the original apex path.
if not apk_entries and self.sign_tool is None:
@@ -105,16 +100,16 @@
logger.warning('Apk path does not contain the intended directory name:'
' %s', entry)
- payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
- apk_entries, apk_keys, payload_key, signing_args)
+ has_signed_content = self.SignContentsInPayload(
+ payload_dir, apk_entries, apk_keys, payload_key, signing_args)
if not has_signed_content:
logger.info('No contents has been signed in %s', self.apex_path)
return self.apex_path
return self.RepackApexPayload(payload_dir, payload_key, signing_args)
- def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key, signing_args):
- """Extracts the payload image and signs the containing apk files."""
+ def ExtractApexPayload(self, apex_path):
+ """Extracts the contents of an APEX and returns the directory of the contents"""
if not os.path.exists(self.debugfs_path):
raise ApexSigningError(
"Couldn't find location of debugfs_static: " +
@@ -129,9 +124,12 @@
extract_cmd = ['deapexer', '--debugfs_path', self.debugfs_path,
'--fsckerofs_path', self.fsckerofs_path,
'extract',
- self.apex_path, payload_dir]
+ apex_path, payload_dir]
common.RunAndCheckOutput(extract_cmd)
+ return payload_dir
+ def SignContentsInPayload(self, payload_dir, apk_entries, apk_keys, payload_key, signing_args):
+ """Signs the contents in payload."""
has_signed_content = False
for entry in apk_entries:
apk_path = os.path.join(payload_dir, entry)
@@ -163,7 +161,7 @@
common.RunAndCheckOutput(cmd)
has_signed_content = True
- return payload_dir, has_signed_content
+ return has_signed_content
def RepackApexPayload(self, payload_dir, payload_key, signing_args=None):
"""Rebuilds the apex file with the updated payload directory."""
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 6446e1f..76d168c 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -1039,6 +1039,9 @@
# Prepare custom images.
if OPTIONS.custom_images:
+ if source_file is not None:
+ source_file = GetTargetFilesZipForCustomImagesUpdates(
+ source_file, OPTIONS.custom_images)
target_file = GetTargetFilesZipForCustomImagesUpdates(
target_file, OPTIONS.custom_images)
@@ -1121,17 +1124,18 @@
additional_args += ["--enable_lz4diff=" +
str(OPTIONS.enable_lz4diff).lower()]
+ env_override = {}
if source_file and OPTIONS.enable_lz4diff:
- input_tmp = common.UnzipTemp(source_file, ["META/liblz4.so"])
- liblz4_path = os.path.join(input_tmp, "META", "liblz4.so")
+ liblz4_path = os.path.join(source_file, "META", "liblz4.so")
assert os.path.exists(
liblz4_path), "liblz4.so not found in META/ dir of target file {}".format(liblz4_path)
logger.info("Enabling lz4diff %s", liblz4_path)
- additional_args += ["--liblz4_path", liblz4_path]
erofs_compression_param = OPTIONS.target_info_dict.get(
"erofs_default_compressor")
assert erofs_compression_param is not None, "'erofs_default_compressor' not found in META/misc_info.txt of target build. This is required to enable lz4diff."
additional_args += ["--erofs_compression_param", erofs_compression_param]
+ env_override["LD_PRELOAD"] = liblz4_path + \
+ ":" + os.environ.get("LD_PRELOAD", "")
if OPTIONS.disable_vabc:
additional_args += ["--disable_vabc=true"]
@@ -1141,10 +1145,15 @@
additional_args += ["--compressor_types", OPTIONS.compressor_types]
additional_args += ["--max_timestamp", max_timestamp]
+ env = dict(os.environ)
+ if env_override:
+ logger.info("Using environment variables %s", env_override)
+ env.update(env_override)
payload.Generate(
target_file,
source_file,
- additional_args + partition_timestamps_flags
+ additional_args + partition_timestamps_flags,
+ env=env
)
# Sign the payload.
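
Note: with lz4diff enabled, liblz4.so from the source build is now injected into the payload-generation subprocess via LD_PRELOAD instead of being passed as a --liblz4_path flag. The environment construction reduces to this pattern (the path and the command in the comment are placeholders):

    import os

    def build_payload_env(liblz4_path=None):
      """Returns a copy of os.environ, optionally prepending liblz4.so to LD_PRELOAD."""
      env = dict(os.environ)
      if liblz4_path:
        env['LD_PRELOAD'] = liblz4_path + ':' + os.environ.get('LD_PRELOAD', '')
      return env

    # env = build_payload_env('/path/to/source/META/liblz4.so')
    # subprocess.run([...payload generation command...], env=env)
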
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 81b53dc..852d62b 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -845,16 +845,16 @@
self.is_partial_update = is_partial_update
self.spl_downgrade = spl_downgrade
- def _Run(self, cmd): # pylint: disable=no-self-use
+ def _Run(self, cmd, **kwargs): # pylint: disable=no-self-use
# Don't pipe (buffer) the output if verbose is set. Let
# brillo_update_payload write to stdout/stderr directly, so its progress can
# be monitored.
if OPTIONS.verbose:
- common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
+ common.RunAndCheckOutput(cmd, stdout=None, stderr=None, **kwargs)
else:
- common.RunAndCheckOutput(cmd)
+ common.RunAndCheckOutput(cmd, **kwargs)
- def Generate(self, target_file, source_file=None, additional_args=None):
+ def Generate(self, target_file, source_file=None, additional_args=None, **kwargs):
"""Generates a payload from the given target-files zip(s).
Args:
@@ -863,6 +863,7 @@
generating a full OTA.
additional_args: A list of additional args that should be passed to
delta_generator binary; or None.
+ kwargs: Any additional args to pass to subprocess.Popen
"""
if additional_args is None:
additional_args = []
@@ -918,7 +919,7 @@
if self.is_partial_update:
cmd.extend(["--is_partial_update=true"])
cmd.extend(additional_args)
- self._Run(cmd)
+ self._Run(cmd, **kwargs)
self.payload_file = payload_file
self.payload_properties = None
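
Note: the env override above only works because Payload.Generate now forwards arbitrary keyword arguments through _Run to common.RunAndCheckOutput, which is expected to hand them to the underlying subprocess call. A stripped-down sketch of that forwarding chain, using a stand-in runner rather than the real common module:

    import os
    import subprocess

    def run_and_check_output(cmd, **kwargs):
      """Stand-in for common.RunAndCheckOutput: extra kwargs go straight to subprocess."""
      return subprocess.run(
          cmd, check=True, capture_output=True, text=True, **kwargs).stdout

    class PayloadSketch:
      def _run(self, cmd, **kwargs):
        return run_and_check_output(cmd, **kwargs)

      def generate(self, cmd, **kwargs):
        # Any caller-supplied kwargs (e.g. env=...) flow down unchanged.
        return self._run(cmd, **kwargs)

    if __name__ == '__main__':
      env = dict(os.environ)
      env['DEMO_VAR'] = 'preloaded'
      print(PayloadSketch().generate(['printenv', 'DEMO_VAR'], env=env))
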