Merge "find_static_candidates.py: protect against missing keys in module" into main
diff --git a/ci/build_test_suites b/ci/build_test_suites
new file mode 100755
index 0000000..89ecefe
--- /dev/null
+++ b/ci/build_test_suites
@@ -0,0 +1,23 @@
+#!prebuilts/build-tools/linux-x86/bin/py3-cmd
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+import build_test_suites
+
+if __name__ == '__main__':
+ sys.dont_write_bytecode = True
+
+ build_test_suites.main(sys.argv)
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
new file mode 100644
index 0000000..9b83148
--- /dev/null
+++ b/ci/build_test_suites.py
@@ -0,0 +1,299 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Script to build only the necessary modules for general-tests along
+
+with whatever other targets are passed in.
+"""
+
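+# Example invocation (flag values below are illustrative only):
+#   build_test_suites --target_product aosp_arm64 --target_release trunk_staging \
+#       --dist_dir out/dist --change_info out/dist/change_info.json extra-test-suite
+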
+import argparse
+from collections.abc import Sequence
+import json
+import os
+import pathlib
+import re
+import subprocess
+import sys
+from typing import Any, Dict, Set, Text
+
+import test_mapping_module_retriever
+
+
+# List of modules that are always required to be in general-tests.zip
+REQUIRED_MODULES = frozenset(
+ ['cts-tradefed', 'vts-tradefed', 'compatibility-host-util', 'soong_zip']
+)
+
+
+def build_test_suites(argv):
+ args = parse_args(argv)
+
+  # Only attempt a targeted build when BUILD_NUMBER is set and starts with
+  # 'P'; otherwise fall back to building everything.
+  if not os.environ.get('BUILD_NUMBER', '').startswith('P'):
+ build_everything(args)
+ return
+
+  # Map the changed files to the modules that need to be built.
+  # TODO(lucafarsi): Move this into a replaceable class.
+ build_affected_modules(args)
+
+
+def parse_args(argv):
+ argparser = argparse.ArgumentParser()
+ argparser.add_argument(
+ 'extra_targets', nargs='*', help='Extra test suites to build.'
+ )
+ argparser.add_argument('--target_product')
+ argparser.add_argument('--target_release')
+ argparser.add_argument(
+ '--with_dexpreopt_boot_img_and_system_server_only', action='store_true'
+ )
+ argparser.add_argument('--dist_dir')
+ argparser.add_argument('--change_info', nargs='?')
+ argparser.add_argument('--extra_required_modules', nargs='*')
+
+  # argv[0] is the program name, so only parse the arguments that follow it.
+  return argparser.parse_args(argv[1:])
+
+
+def build_everything(args: argparse.Namespace):
+ build_command = base_build_command(args)
+ build_command.append('general-tests')
+
+ run_command(build_command, print_output=True)
+
+
+def build_affected_modules(args: argparse.Namespace):
+ modules_to_build = find_modules_to_build(
+ pathlib.Path(args.change_info), args.extra_required_modules
+ )
+
+ # Call the build command with everything.
+ build_command = base_build_command(args)
+ build_command.extend(modules_to_build)
+
+ run_command(build_command, print_output=True)
+
+ zip_build_outputs(modules_to_build, args.dist_dir, args.target_release)
+
+
+def base_build_command(args: argparse.Namespace) -> list:
+ build_command = []
+ build_command.append('time')
+ build_command.append('./build/soong/soong_ui.bash')
+ build_command.append('--make-mode')
+ build_command.append('dist')
+ build_command.append('DIST_DIR=' + args.dist_dir)
+ build_command.append('TARGET_PRODUCT=' + args.target_product)
+ build_command.append('TARGET_RELEASE=' + args.target_release)
+ if args.with_dexpreopt_boot_img_and_system_server_only:
+ build_command.append('WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY=true')
+ build_command.extend(args.extra_targets)
+
+ return build_command
+
+
+def run_command(
+ args: list[str],
+ env: Dict[Text, Text] = os.environ,
+ print_output: bool = False,
+) -> str:
+ result = subprocess.run(
+ args=args,
+ text=True,
+ capture_output=True,
+ check=False,
+ env=env,
+ )
+  # If the process failed, print its stdout and stderr, then raise.
+ if not result.returncode == 0:
+ print('Build command failed! output:')
+ print('stdout: ' + result.stdout)
+ print('stderr: ' + result.stderr)
+
+ result.check_returncode()
+
+ if print_output:
+ print(result.stdout)
+
+ return result.stdout
+
+
+def find_modules_to_build(
+ change_info: pathlib.Path, extra_required_modules: list[Text]
+) -> Set[Text]:
+ changed_files = find_changed_files(change_info)
+
+ test_mappings = test_mapping_module_retriever.GetTestMappings(
+ changed_files, set()
+ )
+
+  # soong_zip is required to generate the output zip, so always build it.
+ modules_to_build = set(REQUIRED_MODULES)
+ if extra_required_modules:
+ modules_to_build.update(extra_required_modules)
+
+ modules_to_build.update(find_affected_modules(test_mappings, changed_files))
+
+ return modules_to_build
+
+
+def find_changed_files(change_info: pathlib.Path) -> Set[Text]:
+ with open(change_info) as change_info_file:
+ change_info_contents = json.load(change_info_file)
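+  # The change_info file is expected to look roughly like the following
+  # (project and file names are illustrative only):
+  # {
+  #   "changes": [
+  #     {
+  #       "projectPath": "platform/foo",
+  #       "revisions": [
+  #         {"fileInfos": [{"path": "src/bar.cpp"}]}
+  #       ]
+  #     }
+  #   ]
+  # }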
+
+ changed_files = set()
+
+ for change in change_info_contents['changes']:
+ project_path = change.get('projectPath') + '/'
+
+ for revision in change.get('revisions'):
+ for file_info in revision.get('fileInfos'):
+ changed_files.add(project_path + file_info.get('path'))
+
+ return changed_files
+
+
+def find_affected_modules(
+ test_mappings: Dict[str, Any], changed_files: Set[Text]
+) -> Set[Text]:
+ modules = set()
+
+ # The test_mappings object returned by GetTestMappings is organized as
+ # follows:
+  # {
+  #   'test_mapping_file_path': {
+  #     'group_name': [
+  #       {
+  #         'name': 'module_name',
+  #         'file_patterns': ['pattern', ...],  # optional
+  #       },
+  #     ],
+  #   },
+  # }
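+  # For example (module names and patterns are illustrative only):
+  # {
+  #   'platform/foo': {
+  #     'presubmit': [
+  #       {'name': 'FooUnitTests'},
+  #       {'name': 'FooIntegrationTests', 'file_patterns': ['.*\\.cpp']},
+  #     ],
+  #   },
+  # }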
+ for test_mapping in test_mappings.values():
+ for group in test_mapping.values():
+ for entry in group:
+ module_name = entry.get('name', None)
+
+ if not module_name:
+ continue
+
+ file_patterns = entry.get('file_patterns')
+ if not file_patterns:
+ modules.add(module_name)
+ continue
+
+ if matches_file_patterns(file_patterns, changed_files):
+ modules.add(module_name)
+ continue
+
+ return modules
+
+
+# TODO(lucafarsi): Share this logic with the original logic in
+# test_mapping_test_retriever.py
+def matches_file_patterns(
+ file_patterns: list[Text], changed_files: Set[Text]
+) -> bool:
+ for changed_file in changed_files:
+ for pattern in file_patterns:
+ if re.search(pattern, changed_file):
+ return True
+
+ return False
+
+
+def zip_build_outputs(
+ modules_to_build: Set[Text], dist_dir: Text, target_release: Text
+):
+ src_top = os.environ.get('TOP', os.getcwd())
+
+  # Call dumpvar to get the necessary output directories.
+  # TODO(lucafarsi): Don't call soong_ui once per variable for this;
+  # --dumpvars-mode can fetch them all at once but requires parsing the output.
+ host_out_testcases = get_soong_var('HOST_OUT_TESTCASES', target_release)
+ target_out_testcases = get_soong_var('TARGET_OUT_TESTCASES', target_release)
+ product_out = get_soong_var('PRODUCT_OUT', target_release)
+ soong_host_out = get_soong_var('SOONG_HOST_OUT', target_release)
+ host_out = get_soong_var('HOST_OUT', target_release)
+
+  # Package the outputs into general-tests.zip.
+  # TODO(lucafarsi): Move this code into a replaceable class.
+ host_paths = []
+ target_paths = []
+ for module in modules_to_build:
+ host_path = os.path.join(host_out_testcases, module)
+ if os.path.exists(host_path):
+ host_paths.append(host_path)
+
+ target_path = os.path.join(target_out_testcases, module)
+ if os.path.exists(target_path):
+ target_paths.append(target_path)
+
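+  # The assembled soong_zip invocation looks roughly like the following
+  # (placeholder paths shown in <angle brackets>):
+  #   time <host_out>/bin/soong_zip \
+  #     -C <src_top>/<soong_host_out> -P host/ -D <host testcase dir>... \
+  #     -C <src_top>/<product_out> -P target -D <target testcase dir>... \
+  #     -C <soong_host_out>/framework -P host/tools \
+  #       -f cts-tradefed.jar -f compatibility-host-util.jar -f vts-tradefed.jar \
+  #     -o <dist_dir>/general-tests.zip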
+ zip_command = ['time', os.path.join(host_out, 'bin', 'soong_zip')]
+
+ # Add host testcases.
+ zip_command.append('-C')
+ zip_command.append(os.path.join(src_top, soong_host_out))
+ zip_command.append('-P')
+ zip_command.append('host/')
+ for path in host_paths:
+ zip_command.append('-D')
+ zip_command.append(path)
+
+ # Add target testcases.
+ zip_command.append('-C')
+ zip_command.append(os.path.join(src_top, product_out))
+ zip_command.append('-P')
+ zip_command.append('target')
+ for path in target_paths:
+ zip_command.append('-D')
+ zip_command.append(path)
+
+ # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
+ # Add necessary tools. These are also hardcoded in general-tests.mk.
+ framework_path = os.path.join(soong_host_out, 'framework')
+
+ zip_command.append('-C')
+ zip_command.append(framework_path)
+ zip_command.append('-P')
+ zip_command.append('host/tools')
+ zip_command.append('-f')
+ zip_command.append(os.path.join(framework_path, 'cts-tradefed.jar'))
+ zip_command.append('-f')
+ zip_command.append(
+ os.path.join(framework_path, 'compatibility-host-util.jar')
+ )
+ zip_command.append('-f')
+ zip_command.append(os.path.join(framework_path, 'vts-tradefed.jar'))
+
+ # Zip to the DIST dir.
+ zip_command.append('-o')
+ zip_command.append(os.path.join(dist_dir, 'general-tests.zip'))
+
+ run_command(zip_command, print_output=True)
+
+
+def get_soong_var(var: str, target_release: str) -> str:
+ new_env = os.environ.copy()
+ new_env['TARGET_RELEASE'] = target_release
+
+ value = run_command(
+ ['./build/soong/soong_ui.bash', '--dumpvar-mode', '--abs', var],
+ env=new_env,
+ ).strip()
+ if not value:
+ raise RuntimeError('Necessary soong variable ' + var + ' not found.')
+
+ return value
+
+
+def main(argv):
+  build_test_suites(argv)
diff --git a/ci/test_mapping_module_retriever.py b/ci/test_mapping_module_retriever.py
new file mode 100644
index 0000000..d2c13c0
--- /dev/null
+++ b/ci/test_mapping_module_retriever.py
@@ -0,0 +1,125 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Simple parsing code to scan test_mapping files and determine which
+modules are needed to build for the given list of changed files.
+TODO(lucafarsi): Deduplicate from artifact_helper.py
+"""
+
+from typing import Any, Dict, Set, Text
+import json
+import os
+import re
+
+# Regex to extract the test name from the path of a test config file.
+TEST_NAME_REGEX = r'(?:^|.*/)([^/]+)\.config'
+
+# Key name for TEST_MAPPING imports
+KEY_IMPORTS = 'imports'
+KEY_IMPORT_PATH = 'path'
+
+# Name of TEST_MAPPING file.
+TEST_MAPPING = 'TEST_MAPPING'
+
+# Pattern used to identify double-quoted strings and '//'-format comments in
+# TEST_MAPPING file, but only double-quoted strings are included within the
+# matching group.
+_COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?')
+
+
+def FilterComments(test_mapping_file: Text) -> Text:
+ """Remove comments in TEST_MAPPING file to valid format.
+
+ Only '//' is regarded as comments.
+
+ Args:
+ test_mapping_file: Path to a TEST_MAPPING file.
+
+ Returns:
+ Valid json string without comments.
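+
+  Example (illustrative input):
+    FilterComments('{"presubmit": []} // comment')
+    returns '{"presubmit": []} '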
+ """
+ return re.sub(_COMMENTS_RE, r'\1', test_mapping_file)
+
+def GetTestMappings(paths: Set[Text],
+ checked_paths: Set[Text]) -> Dict[Text, Dict[Text, Any]]:
+ """Get the affected TEST_MAPPING files.
+
+ TEST_MAPPING files in source code are packaged into a build artifact
+ `test_mappings.zip`. Inside the zip file, the path of each TEST_MAPPING file
+ is preserved. From all TEST_MAPPING files in the source code, this method
+ locates the affected TEST_MAPPING files based on the given paths list.
+
+ A TEST_MAPPING file may also contain `imports` that import TEST_MAPPING files
+ from a different location, e.g.,
+ "imports": [
+ {
+ "path": "../folder2"
+ }
+ ]
+ In that example, TEST_MAPPING files inside ../folder2 (relative to the
+ TEST_MAPPING file containing that imports section) and its parent directories
+ will also be included.
+
+ Args:
+ paths: A set of paths with related TEST_MAPPING files for given changes.
+    checked_paths: A set of paths that have already been checked for a
+      TEST_MAPPING file. The set is updated after processing each TEST_MAPPING
+      file. It's used to prevent an infinite loop when the method is called
+      recursively.
+
+ Returns:
+ A dictionary of Test Mapping containing the content of the affected
+ TEST_MAPPING files, indexed by the path containing the TEST_MAPPING file.
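+
+  Example (paths are illustrative):
+    GetTestMappings({'platform/foo/bar/baz.cpp'}, set())
+    looks for TEST_MAPPING in 'platform/foo/bar', 'platform/foo', 'platform'
+    and the source root '', plus any paths imported by the files it finds.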
+ """
+ test_mappings = {}
+
+ # Search for TEST_MAPPING files in each modified path and its parent
+ # directories.
+ all_paths = set()
+ for path in paths:
+ dir_names = path.split(os.path.sep)
+ all_paths |= set(
+ [os.path.sep.join(dir_names[:i + 1]) for i in range(len(dir_names))])
+ # Add root directory to the paths to search for TEST_MAPPING file.
+ all_paths.add('')
+
+ all_paths.difference_update(checked_paths)
+ checked_paths |= all_paths
+ # Try to load TEST_MAPPING file in each possible path.
+ for path in all_paths:
+ try:
+      test_mapping_file = os.path.join(os.getcwd(), path, 'TEST_MAPPING')
+      # Read the content of the TEST_MAPPING file and strip '//' comments.
+      with open(test_mapping_file, 'r') as f:
+        content = FilterComments(f.read())
+ test_mapping = json.loads(content)
+ test_mappings[path] = test_mapping
+
+ import_paths = set()
+ for import_detail in test_mapping.get(KEY_IMPORTS, []):
+ import_path = import_detail[KEY_IMPORT_PATH]
+ # Try the import path as absolute path.
+ import_paths.add(import_path)
+ # Try the import path as relative path based on the test mapping file
+ # containing the import.
+ norm_import_path = os.path.normpath(os.path.join(path, import_path))
+ import_paths.add(norm_import_path)
+ import_paths.difference_update(checked_paths)
+ if import_paths:
+ import_test_mappings = GetTestMappings(import_paths, checked_paths)
+ test_mappings.update(import_test_mappings)
+ except (KeyError, FileNotFoundError, NotADirectoryError):
+ # TEST_MAPPING file doesn't exist in path
+ pass
+
+ return test_mappings
diff --git a/core/Makefile b/core/Makefile
index 6edac1a..b3870e5 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2094,11 +2094,6 @@
# Get a colon-separated list of search paths.
INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
-# Collects file_contexts files from modules to be installed
-$(call merge-fc-files, \
- $(sort $(foreach m,$(product_MODULES),$(ALL_MODULES.$(m).FILE_CONTEXTS))),\
- $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.modules.tmp)
-
SELINUX_FC := $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.bin
INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
@@ -3237,7 +3232,6 @@
exit 1; \
fi
ln -sfn $2 $1
-$1: .KATI_SYMLINK_OUTPUTS := $1
)
$(eval PARTITION_COMPAT_SYMLINKS += $1)
$1
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 6af6f08..6ae8b0d 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -189,6 +189,9 @@
$(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
$(call add_soong_config_var,ANDROID,FULL_SYSTEM_OPTIMIZE_JAVA)
+# TODO(b/319697968): Remove this build flag support when metalava fully supports flagged api
+$(call soong_config_set,ANDROID,release_hidden_api_exportable_stubs,$(RELEASE_HIDDEN_API_EXPORTABLE_STUBS))
+
# Check for SupplementalApi module.
ifeq ($(wildcard packages/modules/SupplementalApi),)
$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,false)
@@ -196,3 +199,7 @@
$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,true)
endif
+# Add crashrecovery build flag to soong
+$(call soong_config_set,ANDROID,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+# Weirdly required because platform_bootclasspath is using AUTO namespace
+$(call soong_config_set,AUTO,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 254bfeb..1793f00 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -1187,7 +1187,6 @@
endif
ALL_MODULES.$(my_register_name).FOR_HOST_CROSS := $(my_host_cross)
ifndef LOCAL_IS_HOST_MODULE
-ALL_MODULES.$(my_register_name).FILE_CONTEXTS := $(LOCAL_FILE_CONTEXTS)
ALL_MODULES.$(my_register_name).APEX_KEYS_FILE := $(LOCAL_APEX_KEY_PATH)
endif
test_config :=
diff --git a/core/board_config.mk b/core/board_config.mk
index 5a1a781..ac9a34f 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -994,11 +994,6 @@
endif
BOARD_API_LEVEL := $(RELEASE_BOARD_API_LEVEL)
.KATI_READONLY := BOARD_API_LEVEL
-
- ifdef RELEASE_BOARD_API_LEVEL_FROZEN
- BOARD_API_LEVEL_FROZEN := true
- .KATI_READONLY := BOARD_API_LEVEL_FROZEN
- endif
endif
###########################################
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 2b84fcd..8d99176 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -87,7 +87,6 @@
LOCAL_EXTRA_FULL_TEST_CONFIGS:=
LOCAL_EXTRACT_APK:=
LOCAL_EXTRACT_DPI_APK:=
-LOCAL_FILE_CONTEXTS:=
LOCAL_FINDBUGS_FLAGS:=
LOCAL_FORCE_STATIC_EXECUTABLE:=
LOCAL_FULL_CLASSES_JACOCO_JAR:=
diff --git a/core/config.mk b/core/config.mk
index 469be30..6f13319 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -896,7 +896,7 @@
sepolicy_vers := $(BOARD_API_LEVEL).0
TOT_SEPOLICY_VERSION := 1000000.0
-ifeq (true,$(BOARD_API_LEVEL_FROZEN))
+ifeq (true,$(RELEASE_BOARD_API_LEVEL_FROZEN))
PLATFORM_SEPOLICY_VERSION := $(sepolicy_vers)
else
PLATFORM_SEPOLICY_VERSION := $(TOT_SEPOLICY_VERSION)
diff --git a/core/definitions.mk b/core/definitions.mk
index 1f2d011..ed842bc 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -3306,7 +3306,6 @@
@mkdir -p $$(dir $$@)
@rm -rf $$@
$(hide) ln -sf $(2) $$@
-$(3): .KATI_SYMLINK_OUTPUTS := $(3)
endef
# Copy an apk to a target location while removing classes*.dex
diff --git a/core/main.mk b/core/main.mk
index 649c75c..f5dbad8 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -311,10 +311,10 @@
ADDITIONAL_VENDOR_PROPERTIES += \
ro.board.api_level=$(BOARD_API_LEVEL)
endif
-# BOARD_API_LEVEL_FROZEN is true when the vendor API surface is frozen.
-ifdef BOARD_API_LEVEL_FROZEN
+# RELEASE_BOARD_API_LEVEL_FROZEN is true when the vendor API surface is frozen.
+ifdef RELEASE_BOARD_API_LEVEL_FROZEN
ADDITIONAL_VENDOR_PROPERTIES += \
- ro.board.api_frozen=$(BOARD_API_LEVEL_FROZEN)
+ ro.board.api_frozen=$(RELEASE_BOARD_API_LEVEL_FROZEN)
endif
# Set build prop. This prop is read by ota_from_target_files when generating OTA,
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index 57df911..12057fb 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -97,6 +97,46 @@
)) \
)
+# Create a set of storage files for each partition
+# $(1): built aconfig flags storage dir (out)
+# $(2): installed aconfig flags storage package map file (out)
+# $(3): installed aconfig flags storage flag map file (out)
+# $(4): installed aconfig flags storage flag value file (out)
+# $(5): input aconfig files for the partition (in)
+define generate-partition-aconfig-storage-file
+$(eval $(strip $(1))/target: PRIVATE_OUT_DIR := $(strip $(1)))
+$(eval $(strip $(1))/target: PRIVATE_IN := $(strip $(5)))
+$(strip $(1))/target: $(ACONFIG) $(strip $(5))
+ mkdir -p $$(PRIVATE_OUT_DIR)
+ $$(if $$(PRIVATE_IN), \
+ $$(ACONFIG) create-storage --container "" --out $$(PRIVATE_OUT_DIR) \
+ $$(addprefix --cache ,$$(PRIVATE_IN)), \
+ )
+ echo -n > $$(PRIVATE_OUT_DIR)/target
+$(strip $(1))/package.map: $(strip $(1))/target
+$(strip $(1))/flag.map: $(strip $(1))/target
+$(strip $(1))/flag.val: $(strip $(1))/target
+$(call copy-one-file, $(strip $(1))/package.map, $(2))
+$(call copy-one-file, $(strip $(1))/flag.map, $(3))
+$(call copy-one-file, $(strip $(1))/flag.val, $(4))
+endef
+
+ifeq ($(RELEASE_CREATE_ACONFIG_STORAGE_FILE),true)
+$(foreach partition, $(_FLAG_PARTITIONS), \
+ $(eval aconfig_storage_package_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/package.map) \
+ $(eval aconfig_storage_flag_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/flag.map) \
+  $(eval aconfig_storage_flag_val.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/flag.val) \
+ $(eval $(call generate-partition-aconfig-storage-file, \
+ $(TARGET_OUT_FLAGS)/$(partition), \
+ $(aconfig_storage_package_map.$(partition)), \
+ $(aconfig_storage_flag_map.$(partition)), \
+ $(aconfig_storage_flag_val.$(partition)), \
+ $(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
+ $(ALL_MODULES.$(m).ACONFIG_FILES) \
+ )), \
+ )) \
+)
+endif
# -----------------------------------------------------------------
# Install the ones we need for the configured product
@@ -104,6 +144,9 @@
$(sort $(foreach partition, $(filter $(IMAGES_TO_BUILD), $(_FLAG_PARTITIONS)), \
$(build_flag_summaries.$(partition)) \
$(aconfig_flag_summaries_protobuf.$(partition)) \
+ $(aconfig_storage_package_map.$(partition)) \
+ $(aconfig_storage_flag_map.$(partition)) \
+ $(aconfig_storage_flag_val.$(partition)) \
))
ALL_DEFAULT_INSTALLED_MODULES += $(required_flags_files)
@@ -119,5 +162,8 @@
$(foreach partition, $(_FLAG_PARTITIONS), \
$(eval build_flag_summaries.$(partition):=) \
$(eval aconfig_flag_summaries_protobuf.$(partition):=) \
+ $(eval aconfig_storage_package_map.$(partition):=) \
+ $(eval aconfig_storage_flag_map.$(partition):=) \
+ $(eval aconfig_storage_flag_val.$(partition):=) \
)
diff --git a/core/product.mk b/core/product.mk
index 5515a8a..2d22ebf 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -446,7 +446,6 @@
_product_list_vars += PRODUCT_AFDO_PROFILES
-_product_single_value_vars += PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API
_product_single_value_vars += PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE
_product_list_vars += PRODUCT_RELEASE_CONFIG_MAPS
diff --git a/core/soong_config.mk b/core/soong_config.mk
index b6ce2a7..7d03aa3 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -47,7 +47,6 @@
$(call add_json_str, Platform_version_known_codenames, $(PLATFORM_VERSION_KNOWN_CODENAMES))
$(call add_json_bool, Release_aidl_use_unfrozen, $(RELEASE_AIDL_USE_UNFROZEN))
-$(call add_json_bool, Release_expose_flagged_api, $(RELEASE_EXPOSE_FLAGGED_API))
$(call add_json_str, Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
@@ -396,8 +395,6 @@
$(call add_json_list, ProductPackages, $(sort $(PRODUCT_PACKAGES)))
$(call end_json_map)
-$(call add_json_bool, NextReleaseHideFlaggedApi, $(filter true,$(PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API)))
-
$(call add_json_bool, BuildFromSourceStub, $(findstring true,$(PRODUCT_BUILD_FROM_SOURCE_STUB) $(BUILD_FROM_SOURCE_STUB)))
$(call json_end)
diff --git a/core/tasks/fontchain_lint.mk b/core/tasks/fontchain_lint.mk
new file mode 100644
index 0000000..a4c396d
--- /dev/null
+++ b/core/tasks/fontchain_lint.mk
@@ -0,0 +1,43 @@
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Run sanity tests on fonts as part of checkbuild
+checkbuild: fontchain_lint
+
+FONTCHAIN_LINTER := $(HOST_OUT_EXECUTABLES)/fontchain_linter
+ifeq ($(MINIMAL_FONT_FOOTPRINT),true)
+CHECK_EMOJI := false
+else
+CHECK_EMOJI := true
+endif
+
+fontchain_lint_timestamp := $(call intermediates-dir-for,PACKAGING,fontchain_lint)/stamp
+
+.PHONY: fontchain_lint
+fontchain_lint: $(fontchain_lint_timestamp)
+
+fontchain_lint_deps := \
+ external/unicode/DerivedAge.txt \
+ external/unicode/emoji-data.txt \
+ external/unicode/emoji-sequences.txt \
+ external/unicode/emoji-variation-sequences.txt \
+ external/unicode/emoji-zwj-sequences.txt \
+ external/unicode/additions/emoji-data.txt \
+ external/unicode/additions/emoji-sequences.txt \
+ external/unicode/additions/emoji-zwj-sequences.txt \
+
+$(fontchain_lint_timestamp): $(FONTCHAIN_LINTER) $(TARGET_OUT)/etc/fonts.xml $(PRODUCT_OUT)/system.img $(fontchain_lint_deps)
+ @echo Running fontchain lint
+ $(FONTCHAIN_LINTER) $(TARGET_OUT) $(CHECK_EMOJI) external/unicode
+ touch $@
diff --git a/envsetup.sh b/envsetup.sh
index 5aa11c7..e180cf1 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -254,7 +254,7 @@
# Note: on windows/cygwin, ANDROID_LUNCH_BUILD_PATHS will contain spaces
# due to "C:\Program Files" being in the path.
- # Handle compat with the old ANDROID_BUILD_PATHS variable.
+ # Handle compat with the old ANDROID_BUILD_PATHS variable.
# TODO: Remove this after we think everyone has lunched again.
if [ -z "$ANDROID_LUNCH_BUILD_PATHS" -a -n "$ANDROID_BUILD_PATHS" ] ; then
ANDROID_LUNCH_BUILD_PATHS="$ANDROID_BUILD_PATHS"
@@ -1886,6 +1886,11 @@
>&2 echo "Couldn't locate the top of the tree. Try setting TOP."
return 1
fi
+ local ret=$?
+ if [[ ret -eq 0 && -z "${ANDROID_QUIET_BUILD:-}" && -n "${ANDROID_BUILD_BANNER}" ]]; then
+ echo "${ANDROID_BUILD_BANNER}"
+ fi
+ return $ret
)
function m()
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 6a101da9..0d88046 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -94,7 +94,6 @@
framework-graphics \
framework-minus-apex \
framework-minus-apex-install-dependencies \
- framework-nfc \
framework-res \
framework-sysconfig.xml \
fsck.erofs \
@@ -201,6 +200,7 @@
libui \
libusbhost \
libutils \
+ libvintf_jni \
libvulkan \
libwilhelm \
linker \
@@ -288,6 +288,13 @@
wifi.rc \
wm \
+# Add the crashrecovery module when it is released
+ifeq ($(RELEASE_CRASHRECOVERY_MODULE),true)
+ PRODUCT_PACKAGES += \
+ com.android.crashrecovery \
+
+endif
+
# These packages are not used on Android TV
ifneq ($(PRODUCT_IS_ATV),true)
PRODUCT_PACKAGES += \
@@ -302,6 +309,16 @@
endif
+# Check whether the build uses the NFC apex or the platform NFC stack
+ifeq ($(RELEASE_PACKAGE_NFC_STACK),NfcNci)
+ PRODUCT_PACKAGES += \
+ framework-nfc \
+ NfcNci
+else
+ PRODUCT_PACKAGES += \
+ com.android.nfcservices
+endif
+
# VINTF data for system image
PRODUCT_PACKAGES += \
system_manifest.xml \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 55fcf2f..2fd7209 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -50,7 +50,6 @@
PRODUCT_BOOT_JARS += \
framework-minus-apex \
framework-graphics \
- framework-nfc \
ext \
telephony-common \
voip-common \
@@ -88,6 +87,22 @@
com.android.virt:framework-virtualization \
com.android.wifi:framework-wifi \
+# Add the crashrecovery boot jar when the module is released
+ifeq ($(RELEASE_CRASHRECOVERY_MODULE),true)
+ PRODUCT_APEX_BOOT_JARS += \
+ com.android.crashrecovery:framework-crashrecovery \
+
+endif
+
+# Check whether the build uses the NFC apex or the platform NFC stack
+ifeq ($(RELEASE_PACKAGE_NFC_STACK),NfcNci)
+ PRODUCT_BOOT_JARS += \
+ framework-nfc
+else
+  PRODUCT_APEX_BOOT_JARS += \
+ com.android.nfcservices:framework-nfc
+endif
+
# TODO(b/308174306): Adjust this after multiple prebuilts version is supported.
# APEX boot jars that are not in prebuilt apexes.
# Keep the list sorted by module names and then library names.
@@ -109,6 +124,13 @@
com.android.permission:service-permission \
com.android.rkpd:service-rkp \
+# Add the crashrecovery system server jar when the module is released
+ifeq ($(RELEASE_CRASHRECOVERY_MODULE),true)
+ PRODUCT_APEX_SYSTEM_SERVER_JARS += \
+ com.android.crashrecovery:service-crashrecovery \
+
+endif
+
# Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
# art project.
ifneq (,$(wildcard art))
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 38efde4..19ec86d 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -103,6 +103,12 @@
libaudiopolicyengineconfigurable \
libpolicy-subsystem
+# Add all of the packages used to support older/upgrading devices
+# These can be removed as we drop support for the older API levels
+PRODUCT_PACKAGES += \
+ $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29) \
+ $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33) \
+ $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34)
# Include all zygote init scripts. "ro.zygote" will select one of them.
PRODUCT_COPY_FILES += \
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 3acf1e6..b5292d2 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -73,7 +73,6 @@
UserDictionaryProvider \
VpnDialogs \
vr \
- $(RELEASE_PACKAGE_NFC_STACK)
PRODUCT_SYSTEM_SERVER_APPS += \
diff --git a/target/product/mainline_sdk.mk b/target/product/mainline_sdk.mk
index cb23bc8..10bb0a0 100644
--- a/target/product/mainline_sdk.mk
+++ b/target/product/mainline_sdk.mk
@@ -17,6 +17,4 @@
PRODUCT_BRAND := Android
PRODUCT_DEVICE := mainline_sdk
-PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
-
PRODUCT_BUILD_FROM_SOURCE_STUB := true
\ No newline at end of file
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index b9ccad3..650f8e9 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -29,6 +29,4 @@
PRODUCT_BRAND := Android
PRODUCT_DEVICE := mainline_x86
-PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
-
PRODUCT_BUILD_FROM_SOURCE_STUB := true
\ No newline at end of file
diff --git a/target/product/security/nfc.pk8 b/target/product/security/nfc.pk8
new file mode 100644
index 0000000..4a5e1b7
--- /dev/null
+++ b/target/product/security/nfc.pk8
Binary files differ
diff --git a/target/product/security/nfc.x509.pem b/target/product/security/nfc.x509.pem
new file mode 100644
index 0000000..e6bff6a
--- /dev/null
+++ b/target/product/security/nfc.x509.pem
@@ -0,0 +1,29 @@
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIUC94q348hFaPm2jow3R84ZjNFc3EwDQYJKoZIhvcNAQELBQAwfDELMAkG
+A1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExFjAUBgNVBAcTDU1vdW50YWluIFZpZXcxFDAS
+BgNVBAoTC0dvb2dsZSBJbmMuMRAwDgYDVQQLEwdBbmRyb2lkMRgwFgYDVQQDDA9jb21fYW5kcm9p
+ZF9uZmMwIBcNMjMxMTAxMjEzNzE1WhgPMjA1MzExMDEyMTM3MTVaMHwxCzAJBgNVBAYTAlVTMRMw
+EQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRQwEgYDVQQKEwtHb29n
+bGUgSW5jLjEQMA4GA1UECxMHQW5kcm9pZDEYMBYGA1UEAwwPY29tX2FuZHJvaWRfbmZjMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEArpgwKyLDl8M3KRb1Fxs3P2mnt81sB3uGZs44R6C6
+CwFhiiACOmEQBcm79BUKBMrE9dUbyOL/GKluNzD026UsDE+N2wDQ8siTxaljDAwhZBpurhOu4uH8
+BKJOzoczAlJFMHpFIMCKQXwotMjT93BuhlSo024Q5QDd2j7Gajk21TkkyQNlBOiyEpKkrRPBuArw
+2knqhuX+nLYkJ5roANaJVDsiKMDG/mKnjwAndrgVbBiKaOdfuRd+pJleN3LUkAfYHHBqlOJnPGSI
+jfYK+9TjsIEYVEOb4SMI3CbWwHfOdEIBgz3IPqMtamEnbZHNlfVWURTNGAF2co+DF3TDGDEjraK4
+R5pXDk/W+4Ex77wQPCIT+d981zkbTpgsPXvZmsBzCYMw6tYksPj86fSVJUrJhlibDk4YHVFsF7OK
+arNf044yPhZ+WUIDqWJ6GB0GU8LWGbbe8iaP0ro9Q1DYgYc6buYWIcX81XZO+hHgWtUb2rNqIUsp
+/4DmT1vgz7TiMWcY7pjrHlNHtVf4jC+OU2c+p8u4XUGQxdIKGgZSoHldtAcnwqGuIpat9lS+gtVl
+vJUp8w3Z2gv4q/bBVZ3NNasA1d3HXVQUWiwszcjiVvoSRa/AlMVUGureGRbsiKsyHisYp9rxk1DB
+dPS9h7tMs/5rV6RM2nZfdfQr71zX9ieSoz0CAwEAAaNQME4wDAYDVR0TBAUwAwEB/zAdBgNVHQ4E
+FgQU9v9SL0QIU9fq7aB70/jqVBLmZJUwHwYDVR0jBBgwFoAU9v9SL0QIU9fq7aB70/jqVBLmZJUw
+DQYJKoZIhvcNAQELBQADggIBAExt2/NFt+2IhC5+/cgi8hzvwZKQyml1MQ9pjrkfQy0JGzGTOPDr
++NPuT5jh/SOfGzdBsGVs3hvK7hFvXsFEWwVQaDOkKhcruks+g7FxldhXC2z9iKgjNrOeUXoE7SiE
+zXA/p1KiBcRL3MSMbg/iQjQVxlJky4BDo39SoEgDeL9+i7L4cBwZQ2LBBLPIOdE7G/cG1Q6UE+KN
+/mnz0kk3+FIg0q4szXDxH+o2V4ZYHSOy8P6TBW8gEQ71RGOnh0wtaTIx2RD/zqJAi+BzWMLsC636
+TNMmqKassG4MH445ul2w0ZzOClJ4gkl1e7dtK7/Kes4kcLOI/i4JHLOcydEqum+t8gQtMYyGM1Kv
+mVpC3hEv2pYwFfhg8hg31MljZmLD761kLOLfw98N350h6dNdQ0jci/3rqbbjVinVQoQSVEzJcA9Q
+gQhRLKHiO7oRmht6ilRLFtGZd/PwIMWMNqksTfVM5frMIIZXdfew+efHIJ7X+ZgJu3tGWcbFYFte
+K/BbmPLnp3aAGg/wwU1dqwCANf53oUc4ZzqRm9eovlVsrFiRM/DGt2/t4ujorU6Uwwt2+n05QU7b
+7PXhc7bTP6adUWMNMxSNIPo6wHmsTb2pCg+K5LuNMFJzXcoI3uBW9Qu4M/tLRv4kRKZzphqUbX+e
+/5hW2myw2BvbdwWFrz6XBgkz
+-----END CERTIFICATE-----
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
index 7b58e94..8d93261 100644
--- a/tools/aconfig/Cargo.toml
+++ b/tools/aconfig/Cargo.toml
@@ -1,22 +1,7 @@
-[package]
-name = "aconfig"
-version = "0.1.0"
-edition = "2021"
-build = "build.rs"
+[workspace]
-[features]
-default = ["cargo"]
-cargo = []
-
-[dependencies]
-anyhow = "1.0.69"
-clap = { version = "4.1.8", features = ["derive"] }
-itertools = "0.10.5"
-paste = "1.0.11"
-protobuf = "3.2.0"
-serde = { version = "1.0.152", features = ["derive"] }
-serde_json = "1.0.93"
-tinytemplate = "1.2.1"
-
-[build-dependencies]
-protobuf-codegen = "3.2.0"
+members = [
+ "aconfig",
+ "aconfig_protos",
+ "printflags"
+]
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/aconfig/Android.bp
similarity index 85%
rename from tools/aconfig/Android.bp
rename to tools/aconfig/aconfig/Android.bp
index d5b5b8f..3be456c 100644
--- a/tools/aconfig/Android.bp
+++ b/tools/aconfig/aconfig/Android.bp
@@ -2,51 +2,6 @@
default_applicable_licenses: ["Android-Apache-2.0"],
}
-// proto libraries for consumers of `aconfig dump --format=protobuf` output
-
-java_library {
- name: "libaconfig_java_proto_lite",
- host_supported: true,
- srcs: ["protos/aconfig.proto"],
- static_libs: ["libprotobuf-java-lite"],
- proto: {
- type: "lite",
- },
- sdk_version: "current",
- min_sdk_version: "UpsideDownCake",
- apex_available: [
- "com.android.configinfrastructure",
- "//apex_available:platform",
- ]
-}
-
-java_library_host {
- name: "libaconfig_java_proto_full",
- srcs: ["protos/aconfig.proto"],
- static_libs: ["libprotobuf-java-full"],
- proto: {
- type: "full",
- },
-}
-
-python_library_host {
- name: "libaconfig_python_proto",
- srcs: ["protos/aconfig.proto"],
- proto: {
- canonical_path_from_root: false,
- },
-}
-
-// host binary: aconfig
-
-rust_protobuf {
- name: "libaconfig_protos",
- protos: ["protos/aconfig.proto"],
- crate_name: "aconfig_protos",
- source_stem: "aconfig_protos",
- host_supported: true,
-}
-
rust_defaults {
name: "aconfig.defaults",
edition: "2021",
@@ -63,9 +18,6 @@
"libserde_json",
"libtinytemplate",
],
- proc_macros: [
- "libpaste",
- ]
}
rust_binary_host {
diff --git a/tools/aconfig/aconfig/Cargo.toml b/tools/aconfig/aconfig/Cargo.toml
new file mode 100644
index 0000000..01ad8c6
--- /dev/null
+++ b/tools/aconfig/aconfig/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "aconfig"
+version = "0.1.0"
+edition = "2021"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
+anyhow = "1.0.69"
+clap = { version = "4.1.8", features = ["derive"] }
+itertools = "0.10.5"
+protobuf = "3.2.0"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
+tinytemplate = "1.2.1"
+aconfig_protos = { path = "../aconfig_protos" }
diff --git a/tools/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs
similarity index 99%
rename from tools/aconfig/src/codegen/cpp.rs
rename to tools/aconfig/aconfig/src/codegen/cpp.rs
index 1279d8e..cd71b10 100644
--- a/tools/aconfig/src/codegen/cpp.rs
+++ b/tools/aconfig/aconfig/src/codegen/cpp.rs
@@ -19,10 +19,11 @@
use std::path::PathBuf;
use tinytemplate::TinyTemplate;
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
+
use crate::codegen;
use crate::codegen::CodegenMode;
use crate::commands::OutputFile;
-use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
pub fn generate_cpp_code<I>(
package: &str,
@@ -136,7 +137,7 @@
#[cfg(test)]
mod tests {
use super::*;
- use crate::protos::ProtoParsedFlags;
+ use aconfig_protos::ProtoParsedFlags;
use std::collections::HashMap;
const EXPORTED_PROD_HEADER_EXPECTED: &str = r#"
diff --git a/tools/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
similarity index 99%
rename from tools/aconfig/src/codegen/java.rs
rename to tools/aconfig/aconfig/src/codegen/java.rs
index 78e892b..7ce1d51 100644
--- a/tools/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -20,10 +20,11 @@
use std::path::PathBuf;
use tinytemplate::TinyTemplate;
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
+
use crate::codegen;
use crate::codegen::CodegenMode;
use crate::commands::OutputFile;
-use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
pub fn generate_java_code<I>(
package: &str,
diff --git a/tools/aconfig/src/codegen/mod.rs b/tools/aconfig/aconfig/src/codegen/mod.rs
similarity index 86%
rename from tools/aconfig/src/codegen/mod.rs
rename to tools/aconfig/aconfig/src/codegen/mod.rs
index 64ffa8b..7b2336f 100644
--- a/tools/aconfig/src/codegen/mod.rs
+++ b/tools/aconfig/aconfig/src/codegen/mod.rs
@@ -20,32 +20,7 @@
use anyhow::{ensure, Result};
use clap::ValueEnum;
-
-pub fn is_valid_name_ident(s: &str) -> bool {
- // Identifiers must match [a-z][a-z0-9_]*, except consecutive underscores are not allowed
- if s.contains("__") {
- return false;
- }
- let mut chars = s.chars();
- let Some(first) = chars.next() else {
- return false;
- };
- if !first.is_ascii_lowercase() {
- return false;
- }
- chars.all(|ch| ch.is_ascii_lowercase() || ch.is_ascii_digit() || ch == '_')
-}
-
-pub fn is_valid_package_ident(s: &str) -> bool {
- if !s.contains('.') {
- return false;
- }
- s.split('.').all(is_valid_name_ident)
-}
-
-pub fn is_valid_container_ident(s: &str) -> bool {
- s.split('.').all(is_valid_name_ident)
-}
+use aconfig_protos::{is_valid_name_ident, is_valid_package_ident};
pub fn create_device_config_ident(package: &str, flag_name: &str) -> Result<String> {
ensure!(is_valid_package_ident(package), "bad package");
@@ -75,6 +50,7 @@
#[cfg(test)]
mod tests {
use super::*;
+ use aconfig_protos::is_valid_container_ident;
#[test]
fn test_is_valid_name_ident() {
diff --git a/tools/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs
similarity index 99%
rename from tools/aconfig/src/codegen/rust.rs
rename to tools/aconfig/aconfig/src/codegen/rust.rs
index 8a88ffe..33c3d37 100644
--- a/tools/aconfig/src/codegen/rust.rs
+++ b/tools/aconfig/aconfig/src/codegen/rust.rs
@@ -18,10 +18,11 @@
use serde::Serialize;
use tinytemplate::TinyTemplate;
+use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
+
use crate::codegen;
use crate::codegen::CodegenMode;
use crate::commands::OutputFile;
-use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag};
pub fn generate_rust_code<I>(
package: &str,
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
similarity index 95%
rename from tools/aconfig/src/commands.rs
rename to tools/aconfig/aconfig/src/commands.rs
index f7a6417..93bc436 100644
--- a/tools/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -26,11 +26,12 @@
use crate::codegen::rust::generate_rust_code;
use crate::codegen::CodegenMode;
use crate::dump::{DumpFormat, DumpPredicate};
-use crate::protos::{
+use aconfig_protos::{
ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag,
ProtoParsedFlags, ProtoTracepoint,
};
-use crate::storage::generate_storage_files;
+use crate::storage::generate_storage_file;
+use crate::storage::StorageFileSelection;
pub struct Input {
pub source: String,
@@ -43,7 +44,7 @@
self.reader
.read_to_end(&mut buffer)
.with_context(|| format!("failed to read {}", self.source))?;
- crate::protos::parsed_flags::try_from_binary_proto(&buffer)
+ aconfig_protos::parsed_flags::try_from_binary_proto(&buffer)
.with_context(|| self.error_context())
}
@@ -76,7 +77,7 @@
.read_to_string(&mut contents)
.with_context(|| format!("failed to read {}", input.source))?;
- let flag_declarations = crate::protos::flag_declarations::try_from_text_proto(&contents)
+ let flag_declarations = aconfig_protos::flag_declarations::try_from_text_proto(&contents)
.with_context(|| input.error_context())?;
ensure!(
package == flag_declarations.package(),
@@ -95,7 +96,7 @@
);
}
for mut flag_declaration in flag_declarations.flag.into_iter() {
- crate::protos::flag_declaration::verify_fields(&flag_declaration)
+ aconfig_protos::flag_declaration::verify_fields(&flag_declaration)
.with_context(|| input.error_context())?;
// create ParsedFlag using FlagDeclaration and default values
@@ -129,7 +130,7 @@
parsed_flag.metadata = Some(metadata).into();
// verify ParsedFlag looks reasonable
- crate::protos::parsed_flag::verify_fields(&parsed_flag)?;
+ aconfig_protos::parsed_flag::verify_fields(&parsed_flag)?;
// verify ParsedFlag can be added
ensure!(
@@ -150,10 +151,10 @@
.reader
.read_to_string(&mut contents)
.with_context(|| format!("failed to read {}", input.source))?;
- let flag_values = crate::protos::flag_values::try_from_text_proto(&contents)
+ let flag_values = aconfig_protos::flag_values::try_from_text_proto(&contents)
.with_context(|| input.error_context())?;
for flag_value in flag_values.flag_value.into_iter() {
- crate::protos::flag_value::verify_fields(&flag_value)
+ aconfig_protos::flag_value::verify_fields(&flag_value)
.with_context(|| input.error_context())?;
let Some(parsed_flag) = parsed_flags
@@ -183,8 +184,8 @@
}
// Create a sorted parsed_flags
- crate::protos::parsed_flags::sort_parsed_flags(&mut parsed_flags);
- crate::protos::parsed_flags::verify_fields(&parsed_flags)?;
+ aconfig_protos::parsed_flags::sort_parsed_flags(&mut parsed_flags);
+ aconfig_protos::parsed_flags::verify_fields(&parsed_flags)?;
let mut output = Vec::new();
parsed_flags.write_to_vec(&mut output)?;
Ok(output)
@@ -223,7 +224,7 @@
generate_rust_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
}
-pub fn create_storage(caches: Vec<Input>, container: &str) -> Result<Vec<OutputFile>> {
+pub fn create_storage(caches: Vec<Input>, container: &str, file: &StorageFileSelection) -> Result<Vec<u8>> {
let parsed_flags_vec: Vec<ProtoParsedFlags> = caches
.into_iter()
.map(|mut input| input.try_parse_flags())
@@ -231,7 +232,7 @@
.into_iter()
.filter(|pfs| find_unique_container(pfs) == Some(container))
.collect();
- generate_storage_files(container, parsed_flags_vec.iter())
+ generate_storage_file(container, parsed_flags_vec.iter(), file)
}
pub fn create_device_config_defaults(mut input: Input) -> Result<Vec<u8>> {
@@ -286,7 +287,7 @@
let individually_parsed_flags: Result<Vec<ProtoParsedFlags>> =
input.iter_mut().map(|i| i.try_parse_flags()).collect();
let parsed_flags: ProtoParsedFlags =
- crate::protos::parsed_flags::merge(individually_parsed_flags?, dedup)?;
+ aconfig_protos::parsed_flags::merge(individually_parsed_flags?, dedup)?;
let filters: Vec<Box<DumpPredicate>> = if filters.is_empty() {
vec![Box::new(|_| true)]
} else {
@@ -385,16 +386,16 @@
#[cfg(test)]
mod tests {
use super::*;
- use crate::protos::ProtoFlagPurpose;
+ use aconfig_protos::ProtoFlagPurpose;
#[test]
fn test_parse_flags() {
let parsed_flags = crate::test::parse_test_flags(); // calls parse_flags
- crate::protos::parsed_flags::verify_fields(&parsed_flags).unwrap();
+ aconfig_protos::parsed_flags::verify_fields(&parsed_flags).unwrap();
let enabled_ro =
parsed_flags.parsed_flag.iter().find(|pf| pf.name() == "enabled_ro").unwrap();
- assert!(crate::protos::parsed_flag::verify_fields(enabled_ro).is_ok());
+ assert!(aconfig_protos::parsed_flag::verify_fields(enabled_ro).is_ok());
assert_eq!("com.android.aconfig.test", enabled_ro.package());
assert_eq!("enabled_ro", enabled_ro.name());
assert_eq!("This flag is ENABLED + READ_ONLY", enabled_ro.description());
@@ -461,7 +462,7 @@
)
.unwrap();
let parsed_flags =
- crate::protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
+ aconfig_protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
assert_eq!(1, parsed_flags.parsed_flag.len());
let parsed_flag = parsed_flags.parsed_flag.first().unwrap();
assert_eq!(ProtoFlagState::DISABLED, parsed_flag.state());
@@ -601,7 +602,7 @@
)
.unwrap();
let parsed_flags =
- crate::protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
+ aconfig_protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
assert_eq!(1, parsed_flags.parsed_flag.len());
let parsed_flag = parsed_flags.parsed_flag.first().unwrap();
assert_eq!(ProtoFlagPurpose::PURPOSE_FEATURE, parsed_flag.metadata.purpose());
diff --git a/tools/aconfig/src/dump.rs b/tools/aconfig/aconfig/src/dump.rs
similarity index 98%
rename from tools/aconfig/src/dump.rs
rename to tools/aconfig/aconfig/src/dump.rs
index 37368ee..12352f9 100644
--- a/tools/aconfig/src/dump.rs
+++ b/tools/aconfig/aconfig/src/dump.rs
@@ -14,10 +14,10 @@
* limitations under the License.
*/
-use crate::protos::{
+use aconfig_protos::{
ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoTracepoint,
};
-use crate::protos::{ProtoParsedFlag, ProtoParsedFlags};
+use aconfig_protos::{ProtoParsedFlag, ProtoParsedFlags};
use anyhow::{anyhow, bail, Context, Result};
use protobuf::Message;
@@ -197,7 +197,7 @@
#[cfg(test)]
mod tests {
use super::*;
- use crate::protos::ProtoParsedFlags;
+ use aconfig_protos::ProtoParsedFlags;
use crate::test::parse_test_flags;
use protobuf::Message;
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs
similarity index 93%
rename from tools/aconfig/src/main.rs
rename to tools/aconfig/aconfig/src/main.rs
index 7d719f0..30a7e9d 100644
--- a/tools/aconfig/src/main.rs
+++ b/tools/aconfig/aconfig/src/main.rs
@@ -27,11 +27,11 @@
mod codegen;
mod commands;
mod dump;
-mod protos;
mod storage;
use codegen::CodegenMode;
use dump::DumpFormat;
+use storage::StorageFileSelection;
#[cfg(test)]
mod test;
@@ -56,8 +56,8 @@
.arg(
Arg::new("default-permission")
.long("default-permission")
- .value_parser(protos::flag_permission::parse_from_str)
- .default_value(protos::flag_permission::to_string(
+ .value_parser(aconfig_protos::flag_permission::parse_from_str)
+ .default_value(aconfig_protos::flag_permission::to_string(
&commands::DEFAULT_FLAG_PERMISSION,
)),
)
@@ -135,6 +135,11 @@
.required(true)
.help("The target container for the generated storage file."),
)
+ .arg(
+ Arg::new("file")
+ .long("file")
+ .value_parser(|s: &str| StorageFileSelection::try_from(s)),
+ )
.arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
.arg(Arg::new("out").long("out").required(true)),
)
@@ -209,7 +214,7 @@
let declarations = open_zero_or_more_files(sub_matches, "declarations")?;
let values = open_zero_or_more_files(sub_matches, "values")?;
let default_permission =
- get_required_arg::<protos::ProtoFlagPermission>(sub_matches, "default-permission")?;
+ get_required_arg::<aconfig_protos::ProtoFlagPermission>(sub_matches, "default-permission")?;
let output = commands::parse_flags(
package,
container,
@@ -278,14 +283,14 @@
write_output_to_file_or_stdout(path, &output)?;
}
Some(("create-storage", sub_matches)) => {
+ let file = get_required_arg::<StorageFileSelection>(sub_matches, "file")
+ .context("Invalid storage file selection")?;
let cache = open_zero_or_more_files(sub_matches, "cache")?;
let container = get_required_arg::<String>(sub_matches, "container")?;
- let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
- let generated_files = commands::create_storage(cache, container)
+ let path = get_required_arg::<String>(sub_matches, "out")?;
+ let output = commands::create_storage(cache, container, file)
.context("failed to create storage files")?;
- generated_files
- .iter()
- .try_for_each(|file| write_output_file_realtive_to_dir(&dir, file))?;
+ write_output_to_file_or_stdout(path, &output)?;
}
_ => unreachable!(),
}
diff --git a/tools/aconfig/src/storage/flag_table.rs b/tools/aconfig/aconfig/src/storage/flag_table.rs
similarity index 98%
rename from tools/aconfig/src/storage/flag_table.rs
rename to tools/aconfig/aconfig/src/storage/flag_table.rs
index 595217e..3545700 100644
--- a/tools/aconfig/src/storage/flag_table.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_table.rs
@@ -295,8 +295,6 @@
};
assert_eq!(header, &expected_header);
- println!("{:?}", &flag_table.as_ref().unwrap().nodes);
-
let buckets: &Vec<Option<u32>> = &flag_table.as_ref().unwrap().buckets;
let expected_bucket: Vec<Option<u32>> = vec![
Some(98),
@@ -338,9 +336,7 @@
#[test]
// this test point locks down the table serialization
fn test_serialization() {
- let flag_table = create_test_flag_table();
- assert!(flag_table.is_ok());
- let flag_table = flag_table.unwrap();
+ let flag_table = create_test_flag_table().unwrap();
let header: &FlagTableHeader = &flag_table.header;
let reinterpreted_header = FlagTableHeader::from_bytes(&header.as_bytes());
diff --git a/tools/aconfig/aconfig/src/storage/flag_value.rs b/tools/aconfig/aconfig/src/storage/flag_value.rs
new file mode 100644
index 0000000..3c5bb17
--- /dev/null
+++ b/tools/aconfig/aconfig/src/storage/flag_value.rs
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::commands::assign_flag_ids;
+use aconfig_protos::ProtoFlagState;
+use crate::storage::{self, FlagPackage};
+use anyhow::{anyhow, Result};
+
+#[derive(PartialEq, Debug)]
+pub struct FlagValueHeader {
+ pub version: u32,
+ pub container: String,
+ pub file_size: u32,
+ pub num_flags: u32,
+ pub boolean_value_offset: u32,
+}
+
+impl FlagValueHeader {
+ fn new(container: &str, num_flags: u32) -> Self {
+ Self {
+ version: storage::FILE_VERSION,
+ container: String::from(container),
+ file_size: 0,
+ num_flags,
+ boolean_value_offset: 0,
+ }
+ }
+
+ fn as_bytes(&self) -> Vec<u8> {
+ let mut result = Vec::new();
+ result.extend_from_slice(&self.version.to_le_bytes());
+ let container_bytes = self.container.as_bytes();
+ result.extend_from_slice(&(container_bytes.len() as u32).to_le_bytes());
+ result.extend_from_slice(container_bytes);
+ result.extend_from_slice(&self.file_size.to_le_bytes());
+ result.extend_from_slice(&self.num_flags.to_le_bytes());
+ result.extend_from_slice(&self.boolean_value_offset.to_le_bytes());
+ result
+ }
+}
+
+#[derive(PartialEq, Debug)]
+pub struct FlagValueList {
+ pub header: FlagValueHeader,
+ pub booleans: Vec<bool>,
+}
+
+impl FlagValueList {
+ pub fn new(container: &str, packages: &[FlagPackage]) -> Result<Self> {
+ // create list
+ let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
+
+ let mut list = Self {
+ header: FlagValueHeader::new(container, num_flags),
+ booleans: vec![false; num_flags as usize],
+ };
+
+ for pkg in packages.iter() {
+ let start_offset = pkg.boolean_offset as usize;
+ let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?;
+ for pf in pkg.boolean_flags.iter() {
+ let fid = flag_ids
+ .get(pf.name())
+ .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?;
+
+ list.booleans[start_offset + (*fid as usize)] =
+ pf.state() == ProtoFlagState::ENABLED;
+ }
+ }
+
+ // initialize all header fields
+ list.header.boolean_value_offset = list.header.as_bytes().len() as u32;
+ list.header.file_size = list.header.boolean_value_offset + num_flags;
+
+ Ok(list)
+ }
+
+ pub fn as_bytes(&self) -> Vec<u8> {
+ [
+ self.header.as_bytes(),
+ self.booleans
+ .iter()
+ .map(|&v| u8::from(v).to_le_bytes())
+ .collect::<Vec<_>>()
+ .concat(),
+ ]
+ .concat()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::storage::{
+ group_flags_by_package, tests::parse_all_test_flags, tests::read_str_from_bytes,
+ tests::read_u32_from_bytes, tests::read_u8_from_bytes,
+ };
+
+ impl FlagValueHeader {
+ // test only method to deserialize back into the header struct
+ fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ let mut head = 0;
+ Ok(Self {
+ version: read_u32_from_bytes(bytes, &mut head)?,
+ container: read_str_from_bytes(bytes, &mut head)?,
+ file_size: read_u32_from_bytes(bytes, &mut head)?,
+ num_flags: read_u32_from_bytes(bytes, &mut head)?,
+ boolean_value_offset: read_u32_from_bytes(bytes, &mut head)?,
+ })
+ }
+ }
+
+ impl FlagValueList {
+ // test only method to deserialize back into the flag value struct
+ fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ let header = FlagValueHeader::from_bytes(bytes)?;
+ let num_flags = header.num_flags;
+ let mut head = header.as_bytes().len();
+ let booleans = (0..num_flags)
+ .map(|_| read_u8_from_bytes(bytes, &mut head).unwrap() == 1)
+ .collect();
+ let list = Self { header, booleans };
+ Ok(list)
+ }
+ }
+
+ pub fn create_test_flag_value_list() -> Result<FlagValueList> {
+ let caches = parse_all_test_flags();
+ let packages = group_flags_by_package(caches.iter());
+ FlagValueList::new("system", &packages)
+ }
+
+ #[test]
+ // this test point locks down the flag value creation and each field
+ fn test_list_contents() {
+ let flag_value_list = create_test_flag_value_list();
+ assert!(flag_value_list.is_ok());
+
+ let header: &FlagValueHeader = &flag_value_list.as_ref().unwrap().header;
+ let expected_header = FlagValueHeader {
+ version: storage::FILE_VERSION,
+ container: String::from("system"),
+ file_size: 34,
+ num_flags: 8,
+ boolean_value_offset: 26,
+ };
+ assert_eq!(header, &expected_header);
+
+ let booleans: &Vec<bool> = &flag_value_list.as_ref().unwrap().booleans;
+ let expected_booleans: Vec<bool> = vec![false; header.num_flags as usize];
+ assert_eq!(booleans, &expected_booleans);
+ }
+
+ #[test]
+ // this test point locks down the value list serialization
+ fn test_serialization() {
+ let flag_value_list = create_test_flag_value_list().unwrap();
+
+ let header: &FlagValueHeader = &flag_value_list.header;
+ let reinterpreted_header = FlagValueHeader::from_bytes(&header.as_bytes());
+ assert!(reinterpreted_header.is_ok());
+ assert_eq!(header, &reinterpreted_header.unwrap());
+
+ let reinterpreted_value_list = FlagValueList::from_bytes(&flag_value_list.as_bytes());
+ assert!(reinterpreted_value_list.is_ok());
+ assert_eq!(&flag_value_list, &reinterpreted_value_list.unwrap());
+ }
+}
diff --git a/tools/aconfig/src/storage/mod.rs b/tools/aconfig/aconfig/src/storage/mod.rs
similarity index 82%
rename from tools/aconfig/src/storage/mod.rs
rename to tools/aconfig/aconfig/src/storage/mod.rs
index a28fccd..4f2dc81 100644
--- a/tools/aconfig/src/storage/mod.rs
+++ b/tools/aconfig/aconfig/src/storage/mod.rs
@@ -15,16 +15,37 @@
*/
pub mod flag_table;
+pub mod flag_value;
pub mod package_table;
use anyhow::{anyhow, Result};
use std::collections::{hash_map::DefaultHasher, HashMap, HashSet};
use std::hash::{Hash, Hasher};
-use std::path::PathBuf;
-use crate::commands::OutputFile;
-use crate::protos::{ProtoParsedFlag, ProtoParsedFlags};
-use crate::storage::{flag_table::FlagTable, package_table::PackageTable};
+use aconfig_protos::{ProtoParsedFlag, ProtoParsedFlags};
+use crate::storage::{
+ flag_table::FlagTable, flag_value::FlagValueList, package_table::PackageTable,
+};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum StorageFileSelection {
+ PackageMap,
+ FlagMap,
+ FlagVal,
+}
+
+impl TryFrom<&str> for StorageFileSelection {
+ type Error = anyhow::Error;
+
+ fn try_from(value: &str) -> std::result::Result<Self, Self::Error> {
+ match value {
+ "package_map" => Ok(Self::PackageMap),
+ "flag_map" => Ok(Self::FlagMap),
+ "flag_val" => Ok(Self::FlagVal),
+ _ => Err(anyhow!("Invalid storage file to create")),
+ }
+ }
+}
pub const FILE_VERSION: u32 = 1;
@@ -107,28 +128,30 @@
packages
}
-pub fn generate_storage_files<'a, I>(
+pub fn generate_storage_file<'a, I>(
container: &str,
parsed_flags_vec_iter: I,
-) -> Result<Vec<OutputFile>>
+ file: &StorageFileSelection,
+) -> Result<Vec<u8>>
where
I: Iterator<Item = &'a ProtoParsedFlags>,
{
let packages = group_flags_by_package(parsed_flags_vec_iter);
- // create and serialize package map
- let package_table = PackageTable::new(container, &packages)?;
- let package_table_file_path = PathBuf::from("package.map");
- let package_table_file =
- OutputFile { contents: package_table.as_bytes(), path: package_table_file_path };
-
- // create and serialize flag map
- let flag_table = FlagTable::new(container, &packages)?;
- let flag_table_file_path = PathBuf::from("flag.map");
- let flag_table_file =
- OutputFile { contents: flag_table.as_bytes(), path: flag_table_file_path };
-
- Ok(vec![package_table_file, flag_table_file])
+ match file {
+ StorageFileSelection::PackageMap => {
+ let package_table = PackageTable::new(container, &packages)?;
+ Ok(package_table.as_bytes())
+ }
+ StorageFileSelection::FlagMap => {
+ let flag_table = FlagTable::new(container, &packages)?;
+ Ok(flag_table.as_bytes())
+ }
+ StorageFileSelection::FlagVal => {
+ let flag_value = FlagValueList::new(container, &packages)?;
+ Ok(flag_value.as_bytes())
+ }
+ }
}
#[cfg(test)]
@@ -136,6 +159,13 @@
use super::*;
use crate::Input;
+ /// Read and parse bytes as u8
+ pub fn read_u8_from_bytes(buf: &[u8], head: &mut usize) -> Result<u8> {
+ let val = u8::from_le_bytes(buf[*head..*head + 1].try_into()?);
+ *head += 1;
+ Ok(val)
+ }
+
/// Read and parse bytes as u16
pub fn read_u16_from_bytes(buf: &[u8], head: &mut usize) -> Result<u16> {
let val = u16::from_le_bytes(buf[*head..*head + 2].try_into()?);
@@ -191,7 +221,7 @@
crate::commands::DEFAULT_FLAG_PERMISSION,
)
.unwrap();
- crate::protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
+ aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
})
.collect()
}
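
The mod.rs change above replaces generate_storage_files, which returned OutputFile entries for package.map and flag.map, with generate_storage_file, which returns the raw bytes for whichever StorageFileSelection the caller asks for. A minimal caller sketch follows, assuming it lives inside the aconfig crate; the function name write_storage_file, the output path argument, and the surrounding plumbing are hypothetical, while generate_storage_file and StorageFileSelection come from the diff.

    // Hypothetical caller: map a CLI string such as "package_map", "flag_map"
    // or "flag_val" onto StorageFileSelection and write the bytes to disk.
    use crate::storage::{generate_storage_file, StorageFileSelection};
    use aconfig_protos::ProtoParsedFlags;
    use anyhow::Result;

    fn write_storage_file(
        container: &str,
        file_kind: &str,
        parsed_flags: &[ProtoParsedFlags],
        out_path: &std::path::Path,
    ) -> Result<()> {
        // "package_map" | "flag_map" | "flag_val"; anything else is rejected
        let selection = StorageFileSelection::try_from(file_kind)?;
        let bytes = generate_storage_file(container, parsed_flags.iter(), &selection)?;
        std::fs::write(out_path, bytes)?;
        Ok(())
    }
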
diff --git a/tools/aconfig/src/storage/package_table.rs b/tools/aconfig/aconfig/src/storage/package_table.rs
similarity index 98%
rename from tools/aconfig/src/storage/package_table.rs
rename to tools/aconfig/aconfig/src/storage/package_table.rs
index 0ce1349..4036234 100644
--- a/tools/aconfig/src/storage/package_table.rs
+++ b/tools/aconfig/aconfig/src/storage/package_table.rs
@@ -277,9 +277,7 @@
#[test]
// this test point locks down the table serialization
fn test_serialization() {
- let package_table = create_test_package_table();
- assert!(package_table.is_ok());
- let package_table = package_table.unwrap();
+ let package_table = create_test_package_table().unwrap();
let header: &PackageTableHeader = &package_table.header;
let reinterpreted_header = PackageTableHeader::from_bytes(&header.as_bytes());
diff --git a/tools/aconfig/src/test.rs b/tools/aconfig/aconfig/src/test.rs
similarity index 97%
rename from tools/aconfig/src/test.rs
rename to tools/aconfig/aconfig/src/test.rs
index cbb95b8..7b5318d 100644
--- a/tools/aconfig/src/test.rs
+++ b/tools/aconfig/aconfig/src/test.rs
@@ -17,7 +17,7 @@
#[cfg(test)]
pub mod test_utils {
use crate::commands::Input;
- use crate::protos::ProtoParsedFlags;
+ use aconfig_protos::ProtoParsedFlags;
use itertools;
pub const TEST_PACKAGE: &str = "com.android.aconfig.test";
@@ -265,7 +265,7 @@
crate::commands::DEFAULT_FLAG_PERMISSION,
)
.unwrap();
- crate::protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
+ aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
}
pub fn parse_test_flags() -> ProtoParsedFlags {
@@ -289,7 +289,7 @@
crate::commands::DEFAULT_FLAG_PERMISSION,
)
.unwrap();
- crate::protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
+ aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
}
pub fn first_significant_code_diff(a: &str, b: &str) -> Option<String> {
diff --git a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FakeFeatureFlagsImpl.java.template
similarity index 100%
rename from tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
rename to tools/aconfig/aconfig/templates/FakeFeatureFlagsImpl.java.template
diff --git a/tools/aconfig/templates/FeatureFlags.java.template b/tools/aconfig/aconfig/templates/FeatureFlags.java.template
similarity index 100%
rename from tools/aconfig/templates/FeatureFlags.java.template
rename to tools/aconfig/aconfig/templates/FeatureFlags.java.template
diff --git a/tools/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
similarity index 100%
rename from tools/aconfig/templates/FeatureFlagsImpl.java.template
rename to tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
diff --git a/tools/aconfig/templates/Flags.java.template b/tools/aconfig/aconfig/templates/Flags.java.template
similarity index 100%
rename from tools/aconfig/templates/Flags.java.template
rename to tools/aconfig/aconfig/templates/Flags.java.template
diff --git a/tools/aconfig/templates/cpp_exported_header.template b/tools/aconfig/aconfig/templates/cpp_exported_header.template
similarity index 100%
rename from tools/aconfig/templates/cpp_exported_header.template
rename to tools/aconfig/aconfig/templates/cpp_exported_header.template
diff --git a/tools/aconfig/templates/cpp_source_file.template b/tools/aconfig/aconfig/templates/cpp_source_file.template
similarity index 100%
rename from tools/aconfig/templates/cpp_source_file.template
rename to tools/aconfig/aconfig/templates/cpp_source_file.template
diff --git a/tools/aconfig/templates/rust.template b/tools/aconfig/aconfig/templates/rust.template
similarity index 100%
rename from tools/aconfig/templates/rust.template
rename to tools/aconfig/aconfig/templates/rust.template
diff --git a/tools/aconfig/templates/rust_test.template b/tools/aconfig/aconfig/templates/rust_test.template
similarity index 100%
rename from tools/aconfig/templates/rust_test.template
rename to tools/aconfig/aconfig/templates/rust_test.template
diff --git a/tools/aconfig/tests/AconfigHostTest.java b/tools/aconfig/aconfig/tests/AconfigHostTest.java
similarity index 100%
rename from tools/aconfig/tests/AconfigHostTest.java
rename to tools/aconfig/aconfig/tests/AconfigHostTest.java
diff --git a/tools/aconfig/tests/AconfigTest.java b/tools/aconfig/aconfig/tests/AconfigTest.java
similarity index 100%
rename from tools/aconfig/tests/AconfigTest.java
rename to tools/aconfig/aconfig/tests/AconfigTest.java
diff --git a/tools/aconfig/tests/AndroidManifest.xml b/tools/aconfig/aconfig/tests/AndroidManifest.xml
similarity index 100%
rename from tools/aconfig/tests/AndroidManifest.xml
rename to tools/aconfig/aconfig/tests/AndroidManifest.xml
diff --git a/tools/aconfig/tests/aconfig_exported_mode_test.cpp b/tools/aconfig/aconfig/tests/aconfig_exported_mode_test.cpp
similarity index 100%
rename from tools/aconfig/tests/aconfig_exported_mode_test.cpp
rename to tools/aconfig/aconfig/tests/aconfig_exported_mode_test.cpp
diff --git a/tools/aconfig/tests/aconfig_exported_mode_test.rs b/tools/aconfig/aconfig/tests/aconfig_exported_mode_test.rs
similarity index 100%
rename from tools/aconfig/tests/aconfig_exported_mode_test.rs
rename to tools/aconfig/aconfig/tests/aconfig_exported_mode_test.rs
diff --git a/tools/aconfig/tests/aconfig_force_read_only_mode_test.cpp b/tools/aconfig/aconfig/tests/aconfig_force_read_only_mode_test.cpp
similarity index 100%
rename from tools/aconfig/tests/aconfig_force_read_only_mode_test.cpp
rename to tools/aconfig/aconfig/tests/aconfig_force_read_only_mode_test.cpp
diff --git a/tools/aconfig/tests/aconfig_force_read_only_mode_test.rs b/tools/aconfig/aconfig/tests/aconfig_force_read_only_mode_test.rs
similarity index 100%
rename from tools/aconfig/tests/aconfig_force_read_only_mode_test.rs
rename to tools/aconfig/aconfig/tests/aconfig_force_read_only_mode_test.rs
diff --git a/tools/aconfig/tests/aconfig_prod_mode_test.rs b/tools/aconfig/aconfig/tests/aconfig_prod_mode_test.rs
similarity index 100%
rename from tools/aconfig/tests/aconfig_prod_mode_test.rs
rename to tools/aconfig/aconfig/tests/aconfig_prod_mode_test.rs
diff --git a/tools/aconfig/tests/aconfig_test.cpp b/tools/aconfig/aconfig/tests/aconfig_test.cpp
similarity index 100%
rename from tools/aconfig/tests/aconfig_test.cpp
rename to tools/aconfig/aconfig/tests/aconfig_test.cpp
diff --git a/tools/aconfig/tests/aconfig_test_mode_test.rs b/tools/aconfig/aconfig/tests/aconfig_test_mode_test.rs
similarity index 100%
rename from tools/aconfig/tests/aconfig_test_mode_test.rs
rename to tools/aconfig/aconfig/tests/aconfig_test_mode_test.rs
diff --git a/tools/aconfig/tests/aconfig_test_test_variant.cpp b/tools/aconfig/aconfig/tests/aconfig_test_test_variant.cpp
similarity index 100%
rename from tools/aconfig/tests/aconfig_test_test_variant.cpp
rename to tools/aconfig/aconfig/tests/aconfig_test_test_variant.cpp
diff --git a/tools/aconfig/tests/first.values b/tools/aconfig/aconfig/tests/first.values
similarity index 100%
rename from tools/aconfig/tests/first.values
rename to tools/aconfig/aconfig/tests/first.values
diff --git a/tools/aconfig/tests/read_only_test.aconfig b/tools/aconfig/aconfig/tests/read_only_test.aconfig
similarity index 100%
rename from tools/aconfig/tests/read_only_test.aconfig
rename to tools/aconfig/aconfig/tests/read_only_test.aconfig
diff --git a/tools/aconfig/tests/read_only_test.values b/tools/aconfig/aconfig/tests/read_only_test.values
similarity index 100%
rename from tools/aconfig/tests/read_only_test.values
rename to tools/aconfig/aconfig/tests/read_only_test.values
diff --git a/tools/aconfig/tests/second.values b/tools/aconfig/aconfig/tests/second.values
similarity index 100%
rename from tools/aconfig/tests/second.values
rename to tools/aconfig/aconfig/tests/second.values
diff --git a/tools/aconfig/tests/storage_test_1.aconfig b/tools/aconfig/aconfig/tests/storage_test_1.aconfig
similarity index 100%
rename from tools/aconfig/tests/storage_test_1.aconfig
rename to tools/aconfig/aconfig/tests/storage_test_1.aconfig
diff --git a/tools/aconfig/tests/storage_test_2.aconfig b/tools/aconfig/aconfig/tests/storage_test_2.aconfig
similarity index 100%
rename from tools/aconfig/tests/storage_test_2.aconfig
rename to tools/aconfig/aconfig/tests/storage_test_2.aconfig
diff --git a/tools/aconfig/tests/storage_test_4.aconfig b/tools/aconfig/aconfig/tests/storage_test_4.aconfig
similarity index 100%
rename from tools/aconfig/tests/storage_test_4.aconfig
rename to tools/aconfig/aconfig/tests/storage_test_4.aconfig
diff --git a/tools/aconfig/tests/test.aconfig b/tools/aconfig/aconfig/tests/test.aconfig
similarity index 100%
rename from tools/aconfig/tests/test.aconfig
rename to tools/aconfig/aconfig/tests/test.aconfig
diff --git a/tools/aconfig/tests/test_exported.aconfig b/tools/aconfig/aconfig/tests/test_exported.aconfig
similarity index 100%
rename from tools/aconfig/tests/test_exported.aconfig
rename to tools/aconfig/aconfig/tests/test_exported.aconfig
diff --git a/tools/aconfig/tests/test_force_read_only.aconfig b/tools/aconfig/aconfig/tests/test_force_read_only.aconfig
similarity index 100%
rename from tools/aconfig/tests/test_force_read_only.aconfig
rename to tools/aconfig/aconfig/tests/test_force_read_only.aconfig
diff --git a/tools/aconfig/aconfig_protos/Android.bp b/tools/aconfig/aconfig_protos/Android.bp
new file mode 100644
index 0000000..1cc4e41
--- /dev/null
+++ b/tools/aconfig/aconfig_protos/Android.bp
@@ -0,0 +1,62 @@
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+// proto libraries for consumers of `aconfig dump --format=protobuf` output
+
+java_library {
+ name: "libaconfig_java_proto_lite",
+ host_supported: true,
+ srcs: ["protos/aconfig.proto"],
+ static_libs: ["libprotobuf-java-lite"],
+ proto: {
+ type: "lite",
+ },
+ sdk_version: "current",
+ min_sdk_version: "UpsideDownCake",
+ apex_available: [
+ "com.android.configinfrastructure",
+ "//apex_available:platform",
+ ]
+}
+
+java_library_host {
+ name: "libaconfig_java_proto_full",
+ srcs: ["protos/aconfig.proto"],
+ static_libs: ["libprotobuf-java-full"],
+ proto: {
+ type: "full",
+ },
+}
+
+python_library_host {
+ name: "libaconfig_python_proto",
+ srcs: ["protos/aconfig.proto"],
+ proto: {
+ canonical_path_from_root: false,
+ },
+}
+
+rust_protobuf {
+ name: "libaconfig_rust_proto",
+ protos: ["protos/aconfig.proto"],
+ crate_name: "aconfig_rust_proto",
+ source_stem: "aconfig_rust_proto",
+ host_supported: true,
+}
+
+rust_library {
+ name: "libaconfig_protos",
+ srcs: ["src/lib.rs"],
+ crate_name: "aconfig_protos",
+ host_supported: true,
+ lints: "none",
+ rustlibs: [
+ "libaconfig_rust_proto",
+ "libanyhow",
+ "libprotobuf",
+ ],
+ proc_macros: [
+ "libpaste",
+ ]
+}
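
The Android.bp comment describes these targets as proto libraries for consumers of `aconfig dump --format=protobuf` output. As a rough illustration of what a Rust consumer with a rustlibs dependency on "libaconfig_protos" could do with such a dump: the function name and file path below are made up, and only try_from_binary_proto, ProtoParsedFlags, and the package()/name() accessors appear in this change.

    // Hypothetical consumer: parse a binary `aconfig dump` and list the flags.
    use anyhow::Result;

    fn list_flags(dump_path: &str) -> Result<()> {
        let bytes = std::fs::read(dump_path)?;
        let parsed_flags: aconfig_protos::ProtoParsedFlags =
            aconfig_protos::parsed_flags::try_from_binary_proto(&bytes)?;
        for flag in parsed_flags.parsed_flag.iter() {
            println!("{}.{}", flag.package(), flag.name());
        }
        Ok(())
    }
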
diff --git a/tools/aconfig/aconfig_protos/Cargo.toml b/tools/aconfig/aconfig_protos/Cargo.toml
new file mode 100644
index 0000000..114cf80
--- /dev/null
+++ b/tools/aconfig/aconfig_protos/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "aconfig_protos"
+version = "0.1.0"
+edition = "2021"
+build = "build.rs"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
+anyhow = "1.0.69"
+paste = "1.0.11"
+protobuf = "3.2.0"
+
+[build-dependencies]
+protobuf-codegen = "3.2.0"
diff --git a/tools/aconfig/build.rs b/tools/aconfig/aconfig_protos/build.rs
similarity index 100%
rename from tools/aconfig/build.rs
rename to tools/aconfig/aconfig_protos/build.rs
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/aconfig_protos/protos/aconfig.proto
similarity index 100%
rename from tools/aconfig/protos/aconfig.proto
rename to tools/aconfig/aconfig_protos/protos/aconfig.proto
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/aconfig_protos/src/lib.rs
similarity index 91%
rename from tools/aconfig/src/protos.rs
rename to tools/aconfig/aconfig_protos/src/lib.rs
index 2684d20..f0d27d6 100644
--- a/tools/aconfig/src/protos.rs
+++ b/tools/aconfig/aconfig_protos/src/lib.rs
@@ -29,17 +29,17 @@
// ---- When building with the Android tool-chain ----
#[cfg(not(feature = "cargo"))]
mod auto_generated {
- pub use aconfig_protos::aconfig::flag_metadata::Flag_purpose as ProtoFlagPurpose;
- pub use aconfig_protos::aconfig::Flag_declaration as ProtoFlagDeclaration;
- pub use aconfig_protos::aconfig::Flag_declarations as ProtoFlagDeclarations;
- pub use aconfig_protos::aconfig::Flag_metadata as ProtoFlagMetadata;
- pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
- pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
- pub use aconfig_protos::aconfig::Flag_value as ProtoFlagValue;
- pub use aconfig_protos::aconfig::Flag_values as ProtoFlagValues;
- pub use aconfig_protos::aconfig::Parsed_flag as ProtoParsedFlag;
- pub use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
- pub use aconfig_protos::aconfig::Tracepoint as ProtoTracepoint;
+ pub use aconfig_rust_proto::aconfig::flag_metadata::Flag_purpose as ProtoFlagPurpose;
+ pub use aconfig_rust_proto::aconfig::Flag_declaration as ProtoFlagDeclaration;
+ pub use aconfig_rust_proto::aconfig::Flag_declarations as ProtoFlagDeclarations;
+ pub use aconfig_rust_proto::aconfig::Flag_metadata as ProtoFlagMetadata;
+ pub use aconfig_rust_proto::aconfig::Flag_permission as ProtoFlagPermission;
+ pub use aconfig_rust_proto::aconfig::Flag_state as ProtoFlagState;
+ pub use aconfig_rust_proto::aconfig::Flag_value as ProtoFlagValue;
+ pub use aconfig_rust_proto::aconfig::Flag_values as ProtoFlagValues;
+ pub use aconfig_rust_proto::aconfig::Parsed_flag as ProtoParsedFlag;
+ pub use aconfig_rust_proto::aconfig::Parsed_flags as ProtoParsedFlags;
+ pub use aconfig_rust_proto::aconfig::Tracepoint as ProtoTracepoint;
}
// ---- When building with cargo ----
@@ -68,6 +68,32 @@
use anyhow::Result;
use paste::paste;
+pub fn is_valid_name_ident(s: &str) -> bool {
+ // Identifiers must match [a-z][a-z0-9_]*, except consecutive underscores are not allowed
+ if s.contains("__") {
+ return false;
+ }
+ let mut chars = s.chars();
+ let Some(first) = chars.next() else {
+ return false;
+ };
+ if !first.is_ascii_lowercase() {
+ return false;
+ }
+ chars.all(|ch| ch.is_ascii_lowercase() || ch.is_ascii_digit() || ch == '_')
+}
+
+pub fn is_valid_package_ident(s: &str) -> bool {
+ if !s.contains('.') {
+ return false;
+ }
+ s.split('.').all(is_valid_name_ident)
+}
+
+pub fn is_valid_container_ident(s: &str) -> bool {
+ s.split('.').all(is_valid_name_ident)
+}
+
fn try_from_text_proto<T>(s: &str) -> Result<T>
where
T: protobuf::MessageFull,
@@ -87,14 +113,13 @@
pub mod flag_declaration {
use super::*;
- use crate::codegen;
use anyhow::ensure;
pub fn verify_fields(pdf: &ProtoFlagDeclaration) -> Result<()> {
ensure_required_fields!("flag declaration", pdf, "name", "namespace", "description");
- ensure!(codegen::is_valid_name_ident(pdf.name()), "bad flag declaration: bad name");
- ensure!(codegen::is_valid_name_ident(pdf.namespace()), "bad flag declaration: bad name");
+ ensure!(is_valid_name_ident(pdf.name()), "bad flag declaration: bad name");
+ ensure!(is_valid_name_ident(pdf.namespace()), "bad flag declaration: bad name");
ensure!(!pdf.description().is_empty(), "bad flag declaration: empty description");
ensure!(pdf.bug.len() == 1, "bad flag declaration: exactly one bug required");
@@ -104,7 +129,6 @@
pub mod flag_declarations {
use super::*;
- use crate::codegen;
use anyhow::ensure;
pub fn try_from_text_proto(s: &str) -> Result<ProtoFlagDeclarations> {
@@ -118,11 +142,11 @@
// TODO(b/312769710): Make the container field required.
ensure!(
- codegen::is_valid_package_ident(pdf.package()),
+ is_valid_package_ident(pdf.package()),
"bad flag declarations: bad package"
);
ensure!(
- !pdf.has_container() || codegen::is_valid_container_ident(pdf.container()),
+ !pdf.has_container() || is_valid_container_ident(pdf.container()),
"bad flag declarations: bad container"
);
for flag_declaration in pdf.flag.iter() {
@@ -135,14 +159,13 @@
pub mod flag_value {
use super::*;
- use crate::codegen;
use anyhow::ensure;
pub fn verify_fields(fv: &ProtoFlagValue) -> Result<()> {
ensure_required_fields!("flag value", fv, "package", "name", "state", "permission");
- ensure!(codegen::is_valid_package_ident(fv.package()), "bad flag value: bad package");
- ensure!(codegen::is_valid_name_ident(fv.name()), "bad flag value: bad name");
+ ensure!(is_valid_package_ident(fv.package()), "bad flag value: bad package");
+ ensure!(is_valid_name_ident(fv.name()), "bad flag value: bad name");
Ok(())
}
@@ -200,7 +223,6 @@
pub mod parsed_flag {
use super::*;
- use crate::codegen;
use anyhow::ensure;
pub fn verify_fields(pf: &ProtoParsedFlag) -> Result<()> {
@@ -215,13 +237,13 @@
"permission"
);
- ensure!(codegen::is_valid_package_ident(pf.package()), "bad parsed flag: bad package");
+ ensure!(is_valid_package_ident(pf.package()), "bad parsed flag: bad package");
ensure!(
- !pf.has_container() || codegen::is_valid_container_ident(pf.container()),
+ !pf.has_container() || is_valid_container_ident(pf.container()),
"bad parsed flag: bad container"
);
- ensure!(codegen::is_valid_name_ident(pf.name()), "bad parsed flag: bad name");
- ensure!(codegen::is_valid_name_ident(pf.namespace()), "bad parsed flag: bad namespace");
+ ensure!(is_valid_name_ident(pf.name()), "bad parsed flag: bad name");
+ ensure!(is_valid_name_ident(pf.namespace()), "bad parsed flag: bad namespace");
ensure!(!pf.description().is_empty(), "bad parsed flag: empty description");
ensure!(!pf.trace.is_empty(), "bad parsed flag: empty trace");
for tp in pf.trace.iter() {
@@ -261,7 +283,7 @@
}
pub fn verify_fields(pf: &ProtoParsedFlags) -> Result<()> {
- use crate::protos::parsed_flag::path_to_declaration;
+ use crate::parsed_flag::path_to_declaration;
let mut previous: Option<&ProtoParsedFlag> = None;
for parsed_flag in pf.parsed_flag.iter() {
@@ -848,7 +870,7 @@
let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
let parsed_flag = &parsed_flags.parsed_flag[0];
assert_eq!(
- crate::protos::parsed_flag::path_to_declaration(parsed_flag),
+ crate::parsed_flag::path_to_declaration(parsed_flag),
"flags.declarations"
);
}
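
The identifier helpers moved into this crate encode the rule stated in the comment above: flag and namespace names match [a-z][a-z0-9_]* with no consecutive underscores, packages are dot-separated names containing at least one dot, and containers are dot-separated names. A few illustrative checks, hypothetical and not part of the change, assuming the crate's public exports:

    // Illustrative expectations for the identifier validators above.
    use aconfig_protos::{is_valid_container_ident, is_valid_name_ident, is_valid_package_ident};

    #[test]
    fn ident_examples() {
        assert!(is_valid_name_ident("enable_foo"));
        assert!(!is_valid_name_ident("Enable_foo"));  // must start with [a-z]
        assert!(!is_valid_name_ident("enable__foo")); // consecutive underscores rejected
        assert!(!is_valid_name_ident(""));            // empty rejected

        assert!(is_valid_package_ident("com.android.aconfig.test"));
        assert!(!is_valid_package_ident("aconfig_test")); // packages need at least one '.'

        assert!(is_valid_container_ident("system"));  // containers may be a single segment
    }
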
diff --git a/tools/aconfig/printflags/Cargo.toml b/tools/aconfig/printflags/Cargo.toml
new file mode 100644
index 0000000..7313f5d
--- /dev/null
+++ b/tools/aconfig/printflags/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "printflags"
+version = "0.1.0"
+edition = "2021"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
+anyhow = "1.0.69"
+paste = "1.0.11"
+protobuf = "3.2.0"
+regex = "1.10.3"
+aconfig_protos = { path = "../aconfig_protos" }
diff --git a/tools/aconfig/printflags/src/main.rs b/tools/aconfig/printflags/src/main.rs
index ae9b83a..7fcde61 100644
--- a/tools/aconfig/printflags/src/main.rs
+++ b/tools/aconfig/printflags/src/main.rs
@@ -16,8 +16,8 @@
//! `printflags` is a device binary to print feature flags.
-use aconfig_protos::aconfig::Flag_state as State;
-use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
+use aconfig_protos::ProtoFlagState as State;
+use aconfig_protos::ProtoParsedFlags as ProtoParsedFlags;
use anyhow::{bail, Context, Result};
use regex::Regex;
use std::collections::BTreeMap;
diff --git a/tools/perf/benchmarks b/tools/perf/benchmarks
index e188858..acc53bb 100755
--- a/tools/perf/benchmarks
+++ b/tools/perf/benchmarks
@@ -130,8 +130,9 @@
def Clean():
"""Remove the out directory."""
def remove_out():
- if os.path.exists("out"):
- shutil.rmtree("out")
+ out_dir = utils.get_out_dir()
+ if os.path.exists(out_dir):
+ shutil.rmtree(out_dir)
return Change(label="Remove out", change=remove_out, undo=lambda: None)
@@ -270,7 +271,7 @@
def _run_benchmark(self, lunch, benchmark, iteration):
"""Run a single benchmark."""
- benchmark_log_subdir = self._log_dir(lunch, benchmark, iteration)
+ benchmark_log_subdir = self._benchmark_log_dir(lunch, benchmark, iteration)
benchmark_log_dir = self._options.LogDir().joinpath(benchmark_log_subdir)
sys.stderr.write(f"STARTING BENCHMARK: {benchmark.id}\n")
@@ -298,7 +299,7 @@
dist_one = self._options.DistOne()
if dist_one:
# If we're disting just one benchmark, save the logs and we can stop here.
- self._dist(dist_one)
+ self._dist(utils.get_dist_dir())
else:
# Postroll builds
for i in range(benchmark.preroll):
@@ -315,7 +316,7 @@
self._write_summary()
sys.stderr.write(f"FINISHED BENCHMARK: {benchmark.id}\n")
- def _log_dir(self, lunch, benchmark, iteration):
+ def _benchmark_log_dir(self, lunch, benchmark, iteration):
"""Construct the log directory fir a benchmark run."""
path = f"{lunch.Combine()}/{benchmark.id}"
# Zero pad to the correct length for correct alpha sorting
@@ -355,8 +356,8 @@
return after_ns - before_ns
def _dist(self, dist_dir):
- out_dir = pathlib.Path("out")
- dest_dir = pathlib.Path(dist_dir).joinpath("logs")
+ out_dir = utils.get_out_dir()
+ dest_dir = dist_dir.joinpath("logs")
os.makedirs(dest_dir, exist_ok=True)
basenames = [
"build.trace.gz",
@@ -704,6 +705,7 @@
runner.Run()
except FatalError:
sys.stderr.write(f"FAILED\n")
+ sys.exit(1)
if __name__ == "__main__":
diff --git a/tools/perf/format_benchmarks b/tools/perf/format_benchmarks
index 845d73f..162c577 100755
--- a/tools/perf/format_benchmarks
+++ b/tools/perf/format_benchmarks
@@ -86,10 +86,12 @@
class Table:
- def __init__(self):
+ def __init__(self, row_title, fixed_titles=[]):
self._data = {}
self._rows = []
self._cols = []
+ self._fixed_cols = {}
+ self._titles = [row_title] + fixed_titles
def Set(self, column_key, row_key, data):
self._data[(column_key, row_key)] = data
@@ -98,19 +100,27 @@
if not row_key in self._rows:
self._rows.append(row_key)
+ def SetFixedCol(self, row_key, columns):
+ self._fixed_cols[row_key] = columns
+
def Write(self, out):
table = []
# Expand the column items
for row in zip(*self._cols):
if row.count(row[0]) == len(row):
continue
- table.append([""] + [col for col in row])
+ table.append([""] * len(self._titles) + [col for col in row])
if table:
+ # Update the last row of the header with title and add separator
+ for i in range(len(self._titles)):
+ table[len(table)-1][i] = self._titles[i]
table.append(pretty.SEPARATOR)
# Populate the data
for row in self._rows:
- table.append([str(row)] + [str(self._data.get((col, row), "")) for col in self._cols])
- out.write(pretty.FormatTable(table))
+ table.append([str(row)]
+ + self._fixed_cols[row]
+ + [str(self._data.get((col, row), "")) for col in self._cols])
+ out.write(pretty.FormatTable(table, alignments="LL"))
def format_duration_sec(ns):
@@ -173,11 +183,12 @@
in group_by(summary["benchmarks"], bm_key)]
# Build the table
- table = Table()
+ table = Table("Benchmark", ["Rebuild"])
for filename, summary in summaries:
for key, column in summary["columns"]:
for id, cell in column:
duration_ns = statistics.median([b["duration_ns"] for b in cell])
+ table.SetFixedCol(cell[0]["title"], [" ".join(cell[0]["modules"])])
table.Set(tuple([summary["date"].strftime("%Y-%m-%d"),
summary["branch"],
summary["tag"]]
diff --git a/tools/perf/pretty.py b/tools/perf/pretty.py
index 1b59098..14fdc9e 100644
--- a/tools/perf/pretty.py
+++ b/tools/perf/pretty.py
@@ -19,7 +19,7 @@
SEPARATOR = Sentinel()
-def FormatTable(data, prefix=""):
+def FormatTable(data, prefix="", alignments=[]):
"""Pretty print a table.
Prefixes each row with `prefix`.
@@ -40,10 +40,10 @@
else:
for i in range(len(row)):
cell = row[i] if row[i] else ""
- if i != 0:
+ if i >= len(alignments) or alignments[i] == "R":
result += " " * (widths[i] - len(cell))
result += cell
- if i == 0:
+ if i < len(alignments) and alignments[i] == "L":
result += " " * (widths[i] - len(cell))
result += colsep
result += "\n"
diff --git a/tools/perf/utils.py b/tools/perf/utils.py
index 08e393f..934130d 100644
--- a/tools/perf/utils.py
+++ b/tools/perf/utils.py
@@ -28,3 +28,15 @@
d = d.parent
if d == pathlib.Path("/"):
return None
+
+def get_dist_dir():
+ dist_dir = os.getenv("DIST_DIR")
+ if dist_dir:
+ return pathlib.Path(dist_dir).resolve()
+ return get_out_dir().joinpath("dist")
+
+def get_out_dir():
+ out_dir = os.getenv("OUT_DIR")
+ if not out_dir:
+ out_dir = "out"
+ return pathlib.Path(out_dir).resolve()
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index fc4ab68..b39a82c 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -835,8 +835,7 @@
def AddApexInfo(output_zip):
- apex_infos = GetApexInfoFromTargetFiles(OPTIONS.input_tmp, 'system',
- compressed_only=False)
+ apex_infos = GetApexInfoFromTargetFiles(OPTIONS.input_tmp)
apex_metadata_proto = ota_metadata_pb2.ApexMetadata()
apex_metadata_proto.apex_info.extend(apex_infos)
apex_info_bytes = apex_metadata_proto.SerializeToString()
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 1ddffc1..3abef3b 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -534,22 +534,28 @@
'Failed to get type for {}:\n{}'.format(apex_file, e))
-def GetApexInfoFromTargetFiles(input_file, partition, compressed_only=True):
+def GetApexInfoFromTargetFiles(input_file):
"""
- Get information about system APEX stored in the input_file zip
+ Get information about APEXes stored in the input_file zip
Args:
input_file: The filename of the target build target-files zip or directory.
Return:
A list of ota_metadata_pb2.ApexInfo() populated using the APEX stored in
- /system partition of the input_file
+ each partition of the input_file
"""
# Extract the apex files so that we can run checks on them
if not isinstance(input_file, str):
raise RuntimeError("must pass filepath to target-files zip or directory")
+ apex_infos = []
+ for partition in ['system', 'system_ext', 'product', 'vendor']:
+ apex_infos.extend(GetApexInfoForPartition(input_file, partition))
+ return apex_infos
+
+def GetApexInfoForPartition(input_file, partition):
apex_subdir = os.path.join(partition.upper(), 'apex')
if os.path.isdir(input_file):
tmp_dir = input_file
@@ -607,7 +613,6 @@
'--output', decompressed_file_path])
apex_info.decompressed_size = os.path.getsize(decompressed_file_path)
- if not compressed_only or apex_info.is_compressed:
- apex_infos.append(apex_info)
+ apex_infos.append(apex_info)
return apex_infos
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index a7b3523..cdafb4b 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -58,22 +58,6 @@
logger = logging.getLogger(__name__)
-# Work around a bug in Python's zipfile module that prevents opening of zipfiles
-# if any entry has an extra field of between 1 and 3 bytes (which is common with
-# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
-# contains the bug) with an empty version (since we don't need to decode the
-# extra field anyway).
-# Issue #14315: https://bugs.python.org/issue14315, fixed in Python 2.7.8 and
-# Python 3.5.0 alpha 1.
-
-
-class MyZipInfo(zipfile.ZipInfo):
- def _decodeExtra(self):
- pass
-
-
-zipfile.ZipInfo = MyZipInfo
-
OPTIONS = common.OPTIONS
diff --git a/tools/releasetools/merge/merge_compatibility_checks.py b/tools/releasetools/merge/merge_compatibility_checks.py
index 207abe2..8c9993f 100644
--- a/tools/releasetools/merge/merge_compatibility_checks.py
+++ b/tools/releasetools/merge/merge_compatibility_checks.py
@@ -190,8 +190,8 @@
apex_packages = set()
for partition in partition_map.keys():
try:
- apex_info = apex_utils.GetApexInfoFromTargetFiles(
- target_files_dir, partition, compressed_only=False)
+ apex_info = apex_utils.GetApexInfoForPartition(
+ target_files_dir, partition)
except RuntimeError as err:
errors.append(str(err))
apex_info = []
diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py
index 24d9ea9..fb5957a 100644
--- a/tools/releasetools/merge_ota.py
+++ b/tools/releasetools/merge_ota.py
@@ -243,8 +243,6 @@
# Get signing keys
key_passwords = common.GetKeyPasswords([args.package_key])
- generator = PayloadGenerator()
-
apex_info_bytes = ApexInfo(file_paths)
with tempfile.NamedTemporaryFile() as unsigned_payload:
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 29042a5..e521e1f 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -856,10 +856,10 @@
return ExtractTargetFiles(target_file)
-def ValidateCompressinParam(target_info):
+def ValidateCompressionParam(target_info):
vabc_compression_param = OPTIONS.vabc_compression_param
if vabc_compression_param:
- minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[vabc_compression_param]
+ minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[vabc_compression_param.split(",")[0]]
if target_info.vendor_api_level < minimum_api_level_required:
raise ValueError("Specified VABC compression param {} is only supported for API level >= {}, device is on API level {}".format(
vabc_compression_param, minimum_api_level_required, target_info.vendor_api_level))
@@ -872,7 +872,7 @@
target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
if OPTIONS.disable_vabc and target_info.is_release_key:
raise ValueError("Disabling VABC on release-key builds is not supported.")
- ValidateCompressinParam(target_info)
+ ValidateCompressionParam(target_info)
vabc_compression_param = target_info.vabc_compression_param
target_file = ExtractOrCopyTargetFiles(target_file)
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 4356394..7b497c1 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -1137,6 +1137,7 @@
devkeydir + "/shared": d + "/shared",
devkeydir + "/platform": d + "/platform",
devkeydir + "/networkstack": d + "/networkstack",
+ devkeydir + "/sdk_sandbox": d + "/sdk_sandbox",
})
else:
OPTIONS.key_map[s] = d
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index d1e76b9..b6fcb18 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -299,7 +299,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetApexInfoFromTargetFiles(self):
target_files = construct_target_files(compressedApex=True)
- apex_infos = GetApexInfoFromTargetFiles(target_files, 'system')
+ apex_infos = GetApexInfoFromTargetFiles(target_files)
self.assertEqual(len(apex_infos), 1)
self.assertEqual(apex_infos[0].package_name, "com.android.apex.compressed")
self.assertEqual(apex_infos[0].version, 1)