Merge "Install libvintf_jni" into main
diff --git a/ci/build_test_suites b/ci/build_test_suites
new file mode 100755
index 0000000..89ecefe
--- /dev/null
+++ b/ci/build_test_suites
@@ -0,0 +1,23 @@
+#!prebuilts/build-tools/linux-x86/bin/py3-cmd
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+import build_test_suites
+
+if __name__ == '__main__':
+ sys.dont_write_bytecode = True
+
+ build_test_suites.main(sys.argv)
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
new file mode 100644
index 0000000..1064041
--- /dev/null
+++ b/ci/build_test_suites.py
@@ -0,0 +1,299 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Script to build only the necessary modules for general-tests along
+
+with whatever other targets are passed in.
+"""
+
+import argparse
+from collections.abc import Sequence
+import json
+import os
+import pathlib
+import re
+import subprocess
+import sys
+from typing import Any, Dict, Set, Text
+
+import test_mapping_module_retriever
+
+
+# List of modules that are always required to be in general-tests.zip
+REQUIRED_MODULES = frozenset(
+ ['cts-tradefed', 'vts-tradefed', 'compatibility-host-util', 'soong_zip']
+)
+
+
+def build_test_suites(argv):
+ args = parse_args(argv)
+
+ if not args.change_info:
+ build_everything(args)
+ return
+
+  # Map the changed files to the modules that need to be built.
+ # TODO(lucafarsi): Move this into a replaceable class.
+ build_affected_modules(args)
+
+
+def parse_args(argv):
+ argparser = argparse.ArgumentParser()
+ argparser.add_argument(
+ 'extra_targets', nargs='*', help='Extra test suites to build.'
+ )
+ argparser.add_argument('--target_product')
+ argparser.add_argument('--target_release')
+ argparser.add_argument(
+ '--with_dexpreopt_boot_img_and_system_server_only', action='store_true'
+ )
+ argparser.add_argument('--dist_dir')
+ argparser.add_argument('--change_info', nargs='?')
+ argparser.add_argument('--extra_required_modules', nargs='*')
+
+ return argparser.parse_args()
+
+
+def build_everything(args: argparse.Namespace):
+ build_command = base_build_command(args)
+ build_command.append('general-tests')
+
+ run_command(build_command, print_output=True)
+
+
+def build_affected_modules(args: argparse.Namespace):
+ modules_to_build = find_modules_to_build(
+ pathlib.Path(args.change_info), args.extra_required_modules
+ )
+
+ # Call the build command with everything.
+ build_command = base_build_command(args)
+ build_command.extend(modules_to_build)
+
+ run_command(build_command, print_output=True)
+
+ zip_build_outputs(modules_to_build, args.dist_dir, args.target_release)
+
+
+def base_build_command(args: argparse.Namespace) -> list:
+ build_command = []
+ build_command.append('time')
+ build_command.append('./build/soong/soong_ui.bash')
+ build_command.append('--make-mode')
+ build_command.append('dist')
+ build_command.append('DIST_DIR=' + args.dist_dir)
+ build_command.append('TARGET_PRODUCT=' + args.target_product)
+ build_command.append('TARGET_RELEASE=' + args.target_release)
+ if args.with_dexpreopt_boot_img_and_system_server_only:
+ build_command.append('WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY=true')
+ build_command.extend(args.extra_targets)
+
+ return build_command
+
+
+def run_command(
+ args: list[str],
+ env: Dict[Text, Text] = os.environ,
+ print_output: bool = False,
+) -> str:
+ result = subprocess.run(
+ args=args,
+ text=True,
+ capture_output=True,
+ check=False,
+ env=env,
+ )
+ # If the process failed, print its stdout and propagate the exception.
+  if result.returncode != 0:
+ print('Build command failed! output:')
+ print('stdout: ' + result.stdout)
+ print('stderr: ' + result.stderr)
+
+ result.check_returncode()
+
+ if print_output:
+ print(result.stdout)
+
+ return result.stdout
+
+
+def find_modules_to_build(
+ change_info: pathlib.Path, extra_required_modules: list[Text]
+) -> Set[Text]:
+ changed_files = find_changed_files(change_info)
+
+ test_mappings = test_mapping_module_retriever.GetTestMappings(
+ changed_files, set()
+ )
+
+  # soong_zip is required to generate the output zip, so always build it.
+ modules_to_build = set(REQUIRED_MODULES)
+ if extra_required_modules:
+ modules_to_build.update(extra_required_modules)
+
+ modules_to_build.update(find_affected_modules(test_mappings, changed_files))
+
+ return modules_to_build
+
+
+def find_changed_files(change_info: pathlib.Path) -> Set[Text]:
+ with open(change_info) as change_info_file:
+ change_info_contents = json.load(change_info_file)
+
+ changed_files = set()
+
+ for change in change_info_contents['changes']:
+ project_path = change.get('projectPath') + '/'
+
+ for revision in change.get('revisions'):
+ for file_info in revision.get('fileInfos'):
+ changed_files.add(project_path + file_info.get('path'))
+
+ return changed_files
+
+
+def find_affected_modules(
+ test_mappings: Dict[str, Any], changed_files: Set[Text]
+) -> Set[Text]:
+ modules = set()
+
+ # The test_mappings object returned by GetTestMappings is organized as
+ # follows:
+ # {
+ # 'test_mapping_file_path': {
+ # 'group_name' : [
+  #       { 'name': 'module_name' },
+ # ],
+ # }
+ # }
+ for test_mapping in test_mappings.values():
+ for group in test_mapping.values():
+ for entry in group:
+ module_name = entry.get('name', None)
+
+ if not module_name:
+ continue
+
+ file_patterns = entry.get('file_patterns')
+ if not file_patterns:
+ modules.add(module_name)
+ continue
+
+ if matches_file_patterns(file_patterns, changed_files):
+ modules.add(module_name)
+ continue
+
+ return modules
+
+
+# TODO(lucafarsi): Share this logic with the original logic in
+# test_mapping_test_retriever.py
+def matches_file_patterns(
+ file_patterns: list[Text], changed_files: Set[Text]
+) -> bool:
+ for changed_file in changed_files:
+ for pattern in file_patterns:
+ if re.search(pattern, changed_file):
+ return True
+
+ return False
+
+
+def zip_build_outputs(
+ modules_to_build: Set[Text], dist_dir: Text, target_release: Text
+):
+ src_top = os.environ.get('TOP', os.getcwd())
+
+  # Call dumpvar to get the necessary output variables.
+ # TODO(lucafarsi): Don't call soong_ui 4 times for this, --dumpvars-mode can
+ # do it but it requires parsing.
+ host_out_testcases = get_soong_var('HOST_OUT_TESTCASES', target_release)
+ target_out_testcases = get_soong_var('TARGET_OUT_TESTCASES', target_release)
+ product_out = get_soong_var('PRODUCT_OUT', target_release)
+ soong_host_out = get_soong_var('SOONG_HOST_OUT', target_release)
+ host_out = get_soong_var('HOST_OUT', target_release)
+
+  # Package the outputs.
+ # TODO(lucafarsi): Move this code into a replaceable class.
+ host_paths = []
+ target_paths = []
+ for module in modules_to_build:
+ host_path = os.path.join(host_out_testcases, module)
+ if os.path.exists(host_path):
+ host_paths.append(host_path)
+
+ target_path = os.path.join(target_out_testcases, module)
+ if os.path.exists(target_path):
+ target_paths.append(target_path)
+
+ zip_command = ['time', os.path.join(host_out, 'bin', 'soong_zip')]
+
+ # Add host testcases.
+ zip_command.append('-C')
+ zip_command.append(os.path.join(src_top, soong_host_out))
+ zip_command.append('-P')
+ zip_command.append('host/')
+ for path in host_paths:
+ zip_command.append('-D')
+ zip_command.append(path)
+
+ # Add target testcases.
+ zip_command.append('-C')
+ zip_command.append(os.path.join(src_top, product_out))
+ zip_command.append('-P')
+ zip_command.append('target')
+ for path in target_paths:
+ zip_command.append('-D')
+ zip_command.append(path)
+
+ # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
+ # Add necessary tools. These are also hardcoded in general-tests.mk.
+ framework_path = os.path.join(soong_host_out, 'framework')
+
+ zip_command.append('-C')
+ zip_command.append(framework_path)
+ zip_command.append('-P')
+ zip_command.append('host/tools')
+ zip_command.append('-f')
+ zip_command.append(os.path.join(framework_path, 'cts-tradefed.jar'))
+ zip_command.append('-f')
+ zip_command.append(
+ os.path.join(framework_path, 'compatibility-host-util.jar')
+ )
+ zip_command.append('-f')
+ zip_command.append(os.path.join(framework_path, 'vts-tradefed.jar'))
+
+ # Zip to the DIST dir.
+ zip_command.append('-o')
+ zip_command.append(os.path.join(dist_dir, 'general-tests.zip'))
+
+ run_command(zip_command, print_output=True)
+
+
+def get_soong_var(var: str, target_release: str) -> str:
+ new_env = os.environ.copy()
+ new_env['TARGET_RELEASE'] = target_release
+
+ value = run_command(
+ ['./build/soong/soong_ui.bash', '--dumpvar-mode', '--abs', var],
+ env=new_env,
+ ).strip()
+ if not value:
+ raise RuntimeError('Necessary soong variable ' + var + ' not found.')
+
+ return value
+
+
+def main(argv):
+  build_test_suites(argv)
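For orientation, the selection logic above assumes a change_info JSON of roughly the shape shown below; the field names come from find_changed_files(), while the project path and file are made up. This is a standalone, illustrative sketch of the prefixing the script performs, not part of the build.

    # Hypothetical change_info payload and a re-derivation of the changed-file set.
    example_change_info = {
        'changes': [{
            'projectPath': 'platform/build',
            'revisions': [{
                'fileInfos': [{'path': 'core/Makefile'}],
            }],
        }],
    }

    def changed_files(change_info: dict) -> set[str]:
        # Mirrors find_changed_files(): project path + '/' + file path per revision.
        files = set()
        for change in change_info['changes']:
            prefix = change['projectPath'] + '/'
            for revision in change['revisions']:
                for file_info in revision['fileInfos']:
                    files.add(prefix + file_info['path'])
        return files

    print(changed_files(example_change_info))  # {'platform/build/core/Makefile'}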
diff --git a/ci/test_mapping_module_retriever.py b/ci/test_mapping_module_retriever.py
new file mode 100644
index 0000000..d2c13c0
--- /dev/null
+++ b/ci/test_mapping_module_retriever.py
@@ -0,0 +1,125 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Simple parsing code to scan TEST_MAPPING files and determine which
+modules need to be built for the given list of changed files.
+TODO(lucafarsi): Deduplicate from artifact_helper.py
+"""
+
+from typing import Any, Dict, Set, Text
+import json
+import os
+import re
+
+# Regex to extract the test name from the path of a test config file.
+TEST_NAME_REGEX = r'(?:^|.*/)([^/]+)\.config'
+
+# Key name for TEST_MAPPING imports
+KEY_IMPORTS = 'imports'
+KEY_IMPORT_PATH = 'path'
+
+# Name of TEST_MAPPING file.
+TEST_MAPPING = 'TEST_MAPPING'
+
+# Pattern used to identify double-quoted strings and '//'-format comments in
+# TEST_MAPPING file, but only double-quoted strings are included within the
+# matching group.
+_COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?')
+
+
+def FilterComments(test_mapping_file: Text) -> Text:
+  """Remove comments from a TEST_MAPPING file so it is valid JSON.
+
+ Only '//' is regarded as comments.
+
+ Args:
+    test_mapping_file: Contents of a TEST_MAPPING file.
+
+ Returns:
+    Valid JSON string without comments.
+ """
+ return re.sub(_COMMENTS_RE, r'\1', test_mapping_file)
+
+def GetTestMappings(paths: Set[Text],
+ checked_paths: Set[Text]) -> Dict[Text, Dict[Text, Any]]:
+ """Get the affected TEST_MAPPING files.
+
+ TEST_MAPPING files in source code are packaged into a build artifact
+ `test_mappings.zip`. Inside the zip file, the path of each TEST_MAPPING file
+ is preserved. From all TEST_MAPPING files in the source code, this method
+ locates the affected TEST_MAPPING files based on the given paths list.
+
+ A TEST_MAPPING file may also contain `imports` that import TEST_MAPPING files
+ from a different location, e.g.,
+ "imports": [
+ {
+ "path": "../folder2"
+ }
+ ]
+ In that example, TEST_MAPPING files inside ../folder2 (relative to the
+ TEST_MAPPING file containing that imports section) and its parent directories
+ will also be included.
+
+ Args:
+ paths: A set of paths with related TEST_MAPPING files for given changes.
+    checked_paths: A set of paths that have already been checked for a TEST_MAPPING
+      file. The set is updated after processing each TEST_MAPPING file. It's
+      used to prevent an infinite loop when the method is called recursively.
+
+ Returns:
+    A dictionary of test mappings containing the content of the affected
+    TEST_MAPPING files, keyed by the directory containing each TEST_MAPPING file.
+ """
+ test_mappings = {}
+
+ # Search for TEST_MAPPING files in each modified path and its parent
+ # directories.
+ all_paths = set()
+ for path in paths:
+ dir_names = path.split(os.path.sep)
+ all_paths |= set(
+ [os.path.sep.join(dir_names[:i + 1]) for i in range(len(dir_names))])
+ # Add root directory to the paths to search for TEST_MAPPING file.
+ all_paths.add('')
+
+ all_paths.difference_update(checked_paths)
+ checked_paths |= all_paths
+ # Try to load TEST_MAPPING file in each possible path.
+ for path in all_paths:
+ try:
+      test_mapping_file = os.path.join(os.getcwd(), path, 'TEST_MAPPING')
+ # Read content of TEST_MAPPING file.
+ content = FilterComments(open(test_mapping_file, "r").read())
+ test_mapping = json.loads(content)
+ test_mappings[path] = test_mapping
+
+ import_paths = set()
+ for import_detail in test_mapping.get(KEY_IMPORTS, []):
+ import_path = import_detail[KEY_IMPORT_PATH]
+ # Try the import path as absolute path.
+ import_paths.add(import_path)
+ # Try the import path as relative path based on the test mapping file
+ # containing the import.
+ norm_import_path = os.path.normpath(os.path.join(path, import_path))
+ import_paths.add(norm_import_path)
+ import_paths.difference_update(checked_paths)
+ if import_paths:
+ import_test_mappings = GetTestMappings(import_paths, checked_paths)
+ test_mappings.update(import_test_mappings)
+ except (KeyError, FileNotFoundError, NotADirectoryError):
+ # TEST_MAPPING file doesn't exist in path
+ pass
+
+ return test_mappings
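To illustrate the comment handling described in the docstrings above, the following self-contained sketch strips '//' comments from a hypothetical TEST_MAPPING body with the same regex and then parses it. The module name, file pattern, and import path are invented for the example.

    import json
    import re

    # Same pattern as _COMMENTS_RE above: quoted strings survive, '//' comments do not.
    _COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?')

    raw = '''
    {
      // Presubmit tests for this directory.
      "presubmit": [
        { "name": "ExampleTest", "file_patterns": ["core/.*[.]mk"] }
      ],
      "imports": [
        { "path": "../folder2" }  // also scan TEST_MAPPINGs under ../folder2
      ]
    }
    '''

    mapping = json.loads(re.sub(_COMMENTS_RE, r'\1', raw))
    print(mapping['presubmit'][0]['name'])  # ExampleTest
    print(mapping['imports'][0]['path'])    # ../folder2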
diff --git a/core/Makefile b/core/Makefile
index 3f0da8b..b3870e5 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2094,11 +2094,6 @@
# Get a colon-separated list of search paths.
INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
-# Collects file_contexts files from modules to be installed
-$(call merge-fc-files, \
- $(sort $(foreach m,$(product_MODULES),$(ALL_MODULES.$(m).FILE_CONTEXTS))),\
- $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.modules.tmp)
-
SELINUX_FC := $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.bin
INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
@@ -3237,7 +3232,6 @@
exit 1; \
fi
ln -sfn $2 $1
-$1: .KATI_SYMLINK_OUTPUTS := $1
)
$(eval PARTITION_COMPAT_SYMLINKS += $1)
$1
@@ -4600,10 +4594,18 @@
--prop com.android.build.pvmfw.security_patch:$(PVMFW_SECURITY_PATCH)
endif
-# Append avbpubkey of microdroid-vendor partition into vendor_boot partition.
-ifdef MICRODROID_VENDOR_AVBKEY
-BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS += \
- --prop_from_file com.android.build.microdroid-vendor.avbpubkey:$(MICRODROID_VENDOR_AVBKEY)
+# Append root digest of microdroid-vendor partition's hashtree descriptor into vendor partition.
+ifdef MICRODROID_VENDOR_IMAGE_MODULE
+MICRODROID_VENDOR_IMAGE := \
+ $(call intermediates-dir-for,ETC,$(MICRODROID_VENDOR_IMAGE_MODULE))/$(MICRODROID_VENDOR_IMAGE_MODULE)
+MICRODROID_VENDOR_ROOT_DIGEST := $(PRODUCT_OUT)/microdroid_vendor_root_digest
+BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop_from_file com.android.build.microdroid-vendor.root_digest:$(MICRODROID_VENDOR_ROOT_DIGEST)
+$(MICRODROID_VENDOR_ROOT_DIGEST): $(AVBTOOL) $(MICRODROID_VENDOR_IMAGE)
+ $(AVBTOOL) print_partition_digests \
+ --image $(MICRODROID_VENDOR_IMAGE) \
+ | tr -d '\n' | sed -E 's/.*: //g' > $@
+$(INSTALLED_VENDORIMAGE_TARGET): $(MICRODROID_VENDOR_ROOT_DIGEST)
endif
BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
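The new MICRODROID_VENDOR_ROOT_DIGEST rule above shells out to avbtool and post-processes the output with tr/sed. As a readability aid, here is a minimal Python sketch of that extraction; it assumes print_partition_digests emits a single "<name>: <digest>" line, and the tool and image paths in the commented call are placeholders.

    import re
    import subprocess

    def microdroid_vendor_root_digest(avbtool: str, image: str) -> str:
        # Equivalent of: avbtool print_partition_digests --image IMG | tr -d '\n' | sed -E 's/.*: //g'
        out = subprocess.run(
            [avbtool, 'print_partition_digests', '--image', image],
            capture_output=True, text=True, check=True,
        ).stdout
        return re.sub(r'.*: ', '', out.replace('\n', ''))

    # digest = microdroid_vendor_root_digest('avbtool', 'microdroid_vendor.img')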
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 6af6f08..6ae8b0d 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -189,6 +189,9 @@
$(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
$(call add_soong_config_var,ANDROID,FULL_SYSTEM_OPTIMIZE_JAVA)
+# TODO(b/319697968): Remove this build flag support when metalava fully supports flagged api
+$(call soong_config_set,ANDROID,release_hidden_api_exportable_stubs,$(RELEASE_HIDDEN_API_EXPORTABLE_STUBS))
+
# Check for SupplementalApi module.
ifeq ($(wildcard packages/modules/SupplementalApi),)
$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,false)
@@ -196,3 +199,7 @@
$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,true)
endif
+# Add crashrecovery build flag to soong
+$(call soong_config_set,ANDROID,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
+# Weirdly required because platform_bootclasspath is using AUTO namespace
+$(call soong_config_set,AUTO,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE))
diff --git a/core/base_rules.mk b/core/base_rules.mk
index f533358..1793f00 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -121,9 +121,17 @@
$(LOCAL_PROPRIETARY_MODULE))
include $(BUILD_SYSTEM)/local_vndk.mk
-include $(BUILD_SYSTEM)/local_systemsdk.mk
+
+# local_current_sdk needs to run before local_systemsdk because the former may override
+# LOCAL_SDK_VERSION which is used by the latter.
include $(BUILD_SYSTEM)/local_current_sdk.mk
+# Check if the use of System SDK is correct. Note that, for Soong modules, the system sdk version
+# check is done in Soong. No need to do it twice.
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+include $(BUILD_SYSTEM)/local_systemsdk.mk
+endif
+
# Ninja has an implicit dependency on the command being run, and kati will
# regenerate the ninja manifest if any read makefile changes, so there is no
# need to have dependencies on makefiles.
@@ -541,13 +549,14 @@
# Only set up copy rules once, even if another arch variant shares it
my_vintf_new_pairs := $(filter-out $(ALL_VINTF_MANIFEST_FRAGMENTS_LIST),$(my_vintf_pairs))
- my_vintf_new_installed := $(call copy-many-vintf-manifest-files-checked,$(my_vintf_new_pairs))
-
ALL_VINTF_MANIFEST_FRAGMENTS_LIST += $(my_vintf_new_pairs)
- $(my_all_targets) : $(my_vintf_installed)
- # Install fragments together with the target
- $(LOCAL_INSTALLED_MODULE) : | $(my_vintf_installed)
+ ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ $(call copy-many-vintf-manifest-files-checked,$(my_vintf_new_pairs))
+ $(my_all_targets) : $(my_vintf_installed)
+ # Install fragments together with the target
+ $(LOCAL_INSTALLED_MODULE) : | $(my_vintf_installed)
+ endif
endif # my_vintf_fragments
# Rule to install the module's companion init.rc.
@@ -579,13 +588,14 @@
# Make sure we only set up the copy rules once, even if another arch variant
# shares a common LOCAL_INIT_RC.
my_init_rc_new_pairs := $(filter-out $(ALL_INIT_RC_INSTALLED_PAIRS),$(my_init_rc_pairs))
- my_init_rc_new_installed := $(call copy-many-init-script-files-checked,$(my_init_rc_new_pairs))
-
ALL_INIT_RC_INSTALLED_PAIRS += $(my_init_rc_new_pairs)
- $(my_all_targets) : $(my_init_rc_installed)
- # Install init_rc together with the target
- $(LOCAL_INSTALLED_MODULE) : | $(my_init_rc_installed)
+ ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ $(call copy-many-init-script-files-checked,$(my_init_rc_new_pairs))
+ $(my_all_targets) : $(my_init_rc_installed)
+ # Install init_rc together with the target
+ $(LOCAL_INSTALLED_MODULE) : | $(my_init_rc_installed)
+ endif
endif # my_init_rc
endif # !LOCAL_IS_HOST_MODULE
@@ -1007,46 +1017,78 @@
my_required_modules += $(LOCAL_REQUIRED_MODULES_$($(my_prefix)OS))
endif
-ALL_MODULES.$(my_register_name).SHARED_LIBS := \
- $(ALL_MODULES.$(my_register_name).SHARED_LIBS) $(LOCAL_SHARED_LIBRARIES)
+ifndef LOCAL_SOONG_MODULE_INFO_JSON
+ ALL_MAKE_MODULE_INFO_JSON_MODULES += $(my_register_name)
+ ALL_MODULES.$(my_register_name).SHARED_LIBS := \
+ $(ALL_MODULES.$(my_register_name).SHARED_LIBS) $(LOCAL_SHARED_LIBRARIES)
-ALL_MODULES.$(my_register_name).STATIC_LIBS := \
- $(ALL_MODULES.$(my_register_name).STATIC_LIBS) $(LOCAL_STATIC_LIBRARIES)
+ ALL_MODULES.$(my_register_name).STATIC_LIBS := \
+ $(ALL_MODULES.$(my_register_name).STATIC_LIBS) $(LOCAL_STATIC_LIBRARIES)
-ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS := \
- $(ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS) $(LOCAL_SYSTEM_SHARED_LIBRARIES)
+ ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS := \
+ $(ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS) $(LOCAL_SYSTEM_SHARED_LIBRARIES)
-ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES := \
- $(ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES) $(LOCAL_RUNTIME_LIBRARIES) \
- $(LOCAL_JAVA_LIBRARIES)
+ ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES := \
+ $(ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES) $(LOCAL_RUNTIME_LIBRARIES) \
+ $(LOCAL_JAVA_LIBRARIES)
-ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES := \
- $(ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES)
+ ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES := \
+ $(ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES)
-ifneq ($(my_test_data_file_pairs),)
- # Export the list of targets that are handled as data inputs and required
- # by tests at runtime. The format of my_test_data_file_pairs is
- # is $(path):$(relative_file) but for module-info, only the string after
- # ":" is needed.
- ALL_MODULES.$(my_register_name).TEST_DATA := \
- $(strip $(ALL_MODULES.$(my_register_name).TEST_DATA) \
- $(foreach f, $(my_test_data_file_pairs),\
- $(call word-colon,2,$(f))))
+ ifneq ($(my_test_data_file_pairs),)
+ # Export the list of targets that are handled as data inputs and required
+ # by tests at runtime. The format of my_test_data_file_pairs is
+ # is $(path):$(relative_file) but for module-info, only the string after
+ # ":" is needed.
+ ALL_MODULES.$(my_register_name).TEST_DATA := \
+ $(strip $(ALL_MODULES.$(my_register_name).TEST_DATA) \
+ $(foreach f, $(my_test_data_file_pairs),\
+ $(call word-colon,2,$(f))))
+ endif
+
+ ifdef LOCAL_TEST_DATA_BINS
+ ALL_MODULES.$(my_register_name).TEST_DATA_BINS := \
+ $(ALL_MODULES.$(my_register_name).TEST_DATA_BINS) $(LOCAL_TEST_DATA_BINS)
+ endif
+
+ ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS := \
+ $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS) \
+ $(filter-out $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS),$(my_supported_variant))
+
+ ALL_MODULES.$(my_register_name).ACONFIG_FILES := \
+ $(ALL_MODULES.$(my_register_name).ACONFIG_FILES) $(LOCAL_ACONFIG_FILES)
+
+ ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES := \
+ $(ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES) $(LOCAL_COMPATIBILITY_SUITE)
+ ALL_MODULES.$(my_register_name).MODULE_NAME := $(LOCAL_MODULE)
+ ALL_MODULES.$(my_register_name).TEST_CONFIG := $(test_config)
+ ALL_MODULES.$(my_register_name).EXTRA_TEST_CONFIGS := $(LOCAL_EXTRA_FULL_TEST_CONFIGS)
+ ALL_MODULES.$(my_register_name).TEST_MAINLINE_MODULES := $(LOCAL_TEST_MAINLINE_MODULES)
+ ifdef LOCAL_IS_UNIT_TEST
+ ALL_MODULES.$(my_register_name).IS_UNIT_TEST := $(LOCAL_IS_UNIT_TEST)
+ endif
+ ifdef LOCAL_TEST_OPTIONS_TAGS
+ ALL_MODULES.$(my_register_name).TEST_OPTIONS_TAGS := $(LOCAL_TEST_OPTIONS_TAGS)
+ endif
+
+ ##########################################################
+ # Track module-level dependencies.
+ # (b/204397180) Unlock RECORD_ALL_DEPS was acknowledged reasonable for better Atest performance.
+ ALL_MODULES.$(my_register_name).ALL_DEPS := \
+ $(ALL_MODULES.$(my_register_name).ALL_DEPS) \
+ $(LOCAL_STATIC_LIBRARIES) \
+ $(LOCAL_WHOLE_STATIC_LIBRARIES) \
+ $(LOCAL_SHARED_LIBRARIES) \
+ $(LOCAL_DYLIB_LIBRARIES) \
+ $(LOCAL_RLIB_LIBRARIES) \
+ $(LOCAL_PROC_MACRO_LIBRARIES) \
+ $(LOCAL_HEADER_LIBRARIES) \
+ $(LOCAL_STATIC_JAVA_LIBRARIES) \
+ $(LOCAL_JAVA_LIBRARIES) \
+ $(LOCAL_JNI_SHARED_LIBRARIES)
+
endif
-ifdef LOCAL_TEST_DATA_BINS
- ALL_MODULES.$(my_register_name).TEST_DATA_BINS := \
- $(ALL_MODULES.$(my_register_name).TEST_DATA_BINS) $(LOCAL_TEST_DATA_BINS)
-endif
-
-ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS := \
- $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS) \
- $(filter-out $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS),$(my_supported_variant))
-
-ALL_MODULES.$(my_register_name).ACONFIG_FILES := \
- $(ALL_MODULES.$(my_register_name).ACONFIG_FILES) $(LOCAL_ACONFIG_FILES)
-
-
##########################################################################
## When compiling against API imported module, use API import stub
## libraries.
@@ -1126,55 +1168,31 @@
$(call pretty-error,LOCAL_TARGET_REQUIRED_MODULES may not be used from target modules. Use LOCAL_REQUIRED_MODULES instead)
endif
endif
-ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS := \
- $(ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS) $(event_log_tags)
+
+ifdef event_log_tags
+ ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS := \
+ $(ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS) $(event_log_tags)
+endif
+
ALL_MODULES.$(my_register_name).MAKEFILE := \
$(ALL_MODULES.$(my_register_name).MAKEFILE) $(LOCAL_MODULE_MAKEFILE)
+
ifdef LOCAL_MODULE_OWNER
-ALL_MODULES.$(my_register_name).OWNER := \
- $(sort $(ALL_MODULES.$(my_register_name).OWNER) $(LOCAL_MODULE_OWNER))
+ ALL_MODULES.$(my_register_name).OWNER := \
+ $(sort $(ALL_MODULES.$(my_register_name).OWNER) $(LOCAL_MODULE_OWNER))
endif
+
ifdef LOCAL_2ND_ARCH_VAR_PREFIX
ALL_MODULES.$(my_register_name).FOR_2ND_ARCH := true
endif
ALL_MODULES.$(my_register_name).FOR_HOST_CROSS := $(my_host_cross)
-ALL_MODULES.$(my_register_name).MODULE_NAME := $(LOCAL_MODULE)
-ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES := \
- $(ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES) \
- $(filter-out $(ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES),$(LOCAL_COMPATIBILITY_SUITE))
-ALL_MODULES.$(my_register_name).TEST_CONFIG := $(test_config)
-ALL_MODULES.$(my_register_name).EXTRA_TEST_CONFIGS := $(LOCAL_EXTRA_FULL_TEST_CONFIGS)
-ALL_MODULES.$(my_register_name).TEST_MAINLINE_MODULES := $(LOCAL_TEST_MAINLINE_MODULES)
ifndef LOCAL_IS_HOST_MODULE
-ALL_MODULES.$(my_register_name).FILE_CONTEXTS := $(LOCAL_FILE_CONTEXTS)
ALL_MODULES.$(my_register_name).APEX_KEYS_FILE := $(LOCAL_APEX_KEY_PATH)
endif
-ifdef LOCAL_IS_UNIT_TEST
-ALL_MODULES.$(my_register_name).IS_UNIT_TEST := $(LOCAL_IS_UNIT_TEST)
-endif
-ifdef LOCAL_TEST_OPTIONS_TAGS
-ALL_MODULES.$(my_register_name).TEST_OPTIONS_TAGS := $(LOCAL_TEST_OPTIONS_TAGS)
-endif
test_config :=
INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
-##########################################################
-# Track module-level dependencies.
-# (b/204397180) Unlock RECORD_ALL_DEPS was acknowledged reasonable for better Atest performance.
-ALL_MODULES.$(my_register_name).ALL_DEPS := \
- $(ALL_MODULES.$(my_register_name).ALL_DEPS) \
- $(LOCAL_STATIC_LIBRARIES) \
- $(LOCAL_WHOLE_STATIC_LIBRARIES) \
- $(LOCAL_SHARED_LIBRARIES) \
- $(LOCAL_DYLIB_LIBRARIES) \
- $(LOCAL_RLIB_LIBRARIES) \
- $(LOCAL_PROC_MACRO_LIBRARIES) \
- $(LOCAL_HEADER_LIBRARIES) \
- $(LOCAL_STATIC_JAVA_LIBRARIES) \
- $(LOCAL_JAVA_LIBRARIES) \
- $(LOCAL_JNI_SHARED_LIBRARIES)
-
###########################################################
## umbrella targets used to verify builds
###########################################################
diff --git a/core/binary.mk b/core/binary.mk
index 8c107bd..7998a5a 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -274,6 +274,13 @@
endif
endif
+ifneq ($(LOCAL_MIN_SDK_VERSION),)
+ ifdef LOCAL_IS_HOST_MODULE
+ $(error $(LOCAL_PATH): LOCAL_MIN_SDK_VERSION cannot be used in host module)
+ endif
+ my_api_level := $(LOCAL_MIN_SDK_VERSION)
+endif
+
ifeq ($(NATIVE_COVERAGE),true)
ifndef LOCAL_IS_HOST_MODULE
my_ldflags += -Wl,--wrap,getenv
diff --git a/core/board_config.mk b/core/board_config.mk
index ae11eb6..5a1a781 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -186,6 +186,7 @@
BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
BUILD_BROKEN_INCORRECT_PARTITION_IMAGES \
BUILD_BROKEN_GENRULE_SANDBOXING \
+ BUILD_BROKEN_DONT_CHECK_SYSTEMSDK \
_build_broken_var_list += \
$(foreach m,$(AVAILABLE_BUILD_MODULE_TYPES) \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index b73e9b4..8d99176 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -87,7 +87,6 @@
LOCAL_EXTRA_FULL_TEST_CONFIGS:=
LOCAL_EXTRACT_APK:=
LOCAL_EXTRACT_DPI_APK:=
-LOCAL_FILE_CONTEXTS:=
LOCAL_FINDBUGS_FLAGS:=
LOCAL_FORCE_STATIC_EXECUTABLE:=
LOCAL_FULL_CLASSES_JACOCO_JAR:=
@@ -265,6 +264,7 @@
LOCAL_SOONG_LICENSE_METADATA :=
LOCAL_SOONG_LINK_TYPE :=
LOCAL_SOONG_LINT_REPORTS :=
+LOCAL_SOONG_MODULE_INFO_JSON :=
LOCAL_SOONG_MODULE_TYPE :=
LOCAL_SOONG_PROGUARD_DICT :=
LOCAL_SOONG_PROGUARD_USAGE_ZIP :=
diff --git a/core/combo/arch/x86/goldmont-without-xsaves.mk b/core/combo/arch/x86/goldmont-without-xsaves.mk
new file mode 100644
index 0000000..1b93c17
--- /dev/null
+++ b/core/combo/arch/x86/goldmont-without-xsaves.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# goldmont-without-xsaves arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86_64/goldmont-without-xsaves.mk b/core/combo/arch/x86_64/goldmont-without-xsaves.mk
new file mode 100644
index 0000000..1b93c17
--- /dev/null
+++ b/core/combo/arch/x86_64/goldmont-without-xsaves.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# goldmont-without-xsaves arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/config.mk b/core/config.mk
index c994d39..469be30 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -110,6 +110,7 @@
$(KATI_obsolete_var BUILD_BROKEN_ENG_DEBUG_TAGS)
$(KATI_obsolete_export It is a global setting. See $(CHANGES_URL)#export_keyword)
$(KATI_obsolete_var BUILD_BROKEN_ANDROIDMK_EXPORTS)
+$(KATI_obsolete_var PRODUCT_NOTICE_SPLIT_OVERRIDE,Stop using this, keep calm, and carry on.)
$(KATI_obsolete_var PRODUCT_STATIC_BOOT_CONTROL_HAL,Use shared library module instead. See $(CHANGES_URL)#PRODUCT_STATIC_BOOT_CONTROL_HAL)
$(KATI_obsolete_var \
ARCH_ARM_HAVE_ARMV7A \
@@ -777,16 +778,9 @@
PRODUCT_FULL_TREBLE := true
endif
-# TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior and remove
-# references to it here and below.
-ifdef PRODUCT_NOTICE_SPLIT_OVERRIDE
- $(error PRODUCT_NOTICE_SPLIT_OVERRIDE cannot be set.)
-endif
-
requirements := \
PRODUCT_TREBLE_LINKER_NAMESPACES \
- PRODUCT_ENFORCE_VINTF_MANIFEST \
- PRODUCT_NOTICE_SPLIT
+ PRODUCT_ENFORCE_VINTF_MANIFEST
# If it is overriden, then the requirement override is taken, otherwise it's
# PRODUCT_FULL_TREBLE
@@ -799,12 +793,20 @@
PRODUCT_FULL_TREBLE_OVERRIDE ?=
$(foreach req,$(requirements),$(eval $(req)_OVERRIDE ?=))
+# This used to be part of PRODUCT_FULL_TREBLE, but is now always set.
+PRODUCT_NOTICE_SPLIT := true
+
# TODO(b/114488870): disallow PRODUCT_FULL_TREBLE_OVERRIDE from being used.
.KATI_READONLY := \
PRODUCT_FULL_TREBLE_OVERRIDE \
$(foreach req,$(requirements),$(req)_OVERRIDE) \
$(requirements) \
PRODUCT_FULL_TREBLE \
+ PRODUCT_NOTICE_SPLIT \
+
+ifneq ($(PRODUCT_FULL_TREBLE),true)
+ $(warning This device does not have Treble enabled. This is unsafe.)
+endif
$(KATI_obsolete_var $(foreach req,$(requirements),$(req)_OVERRIDE) \
,This should be referenced without the _OVERRIDE suffix.)
@@ -829,14 +831,10 @@
# Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
# devices if unset.
ifndef BOARD_SYSTEMSDK_VERSIONS
- ifdef PRODUCT_SHIPPING_API_LEVEL
- ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),28),)
- ifeq (REL,$(PLATFORM_VERSION_CODENAME))
- BOARD_SYSTEMSDK_VERSIONS := $(PLATFORM_SDK_VERSION)
- else
- BOARD_SYSTEMSDK_VERSIONS := $(PLATFORM_VERSION_CODENAME)
- endif
- endif
+ ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ BOARD_SYSTEMSDK_VERSIONS := $(PLATFORM_SDK_VERSION)
+ else
+ BOARD_SYSTEMSDK_VERSIONS := $(PLATFORM_VERSION_CODENAME)
endif
endif
diff --git a/core/definitions.mk b/core/definitions.mk
index 7a6c064..ed842bc 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -37,6 +37,8 @@
# sub-variables.
ALL_MODULES:=
+ALL_MAKE_MODULE_INFO_JSON_MODULES:=
+
# The relative paths of the non-module targets in the system.
ALL_NON_MODULES:=
NON_MODULES_WITHOUT_LICENSE_METADATA:=
@@ -3120,14 +3122,12 @@
# Copies many init script files and check they are well-formed.
# $(1): The init script files to copy. Each entry is a ':' separated src:dst pair.
-# Evaluates to the list of the dst files. (ie suitable for a dependency list.)
define copy-many-init-script-files-checked
$(foreach f, $(1), $(strip \
$(eval _cmf_tuple := $(subst :, ,$(f))) \
$(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
$(eval _cmf_dest := $(word 2,$(_cmf_tuple))) \
- $(eval $(call copy-init-script-file-checked,$(_cmf_src),$(_cmf_dest))) \
- $(_cmf_dest)))
+ $(eval $(call copy-init-script-file-checked,$(_cmf_src),$(_cmf_dest)))))
endef
# Copy the file only if it's a well-formed xml file. For use via $(eval).
@@ -3165,14 +3165,12 @@
# Copies many vintf manifest files checked.
# $(1): The files to copy. Each entry is a ':' separated src:dst pair
-# Evaluates to the list of the dst files (ie suitable for a dependency list)
define copy-many-vintf-manifest-files-checked
$(foreach f, $(1), $(strip \
$(eval _cmf_tuple := $(subst :, ,$(f))) \
$(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
$(eval _cmf_dest := $(word 2,$(_cmf_tuple))) \
- $(eval $(call copy-vintf-manifest-checked,$(_cmf_src),$(_cmf_dest))) \
- $(_cmf_dest)))
+ $(eval $(call copy-vintf-manifest-checked,$(_cmf_src),$(_cmf_dest)))))
endef
# Copy the file only if it's not an ELF file. For use via $(eval).
@@ -3308,7 +3306,6 @@
@mkdir -p $$(dir $$@)
@rm -rf $$@
$(hide) ln -sf $(2) $$@
-$(3): .KATI_SYMLINK_OUTPUTS := $(3)
endef
# Copy an apk to a target location while removing classes*.dex
diff --git a/core/local_current_sdk.mk b/core/local_current_sdk.mk
index ea7da8a..ccdbf77 100644
--- a/core/local_current_sdk.mk
+++ b/core/local_current_sdk.mk
@@ -14,13 +14,24 @@
# limitations under the License.
#
ifdef BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
- ifneq (current,$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
+ _override_to := $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+
+ # b/314011075: apks and jars in the vendor or odm partitions cannot use
+ # system SDK 35 and beyond. In order not to suddenly break those vendor
+ # modules using current or system_current as their LOCAL_SDK_VERSION,
+ # override it to 34, which is the maximum API level allowed for them.
+ ifneq (,$(filter JAVA_LIBRARIES APPS,$(LOCAL_MODULE_CLASS)))
+ _override_to := 34
+ endif
+
+ ifneq (current,$(_override_to))
ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
ifeq (current,$(LOCAL_SDK_VERSION))
- LOCAL_SDK_VERSION := $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+ LOCAL_SDK_VERSION := $(_override_to)
else ifeq (system_current,$(LOCAL_SDK_VERSION))
- LOCAL_SDK_VERSION := system_$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+ LOCAL_SDK_VERSION := system_$(_override_to)
endif
endif
endif
+ _override_to :=
  endif
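The override above is easiest to read as a small decision function. The sketch below restates it in Python under the assumptions spelled out in the b/314011075 comment (the cap of 34 for vendor/odm Java modules); it is illustrative only and not part of the build.

    def override_local_sdk_version(local_sdk_version: str, module_class: str,
                                   is_vendor_or_odm: bool, board_level: str) -> str:
        override_to = board_level
        # apks and jars in vendor/odm cannot use system SDK 35 and beyond.
        if module_class in ('JAVA_LIBRARIES', 'APPS'):
            override_to = '34'
        if not is_vendor_or_odm or override_to == 'current':
            return local_sdk_version
        if local_sdk_version == 'current':
            return override_to
        if local_sdk_version == 'system_current':
            return 'system_' + override_to
        return local_sdk_version

    assert override_local_sdk_version('system_current', 'APPS', True, 'current') == 'system_34'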
diff --git a/core/local_systemsdk.mk b/core/local_systemsdk.mk
index 460073d..3307e72 100644
--- a/core/local_systemsdk.mk
+++ b/core/local_systemsdk.mk
@@ -33,6 +33,9 @@
# Runtime resource overlays are exempted from building against System SDK.
# TODO(b/155027019): remove this, after no product/vendor apps rely on this behavior.
LOCAL_SDK_VERSION := system_current
+      # Run local_current_sdk.mk again because LOCAL_SDK_VERSION was just set and
+      # the "_current" value may need to be overridden.
+ include $(BUILD_SYSTEM)/local_current_sdk.mk
endif
endif
endif
@@ -54,10 +57,35 @@
# If not, vendor apks are treated equally to system apps
_supported_systemsdk_versions := $(PLATFORM_SYSTEMSDK_VERSIONS)
endif
+
+ # b/314011075: apks and jars in the vendor or odm partitions cannot use system SDK 35 and beyond.
+ # This is to discourage the use of Java APIs in the partitions, which hasn't been supported since
+ # the beginning of the project Treble back in Android 10. Ultimately, we'd like to completely
+ # disallow any Java API in the partitions, but it shall be done progressively.
+ ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+    # 28 is the API level at which BOARD_SYSTEMSDK_VERSIONS was introduced, so it is the
+    # oldest API we allow.
+ _supported_systemsdk_versions := $(call int_range_list, 28, 34)
+ endif
+
+ # Extract version number from LOCAL_SDK_VERSION (ex: system_34 -> 34)
_system_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
+  # However, the extraction fails if the version carries no number (e.g. current, core_current,
+  # system_current). In that case, use the latest platform SDK version number or the actual
+  # codename.
+  ifeq (,$(_system_sdk_version))
+ ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ _system_sdk_version := $(PLATFORM_SDK_VERSION)
+ else
+ _system_sdk_version := $(PLATFORM_VERSION_CODENAME)
+ endif
+ endif
+
ifneq ($(_system_sdk_version),$(filter $(_system_sdk_version),$(_supported_systemsdk_versions)))
- $(call pretty-error,Incompatible LOCAL_SDK_VERSION '$(LOCAL_SDK_VERSION)'. \
- System SDK version '$(_system_sdk_version)' is not supported. Supported versions are: $(_supported_systemsdk_versions))
+    ifneq (true,$(BUILD_BROKEN_DONT_CHECK_SYSTEMSDK))
+ $(call pretty-error,Incompatible LOCAL_SDK_VERSION '$(LOCAL_SDK_VERSION)'. \
+ System SDK version '$(_system_sdk_version)' is not supported. Supported versions are: $(_supported_systemsdk_versions))
+ endif
endif
_system_sdk_version :=
_supported_systemsdk_versions :=
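Restating the check for clarity: the sketch below mirrors the gating described in the comments above (the 28..34 window for vendor/odm code, the fallback when LOCAL_SDK_VERSION carries no number, and the BUILD_BROKEN_DONT_CHECK_SYSTEMSDK escape hatch). It is a Python illustration, not the build's implementation.

    def check_system_sdk(local_sdk_version: str, supported: list[str],
                         is_vendor_or_odm: bool, platform_sdk: str,
                         codename: str, dont_check: bool = False) -> None:
        if is_vendor_or_odm:
            # b/314011075: vendor/odm apks and jars are capped at system SDK 28..34.
            supported = [str(v) for v in range(28, 35)]
        version = ''.join(ch for ch in local_sdk_version if ch.isdigit())
        if not version:
            # current / system_current / core_current carry no number.
            version = platform_sdk if codename == 'REL' else codename
        if version not in supported and not dont_check:
            raise ValueError(
                f"Incompatible LOCAL_SDK_VERSION '{local_sdk_version}'. "
                f"Supported versions are: {supported}")

    check_system_sdk('system_34', ['34'], True, '35', 'REL')  # passes quietly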
diff --git a/core/main.mk b/core/main.mk
index 348a964..649c75c 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1721,10 +1721,8 @@
# dist_files only for putting your library into the dist directory with a full build.
.PHONY: dist_files
-ifeq ($(SOONG_COLLECT_JAVA_DEPS), true)
- $(call dist-for-goals, dist_files, $(SOONG_OUT_DIR)/module_bp_java_deps.json)
- $(call dist-for-goals, dist_files, $(PRODUCT_OUT)/module-info.json)
-endif
+$(call dist-for-goals, dist_files, $(SOONG_OUT_DIR)/module_bp_java_deps.json)
+$(call dist-for-goals, dist_files, $(PRODUCT_OUT)/module-info.json)
.PHONY: apps_only
ifeq ($(HOST_OS),darwin)
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index 57df911..12057fb 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -97,6 +97,46 @@
)) \
)
+# Create a set of storage files for each partition
+# $(1): built aconfig flags storage dir (out)
+# $(2): installed aconfig flags storage package map file (out)
+# $(3): installed aconfig flags storage flag map file (out)
+# $(4): installed aconfig flags storage flag value file (out)
+# $(5): input aconfig files for the partition (in)
+define generate-partition-aconfig-storage-file
+$(eval $(strip $(1))/target: PRIVATE_OUT_DIR := $(strip $(1)))
+$(eval $(strip $(1))/target: PRIVATE_IN := $(strip $(5)))
+$(strip $(1))/target: $(ACONFIG) $(strip $(5))
+ mkdir -p $$(PRIVATE_OUT_DIR)
+ $$(if $$(PRIVATE_IN), \
+ $$(ACONFIG) create-storage --container "" --out $$(PRIVATE_OUT_DIR) \
+ $$(addprefix --cache ,$$(PRIVATE_IN)), \
+ )
+ echo -n > $$(PRIVATE_OUT_DIR)/target
+$(strip $(1))/package.map: $(strip $(1))/target
+$(strip $(1))/flag.map: $(strip $(1))/target
+$(strip $(1))/flag.val: $(strip $(1))/target
+$(call copy-one-file, $(strip $(1))/package.map, $(2))
+$(call copy-one-file, $(strip $(1))/flag.map, $(3))
+$(call copy-one-file, $(strip $(1))/flag.val, $(4))
+endef
+
+ifeq ($(RELEASE_CREATE_ACONFIG_STORAGE_FILE),true)
+$(foreach partition, $(_FLAG_PARTITIONS), \
+ $(eval aconfig_storage_package_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/package.map) \
+ $(eval aconfig_storage_flag_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/flag.map) \
+  $(eval aconfig_storage_flag_val.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/flag.val) \
+ $(eval $(call generate-partition-aconfig-storage-file, \
+ $(TARGET_OUT_FLAGS)/$(partition), \
+ $(aconfig_storage_package_map.$(partition)), \
+ $(aconfig_storage_flag_map.$(partition)), \
+ $(aconfig_storage_flag_val.$(partition)), \
+ $(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
+ $(ALL_MODULES.$(m).ACONFIG_FILES) \
+ )), \
+ )) \
+)
+endif
# -----------------------------------------------------------------
# Install the ones we need for the configured product
@@ -104,6 +144,9 @@
$(sort $(foreach partition, $(filter $(IMAGES_TO_BUILD), $(_FLAG_PARTITIONS)), \
$(build_flag_summaries.$(partition)) \
$(aconfig_flag_summaries_protobuf.$(partition)) \
+ $(aconfig_storage_package_map.$(partition)) \
+ $(aconfig_storage_flag_map.$(partition)) \
+ $(aconfig_storage_flag_val.$(partition)) \
))
ALL_DEFAULT_INSTALLED_MODULES += $(required_flags_files)
@@ -119,5 +162,8 @@
$(foreach partition, $(_FLAG_PARTITIONS), \
$(eval build_flag_summaries.$(partition):=) \
$(eval aconfig_flag_summaries_protobuf.$(partition):=) \
+ $(eval aconfig_storage_package_map.$(partition):=) \
+ $(eval aconfig_storage_flag_map.$(partition):=) \
+ $(eval aconfig_storage_flag_val.$(partition):=) \
)
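For reference, generate-partition-aconfig-storage-file boils down to one aconfig create-storage invocation plus a stamp file per partition. The sketch below spells that out in Python; the subcommand and flags are copied from the macro, while the paths are placeholders and the final copy step is only noted in a comment.

    import pathlib
    import subprocess

    def create_partition_storage(aconfig: str, out_dir: str, caches: list[str]) -> None:
        out = pathlib.Path(out_dir)
        out.mkdir(parents=True, exist_ok=True)
        if caches:
            cmd = [aconfig, 'create-storage', '--container', '', '--out', str(out)]
            for cache in caches:
                cmd += ['--cache', cache]
            subprocess.run(cmd, check=True)
        # Stamp file, mirroring the $(1)/target rule above.
        (out / 'target').write_text('')
        # package.map, flag.map and flag.val are then copied to
        # $(PRODUCT_OUT)/<partition>/etc/ by the copy-one-file calls.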
diff --git a/core/product.mk b/core/product.mk
index 5515a8a..2d22ebf 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -446,7 +446,6 @@
_product_list_vars += PRODUCT_AFDO_PROFILES
-_product_single_value_vars += PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API
_product_single_value_vars += PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE
_product_list_vars += PRODUCT_RELEASE_CONFIG_MAPS
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 193ac18..7d03aa3 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -47,7 +47,6 @@
$(call add_json_str, Platform_version_known_codenames, $(PLATFORM_VERSION_KNOWN_CODENAMES))
$(call add_json_bool, Release_aidl_use_unfrozen, $(RELEASE_AIDL_USE_UNFROZEN))
-$(call add_json_bool, Release_expose_flagged_api, $(RELEASE_EXPOSE_FLAGGED_API))
$(call add_json_str, Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
@@ -299,6 +298,7 @@
$(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
$(call add_json_bool, BuildBrokenIncorrectPartitionImages, $(filter true,$(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES)))
$(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES))
+$(call add_json_bool, BuildBrokenDontCheckSystemSdk, $(filter true,$(BUILD_BROKEN_DONT_CHECK_SYSTEMSDK)))
$(call add_json_list, BuildWarningBadOptionalUsesLibsAllowlist, $(BUILD_WARNING_BAD_OPTIONAL_USES_LIBS_ALLOWLIST))
@@ -395,8 +395,6 @@
$(call add_json_list, ProductPackages, $(sort $(PRODUCT_PACKAGES)))
$(call end_json_map)
-$(call add_json_bool, NextReleaseHideFlaggedApi, $(filter true,$(PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API)))
-
$(call add_json_bool, BuildFromSourceStub, $(findstring true,$(PRODUCT_BUILD_FROM_SOURCE_STUB) $(BUILD_FROM_SOURCE_STUB)))
$(call json_end)
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index eb5c63c..8546828 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -13,10 +13,15 @@
$(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"')
endef
-$(MODULE_INFO_JSON):
+SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT).json
+
+$(MODULE_INFO_JSON): PRIVATE_SOONG_MODULE_INFO := $(SOONG_MODULE_INFO)
+$(MODULE_INFO_JSON): PRIVATE_MERGE_JSON_OBJECTS := $(HOST_OUT_EXECUTABLES)/merge_module_info_json
+$(MODULE_INFO_JSON): $(HOST_OUT_EXECUTABLES)/merge_module_info_json
+$(MODULE_INFO_JSON): $(SOONG_MODULE_INFO)
@echo Generating $@
- $(hide) echo -ne '{\n ' > $@
- $(hide) echo -ne $(KATI_foreach_sep m,$(COMMA)$(_NEWLINE), $(sort $(ALL_MODULES)),\
+ $(hide) echo -ne '{\n ' > $@.tmp
+ $(hide) echo -ne $(KATI_foreach_sep m,$(COMMA)$(_NEWLINE), $(sort $(ALL_MAKE_MODULE_INFO_JSON_MODULES)),\
'"$(m)": {' \
'"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)"' \
$(call write-optional-json-list, "class", $(sort $(ALL_MODULES.$(m).CLASS))) \
@@ -43,7 +48,9 @@
$(call write-optional-json-list, "supported_variants", $(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS))) \
$(call write-optional-json-list, "host_dependencies", $(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET))) \
$(call write-optional-json-list, "target_dependencies", $(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST))) \
- '}')'\n}\n' >> $@
+ '}')'\n}\n' >> $@.tmp
+ $(PRIVATE_MERGE_JSON_OBJECTS) -o $@ $(PRIVATE_SOONG_MODULE_INFO) $@.tmp
+ rm $@.tmp
droidcore-unbundled: $(MODULE_INFO_JSON)
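module-info.json is now assembled by merging Soong's per-product JSON with the Make-generated fragment via merge_module_info_json. The sketch below shows a plain shallow merge of JSON objects to convey the idea; the real tool's precedence rules are not reproduced here, so "later file wins" is an assumption, and the file names in the commented call are placeholders.

    import json

    def merge_module_info(paths: list[str], out_path: str) -> None:
        merged: dict = {}
        for path in paths:
            with open(path) as f:
                merged.update(json.load(f))  # assumption: later files win on duplicate names
        with open(out_path, 'w') as f:
            json.dump(merged, f, indent=2, sort_keys=True)

    # merge_module_info(['module-info-soong.json', 'module-info-make.json.tmp'],
    #                   'module-info.json')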
diff --git a/envsetup.sh b/envsetup.sh
index 212ed9f..e180cf1 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -254,7 +254,7 @@
# Note: on windows/cygwin, ANDROID_LUNCH_BUILD_PATHS will contain spaces
# due to "C:\Program Files" being in the path.
- # Handle compat with the old ANDROID_BUILD_PATHS variable.
+ # Handle compat with the old ANDROID_BUILD_PATHS variable.
# TODO: Remove this after we think everyone has lunched again.
if [ -z "$ANDROID_LUNCH_BUILD_PATHS" -a -n "$ANDROID_BUILD_PATHS" ] ; then
ANDROID_LUNCH_BUILD_PATHS="$ANDROID_BUILD_PATHS"
@@ -775,7 +775,7 @@
answer=$1
else
print_lunch_menu
- echo "Which would you like? [aosp_arm-trunk_staging-eng]"
+ echo "Which would you like? [aosp_cf_x86_64_phone-trunk_staging-eng]"
echo -n "Pick from common choices above (e.g. 13) or specify your own (e.g. aosp_barbet-trunk_staging-eng): "
read answer
used_lunch_menu=1
@@ -785,10 +785,10 @@
if [ -z "$answer" ]
then
- selection=aosp_arm-trunk_staging-eng
+ selection=aosp_cf_x86_64_phone-trunk_staging-eng
elif (echo -n $answer | grep -q -e "^[0-9][0-9]*$")
then
- local choices=($(TARGET_BUILD_APPS= get_build_var COMMON_LUNCH_CHOICES))
+ local choices=($(TARGET_BUILD_APPS= TARGET_PRODUCT= TARGET_RELEASE= TARGET_BUILD_VARIANT= get_build_var COMMON_LUNCH_CHOICES 2>/dev/null))
if [ $answer -le ${#choices[@]} ]
then
# array in zsh starts from 1 instead of 0.
@@ -1886,6 +1886,11 @@
>&2 echo "Couldn't locate the top of the tree. Try setting TOP."
return 1
fi
+ local ret=$?
+ if [[ ret -eq 0 && -z "${ANDROID_QUIET_BUILD:-}" && -n "${ANDROID_BUILD_BANNER}" ]]; then
+ echo "${ANDROID_BUILD_BANNER}"
+ fi
+ return $ret
)
function m()
diff --git a/target/product/OWNERS b/target/product/OWNERS
index 008e4a2..48d3f2a 100644
--- a/target/product/OWNERS
+++ b/target/product/OWNERS
@@ -1,4 +1,4 @@
-per-file runtime_libart.mk = calin@google.com, mast@google.com, ngeoffray@google.com, oth@google.com, rpl@google.com, vmarko@google.com
+per-file runtime_libart.mk = mast@google.com, ngeoffray@google.com, rpl@google.com, vmarko@google.com
# GSI
per-file gsi_release.mk = file:/target/product/gsi/OWNERS
@@ -7,4 +7,4 @@
# Android Go
per-file go_defaults.mk = gkaiser@google.com, kushg@google.com, rajekumar@google.com
per-file go_defaults_512.mk = gkaiser@google.com, kushg@google.com, rajekumar@google.com
-per-file go_defaults_common.mk = gkaiser@google.com, kushg@google.com, rajekumar@google.com
\ No newline at end of file
+per-file go_defaults_common.mk = gkaiser@google.com, kushg@google.com, rajekumar@google.com
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 5d5dca4..0d88046 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -70,7 +70,7 @@
com.android.scheduling \
com.android.sdkext \
com.android.tethering \
- com.android.tzdata \
+ $(RELEASE_PACKAGE_TZDATA_MODULE) \
com.android.uwb \
com.android.virt \
com.android.wifi \
@@ -94,7 +94,6 @@
framework-graphics \
framework-minus-apex \
framework-minus-apex-install-dependencies \
- framework-nfc \
framework-res \
framework-sysconfig.xml \
fsck.erofs \
@@ -289,6 +288,13 @@
wifi.rc \
wm \
+# When we release the crashrecovery module
+ifeq ($(RELEASE_CRASHRECOVERY_MODULE),true)
+ PRODUCT_PACKAGES += \
+ com.android.crashrecovery \
+
+endif
+
# These packages are not used on Android TV
ifneq ($(PRODUCT_IS_ATV),true)
PRODUCT_PACKAGES += \
@@ -303,6 +309,16 @@
endif
+# Check if the build supports NFC apex or not
+ifeq ($(RELEASE_PACKAGE_NFC_STACK),NfcNci)
+ PRODUCT_PACKAGES += \
+ framework-nfc \
+ NfcNci
+else
+ PRODUCT_PACKAGES += \
+ com.android.nfcservices
+endif
+
# VINTF data for system image
PRODUCT_PACKAGES += \
system_manifest.xml \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 55fcf2f..2fd7209 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -50,7 +50,6 @@
PRODUCT_BOOT_JARS += \
framework-minus-apex \
framework-graphics \
- framework-nfc \
ext \
telephony-common \
voip-common \
@@ -88,6 +87,22 @@
com.android.virt:framework-virtualization \
com.android.wifi:framework-wifi \
+# When we release the crashrecovery module
+ifeq ($(RELEASE_CRASHRECOVERY_MODULE),true)
+ PRODUCT_APEX_BOOT_JARS += \
+ com.android.crashrecovery:framework-crashrecovery \
+
+endif
+
+# Check if the build supports NFC apex or not
+ifeq ($(RELEASE_PACKAGE_NFC_STACK),NfcNci)
+ PRODUCT_BOOT_JARS += \
+ framework-nfc
+else
+ PRODUCT_APEX_BOOT_JARS := \
+ com.android.nfcservices:framework-nfc
+endif
+
# TODO(b/308174306): Adjust this after multiple prebuilts version is supported.
# APEX boot jars that are not in prebuilt apexes.
# Keep the list sorted by module names and then library names.
@@ -109,6 +124,13 @@
com.android.permission:service-permission \
com.android.rkpd:service-rkp \
+# When we release the crashrecovery module
+ifeq ($(RELEASE_CRASHRECOVERY_MODULE),true)
+ PRODUCT_APEX_SYSTEM_SERVER_JARS += \
+ com.android.crashrecovery:service-crashrecovery \
+
+endif
+
# Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
# art project.
ifneq (,$(wildcard art))
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 53c9e0c..80aecb7 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -18,6 +18,7 @@
LLNDK: libneuralnetworks.so
LLNDK: libselinux.so
LLNDK: libsync.so
+LLNDK: libvendorsupport.so
LLNDK: libvndksupport.so
LLNDK: libvulkan.so
VNDK-SP: android.hardware.common-V2-ndk.so
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 3acf1e6..b5292d2 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -73,7 +73,6 @@
UserDictionaryProvider \
VpnDialogs \
vr \
- $(RELEASE_PACKAGE_NFC_STACK)
PRODUCT_SYSTEM_SERVER_APPS += \
diff --git a/target/product/mainline_sdk.mk b/target/product/mainline_sdk.mk
index cb23bc8..10bb0a0 100644
--- a/target/product/mainline_sdk.mk
+++ b/target/product/mainline_sdk.mk
@@ -17,6 +17,4 @@
PRODUCT_BRAND := Android
PRODUCT_DEVICE := mainline_sdk
-PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
-
PRODUCT_BUILD_FROM_SOURCE_STUB := true
\ No newline at end of file
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index b9ccad3..650f8e9 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -29,6 +29,4 @@
PRODUCT_BRAND := Android
PRODUCT_DEVICE := mainline_x86
-PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
-
PRODUCT_BUILD_FROM_SOURCE_STUB := true
\ No newline at end of file
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
index 37be2dd..d5b5b8f 100644
--- a/tools/aconfig/Android.bp
+++ b/tools/aconfig/Android.bp
@@ -96,6 +96,12 @@
srcs: ["tests/test_exported.aconfig"],
}
+aconfig_declarations {
+ name: "aconfig.test.forcereadonly.flags",
+ package: "com.android.aconfig.test.forcereadonly",
+ srcs: ["tests/test_force_read_only.aconfig"],
+}
+
aconfig_values {
name: "aconfig.test.flag.values",
package: "com.android.aconfig.test",
@@ -125,6 +131,12 @@
mode: "exported",
}
+java_aconfig_library {
+ name: "aconfig_test_java_library_forcereadonly",
+ aconfig_declarations: "aconfig.test.forcereadonly.flags",
+ mode: "force-read-only",
+}
+
android_test {
name: "aconfig.test.java",
srcs: [
@@ -135,6 +147,7 @@
static_libs: [
"aconfig_test_java_library",
"aconfig_test_java_library_exported",
+ "aconfig_test_java_library_forcereadonly",
"androidx.test.rules",
"testng",
],
@@ -179,6 +192,12 @@
mode: "exported",
}
+cc_aconfig_library {
+ name: "aconfig_test_cpp_library_force_read_only_variant",
+ aconfig_declarations: "aconfig.test.flags",
+ mode: "force-read-only",
+}
+
cc_test {
name: "aconfig.test.cpp",
srcs: [
@@ -224,6 +243,21 @@
test_suites: ["general-tests"],
}
+cc_test {
+ name: "aconfig.test.cpp.force_read_only_mode",
+ srcs: [
+ "tests/aconfig_force_read_only_mode_test.cpp",
+ ],
+ static_libs: [
+ "aconfig_test_cpp_library_force_read_only_variant",
+ "libgmock",
+ ],
+ shared_libs: [
+ "server_configurable_flags",
+ ],
+ test_suites: ["general-tests"],
+}
+
rust_aconfig_library {
name: "libaconfig_test_rust_library",
crate_name: "aconfig_test_rust_library",
@@ -276,3 +310,21 @@
],
test_suites: ["general-tests"],
}
+
+rust_aconfig_library {
+ name: "libaconfig_test_rust_library_with_force_read_only_mode",
+ crate_name: "aconfig_test_rust_library",
+ aconfig_declarations: "aconfig.test.flags",
+ mode: "force-read-only",
+}
+
+rust_test {
+ name: "aconfig.force_read_only_mode.test.rust",
+ srcs: [
+ "tests/aconfig_force_read_only_mode_test.rs"
+ ],
+ rustlibs: [
+ "libaconfig_test_rust_library_with_force_read_only_mode",
+ ],
+ test_suites: ["general-tests"],
+}
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
index 2edf4b8..7b58e94 100644
--- a/tools/aconfig/Cargo.toml
+++ b/tools/aconfig/Cargo.toml
@@ -20,6 +20,3 @@
[build-dependencies]
protobuf-codegen = "3.2.0"
-
-[dev-dependencies]
-itertools = "0.10.5"
diff --git a/tools/aconfig/printflags/src/main.rs b/tools/aconfig/printflags/src/main.rs
index 4110317..ae9b83a 100644
--- a/tools/aconfig/printflags/src/main.rs
+++ b/tools/aconfig/printflags/src/main.rs
@@ -20,6 +20,7 @@
use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
use anyhow::{bail, Context, Result};
use regex::Regex;
+use std::collections::BTreeMap;
use std::collections::HashMap;
use std::process::Command;
use std::{fs, str};
@@ -66,7 +67,7 @@
let device_config_flags = parse_device_config(dc_stdout);
// read aconfig_flags.pb files
- let mut flags: HashMap<String, Vec<String>> = HashMap::new();
+ let mut flags: BTreeMap<String, Vec<String>> = BTreeMap::new();
for partition in ["system", "system_ext", "product", "vendor"] {
let path = format!("/{}/etc/aconfig_flags.pb", partition);
let Ok(bytes) = fs::read(&path) else {
@@ -86,11 +87,10 @@
// print flags
for (key, mut value) in flags {
- let (_, package_and_name) = key.split_once('/').unwrap();
if let Some(dc_value) = device_config_flags.get(&key) {
value.push(dc_value.to_string());
}
- println!("{}: {}", package_and_name, value.join(", "));
+ println!("{}: {}", key, value.join(", "));
}
Ok(())
diff --git a/tools/aconfig/src/codegen/cpp.rs b/tools/aconfig/src/codegen/cpp.rs
index d6bebba..1279d8e 100644
--- a/tools/aconfig/src/codegen/cpp.rs
+++ b/tools/aconfig/src/codegen/cpp.rs
@@ -51,8 +51,6 @@
readwrite,
readwrite_count,
is_test_mode: codegen_mode == CodegenMode::Test,
- is_prod_mode: codegen_mode == CodegenMode::Production,
- is_exported_mode: codegen_mode == CodegenMode::Exported,
class_elements,
};
@@ -96,8 +94,6 @@
pub readwrite: bool,
pub readwrite_count: i32,
pub is_test_mode: bool,
- pub is_prod_mode: bool,
- pub is_exported_mode: bool,
pub class_elements: Vec<ClassElement>,
}
@@ -485,6 +481,88 @@
#endif
"#;
+ const EXPORTED_FORCE_READ_ONLY_HEADER_EXPECTED: &str = r#"
+#pragma once
+
+#ifndef COM_ANDROID_ACONFIG_TEST
+#define COM_ANDROID_ACONFIG_TEST(FLAG) COM_ANDROID_ACONFIG_TEST_##FLAG
+#endif
+
+#ifndef COM_ANDROID_ACONFIG_TEST_ENABLED_FIXED_RO
+#define COM_ANDROID_ACONFIG_TEST_ENABLED_FIXED_RO true
+#endif
+
+#ifdef __cplusplus
+
+#include <memory>
+
+namespace com::android::aconfig::test {
+
+class flag_provider_interface {
+public:
+ virtual ~flag_provider_interface() = default;
+
+ virtual bool disabled_ro() = 0;
+
+ virtual bool disabled_rw() = 0;
+
+ virtual bool disabled_rw_in_other_namespace() = 0;
+
+ virtual bool enabled_fixed_ro() = 0;
+
+ virtual bool enabled_ro() = 0;
+
+ virtual bool enabled_rw() = 0;
+};
+
+extern std::unique_ptr<flag_provider_interface> provider_;
+
+inline bool disabled_ro() {
+ return false;
+}
+
+inline bool disabled_rw() {
+ return false;
+}
+
+inline bool disabled_rw_in_other_namespace() {
+ return false;
+}
+
+inline bool enabled_fixed_ro() {
+ return COM_ANDROID_ACONFIG_TEST_ENABLED_FIXED_RO;
+}
+
+inline bool enabled_ro() {
+ return true;
+}
+
+inline bool enabled_rw() {
+ return true;
+}
+
+}
+
+extern "C" {
+#endif // __cplusplus
+
+bool com_android_aconfig_test_disabled_ro();
+
+bool com_android_aconfig_test_disabled_rw();
+
+bool com_android_aconfig_test_disabled_rw_in_other_namespace();
+
+bool com_android_aconfig_test_enabled_fixed_ro();
+
+bool com_android_aconfig_test_enabled_ro();
+
+bool com_android_aconfig_test_enabled_rw();
+
+#ifdef __cplusplus
+} // extern "C"
+#endif
+"#;
+
const PROD_SOURCE_FILE_EXPECTED: &str = r#"
#include "com_android_aconfig_test.h"
#include <server_configurable_flags/get_flags.h>
@@ -906,6 +984,69 @@
"#;
+ const FORCE_READ_ONLY_SOURCE_FILE_EXPECTED: &str = r#"
+#include "com_android_aconfig_test.h"
+
+namespace com::android::aconfig::test {
+
+ class flag_provider : public flag_provider_interface {
+ public:
+
+ virtual bool disabled_ro() override {
+ return false;
+ }
+
+ virtual bool disabled_rw() override {
+ return false;
+ }
+
+ virtual bool disabled_rw_in_other_namespace() override {
+ return false;
+ }
+
+ virtual bool enabled_fixed_ro() override {
+ return COM_ANDROID_ACONFIG_TEST_ENABLED_FIXED_RO;
+ }
+
+ virtual bool enabled_ro() override {
+ return true;
+ }
+
+ virtual bool enabled_rw() override {
+ return true;
+ }
+ };
+
+ std::unique_ptr<flag_provider_interface> provider_ =
+ std::make_unique<flag_provider>();
+}
+
+bool com_android_aconfig_test_disabled_ro() {
+ return false;
+}
+
+bool com_android_aconfig_test_disabled_rw() {
+ return false;
+}
+
+bool com_android_aconfig_test_disabled_rw_in_other_namespace() {
+ return false;
+}
+
+bool com_android_aconfig_test_enabled_fixed_ro() {
+ return COM_ANDROID_ACONFIG_TEST_ENABLED_FIXED_RO;
+}
+
+bool com_android_aconfig_test_enabled_ro() {
+ return true;
+}
+
+bool com_android_aconfig_test_enabled_rw() {
+ return true;
+}
+
+"#;
+
const READ_ONLY_EXPORTED_PROD_HEADER_EXPECTED: &str = r#"
#pragma once
@@ -1095,6 +1236,17 @@
}
#[test]
+ fn test_generate_cpp_code_for_force_read_only() {
+ let parsed_flags = crate::test::parse_test_flags();
+ test_generate_cpp_code(
+ parsed_flags,
+ CodegenMode::ForceReadOnly,
+ EXPORTED_FORCE_READ_ONLY_HEADER_EXPECTED,
+ FORCE_READ_ONLY_SOURCE_FILE_EXPECTED,
+ );
+ }
+
+ #[test]
fn test_generate_cpp_code_for_read_only_prod() {
let parsed_flags = crate::test::parse_read_only_test_flags();
test_generate_cpp_code(
diff --git a/tools/aconfig/src/codegen/java.rs b/tools/aconfig/src/codegen/java.rs
index a02a7e2..78e892b 100644
--- a/tools/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/src/codegen/java.rs
@@ -834,6 +834,228 @@
}
#[test]
+ fn test_generate_java_code_force_read_only() {
+ let parsed_flags = crate::test::parse_test_flags();
+ let mode = CodegenMode::ForceReadOnly;
+ let modified_parsed_flags =
+ crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap();
+ let generated_files =
+ generate_java_code(crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), mode)
+ .unwrap();
+ let expect_featureflags_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ /** @hide */
+ public interface FeatureFlags {
+ @com.android.aconfig.annotations.AssumeFalseForR8
+ @UnsupportedAppUsage
+ boolean disabledRo();
+ @com.android.aconfig.annotations.AssumeFalseForR8
+ @UnsupportedAppUsage
+ boolean disabledRw();
+ @com.android.aconfig.annotations.AssumeFalseForR8
+ @UnsupportedAppUsage
+ boolean disabledRwInOtherNamespace();
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ boolean enabledFixedRo();
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ boolean enabledRo();
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ boolean enabledRw();
+ }"#;
+
+ let expect_featureflagsimpl_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ /** @hide */
+ public final class FeatureFlagsImpl implements FeatureFlags {
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRo() {
+ return false;
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRw() {
+ return false;
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRwInOtherNamespace() {
+ return false;
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean enabledFixedRo() {
+ return true;
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean enabledRo() {
+ return true;
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean enabledRw() {
+ return true;
+ }
+ }
+ "#;
+
+ let expect_flags_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ /** @hide */
+ public final class Flags {
+ /** @hide */
+ public static final String FLAG_DISABLED_RO = "com.android.aconfig.test.disabled_ro";
+ /** @hide */
+ public static final String FLAG_DISABLED_RW = "com.android.aconfig.test.disabled_rw";
+ /** @hide */
+ public static final String FLAG_DISABLED_RW_IN_OTHER_NAMESPACE = "com.android.aconfig.test.disabled_rw_in_other_namespace";
+ /** @hide */
+ public static final String FLAG_ENABLED_FIXED_RO = "com.android.aconfig.test.enabled_fixed_ro";
+ /** @hide */
+ public static final String FLAG_ENABLED_RO = "com.android.aconfig.test.enabled_ro";
+ /** @hide */
+ public static final String FLAG_ENABLED_RW = "com.android.aconfig.test.enabled_rw";
+
+ @com.android.aconfig.annotations.AssumeFalseForR8
+ @UnsupportedAppUsage
+ public static boolean disabledRo() {
+ return FEATURE_FLAGS.disabledRo();
+ }
+ @com.android.aconfig.annotations.AssumeFalseForR8
+ @UnsupportedAppUsage
+ public static boolean disabledRw() {
+ return FEATURE_FLAGS.disabledRw();
+ }
+ @com.android.aconfig.annotations.AssumeFalseForR8
+ @UnsupportedAppUsage
+ public static boolean disabledRwInOtherNamespace() {
+ return FEATURE_FLAGS.disabledRwInOtherNamespace();
+ }
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ public static boolean enabledFixedRo() {
+ return FEATURE_FLAGS.enabledFixedRo();
+ }
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ public static boolean enabledRo() {
+ return FEATURE_FLAGS.enabledRo();
+ }
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ public static boolean enabledRw() {
+ return FEATURE_FLAGS.enabledRw();
+ }
+ private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
+ }"#;
+
+ let expect_fakefeatureflags_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ import java.util.HashMap;
+ import java.util.Map;
+ /** @hide */
+ public class FakeFeatureFlagsImpl implements FeatureFlags {
+ public FakeFeatureFlagsImpl() {
+ resetAll();
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRo() {
+ return getValue(Flags.FLAG_DISABLED_RO);
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRw() {
+ return getValue(Flags.FLAG_DISABLED_RW);
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRwInOtherNamespace() {
+ return getValue(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE);
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean enabledFixedRo() {
+ return getValue(Flags.FLAG_ENABLED_FIXED_RO);
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean enabledRo() {
+ return getValue(Flags.FLAG_ENABLED_RO);
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean enabledRw() {
+ return getValue(Flags.FLAG_ENABLED_RW);
+ }
+ public void setFlag(String flagName, boolean value) {
+ if (!this.mFlagMap.containsKey(flagName)) {
+ throw new IllegalArgumentException("no such flag " + flagName);
+ }
+ this.mFlagMap.put(flagName, value);
+ }
+ public void resetAll() {
+ for (Map.Entry entry : mFlagMap.entrySet()) {
+ entry.setValue(null);
+ }
+ }
+ private boolean getValue(String flagName) {
+ Boolean value = this.mFlagMap.get(flagName);
+ if (value == null) {
+ throw new IllegalArgumentException(flagName + " is not set");
+ }
+ return value;
+ }
+ private Map<String, Boolean> mFlagMap = new HashMap<>(
+ Map.ofEntries(
+ Map.entry(Flags.FLAG_DISABLED_RO, false),
+ Map.entry(Flags.FLAG_DISABLED_RW, false),
+ Map.entry(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false),
+ Map.entry(Flags.FLAG_ENABLED_FIXED_RO, false),
+ Map.entry(Flags.FLAG_ENABLED_RO, false),
+ Map.entry(Flags.FLAG_ENABLED_RW, false)
+ )
+ );
+ }
+ "#;
+ let mut file_set = HashMap::from([
+ ("com/android/aconfig/test/Flags.java", expect_flags_content),
+ ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
+ ("com/android/aconfig/test/FeatureFlags.java", expect_featureflags_content),
+ ("com/android/aconfig/test/FakeFeatureFlagsImpl.java", expect_fakefeatureflags_content),
+ ]);
+
+ for file in generated_files {
+ let file_path = file.path.to_str().unwrap();
+ assert!(file_set.contains_key(file_path), "Cannot find {}", file_path);
+ assert_eq!(
+ None,
+ crate::test::first_significant_code_diff(
+ file_set.get(file_path).unwrap(),
+ &String::from_utf8(file.contents).unwrap()
+ ),
+ "File {} content is not correct",
+ file_path
+ );
+ file_set.remove(file_path);
+ }
+
+ assert!(file_set.is_empty());
+ }
+
+ #[test]
fn test_format_java_method_name() {
let expected = "someSnakeName";
let input = "____some_snake___name____";
diff --git a/tools/aconfig/src/codegen/mod.rs b/tools/aconfig/src/codegen/mod.rs
index 476d2b3..64ffa8b 100644
--- a/tools/aconfig/src/codegen/mod.rs
+++ b/tools/aconfig/src/codegen/mod.rs
@@ -44,7 +44,7 @@
}
pub fn is_valid_container_ident(s: &str) -> bool {
- is_valid_name_ident(s) || s.split('.').all(is_valid_name_ident)
+ s.split('.').all(is_valid_name_ident)
}
pub fn create_device_config_ident(package: &str, flag_name: &str) -> Result<String> {
@@ -56,6 +56,7 @@
#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
pub enum CodegenMode {
Exported,
+ ForceReadOnly,
Production,
Test,
}
@@ -64,6 +65,7 @@
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
CodegenMode::Exported => write!(f, "exported"),
+ CodegenMode::ForceReadOnly => write!(f, "force-read-only"),
CodegenMode::Production => write!(f, "production"),
CodegenMode::Test => write!(f, "test"),
}
diff --git a/tools/aconfig/src/codegen/rust.rs b/tools/aconfig/src/codegen/rust.rs
index 56cb311..8a88ffe 100644
--- a/tools/aconfig/src/codegen/rust.rs
+++ b/tools/aconfig/src/codegen/rust.rs
@@ -44,9 +44,10 @@
template.add_template(
"rust_code_gen",
match codegen_mode {
- CodegenMode::Production => include_str!("../../templates/rust_prod.template"),
CodegenMode::Test => include_str!("../../templates/rust_test.template"),
- CodegenMode::Exported => include_str!("../../templates/rust_exported.template"),
+ CodegenMode::Exported | CodegenMode::ForceReadOnly | CodegenMode::Production => {
+ include_str!("../../templates/rust.template")
+ }
},
)?;
let contents = template.render("rust_code_gen", &context)?;
@@ -554,6 +555,84 @@
}
"#;
+ const FORCE_READ_ONLY_EXPECTED: &str = r#"
+//! codegenerated rust flag lib
+
+/// flag provider
+pub struct FlagProvider;
+
+impl FlagProvider {
+ /// query flag disabled_ro
+ pub fn disabled_ro(&self) -> bool {
+ false
+ }
+
+ /// query flag disabled_rw
+ pub fn disabled_rw(&self) -> bool {
+ false
+ }
+
+ /// query flag disabled_rw_in_other_namespace
+ pub fn disabled_rw_in_other_namespace(&self) -> bool {
+ false
+ }
+
+ /// query flag enabled_fixed_ro
+ pub fn enabled_fixed_ro(&self) -> bool {
+ true
+ }
+
+ /// query flag enabled_ro
+ pub fn enabled_ro(&self) -> bool {
+ true
+ }
+
+ /// query flag enabled_rw
+ pub fn enabled_rw(&self) -> bool {
+ true
+ }
+}
+
+/// flag provider
+pub static PROVIDER: FlagProvider = FlagProvider;
+
+/// query flag disabled_ro
+#[inline(always)]
+pub fn disabled_ro() -> bool {
+ false
+}
+
+/// query flag disabled_rw
+#[inline(always)]
+pub fn disabled_rw() -> bool {
+ false
+}
+
+/// query flag disabled_rw_in_other_namespace
+#[inline(always)]
+pub fn disabled_rw_in_other_namespace() -> bool {
+ false
+}
+
+/// query flag enabled_fixed_ro
+#[inline(always)]
+pub fn enabled_fixed_ro() -> bool {
+ true
+}
+
+/// query flag enabled_ro
+#[inline(always)]
+pub fn enabled_ro() -> bool {
+ true
+}
+
+/// query flag enabled_rw
+#[inline(always)]
+pub fn enabled_rw() -> bool {
+ true
+}
+"#;
+
fn test_generate_rust_code(mode: CodegenMode) {
let parsed_flags = crate::test::parse_test_flags();
let modified_parsed_flags =
@@ -569,6 +648,7 @@
CodegenMode::Production => PROD_EXPECTED,
CodegenMode::Test => TEST_EXPECTED,
CodegenMode::Exported => EXPORTED_EXPECTED,
+ CodegenMode::ForceReadOnly => FORCE_READ_ONLY_EXPECTED,
},
&String::from_utf8(generated.contents).unwrap()
)
@@ -589,4 +669,9 @@
fn test_generate_rust_code_for_exported() {
test_generate_rust_code(CodegenMode::Exported);
}
+
+ #[test]
+ fn test_generate_rust_code_for_force_read_only() {
+ test_generate_rust_code(CodegenMode::ForceReadOnly);
+ }
}
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
index 261acec..f7a6417 100644
--- a/tools/aconfig/src/commands.rs
+++ b/tools/aconfig/src/commands.rs
@@ -15,7 +15,9 @@
*/
use anyhow::{bail, ensure, Context, Result};
+use itertools::Itertools;
use protobuf::Message;
+use std::collections::HashMap;
use std::io::Read;
use std::path::PathBuf;
@@ -195,6 +197,7 @@
bail!("no parsed flags, or the parsed flags use different packages");
};
let package = package.to_string();
+ let _flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
generate_java_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
}
@@ -205,6 +208,7 @@
bail!("no parsed flags, or the parsed flags use different packages");
};
let package = package.to_string();
+ let _flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
generate_cpp_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
}
@@ -215,6 +219,7 @@
bail!("no parsed flags, or the parsed flags use different packages");
};
let package = package.to_string();
+ let _flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?;
generate_rust_code(&package, modified_parsed_flags.into_iter(), codegen_mode)
}
@@ -327,6 +332,11 @@
parsed_flag
}
+ fn force_read_only_mode_flag_modifier(mut parsed_flag: ProtoParsedFlag) -> ProtoParsedFlag {
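+        // force the flag's permission to READ_ONLY regardless of what was declared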
+ parsed_flag.set_permission(ProtoFlagPermission::READ_ONLY);
+ parsed_flag
+ }
+
let modified_parsed_flags: Vec<_> = match codegen_mode {
CodegenMode::Exported => parsed_flags
.parsed_flag
@@ -334,6 +344,12 @@
.filter(|pf| pf.is_exported())
.map(exported_mode_flag_modifier)
.collect(),
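+        // ForceReadOnly: drop exported flags and force the remaining flags to READ_ONLY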
+ CodegenMode::ForceReadOnly => parsed_flags
+ .parsed_flag
+ .into_iter()
+ .filter(|pf| !pf.is_exported())
+ .map(force_read_only_mode_flag_modifier)
+ .collect(),
CodegenMode::Production | CodegenMode::Test => {
parsed_flags.parsed_flag.into_iter().collect()
}
@@ -345,6 +361,27 @@
Ok(modified_parsed_flags)
}
+pub fn assign_flag_ids<'a, I>(package: &str, parsed_flags_iter: I) -> Result<HashMap<String, u16>>
+where
+ I: Iterator<Item = &'a ProtoParsedFlag> + Clone,
+{
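+    // ids are assigned in iteration order; the assert below checks that the input is
+    // sorted by flag name so the assignment is deterministic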
+ assert!(parsed_flags_iter.clone().tuple_windows().all(|(a, b)| a.name() <= b.name()));
+ let mut flag_ids = HashMap::new();
+ for (id_to_assign, pf) in (0_u32..).zip(parsed_flags_iter) {
+ if package != pf.package() {
+ return Err(anyhow::anyhow!("encountered a flag not in current package"));
+ }
+
+        // cap the number of flags a package can contain at 65535 (u16::MAX)
+ if id_to_assign > u16::MAX as u32 {
+ return Err(anyhow::anyhow!("the number of flags in a package cannot exceed 65535"));
+ }
+
+ flag_ids.insert(pf.name().to_string(), id_to_assign as u16);
+ }
+ Ok(flag_ids)
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -655,4 +692,44 @@
modify_parsed_flags_based_on_mode(parsed_flags, CodegenMode::Exported).unwrap_err();
assert_eq!("exported library contains no exported flags", format!("{:?}", error));
}
+
+ #[test]
+ fn test_assign_flag_ids() {
+ let parsed_flags = crate::test::parse_test_flags();
+ let package = find_unique_package(&parsed_flags.parsed_flag).unwrap().to_string();
+ let flag_ids = assign_flag_ids(&package, parsed_flags.parsed_flag.iter()).unwrap();
+ let expected_flag_ids = HashMap::from([
+ (String::from("disabled_ro"), 0_u16),
+ (String::from("disabled_rw"), 1_u16),
+ (String::from("disabled_rw_exported"), 2_u16),
+ (String::from("disabled_rw_in_other_namespace"), 3_u16),
+ (String::from("enabled_fixed_ro"), 4_u16),
+ (String::from("enabled_fixed_ro_exported"), 5_u16),
+ (String::from("enabled_ro"), 6_u16),
+ (String::from("enabled_ro_exported"), 7_u16),
+ (String::from("enabled_rw"), 8_u16),
+ ]);
+ assert_eq!(flag_ids, expected_flag_ids);
+ }
+
+ #[test]
+ fn test_modify_parsed_flags_based_on_mode_force_read_only() {
+ let parsed_flags = crate::test::parse_test_flags();
+ let p_parsed_flags =
+ modify_parsed_flags_based_on_mode(parsed_flags.clone(), CodegenMode::ForceReadOnly)
+ .unwrap();
+ assert_eq!(6, p_parsed_flags.len());
+ for pf in p_parsed_flags {
+ assert_eq!(ProtoFlagPermission::READ_ONLY, pf.permission());
+ }
+
+ let mut parsed_flags = crate::test::parse_test_flags();
+ parsed_flags.parsed_flag.retain_mut(|pf| pf.is_exported());
+ let error = modify_parsed_flags_based_on_mode(parsed_flags, CodegenMode::ForceReadOnly)
+ .unwrap_err();
+ assert_eq!(
+ "force-read-only library contains no force-read-only flags",
+ format!("{:?}", error)
+ );
+ }
}
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
index 6c4e241..7d719f0 100644
--- a/tools/aconfig/src/main.rs
+++ b/tools/aconfig/src/main.rs
@@ -135,7 +135,7 @@
.required(true)
.help("The target container for the generated storage file."),
)
- .arg(Arg::new("cache").long("cache").required(true))
+ .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
.arg(Arg::new("out").long("out").required(true)),
)
}
diff --git a/tools/aconfig/src/storage/flag_table.rs b/tools/aconfig/src/storage/flag_table.rs
new file mode 100644
index 0000000..3545700
--- /dev/null
+++ b/tools/aconfig/src/storage/flag_table.rs
@@ -0,0 +1,358 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::commands::assign_flag_ids;
+use crate::storage::{self, FlagPackage};
+use anyhow::{anyhow, Result};
+
+#[derive(PartialEq, Debug)]
+pub struct FlagTableHeader {
+ pub version: u32,
+ pub container: String,
+ pub file_size: u32,
+ pub num_flags: u32,
+ pub bucket_offset: u32,
+ pub node_offset: u32,
+}
+
+impl FlagTableHeader {
+ fn new(container: &str, num_flags: u32) -> Self {
+ Self {
+ version: storage::FILE_VERSION,
+ container: String::from(container),
+ file_size: 0,
+ num_flags,
+ bucket_offset: 0,
+ node_offset: 0,
+ }
+ }
+
+ fn as_bytes(&self) -> Vec<u8> {
+ let mut result = Vec::new();
+ result.extend_from_slice(&self.version.to_le_bytes());
+ let container_bytes = self.container.as_bytes();
+ result.extend_from_slice(&(container_bytes.len() as u32).to_le_bytes());
+ result.extend_from_slice(container_bytes);
+ result.extend_from_slice(&self.file_size.to_le_bytes());
+ result.extend_from_slice(&self.num_flags.to_le_bytes());
+ result.extend_from_slice(&self.bucket_offset.to_le_bytes());
+ result.extend_from_slice(&self.node_offset.to_le_bytes());
+ result
+ }
+}
+
+#[derive(PartialEq, Debug, Clone)]
+pub struct FlagTableNode {
+ pub package_id: u32,
+ pub flag_name: String,
+ pub flag_type: u16,
+ pub flag_id: u16,
+ pub next_offset: Option<u32>,
+ pub bucket_index: u32,
+}
+
+impl FlagTableNode {
+ fn new(
+ package_id: u32,
+ flag_name: &str,
+ flag_type: u16,
+ flag_id: u16,
+ num_buckets: u32,
+ ) -> Self {
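+        // the hash key is "<package_id>/<flag_name>"; its bucket index determines
+        // where this node is placed in the flag hash table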
+ let full_flag_name = package_id.to_string() + "/" + flag_name;
+ let bucket_index = storage::get_bucket_index(&full_flag_name, num_buckets);
+ Self {
+ package_id,
+ flag_name: flag_name.to_string(),
+ flag_type,
+ flag_id,
+ next_offset: None,
+ bucket_index,
+ }
+ }
+
+ fn as_bytes(&self) -> Vec<u8> {
+ let mut result = Vec::new();
+ result.extend_from_slice(&self.package_id.to_le_bytes());
+ let name_bytes = self.flag_name.as_bytes();
+ result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes());
+ result.extend_from_slice(name_bytes);
+ result.extend_from_slice(&self.flag_type.to_le_bytes());
+ result.extend_from_slice(&self.flag_id.to_le_bytes());
+ result.extend_from_slice(&self.next_offset.unwrap_or(0).to_le_bytes());
+ result
+ }
+}
+
+#[derive(PartialEq, Debug)]
+pub struct FlagTable {
+ pub header: FlagTableHeader,
+ pub buckets: Vec<Option<u32>>,
+ pub nodes: Vec<FlagTableNode>,
+}
+
+impl FlagTable {
+ fn create_nodes(package: &FlagPackage, num_buckets: u32) -> Result<Vec<FlagTableNode>> {
+ let flag_ids =
+ assign_flag_ids(package.package_name, package.boolean_flags.iter().copied())?;
+ package
+ .boolean_flags
+ .iter()
+ .map(|&pf| {
+ let fid = flag_ids
+ .get(pf.name())
+ .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?;
+                // all flags are boolean values at the moment, so only the last bit is used. When
+                // more flag value types are supported, the flag value type information should come
+                // from the parsed flag, and we will set the flag_type bit mask properly.
+ let flag_type = 1;
+ Ok(FlagTableNode::new(package.package_id, pf.name(), flag_type, *fid, num_buckets))
+ })
+ .collect::<Result<Vec<_>>>()
+ }
+
+ pub fn new(container: &str, packages: &[FlagPackage]) -> Result<Self> {
+ // create table
+ let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
+ let num_buckets = storage::get_table_size(num_flags)?;
+
+ let mut table = Self {
+ header: FlagTableHeader::new(container, num_flags),
+ buckets: vec![None; num_buckets as usize],
+ nodes: packages
+ .iter()
+ .map(|pkg| FlagTable::create_nodes(pkg, num_buckets))
+ .collect::<Result<Vec<_>>>()?
+ .concat(),
+ };
+
+ // initialize all header fields
+ table.header.bucket_offset = table.header.as_bytes().len() as u32;
+ table.header.node_offset = table.header.bucket_offset + num_buckets * 4;
+ table.header.file_size = table.header.node_offset
+ + table.nodes.iter().map(|x| x.as_bytes().len()).sum::<usize>() as u32;
+
+ // sort nodes by bucket index for efficiency
+ table.nodes.sort_by(|a, b| a.bucket_index.cmp(&b.bucket_index));
+
+        // fill in all node offsets: record each bucket's first node offset and chain
+        // nodes that share a bucket via next_offset
+ let mut offset = table.header.node_offset;
+ for i in 0..table.nodes.len() {
+ let node_bucket_idx = table.nodes[i].bucket_index;
+ let next_node_bucket_idx = if i + 1 < table.nodes.len() {
+ Some(table.nodes[i + 1].bucket_index)
+ } else {
+ None
+ };
+
+ if table.buckets[node_bucket_idx as usize].is_none() {
+ table.buckets[node_bucket_idx as usize] = Some(offset);
+ }
+ offset += table.nodes[i].as_bytes().len() as u32;
+
+ if let Some(index) = next_node_bucket_idx {
+ if index == node_bucket_idx {
+ table.nodes[i].next_offset = Some(offset);
+ }
+ }
+ }
+
+ Ok(table)
+ }
+
+ pub fn as_bytes(&self) -> Vec<u8> {
+ [
+ self.header.as_bytes(),
+ self.buckets.iter().map(|v| v.unwrap_or(0).to_le_bytes()).collect::<Vec<_>>().concat(),
+ self.nodes.iter().map(|v| v.as_bytes()).collect::<Vec<_>>().concat(),
+ ]
+ .concat()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::storage::{
+ group_flags_by_package, tests::parse_all_test_flags, tests::read_str_from_bytes,
+ tests::read_u16_from_bytes, tests::read_u32_from_bytes,
+ };
+
+ impl FlagTableHeader {
+ // test only method to deserialize back into the header struct
+ fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ let mut head = 0;
+ Ok(Self {
+ version: read_u32_from_bytes(bytes, &mut head)?,
+ container: read_str_from_bytes(bytes, &mut head)?,
+ file_size: read_u32_from_bytes(bytes, &mut head)?,
+ num_flags: read_u32_from_bytes(bytes, &mut head)?,
+ bucket_offset: read_u32_from_bytes(bytes, &mut head)?,
+ node_offset: read_u32_from_bytes(bytes, &mut head)?,
+ })
+ }
+ }
+
+ impl FlagTableNode {
+ // test only method to deserialize back into the node struct
+ fn from_bytes(bytes: &[u8], num_buckets: u32) -> Result<Self> {
+ let mut head = 0;
+ let mut node = Self {
+ package_id: read_u32_from_bytes(bytes, &mut head)?,
+ flag_name: read_str_from_bytes(bytes, &mut head)?,
+ flag_type: read_u16_from_bytes(bytes, &mut head)?,
+ flag_id: read_u16_from_bytes(bytes, &mut head)?,
+ next_offset: match read_u32_from_bytes(bytes, &mut head)? {
+ 0 => None,
+ val => Some(val),
+ },
+ bucket_index: 0,
+ };
+ let full_flag_name = node.package_id.to_string() + "/" + &node.flag_name;
+ node.bucket_index = storage::get_bucket_index(&full_flag_name, num_buckets);
+ Ok(node)
+ }
+
+ // create test baseline, syntactic sugar
+ fn new_expected(
+ package_id: u32,
+ flag_name: &str,
+ flag_type: u16,
+ flag_id: u16,
+ next_offset: Option<u32>,
+ bucket_index: u32,
+ ) -> Self {
+ Self {
+ package_id,
+ flag_name: flag_name.to_string(),
+ flag_type,
+ flag_id,
+ next_offset,
+ bucket_index,
+ }
+ }
+ }
+
+ impl FlagTable {
+ // test only method to deserialize back into the table struct
+ fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ let header = FlagTableHeader::from_bytes(bytes)?;
+ let num_flags = header.num_flags;
+ let num_buckets = storage::get_table_size(num_flags)?;
+ let mut head = header.as_bytes().len();
+ let buckets = (0..num_buckets)
+ .map(|_| match read_u32_from_bytes(bytes, &mut head).unwrap() {
+ 0 => None,
+ val => Some(val),
+ })
+ .collect();
+ let nodes = (0..num_flags)
+ .map(|_| {
+ let node = FlagTableNode::from_bytes(&bytes[head..], num_buckets).unwrap();
+ head += node.as_bytes().len();
+ node
+ })
+ .collect();
+
+ let table = Self { header, buckets, nodes };
+ Ok(table)
+ }
+ }
+
+ pub fn create_test_flag_table() -> Result<FlagTable> {
+ let caches = parse_all_test_flags();
+ let packages = group_flags_by_package(caches.iter());
+ FlagTable::new("system", &packages)
+ }
+
+ #[test]
+ // this test point locks down the table creation and each field
+ fn test_table_contents() {
+ let flag_table = create_test_flag_table();
+ assert!(flag_table.is_ok());
+
+ let header: &FlagTableHeader = &flag_table.as_ref().unwrap().header;
+ let expected_header = FlagTableHeader {
+ version: storage::FILE_VERSION,
+ container: String::from("system"),
+ file_size: 320,
+ num_flags: 8,
+ bucket_offset: 30,
+ node_offset: 98,
+ };
+ assert_eq!(header, &expected_header);
+
+ let buckets: &Vec<Option<u32>> = &flag_table.as_ref().unwrap().buckets;
+ let expected_bucket: Vec<Option<u32>> = vec![
+ Some(98),
+ Some(124),
+ None,
+ None,
+ None,
+ Some(177),
+ None,
+ Some(203),
+ None,
+ Some(261),
+ None,
+ None,
+ None,
+ None,
+ None,
+ Some(293),
+ None,
+ ];
+ assert_eq!(buckets, &expected_bucket);
+
+ let nodes: &Vec<FlagTableNode> = &flag_table.as_ref().unwrap().nodes;
+ assert_eq!(nodes.len(), 8);
+
+ assert_eq!(nodes[0], FlagTableNode::new_expected(0, "enabled_ro", 1, 1, None, 0));
+ assert_eq!(nodes[1], FlagTableNode::new_expected(0, "enabled_rw", 1, 2, Some(150), 1));
+ assert_eq!(nodes[2], FlagTableNode::new_expected(1, "disabled_ro", 1, 0, None, 1));
+ assert_eq!(nodes[3], FlagTableNode::new_expected(2, "enabled_ro", 1, 1, None, 5));
+ assert_eq!(
+ nodes[4],
+ FlagTableNode::new_expected(1, "enabled_fixed_ro", 1, 1, Some(235), 7)
+ );
+ assert_eq!(nodes[5], FlagTableNode::new_expected(1, "enabled_ro", 1, 2, None, 7));
+ assert_eq!(nodes[6], FlagTableNode::new_expected(2, "enabled_fixed_ro", 1, 0, None, 9));
+ assert_eq!(nodes[7], FlagTableNode::new_expected(0, "disabled_rw", 1, 0, None, 15));
+ }
+
+ #[test]
+ // this test point locks down the table serialization
+ fn test_serialization() {
+ let flag_table = create_test_flag_table().unwrap();
+
+ let header: &FlagTableHeader = &flag_table.header;
+ let reinterpreted_header = FlagTableHeader::from_bytes(&header.as_bytes());
+ assert!(reinterpreted_header.is_ok());
+ assert_eq!(header, &reinterpreted_header.unwrap());
+
+ let nodes: &Vec<FlagTableNode> = &flag_table.nodes;
+ let num_buckets = storage::get_table_size(header.num_flags).unwrap();
+ for node in nodes.iter() {
+ let reinterpreted_node = FlagTableNode::from_bytes(&node.as_bytes(), num_buckets);
+ assert!(reinterpreted_node.is_ok());
+ assert_eq!(node, &reinterpreted_node.unwrap());
+ }
+
+ let reinterpreted_table = FlagTable::from_bytes(&flag_table.as_bytes());
+ assert!(reinterpreted_table.is_ok());
+ assert_eq!(&flag_table, &reinterpreted_table.unwrap());
+ }
+}
diff --git a/tools/aconfig/src/storage/flag_value.rs b/tools/aconfig/src/storage/flag_value.rs
new file mode 100644
index 0000000..45f5ec0
--- /dev/null
+++ b/tools/aconfig/src/storage/flag_value.rs
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::commands::assign_flag_ids;
+use crate::protos::ProtoFlagState;
+use crate::storage::{self, FlagPackage};
+use anyhow::{anyhow, Result};
+
+#[derive(PartialEq, Debug)]
+pub struct FlagValueHeader {
+ pub version: u32,
+ pub container: String,
+ pub file_size: u32,
+ pub num_flags: u32,
+ pub boolean_value_offset: u32,
+}
+
+impl FlagValueHeader {
+ fn new(container: &str, num_flags: u32) -> Self {
+ Self {
+ version: storage::FILE_VERSION,
+ container: String::from(container),
+ file_size: 0,
+ num_flags,
+ boolean_value_offset: 0,
+ }
+ }
+
+ fn as_bytes(&self) -> Vec<u8> {
+ let mut result = Vec::new();
+ result.extend_from_slice(&self.version.to_le_bytes());
+ let container_bytes = self.container.as_bytes();
+ result.extend_from_slice(&(container_bytes.len() as u32).to_le_bytes());
+ result.extend_from_slice(container_bytes);
+ result.extend_from_slice(&self.file_size.to_le_bytes());
+ result.extend_from_slice(&self.num_flags.to_le_bytes());
+ result.extend_from_slice(&self.boolean_value_offset.to_le_bytes());
+ result
+ }
+}
+
+#[derive(PartialEq, Debug)]
+pub struct FlagValueList {
+ pub header: FlagValueHeader,
+ pub booleans: Vec<bool>,
+}
+
+impl FlagValueList {
+ pub fn new(container: &str, packages: &[FlagPackage]) -> Result<Self> {
+ // create list
+ let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
+
+ let mut list = Self {
+ header: FlagValueHeader::new(container, num_flags),
+ booleans: vec![false; num_flags as usize],
+ };
+
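+        // each package owns a contiguous slice of the boolean array starting at its
+        // boolean_offset; within that slice a flag's value is stored at its flag id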
+ for pkg in packages.iter() {
+ let start_offset = pkg.boolean_offset as usize;
+ let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?;
+ for pf in pkg.boolean_flags.iter() {
+ let fid = flag_ids
+ .get(pf.name())
+ .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?;
+
+ list.booleans[start_offset + (*fid as usize)] =
+ pf.state() == ProtoFlagState::ENABLED;
+ }
+ }
+
+ // initialize all header fields
+ list.header.boolean_value_offset = list.header.as_bytes().len() as u32;
+ list.header.file_size = list.header.boolean_value_offset + num_flags;
+
+ Ok(list)
+ }
+
+ pub fn as_bytes(&self) -> Vec<u8> {
+ [
+ self.header.as_bytes(),
+ self.booleans
+ .iter()
+ .map(|&v| u8::from(v).to_le_bytes())
+ .collect::<Vec<_>>()
+ .concat(),
+ ]
+ .concat()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::storage::{
+ group_flags_by_package, tests::parse_all_test_flags, tests::read_str_from_bytes,
+ tests::read_u32_from_bytes, tests::read_u8_from_bytes,
+ };
+
+ impl FlagValueHeader {
+ // test only method to deserialize back into the header struct
+ fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ let mut head = 0;
+ Ok(Self {
+ version: read_u32_from_bytes(bytes, &mut head)?,
+ container: read_str_from_bytes(bytes, &mut head)?,
+ file_size: read_u32_from_bytes(bytes, &mut head)?,
+ num_flags: read_u32_from_bytes(bytes, &mut head)?,
+ boolean_value_offset: read_u32_from_bytes(bytes, &mut head)?,
+ })
+ }
+ }
+
+ impl FlagValueList {
+ // test only method to deserialize back into the flag value struct
+ fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ let header = FlagValueHeader::from_bytes(bytes)?;
+ let num_flags = header.num_flags;
+ let mut head = header.as_bytes().len();
+ let booleans = (0..num_flags)
+ .map(|_| read_u8_from_bytes(bytes, &mut head).unwrap() == 1)
+ .collect();
+ let list = Self { header, booleans };
+ Ok(list)
+ }
+ }
+
+ pub fn create_test_flag_value_list() -> Result<FlagValueList> {
+ let caches = parse_all_test_flags();
+ let packages = group_flags_by_package(caches.iter());
+ FlagValueList::new("system", &packages)
+ }
+
+ #[test]
+ // this test point locks down the flag value creation and each field
+ fn test_list_contents() {
+ let flag_value_list = create_test_flag_value_list();
+ assert!(flag_value_list.is_ok());
+
+ let header: &FlagValueHeader = &flag_value_list.as_ref().unwrap().header;
+ let expected_header = FlagValueHeader {
+ version: storage::FILE_VERSION,
+ container: String::from("system"),
+ file_size: 34,
+ num_flags: 8,
+ boolean_value_offset: 26,
+ };
+ assert_eq!(header, &expected_header);
+
+ let booleans: &Vec<bool> = &flag_value_list.as_ref().unwrap().booleans;
+ let expected_booleans: Vec<bool> = vec![false; header.num_flags as usize];
+ assert_eq!(booleans, &expected_booleans);
+ }
+
+ #[test]
+ // this test point locks down the value list serialization
+ fn test_serialization() {
+ let flag_value_list = create_test_flag_value_list().unwrap();
+
+ let header: &FlagValueHeader = &flag_value_list.header;
+ let reinterpreted_header = FlagValueHeader::from_bytes(&header.as_bytes());
+ assert!(reinterpreted_header.is_ok());
+ assert_eq!(header, &reinterpreted_header.unwrap());
+
+ let reinterpreted_value_list = FlagValueList::from_bytes(&flag_value_list.as_bytes());
+ assert!(reinterpreted_value_list.is_ok());
+ assert_eq!(&flag_value_list, &reinterpreted_value_list.unwrap());
+ }
+}
diff --git a/tools/aconfig/src/storage/mod.rs b/tools/aconfig/src/storage/mod.rs
index 686f9ae..36ea309 100644
--- a/tools/aconfig/src/storage/mod.rs
+++ b/tools/aconfig/src/storage/mod.rs
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+pub mod flag_table;
+pub mod flag_value;
pub mod package_table;
use anyhow::{anyhow, Result};
@@ -23,12 +25,14 @@
use crate::commands::OutputFile;
use crate::protos::{ProtoParsedFlag, ProtoParsedFlags};
-use crate::storage::package_table::PackageTable;
+use crate::storage::{
+ flag_table::FlagTable, flag_value::FlagValueList, package_table::PackageTable,
+};
pub const FILE_VERSION: u32 = 1;
pub const HASH_PRIMES: [u32; 29] = [
- 7, 13, 29, 53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593, 49157, 98317, 196613, 393241,
+ 7, 17, 29, 53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593, 49157, 98317, 196613, 393241,
786433, 1572869, 3145739, 6291469, 12582917, 25165843, 50331653, 100663319, 201326611,
402653189, 805306457, 1610612741,
];
@@ -55,6 +59,8 @@
pub package_id: u32,
pub flag_names: HashSet<&'a str>,
pub boolean_flags: Vec<&'a ProtoParsedFlag>,
+    // offset of this package's first boolean flag relative to the start of the
+    // boolean flag value array in the flag value file
pub boolean_offset: u32,
}
@@ -94,12 +100,11 @@
}
// calculate package flag value start offset, in flag value file, each boolean
- // is stored as two bytes, the first byte will be the flag value. the second
- // byte is flag info byte, which is a bitmask to indicate the status of a flag
+ // is stored as a single byte
let mut boolean_offset = 0;
for p in packages.iter_mut() {
p.boolean_offset = boolean_offset;
- boolean_offset += 2 * p.boolean_flags.len() as u32;
+ boolean_offset += p.boolean_flags.len() as u32;
}
packages
@@ -120,7 +125,19 @@
let package_table_file =
OutputFile { contents: package_table.as_bytes(), path: package_table_file_path };
- Ok(vec![package_table_file])
+ // create and serialize flag map
+ let flag_table = FlagTable::new(container, &packages)?;
+ let flag_table_file_path = PathBuf::from("flag.map");
+ let flag_table_file =
+ OutputFile { contents: flag_table.as_bytes(), path: flag_table_file_path };
+
+ // create and serialize flag value
+ let flag_value = FlagValueList::new(container, &packages)?;
+ let flag_value_file_path = PathBuf::from("flag.val");
+ let flag_value_file =
+ OutputFile { contents: flag_value.as_bytes(), path: flag_value_file_path };
+
+ Ok(vec![package_table_file, flag_table_file, flag_value_file])
}
#[cfg(test)]
@@ -128,6 +145,20 @@
use super::*;
use crate::Input;
+ /// Read and parse bytes as u8
+ pub fn read_u8_from_bytes(buf: &[u8], head: &mut usize) -> Result<u8> {
+ let val = u8::from_le_bytes(buf[*head..*head + 1].try_into()?);
+ *head += 1;
+ Ok(val)
+ }
+
+ /// Read and parse bytes as u16
+ pub fn read_u16_from_bytes(buf: &[u8], head: &mut usize) -> Result<u16> {
+ let val = u16::from_le_bytes(buf[*head..*head + 2].try_into()?);
+ *head += 2;
+ Ok(val)
+ }
+
/// Read and parse bytes as u32
pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32> {
let val = u32::from_le_bytes(buf[*head..*head + 4].try_into()?);
@@ -147,13 +178,8 @@
let aconfig_files = [
(
"com.android.aconfig.storage.test_1",
- "storage_test_1_part_1.aconfig",
- include_bytes!("../../tests/storage_test_1_part_1.aconfig").as_slice(),
- ),
- (
- "com.android.aconfig.storage.test_1",
- "storage_test_1_part_2.aconfig",
- include_bytes!("../../tests/storage_test_1_part_2.aconfig").as_slice(),
+ "storage_test_1.aconfig",
+ include_bytes!("../../tests/storage_test_1.aconfig").as_slice(),
),
(
"com.android.aconfig.storage.test_2",
@@ -204,12 +230,10 @@
assert_eq!(packages[0].package_name, "com.android.aconfig.storage.test_1");
assert_eq!(packages[0].package_id, 0);
- assert_eq!(packages[0].flag_names.len(), 5);
+ assert_eq!(packages[0].flag_names.len(), 3);
assert!(packages[0].flag_names.contains("enabled_rw"));
assert!(packages[0].flag_names.contains("disabled_rw"));
assert!(packages[0].flag_names.contains("enabled_ro"));
- assert!(packages[0].flag_names.contains("disabled_ro"));
- assert!(packages[0].flag_names.contains("enabled_fixed_ro"));
assert_eq!(packages[0].boolean_offset, 0);
assert_eq!(packages[1].package_name, "com.android.aconfig.storage.test_2");
@@ -218,13 +242,13 @@
assert!(packages[1].flag_names.contains("enabled_ro"));
assert!(packages[1].flag_names.contains("disabled_ro"));
assert!(packages[1].flag_names.contains("enabled_fixed_ro"));
- assert_eq!(packages[1].boolean_offset, 10);
+ assert_eq!(packages[1].boolean_offset, 3);
assert_eq!(packages[2].package_name, "com.android.aconfig.storage.test_4");
assert_eq!(packages[2].package_id, 2);
assert_eq!(packages[2].flag_names.len(), 2);
assert!(packages[2].flag_names.contains("enabled_ro"));
assert!(packages[2].flag_names.contains("enabled_fixed_ro"));
- assert_eq!(packages[2].boolean_offset, 16);
+ assert_eq!(packages[2].boolean_offset, 6);
}
}
diff --git a/tools/aconfig/src/storage/package_table.rs b/tools/aconfig/src/storage/package_table.rs
index 940c5b2..4036234 100644
--- a/tools/aconfig/src/storage/package_table.rs
+++ b/tools/aconfig/src/storage/package_table.rs
@@ -57,6 +57,8 @@
pub struct PackageTableNode {
pub package_name: String,
pub package_id: u32,
+    // offset of this package's first boolean flag relative to the start of the
+    // boolean flag value array in the flag value file
pub boolean_offset: u32,
pub next_offset: Option<u32>,
pub bucket_index: u32,
@@ -249,7 +251,7 @@
let first_node_expected = PackageTableNode {
package_name: String::from("com.android.aconfig.storage.test_2"),
package_id: 1,
- boolean_offset: 10,
+ boolean_offset: 3,
next_offset: None,
bucket_index: 0,
};
@@ -265,7 +267,7 @@
let third_node_expected = PackageTableNode {
package_name: String::from("com.android.aconfig.storage.test_4"),
package_id: 2,
- boolean_offset: 16,
+ boolean_offset: 6,
next_offset: None,
bucket_index: 3,
};
@@ -275,9 +277,7 @@
#[test]
// this test point locks down the table serialization
fn test_serialization() {
- let package_table = create_test_package_table();
- assert!(package_table.is_ok());
- let package_table = package_table.unwrap();
+ let package_table = create_test_package_table().unwrap();
let header: &PackageTableHeader = &package_table.header;
let reinterpreted_header = PackageTableHeader::from_bytes(&header.as_bytes());
diff --git a/tools/aconfig/templates/cpp_exported_header.template b/tools/aconfig/templates/cpp_exported_header.template
index 6b6daa7..0f7853e 100644
--- a/tools/aconfig/templates/cpp_exported_header.template
+++ b/tools/aconfig/templates/cpp_exported_header.template
@@ -44,7 +44,6 @@
{{ -if is_test_mode }}
return provider_->{item.flag_name}();
{{ -else }}
- {{ -if is_prod_mode }}
{{ -if item.readwrite }}
return provider_->{item.flag_name}();
{{ -else }}
@@ -54,11 +53,6 @@
return {item.default_value};
{{ -endif }}
{{ -endif }}
- {{ -else }}
- {{ -if is_exported_mode }}
- return provider_->{item.flag_name}();
- {{ -endif }}
- {{ -endif }}
{{ -endif }}
}
diff --git a/tools/aconfig/templates/cpp_source_file.template b/tools/aconfig/templates/cpp_source_file.template
index 4aec540..4bcd1b7 100644
--- a/tools/aconfig/templates/cpp_source_file.template
+++ b/tools/aconfig/templates/cpp_source_file.template
@@ -59,7 +59,6 @@
{{ -for item in class_elements }}
virtual bool {item.flag_name}() override \{
- {{ -if is_prod_mode }}
{{ -if item.readwrite }}
if (cache_[{item.readwrite_idx}] == -1) \{
cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag(
@@ -75,17 +74,6 @@
return {item.default_value};
{{ -endif }}
{{ -endif }}
- {{ -else- }}
- {{ -if is_exported_mode }}
- if (cache_[{item.readwrite_idx}] == -1) \{
- cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag(
- "aconfig_flags.{item.device_config_namespace}",
- "{item.device_config_flag}",
- "false") == "true";
- }
- return cache_[{item.readwrite_idx}];
- {{ -endif }}
- {{ -endif }}
}
{{ -endfor }}
{{ if readwrite- }}
@@ -106,7 +94,6 @@
{{ -if is_test_mode }}
return {cpp_namespace}::{item.flag_name}();
{{ -else }}
- {{ -if is_prod_mode }}
{{ -if item.readwrite }}
return {cpp_namespace}::{item.flag_name}();
{{ -else }}
@@ -116,11 +103,6 @@
return {item.default_value};
{{ -endif }}
{{ -endif }}
- {{ -else }}
- {{ -if is_exported_mode }}
- return {cpp_namespace}::{item.flag_name}();
- {{ -endif }}
- {{ -endif }}
{{ -endif }}
}
diff --git a/tools/aconfig/templates/rust_prod.template b/tools/aconfig/templates/rust.template
similarity index 100%
rename from tools/aconfig/templates/rust_prod.template
rename to tools/aconfig/templates/rust.template
diff --git a/tools/aconfig/templates/rust_exported.template b/tools/aconfig/templates/rust_exported.template
deleted file mode 100644
index 110f2d4..0000000
--- a/tools/aconfig/templates/rust_exported.template
+++ /dev/null
@@ -1,35 +0,0 @@
-//! codegenerated rust flag lib
-
-/// flag provider
-pub struct FlagProvider;
-
-lazy_static::lazy_static! \{
-{{ for flag in template_flags }}
- /// flag value cache for {flag.name}
- static ref CACHED_{flag.name}: bool = flags_rust::GetServerConfigurableFlag(
- "aconfig_flags.{flag.device_config_namespace}",
- "{flag.device_config_flag}",
- "false") == "true";
-{{ endfor }}
-}
-
-impl FlagProvider \{
-{{ for flag in template_flags }}
- /// query flag {flag.name}
- pub fn {flag.name}(&self) -> bool \{
- *CACHED_{flag.name}
- }
-{{ endfor }}
-
-}
-
-/// flag provider
-pub static PROVIDER: FlagProvider = FlagProvider;
-
-{{ for flag in template_flags }}
-/// query flag {flag.name}
-#[inline(always)]
-pub fn {flag.name}() -> bool \{
- PROVIDER.{flag.name}()
-}
-{{ endfor }}
diff --git a/tools/aconfig/tests/AconfigTest.java b/tools/aconfig/tests/AconfigTest.java
index b2deca0..7e76efb 100644
--- a/tools/aconfig/tests/AconfigTest.java
+++ b/tools/aconfig/tests/AconfigTest.java
@@ -10,6 +10,7 @@
import static com.android.aconfig.test.Flags.enabledRw;
import static com.android.aconfig.test.exported.Flags.exportedFlag;
import static com.android.aconfig.test.exported.Flags.FLAG_EXPORTED_FLAG;
+import static com.android.aconfig.test.forcereadonly.Flags.froRw;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThrows;
@@ -66,4 +67,9 @@
assertEquals("com.android.aconfig.test.exported.exported_flag", FLAG_EXPORTED_FLAG);
assertFalse(exportedFlag());
}
+
+ @Test
+ public void testForceReadOnly() {
+ assertFalse(froRw());
+ }
}
diff --git a/tools/aconfig/tests/aconfig_force_read_only_mode_test.cpp b/tools/aconfig/tests/aconfig_force_read_only_mode_test.cpp
new file mode 100644
index 0000000..0dec481
--- /dev/null
+++ b/tools/aconfig/tests/aconfig_force_read_only_mode_test.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "com_android_aconfig_test.h"
+#include "gtest/gtest.h"
+
+using namespace com::android::aconfig::test;
+
+TEST(AconfigTest, TestDisabledReadOnlyFlag) {
+ ASSERT_FALSE(com_android_aconfig_test_disabled_ro());
+ ASSERT_FALSE(provider_->disabled_ro());
+ ASSERT_FALSE(disabled_ro());
+}
+
+TEST(AconfigTest, TestEnabledReadOnlyFlag) {
+ ASSERT_TRUE(com_android_aconfig_test_enabled_ro());
+ ASSERT_TRUE(provider_->enabled_ro());
+ ASSERT_TRUE(enabled_ro());
+}
+
+TEST(AconfigTest, TestDisabledReadWriteFlag) {
+ ASSERT_FALSE(com_android_aconfig_test_disabled_rw());
+ ASSERT_FALSE(provider_->disabled_rw());
+ ASSERT_FALSE(disabled_rw());
+}
+
+TEST(AconfigTest, TestEnabledReadWriteFlag) {
+ ASSERT_TRUE(com_android_aconfig_test_enabled_rw());
+ ASSERT_TRUE(provider_->enabled_rw());
+ ASSERT_TRUE(enabled_rw());
+}
+
+TEST(AconfigTest, TestEnabledFixedReadOnlyFlag) {
+ ASSERT_TRUE(com_android_aconfig_test_enabled_fixed_ro());
+ ASSERT_TRUE(provider_->enabled_fixed_ro());
+ ASSERT_TRUE(enabled_fixed_ro());
+}
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/tools/aconfig/tests/aconfig_force_read_only_mode_test.rs b/tools/aconfig/tests/aconfig_force_read_only_mode_test.rs
new file mode 100644
index 0000000..4f05e26
--- /dev/null
+++ b/tools/aconfig/tests/aconfig_force_read_only_mode_test.rs
@@ -0,0 +1,10 @@
+#[cfg(not(feature = "cargo"))]
+#[test]
+fn test_flags() {
+ assert!(!aconfig_test_rust_library::disabled_ro());
+ assert!(!aconfig_test_rust_library::disabled_rw());
+ assert!(!aconfig_test_rust_library::disabled_rw_in_other_namespace());
+ assert!(aconfig_test_rust_library::enabled_fixed_ro());
+ assert!(aconfig_test_rust_library::enabled_ro());
+ assert!(aconfig_test_rust_library::enabled_rw());
+}
diff --git a/tools/aconfig/tests/storage_test_1_part_1.aconfig b/tools/aconfig/tests/storage_test_1.aconfig
similarity index 72%
rename from tools/aconfig/tests/storage_test_1_part_1.aconfig
rename to tools/aconfig/tests/storage_test_1.aconfig
index 70462cd..a122c57 100644
--- a/tools/aconfig/tests/storage_test_1_part_1.aconfig
+++ b/tools/aconfig/tests/storage_test_1.aconfig
@@ -15,3 +15,10 @@
bug: "456"
is_exported: true
}
+
+flag {
+ name: "enabled_ro"
+ namespace: "aconfig_test"
+ description: "This flag is ENABLED + READ_ONLY"
+ bug: "abc"
+}
diff --git a/tools/aconfig/tests/storage_test_1_part_2.aconfig b/tools/aconfig/tests/storage_test_1_part_2.aconfig
deleted file mode 100644
index 5eb0c0c..0000000
--- a/tools/aconfig/tests/storage_test_1_part_2.aconfig
+++ /dev/null
@@ -1,24 +0,0 @@
-package: "com.android.aconfig.storage.test_1"
-container: "system"
-
-flag {
- name: "enabled_ro"
- namespace: "aconfig_test"
- description: "This flag is ENABLED + READ_ONLY"
- bug: "abc"
-}
-
-flag {
- name: "disabled_ro"
- namespace: "aconfig_test"
- description: "This flag is DISABLED + READ_ONLY"
- bug: "123"
-}
-
-flag {
- name: "enabled_fixed_ro"
- namespace: "aconfig_test"
- description: "This flag is fixed READ_ONLY + ENABLED"
- bug: ""
- is_fixed_read_only: true
-}
diff --git a/tools/aconfig/tests/test_force_read_only.aconfig b/tools/aconfig/tests/test_force_read_only.aconfig
new file mode 100644
index 0000000..05ab0e2
--- /dev/null
+++ b/tools/aconfig/tests/test_force_read_only.aconfig
@@ -0,0 +1,17 @@
+package: "com.android.aconfig.test.forcereadonly"
+container: "system"
+
+flag {
+ name: "fro_exported"
+ namespace: "aconfig_test"
+ description: "This is an exported flag"
+ is_exported: true
+ bug: "888"
+}
+
+flag {
+ name: "fro_rw"
+ namespace: "aconfig_test"
+ description: "This flag is not exported"
+ bug: "777"
+}
diff --git a/tools/metadata/generator.go b/tools/metadata/generator.go
index d328876..b7668be 100644
--- a/tools/metadata/generator.go
+++ b/tools/metadata/generator.go
@@ -77,9 +77,18 @@
return string(data)
}
-func writeNewlineToOutputFile(outputFile string) {
+func writeEmptyOutputProto(outputFile string, metadataRule string) {
file, err := os.Create(outputFile)
- data := "\n"
+ if err != nil {
+ log.Fatal(err)
+ }
+ var message proto.Message
+ if metadataRule == "test_spec" {
+ message = &test_spec_proto.TestSpec{}
+ } else if metadataRule == "code_metadata" {
+ message = &code_metadata_proto.CodeMetadata{}
+ }
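+	// an empty proto message marshals to zero bytes, so the output file is a valid (empty) proto instead of a bare newline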
+ data, err := proto.Marshal(message)
if err != nil {
log.Fatal(err)
}
@@ -92,8 +101,8 @@
}
func processTestSpecProtobuf(
- filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
- errCh chan error, wg *sync.WaitGroup,
+ filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
+ errCh chan error, wg *sync.WaitGroup,
) {
defer wg.Done()
@@ -121,7 +130,7 @@
if metadata.GetTrendyTeamId() != existing.GetTrendyTeamId() {
errCh <- fmt.Errorf(
"Conflicting trendy team IDs found for %s at:\n%s with teamId"+
- ": %s,\n%s with teamId: %s",
+ ": %s,\n%s with teamId: %s",
key,
metadata.GetPath(), metadata.GetTrendyTeamId(), existing.GetPath(),
existing.GetTrendyTeamId(),
@@ -147,8 +156,8 @@
// processCodeMetadataProtobuf processes CodeMetadata protobuf files
func processCodeMetadataProtobuf(
- filePath string, ownershipMetadataMap *sync.Map, sourceFileMetadataMap *sync.Map, keyLocks *keyToLocksMap,
- errCh chan error, wg *sync.WaitGroup,
+ filePath string, ownershipMetadataMap *sync.Map, sourceFileMetadataMap *sync.Map, keyLocks *keyToLocksMap,
+ errCh chan error, wg *sync.WaitGroup,
) {
defer wg.Done()
@@ -182,8 +191,8 @@
if attributes.TeamID != existing.TeamID && (!attributes.MultiOwnership || !existing.MultiOwnership) {
errCh <- fmt.Errorf(
"Conflict found for source file %s covered at %s with team ID: %s. Existing team ID: %s and path: %s."+
- " If multi-ownership is required, multiOwnership should be set to true in all test_spec modules using this target. "+
- "Multiple-ownership in general is discouraged though as it make infrastructure around android relying on this information pick up a random value when it needs only one.",
+ " If multi-ownership is required, multiOwnership should be set to true in all test_spec modules using this target. "+
+				"Multiple-ownership in general is discouraged though, as it makes infrastructure around Android that relies on this information pick up a random value when it needs only one.",
srcFile, internalMetadata.GetPath(), attributes.TeamID, existing.TeamID, existing.Path,
)
srcFileLock.Unlock()
@@ -235,7 +244,7 @@
inputFileData := strings.TrimRight(readFileToString(*inputFile), "\n")
filePaths := strings.Split(inputFileData, " ")
if len(filePaths) == 1 && filePaths[0] == "" {
- writeNewlineToOutputFile(*outputFile)
+ writeEmptyOutputProto(*outputFile, *rule)
return
}
ownershipMetadataMap := &sync.Map{}
diff --git a/tools/metadata/testdata/generatedEmptyOutputFile.txt b/tools/metadata/testdata/generatedEmptyOutputFile.txt
index 8b13789..e69de29 100644
--- a/tools/metadata/testdata/generatedEmptyOutputFile.txt
+++ b/tools/metadata/testdata/generatedEmptyOutputFile.txt
@@ -1 +0,0 @@
-
diff --git a/tools/perf/benchmarks b/tools/perf/benchmarks
index f46b920..acc53bb 100755
--- a/tools/perf/benchmarks
+++ b/tools/perf/benchmarks
@@ -130,8 +130,9 @@
def Clean():
"""Remove the out directory."""
def remove_out():
- if os.path.exists("out"):
- shutil.rmtree("out")
+ out_dir = utils.get_out_dir()
+ if os.path.exists(out_dir):
+ shutil.rmtree(out_dir)
return Change(label="Remove out", change=remove_out, undo=lambda: None)
@@ -270,7 +271,7 @@
def _run_benchmark(self, lunch, benchmark, iteration):
"""Run a single benchmark."""
- benchmark_log_subdir = self._log_dir(lunch, benchmark, iteration)
+ benchmark_log_subdir = self._benchmark_log_dir(lunch, benchmark, iteration)
benchmark_log_dir = self._options.LogDir().joinpath(benchmark_log_subdir)
sys.stderr.write(f"STARTING BENCHMARK: {benchmark.id}\n")
@@ -298,7 +299,7 @@
dist_one = self._options.DistOne()
if dist_one:
# If we're disting just one benchmark, save the logs and we can stop here.
- self._dist(dist_one)
+ self._dist(utils.get_dist_dir())
else:
# Postroll builds
for i in range(benchmark.preroll):
@@ -315,7 +316,7 @@
self._write_summary()
sys.stderr.write(f"FINISHED BENCHMARK: {benchmark.id}\n")
- def _log_dir(self, lunch, benchmark, iteration):
+ def _benchmark_log_dir(self, lunch, benchmark, iteration):
"""Construct the log directory fir a benchmark run."""
path = f"{lunch.Combine()}/{benchmark.id}"
# Zero pad to the correct length for correct alpha sorting
@@ -335,6 +336,7 @@
"--build-mode",
"--all-modules",
f"--dir={self._options.root}",
+ "--skip-metrics-upload",
] + modules
env = dict(os.environ)
env["TARGET_PRODUCT"] = lunch.target_product
@@ -354,8 +356,8 @@
return after_ns - before_ns
def _dist(self, dist_dir):
- out_dir = pathlib.Path("out")
- dest_dir = pathlib.Path(dist_dir).joinpath("logs")
+ out_dir = utils.get_out_dir()
+ dest_dir = dist_dir.joinpath("logs")
os.makedirs(dest_dir, exist_ok=True)
basenames = [
"build.trace.gz",
@@ -460,7 +462,7 @@
parser.add_argument("--benchmark", nargs="*", default=[b.id for b in self._benchmarks],
metavar="BENCHMARKS",
help="Benchmarks to run. Default suite will be run if omitted.")
- parser.add_argument("--dist-one", type=str,
+ parser.add_argument("--dist-one", action="store_true",
help="Copy logs and metrics to the given dist dir. Requires that only"
+ " one benchmark be supplied. Postroll steps will be skipped.")
@@ -478,7 +480,7 @@
self._error(f"Invalid benchmark: {id}")
# --dist-one requires that only one benchmark be supplied
- if len(self.Benchmarks()) != 1:
+ if self._args.dist_one and len(self.Benchmarks()) != 1:
self._error("--dist-one requires that exactly one --benchmark.")
if self._had_error:
@@ -703,6 +705,7 @@
runner.Run()
except FatalError:
sys.stderr.write(f"FAILED\n")
+ sys.exit(1)
if __name__ == "__main__":
diff --git a/tools/perf/format_benchmarks b/tools/perf/format_benchmarks
index c01aa76..162c577 100755
--- a/tools/perf/format_benchmarks
+++ b/tools/perf/format_benchmarks
@@ -73,14 +73,6 @@
# Rows:
# Benchmark
-@dataclasses.dataclass(frozen=True)
-class Key():
- pass
-
-class Column():
- def __init__(self):
- pass
-
def lunch_str(d):
"Convert a lunch dict to a string"
return f"{d['TARGET_PRODUCT']}-{d['TARGET_RELEASE']}-{d['TARGET_BUILD_VARIANT']}"
@@ -94,10 +86,12 @@
class Table:
- def __init__(self):
+ def __init__(self, row_title, fixed_titles=[]):
self._data = {}
self._rows = []
self._cols = []
+ self._fixed_cols = {}
+ self._titles = [row_title] + fixed_titles
def Set(self, column_key, row_key, data):
self._data[(column_key, row_key)] = data
@@ -106,19 +100,27 @@
if not row_key in self._rows:
self._rows.append(row_key)
+ def SetFixedCol(self, row_key, columns):
+ self._fixed_cols[row_key] = columns
+
def Write(self, out):
table = []
# Expand the column items
for row in zip(*self._cols):
if row.count(row[0]) == len(row):
continue
- table.append([""] + [col for col in row])
+ table.append([""] * len(self._titles) + [col for col in row])
if table:
+ # Update the last row of the header with the column titles and add a separator
+ for i in range(len(self._titles)):
+ table[len(table)-1][i] = self._titles[i]
table.append(pretty.SEPARATOR)
# Populate the data
for row in self._rows:
- table.append([str(row)] + [str(self._data.get((col, row), "")) for col in self._cols])
- out.write(pretty.FormatTable(table))
+ table.append([str(row)]
+ + self._fixed_cols[row]
+ + [str(self._data.get((col, row), "")) for col in self._cols])
+ out.write(pretty.FormatTable(table, alignments="LL"))
def format_duration_sec(ns):
@@ -181,12 +183,13 @@
in group_by(summary["benchmarks"], bm_key)]
# Build the table
- table = Table()
+ table = Table("Benchmark", ["Rebuild"])
for filename, summary in summaries:
for key, column in summary["columns"]:
for id, cell in column:
duration_ns = statistics.median([b["duration_ns"] for b in cell])
- table.Set(tuple([summary["date"].strftime("YYYY-MM-DD"),
+ table.SetFixedCol(cell[0]["title"], [" ".join(cell[0]["modules"])])
+ table.Set(tuple([summary["date"].strftime("%Y-%m-%d"),
summary["branch"],
summary["tag"]]
+ list(key)),
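The Table changes above add a per-row fixed column ("Rebuild", holding the module list) next to the date/branch/tag columns, and correct the date format from the literal "YYYY-MM-DD" to the strftime pattern "%Y-%m-%d". A small usage sketch of the new interface (benchmark and module names are made up):

    import sys

    table = Table("Benchmark", ["Rebuild"])                     # row title plus one fixed column title
    table.SetFixedCol("framework", ["framework-minus-apex"])    # fixed cell(s) for this row
    table.Set(("2024-01-15", "main", "eng"), "framework", "3:25")
    table.Write(sys.stdout)                                      # first two columns left-aligned via alignments="LL"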
diff --git a/tools/perf/pretty.py b/tools/perf/pretty.py
index 1b59098..14fdc9e 100644
--- a/tools/perf/pretty.py
+++ b/tools/perf/pretty.py
@@ -19,7 +19,7 @@
SEPARATOR = Sentinel()
-def FormatTable(data, prefix=""):
+def FormatTable(data, prefix="", alignments=[]):
"""Pretty print a table.
Prefixes each row with `prefix`.
@@ -40,10 +40,10 @@
else:
for i in range(len(row)):
cell = row[i] if row[i] else ""
- if i != 0:
+ if i >= len(alignments) or alignments[i] == "R":
result += " " * (widths[i] - len(cell))
result += cell
- if i == 0:
+ if i < len(alignments) and alignments[i] == "L":
result += " " * (widths[i] - len(cell))
result += colsep
result += "\n"
diff --git a/tools/perf/utils.py b/tools/perf/utils.py
index 08e393f..934130d 100644
--- a/tools/perf/utils.py
+++ b/tools/perf/utils.py
@@ -28,3 +28,15 @@
d = d.parent
if d == pathlib.Path("/"):
return None
+
+def get_dist_dir():
+ dist_dir = os.getenv("DIST_DIR")
+ if dist_dir:
+ return pathlib.Path(dist_dir).resolve()
+ return get_out_dir().joinpath("dist")
+
+def get_out_dir():
+ out_dir = os.getenv("OUT_DIR")
+ if not out_dir:
+ out_dir = "out"
+ return pathlib.Path(out_dir).resolve()
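The new helpers let the benchmark tooling honor the standard OUT_DIR and DIST_DIR environment variables instead of hard-coding out/. Expected resolution, shown with hypothetical values:

    import os
    import utils  # tools/perf/utils.py

    # Neither variable set:   get_out_dir() -> <cwd>/out, get_dist_dir() -> <cwd>/out/dist
    # OUT_DIR=/ssd/aosp-out:  get_out_dir() -> /ssd/aosp-out, get_dist_dir() -> /ssd/aosp-out/dist
    # DIST_DIR=/tmp/dist:     get_dist_dir() -> /tmp/dist, regardless of OUT_DIR
    os.environ["OUT_DIR"] = "/ssd/aosp-out"
    print(utils.get_out_dir(), utils.get_dist_dir())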
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index a7b3523..cdafb4b 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -58,22 +58,6 @@
logger = logging.getLogger(__name__)
-# Work around a bug in Python's zipfile module that prevents opening of zipfiles
-# if any entry has an extra field of between 1 and 3 bytes (which is common with
-# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
-# contains the bug) with an empty version (since we don't need to decode the
-# extra field anyway).
-# Issue #14315: https://bugs.python.org/issue14315, fixed in Python 2.7.8 and
-# Python 3.5.0 alpha 1.
-
-
-class MyZipInfo(zipfile.ZipInfo):
- def _decodeExtra(self):
- pass
-
-
-zipfile.ZipInfo = MyZipInfo
-
OPTIONS = common.OPTIONS
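The deleted MyZipInfo override worked around Python issue 14315 (zip entries with 1-3 byte extra fields, as produced by zipalign); that bug was fixed in Python 2.7.8 and 3.5.0, and releasetools now runs on a modern Python 3, so the stock zipfile module handles these archives directly:

    import zipfile

    # Hypothetical zipaligned APK; opens without overriding ZipInfo._decodeExtra().
    with zipfile.ZipFile("app-release.apk") as z:
        print(len(z.infolist()), "entries")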
diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py
index 24d9ea9..fb5957a 100644
--- a/tools/releasetools/merge_ota.py
+++ b/tools/releasetools/merge_ota.py
@@ -243,8 +243,6 @@
# Get signing keys
key_passwords = common.GetKeyPasswords([args.package_key])
- generator = PayloadGenerator()
-
apex_info_bytes = ApexInfo(file_paths)
with tempfile.NamedTemporaryFile() as unsigned_payload:
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index b65764b..e521e1f 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -259,6 +259,9 @@
--vabc_cow_version
Specify the VABC cow version to be used
+
+ --compression_factor
+ Specify the maximum block size to be compressed at once during OTA. Supported options: 4k, 8k, 16k, 32k, 64k, 128k
"""
from __future__ import print_function
@@ -331,6 +334,7 @@
OPTIONS.security_patch_level = None
OPTIONS.max_threads = None
OPTIONS.vabc_cow_version = None
+OPTIONS.compression_factor = None
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
@@ -393,17 +397,6 @@
"""
return ModifyKeyvalueList(content, "virtual_ab_compression_method", algo)
-def SetVABCCowVersion(content, cow_version):
- """ Update virtual_ab_cow_version in dynamic_partitions_info.txt
- Args:
- content: The string content of dynamic_partitions_info.txt
- algo: The cow version be used for VABC. See
- https://cs.android.com/android/platform/superproject/main/+/main:system/core/fs_mgr/libsnapshot/include/libsnapshot/cow_format.h;l=36
- Returns:
- Updated content of dynamic_partitions_info.txt , updated cow version
- """
- return ModifyKeyvalueList(content, "virtual_ab_cow_version", cow_version)
-
def UpdatesInfoForSpecialUpdates(content, partitions_filter,
delete_keys=None):
@@ -863,10 +856,10 @@
return ExtractTargetFiles(target_file)
-def ValidateCompressinParam(target_info):
+def ValidateCompressionParam(target_info):
vabc_compression_param = OPTIONS.vabc_compression_param
if vabc_compression_param:
- minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[vabc_compression_param]
+ minimum_api_level_required = VABC_COMPRESSION_PARAM_SUPPORT[vabc_compression_param.split(",")[0]]
if target_info.vendor_api_level < minimum_api_level_required:
raise ValueError("Specified VABC compression param {} is only supported for API level >= {}, device is on API level {}".format(
vabc_compression_param, minimum_api_level_required, target_info.vendor_api_level))
@@ -879,7 +872,7 @@
target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
if OPTIONS.disable_vabc and target_info.is_release_key:
raise ValueError("Disabling VABC on release-key builds is not supported.")
- ValidateCompressinParam(target_info)
+ ValidateCompressionParam(target_info)
vabc_compression_param = target_info.vabc_compression_param
target_file = ExtractOrCopyTargetFiles(target_file)
@@ -1020,6 +1013,8 @@
target_file, vabc_compression_param)
if OPTIONS.vabc_cow_version:
target_file = ModifyTargetFilesDynamicPartitionInfo(target_file, "virtual_ab_cow_version", OPTIONS.vabc_cow_version)
+ if OPTIONS.compression_factor:
+ target_file = ModifyTargetFilesDynamicPartitionInfo(target_file, "virtual_ab_compression_factor", OPTIONS.compression_factor)
if OPTIONS.skip_postinstall:
target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
# Target_file may have been modified, reparse ab_partitions
@@ -1280,6 +1275,13 @@
else:
raise ValueError("Cannot parse value %r for option %r - only "
"integers are allowed." % (a, o))
+ elif o in ("--compression_factor"):
+ values = ["4k", "8k", "16k", "32k", "64k", "128k"]
+ if a[:-1].isdigit() and a in values and a.endswith("k"):
+ OPTIONS.compression_factor = str(int(a[:-1]) * 1024)
+ else:
+ raise ValueError("Please specify value from following options: 4k, 8k, 16k, 32k, 64k, 128k")
+
elif o == "--vabc_cow_version":
if a.isdigit():
OPTIONS.vabc_cow_version = a
@@ -1335,6 +1337,7 @@
"security_patch_level=",
"max_threads=",
"vabc_cow_version=",
+ "compression_factor=",
], extra_option_handler=[option_handler, payload_signer.signer_options])
common.InitLogging()
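The new --compression_factor flag accepts one of 4k/8k/16k/32k/64k/128k, stores the size in bytes, and is later written into dynamic_partitions_info.txt as virtual_ab_compression_factor. A standalone sketch of the parsing (the helper name is hypothetical; the real code lives in the option handler above):

    def parse_compression_factor(arg):
        """Translate '64k' into '65536'; reject anything outside the supported set."""
        values = ["4k", "8k", "16k", "32k", "64k", "128k"]
        if arg in values:
            return str(int(arg[:-1]) * 1024)
        raise ValueError("Please specify a value from the following options: " + ", ".join(values))

    assert parse_compression_factor("64k") == "65536"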
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 4356394..7b497c1 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -1137,6 +1137,7 @@
devkeydir + "/shared": d + "/shared",
devkeydir + "/platform": d + "/platform",
devkeydir + "/networkstack": d + "/networkstack",
+ devkeydir + "/sdk_sandbox": d + "/sdk_sandbox",
})
else:
OPTIONS.key_map[s] = d
diff --git a/tools/sbom/generate-sbom-framework_res.py b/tools/sbom/generate-sbom-framework_res.py
index e637d53..d0d232d 100644
--- a/tools/sbom/generate-sbom-framework_res.py
+++ b/tools/sbom/generate-sbom-framework_res.py
@@ -52,8 +52,19 @@
filename = 'data/framework_res.jar'
file_id = f'SPDXRef-{sbom_data.encode_for_spdxid(filename)}'
file = sbom_data.File(id=file_id, name=filename, checksum='SHA1: <checksum>')
+
+ package_name = 'framework_res'
+ package_id = f'SPDXRef-PREBUILT-{sbom_data.encode_for_spdxid(package_name)}'
+ package = sbom_data.Package(id=package_id, name=package_name, version='<package_version>',
+ download_location=sbom_data.VALUE_NONE,
+ supplier='Organization: <organization>',
+ files_analyzed=True,
+ verification_code='<package_verification_code>')
+ package.file_ids.append(file_id)
+
+ doc.packages.append(package)
doc.files.append(file)
- doc.describes = file_id
+ doc.describes = package_id
with open(args.layoutlib_sbom, 'r', encoding='utf-8') as f:
layoutlib_sbom = json.load(f)
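With this change the SBOM's top-level element is a prebuilt package rather than the bare jar: the document DESCRIBES the framework_res package, the package CONTAINS data/framework_res.jar, and (per the next hunk) the jar remains GENERATED_FROM each data/res/ source file found in the layoutlib SBOM. Sketch of the resulting relationship triples, with illustrative SPDX IDs:

    # DOCUMENT                          DESCRIBES       SPDXRef-PREBUILT-framework-res
    # SPDXRef-PREBUILT-framework-res    CONTAINS        SPDXRef-data-framework-res.jar
    # SPDXRef-data-framework-res.jar    GENERATED_FROM  SPDXRef-<each data/res/ resource file>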
@@ -72,7 +83,9 @@
if file[sbom_writers.PropNames.FILE_NAME].startswith('data/res/'):
resource_file_spdxids.append(file[sbom_writers.PropNames.SPDXID])
- doc.relationships = []
+ doc.relationships = [
+ sbom_data.Relationship(package_id, sbom_data.RelationshipType.CONTAINS, file_id)
+ ]
for spdxid in resource_file_spdxids:
doc.relationships.append(
sbom_data.Relationship(file_id, sbom_data.RelationshipType.GENERATED_FROM,