Merge "Use Soong-defined system image's artifacts in target_files.zip" into main
diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py
index 9143cbf..4bee401 100644
--- a/ci/optimized_targets.py
+++ b/ci/optimized_targets.py
@@ -16,12 +16,13 @@
from abc import ABC
import argparse
import functools
-from build_context import BuildContext
import json
import logging
import os
-from typing import Self
+import pathlib
+import subprocess
+from build_context import BuildContext
import test_mapping_module_retriever
@@ -33,6 +34,9 @@
build.
"""
+ _SOONG_UI_BASH_PATH = 'build/soong/soong_ui.bash'
+ _PREBUILT_SOONG_ZIP_PATH = 'prebuilts/build-tools/linux-x86/bin/soong_zip'
+
def __init__(
self,
target: str,
@@ -75,6 +79,88 @@
f'get_build_targets_impl not implemented in {type(self).__name__}'
)
+ def _generate_zip_options_for_items(
+ self,
+ prefix: str = '',
+ relative_root: str = '',
+ list_files: list[str] | None = None,
+ files: list[str] | None = None,
+ directories: list[str] | None = None,
+ ) -> list[str]:
+ if not list_files and not files and not directories:
+ raise RuntimeError(
+ f'No items specified to be added to zip! Prefix: {prefix}, Relative'
+ f' root: {relative_root}'
+ )
+ command_segment = []
+ # These are all soong_zip options so consult soong_zip --help for specifics.
+ if prefix:
+ command_segment.append('-P')
+ command_segment.append(prefix)
+ if relative_root:
+ command_segment.append('-C')
+ command_segment.append(relative_root)
+ if list_files:
+ for list_file in list_files:
+ command_segment.append('-l')
+ command_segment.append(list_file)
+ if files:
+ for file in files:
+ command_segment.append('-f')
+ command_segment.append(file)
+ if directories:
+ for directory in directories:
+ command_segment.append('-D')
+ command_segment.append(directory)
+
+ return command_segment
+
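Note: for orientation, a minimal sketch of the option segments this helper produces; it is not part of the patch, and the call arguments below are hypothetical.

    # Hypothetical call on an OptimizedBuildTarget instance, mirroring the
    # '-P'/'-C'/'-D' soong_zip options assembled above.
    segment = optimizer._generate_zip_options_for_items(
        prefix='host',
        relative_root='/abs/out/host/linux-x86',
        directories=['/abs/out/host/linux-x86/testcases/some_module'],
    )
    # segment == ['-P', 'host',
    #             '-C', '/abs/out/host/linux-x86',
    #             '-D', '/abs/out/host/linux-x86/testcases/some_module']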
+ def _query_soong_vars(
+ self, src_top: pathlib.Path, soong_vars: list[str]
+ ) -> dict[str, str]:
+ process_result = subprocess.run(
+ args=[
+ f'{src_top / self._SOONG_UI_BASH_PATH}',
+ '--dumpvar-mode',
+ '--abs',
+ *soong_vars,
+ ],
+ env=os.environ,
+ check=False,
+ capture_output=True,
+ )
+ if process_result.returncode != 0:
+ logging.error('soong dumpvars command failed! stderr:')
+ logging.error(process_result.stderr)
+ raise RuntimeError('Soong dumpvars failed! See log for stderr.')
+
+ if not process_result.stdout:
+ raise RuntimeError(
+ f'Necessary soong variables {soong_vars} not found.'
+ )
+
+ try:
+ return {
+ line.split('=')[0]: line.split('=')[1].strip("'")
+ for line in process_result.stdout.strip().split('\n')
+ }
+ except IndexError as e:
+ raise RuntimeError(
+ 'Error parsing soong dumpvars output! See output here:'
+ f' {process_result.stdout}',
+ e,
+ )
+
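Note: a small sketch of the parsing step above, assuming soong_ui.bash prints one KEY='value' pair per line (as the fake script in the unit test does); the paths are hypothetical.

    stdout = "PRODUCT_OUT='/abs/out/target/product/generic'\nHOST_OUT='/abs/out/host/linux-x86'"
    parsed = {
        line.split('=')[0]: line.split('=')[1].strip("'")
        for line in stdout.strip().split('\n')
    }
    # parsed == {'PRODUCT_OUT': '/abs/out/target/product/generic',
    #            'HOST_OUT': '/abs/out/host/linux-x86'}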
+ def _base_zip_command(
+ self, src_top: pathlib.Path, dist_dir: pathlib.Path, name: str
+ ) -> list[str]:
+ return [
+ f'{src_top / self._PREBUILT_SOONG_ZIP_PATH}',
+ '-d',
+ '-o',
+ f'{dist_dir / name}',
+ ]
+
class NullOptimizer(OptimizedBuildTarget):
"""No-op target optimizer.
@@ -121,8 +207,6 @@
class GeneralTestsOptimizer(OptimizedBuildTarget):
"""general-tests optimizer
- TODO(b/358215235): Implement
-
This optimizer reads in the list of changed files from the file located in
env[CHANGE_INFO] and uses this list alongside the normal TEST MAPPING logic to
determine what test mapping modules will run for the given changes. It then
@@ -177,6 +261,208 @@
return modules_to_build
+ def get_package_outputs_commands_impl(self):
+ src_top = pathlib.Path(os.environ.get('TOP', os.getcwd()))
+ dist_dir = pathlib.Path(os.environ.get('DIST_DIR'))
+
+ soong_vars = self._query_soong_vars(
+ src_top,
+ [
+ 'HOST_OUT_TESTCASES',
+ 'TARGET_OUT_TESTCASES',
+ 'PRODUCT_OUT',
+ 'SOONG_HOST_OUT',
+ 'HOST_OUT',
+ ],
+ )
+ host_out_testcases = pathlib.Path(soong_vars.get('HOST_OUT_TESTCASES'))
+ target_out_testcases = pathlib.Path(soong_vars.get('TARGET_OUT_TESTCASES'))
+ product_out = pathlib.Path(soong_vars.get('PRODUCT_OUT'))
+ soong_host_out = pathlib.Path(soong_vars.get('SOONG_HOST_OUT'))
+ host_out = pathlib.Path(soong_vars.get('HOST_OUT'))
+
+ host_paths = []
+ target_paths = []
+ host_config_files = []
+ target_config_files = []
+ for module in self.modules_to_build:
+ host_path = host_out_testcases / module
+ if os.path.exists(host_path):
+ host_paths.append(host_path)
+ self._collect_config_files(src_top, host_path, host_config_files)
+
+ target_path = target_out_testcases / module
+ if os.path.exists(target_path):
+ target_paths.append(target_path)
+ self._collect_config_files(src_top, target_path, target_config_files)
+
+ if not os.path.exists(host_path) and not os.path.exists(target_path):
+ logging.info(f'No host or target build outputs found for {module}.')
+
+ zip_commands = []
+
+ zip_commands.extend(
+ self._get_zip_test_configs_zips_commands(
+ dist_dir,
+ host_out,
+ product_out,
+ host_config_files,
+ target_config_files,
+ )
+ )
+
+ zip_command = self._base_zip_command(
+ host_out, dist_dir, 'general-tests.zip'
+ )
+
+ # Add host testcases.
+ zip_command.extend(
+ self._generate_zip_options_for_items(
+ prefix='host',
+ relative_root=f'{src_top / soong_host_out}',
+ directories=host_paths,
+ )
+ )
+
+ # Add target testcases.
+ zip_command.extend(
+ self._generate_zip_options_for_items(
+ prefix='target',
+ relative_root=f'{src_top / product_out}',
+ directories=target_paths,
+ )
+ )
+
+ # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
+ # Add necessary tools. These are also hardcoded in general-tests.mk.
+ framework_path = soong_host_out / 'framework'
+
+ zip_command.extend(
+ self._generate_zip_options_for_items(
+ prefix='host/tools',
+ relative_root=str(framework_path),
+ files=[
+ f"{framework_path / 'cts-tradefed.jar'}",
+ f"{framework_path / 'compatibility-host-util.jar'}",
+ f"{framework_path / 'vts-tradefed.jar'}",
+ ],
+ )
+ )
+
+ zip_commands.append(zip_command)
+ return zip_commands
+
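Note: the assembled general-tests.zip entry of zip_commands roughly takes the shape sketched below; placeholders in angle brackets are hypothetical, and the soong_zip binary is resolved under host_out because _base_zip_command is called with host_out as its src_top argument.

    # [<host_out>/prebuilts/build-tools/linux-x86/bin/soong_zip, '-d', '-o', <dist_dir>/general-tests.zip,
    #  '-P', 'host',       '-C', <src_top>/<soong_host_out>, '-D', <host_out_testcases>/<module>, ...
    #  '-P', 'target',     '-C', <src_top>/<product_out>,    '-D', <target_out_testcases>/<module>, ...
    #  '-P', 'host/tools', '-C', <soong_host_out>/framework, '-f', <soong_host_out>/framework/cts-tradefed.jar, ...]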
+ def _collect_config_files(
+ self,
+ src_top: pathlib.Path,
+ root_dir: pathlib.Path,
+ config_files: list[str],
+ ):
+ for root, dirs, files in os.walk(src_top / root_dir):
+ for file in files:
+ if file.endswith('.config'):
+ config_files.append(root_dir / file)
+
+ def _get_zip_test_configs_zips_commands(
+ self,
+ dist_dir: pathlib.Path,
+ host_out: pathlib.Path,
+ product_out: pathlib.Path,
+ host_config_files: list[str],
+ target_config_files: list[str],
+ ) -> list[list[str]]:
+ """Generate general-tests_configs.zip and general-tests_list.zip.
+
+ general-tests_configs.zip contains all of the .config files that were
+ built and general-tests_list.zip contains a text file which lists
+ all of the .config files that are in general-tests_configs.zip.
+
+ general-tests_configs.zip is organized as follows:
+ /
+ host/
+ testcases/
+ test_1.config
+ test_2.config
+ ...
+ target/
+ testcases/
+ test_1.config
+ test_2.config
+ ...
+
+ To build these, we write the paths of all host config files into one file and
+ the paths of all target config files into another. We also write the paths of
+ all config files into a third file that is used for general-tests_list.zip.
+
+ Args:
+ dist_dir: dist directory.
+ host_out: host out directory.
+ product_out: product out directory.
+ host_config_files: list of all host config files.
+ target_config_files: list of all target config files.
+
+ Returns:
+ The commands to generate general-tests_configs.zip and
+ general-tests_list.zip
+ """
+ with open(
+ f"{host_out / 'host_general-tests_list'}", 'w'
+ ) as host_list_file, open(
+ f"{product_out / 'target_general-tests_list'}", 'w'
+ ) as target_list_file, open(
+ f"{host_out / 'general-tests_list'}", 'w'
+ ) as list_file:
+
+ for config_file in host_config_files:
+ host_list_file.write(f'{config_file}' + '\n')
+ list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n')
+
+ for config_file in target_config_files:
+ target_list_file.write(f'{config_file}' + '\n')
+ list_file.write(
+ 'target/' + os.path.relpath(config_file, product_out) + '\n'
+ )
+
+ zip_commands = []
+
+ tests_config_zip_command = self._base_zip_command(
+ host_out, dist_dir, 'general-tests_configs.zip'
+ )
+ tests_config_zip_command.extend(
+ self._generate_zip_options_for_items(
+ prefix='host',
+ relative_root=str(host_out),
+ list_files=[f"{host_out / 'host_general-tests_list'}"],
+ )
+ )
+
+ tests_config_zip_command.extend(
+ self._generate_zip_options_for_items(
+ prefix='target',
+ relative_root=str(product_out),
+ list_files=[f"{product_out / 'target_general-tests_list'}"],
+ )
+ )
+
+ zip_commands.append(tests_config_zip_command)
+
+ tests_list_zip_command = self._base_zip_command(
+ host_out, dist_dir, 'general-tests_list.zip'
+ )
+ tests_list_zip_command.extend(
+ self._generate_zip_options_for_items(
+ relative_root=str(host_out),
+ files=[f"{host_out / 'general-tests_list'}"],
+ )
+ )
+ zip_commands.append(tests_list_zip_command)
+
+ return zip_commands
+
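Note: to make the docstring above concrete, here is a sketch of the three intermediate list files and how they feed soong_zip; the file contents are hypothetical.

    # <host_out>/host_general-tests_list       absolute paths of host .config files, pulled into
    #                                          general-tests_configs.zip via '-P host ... -l <list file>'
    # <product_out>/target_general-tests_list  absolute paths of target .config files, pulled in
    #                                          via '-P target ... -l <list file>'
    # <host_out>/general-tests_list            'host/...' and 'target/...' relative paths, zipped as-is
    #                                          into general-tests_list.zip via '-f'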
def get_enabled_flag(self):
return 'general_tests_optimized'
diff --git a/ci/optimized_targets_test.py b/ci/optimized_targets_test.py
index 919c193..762b62e 100644
--- a/ci/optimized_targets_test.py
+++ b/ci/optimized_targets_test.py
@@ -19,10 +19,12 @@
import os
import pathlib
import re
+import subprocess
+import textwrap
import unittest
from unittest import mock
-import optimized_targets
from build_context import BuildContext
+import optimized_targets
from pyfakefs import fake_filesystem_unittest
@@ -43,11 +45,68 @@
def _setup_working_build_env(self):
self.change_info_file = pathlib.Path('/tmp/change_info')
+ self._write_soong_ui_file()
+ self._host_out_testcases = pathlib.Path('/tmp/top/host_out_testcases')
+ self._host_out_testcases.mkdir(parents=True)
+ self._target_out_testcases = pathlib.Path('/tmp/top/target_out_testcases')
+ self._target_out_testcases.mkdir(parents=True)
+ self._product_out = pathlib.Path('/tmp/top/product_out')
+ self._product_out.mkdir(parents=True)
+ self._soong_host_out = pathlib.Path('/tmp/top/soong_host_out')
+ self._soong_host_out.mkdir(parents=True)
+ self._host_out = pathlib.Path('/tmp/top/host_out')
+ self._host_out.mkdir(parents=True)
+
+ self._dist_dir = pathlib.Path('/tmp/top/out/dist')
+ self._dist_dir.mkdir(parents=True)
self.mock_os_environ.update({
'CHANGE_INFO': str(self.change_info_file),
+ 'TOP': '/tmp/top',
+ 'DIST_DIR': '/tmp/top/out/dist',
})
+ def _write_soong_ui_file(self):
+ soong_path = pathlib.Path('/tmp/top/build/soong')
+ soong_path.mkdir(parents=True)
+ with open(os.path.join(soong_path, 'soong_ui.bash'), 'w') as f:
+ f.write("""
+ #!/bin/bash
+ echo HOST_OUT_TESTCASES='/tmp/top/host_out_testcases'
+ echo TARGET_OUT_TESTCASES='/tmp/top/target_out_testcases'
+ echo PRODUCT_OUT='/tmp/top/product_out'
+ echo SOONG_HOST_OUT='/tmp/top/soong_host_out'
+ echo HOST_OUT='/tmp/top/host_out'
+ """)
+ os.chmod(os.path.join(soong_path, 'soong_ui.bash'), 0o666)
+
+ def _write_change_info_file(self):
+ change_info_contents = {
+ 'changes': [{
+ 'projectPath': '/project/path',
+ 'revisions': [{
+ 'fileInfos': [{
+ 'path': 'file/path/file_name',
+ }],
+ }],
+ }]
+ }
+
+ with open(self.change_info_file, 'w') as f:
+ json.dump(change_info_contents, f)
+
+ def _write_test_mapping_file(self):
+ test_mapping_contents = {
+ 'test-mapping-group': [
+ {
+ 'name': 'test_mapping_module',
+ },
+ ],
+ }
+
+ with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+ json.dump(test_mapping_contents, f)
+
def test_general_tests_optimized(self):
optimizer = self._create_general_tests_optimizer()
@@ -124,36 +183,56 @@
with self.assertRaises(json.decoder.JSONDecodeError):
build_targets = optimizer.get_build_targets()
- def _write_change_info_file(self):
- change_info_contents = {
- 'changes': [{
- 'projectPath': '/project/path',
- 'revisions': [{
- 'fileInfos': [{
- 'path': 'file/path/file_name',
- }],
- }],
- }]
- }
+ @mock.patch('subprocess.run')
+ def test_packaging_outputs_success(self, subprocess_run):
+ subprocess_run.return_value = self._get_soong_vars_output()
+ optimizer = self._create_general_tests_optimizer()
+ self._set_up_build_outputs(['test_mapping_module'])
- with open(self.change_info_file, 'w') as f:
- json.dump(change_info_contents, f)
+ targets = optimizer.get_build_targets()
+ package_commands = optimizer.get_package_outputs_commands()
- def _write_test_mapping_file(self):
- test_mapping_contents = {
- 'test-mapping-group': [
- {
- 'name': 'test_mapping_module',
- },
- ],
- }
+ self._verify_soong_zip_commands(package_commands, ['test_mapping_module'])
- with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
- json.dump(test_mapping_contents, f)
+ @mock.patch('subprocess.run')
+ def test_get_soong_dumpvars_fails_raises(self, subprocess_run):
+ subprocess_run.return_value = self._get_soong_vars_output(return_code=-1)
+ optimizer = self._create_general_tests_optimizer()
+ self._set_up_build_outputs(['test_mapping_module'])
- def _create_general_tests_optimizer(
- self, build_context: BuildContext = None
- ):
+ targets = optimizer.get_build_targets()
+
+ with self.assertRaisesRegex(RuntimeError, 'Soong dumpvars failed!'):
+ package_commands = optimizer.get_package_outputs_commands()
+
+ @mock.patch('subprocess.run')
+ def test_get_soong_dumpvars_bad_output_raises(self, subprocess_run):
+ subprocess_run.return_value = self._get_soong_vars_output(
+ stdout='This output is bad'
+ )
+ optimizer = self._create_general_tests_optimizer()
+ self._set_up_build_outputs(['test_mapping_module'])
+
+ targets = optimizer.get_build_targets()
+
+ with self.assertRaisesRegex(
+ RuntimeError, 'Error parsing soong dumpvars output'
+ ):
+ package_commands = optimizer.get_package_outputs_commands()
+
+ @mock.patch('subprocess.run')
+ def test_no_build_outputs_packaging_fails(self, subprocess_run):
+ subprocess_run.return_value = self._get_soong_vars_output()
+ optimizer = self._create_general_tests_optimizer()
+
+ targets = optimizer.get_build_targets()
+
+ with self.assertRaisesRegex(
+ RuntimeError, 'No items specified to be added to zip'
+ ):
+ package_commands = optimizer.get_package_outputs_commands()
+
+ def _create_general_tests_optimizer(self, build_context: BuildContext = None):
if not build_context:
build_context = self._create_build_context()
return optimized_targets.GeneralTestsOptimizer(
@@ -170,7 +249,9 @@
build_context_dict = {}
build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}]
if general_tests_optimized:
- build_context_dict['enabledBuildFeatures'].append({'name': 'general_tests_optimized'})
+ build_context_dict['enabledBuildFeatures'].append(
+ {'name': 'general_tests_optimized'}
+ )
build_context_dict['testContext'] = test_context
return BuildContext(build_context_dict)
@@ -199,6 +280,81 @@
],
}
+ def _get_soong_vars_output(
+ self, return_code: int = 0, stdout: str = ''
+ ) -> subprocess.CompletedProcess:
+ return_value = subprocess.CompletedProcess(args=[], returncode=return_code)
+ if not stdout:
+ stdout = textwrap.dedent(f"""\
+ HOST_OUT_TESTCASES='{self._host_out_testcases}'
+ TARGET_OUT_TESTCASES='{self._target_out_testcases}'
+ PRODUCT_OUT='{self._product_out}'
+ SOONG_HOST_OUT='{self._soong_host_out}'
+ HOST_OUT='{self._host_out}'""")
+
+ return_value.stdout = stdout
+ return return_value
+
+ def _set_up_build_outputs(self, targets: list[str]):
+ for target in targets:
+ host_dir = self._host_out_testcases / target
+ host_dir.mkdir()
+ (host_dir / f'{target}.config').touch()
+ (host_dir / 'test_file').touch()
+
+ target_dir = self._target_out_testcases / target
+ target_dir.mkdir()
+ (target_dir / f'{target}.config').touch()
+ (target_dir / 'test_file').touch()
+
+ def _verify_soong_zip_commands(self, commands: list[str], targets: list[str]):
+ """Verify the structure of the zip commands.
+
+ Each zip command has to start with the soong_zip binary path, followed by a
+ couple of options and the name of the output zip file. Depending on which zip
+ is being created, check that a few essential items are included in it.
+
+ Args:
+ commands: list of command lists
+ targets: list of targets expected to be in general-tests.zip
+ """
+ for command in commands:
+ self.assertEqual(
+ '/tmp/top/host_out/prebuilts/build-tools/linux-x86/bin/soong_zip',
+ command[0],
+ )
+ self.assertEqual('-d', command[1])
+ self.assertEqual('-o', command[2])
+ match command[3]:
+ case '/tmp/top/out/dist/general-tests_configs.zip':
+ self.assertIn(f'{self._host_out}/host_general-tests_list', command)
+ self.assertIn(
+ f'{self._product_out}/target_general-tests_list', command
+ )
+ continue
+ case '/tmp/top/out/dist/general-tests_list.zip':
+ self.assertIn('-f', command)
+ self.assertIn(f'{self._host_out}/general-tests_list', command)
+ continue
+ case '/tmp/top/out/dist/general-tests.zip':
+ for target in targets:
+ self.assertIn(f'{self._host_out_testcases}/{target}', command)
+ self.assertIn(f'{self._target_out_testcases}/{target}', command)
+ self.assertIn(
+ f'{self._soong_host_out}/framework/cts-tradefed.jar', command
+ )
+ self.assertIn(
+ f'{self._soong_host_out}/framework/compatibility-host-util.jar',
+ command,
+ )
+ self.assertIn(
+ f'{self._soong_host_out}/framework/vts-tradefed.jar', command
+ )
+ continue
+ case _:
+ self.fail(f'malformed command: {command}')
+
if __name__ == '__main__':
# Setup logging to be silent so unit tests can pass through TF.
diff --git a/core/main.mk b/core/main.mk
index 8d0b465..80ffec4 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -289,6 +289,9 @@
$(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
+# Build bootloader.img/radio.img, and unpack the partitions.
+include $(BUILD_SYSTEM)/tasks/tools/update_bootloader_radio_image.mk
+
# For an unbundled image, we can skip blueprint_tools because unbundled image
# aims to remove a large number framework projects from the manifest, the
# sources or dependencies for these tools may be missing from the tree.
@@ -297,6 +300,9 @@
checkbuild: blueprint_tests
endif
+# Create necessary directories and symlinks in the root filesystem
+include system/core/rootdir/create_root_structure.mk
+
endif # dont_bother
ifndef subdir_makefiles_total
@@ -305,9 +311,6 @@
$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] finishing legacy Make module parsing ...)
-# Create necessary directories and symlinks in the root filesystem
-include system/core/rootdir/create_root_structure.mk
-
# -------------------------------------------------------------------
# All module makefiles have been included at this point.
# -------------------------------------------------------------------
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index a77956b..4693bcd 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -18,7 +18,7 @@
#
# TODO: Should we do all of the images in $(IMAGES_TO_BUILD)?
-_FLAG_PARTITIONS := product system system_ext vendor
+_FLAG_PARTITIONS := product system vendor
# -----------------------------------------------------------------
@@ -28,7 +28,6 @@
# $(1): built aconfig flags file (out)
# $(2): installed aconfig flags file (out)
# $(3): the partition (in)
-# $(4): input aconfig files for the partition (in)
define generate-partition-aconfig-flag-file
$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
$(eval $(strip $(1)): PRIVATE_IN := $(strip $(4)))
@@ -36,12 +35,14 @@
mkdir -p $$(dir $$(PRIVATE_OUT))
$$(if $$(PRIVATE_IN), \
$$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
- --filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \
+ --filter container:$(strip $(3)) \
+ $$(addprefix --cache ,$$(PRIVATE_IN)), \
echo -n > $$(PRIVATE_OUT) \
)
$(call copy-one-file, $(1), $(2))
endef
+
# Create a summary file of build flags for each partition
# $(1): built aconfig flags file (out)
# $(2): installed aconfig flags file (out)
@@ -59,16 +60,22 @@
$(call copy-one-file, $(1), $(2))
endef
-
$(foreach partition, $(_FLAG_PARTITIONS), \
$(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \
$(eval $(call generate-partition-aconfig-flag-file, \
- $(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
- $(aconfig_flag_summaries_protobuf.$(partition)), \
- $(partition), \
- $(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
+ $(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
+ $(aconfig_flag_summaries_protobuf.$(partition)), \
+ $(partition), \
+ $(sort \
+ $(foreach m, $(call register-names-for-partition, $(partition)), \
$(ALL_MODULES.$(m).ACONFIG_FILES) \
- )), \
+ ) \
+ $(if $(filter system, $(partition)), \
+ $(foreach m, $(call register-names-for-partition, system_ext), \
+ $(ALL_MODULES.$(m).ACONFIG_FILES) \
+ ) \
+ ) \
+ ) \
)) \
)
@@ -175,4 +182,3 @@
$(eval aconfig_storage_flag_map.$(partition):=) \
$(eval aconfig_storage_flag_val.$(partition):=) \
)
-
diff --git a/core/tasks/tools/update_bootloader_radio_image.mk b/core/tasks/tools/update_bootloader_radio_image.mk
new file mode 100644
index 0000000..0ebf247
--- /dev/null
+++ b/core/tasks/tools/update_bootloader_radio_image.mk
@@ -0,0 +1,17 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifeq ($(USES_DEVICE_GOOGLE_ZUMA),true)
+ -include vendor/google_devices/zuma/prebuilts/misc_bins/update_bootloader_radio_image.mk
+endif
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 1a3f2cf..61d7235 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -76,6 +76,7 @@
com.android.mediaprovider:framework-mediaprovider \
com.android.mediaprovider:framework-pdf \
com.android.mediaprovider:framework-pdf-v \
+ com.android.mediaprovider:framework-photopicker \
com.android.ondevicepersonalization:framework-ondevicepersonalization \
com.android.os.statsd:framework-statsd \
com.android.permission:framework-permission \
diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
index dbc4ab5..1ac58c1 100644
--- a/tools/aconfig/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -745,10 +745,8 @@
boolean val;
try {
val = reader.getBooleanFlagValue(1);
- if (val == disabledRw) {
- Log.i(TAG, String.format(SUCCESS_LOG, "disabledRw"));
- } else {
- Log.i(TAG, String.format(MISMATCH_LOG, "disabledRw", val, disabledRw));
+ if (val != disabledRw) {
+ Log.w(TAG, String.format(MISMATCH_LOG, "disabledRw", val, disabledRw));
}
if (useNewStorageValueAndDiscardOld) {
@@ -756,10 +754,8 @@
}
val = reader.getBooleanFlagValue(2);
- if (val == disabledRwExported) {
- Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwExported"));
- } else {
- Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwExported", val, disabledRwExported));
+ if (val != disabledRwExported) {
+ Log.w(TAG, String.format(MISMATCH_LOG, "disabledRwExported", val, disabledRwExported));
}
if (useNewStorageValueAndDiscardOld) {
@@ -767,10 +763,8 @@
}
val = reader.getBooleanFlagValue(8);
- if (val == enabledRw) {
- Log.i(TAG, String.format(SUCCESS_LOG, "enabledRw"));
- } else {
- Log.i(TAG, String.format(MISMATCH_LOG, "enabledRw", val, enabledRw));
+ if (val != enabledRw) {
+ Log.w(TAG, String.format(MISMATCH_LOG, "enabledRw", val, enabledRw));
}
if (useNewStorageValueAndDiscardOld) {
@@ -804,10 +798,8 @@
boolean val;
try {
val = reader.getBooleanFlagValue(3);
- if (val == disabledRwInOtherNamespace) {
- Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwInOtherNamespace"));
- } else {
- Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwInOtherNamespace", val, disabledRwInOtherNamespace));
+ if (val != disabledRwInOtherNamespace) {
+ Log.w(TAG, String.format(MISMATCH_LOG, "disabledRwInOtherNamespace", val, disabledRwInOtherNamespace));
}
if (useNewStorageValueAndDiscardOld) {
diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
index 1a14f64..797a893 100644
--- a/tools/aconfig/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -79,8 +79,18 @@
.read_to_string(&mut contents)
.with_context(|| format!("failed to read {}", input.source))?;
- let flag_declarations = aconfig_protos::flag_declarations::try_from_text_proto(&contents)
- .with_context(|| input.error_context())?;
+ let mut flag_declarations =
+ aconfig_protos::flag_declarations::try_from_text_proto(&contents)
+ .with_context(|| input.error_context())?;
+
+ // system_ext flags should be treated as system flags as we are combining /system_ext
+ // and /system as one container
+ // TODO: remove this logic when we start enforcing that system_ext cannot be set as
+ // container in aconfig declaration files.
+ if flag_declarations.container() == "system_ext" {
+ flag_declarations.set_container(String::from("system"));
+ }
+
ensure!(
package == flag_declarations.package(),
"failed to parse {}: expected package {}, got {}",
diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
index 9970b1f..bc01aa4 100644
--- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
@@ -90,10 +90,8 @@
{{ -if flag.is_read_write }}
val = reader.getBooleanFlagValue({flag.flag_offset});
- if (val == {flag.method_name}) \{
- Log.i(TAG, String.format(SUCCESS_LOG, "{flag.method_name}"));
- } else \{
- Log.i(TAG, String.format(MISMATCH_LOG, "{flag.method_name}", val, {flag.method_name}));
+ if (val != {flag.method_name}) \{
+ Log.w(TAG, String.format(MISMATCH_LOG, "{flag.method_name}", val, {flag.method_name}));
}
if (useNewStorageValueAndDiscardOld) \{
diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp
index 95cecf4..932dfbf 100644
--- a/tools/aconfig/aconfig_device_paths/Android.bp
+++ b/tools/aconfig/aconfig_device_paths/Android.bp
@@ -51,4 +51,8 @@
static_libs: [
"libaconfig_java_proto_nano",
],
+ sdk_version: "core_platform",
+ apex_available: [
+ "//apex_available:platform",
+ ],
}
diff --git a/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
index 58c58de..4d41199 100644
--- a/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
+++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
@@ -29,7 +29,7 @@
* @hide
*/
public class DeviceProtos {
- static final String[] PATHS = {
+ public static final String[] PATHS = {
TEMPLATE
};
@@ -50,10 +50,11 @@
ArrayList<parsed_flag> result = new ArrayList();
for (String path : parsedFlagsProtoPaths()) {
- FileInputStream inputStream = new FileInputStream(path);
- parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
- for (parsed_flag flag : parsedFlags.parsedFlag) {
- result.add(flag);
+ try (FileInputStream inputStream = new FileInputStream(path)) {
+ parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
+ for (parsed_flag flag : parsedFlags.parsedFlag) {
+ result.add(flag);
+ }
}
}
@@ -64,7 +65,7 @@
* Returns the list of all on-device aconfig protos paths.
* @hide
*/
- private static List<String> parsedFlagsProtoPaths() {
+ public static List<String> parsedFlagsProtoPaths() {
ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS));
File apexDirectory = new File(APEX_DIR);
diff --git a/tools/aconfig/aflags/src/aconfig_storage_source.rs b/tools/aconfig/aflags/src/aconfig_storage_source.rs
index b2fd3c9..68edf7d 100644
--- a/tools/aconfig/aflags/src/aconfig_storage_source.rs
+++ b/tools/aconfig/aflags/src/aconfig_storage_source.rs
@@ -1,3 +1,4 @@
+use crate::load_protos;
use crate::{Flag, FlagSource};
use crate::{FlagPermission, FlagValue, ValuePickedFrom};
use aconfigd_protos::{
@@ -9,13 +10,18 @@
use anyhow::Result;
use protobuf::Message;
use protobuf::SpecialFields;
+use std::collections::HashMap;
use std::io::{Read, Write};
use std::net::Shutdown;
use std::os::unix::net::UnixStream;
pub struct AconfigStorageSource {}
-fn convert(msg: ProtoFlagQueryReturnMessage) -> Result<Flag> {
+fn load_flag_to_container() -> Result<HashMap<String, String>> {
+ Ok(load_protos::load()?.into_iter().map(|p| (p.qualified_name(), p.container)).collect())
+}
+
+fn convert(msg: ProtoFlagQueryReturnMessage, containers: &HashMap<String, String>) -> Result<Flag> {
let (value, value_picked_from) = match (
&msg.boot_flag_value,
msg.default_flag_value,
@@ -55,15 +61,21 @@
None => return Err(anyhow!("missing permission")),
};
+ let name = msg.flag_name.ok_or(anyhow!("missing flag name"))?;
+ let package = msg.package_name.ok_or(anyhow!("missing package name"))?;
+ let qualified_name = format!("{package}.{name}");
Ok(Flag {
- name: msg.flag_name.ok_or(anyhow!("missing flag name"))?,
- package: msg.package_name.ok_or(anyhow!("missing package name"))?,
+ name,
+ package,
value,
permission,
value_picked_from,
staged_value,
- container: "-".to_string(),
-
+ container: containers
+ .get(&qualified_name)
+ .cloned()
+ .unwrap_or_else(|| "<no container>".to_string()),
// TODO: remove once DeviceConfig is not in the CLI.
namespace: "-".to_string(),
})
@@ -114,9 +126,13 @@
impl FlagSource for AconfigStorageSource {
fn list_flags() -> Result<Vec<Flag>> {
+ let containers = load_flag_to_container()?;
read_from_socket()
.map(|query_messages| {
- query_messages.iter().map(|message| convert(message.clone())).collect::<Vec<_>>()
+ query_messages
+ .iter()
+ .map(|message| convert(message.clone(), &containers))
+ .collect::<Vec<_>>()
})?
.into_iter()
.collect()
diff --git a/tools/aconfig/fake_device_config/src/android/util/Log.java b/tools/aconfig/fake_device_config/src/android/util/Log.java
index 3e7fd0f..79de680 100644
--- a/tools/aconfig/fake_device_config/src/android/util/Log.java
+++ b/tools/aconfig/fake_device_config/src/android/util/Log.java
@@ -5,6 +5,10 @@
return 0;
}
+ public static int w(String tag, String msg) {
+ return 0;
+ }
+
public static int e(String tag, String msg) {
return 0;
}
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 985cd56..6446e1f 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -264,6 +264,10 @@
--compression_factor
Specify the maximum block size to be compressed at once during OTA. supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k
+
+ --full_ota_partitions
+ Specify a list of partitions that should be updated in a full OTA fashion,
+ even if an incremental OTA is about to be generated
"""
from __future__ import print_function
@@ -283,7 +287,7 @@
import ota_utils
import payload_signer
from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata,
- PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir)
+ PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir, TARGET_FILES_IMAGES_SUBDIR)
from common import DoesInputFileContain, IsSparseImage
import target_files_diff
from non_ab_ota import GenerateNonAbOtaPackage
@@ -337,6 +341,7 @@
OPTIONS.max_threads = None
OPTIONS.vabc_cow_version = None
OPTIONS.compression_factor = None
+OPTIONS.full_ota_partitions = None
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
@@ -892,6 +897,14 @@
if source_file is not None:
source_file = ExtractTargetFiles(source_file)
+ if OPTIONS.full_ota_partitions:
+ for partition in OPTIONS.full_ota_partitions:
+ for subdir in TARGET_FILES_IMAGES_SUBDIR:
+ image_path = os.path.join(source_file, subdir, partition + ".img")
+ if os.path.exists(image_path):
+ logger.info(
+ "Ignoring source image %s for partition %s because it is configured to use full OTA", image_path, partition)
+ os.remove(image_path)
assert "ab_partitions" in OPTIONS.source_info_dict, \
"META/ab_partitions.txt is required for ab_update."
assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -1193,7 +1206,7 @@
def main(argv):
- def option_handler(o, a):
+ def option_handler(o, a: str):
if o in ("-i", "--incremental_from"):
OPTIONS.incremental_source = a
elif o == "--full_radio":
@@ -1320,6 +1333,9 @@
else:
raise ValueError("Cannot parse value %r for option %r - only "
"integers are allowed." % (a, o))
+ elif o == "--full_ota_partitions":
+ OPTIONS.full_ota_partitions = set(
+ a.strip().strip("\"").strip("'").split(","))
else:
return False
return True
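Note: a quick illustration (hypothetical value) of how the new --full_ota_partitions value is normalized into a set of partition names by the branch above.

    a = '"system,vendor"'  # option value, possibly still carrying literal quotes
    partitions = set(a.strip().strip('"').strip("'").split(","))
    # partitions == {'system', 'vendor'}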
@@ -1370,6 +1386,7 @@
"max_threads=",
"vabc_cow_version=",
"compression_factor=",
+ "full_ota_partitions=",
], extra_option_handler=[option_handler, payload_signer.signer_options])
common.InitLogging()
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 0254f37..4ad97e0 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -184,6 +184,7 @@
import shutil
import stat
import sys
+import shlex
import tempfile
import zipfile
from xml.etree import ElementTree
@@ -589,7 +590,7 @@
if OPTIONS.payload_signer:
args.extend(["--payload_signer=" + OPTIONS.payload_signer])
if OPTIONS.payload_signer_args:
- args.extend(["--payload_signer_args=" + OPTIONS.payload_signer_args])
+ args.extend(["--payload_signer_args=" + shlex.join(OPTIONS.payload_signer_args)])
if OPTIONS.search_path:
args.extend(["--search_path", OPTIONS.search_path])
if OPTIONS.payload_signer_maximum_signature_size: