Merge "Remove racoon, mtpd, pppd, ip-up-vpn" into main
diff --git a/core/Makefile b/core/Makefile
index 09c815e..7d7457e 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1158,7 +1158,7 @@
BOARD_KERNEL_16K_BOOTIMAGE_PARTITION_SIZE := $(BOARD_BOOTIMAGE_PARTITION_SIZE)
-$(BUILT_BOOTIMAGE_16K_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS) $(BUILT_KERNEL_16K_TARGET)
+$(BUILT_BOOTIMAGE_16K_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(BUILT_KERNEL_16K_TARGET)
$(call pretty,"Target boot 16k image: $@")
$(call build_boot_from_kernel_avb_enabled,$@,$(BUILT_KERNEL_16K_TARGET))
@@ -1281,15 +1281,6 @@
define build_boot_from_kernel_avb_enabled
$(eval kernel := $(2))
$(MKBOOTIMG) --kernel $(kernel) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
- $(if $(BOARD_GKI_SIGNING_KEY_PATH), \
- $(eval boot_signature := $(call intermediates-dir-for,PACKAGING,generic_boot)/$(notdir $(1)).boot_signature) \
- $(eval kernel_signature := $(call intermediates-dir-for,PACKAGING,generic_kernel)/$(notdir $(kernel)).boot_signature) \
- $(call generate_generic_boot_image_certificate,$(1),$(boot_signature),boot,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
- $(call generate_generic_boot_image_certificate,$(kernel),$(kernel_signature),generic_kernel,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
- cat $(kernel_signature) >> $(boot_signature) $(newline) \
- $(call assert-max-image-size,$(boot_signature),16 << 10) $(newline) \
- truncate -s $$(( 16 << 10 )) $(boot_signature) $(newline) \
- cat "$(boot_signature)" >> $(1))
$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot)))
$(AVBTOOL) add_hash_footer \
--image $(1) \
@@ -1306,11 +1297,8 @@
INTERNAL_BOOTIMAGE_ARGS := \
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
-# TODO(b/229701033): clean up BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK.
-ifneq ($(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK),true)
- ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
- INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
- endif
+ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
+ INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
endif
ifndef BUILDING_VENDOR_BOOT_IMAGE
@@ -1343,51 +1331,9 @@
endif
endif # BUILDING_VENDOR_BOOT_IMAGE == "" && BOARD_USES_GENERIC_KERNEL_IMAGE != true
-ifdef BOARD_GKI_SIGNING_KEY_PATH
- # GKI boot images will not set system version & SPL value in the header.
- # They can be set by the device manufacturer in the AVB properties instead.
- INTERNAL_MKBOOTIMG_VERSION_ARGS :=
-else
- INTERNAL_MKBOOTIMG_VERSION_ARGS := \
- --os_version $(PLATFORM_VERSION_LAST_STABLE) \
- --os_patch_level $(PLATFORM_SECURITY_PATCH)
-endif # BOARD_GKI_SIGNING_KEY_PATH
-
-# $(1): image target to certify
-# $(2): out certificate target
-# $(3): image name
-# $(4): additional AVB arguments
-define generate_generic_boot_image_certificate
- rm -rf "$(2)"
- mkdir -p "$(dir $(2))"
- $(GENERATE_GKI_CERTIFICATE) $(INTERNAL_GKI_CERTIFICATE_ARGS) \
- --additional_avb_args "$(4)" \
- --name "$(3)" --output "$(2)" "$(1)"
-endef
-
-INTERNAL_GKI_CERTIFICATE_ARGS :=
-INTERNAL_GKI_CERTIFICATE_DEPS :=
-ifdef BOARD_GKI_SIGNING_KEY_PATH
- ifndef BOARD_GKI_SIGNING_ALGORITHM
- $(error BOARD_GKI_SIGNING_ALGORITHM should be defined with BOARD_GKI_SIGNING_KEY_PATH)
- endif
-
- INTERNAL_GKI_CERTIFICATE_ARGS := \
- --key "$(BOARD_GKI_SIGNING_KEY_PATH)" \
- --algorithm "$(BOARD_GKI_SIGNING_ALGORITHM)" \
- --avbtool "$(AVBTOOL)"
-
- # Quote and pass BOARD_GKI_SIGNING_SIGNATURE_ARGS as a single string argument.
- ifdef BOARD_GKI_SIGNING_SIGNATURE_ARGS
- INTERNAL_GKI_CERTIFICATE_ARGS += --additional_avb_args "$(BOARD_GKI_SIGNING_SIGNATURE_ARGS)"
- endif
-
- INTERNAL_GKI_CERTIFICATE_DEPS := \
- $(GENERATE_GKI_CERTIFICATE) \
- $(BOARD_GKI_SIGNING_KEY_PATH) \
- $(AVBTOOL)
-
-endif
+INTERNAL_MKBOOTIMG_VERSION_ARGS := \
+ --os_version $(PLATFORM_VERSION_LAST_STABLE) \
+ --os_patch_level $(PLATFORM_SECURITY_PATCH)
# Define these only if we are building boot
ifdef BUILDING_BOOT_IMAGE
@@ -1407,17 +1353,17 @@
$(call build_boot_from_kernel_avb_enabled,$(1),$(kernel))
endef
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH)
$(call pretty,"Target boot image: $@")
$(call build_boot_board_avb_enabled,$@)
$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
-$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES) $(INTERNAL_GKI_CERTIFICATE_DEPS),$(PRODUCT_OUT)/:/)
+$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
.PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS)
+bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
@echo "make $@: ignoring dependencies"
$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
@@ -2176,6 +2122,7 @@
$(if $(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT),$(hide) echo "$(1)_extfs_inode_count=$(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT),$(hide) echo "$(1)_extfs_rsv_pct=$(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS),$(hide) echo "$(1)_f2fs_sldc_flags=$(BOARD_$(_var)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)" >> $(2))
+$(if $(BOARD_$(_var)IMAGE_F2FS_BLOCKSIZE),$(hide) echo "$(1)_f2fs_blocksize=$(BOARD_$(_var)IMAGE_F2FS_BLOCKSIZE)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_FILE_SYSTEM_COMPRESS),$(hide) echo "$(1)_f2fs_compress=$(BOARD_$(_var)IMAGE_FILE_SYSTEM_COMPRESS)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_FILE_SYSTEM_TYPE),$(hide) echo "$(1)_fs_type=$(BOARD_$(_var)IMAGE_FILE_SYSTEM_TYPE)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_JOURNAL_SIZE),$(hide) echo "$(1)_journal_size=$(BOARD_$(_var)IMAGE_JOURNAL_SIZE)" >> $(2))
@@ -2266,6 +2213,7 @@
$(if $(BOARD_EROFS_SHARE_DUP_BLOCKS),$(hide) echo "erofs_share_dup_blocks=$(BOARD_EROFS_SHARE_DUP_BLOCKS)" >> $(1))
$(if $(BOARD_EROFS_USE_LEGACY_COMPRESSION),$(hide) echo "erofs_use_legacy_compression=$(BOARD_EROFS_USE_LEGACY_COMPRESSION)" >> $(1))
$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
+$(if $(BOARD_F2FS_BLOCKSIZE),$(hide) echo "f2fs_blocksize=$(BOARD_F2FS_BLOCKSIZE)" >> $(1))
$(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
$(if $(BOARD_FLASH_ERASE_BLOCK_SIZE), $(hide) echo "flash_erase_block_size=$(BOARD_FLASH_ERASE_BLOCK_SIZE)" >> $(1))
$(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
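(Illustrative note, not part of the change; board names and value are hypothetical: with the new hooks above, a BoardConfig.mk that sets the f2fs block-size variables causes the generated prop files to carry the matching entries.)
    BOARD_USERDATAIMAGE_F2FS_BLOCKSIZE := 4096   # emits "userdata_f2fs_blocksize=4096"
    BOARD_F2FS_BLOCKSIZE := 4096                 # emits the global "f2fs_blocksize=4096"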
@@ -2348,8 +2296,6 @@
)
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
$(hide) echo "recovery_as_boot=true" >> $(1))
-$(if $(filter true,$(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK)),\
- $(hide) echo "gki_boot_image_without_ramdisk=true" >> $(1))
$(hide) echo "root_dir=$(TARGET_ROOT_OUT)" >> $(1)
$(if $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)),\
$(hide) echo "use_dynamic_partition_size=true" >> $(1))
@@ -3835,7 +3781,7 @@
ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_dlkm_lib_modules_symlink)
endif
-# Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and STUB_LIBRARIES
+# Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and SOONG_STUB_VENDOR_LIBRARIES
vendor_linker_config_file := $(TARGET_OUT_VENDOR)/etc/linker.config.pb
$(vendor_linker_config_file): private_linker_config_fragments := $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)
$(vendor_linker_config_file): $(INTERNAL_VENDORIMAGE_FILES) $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
@@ -3846,7 +3792,7 @@
--source $(call normalize-path-list,$(private_linker_config_fragments)) \
--output $@
$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $@ \
- --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT_VENDOR)"
+ --output $@ --value "$(SOONG_STUB_VENDOR_LIBRARIES)" --system "$(TARGET_OUT_VENDOR)"
$(call define declare-0p-target,$(vendor_linker_config_file),)
INTERNAL_VENDORIMAGE_FILES += $(vendor_linker_config_file)
ALL_DEFAULT_INSTALLED_MODULES += $(vendor_linker_config_file)
@@ -4654,6 +4600,12 @@
--prop com.android.build.pvmfw.security_patch:$(PVMFW_SECURITY_PATCH)
endif
+# Append avbpubkey of microdroid-vendor partition into vendor_boot partition.
+ifdef MICRODROID_VENDOR_AVBKEY
+BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS += \
+ --prop_from_file com.android.build.microdroid-vendor.avbpubkey:$(MICRODROID_VENDOR_AVBKEY)
+endif
+
BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
INIT_BOOT_FOOTER_ARGS := BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS
VENDOR_BOOT_FOOTER_ARGS := BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS
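(Illustrative note, key path hypothetical: a device opting in sets the variable in its board or device makefile, and the public key is then embedded as an AVB property on vendor_boot.)
    MICRODROID_VENDOR_AVBKEY := device/acme/example/microdroid_vendor.avbpubkey
    # resulting extra hash-footer argument:
    #   --prop_from_file com.android.build.microdroid-vendor.avbpubkey:device/acme/example/microdroid_vendor.avbpubkey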
@@ -5530,7 +5482,6 @@
fsck.erofs \
fsck.f2fs \
fs_config \
- generate_gki_certificate \
generate_verity_key \
host_init_verifier \
img2simg \
@@ -5815,11 +5766,6 @@
$(hide) echo 'recovery_mkbootimg_args=$(BOARD_RECOVERY_MKBOOTIMG_ARGS)' >> $@
$(hide) echo 'mkbootimg_version_args=$(INTERNAL_MKBOOTIMG_VERSION_ARGS)' >> $@
$(hide) echo 'mkbootimg_init_args=$(BOARD_MKBOOTIMG_INIT_ARGS)' >> $@
-ifdef BOARD_GKI_SIGNING_KEY_PATH
- $(hide) echo 'gki_signing_key_path=$(BOARD_GKI_SIGNING_KEY_PATH)' >> $@
- $(hide) echo 'gki_signing_algorithm=$(BOARD_GKI_SIGNING_ALGORITHM)' >> $@
- $(hide) echo 'gki_signing_signature_args=$(BOARD_GKI_SIGNING_SIGNATURE_ARGS)' >> $@
-endif
$(hide) echo "multistage_support=1" >> $@
$(hide) echo "blockimgdiff_versions=3,4" >> $@
ifeq ($(PRODUCT_BUILD_GENERIC_OTA_PACKAGE),true)
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 6d64f97..c74aa49 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -31,6 +31,7 @@
$(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
$(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
$(call add_soong_config_var,ANDROID,BOARD_USES_RECOVERY_AS_BOOT)
+$(call add_soong_config_var,ANDROID,CHECK_DEV_TYPE_VIOLATIONS)
$(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
# Default behavior for the tree wrt building modules or using prebuilts. This
@@ -180,7 +181,13 @@
else
SYSTEM_OPTIMIZE_JAVA ?= true
endif
+
+ifeq (true,$(FULL_SYSTEM_OPTIMIZE_JAVA))
+ SYSTEM_OPTIMIZE_JAVA := true
+endif
+
$(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
+$(call add_soong_config_var,ANDROID,FULL_SYSTEM_OPTIMIZE_JAVA)
# Check for SupplementalApi module.
ifeq ($(wildcard packages/modules/SupplementalApi),)
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index b141a98..2671956 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -85,11 +85,6 @@
my_prebuilt_src_file := $(my_extracted_apk)
my_extracted_apk :=
my_extract_apk :=
-ifeq ($(PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK),true)
-# If the product property is set, always preopt for extracted modules to prevent executing out of
-# the APK.
-my_preopt_for_extracted_apk := true
-endif
endif
rs_compatibility_jni_libs :=
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 8236dc9..f533358 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -670,8 +670,14 @@
copy_test_data_pairs :=
-my_installed_test_data := $(call copy-many-files,$(my_test_data_pairs))
-$(LOCAL_INSTALLED_MODULE): $(my_installed_test_data)
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ my_installed_test_data := $(call copy-many-files,$(my_test_data_pairs))
+ $(LOCAL_INSTALLED_MODULE): $(my_installed_test_data)
+else
+ # Skip installing test data for Soong modules, it's already been handled.
+ # Just compute my_installed_test_data.
+ my_installed_test_data := $(foreach f, $(my_test_data_pairs), $(call word-colon,2,$(f)))
+endif
endif
endif
@@ -1017,15 +1023,14 @@
ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES := \
$(ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES)
-ifdef LOCAL_TEST_DATA
+ifneq ($(my_test_data_file_pairs),)
# Export the list of targets that are handled as data inputs and required
- # by tests at runtime. The LOCAL_TEST_DATA format is generated from below
- # https://cs.android.com/android/platform/superproject/+/master:build/soong/android/androidmk.go;l=925-944;drc=master
- # which format is like $(path):$(relative_file) but for module-info, only
- # the string after ":" is needed.
+ # by tests at runtime. The format of my_test_data_file_pairs is
+ # $(path):$(relative_file), but for module-info, only the string after
+ # ":" is needed.
ALL_MODULES.$(my_register_name).TEST_DATA := \
$(strip $(ALL_MODULES.$(my_register_name).TEST_DATA) \
- $(foreach f, $(LOCAL_TEST_DATA),\
+ $(foreach f, $(my_test_data_file_pairs),\
$(call word-colon,2,$(f))))
endif
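(Illustrative note, paths invented: each my_test_data_file_pairs entry is a source:relative pair, and module-info keeps only the part after the colon.)
    my_test_data_file_pairs := out/host/linux-x86/testcases/foo/data/input.txt:data/input.txt
    # $(foreach f,$(my_test_data_file_pairs),$(call word-colon,2,$(f)))  ->  data/input.txt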
diff --git a/core/binary.mk b/core/binary.mk
index 4c68ba7..8c107bd 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -289,25 +289,20 @@
endif
ifneq ($(LOCAL_USE_VNDK),)
- # Required VNDK version for vendor modules is BOARD_VNDK_VERSION.
- my_api_level := $(BOARD_VNDK_VERSION)
- ifeq ($(my_api_level),current)
- # Build with current PLATFORM_VNDK_VERSION.
- # If PLATFORM_VNDK_VERSION has a CODENAME, it will return
- # __ANDROID_API_FUTURE__.
- my_api_level := $(call codename-or-sdk-to-sdk,$(PLATFORM_VNDK_VERSION))
- else
- # Build with current BOARD_VNDK_VERSION.
- my_api_level := $(call codename-or-sdk-to-sdk,$(BOARD_VNDK_VERSION))
- endif
my_cflags += -D__ANDROID_VNDK__
ifneq ($(LOCAL_USE_VNDK_VENDOR),)
- # Vendor modules have LOCAL_USE_VNDK_VENDOR when
- # BOARD_VNDK_VERSION is defined.
+ # Vendor modules have LOCAL_USE_VNDK_VENDOR
my_cflags += -D__ANDROID_VENDOR__
+
+ ifeq ($(BOARD_API_LEVEL),)
+ # TODO(b/314036847): This is a fallback for UDC targets.
+ # This must be a build failure when UDC is no longer built from this source tree.
+ my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION)
+ else
+ my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL)
+ endif
else ifneq ($(LOCAL_USE_VNDK_PRODUCT),)
- # Product modules have LOCAL_USE_VNDK_PRODUCT when
- # PRODUCT_PRODUCT_VNDK_VERSION is defined.
+ # Product modules have LOCAL_USE_VNDK_PRODUCT
my_cflags += -D__ANDROID_PRODUCT__
endif
endif
diff --git a/core/board_config.mk b/core/board_config.mk
index 537fb73..ae11eb6 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -161,9 +161,6 @@
_board_strip_list += BOARD_AVB_VENDOR_KERNEL_BOOT_KEY_PATH
_board_strip_list += BOARD_AVB_VENDOR_KERNEL_BOOT_ALGORITHM
_board_strip_list += BOARD_AVB_VENDOR_KERNEL_BOOT_ROLLBACK_INDEX_LOCATION
-_board_strip_list += BOARD_GKI_SIGNING_SIGNATURE_ARGS
-_board_strip_list += BOARD_GKI_SIGNING_ALGORITHM
-_board_strip_list += BOARD_GKI_SIGNING_KEY_PATH
_board_strip_list += BOARD_MKBOOTIMG_ARGS
_board_strip_list += BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE
_board_strip_list += BOARD_VENDOR_KERNEL_BOOTIMAGE_PARTITION_SIZE
diff --git a/core/config.mk b/core/config.mk
index 90cc33c..f8a9879 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -436,16 +436,16 @@
# Boolean variable determining if AOSP is page size agnostic. This means
# that AOSP can use a kernel configured with 4k/16k/64k PAGE SIZES.
-TARGET_PAGE_SIZE_AGNOSTIC := false
-ifdef PRODUCT_PAGE_SIZE_AGNOSTIC
- TARGET_PAGE_SIZE_AGNOSTIC := $(PRODUCT_PAGE_SIZE_AGNOSTIC)
- ifeq ($(TARGET_PAGE_SIZE_AGNOSTIC),true)
+TARGET_NO_BIONIC_PAGE_SIZE_MACRO := false
+ifdef PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
+ TARGET_NO_BIONIC_PAGE_SIZE_MACRO := $(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)
+ ifeq ($(TARGET_NO_BIONIC_PAGE_SIZE_MACRO),true)
ifneq ($(TARGET_MAX_PAGE_SIZE_SUPPORTED),65536)
$(error TARGET_MAX_PAGE_SIZE_SUPPORTED has to be 65536 to support page size agnostic)
endif
endif
endif
-.KATI_READONLY := TARGET_PAGE_SIZE_AGNOSTIC
+.KATI_READONLY := TARGET_NO_BIONIC_PAGE_SIZE_MACRO
# Pruned directory options used when using findleaves.py
# See envsetup.mk for a description of SCAN_EXCLUDE_DIRS
@@ -683,7 +683,6 @@
MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
MINIGZIP := $(GZIP)
LZ4 := $(HOST_OUT_EXECUTABLES)/lz4$(HOST_EXECUTABLE_SUFFIX)
-GENERATE_GKI_CERTIFICATE := $(HOST_OUT_EXECUTABLES)/generate_gki_certificate$(HOST_EXECUTABLE_SUFFIX)
ifeq (,$(strip $(BOARD_CUSTOM_MKBOOTIMG)))
MKBOOTIMG := $(HOST_OUT_EXECUTABLES)/mkbootimg$(HOST_EXECUTABLE_SUFFIX)
else
@@ -1313,3 +1312,9 @@
.KATI_READONLY := DEFAULT_DATA_OUT_MODULES
include $(BUILD_SYSTEM)/dumpvar.mk
+
+ifeq (true,$(FULL_SYSTEM_OPTIMIZE_JAVA))
+ifeq (,$(SYSTEM_OPTIMIZE_JAVA))
+$(error SYSTEM_OPTIMIZE_JAVA must be enabled when FULL_SYSTEM_OPTIMIZE_JAVA is enabled)
+endif
+endif
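(Illustrative note: the guard above means a product enabling full optimization must leave the base flag on, e.g.)
    SYSTEM_OPTIMIZE_JAVA := true
    FULL_SYSTEM_OPTIMIZE_JAVA := true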
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 83be006..3507961 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -355,6 +355,12 @@
my_sanitize := $(filter-out cfi,$(my_sanitize))
my_cflags += -fno-lto
my_ldflags += -fno-lto
+
+ # TODO(b/142430592): Upstream linker scripts for sanitizer runtime libraries
+ # discard the sancov_lowest_stack symbol, because it's emulated TLS (and thus
+ # doesn't match the linker script due to the "__emutls_v." prefix).
+ my_cflags += -fno-sanitize-coverage=stack-depth
+ my_ldflags += -fno-sanitize-coverage=stack-depth
endif
ifneq ($(filter integer_overflow,$(my_sanitize)),)
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 0155c67..151591e 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -60,10 +60,8 @@
LOCAL_DEX_PREOPT :=
endif
-ifneq (true,$(my_preopt_for_extracted_apk))
- ifeq (true,$(WITH_DEXPREOPT_ART_BOOT_IMG_ONLY))
- LOCAL_DEX_PREOPT :=
- endif
+ifeq (true,$(WITH_DEXPREOPT_ART_BOOT_IMG_ONLY))
+ LOCAL_DEX_PREOPT :=
endif
my_process_profile :=
@@ -392,7 +390,6 @@
$(call add_json_list, DexPreoptImageLocationsOnDevice,$(my_dexpreopt_image_locations_on_device))
$(call add_json_list, PreoptBootClassPathDexFiles, $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES))
$(call add_json_list, PreoptBootClassPathDexLocations,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS))
- $(call add_json_bool, PreoptExtractedApk, $(my_preopt_for_extracted_apk))
$(call add_json_bool, NoCreateAppImage, $(filter false,$(LOCAL_DEX_PREOPT_APP_IMAGE)))
$(call add_json_bool, ForceCreateAppImage, $(filter true,$(LOCAL_DEX_PREOPT_APP_IMAGE)))
$(call add_json_bool, PresignedPrebuilt, $(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
diff --git a/core/main.mk b/core/main.mk
index 367b5bf..348a964 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -182,6 +182,12 @@
$(KATI_obsolete_var PRODUCT_FULL_TREBLE,\
Code should be written to work regardless of a device being Treble)
+# Set ro.llndk.api_level to show the maximum vendor API level that the LLNDK in
+# the system partition supports.
+ifdef RELEASE_BOARD_API_LEVEL
+ADDITIONAL_SYSTEM_PROPERTIES += ro.llndk.api_level=$(RELEASE_BOARD_API_LEVEL)
+endif
+
# Sets ro.actionable_compatible_property.enabled to know on runtime whether the
# allowed list of actionable compatible properties is enabled or not.
ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=true
@@ -1271,6 +1277,11 @@
$(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
$(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
$(if $(filter arm64,$(TARGET_ARCH) $(TARGET_2ND_ARCH)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ARM64)) \
+ $(if $(PRODUCT_SHIPPING_API_LEVEL), \
+ $(if $(call math_gt_or_eq,29,$(PRODUCT_SHIPPING_API_LEVEL)),$(call get-product-var,$(1),PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29)) \
+ $(if $(call math_gt_or_eq,33,$(PRODUCT_SHIPPING_API_LEVEL)),$(call get-product-var,$(1),PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33)) \
+ $(if $(call math_gt_or_eq,34,$(PRODUCT_SHIPPING_API_LEVEL)),$(call get-product-var,$(1),PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34)) \
+ ) \
$(call auto-included-modules) \
) \
$(eval ### Filter out the overridden packages and executables before doing expansion) \
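(Illustrative note, module names hypothetical: the shipping-API-level package lists are now resolved during module expansion instead of being appended to PRODUCT_PACKAGES in product_config.mk. Each condition reads "shipping level <= N", so a device declaring level 33 picks up the 33 and 34 buckets but not 29.)
    PRODUCT_SHIPPING_API_LEVEL := 33
    PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33 += some.legacy.hal@1.0-service   # hypothetical module
    PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += android.hidl.memory@1.0-impl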
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 7465cbf..6935115 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -2,30 +2,6 @@
## Track NOTICE files
###########################################################
-ifneq ($(LOCAL_NOTICE_FILE),)
- notice_file:=$(strip $(LOCAL_NOTICE_FILE))
-else
- notice_file:=$(strip $(wildcard $(LOCAL_PATH)/LICENSE $(LOCAL_PATH)/LICENCE $(LOCAL_PATH)/NOTICE))
-endif
-
-ifeq ($(LOCAL_MODULE_CLASS),GYP)
- # We ignore NOTICE files for modules of type GYP.
- notice_file :=
-endif
-
-ifeq ($(LOCAL_MODULE_CLASS),FAKE)
- # We ignore NOTICE files for modules of type FAKE.
- notice_file :=
-endif
-
-# Soong generates stub libraries that don't need NOTICE files
-ifdef LOCAL_NO_NOTICE_FILE
- ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
- $(call pretty-error,LOCAL_NO_NOTICE_FILE should not be used by Android.mk files)
- endif
- notice_file :=
-endif
-
module_license_metadata := $(call local-meta-intermediates-dir)/$(my_register_name).meta_lic
$(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(foreach bi,$(LOCAL_SOONG_BUILT_INSTALLED),$(call word-colon,1,$(bi))),\
@@ -45,6 +21,30 @@
# Make modules don't have enough information to produce a license metadata rule until after fix-notice-deps
# has been called, store the necessary information until later.
+ ifneq ($(LOCAL_NOTICE_FILE),)
+ notice_file:=$(strip $(LOCAL_NOTICE_FILE))
+ else
+ notice_file:=$(strip $(wildcard $(LOCAL_PATH)/LICENSE $(LOCAL_PATH)/LICENCE $(LOCAL_PATH)/NOTICE))
+ endif
+
+ ifeq ($(LOCAL_MODULE_CLASS),GYP)
+ # We ignore NOTICE files for modules of type GYP.
+ notice_file :=
+ endif
+
+ ifeq ($(LOCAL_MODULE_CLASS),FAKE)
+ # We ignore NOTICE files for modules of type FAKE.
+ notice_file :=
+ endif
+
+ # Soong generates stub libraries that don't need NOTICE files
+ ifdef LOCAL_NO_NOTICE_FILE
+ ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ $(call pretty-error,LOCAL_NO_NOTICE_FILE should not be used by Android.mk files)
+ endif
+ notice_file :=
+ endif
+
ifneq (,$(strip $(LOCAL_LICENSE_PACKAGE_NAME)))
license_package_name:=$(strip $(LOCAL_LICENSE_PACKAGE_NAME))
else
@@ -136,8 +136,9 @@
ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps)
ALL_MODULES.$(my_register_name).IS_CONTAINER := $(strip $(filter-out false,$(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container)))
ALL_MODULES.$(my_register_name).PATH := $(strip $(ALL_MODULES.$(my_register_name).PATH) $(local_path))
+
+ ifdef notice_file
+ ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
+ endif # notice_file
endif
-ifdef notice_file
-ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
-endif # notice_file
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index a7e8d35..57df911 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -78,7 +78,7 @@
$(strip $(1)): $(ACONFIG) $(strip $(3))
mkdir -p $$(dir $$(PRIVATE_OUT))
$$(if $$(PRIVATE_IN), \
- $$(ACONFIG) dump --format protobuf --out $$(PRIVATE_OUT) \
+ $$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
$$(addprefix --cache ,$$(PRIVATE_IN)), \
echo -n > $$(PRIVATE_OUT) \
)
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 5bea9b6..9462640 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -63,4 +63,3 @@
$(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
my_prebuilt_src_file :=
-my_preopt_for_extracted_apk :=
diff --git a/core/product.mk b/core/product.mk
index 3667bb1..5515a8a 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -35,7 +35,7 @@
# Indicates that AOSP can use a kernel configured with 4k/16k/64k page sizes.
# The possible values are true or false.
-_product_single_value_vars += PRODUCT_PAGE_SIZE_AGNOSTIC
+_product_single_value_vars += PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
# The resource configuration options to use for this product.
_product_list_vars += PRODUCT_LOCALES
@@ -183,8 +183,6 @@
# Set to true to disable <uses-library> checks for a product.
_product_list_vars += PRODUCT_BROKEN_VERIFY_USES_LIBRARIES
-# All of the apps that we force preopt, this overrides WITH_DEXPREOPT.
-_product_list_vars += PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK
_product_list_vars += PRODUCT_DEXPREOPT_SPEED_APPS
_product_list_vars += PRODUCT_LOADED_BY_PRIVILEGED_MODULES
_product_single_value_vars += PRODUCT_VBOOT_SIGNING_KEY
@@ -443,9 +441,13 @@
# If set, determines whether the build system checks vendor seapp contexts violations.
_product_single_value_vars += PRODUCT_CHECK_VENDOR_SEAPP_VIOLATIONS
+# If set, determines whether the build system checks dev type violations.
+_product_single_value_vars += PRODUCT_CHECK_DEV_TYPE_VIOLATIONS
+
_product_list_vars += PRODUCT_AFDO_PROFILES
_product_single_value_vars += PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API
+_product_single_value_vars += PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE
_product_list_vars += PRODUCT_RELEASE_CONFIG_MAPS
diff --git a/core/product_config.mk b/core/product_config.mk
index 3ee9654..500735e 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -500,18 +500,6 @@
endif
endif
-ifdef PRODUCT_SHIPPING_API_LEVEL
- ifneq (,$(call math_gt_or_eq,29,$(PRODUCT_SHIPPING_API_LEVEL)))
- PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29)
- endif
- ifneq (,$(call math_gt_or_eq,33,$(PRODUCT_SHIPPING_API_LEVEL)))
- PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33)
- endif
- ifneq (,$(call math_gt_or_eq,34,$(PRODUCT_SHIPPING_API_LEVEL)))
- PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34)
- endif
-endif
-
# If build command defines OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS,
# override PRODUCT_EXTRA_VNDK_VERSIONS with it.
ifdef OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS
@@ -590,6 +578,15 @@
endif
.KATI_READONLY := CHECK_VENDOR_SEAPP_VIOLATIONS
+# Boolean variable determining if selinux labels of /dev are enforced
+CHECK_DEV_TYPE_VIOLATIONS := false
+ifneq ($(call math_gt,$(VSR_VENDOR_API_LEVEL),35),)
+ CHECK_DEV_TYPE_VIOLATIONS := true
+else ifneq ($(PRODUCT_CHECK_DEV_TYPE_VIOLATIONS),)
+ CHECK_DEV_TYPE_VIOLATIONS := $(PRODUCT_CHECK_DEV_TYPE_VIOLATIONS)
+endif
+.KATI_READONLY := CHECK_DEV_TYPE_VIOLATIONS
+
define product-overrides-config
$$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
$$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
diff --git a/core/release_config.mk b/core/release_config.mk
index 68dd876..1fb5747 100644
--- a/core/release_config.mk
+++ b/core/release_config.mk
@@ -63,11 +63,19 @@
#
# $1 config name
# $2 release config files
+# $3 overridden release config. Only applied for $(TARGET_RELEASE), not in depth.
define declare-release-config
$(if $(strip $(2)),, \
$(error declare-release-config: config $(strip $(1)) must have release config files) \
)
$(eval _all_release_configs := $(sort $(_all_release_configs) $(strip $(1))))
+ $(if $(strip $(3)), \
+ $(if $(filter $(_all_release_configs), $(strip $(3))),
+ $(if $(filter $(_all_release_configs.$(strip $(1)).OVERRIDES),$(strip $(3))),,
+ $(eval _all_release_configs.$(strip $(1)).OVERRIDES := $(_all_release_configs.$(strip $(1)).OVERRIDES) $(strip $(3)))), \
+ $(error No release config $(strip $(3))) \
+ ) \
+ )
$(eval _all_release_configs.$(strip $(1)).DECLARED_IN := $(_included) $(_all_release_configs.$(strip $(1)).DECLARED_IN))
$(eval _all_release_configs.$(strip $(1)).FILES := $(_all_release_configs.$(strip $(1)).FILES) $(strip $(2)))
endef
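(Illustrative note, names and path hypothetical: a release config can now declare which config it overrides; the overridden config's flag value files are then read before the overriding config's own files.)
    $(call declare-release-config, ap_next, $(local_dir)/ap_next.scl, trunk_staging)
    # With TARGET_RELEASE=ap_next, flag_value_files lists trunk_staging's files first, then ap_next's.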
@@ -105,8 +113,10 @@
# Don't sort this, use it in the order they gave us.
# Do allow duplicate entries, retaining only the first usage.
flag_value_files :=
-$(foreach f,$(_all_release_configs.$(TARGET_RELEASE).FILES), \
- $(if $(filter $(f),$(flag_value_files)),,$(eval flag_value_files += $(f)))\
+$(foreach r,$(_all_release_configs.$(TARGET_RELEASE).OVERRIDES) $(TARGET_RELEASE), \
+ $(foreach f,$(_all_release_configs.$(r).FILES), \
+ $(if $(filter $(f),$(flag_value_files)),,$(eval flag_value_files += $(f)))\
+ )\
)
# Unset variables so they can't use them
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 947f66f..193ac18 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -147,6 +147,7 @@
$(call add_json_bool, ArtUseReadBarrier, $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
$(call add_json_str, BtConfigIncludeDir, $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
$(call add_json_list, DeviceKernelHeaders, $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
+$(call add_json_str, VendorApiLevel, $(BOARD_API_LEVEL))
$(call add_json_str, DeviceVndkVersion, $(BOARD_VNDK_VERSION))
$(call add_json_str, Platform_vndk_version, $(PLATFORM_VNDK_VERSION))
$(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VERSIONS))
@@ -158,7 +159,7 @@
$(call add_json_bool, Malloc_pattern_fill_contents, $(MALLOC_PATTERN_FILL_CONTENTS))
$(call add_json_str, Override_rs_driver, $(OVERRIDE_RS_DRIVER))
$(call add_json_str, DeviceMaxPageSizeSupported, $(TARGET_MAX_PAGE_SIZE_SUPPORTED))
-$(call add_json_bool, DevicePageSizeAgnostic, $(filter true,$(TARGET_PAGE_SIZE_AGNOSTIC)))
+$(call add_json_bool, DeviceNoBionicPageSizeMacro, $(filter true,$(TARGET_NO_BIONIC_PAGE_SIZE_MACRO)))
$(call add_json_bool, UncompressPrivAppDex, $(call invert_bool,$(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))))
$(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
@@ -386,7 +387,6 @@
$(call add_json_str, BoardFlashEraseBlockSize, $(BOARD_FLASH_ERASE_BLOCK_SIZE))
$(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
- $(call add_json_bool, BoardBuildGkiBootImageWithoutRamdisk, $(filter true,$(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK)))
$(call add_json_bool, ProductUseDynamicPartitionSize, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)))
$(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP)))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 9744abf..7f85231 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -93,26 +93,11 @@
$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_res_package))
my_transitive_res_packages := $(intermediates.COMMON)/transitive-res-packages
- $(my_transitive_res_packages): PRIVATE_TRANSITIVE_RES_PACKAGES := $(filter-out $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE),$(LOCAL_SOONG_TRANSITIVE_RES_PACKAGES))
- $(my_transitive_res_packages):
- @echo Write transitive resource package list $@
- rm -f $@
- touch $@
- $(foreach f,$(PRIVATE_TRANSITIVE_RES_PACKAGES),\
- echo "$f" >> $@; )
-
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_TRANSITIVE_RES_PACKAGES),$(my_transitive_res_packages)))
$(call add-dependency,$(my_res_package),$(my_transitive_res_packages))
my_proguard_flags := $(intermediates.COMMON)/export_proguard_flags
- $(my_proguard_flags): $(LOCAL_SOONG_EXPORT_PROGUARD_FLAGS)
- @echo "Export proguard flags: $@"
- rm -f $@
- touch $@
- for f in $+; do \
- echo -e "\n# including $$f" >>$@; \
- cat $$f >>$@; \
- done
-
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_EXPORT_PROGUARD_FLAGS),$(my_proguard_flags)))
$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_proguard_flags))
my_static_library_extra_packages := $(intermediates.COMMON)/extra_packages
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 8e2d58e..eb5c63c 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -5,37 +5,45 @@
COMMA := ,
_NEWLINE := '\n'
+define write-optional-json-list
+$(if $(strip $(2)),'$(COMMA)$(strip $(1)): [$(KATI_foreach_sep w,$(COMMA) ,$(2),"$(w)")]')
+endef
+
+define write-optional-json-bool
+$(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"')
+endef
+
$(MODULE_INFO_JSON):
@echo Generating $@
$(hide) echo -ne '{\n ' > $@
$(hide) echo -ne $(KATI_foreach_sep m,$(COMMA)$(_NEWLINE), $(sort $(ALL_MODULES)),\
'"$(m)": {' \
- '"class": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).CLASS)),"$(w)")],' \
- '"path": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).PATH)),"$(w)")],' \
- '"tags": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TAGS)),"$(w)")],' \
- '"installed": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).INSTALLED)),"$(w)")],' \
- '"compatibility_suites": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES)),"$(w)")],' \
- '"auto_test_config": [$(ALL_MODULES.$(m).auto_test_config)],' \
- '"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)"$(COMMA)' \
- '"test_config": [$(KATI_foreach_sep w,$(COMMA) ,$(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS)),"$(w)")],' \
- '"dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).ALL_DEPS)),"$(w)")],' \
- '"shared_libs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SHARED_LIBS)),"$(w)")],' \
- '"static_libs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).STATIC_LIBS)),"$(w)")],' \
- '"system_shared_libs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SYSTEM_SHARED_LIBS)),"$(w)")],' \
- '"srcs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SRCS)),"$(w)")],' \
- '"srcjars": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SRCJARS)),"$(w)")],' \
- '"classes_jar": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).CLASSES_JAR)),"$(w)")],' \
- '"test_mainline_modules": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES)),"$(w)")],' \
- '"is_unit_test": "$(ALL_MODULES.$(m).IS_UNIT_TEST)"$(COMMA)' \
- '"test_options_tags": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_OPTIONS_TAGS)),"$(w)")],' \
- '"data": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_DATA)),"$(w)")],' \
- '"runtime_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES)),"$(w)")],' \
- '"static_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).LOCAL_STATIC_LIBRARIES)),"$(w)")],' \
- '"data_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_DATA_BINS)),"$(w)")],' \
- '"supported_variants": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS)),"$(w)")],' \
- '"host_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET)),"$(w)")],' \
- '"target_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST)),"$(w)")]' \
- '}')'\n}\n' >> $@
+ '"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)"' \
+ $(call write-optional-json-list, "class", $(sort $(ALL_MODULES.$(m).CLASS))) \
+ $(call write-optional-json-list, "path", $(sort $(ALL_MODULES.$(m).PATH))) \
+ $(call write-optional-json-list, "tags", $(sort $(ALL_MODULES.$(m).TAGS))) \
+ $(call write-optional-json-list, "installed", $(sort $(ALL_MODULES.$(m).INSTALLED))) \
+ $(call write-optional-json-list, "compatibility_suites", $(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES))) \
+ $(call write-optional-json-list, "auto_test_config", $(sort $(ALL_MODULES.$(m).auto_test_config))) \
+ $(call write-optional-json-list, "test_config", $(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS))) \
+ $(call write-optional-json-list, "dependencies", $(sort $(ALL_MODULES.$(m).ALL_DEPS))) \
+ $(call write-optional-json-list, "shared_libs", $(sort $(ALL_MODULES.$(m).SHARED_LIBS))) \
+ $(call write-optional-json-list, "static_libs", $(sort $(ALL_MODULES.$(m).STATIC_LIBS))) \
+ $(call write-optional-json-list, "system_shared_libs", $(sort $(ALL_MODULES.$(m).SYSTEM_SHARED_LIBS))) \
+ $(call write-optional-json-list, "srcs", $(sort $(ALL_MODULES.$(m).SRCS))) \
+ $(call write-optional-json-list, "srcjars", $(sort $(ALL_MODULES.$(m).SRCJARS))) \
+ $(call write-optional-json-list, "classes_jar", $(sort $(ALL_MODULES.$(m).CLASSES_JAR))) \
+ $(call write-optional-json-list, "test_mainline_modules", $(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES))) \
+ $(call write-optional-json-bool, "is_unit_test", $(ALL_MODULES.$(m).IS_UNIT_TEST)) \
+ $(call write-optional-json-list, "test_options_tags", $(sort $(ALL_MODULES.$(m).TEST_OPTIONS_TAGS))) \
+ $(call write-optional-json-list, "data", $(sort $(ALL_MODULES.$(m).TEST_DATA))) \
+ $(call write-optional-json-list, "runtime_dependencies", $(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES))) \
+ $(call write-optional-json-list, "static_dependencies", $(sort $(ALL_MODULES.$(m).LOCAL_STATIC_LIBRARIES))) \
+ $(call write-optional-json-list, "data_dependencies", $(sort $(ALL_MODULES.$(m).TEST_DATA_BINS))) \
+ $(call write-optional-json-list, "supported_variants", $(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS))) \
+ $(call write-optional-json-list, "host_dependencies", $(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET))) \
+ $(call write-optional-json-list, "target_dependencies", $(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST))) \
+ '}')'\n}\n' >> $@
droidcore-unbundled: $(MODULE_INFO_JSON)
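(Illustrative note, module data invented: with the helpers above, attributes with no value are omitted instead of being written as empty arrays, so a module-info.json entry now looks roughly like this.)
    "MyTestModule": {
      "module_name": "MyTestModule",
      "class": ["JAVA_LIBRARIES"],
      "path": ["packages/apps/Example"],
      "installed": ["out/target/product/generic/system/framework/MyTestModule.jar"],
      "is_unit_test": "true"
    }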
diff --git a/core/version_util.mk b/core/version_util.mk
index dfa0277..103b64e 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -100,7 +100,7 @@
PLATFORM_VERSION_KNOWN_CODENAMES := \
Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
-JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
+JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch L Lollipop LollipopMr1 M N NMr1 O OMr1 P \
Q R S Sv2 Tiramisu UpsideDownCake VanillaIceCream
# Convert from space separated list to comma separated
diff --git a/envsetup.sh b/envsetup.sh
index 3d29ed7..cc808d2 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -836,15 +836,21 @@
# Note this is the string "release", not the value of the variable.
export TARGET_BUILD_TYPE=release
+ [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || echo
+
+ set_stuff_for_environment
+ [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || printconfig
+
+ if [ "${TARGET_BUILD_VARIANT}" = "userdebug" ] && [[ -z "${ANDROID_QUIET_BUILD}" ]]; then
+ echo
+ echo "Want FASTER LOCAL BUILDS? Use -eng instead of -userdebug (however for" \
+ "performance benchmarking continue to use userdebug)"
+ fi
if [ $used_lunch_menu -eq 1 ]; then
echo
echo "Hint: next time you can simply run 'lunch $selection'"
fi
- [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || echo
-
- set_stuff_for_environment
- [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || printconfig
destroy_build_var_cache
if [[ -n "${CHECK_MU_CONFIG:-}" ]]; then
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index d612f92..098ed27 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -94,6 +94,7 @@
framework-graphics \
framework-minus-apex \
framework-minus-apex-install-dependencies \
+ framework-nfc \
framework-res \
framework-sysconfig.xml \
fsck.erofs \
@@ -306,20 +307,17 @@
system_manifest.xml \
system_compatibility_matrix.xml \
-HIDL_SUPPORT_SERVICES := \
- hwservicemanager \
- android.hidl.allocator@1.0-service \
- android.hidl.memory@1.0-impl \
-
-# TODO(b/299166571) Remove this after the artifact path requirements checker picks up
-# this library correctly with the *SHIPPING_API_LEVEL_34 variable
-PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
- $(TARGET_COPY_OUT_SYSTEM)/lib/hw/android.hidl.memory@1.0-impl.so \
- $(TARGET_COPY_OUT_SYSTEM)/lib64/hw/android.hidl.memory@1.0-impl.so \
-
# Base modules when shipping api level is less than or equal to 34
PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
- $(HIDL_SUPPORT_SERVICES) \
+ android.hidl.memory@1.0-impl \
+
+# hwservicemanager is now installed on system_ext, but apexes might be using
+# old libraries that are expecting it to be installed on system. This allows
+# those apexes to continue working. The symlink can be removed once we are sure
+# there are no devices using hwservicemanager (when Android V launching devices
+# are no longer supported for dessert upgrades).
+PRODUCT_PACKAGES += \
+ hwservicemanager_compat_symlink_module \
PRODUCT_PACKAGES_ARM64 := libclang_rt.hwasan \
libclang_rt.hwasan.bootstrap \
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index 852d7ca..d8c1863 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -22,3 +22,8 @@
passwd_system_ext \
selinux_policy_system_ext \
system_ext_manifest.xml \
+
+# Base modules when shipping api level is less than or equal to 34
+PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
+ hwservicemanager \
+ android.hidl.allocator@1.0-service \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index b02a583..55fcf2f 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -50,6 +50,7 @@
PRODUCT_BOOT_JARS += \
framework-minus-apex \
framework-graphics \
+ framework-nfc \
ext \
telephony-common \
voip-common \
diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk
index 5726c06..5e2a694 100644
--- a/target/product/fullmte.mk
+++ b/target/product/fullmte.mk
@@ -25,3 +25,4 @@
SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap)
endif
PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync
+PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE := 131072
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 6d40436..38efde4 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -128,10 +128,6 @@
_base_mk_allowed_list :=
-# TODO(b/299166571) Remove this after the artifact path requirements checker picks up
-# hwservicemanager correctly.
-PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += $(TARGET_COPY_OUT_SYSTEM)/bin/hwservicemanager
-
_my_allowed_list := $(_base_mk_allowed_list)
# For mainline, system.img should be mounted at /, so we include ROOT here.
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index 51a1ef6..ba0912c 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -24,10 +24,6 @@
# Speed profile services and wifi-service to reduce RAM and storage.
PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile
-# Always preopt extracted APKs to prevent extracting out of the APK for gms
-# modules.
-PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK := true
-
# Use a profile based boot image for this device. Note that this is currently a
# generic profile and not Android Go optimized.
PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := true
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 6c93dd7..219b90c 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -56,7 +56,6 @@
MmsService \
MtpService \
MusicFX \
- NfcNci \
PacProcessor \
preinstalled-packages-platform-handheld-system.xml \
PrintRecommendationService \
@@ -73,6 +72,7 @@
UserDictionaryProvider \
VpnDialogs \
vr \
+ $(RELEASE_PACKAGE_NFC_STACK)
PRODUCT_SYSTEM_SERVER_APPS += \
diff --git a/target/product/handheld_system_ext.mk b/target/product/handheld_system_ext.mk
index 187b627..1218f7a 100644
--- a/target/product/handheld_system_ext.mk
+++ b/target/product/handheld_system_ext.mk
@@ -29,3 +29,8 @@
StorageManager \
SystemUI \
WallpaperCropper \
+
+# Base modules when shipping api level is less than or equal to 34
+PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
+ hwservicemanager \
+ android.hidl.allocator@1.0-service \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 1e01b33..a9d478d 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -134,23 +134,8 @@
# This option is for faster iteration during development and should never be enabled for production.
ifneq (,$(filter true,$(OVERRIDE_DISABLE_DEXOPT_ALL)))
PRODUCT_SYSTEM_PROPERTIES += \
- pm.dexopt.post-boot=skip \
- pm.dexopt.first-boot=skip \
- pm.dexopt.boot-after-ota=skip \
- pm.dexopt.boot-after-mainline-update=skip \
- pm.dexopt.install=skip \
- pm.dexopt.install-fast=skip \
- pm.dexopt.install-bulk=skip \
- pm.dexopt.install-bulk-secondary=skip \
- pm.dexopt.install-bulk-downgraded=skip \
- pm.dexopt.install-bulk-secondary-downgraded=skip \
- pm.dexopt.bg-dexopt=skip \
- pm.dexopt.ab-ota=skip \
- pm.dexopt.inactive=skip \
- pm.dexopt.cmdline=skip \
- pm.dexopt.shared=skip
-
- PRODUCT_SYSTEM_PROPERTIES += dalvik.vm.disable-odrefresh=true
+ dalvik.vm.disable-art-service-dexopt=true \
+ dalvik.vm.disable-odrefresh=true
# Disable all dexpreopt activities except for the ART boot image.
# We have to dexpreopt the ART boot image because they are used by ART tests. This should not
diff --git a/target/product/telephony_system_ext.mk b/target/product/telephony_system_ext.mk
index f81a607..f821381 100644
--- a/target/product/telephony_system_ext.mk
+++ b/target/product/telephony_system_ext.mk
@@ -21,3 +21,7 @@
PRODUCT_PACKAGES += \
CarrierConfig \
EmergencyInfo \
+
+PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
+ hwservicemanager \
+ android.hidl.allocator@1.0-service \
diff --git a/tools/OWNERS b/tools/OWNERS
deleted file mode 100644
index 7d666f1..0000000
--- a/tools/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-per-file warn.py,checkowners.py = chh@google.com
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/protos/aconfig.proto
index d5e2868..ed4b24c 100644
--- a/tools/aconfig/protos/aconfig.proto
+++ b/tools/aconfig/protos/aconfig.proto
@@ -40,11 +40,28 @@
optional string description = 3;
repeated string bug = 4;
optional bool is_fixed_read_only = 5;
+ optional bool is_exported = 6;
+ optional flag_metadata metadata = 7;
};
+// Optional metadata about the flag, such as its purpose and its intended form factors.
+// Can influence the applied policies and testing strategy.
+message flag_metadata {
+ enum flag_purpose {
+ PURPOSE_UNSPECIFIED = 0;
+ PURPOSE_FEATURE = 1;
+ PURPOSE_BUGFIX = 2;
+ }
+
+ optional flag_purpose purpose = 1;
+
+ // TODO(b/315025930): Add field to designate intended target device form factor(s), such as phone, watch or other.
+}
+
message flag_declarations {
optional string package = 1;
repeated flag_declaration flag = 2;
+ optional string container = 3;
};
message flag_value {
@@ -77,6 +94,9 @@
optional flag_permission permission = 7;
repeated tracepoint trace = 8;
optional bool is_fixed_read_only = 9;
+ optional bool is_exported = 10;
+ optional string container = 11;
+ optional flag_metadata metadata = 12;
}
message parsed_flags {
diff --git a/tools/aconfig/src/codegen.rs b/tools/aconfig/src/codegen.rs
index b7fb08f..fef7a3f 100644
--- a/tools/aconfig/src/codegen.rs
+++ b/tools/aconfig/src/codegen.rs
@@ -38,6 +38,10 @@
s.split('.').all(is_valid_name_ident)
}
+pub fn is_valid_container_ident(s: &str) -> bool {
+ is_valid_name_ident(s) || s.split('.').all(is_valid_name_ident)
+}
+
pub fn create_device_config_ident(package: &str, flag_name: &str) -> Result<String> {
ensure!(is_valid_package_ident(package), "bad package");
ensure!(is_valid_name_ident(flag_name), "bad flag name");
@@ -85,6 +89,29 @@
}
#[test]
+ fn test_is_valid_container_ident() {
+ assert!(is_valid_container_ident("foo.bar"));
+ assert!(is_valid_container_ident("foo.bar_baz"));
+ assert!(is_valid_container_ident("foo.bar.a123"));
+ assert!(is_valid_container_ident("foo"));
+ assert!(is_valid_container_ident("foo_bar_123"));
+
+ assert!(!is_valid_container_ident(""));
+ assert!(!is_valid_container_ident("foo._bar"));
+ assert!(!is_valid_container_ident("_foo"));
+ assert!(!is_valid_container_ident("123_foo"));
+ assert!(!is_valid_container_ident("foo-bar"));
+ assert!(!is_valid_container_ident("foo-b\u{00e5}r"));
+ assert!(!is_valid_container_ident("foo.bar.123"));
+ assert!(!is_valid_container_ident(".foo.bar"));
+ assert!(!is_valid_container_ident("foo.bar."));
+ assert!(!is_valid_container_ident("."));
+ assert!(!is_valid_container_ident(".."));
+ assert!(!is_valid_container_ident("foo..bar"));
+ assert!(!is_valid_container_ident("foo.__bar"));
+ }
+
+ #[test]
fn test_create_device_config_ident() {
assert_eq!(
"com.foo.bar.some_flag",
diff --git a/tools/aconfig/src/codegen_cpp.rs b/tools/aconfig/src/codegen_cpp.rs
index 42d900b..c536260 100644
--- a/tools/aconfig/src/codegen_cpp.rs
+++ b/tools/aconfig/src/codegen_cpp.rs
@@ -151,12 +151,9 @@
#ifdef __cplusplus
#include <memory>
-#include <vector>
namespace com::android::aconfig::test {
-extern std::vector<int8_t> cache_;
-
class flag_provider_interface {
public:
virtual ~flag_provider_interface() = default;
@@ -165,6 +162,8 @@
virtual bool disabled_rw() = 0;
+ virtual bool disabled_rw_exported() = 0;
+
virtual bool disabled_rw_in_other_namespace() = 0;
virtual bool enabled_fixed_ro() = 0;
@@ -184,6 +183,10 @@
return provider_->disabled_rw();
}
+inline bool disabled_rw_exported() {
+ return provider_->disabled_rw_exported();
+}
+
inline bool disabled_rw_in_other_namespace() {
return provider_->disabled_rw_in_other_namespace();
}
@@ -209,6 +212,8 @@
bool com_android_aconfig_test_disabled_rw();
+bool com_android_aconfig_test_disabled_rw_exported();
+
bool com_android_aconfig_test_disabled_rw_in_other_namespace();
bool com_android_aconfig_test_enabled_fixed_ro();
@@ -244,6 +249,10 @@
virtual void disabled_rw(bool val) = 0;
+ virtual bool disabled_rw_exported() = 0;
+
+ virtual void disabled_rw_exported(bool val) = 0;
+
virtual bool disabled_rw_in_other_namespace() = 0;
virtual void disabled_rw_in_other_namespace(bool val) = 0;
@@ -281,6 +290,14 @@
provider_->disabled_rw(val);
}
+inline bool disabled_rw_exported() {
+ return provider_->disabled_rw_exported();
+}
+
+inline void disabled_rw_exported(bool val) {
+ provider_->disabled_rw_exported(val);
+}
+
inline bool disabled_rw_in_other_namespace() {
return provider_->disabled_rw_in_other_namespace();
}
@@ -330,6 +347,10 @@
void set_com_android_aconfig_test_disabled_rw(bool val);
+bool com_android_aconfig_test_disabled_rw_exported();
+
+void set_com_android_aconfig_test_disabled_rw_exported(bool val);
+
bool com_android_aconfig_test_disabled_rw_in_other_namespace();
void set_com_android_aconfig_test_disabled_rw_in_other_namespace(bool val);
@@ -359,6 +380,7 @@
const PROD_SOURCE_FILE_EXPECTED: &str = r#"
#include "com_android_aconfig_test.h"
#include <server_configurable_flags/get_flags.h>
+#include <vector>
namespace com::android::aconfig::test {
@@ -379,14 +401,24 @@
return cache_[0];
}
- virtual bool disabled_rw_in_other_namespace() override {
+ virtual bool disabled_rw_exported() override {
if (cache_[1] == -1) {
cache_[1] = server_configurable_flags::GetServerConfigurableFlag(
+ "aconfig_flags.aconfig_test",
+ "com.android.aconfig.test.disabled_rw_exported",
+ "false") == "true";
+ }
+ return cache_[1];
+ }
+
+ virtual bool disabled_rw_in_other_namespace() override {
+ if (cache_[2] == -1) {
+ cache_[2] = server_configurable_flags::GetServerConfigurableFlag(
"aconfig_flags.other_namespace",
"com.android.aconfig.test.disabled_rw_in_other_namespace",
"false") == "true";
}
- return cache_[1];
+ return cache_[2];
}
virtual bool enabled_fixed_ro() override {
@@ -398,19 +430,19 @@
}
virtual bool enabled_rw() override {
- if (cache_[2] == -1) {
- cache_[2] = server_configurable_flags::GetServerConfigurableFlag(
+ if (cache_[3] == -1) {
+ cache_[3] = server_configurable_flags::GetServerConfigurableFlag(
"aconfig_flags.aconfig_test",
"com.android.aconfig.test.enabled_rw",
"true") == "true";
}
- return cache_[2];
+ return cache_[3];
}
+ private:
+ std::vector<int8_t> cache_ = std::vector<int8_t>(4, -1);
};
- std::vector<int8_t> cache_ = std::vector<int8_t>(3, -1);
-
std::unique_ptr<flag_provider_interface> provider_ =
std::make_unique<flag_provider>();
}
@@ -423,6 +455,10 @@
return com::android::aconfig::test::disabled_rw();
}
+bool com_android_aconfig_test_disabled_rw_exported() {
+ return com::android::aconfig::test::disabled_rw_exported();
+}
+
bool com_android_aconfig_test_disabled_rw_in_other_namespace() {
return com::android::aconfig::test::disabled_rw_in_other_namespace();
}
@@ -487,6 +523,22 @@
overrides_["disabled_rw"] = val;
}
+ virtual bool disabled_rw_exported() override {
+ auto it = overrides_.find("disabled_rw_exported");
+ if (it != overrides_.end()) {
+ return it->second;
+ } else {
+ return server_configurable_flags::GetServerConfigurableFlag(
+ "aconfig_flags.aconfig_test",
+ "com.android.aconfig.test.disabled_rw_exported",
+ "false") == "true";
+ }
+ }
+
+ virtual void disabled_rw_exported(bool val) override {
+ overrides_["disabled_rw_exported"] = val;
+ }
+
virtual bool disabled_rw_in_other_namespace() override {
auto it = overrides_.find("disabled_rw_in_other_namespace");
if (it != overrides_.end()) {
@@ -572,11 +624,20 @@
com::android::aconfig::test::disabled_rw(val);
}
+
+bool com_android_aconfig_test_disabled_rw_exported() {
+ return com::android::aconfig::test::disabled_rw_exported();
+}
+
+void set_com_android_aconfig_test_disabled_rw_exported(bool val) {
+ com::android::aconfig::test::disabled_rw_exported(val);
+}
+
+
bool com_android_aconfig_test_disabled_rw_in_other_namespace() {
return com::android::aconfig::test::disabled_rw_in_other_namespace();
}
-
void set_com_android_aconfig_test_disabled_rw_in_other_namespace(bool val) {
com::android::aconfig::test::disabled_rw_in_other_namespace(val);
}
@@ -636,6 +697,8 @@
match mode {
CodegenMode::Production => EXPORTED_PROD_HEADER_EXPECTED,
CodegenMode::Test => EXPORTED_TEST_HEADER_EXPECTED,
+ CodegenMode::Exported =>
+ todo!("exported mode not yet supported for cpp, see b/313894653."),
},
generated_files_map.get(&target_file_path).unwrap()
)
@@ -649,6 +712,8 @@
match mode {
CodegenMode::Production => PROD_SOURCE_FILE_EXPECTED,
CodegenMode::Test => TEST_SOURCE_FILE_EXPECTED,
+ CodegenMode::Exported =>
+ todo!("exported mode not yet supported for cpp, see b/313894653."),
},
generated_files_map.get(&target_file_path).unwrap()
)
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen_java.rs
index a822cd5..b3e5e6c 100644
--- a/tools/aconfig/src/codegen_java.rs
+++ b/tools/aconfig/src/codegen_java.rs
@@ -39,6 +39,7 @@
flag_elements.iter().map(|fe| format_property_name(&fe.device_config_namespace)).collect();
let is_read_write = flag_elements.iter().any(|elem| elem.is_read_write);
let is_test_mode = codegen_mode == CodegenMode::Test;
+ let library_exported = codegen_mode == CodegenMode::Exported;
let context = Context {
flag_elements,
namespace_flags,
@@ -46,6 +47,7 @@
is_read_write,
properties_set,
package_name: package.to_string(),
+ library_exported,
};
let mut template = TinyTemplate::new();
template.add_template("Flags.java", include_str!("../templates/Flags.java.template"))?;
@@ -103,6 +105,7 @@
pub is_read_write: bool,
pub properties_set: BTreeSet<String>,
pub package_name: String,
+ pub library_exported: bool,
}
#[derive(Serialize, Debug)]
@@ -120,6 +123,7 @@
pub is_read_write: bool,
pub method_name: String,
pub properties: String,
+ pub exported: bool,
}
fn create_flag_element(package: &str, pf: &ProtoParsedFlag) -> FlagElement {
@@ -133,6 +137,7 @@
is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
method_name: format_java_method_name(pf.name()),
properties: format_property_name(pf.namespace()),
+ exported: pf.is_exported.unwrap_or(false),
}
}
@@ -179,6 +184,8 @@
@UnsupportedAppUsage
boolean disabledRw();
@UnsupportedAppUsage
+ boolean disabledRwExported();
+ @UnsupportedAppUsage
boolean disabledRwInOtherNamespace();
@com.android.aconfig.annotations.AssumeTrueForR8
@UnsupportedAppUsage
@@ -202,6 +209,8 @@
/** @hide */
public static final String FLAG_DISABLED_RW = "com.android.aconfig.test.disabled_rw";
/** @hide */
+ public static final String FLAG_DISABLED_RW_EXPORTED = "com.android.aconfig.test.disabled_rw_exported";
+ /** @hide */
public static final String FLAG_DISABLED_RW_IN_OTHER_NAMESPACE = "com.android.aconfig.test.disabled_rw_in_other_namespace";
/** @hide */
public static final String FLAG_ENABLED_FIXED_RO = "com.android.aconfig.test.enabled_fixed_ro";
@@ -220,6 +229,10 @@
return FEATURE_FLAGS.disabledRw();
}
@UnsupportedAppUsage
+ public static boolean disabledRwExported() {
+ return FEATURE_FLAGS.disabledRwExported();
+ }
+ @UnsupportedAppUsage
public static boolean disabledRwInOtherNamespace() {
return FEATURE_FLAGS.disabledRwInOtherNamespace();
}
@@ -262,6 +275,11 @@
}
@Override
@UnsupportedAppUsage
+ public boolean disabledRwExported() {
+ return getValue(Flags.FLAG_DISABLED_RW_EXPORTED);
+ }
+ @Override
+ @UnsupportedAppUsage
public boolean disabledRwInOtherNamespace() {
return getValue(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE);
}
@@ -302,6 +320,7 @@
Map.ofEntries(
Map.entry(Flags.FLAG_DISABLED_RO, false),
Map.entry(Flags.FLAG_DISABLED_RW, false),
+ Map.entry(Flags.FLAG_DISABLED_RW_EXPORTED, false),
Map.entry(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false),
Map.entry(Flags.FLAG_ENABLED_FIXED_RO, false),
Map.entry(Flags.FLAG_ENABLED_RO, false),
@@ -336,6 +355,7 @@
private static boolean aconfig_test_is_cached = false;
private static boolean other_namespace_is_cached = false;
private static boolean disabledRw = false;
+ private static boolean disabledRwExported = false;
private static boolean disabledRwInOtherNamespace = false;
private static boolean enabledRw = true;
@@ -345,6 +365,8 @@
Properties properties = DeviceConfig.getProperties("aconfig_test");
disabledRw =
properties.getBoolean("com.android.aconfig.test.disabled_rw", false);
+ disabledRwExported =
+ properties.getBoolean("com.android.aconfig.test.disabled_rw_exported", false);
enabledRw =
properties.getBoolean("com.android.aconfig.test.enabled_rw", true);
} catch (NullPointerException e) {
@@ -394,6 +416,14 @@
}
@Override
@UnsupportedAppUsage
+ public boolean disabledRwExported() {
+ if (!aconfig_test_is_cached) {
+ load_overrides_aconfig_test();
+ }
+ return disabledRwExported;
+ }
+ @Override
+ @UnsupportedAppUsage
public boolean disabledRwInOtherNamespace() {
if (!other_namespace_is_cached) {
load_overrides_other_namespace();
@@ -449,6 +479,202 @@
}
#[test]
+ fn test_generate_java_code_exported() {
+ let parsed_flags = crate::test::parse_test_flags();
+ let generated_files = generate_java_code(
+ crate::test::TEST_PACKAGE,
+ parsed_flags.parsed_flag.iter(),
+ CodegenMode::Exported,
+ )
+ .unwrap();
+
+ let expect_flags_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ /** @hide */
+ public final class Flags {
+ /** @hide */
+ public static final String FLAG_DISABLED_RW = "com.android.aconfig.test.disabled_rw";
+ /** @hide */
+ public static final String FLAG_DISABLED_RW_EXPORTED = "com.android.aconfig.test.disabled_rw_exported";
+
+ @UnsupportedAppUsage
+ public static boolean disabledRw() {
+ return FEATURE_FLAGS.disabledRw();
+ }
+ @UnsupportedAppUsage
+ public static boolean disabledRwExported() {
+ return FEATURE_FLAGS.disabledRwExported();
+ }
+ private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
+ }
+ "#;
+
+ let expect_feature_flags_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ /** @hide */
+ public interface FeatureFlags {
+ @UnsupportedAppUsage
+ boolean disabledRw();
+ @UnsupportedAppUsage
+ boolean disabledRwExported();
+ }
+ "#;
+
+ let expect_feature_flags_impl_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ import android.provider.DeviceConfig;
+ import android.provider.DeviceConfig.Properties;
+ /** @hide */
+ public final class FeatureFlagsImpl implements FeatureFlags {
+ private static boolean aconfig_test_is_cached = false;
+ private static boolean other_namespace_is_cached = false;
+ private static boolean disabledRw = false;
+ private static boolean disabledRwExported = false;
+
+
+ private void load_overrides_aconfig_test() {
+ try {
+ Properties properties = DeviceConfig.getProperties("aconfig_test");
+ disabledRw =
+ properties.getBoolean("com.android.aconfig.test.disabled_rw", false);
+ disabledRwExported =
+ properties.getBoolean("com.android.aconfig.test.disabled_rw_exported", false);
+ } catch (NullPointerException e) {
+ throw new RuntimeException(
+ "Cannot read value from namespace aconfig_test "
+ + "from DeviceConfig. It could be that the code using flag "
+ + "executed before SettingsProvider initialization. Please use "
+ + "fixed read-only flag by adding is_fixed_read_only: true in "
+ + "flag declaration.",
+ e
+ );
+ }
+ aconfig_test_is_cached = true;
+ }
+
+ private void load_overrides_other_namespace() {
+ try {
+ Properties properties = DeviceConfig.getProperties("other_namespace");
+ } catch (NullPointerException e) {
+ throw new RuntimeException(
+ "Cannot read value from namespace other_namespace "
+ + "from DeviceConfig. It could be that the code using flag "
+ + "executed before SettingsProvider initialization. Please use "
+ + "fixed read-only flag by adding is_fixed_read_only: true in "
+ + "flag declaration.",
+ e
+ );
+ }
+ other_namespace_is_cached = true;
+ }
+
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRw() {
+ if (!aconfig_test_is_cached) {
+ load_overrides_aconfig_test();
+ }
+ return disabledRw;
+ }
+
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRwExported() {
+ if (!aconfig_test_is_cached) {
+ load_overrides_aconfig_test();
+ }
+ return disabledRwExported;
+ }
+ }"#;
+
+ let expect_fake_feature_flags_impl_content = r#"
+ package com.android.aconfig.test;
+ // TODO(b/303773055): Remove the annotation after access issue is resolved.
+ import android.compat.annotation.UnsupportedAppUsage;
+ import java.util.HashMap;
+ import java.util.Map;
+ /** @hide */
+ public class FakeFeatureFlagsImpl implements FeatureFlags {
+ public FakeFeatureFlagsImpl() {
+ resetAll();
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRw() {
+ return getValue(Flags.FLAG_DISABLED_RW);
+ }
+ @Override
+ @UnsupportedAppUsage
+ public boolean disabledRwExported() {
+ return getValue(Flags.FLAG_DISABLED_RW_EXPORTED);
+ }
+ public void setFlag(String flagName, boolean value) {
+ if (!this.mFlagMap.containsKey(flagName)) {
+ throw new IllegalArgumentException("no such flag " + flagName);
+ }
+ this.mFlagMap.put(flagName, value);
+ }
+ public void resetAll() {
+ for (Map.Entry entry : mFlagMap.entrySet()) {
+ entry.setValue(null);
+ }
+ }
+ private boolean getValue(String flagName) {
+ Boolean value = this.mFlagMap.get(flagName);
+ if (value == null) {
+ throw new IllegalArgumentException(flagName + " is not set");
+ }
+ return value;
+ }
+ private Map<String, Boolean> mFlagMap = new HashMap<>(
+ Map.ofEntries(
+ Map.entry(Flags.FLAG_DISABLED_RO, false),
+ Map.entry(Flags.FLAG_DISABLED_RW, false),
+ Map.entry(Flags.FLAG_DISABLED_RW_EXPORTED, false),
+ Map.entry(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false),
+ Map.entry(Flags.FLAG_ENABLED_FIXED_RO, false),
+ Map.entry(Flags.FLAG_ENABLED_RO, false),
+ Map.entry(Flags.FLAG_ENABLED_RW, false)
+ )
+ );
+ }
+ "#;
+
+ let mut file_set = HashMap::from([
+ ("com/android/aconfig/test/Flags.java", expect_flags_content),
+ ("com/android/aconfig/test/FeatureFlags.java", expect_feature_flags_content),
+ ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_feature_flags_impl_content),
+ (
+ "com/android/aconfig/test/FakeFeatureFlagsImpl.java",
+ expect_fake_feature_flags_impl_content,
+ ),
+ ]);
+
+ for file in generated_files {
+ let file_path = file.path.to_str().unwrap();
+ assert!(file_set.contains_key(file_path), "Cannot find {}", file_path);
+ assert_eq!(
+ None,
+ crate::test::first_significant_code_diff(
+ file_set.get(file_path).unwrap(),
+ &String::from_utf8(file.contents).unwrap()
+ ),
+ "File {} content is not correct",
+ file_path
+ );
+ file_set.remove(file_path);
+ }
+
+ assert!(file_set.is_empty());
+ }
+
+ #[test]
fn test_generate_java_code_test() {
let parsed_flags = crate::test::parse_test_flags();
let generated_files = generate_java_code(
@@ -489,6 +715,12 @@
}
@Override
@UnsupportedAppUsage
+ public boolean disabledRwExported() {
+ throw new UnsupportedOperationException(
+ "Method is not implemented.");
+ }
+ @Override
+ @UnsupportedAppUsage
public boolean disabledRwInOtherNamespace() {
throw new UnsupportedOperationException(
"Method is not implemented.");
diff --git a/tools/aconfig/src/codegen_rust.rs b/tools/aconfig/src/codegen_rust.rs
index d8675e7..502cec8 100644
--- a/tools/aconfig/src/codegen_rust.rs
+++ b/tools/aconfig/src/codegen_rust.rs
@@ -45,6 +45,9 @@
match codegen_mode {
CodegenMode::Production => include_str!("../templates/rust_prod.template"),
CodegenMode::Test => include_str!("../templates/rust_test.template"),
+ CodegenMode::Exported => {
+ todo!("exported mode not yet supported for rust, see b/313894653.")
+ }
},
)?;
let contents = template.render("rust_code_gen", &context)?;
@@ -104,6 +107,12 @@
"com.android.aconfig.test.disabled_rw",
"false") == "true";
+ /// flag value cache for disabled_rw_exported
+ static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag(
+ "aconfig_flags.aconfig_test",
+ "com.android.aconfig.test.disabled_rw_exported",
+ "false") == "true";
+
/// flag value cache for disabled_rw_in_other_namespace
static ref CACHED_disabled_rw_in_other_namespace: bool = flags_rust::GetServerConfigurableFlag(
"aconfig_flags.other_namespace",
@@ -115,6 +124,7 @@
"aconfig_flags.aconfig_test",
"com.android.aconfig.test.enabled_rw",
"true") == "true";
+
}
impl FlagProvider {
@@ -128,6 +138,11 @@
*CACHED_disabled_rw
}
+ /// query flag disabled_rw_exported
+ pub fn disabled_rw_exported(&self) -> bool {
+ *CACHED_disabled_rw_exported
+ }
+
/// query flag disabled_rw_in_other_namespace
pub fn disabled_rw_in_other_namespace(&self) -> bool {
*CACHED_disabled_rw_in_other_namespace
@@ -164,6 +179,12 @@
PROVIDER.disabled_rw()
}
+/// query flag disabled_rw_exported
+#[inline(always)]
+pub fn disabled_rw_exported() -> bool {
+ PROVIDER.disabled_rw_exported()
+}
+
/// query flag disabled_rw_in_other_namespace
#[inline(always)]
pub fn disabled_rw_in_other_namespace() -> bool {
@@ -228,6 +249,21 @@
self.overrides.insert("disabled_rw", val);
}
+ /// query flag disabled_rw_exported
+ pub fn disabled_rw_exported(&self) -> bool {
+ self.overrides.get("disabled_rw_exported").copied().unwrap_or(
+ flags_rust::GetServerConfigurableFlag(
+ "aconfig_flags.aconfig_test",
+ "com.android.aconfig.test.disabled_rw_exported",
+ "false") == "true"
+ )
+ }
+
+ /// set flag disabled_rw_exported
+ pub fn set_disabled_rw_exported(&mut self, val: bool) {
+ self.overrides.insert("disabled_rw_exported", val);
+ }
+
/// query flag disabled_rw_in_other_namespace
pub fn disabled_rw_in_other_namespace(&self) -> bool {
self.overrides.get("disabled_rw_in_other_namespace").copied().unwrap_or(
@@ -317,6 +353,18 @@
PROVIDER.lock().unwrap().set_disabled_rw(val);
}
+/// query flag disabled_rw_exported
+#[inline(always)]
+pub fn disabled_rw_exported() -> bool {
+ PROVIDER.lock().unwrap().disabled_rw_exported()
+}
+
+/// set flag disabled_rw_exported
+#[inline(always)]
+pub fn set_disabled_rw_exported(val: bool) {
+ PROVIDER.lock().unwrap().set_disabled_rw_exported(val);
+}
+
/// query flag disabled_rw_in_other_namespace
#[inline(always)]
pub fn disabled_rw_in_other_namespace() -> bool {
@@ -383,6 +431,8 @@
match mode {
CodegenMode::Production => PROD_EXPECTED,
CodegenMode::Test => TEST_EXPECTED,
+ CodegenMode::Exported =>
+ todo!("exported mode not yet supported for rust, see b/313894653."),
},
&String::from_utf8(generated.contents).unwrap()
)
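
Note: the test-mode Rust provider shown above keeps per-flag overrides in a map and only falls back to the server-configured value when no override has been set. A minimal, self-contained sketch of that pattern; the real generated code falls back to flags_rust::GetServerConfigurableFlag, which is replaced here by a hard-coded default so the sketch runs on its own:

use std::collections::HashMap;

// Sketch of the override-map pattern used by the generated test-mode code.
// The fixed `false` default stands in for the DeviceConfig lookup.
struct FlagProvider {
    overrides: HashMap<&'static str, bool>,
}

impl FlagProvider {
    fn disabled_rw_exported(&self) -> bool {
        self.overrides.get("disabled_rw_exported").copied().unwrap_or(false)
    }

    fn set_disabled_rw_exported(&mut self, val: bool) {
        self.overrides.insert("disabled_rw_exported", val);
    }
}

fn main() {
    let mut provider = FlagProvider { overrides: HashMap::new() };
    assert!(!provider.disabled_rw_exported());
    provider.set_disabled_rw_exported(true);
    assert!(provider.disabled_rw_exported());
}
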
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
index c8c7975..1e80d26 100644
--- a/tools/aconfig/src/commands.rs
+++ b/tools/aconfig/src/commands.rs
@@ -24,7 +24,8 @@
use crate::codegen_java::generate_java_code;
use crate::codegen_rust::generate_rust_code;
use crate::protos::{
- ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint,
+ ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags,
+ ProtoTracepoint,
};
pub struct Input {
@@ -57,6 +58,7 @@
pub fn parse_flags(
package: &str,
+ container: Option<&str>,
declarations: Vec<Input>,
values: Vec<Input>,
default_permission: ProtoFlagPermission,
@@ -79,12 +81,24 @@
package,
flag_declarations.package()
);
+ if let Some(c) = container {
+ ensure!(
+ c == flag_declarations.container(),
+ "failed to parse {}: expected container {}, got {}",
+ input.source,
+ c,
+ flag_declarations.container()
+ );
+ }
for mut flag_declaration in flag_declarations.flag.into_iter() {
crate::protos::flag_declaration::verify_fields(&flag_declaration)
.with_context(|| input.error_context())?;
// create ParsedFlag using FlagDeclaration and default values
let mut parsed_flag = ProtoParsedFlag::new();
+ if let Some(c) = container {
+ parsed_flag.set_container(c.to_string());
+ }
parsed_flag.set_package(package.to_string());
parsed_flag.set_name(flag_declaration.take_name());
parsed_flag.set_namespace(flag_declaration.take_namespace());
@@ -98,12 +112,18 @@
};
parsed_flag.set_permission(flag_permission);
parsed_flag.set_is_fixed_read_only(flag_declaration.is_fixed_read_only());
+ parsed_flag.set_is_exported(flag_declaration.is_exported());
let mut tracepoint = ProtoTracepoint::new();
tracepoint.set_source(input.source.clone());
tracepoint.set_state(DEFAULT_FLAG_STATE);
tracepoint.set_permission(flag_permission);
parsed_flag.trace.push(tracepoint);
+ let mut metadata = ProtoFlagMetadata::new();
+ let purpose = flag_declaration.metadata.purpose();
+ metadata.set_purpose(purpose);
+ parsed_flag.metadata = Some(metadata).into();
+
// verify ParsedFlag looks reasonable
crate::protos::parsed_flag::verify_fields(&parsed_flag)?;
@@ -170,6 +190,7 @@
pub enum CodegenMode {
Production,
Test,
+ Exported,
}
pub fn create_java_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<Vec<OutputFile>> {
@@ -249,20 +270,25 @@
Textproto,
}
-pub fn dump_parsed_flags(mut input: Vec<Input>, format: DumpFormat) -> Result<Vec<u8>> {
+pub fn dump_parsed_flags(
+ mut input: Vec<Input>,
+ format: DumpFormat,
+ dedup: bool,
+) -> Result<Vec<u8>> {
let individually_parsed_flags: Result<Vec<ProtoParsedFlags>> =
input.iter_mut().map(|i| i.try_parse_flags()).collect();
let parsed_flags: ProtoParsedFlags =
- crate::protos::parsed_flags::merge(individually_parsed_flags?)?;
+ crate::protos::parsed_flags::merge(individually_parsed_flags?, dedup)?;
let mut output = Vec::new();
match format {
DumpFormat::Text => {
for parsed_flag in parsed_flags.parsed_flag.into_iter() {
let line = format!(
- "{}.{}: {:?} + {:?}\n",
+ "{}.{} [{}]: {:?} + {:?}\n",
parsed_flag.package(),
parsed_flag.name(),
+ parsed_flag.container(),
parsed_flag.permission(),
parsed_flag.state()
);
@@ -274,9 +300,10 @@
let sources: Vec<_> =
parsed_flag.trace.iter().map(|tracepoint| tracepoint.source()).collect();
let line = format!(
- "{}.{}: {:?} + {:?} ({})\n",
+ "{}.{} [{}]: {:?} + {:?} ({})\n",
parsed_flag.package(),
parsed_flag.name(),
+ parsed_flag.container(),
parsed_flag.permission(),
parsed_flag.state(),
sources.join(", ")
@@ -308,6 +335,7 @@
#[cfg(test)]
mod tests {
use super::*;
+ use crate::protos::ProtoFlagPurpose;
#[test]
fn test_parse_flags() {
@@ -322,6 +350,7 @@
assert_eq!("This flag is ENABLED + READ_ONLY", enabled_ro.description());
assert_eq!(ProtoFlagState::ENABLED, enabled_ro.state());
assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.permission());
+ assert_eq!(ProtoFlagPurpose::PURPOSE_BUGFIX, enabled_ro.metadata.purpose());
assert_eq!(3, enabled_ro.trace.len());
assert!(!enabled_ro.is_fixed_read_only());
assert_eq!("tests/test.aconfig", enabled_ro.trace[0].source());
@@ -334,7 +363,7 @@
assert_eq!(ProtoFlagState::ENABLED, enabled_ro.trace[2].state());
assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.trace[2].permission());
- assert_eq!(6, parsed_flags.parsed_flag.len());
+ assert_eq!(7, parsed_flags.parsed_flag.len());
for pf in parsed_flags.parsed_flag.iter() {
if pf.name() == "enabled_fixed_ro" {
continue;
@@ -375,6 +404,7 @@
let flags_bytes = crate::commands::parse_flags(
"com.first",
+ None,
declaration,
value,
ProtoFlagPermission::READ_ONLY,
@@ -389,9 +419,72 @@
}
#[test]
+ fn test_parse_flags_package_mismatch_between_declaration_and_command_line() {
+ let first_flag = r#"
+ package: "com.declaration.package"
+ container: "first.container"
+ flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+ }
+ "#;
+ let declaration =
+ vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+ let value: Vec<Input> = vec![];
+
+ let error = crate::commands::parse_flags(
+ "com.argument.package",
+ Some("first.container"),
+ declaration,
+ value,
+ ProtoFlagPermission::READ_WRITE,
+ )
+ .unwrap_err();
+ assert_eq!(
+ format!("{:?}", error),
+ "failed to parse memory: expected package com.argument.package, got com.declaration.package"
+ );
+ }
+
+ #[test]
+ fn test_parse_flags_container_mismatch_between_declaration_and_command_line() {
+ let first_flag = r#"
+ package: "com.first"
+ container: "declaration.container"
+ flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+ }
+ "#;
+ let declaration =
+ vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+ let value: Vec<Input> = vec![];
+
+ let error = crate::commands::parse_flags(
+ "com.first",
+ Some("argument.container"),
+ declaration,
+ value,
+ ProtoFlagPermission::READ_WRITE,
+ )
+ .unwrap_err();
+ assert_eq!(
+ format!("{:?}", error),
+ "failed to parse memory: expected container argument.container, got declaration.container"
+ );
+ }
+
+ #[test]
fn test_parse_flags_override_fixed_read_only() {
let first_flag = r#"
package: "com.first"
+ container: "com.first.container"
flag {
name: "first"
namespace: "first_ns"
@@ -417,6 +510,7 @@
}];
let error = crate::commands::parse_flags(
"com.first",
+ Some("com.first.container"),
declaration,
value,
ProtoFlagPermission::READ_WRITE,
@@ -429,11 +523,46 @@
}
#[test]
+ fn test_parse_flags_metadata() {
+ let metadata_flag = r#"
+ package: "com.first"
+ flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of this feature flag."
+ bug: "123"
+ metadata {
+ purpose: PURPOSE_FEATURE
+ }
+ }
+ "#;
+ let declaration = vec![Input {
+ source: "memory".to_string(),
+ reader: Box::new(metadata_flag.as_bytes()),
+ }];
+ let value: Vec<Input> = vec![];
+
+ let flags_bytes = crate::commands::parse_flags(
+ "com.first",
+ None,
+ declaration,
+ value,
+ ProtoFlagPermission::READ_ONLY,
+ )
+ .unwrap();
+ let parsed_flags =
+ crate::protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
+ assert_eq!(1, parsed_flags.parsed_flag.len());
+ let parsed_flag = parsed_flags.parsed_flag.first().unwrap();
+ assert_eq!(ProtoFlagPurpose::PURPOSE_FEATURE, parsed_flag.metadata.purpose());
+ }
+
+ #[test]
fn test_create_device_config_defaults() {
let input = parse_test_flags_as_input();
let bytes = create_device_config_defaults(input).unwrap();
let text = std::str::from_utf8(&bytes).unwrap();
- assert_eq!("aconfig_test:com.android.aconfig.test.disabled_rw=disabled\nother_namespace:com.android.aconfig.test.disabled_rw_in_other_namespace=disabled\naconfig_test:com.android.aconfig.test.enabled_rw=enabled\n", text);
+ assert_eq!("aconfig_test:com.android.aconfig.test.disabled_rw=disabled\naconfig_test:com.android.aconfig.test.disabled_rw_exported=disabled\nother_namespace:com.android.aconfig.test.disabled_rw_in_other_namespace=disabled\naconfig_test:com.android.aconfig.test.enabled_rw=enabled\n", text);
}
#[test]
@@ -441,15 +570,17 @@
let input = parse_test_flags_as_input();
let bytes = create_device_config_sysprops(input).unwrap();
let text = std::str::from_utf8(&bytes).unwrap();
- assert_eq!("persist.device_config.com.android.aconfig.test.disabled_rw=false\npersist.device_config.com.android.aconfig.test.disabled_rw_in_other_namespace=false\npersist.device_config.com.android.aconfig.test.enabled_rw=true\n", text);
+ assert_eq!("persist.device_config.com.android.aconfig.test.disabled_rw=false\npersist.device_config.com.android.aconfig.test.disabled_rw_exported=false\npersist.device_config.com.android.aconfig.test.disabled_rw_in_other_namespace=false\npersist.device_config.com.android.aconfig.test.enabled_rw=true\n", text);
}
#[test]
fn test_dump_text_format() {
let input = parse_test_flags_as_input();
- let bytes = dump_parsed_flags(vec![input], DumpFormat::Text).unwrap();
+ let bytes = dump_parsed_flags(vec![input], DumpFormat::Text, false).unwrap();
let text = std::str::from_utf8(&bytes).unwrap();
- assert!(text.contains("com.android.aconfig.test.disabled_ro: READ_ONLY + DISABLED"));
+ assert!(
+ text.contains("com.android.aconfig.test.disabled_ro [system]: READ_ONLY + DISABLED")
+ );
}
#[test]
@@ -462,7 +593,7 @@
.unwrap();
let input = parse_test_flags_as_input();
- let actual = dump_parsed_flags(vec![input], DumpFormat::Protobuf).unwrap();
+ let actual = dump_parsed_flags(vec![input], DumpFormat::Protobuf, false).unwrap();
assert_eq!(expected, actual);
}
@@ -470,7 +601,16 @@
#[test]
fn test_dump_textproto_format() {
let input = parse_test_flags_as_input();
- let bytes = dump_parsed_flags(vec![input], DumpFormat::Textproto).unwrap();
+ let bytes = dump_parsed_flags(vec![input], DumpFormat::Textproto, false).unwrap();
+ let text = std::str::from_utf8(&bytes).unwrap();
+ assert_eq!(crate::test::TEST_FLAGS_TEXTPROTO.trim(), text.trim());
+ }
+
+ #[test]
+ fn test_dump_textproto_format_dedup() {
+ let input = parse_test_flags_as_input();
+ let input2 = parse_test_flags_as_input();
+ let bytes = dump_parsed_flags(vec![input, input2], DumpFormat::Textproto, true).unwrap();
let text = std::str::from_utf8(&bytes).unwrap();
assert_eq!(crate::test::TEST_FLAGS_TEXTPROTO.trim(), text.trim());
}
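
Note: the container argument threaded through parse_flags above is optional for now (b/312769710 tracks making it required), so the mismatch check only runs when a container was supplied on the command line. A minimal sketch of that check, with illustrative names rather than the real signatures:

// Illustrative only: the declared container is validated against the
// command-line container only when the latter is present.
fn check_container(expected: Option<&str>, declared: &str) -> Result<(), String> {
    if let Some(c) = expected {
        if c != declared {
            return Err(format!("expected container {}, got {}", c, declared));
        }
    }
    Ok(())
}

fn main() {
    assert!(check_container(None, "system").is_ok());
    assert!(check_container(Some("system"), "system").is_ok());
    assert!(check_container(Some("system"), "vendor").is_err());
}
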
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
index 7e44baf..90464c5 100644
--- a/tools/aconfig/src/main.rs
+++ b/tools/aconfig/src/main.rs
@@ -42,6 +42,8 @@
.subcommand(
Command::new("create-cache")
.arg(Arg::new("package").long("package").required(true))
+ // TODO(b/312769710): Make this argument required.
+ .arg(Arg::new("container").long("container"))
.arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
.arg(Arg::new("values").long("values").action(ArgAction::Append))
.arg(
@@ -99,13 +101,14 @@
)
.subcommand(
Command::new("dump")
- .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
+ .arg(Arg::new("cache").long("cache").action(ArgAction::Append))
.arg(
Arg::new("format")
.long("format")
.value_parser(EnumValueParser::<commands::DumpFormat>::new())
.default_value("text"),
)
+ .arg(Arg::new("dedup").long("dedup").num_args(0).action(ArgAction::SetTrue))
.arg(Arg::new("out").long("out").default_value("-")),
)
}
@@ -119,6 +122,13 @@
.ok_or(anyhow!("internal error: required argument '{}' not found", arg_name))
}
+fn get_optional_arg<'a, T>(matches: &'a ArgMatches, arg_name: &str) -> Option<&'a T>
+where
+ T: Any + Clone + Send + Sync + 'static,
+{
+ matches.get_one::<T>(arg_name)
+}
+
fn open_zero_or_more_files(matches: &ArgMatches, arg_name: &str) -> Result<Vec<Input>> {
let mut opened_files = vec![];
for path in matches.get_many::<String>(arg_name).unwrap_or_default() {
@@ -167,12 +177,20 @@
match matches.subcommand() {
Some(("create-cache", sub_matches)) => {
let package = get_required_arg::<String>(sub_matches, "package")?;
+ let container =
+ get_optional_arg::<String>(sub_matches, "container").map(|c| c.as_str());
let declarations = open_zero_or_more_files(sub_matches, "declarations")?;
let values = open_zero_or_more_files(sub_matches, "values")?;
let default_permission =
get_required_arg::<protos::ProtoFlagPermission>(sub_matches, "default-permission")?;
- let output = commands::parse_flags(package, declarations, values, *default_permission)
- .context("failed to create cache")?;
+ let output = commands::parse_flags(
+ package,
+ container,
+ declarations,
+ values,
+ *default_permission,
+ )
+ .context("failed to create cache")?;
let path = get_required_arg::<String>(sub_matches, "cache")?;
write_output_to_file_or_stdout(path, &output)?;
}
@@ -222,7 +240,8 @@
let input = open_zero_or_more_files(sub_matches, "cache")?;
let format = get_required_arg::<DumpFormat>(sub_matches, "format")
.context("failed to dump previously parsed flags")?;
- let output = commands::dump_parsed_flags(input, *format)?;
+ let dedup = get_required_arg::<bool>(sub_matches, "dedup")?;
+ let output = commands::dump_parsed_flags(input, *format, *dedup)?;
let path = get_required_arg::<String>(sub_matches, "out")?;
write_output_to_file_or_stdout(path, &output)?;
}
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/src/protos.rs
index d3b5b37..37eb67d 100644
--- a/tools/aconfig/src/protos.rs
+++ b/tools/aconfig/src/protos.rs
@@ -29,8 +29,10 @@
// ---- When building with the Android tool-chain ----
#[cfg(not(feature = "cargo"))]
mod auto_generated {
+ pub use aconfig_protos::aconfig::flag_metadata::Flag_purpose as ProtoFlagPurpose;
pub use aconfig_protos::aconfig::Flag_declaration as ProtoFlagDeclaration;
pub use aconfig_protos::aconfig::Flag_declarations as ProtoFlagDeclarations;
+ pub use aconfig_protos::aconfig::Flag_metadata as ProtoFlagMetadata;
pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
pub use aconfig_protos::aconfig::Flag_value as ProtoFlagValue;
@@ -47,8 +49,10 @@
// because this is only used during local development, and only if using cargo instead of the
// Android tool-chain, we allow it
include!(concat!(env!("OUT_DIR"), "/aconfig_proto/mod.rs"));
+ pub use aconfig::flag_metadata::Flag_purpose as ProtoFlagPurpose;
pub use aconfig::Flag_declaration as ProtoFlagDeclaration;
pub use aconfig::Flag_declarations as ProtoFlagDeclarations;
+ pub use aconfig::Flag_metadata as ProtoFlagMetadata;
pub use aconfig::Flag_permission as ProtoFlagPermission;
pub use aconfig::Flag_state as ProtoFlagState;
pub use aconfig::Flag_value as ProtoFlagValue;
@@ -111,11 +115,16 @@
pub fn verify_fields(pdf: &ProtoFlagDeclarations) -> Result<()> {
ensure_required_fields!("flag declarations", pdf, "package");
+ // TODO(b/312769710): Make the container field required.
ensure!(
codegen::is_valid_package_ident(pdf.package()),
"bad flag declarations: bad package"
);
+ ensure!(
+ !pdf.has_container() || codegen::is_valid_container_ident(pdf.container()),
+ "bad flag declarations: bad container"
+ );
for flag_declaration in pdf.flag.iter() {
super::flag_declaration::verify_fields(flag_declaration)?;
}
@@ -207,6 +216,10 @@
);
ensure!(codegen::is_valid_package_ident(pf.package()), "bad parsed flag: bad package");
+ ensure!(
+ !pf.has_container() || codegen::is_valid_container_ident(pf.container()),
+ "bad parsed flag: bad container"
+ );
ensure!(codegen::is_valid_name_ident(pf.name()), "bad parsed flag: bad name");
ensure!(codegen::is_valid_name_ident(pf.namespace()), "bad parsed flag: bad namespace");
ensure!(!pf.description().is_empty(), "bad parsed flag: empty description");
@@ -274,12 +287,18 @@
Ok(())
}
- pub fn merge(parsed_flags: Vec<ProtoParsedFlags>) -> Result<ProtoParsedFlags> {
+ pub fn merge(parsed_flags: Vec<ProtoParsedFlags>, dedup: bool) -> Result<ProtoParsedFlags> {
let mut merged = ProtoParsedFlags::new();
for mut pfs in parsed_flags.into_iter() {
merged.parsed_flag.append(&mut pfs.parsed_flag);
}
merged.parsed_flag.sort_by_cached_key(create_sorting_key);
+ if dedup {
+ // Deduplicate identical protobuf messages. Messages with the same sorting key but
+ // different fields (including the path to the original source file) will not be
+ // deduplicated, and will instead trigger an error in verify_fields.
+ merged.parsed_flag.dedup();
+ }
verify_fields(&merged)?;
Ok(merged)
}
@@ -303,11 +322,13 @@
let flag_declarations = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
description: "This is the description of the first flag."
bug: "123"
+ is_exported: true
}
flag {
name: "second"
@@ -320,22 +341,42 @@
)
.unwrap();
assert_eq!(flag_declarations.package(), "com.foo.bar");
+ assert_eq!(flag_declarations.container(), "system");
let first = flag_declarations.flag.iter().find(|pf| pf.name() == "first").unwrap();
assert_eq!(first.name(), "first");
assert_eq!(first.namespace(), "first_ns");
assert_eq!(first.description(), "This is the description of the first flag.");
assert_eq!(first.bug, vec!["123"]);
assert!(!first.is_fixed_read_only());
+ assert!(first.is_exported());
let second = flag_declarations.flag.iter().find(|pf| pf.name() == "second").unwrap();
assert_eq!(second.name(), "second");
assert_eq!(second.namespace(), "second_ns");
assert_eq!(second.description(), "This is the description of the second flag.");
assert_eq!(second.bug, vec!["abc"]);
assert!(second.is_fixed_read_only());
+ assert!(!second.is_exported());
+
+ // valid input: missing container in flag declarations is supported
+ let flag_declarations = flag_declarations::try_from_text_proto(
+ r#"
+package: "com.foo.bar"
+flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+}
+"#,
+ )
+ .unwrap();
+ assert_eq!(flag_declarations.container(), "");
+ assert!(!flag_declarations.has_container());
// bad input: missing package in flag declarations
let error = flag_declarations::try_from_text_proto(
r#"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -355,6 +396,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
description: "This is the description of the first flag."
@@ -373,6 +415,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "_com.FOO__BAR"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -392,6 +435,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "FIRST"
namespace: "first_ns"
@@ -411,6 +455,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -425,6 +470,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -436,6 +482,25 @@
)
.unwrap_err();
assert!(format!("{:?}", error).contains("bad flag declaration: exactly one bug required"));
+
+ // bad input: invalid container name in flag declaration
+ let error = flag_declarations::try_from_text_proto(
+ r#"
+package: "com.foo.bar"
+container: "__bad_bad_container.com"
+flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+ bug: "abc"
+}
+"#,
+ )
+ .unwrap_err();
+ assert!(format!("{:?}", error).contains("bad flag declarations: bad container"));
+
+ // TODO(b/312769710): Verify error when container is missing.
}
#[test]
@@ -550,6 +615,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.second"
@@ -570,6 +636,7 @@
permission: READ_ONLY
}
is_fixed_read_only: true
+ container: "system"
}
"#;
let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -604,6 +671,7 @@
description: "This is the description of the first flag."
state: DISABLED
permission: READ_ONLY
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -622,6 +690,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -642,6 +711,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "aaa.aaa"
@@ -656,6 +726,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -679,6 +750,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.foo"
@@ -693,6 +765,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -716,6 +789,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.foo"
@@ -730,6 +804,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -757,6 +832,7 @@
state: ENABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -783,6 +859,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.second"
@@ -797,6 +874,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let expected = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -815,6 +893,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let first = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -833,18 +912,68 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let second = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+ let text_proto = r#"
+parsed_flag {
+ package: "com.second"
+ name: "second"
+ namespace: "second_ns"
+ bug: "b"
+ description: "This is the description of the second flag."
+ state: ENABLED
+ permission: READ_WRITE
+ trace {
+ source: "duplicate/flags.declarations"
+ state: DISABLED
+ permission: READ_ONLY
+ }
+}
+"#;
+ let second_duplicate = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+
// bad cases
- let error = parsed_flags::merge(vec![first.clone(), first.clone()]).unwrap_err();
+
+ // two of the same flag with dedup disabled
+ let error = parsed_flags::merge(vec![first.clone(), first.clone()], false).unwrap_err();
assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.first.first (defined in flags.declarations and flags.declarations)");
+ // two conflicting flags with dedup disabled
+ let error =
+ parsed_flags::merge(vec![second.clone(), second_duplicate.clone()], false).unwrap_err();
+ assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.second.second (defined in flags.declarations and duplicate/flags.declarations)");
+
+ // two conflicting flags with dedup enabled
+ let error =
+ parsed_flags::merge(vec![second.clone(), second_duplicate.clone()], true).unwrap_err();
+ assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.second.second (defined in flags.declarations and duplicate/flags.declarations)");
+
// valid cases
- assert!(parsed_flags::merge(vec![]).unwrap().parsed_flag.is_empty());
- assert_eq!(first, parsed_flags::merge(vec![first.clone()]).unwrap());
- assert_eq!(expected, parsed_flags::merge(vec![first.clone(), second.clone()]).unwrap());
- assert_eq!(expected, parsed_flags::merge(vec![second, first]).unwrap());
+ assert!(parsed_flags::merge(vec![], false).unwrap().parsed_flag.is_empty());
+ assert!(parsed_flags::merge(vec![], true).unwrap().parsed_flag.is_empty());
+ assert_eq!(first, parsed_flags::merge(vec![first.clone()], false).unwrap());
+ assert_eq!(first, parsed_flags::merge(vec![first.clone()], true).unwrap());
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![first.clone(), second.clone()], false).unwrap()
+ );
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![first.clone(), second.clone()], true).unwrap()
+ );
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![second.clone(), first.clone()], false).unwrap()
+ );
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![second.clone(), first.clone()], true).unwrap()
+ );
+
+ // two identical flags with dedup enabled
+ assert_eq!(first, parsed_flags::merge(vec![first.clone(), first.clone()], true).unwrap());
}
}
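
Note: the dedup flag added to parsed_flags::merge above relies on the flags being sorted first: Vec::dedup only removes adjacent, byte-identical entries, so exact duplicates collapse while conflicting duplicates survive and are still caught by verify_fields. A simplified, self-contained sketch of that behaviour, using plain strings instead of protobuf messages:

// Simplified stand-in for parsed_flags::merge: sort, optionally dedup, then
// report any remaining duplicates (the real code does this in verify_fields).
fn merge_keys(mut keys: Vec<String>, dedup: bool) -> Result<Vec<String>, String> {
    keys.sort();
    if dedup {
        keys.dedup();
    }
    for pair in keys.windows(2) {
        if pair[0] == pair[1] {
            return Err(format!("duplicate flag {}", pair[0]));
        }
    }
    Ok(keys)
}

fn main() {
    let twice = vec!["com.first.first".to_string(), "com.first.first".to_string()];
    // Identical entries merge cleanly when dedup is enabled...
    assert!(merge_keys(twice.clone(), true).is_ok());
    // ...but are rejected when dedup is disabled.
    assert!(merge_keys(twice, false).is_err());
}
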
diff --git a/tools/aconfig/src/test.rs b/tools/aconfig/src/test.rs
index bb3d1f0..2c63fef 100644
--- a/tools/aconfig/src/test.rs
+++ b/tools/aconfig/src/test.rs
@@ -42,6 +42,11 @@
permission: READ_ONLY
}
is_fixed_read_only: false
+ is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -57,6 +62,36 @@
permission: READ_WRITE
}
is_fixed_read_only: false
+ is_exported: true
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
+}
+parsed_flag {
+ package: "com.android.aconfig.test"
+ name: "disabled_rw_exported"
+ namespace: "aconfig_test"
+ description: "This flag is exported"
+ bug: "111"
+ state: DISABLED
+ permission: READ_WRITE
+ trace {
+ source: "tests/test.aconfig"
+ state: DISABLED
+ permission: READ_WRITE
+ }
+ trace {
+ source: "tests/first.values"
+ state: DISABLED
+ permission: READ_WRITE
+ }
+ is_fixed_read_only: false
+ is_exported: true
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -77,6 +112,11 @@
permission: READ_WRITE
}
is_fixed_read_only: false
+ is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -97,6 +137,11 @@
permission: READ_ONLY
}
is_fixed_read_only: true
+ is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -122,6 +167,11 @@
permission: READ_ONLY
}
is_fixed_read_only: false
+ is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -142,12 +192,18 @@
permission: READ_WRITE
}
is_fixed_read_only: false
+ is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
"#;
pub fn parse_test_flags() -> ProtoParsedFlags {
let bytes = crate::commands::parse_flags(
"com.android.aconfig.test",
+ Some("system"),
vec![Input {
source: "tests/test.aconfig".to_string(),
reader: Box::new(include_bytes!("../tests/test.aconfig").as_slice()),
diff --git a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
index 933d6a7..fd2e26a 100644
--- a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
+++ b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
@@ -12,11 +12,23 @@
}
{{ for item in flag_elements}}
+{{ if library_exported }}
+
+{{ if item.exported }}
@Override
@UnsupportedAppUsage
public boolean {item.method_name}() \{
return getValue(Flags.FLAG_{item.flag_name_constant_suffix});
}
+{{ endif }}
+
+{{ else }}
+ @Override
+ @UnsupportedAppUsage
+ public boolean {item.method_name}() \{
+ return getValue(Flags.FLAG_{item.flag_name_constant_suffix});
+ }
+{{ endif }}
{{ endfor}}
public void setFlag(String flagName, boolean value) \{
if (!this.mFlagMap.containsKey(flagName)) \{
diff --git a/tools/aconfig/templates/FeatureFlags.java.template b/tools/aconfig/templates/FeatureFlags.java.template
index da850ae..180f882 100644
--- a/tools/aconfig/templates/FeatureFlags.java.template
+++ b/tools/aconfig/templates/FeatureFlags.java.template
@@ -5,6 +5,15 @@
/** @hide */
public interface FeatureFlags \{
{{ for item in flag_elements }}
+{{ if library_exported }}
+
+{{ if item.exported }}
+ @UnsupportedAppUsage
+ boolean {item.method_name}();
+{{ endif }}
+
+{{ else }}
+
{{ -if not item.is_read_write }}
{{ -if item.default_value }}
@com.android.aconfig.annotations.AssumeTrueForR8
@@ -14,5 +23,7 @@
{{ endif }}
@UnsupportedAppUsage
boolean {item.method_name}();
+
+{{ endif }}
{{ endfor }}
}
diff --git a/tools/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/templates/FeatureFlagsImpl.java.template
index ec8822c..a15c859 100644
--- a/tools/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/templates/FeatureFlagsImpl.java.template
@@ -14,9 +14,17 @@
{{- endfor- }}
{{ for flag in flag_elements }}
+{{ if library_exported }}
+{{ if flag.exported }}
+ private static boolean {flag.method_name} = false;
+{{ endif }}
+
+{{ else }}
+
{{- if flag.is_read_write }}
private static boolean {flag.method_name} = {flag.default_value};
{{- endif- }}
+{{ endif }}
{{ endfor }}
{{ for namespace_with_flags in namespace_flags }}
@@ -25,10 +33,21 @@
Properties properties = DeviceConfig.getProperties("{namespace_with_flags.namespace}");
{{- for flag in namespace_with_flags.flags }}
- {{- if flag.is_read_write }}
+ {{ if library_exported }}
+
+ {{ if flag.exported }}
+ {flag.method_name} =
+ properties.getBoolean("{flag.device_config_flag}", false);
+ {{ endif }}
+
+ {{ else }}
+
+ {{ if flag.is_read_write }}
{flag.method_name} =
properties.getBoolean("{flag.device_config_flag}", {flag.default_value});
- {{- endif- }}
+ {{ endif }}
+
+ {{ endif }}
{{ endfor }}
} catch (NullPointerException e) \{
throw new RuntimeException(
@@ -46,6 +65,9 @@
{{ endif- }}
{{ for flag in flag_elements }}
+{{ if library_exported }}
+
+{{ if flag.exported }}
@Override
@UnsupportedAppUsage
public boolean {flag.method_name}() \{
@@ -58,6 +80,23 @@
return {flag.default_value};
{{ endif- }}
}
+{{ endif }}
+
+{{ else }}
+ @Override
+ @UnsupportedAppUsage
+ public boolean {flag.method_name}() \{
+ {{ -if flag.is_read_write }}
+ if (!{flag.device_config_namespace}_is_cached) \{
+ load_overrides_{flag.device_config_namespace}();
+ }
+ return {flag.method_name};
+ {{ else }}
+ return {flag.default_value};
+ {{ endif- }}
+ }
+{{ endif }}
+
{{ endfor }}
}
{{ else }}
diff --git a/tools/aconfig/templates/Flags.java.template b/tools/aconfig/templates/Flags.java.template
index cf6604c..9f4c52f 100644
--- a/tools/aconfig/templates/Flags.java.template
+++ b/tools/aconfig/templates/Flags.java.template
@@ -6,10 +6,28 @@
/** @hide */
public final class Flags \{
{{- for item in flag_elements}}
+ {{ if library_exported }}
+ {{ if item.exported }}
/** @hide */
public static final String FLAG_{item.flag_name_constant_suffix} = "{item.device_config_flag}";
+ {{ endif }}
+ {{ else }}
+ /** @hide */
+ public static final String FLAG_{item.flag_name_constant_suffix} = "{item.device_config_flag}";
+ {{ endif }}
{{- endfor }}
{{ for item in flag_elements}}
+{{ if library_exported }}
+
+{{ if item.exported }}
+ @UnsupportedAppUsage
+ public static boolean {item.method_name}() \{
+ return FEATURE_FLAGS.{item.method_name}();
+ }
+{{ endif }}
+
+{{ else }}
+
{{ -if not item.is_read_write }}
{{ -if item.default_value }}
@com.android.aconfig.annotations.AssumeTrueForR8
@@ -21,6 +39,7 @@
public static boolean {item.method_name}() \{
return FEATURE_FLAGS.{item.method_name}();
}
+{{ endif }}
{{ endfor }}
{{ -if is_test_mode }}
public static void setFeatureFlags(FeatureFlags featureFlags) \{
diff --git a/tools/aconfig/templates/cpp_exported_header.template b/tools/aconfig/templates/cpp_exported_header.template
index d19c0fa..cc1b18d 100644
--- a/tools/aconfig/templates/cpp_exported_header.template
+++ b/tools/aconfig/templates/cpp_exported_header.template
@@ -18,16 +18,9 @@
#ifdef __cplusplus
#include <memory>
-{{ if not for_test- }}
-#include <vector>
-{{ -endif }}
namespace {cpp_namespace} \{
-{{ if not for_test- }}
-extern std::vector<int8_t> cache_;
-{{ -endif }}
-
class flag_provider_interface \{
public:
virtual ~flag_provider_interface() = default;
diff --git a/tools/aconfig/templates/cpp_source_file.template b/tools/aconfig/templates/cpp_source_file.template
index 91e828a..1bfa4b6 100644
--- a/tools/aconfig/templates/cpp_source_file.template
+++ b/tools/aconfig/templates/cpp_source_file.template
@@ -5,6 +5,8 @@
{{ if for_test }}
#include <unordered_map>
#include <string>
+{{ -else- }}
+#include <vector>
{{ endif }}
namespace {cpp_namespace} \{
@@ -69,16 +71,15 @@
{{ -endif }}
}
{{ endfor }}
+ private:
+ std::vector<int8_t> cache_ = std::vector<int8_t>({readwrite_count}, -1);
};
- std::vector<int8_t> cache_ = std::vector<int8_t>({readwrite_count}, -1);
{{ -endif }}
std::unique_ptr<flag_provider_interface> provider_ =
std::make_unique<flag_provider>();
-
-
}
diff --git a/tools/aconfig/tests/first.values b/tools/aconfig/tests/first.values
index 07d8d1d..b248d43 100644
--- a/tools/aconfig/tests/first.values
+++ b/tools/aconfig/tests/first.values
@@ -28,3 +28,9 @@
state: ENABLED
permission: READ_ONLY
}
+flag_value {
+ package: "com.android.aconfig.test"
+ name: "disabled_rw_exported"
+ state: DISABLED
+ permission: READ_WRITE
+}
diff --git a/tools/aconfig/tests/test.aconfig b/tools/aconfig/tests/test.aconfig
index 8527e93..310d0a6 100644
--- a/tools/aconfig/tests/test.aconfig
+++ b/tools/aconfig/tests/test.aconfig
@@ -1,4 +1,5 @@
package: "com.android.aconfig.test"
+container: "system"
# This flag's final value is calculated from:
# - test.aconfig: DISABLED + READ_WRITE (default)
@@ -9,6 +10,9 @@
namespace: "aconfig_test"
description: "This flag is ENABLED + READ_ONLY"
bug: "abc"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
}
# This flag's final value is calculated from:
@@ -39,6 +43,7 @@
namespace: "aconfig_test"
description: "This flag is DISABLED + READ_WRITE"
bug: "456"
+ is_exported: true
}
# This flag's final value calculated from:
@@ -58,3 +63,11 @@
description: "This flag is DISABLED + READ_WRITE, and is defined in another namespace"
bug: "999"
}
+
+flag {
+ name: "disabled_rw_exported"
+ namespace: "aconfig_test"
+ description: "This flag is exported"
+ bug: "111"
+ is_exported: true
+}
\ No newline at end of file
diff --git a/tools/checkowners.py b/tools/checkowners.py
deleted file mode 100755
index f037321..0000000
--- a/tools/checkowners.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/python
-
-"""Parse and check syntax errors of a given OWNERS file."""
-
-import argparse
-import re
-import sys
-import urllib.request, urllib.parse, urllib.error
-import urllib.request, urllib.error, urllib.parse
-
-parser = argparse.ArgumentParser(description='Check OWNERS file syntax')
-parser.add_argument('-v', '--verbose', dest='verbose',
- action='store_true', default=False,
- help='Verbose output to debug')
-parser.add_argument('-c', '--check_address', dest='check_address',
- action='store_true', default=False,
- help='Check email addresses')
-parser.add_argument(dest='owners', metavar='OWNERS', nargs='+',
- help='Path to OWNERS file')
-args = parser.parse_args()
-
-gerrit_server = 'https://android-review.googlesource.com'
-checked_addresses = {}
-
-
-def echo(msg):
- if args.verbose:
- print(msg)
-
-
-def find_address(address):
- if address not in checked_addresses:
- request = (gerrit_server + '/accounts/?n=1&q=email:'
- + urllib.parse.quote(address))
- echo('Checking email address: ' + address)
- result = urllib.request.urlopen(request).read()
- checked_addresses[address] = result.find('"_account_id":') >= 0
- if checked_addresses[address]:
- echo('Found email address: ' + address)
- return checked_addresses[address]
-
-
-def check_address(fname, num, address):
- if find_address(address):
- return 0
- print('%s:%d: ERROR: unknown email address: %s' % (fname, num, address))
- return 1
-
-
-def main():
- # One regular expression to check all valid lines.
- noparent = 'set +noparent'
- email = '([^@ ]+@[^ @]+|\\*)'
- emails = '(%s( *, *%s)*)' % (email, email)
- file_directive = 'file: *([^ :]+ *: *)?[^ ]+'
- directive = '(%s|%s|%s)' % (emails, noparent, file_directive)
- glob = '[a-zA-Z0-9_\\.\\-\\*\\?]+'
- globs = '(%s( *, *%s)*)' % (glob, glob)
- perfile = 'per-file +' + globs + ' *= *' + directive
- include = 'include +([^ :]+ *: *)?[^ ]+'
- pats = '(|%s|%s|%s|%s|%s)$' % (noparent, email, perfile, include, file_directive)
- patterns = re.compile(pats)
- address_pattern = re.compile('([^@ ]+@[^ @]+)')
- perfile_pattern = re.compile('per-file +.*=(.*)')
-
- error = 0
- for fname in args.owners:
- echo('Checking file: ' + fname)
- num = 0
- for line in open(fname, 'r'):
- num += 1
- stripped_line = re.sub('#.*$', '', line).strip()
- if not patterns.match(stripped_line):
- error += 1
- print('%s:%d: ERROR: unknown line [%s]' % (fname, num, line.strip()))
- elif args.check_address:
- if perfile_pattern.match(stripped_line):
- for addr in perfile_pattern.match(stripped_line).group(1).split(','):
- a = addr.strip()
- if a and a != '*':
- error += check_address(fname, num, addr.strip())
- elif address_pattern.match(stripped_line):
- error += check_address(fname, num, stripped_line)
- sys.exit(error)
-
-if __name__ == '__main__':
- main()
diff --git a/tools/finalization/README.md b/tools/finalization/README.md
index 501f260..cc97d1f 100644
--- a/tools/finalization/README.md
+++ b/tools/finalization/README.md
@@ -12,10 +12,8 @@
## CI:
Performed in build targets in Finalization branches.
-1. [Finalization Step 1 for Main, git_main-fina-1-release](https://android-build.googleplex.com/builds/branches/git_main-fina-1-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
-2. [Finalization Step 1 for UDC, git_udc-fina-1-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-1-release/grid). Same but for udc-dev.
-3. [Finalization Step 2 for Main, git_main-fina-2-release](https://android-build.googleplex.com/builds/branches/git_main-fina-2-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
-4. [Finalization Step 2 for UDC, git_udc-fina-2-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-2-release/grid). Same but for udc-dev.
+1. [Finalization Step 1, git_main-fina-1-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-1-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
+3. [Finalization Step 2, git_main-fina-2-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-2-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
5. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
## Utility:
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
index 6d13325..37c0011 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -137,6 +137,13 @@
local version_codes="$top/platform_testing/libraries/compatibility-common-util/src/com/android/compatibility/common/util/VersionCodes.java"
sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1));/a \\ ${SDK_VERSION}" $version_codes
+ # tools/platform-compat
+ local class2nonsdklist="$top/tools/platform-compat/java/com/android/class2nonsdklist/Class2NonSdkList.java"
+ if ! grep -q "\.*map.put($((${FINAL_PLATFORM_SDK_VERSION}))" $class2nonsdklist ; then
+ local sdk_version="map.put(${FINAL_PLATFORM_SDK_VERSION}, FLAG_UNSUPPORTED);"
+ sed -i -e "/.*map.put($((${FINAL_PLATFORM_SDK_VERSION}-1))/a \\ ${sdk_version}" $class2nonsdklist
+ fi
+
# Finalize resources
"$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
"$top/frameworks/base/core/res/res/values/public-staging.xml" \
diff --git a/tools/metadata/Android.bp b/tools/metadata/Android.bp
index b2fabec..77d106d 100644
--- a/tools/metadata/Android.bp
+++ b/tools/metadata/Android.bp
@@ -6,6 +6,8 @@
name: "metadata",
deps: [
"soong-testing-test_spec_proto",
+ "soong-testing-code_metadata_proto",
+ "soong-testing-code_metadata_internal_proto",
"golang-protobuf-proto",
],
srcs: [
diff --git a/tools/metadata/generator.go b/tools/metadata/generator.go
index bb8293a..d328876 100644
--- a/tools/metadata/generator.go
+++ b/tools/metadata/generator.go
@@ -10,6 +10,8 @@
"strings"
"sync"
+ "android/soong/testing/code_metadata_internal_proto"
+ "android/soong/testing/code_metadata_proto"
"android/soong/testing/test_spec_proto"
"google.golang.org/protobuf/proto"
)
@@ -23,6 +25,13 @@
return mutex.(*sync.Mutex)
}
+// Define a struct to hold the combination of team ID and multi-ownership flag for validation
+type sourceFileAttributes struct {
+ TeamID string
+ MultiOwnership bool
+ Path string
+}
+
func getSortedKeys(syncMap *sync.Map) []string {
var allKeys []string
syncMap.Range(
@@ -36,14 +45,9 @@
return allKeys
}
-func writeOutput(
- outputFile string,
- allMetadata []*test_spec_proto.TestSpec_OwnershipMetadata,
-) {
- testSpec := &test_spec_proto.TestSpec{
- OwnershipMetadataList: allMetadata,
- }
- data, err := proto.Marshal(testSpec)
+// writeProtoToFile marshals a protobuf message and writes it to a file
+func writeProtoToFile(outputFile string, message proto.Message) {
+ data, err := proto.Marshal(message)
if err != nil {
log.Fatal(err)
}
@@ -88,8 +92,8 @@
}
func processTestSpecProtobuf(
- filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
- errCh chan error, wg *sync.WaitGroup,
+ filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
+ errCh chan error, wg *sync.WaitGroup,
) {
defer wg.Done()
@@ -117,7 +121,7 @@
if metadata.GetTrendyTeamId() != existing.GetTrendyTeamId() {
errCh <- fmt.Errorf(
"Conflicting trendy team IDs found for %s at:\n%s with teamId"+
- ": %s,\n%s with teamId: %s",
+ ": %s,\n%s with teamId: %s",
key,
metadata.GetPath(), metadata.GetTrendyTeamId(), existing.GetPath(),
existing.GetTrendyTeamId(),
@@ -141,10 +145,86 @@
}
}
+// processCodeMetadataProtobuf processes CodeMetadata protobuf files
+func processCodeMetadataProtobuf(
+ filePath string, ownershipMetadataMap *sync.Map, sourceFileMetadataMap *sync.Map, keyLocks *keyToLocksMap,
+ errCh chan error, wg *sync.WaitGroup,
+) {
+ defer wg.Done()
+
+ fileContent := strings.TrimRight(readFileToString(filePath), "\n")
+ internalCodeData := code_metadata_internal_proto.CodeMetadataInternal{}
+ err := proto.Unmarshal([]byte(fileContent), &internalCodeData)
+ if err != nil {
+ errCh <- err
+ return
+ }
+
+ // Process each TargetOwnership entry
+ for _, internalMetadata := range internalCodeData.GetTargetOwnershipList() {
+ key := internalMetadata.GetTargetName()
+ lock := keyLocks.GetLockForKey(key)
+ lock.Lock()
+
+ for _, srcFile := range internalMetadata.GetSourceFiles() {
+ srcFileKey := srcFile
+ srcFileLock := keyLocks.GetLockForKey(srcFileKey)
+ srcFileLock.Lock()
+ attributes := sourceFileAttributes{
+ TeamID: internalMetadata.GetTrendyTeamId(),
+ MultiOwnership: internalMetadata.GetMultiOwnership(),
+ Path: internalMetadata.GetPath(),
+ }
+
+ existingAttributes, exists := sourceFileMetadataMap.Load(srcFileKey)
+ if exists {
+ existing := existingAttributes.(sourceFileAttributes)
+ if attributes.TeamID != existing.TeamID && (!attributes.MultiOwnership || !existing.MultiOwnership) {
+ errCh <- fmt.Errorf(
+ "Conflict found for source file %s covered at %s with team ID: %s. Existing team ID: %s and path: %s."+
+ " If multi-ownership is required, multiOwnership should be set to true in all test_spec modules using this target. "+
+ "Multiple-ownership in general is discouraged though as it make infrastructure around android relying on this information pick up a random value when it needs only one.",
+ srcFile, internalMetadata.GetPath(), attributes.TeamID, existing.TeamID, existing.Path,
+ )
+ srcFileLock.Unlock()
+ lock.Unlock()
+ return
+ }
+ } else {
+ // Store the metadata if no conflict
+ sourceFileMetadataMap.Store(srcFileKey, attributes)
+ }
+ srcFileLock.Unlock()
+ }
+
+ value, loaded := ownershipMetadataMap.LoadOrStore(
+ key, []*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership{internalMetadata},
+ )
+ if loaded {
+ existingMetadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
+ isDuplicate := false
+ for _, existing := range existingMetadata {
+ if internalMetadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && internalMetadata.GetPath() == existing.GetPath() {
+ isDuplicate = true
+ break
+ }
+ }
+ if !isDuplicate {
+ existingMetadata = append(existingMetadata, internalMetadata)
+ ownershipMetadataMap.Store(key, existingMetadata)
+ }
+ }
+
+ lock.Unlock()
+ }
+}
+
func main() {
inputFile := flag.String("inputFile", "", "Input file path")
outputFile := flag.String("outputFile", "", "Output file path")
- rule := flag.String("rule", "", "Metadata rule (Hint: test_spec or code_metadata)")
+ rule := flag.String(
+ "rule", "", "Metadata rule (Hint: test_spec or code_metadata)",
+ )
flag.Parse()
if *inputFile == "" || *outputFile == "" || *rule == "" {
@@ -153,7 +233,7 @@
}
inputFileData := strings.TrimRight(readFileToString(*inputFile), "\n")
- filePaths := strings.Split(inputFileData, "\n")
+ filePaths := strings.Split(inputFileData, " ")
if len(filePaths) == 1 && filePaths[0] == "" {
writeNewlineToOutputFile(*outputFile)
return
@@ -167,7 +247,9 @@
case "test_spec":
for _, filePath := range filePaths {
wg.Add(1)
- go processTestSpecProtobuf(filePath, ownershipMetadataMap, keyLocks, errCh, &wg)
+ go processTestSpecProtobuf(
+ filePath, ownershipMetadataMap, keyLocks, errCh, &wg,
+ )
}
wg.Wait()
@@ -186,9 +268,51 @@
allMetadata = append(allMetadata, metadataList...)
}
- writeOutput(*outputFile, allMetadata)
+ testSpec := &test_spec_proto.TestSpec{
+ OwnershipMetadataList: allMetadata,
+ }
+ writeProtoToFile(*outputFile, testSpec)
break
case "code_metadata":
+ sourceFileMetadataMap := &sync.Map{}
+ for _, filePath := range filePaths {
+ wg.Add(1)
+ go processCodeMetadataProtobuf(
+ filePath, ownershipMetadataMap, sourceFileMetadataMap, keyLocks, errCh, &wg,
+ )
+ }
+
+ wg.Wait()
+ close(errCh)
+
+ for err := range errCh {
+ log.Fatal(err)
+ }
+
+ sortedKeys := getSortedKeys(ownershipMetadataMap)
+ allMetadata := make([]*code_metadata_proto.CodeMetadata_TargetOwnership, 0)
+ for _, key := range sortedKeys {
+ value, _ := ownershipMetadataMap.Load(key)
+ metadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
+ for _, m := range metadata {
+ targetName := m.GetTargetName()
+ path := m.GetPath()
+ trendyTeamId := m.GetTrendyTeamId()
+
+ allMetadata = append(allMetadata, &code_metadata_proto.CodeMetadata_TargetOwnership{
+ TargetName: &targetName,
+ Path: &path,
+ TrendyTeamId: &trendyTeamId,
+ SourceFiles: m.GetSourceFiles(),
+ })
+ }
+ }
+
+ finalMetadata := &code_metadata_proto.CodeMetadata{
+ TargetOwnershipList: allMetadata,
+ }
+ writeProtoToFile(*outputFile, finalMetadata)
+ break
default:
log.Fatalf("No specific processing implemented for rule '%s'.\n", *rule)
}
diff --git a/tools/metadata/go.work b/tools/metadata/go.work
index 23875da..f2cdf8e 100644
--- a/tools/metadata/go.work
+++ b/tools/metadata/go.work
@@ -4,7 +4,8 @@
.
../../../../external/golang-protobuf
../../../soong/testing/test_spec_proto
-
+ ../../../soong/testing/code_metadata_proto
+ ../../../soong/testing/code_metadata_proto_internal
)
replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
diff --git a/tools/metadata/testdata/expectedCodeMetadataOutput.txt b/tools/metadata/testdata/expectedCodeMetadataOutput.txt
new file mode 100644
index 0000000..755cf40
--- /dev/null
+++ b/tools/metadata/testdata/expectedCodeMetadataOutput.txt
@@ -0,0 +1,7 @@
+
+
+bar
+Android.bp12346"b.java
+
+foo
+Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/file5.txt b/tools/metadata/testdata/file5.txt
new file mode 100644
index 0000000..d8de064
--- /dev/null
+++ b/tools/metadata/testdata/file5.txt
@@ -0,0 +1,4 @@
+
+
+foo
+Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file6.txt b/tools/metadata/testdata/file6.txt
new file mode 100644
index 0000000..9c7cdcd
--- /dev/null
+++ b/tools/metadata/testdata/file6.txt
@@ -0,0 +1,4 @@
+
+
+bar
+Android.bp12346"b.java
diff --git a/tools/metadata/testdata/file7.txt b/tools/metadata/testdata/file7.txt
new file mode 100644
index 0000000..d8de064
--- /dev/null
+++ b/tools/metadata/testdata/file7.txt
@@ -0,0 +1,4 @@
+
+
+foo
+Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file8.txt b/tools/metadata/testdata/file8.txt
new file mode 100644
index 0000000..a931690
--- /dev/null
+++ b/tools/metadata/testdata/file8.txt
@@ -0,0 +1,4 @@
+
+
+foo
+Android.gp12346"a.java
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutput.txt b/tools/metadata/testdata/generatedCodeMetadataOutput.txt
new file mode 100644
index 0000000..755cf40
--- /dev/null
+++ b/tools/metadata/testdata/generatedCodeMetadataOutput.txt
@@ -0,0 +1,7 @@
+
+
+bar
+Android.bp12346"b.java
+
+foo
+Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt b/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
new file mode 100644
index 0000000..755cf40
--- /dev/null
+++ b/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
@@ -0,0 +1,7 @@
+
+
+bar
+Android.bp12346"b.java
+
+foo
+Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadata.txt b/tools/metadata/testdata/inputCodeMetadata.txt
new file mode 100644
index 0000000..7a81b7d
--- /dev/null
+++ b/tools/metadata/testdata/inputCodeMetadata.txt
@@ -0,0 +1 @@
+file5.txt file6.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadataNegative.txt b/tools/metadata/testdata/inputCodeMetadataNegative.txt
new file mode 100644
index 0000000..26668e4
--- /dev/null
+++ b/tools/metadata/testdata/inputCodeMetadataNegative.txt
@@ -0,0 +1 @@
+file7.txt file8.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputFiles.txt b/tools/metadata/testdata/inputFiles.txt
index 61e6a8d..e44bc94 100644
--- a/tools/metadata/testdata/inputFiles.txt
+++ b/tools/metadata/testdata/inputFiles.txt
@@ -1,2 +1 @@
-file1.txt
-file2.txt
\ No newline at end of file
+file1.txt file2.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputFilesNegativeCase.txt b/tools/metadata/testdata/inputFilesNegativeCase.txt
index 17a9480..a37aa3f 100644
--- a/tools/metadata/testdata/inputFilesNegativeCase.txt
+++ b/tools/metadata/testdata/inputFilesNegativeCase.txt
@@ -1,2 +1 @@
-file3.txt
-file4.txt
\ No newline at end of file
+file3.txt file4.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/metadata_test.go b/tools/metadata/testdata/metadata_test.go
index 71856fe..314add3 100644
--- a/tools/metadata/testdata/metadata_test.go
+++ b/tools/metadata/testdata/metadata_test.go
@@ -87,3 +87,33 @@
t.Errorf("Generated file contents do not match the expected output")
}
}
+
+func TestCodeMetadata(t *testing.T) {
+ cmd := exec.Command(
+ "metadata", "-rule", "code_metadata", "-inputFile", "./inputCodeMetadata.txt", "-outputFile",
+ "./generatedCodeMetadataOutputFile.txt",
+ )
+ stderr, err := cmd.CombinedOutput()
+ if err != nil {
+ t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
+ }
+
+ // Read the contents of the expected output file
+ expectedOutput, err := ioutil.ReadFile("./expectedCodeMetadataOutput.txt")
+ if err != nil {
+ t.Fatalf("Error reading expected output file: %s", err)
+ }
+
+ // Read the contents of the generated output file
+ generatedOutput, err := ioutil.ReadFile("./generatedCodeMetadataOutputFile.txt")
+ if err != nil {
+ t.Fatalf("Error reading generated output file: %s", err)
+ }
+
+ fmt.Println()
+
+ // Compare the contents
+ if string(expectedOutput) != string(generatedOutput) {
+ t.Errorf("Generated file contents do not match the expected output")
+ }
+}
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index ee266b7..bd8ce14 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -333,6 +333,7 @@
srcs: [
"ota_utils.py",
"payload_signer.py",
+ "ota_signing_utils.py",
],
libs: [
"releasetools_common",
@@ -348,7 +349,6 @@
},
srcs: [
"merge_ota.py",
- "ota_signing_utils.py",
],
libs: [
"ota_metadata_proto",
@@ -501,7 +501,6 @@
name: "ota_from_raw_img",
srcs: [
"ota_from_raw_img.py",
- "ota_signing_utils.py",
],
main: "ota_from_raw_img.py",
defaults: [
@@ -552,6 +551,8 @@
defaults: ["releasetools_binary_defaults"],
srcs: [
"sign_target_files_apks.py",
+ "payload_signer.py",
+ "ota_signing_utils.py",
],
libs: [
"releasetools_add_img_to_target_files",
@@ -615,7 +616,6 @@
"sign_target_files_apks.py",
"validate_target_files.py",
"merge_ota.py",
- "ota_signing_utils.py",
":releasetools_merge_sources",
":releasetools_merge_tests",
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 34b7172..bde152f 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -437,6 +437,8 @@
sldc_flags = sldc_flags_str.split()
build_command.append(str(len(sldc_flags)))
build_command.extend(sldc_flags)
+ f2fs_blocksize = prop_dict.get("f2fs_blocksize", "4096")
+ build_command.extend(["-b", f2fs_blocksize])
else:
raise BuildImageError(
"Error: unknown filesystem type: {}".format(fs_type))
@@ -721,6 +723,7 @@
"system_f2fs_compress",
"system_f2fs_sldc_flags",
"f2fs_sparse_flag",
+ "f2fs_blocksize",
"skip_fsck",
"ext_mkuserimg",
"avb_enable",
@@ -770,6 +773,7 @@
(True, "{}_extfs_inode_count", "extfs_inode_count"),
(True, "{}_f2fs_compress", "f2fs_compress"),
(True, "{}_f2fs_sldc_flags", "f2fs_sldc_flags"),
+    (True, "{}_f2fs_blocksize", "f2fs_blocksize"),
(True, "{}_reserved_size", "partition_reserved_size"),
(True, "{}_squashfs_block_size", "squashfs_block_size"),
(True, "{}_squashfs_compressor", "squashfs_compressor"),
@@ -817,7 +821,6 @@
d["mount_point"] = mount_point
if mount_point == "system":
copy_prop("system_headroom", "partition_headroom")
- copy_prop("system_root_image", "system_root_image")
copy_prop("root_dir", "root_dir")
copy_prop("root_fs_config", "root_fs_config")
elif mount_point == "data":
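
For context on the new f2fs block-size plumbing above, here is a minimal sketch of the property dictionary consumed by build_image.py. The property names mirror the diff; the values and mount point are illustrative assumptions only.

# Illustrative prop_dict for an f2fs image; "f2fs_blocksize" falls back to
# "4096" when unset and is forwarded to the f2fs build command as "-b <size>".
prop_dict = {
    "fs_type": "f2fs",
    "mount_point": "data",
    "partition_size": str(2 * 1024 * 1024 * 1024),
    "f2fs_blocksize": "16384",
}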
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 33624f5..d31f87e 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -218,12 +218,12 @@
2. invoke apexd_host with vendor APEXes.
"""
- apex_dir = os.path.join(inp, 'APEX')
+ apex_dir = common.MakeTempDir('APEX')
# checkvintf needs /apex dirmap
dirmap['/apex'] = apex_dir
# Always create /apex directory for dirmap
- os.makedirs(apex_dir)
+ os.makedirs(apex_dir, exist_ok=True)
# Invoke apexd_host to activate vendor APEXes for checkvintf
apex_host = os.path.join(OPTIONS.search_path, 'bin', 'apexd_host')
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 8ce6083..7451ccc 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1156,8 +1156,7 @@
return self.build_props.get(prop)
-def LoadRecoveryFSTab(read_helper, fstab_version, recovery_fstab_path,
- system_root_image=False):
+def LoadRecoveryFSTab(read_helper, fstab_version, recovery_fstab_path):
class Partition(object):
def __init__(self, mount_point, fs_type, device, length, context, slotselect):
self.mount_point = mount_point
@@ -1216,12 +1215,6 @@
device=pieces[0], length=length, context=context,
slotselect=slotselect)
- # / is used for the system mount point when the root directory is included in
- # system. Other areas assume system is always at "/system" so point /system
- # at /.
- if system_root_image:
- assert '/system' not in d and '/' in d
- d["/system"] = d["/"]
return d
@@ -1237,22 +1230,19 @@
# ../RAMDISK/system/etc/recovery.fstab. This function has to handle both
# cases, since it may load the info_dict from an old build (e.g. when
# generating incremental OTAs from that build).
- system_root_image = info_dict.get('system_root_image') == 'true'
if info_dict.get('no_recovery') != 'true':
recovery_fstab_path = 'RECOVERY/RAMDISK/system/etc/recovery.fstab'
if not DoesInputFileContain(input_file, recovery_fstab_path):
recovery_fstab_path = 'RECOVERY/RAMDISK/etc/recovery.fstab'
return LoadRecoveryFSTab(
- read_helper, info_dict['fstab_version'], recovery_fstab_path,
- system_root_image)
+ read_helper, info_dict['fstab_version'], recovery_fstab_path)
if info_dict.get('recovery_as_boot') == 'true':
recovery_fstab_path = 'BOOT/RAMDISK/system/etc/recovery.fstab'
if not DoesInputFileContain(input_file, recovery_fstab_path):
recovery_fstab_path = 'BOOT/RAMDISK/etc/recovery.fstab'
return LoadRecoveryFSTab(
- read_helper, info_dict['fstab_version'], recovery_fstab_path,
- system_root_image)
+ read_helper, info_dict['fstab_version'], recovery_fstab_path)
return None
@@ -1980,11 +1970,6 @@
if info_dict.get("gki_boot_image_without_ramdisk") == "true":
return False # A GKI boot.img has no ramdisk since Android-13.
- if info_dict.get("system_root_image") == "true":
- # The ramdisk content is merged into the system.img, so there is NO
- # ramdisk in the boot.img or boot-<kernel version>.img.
- return False
-
if info_dict.get("init_boot") == "true":
# The ramdisk is moved to the init_boot.img, so there is NO
# ramdisk in the boot.img or boot-<kernel version>.img.
@@ -3120,6 +3105,34 @@
zip_file.writestr(zinfo, data)
zipfile.ZIP64_LIMIT = saved_zip64_limit
+def ZipExclude(input_zip, output_zip, entries, force=False):
+ """Deletes entries from a ZIP file.
+
+ Args:
+ zip_filename: The name of the ZIP file.
+ entries: The name of the entry, or the list of names to be deleted.
+ """
+ if isinstance(entries, str):
+ entries = [entries]
+  # If the entry list is empty, just copy the input to the output as-is.
+ if not entries:
+ shutil.copy(input_zip, output_zip)
+ return
+
+ with zipfile.ZipFile(input_zip, 'r') as zin:
+ if not force and len(set(zin.namelist()).intersection(entries)) == 0:
+ raise ExternalError(
+ "Failed to delete zip entries, name not matched: %s" % entries)
+
+ fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(input_zip))
+ os.close(fd)
+ cmd = ["zip2zip", "-i", input_zip, "-o", new_zipfile]
+ for entry in entries:
+ cmd.append("-x")
+ cmd.append(entry)
+ RunAndCheckOutput(cmd)
+ os.replace(new_zipfile, output_zip)
+
def ZipDelete(zip_filename, entries, force=False):
"""Deletes entries from a ZIP file.
@@ -3134,20 +3147,7 @@
if not entries:
return
- with zipfile.ZipFile(zip_filename, 'r') as zin:
- if not force and len(set(zin.namelist()).intersection(entries)) == 0:
- raise ExternalError(
- "Failed to delete zip entries, name not matched: %s" % entries)
-
- fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(zip_filename))
- os.close(fd)
- cmd = ["zip2zip", "-i", zip_filename, "-o", new_zipfile]
- for entry in entries:
- cmd.append("-x")
- cmd.append(entry)
- RunAndCheckOutput(cmd)
-
- os.replace(new_zipfile, zip_filename)
+ ZipExclude(zip_filename, zip_filename, entries, force)
def ZipClose(zip_file):
@@ -3853,14 +3853,11 @@
output_sink(recovery_img_path, recovery_img.data)
else:
- system_root_image = info_dict.get("system_root_image") == "true"
include_recovery_dtbo = info_dict.get("include_recovery_dtbo") == "true"
include_recovery_acpio = info_dict.get("include_recovery_acpio") == "true"
path = os.path.join(input_dir, recovery_resource_dat_path)
- # With system-root-image, boot and recovery images will have mismatching
- # entries (only recovery has the ramdisk entry) (Bug: 72731506). Use bsdiff
- # to handle such a case.
- if system_root_image or include_recovery_dtbo or include_recovery_acpio:
+ # Use bsdiff to handle mismatching entries (Bug: 72731506)
+ if include_recovery_dtbo or include_recovery_acpio:
diff_program = ["bsdiff"]
bonus_args = ""
assert not os.path.exists(path)
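
The new common.ZipExclude above copies an archive while dropping selected entries, leaving the input untouched; ZipDelete now delegates to it for in-place deletion. A small usage sketch follows; the file and entry names are placeholders.

import common

# Copy a package without its payload entries; the input zip is left unmodified.
common.ZipExclude("ota_in.zip", "ota_stripped.zip",
                  ["payload.bin", "payload_properties.txt"])

# In-place deletion keeps the old ZipDelete interface; force=True suppresses
# the error when none of the entries exist in the archive.
common.ZipDelete("target_files.zip", "META/care_map.pb", force=True)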
diff --git a/tools/releasetools/create_brick_ota.py b/tools/releasetools/create_brick_ota.py
index 44f0a95..f290323 100644
--- a/tools/releasetools/create_brick_ota.py
+++ b/tools/releasetools/create_brick_ota.py
@@ -59,9 +59,9 @@
parser.add_argument('otafile', metavar='PAYLOAD', type=str,
help='The output OTA package file.')
parser.add_argument('--product', type=str,
- help='The product name of the device, for example, bramble, redfin. This can be a comma separated list.', required=True)
+ help='The product name of the device, for example, bramble, redfin.', required=True)
parser.add_argument('--serialno', type=str,
- help='The serial number of devices that are allowed to install this OTA package. This can be a comma separated list.')
+ help='The serial number of devices that are allowed to install this OTA package. This can be a "|"-separated list.')
parser.add_argument('--extra_wipe_partitions', type=str,
help='Additional partitions on device which should be wiped.')
parser.add_argument('-v', action="store_true",
diff --git a/tools/releasetools/merge/merge_builds.py b/tools/releasetools/merge/merge_builds.py
index 3ac4ec4..032278c 100644
--- a/tools/releasetools/merge/merge_builds.py
+++ b/tools/releasetools/merge/merge_builds.py
@@ -47,6 +47,10 @@
The optional path to a newline-separated config file containing keys to
obtain from the framework instance of misc_info.txt, used for creating
vbmeta.img. The remaining keys come from the vendor instance.
+
+ --avb_resolve_rollback_index_location_conflict
+    If provided, resolve conflicting AVB rollback index locations when
+ necessary.
"""
from __future__ import print_function
@@ -65,6 +69,7 @@
OPTIONS.product_out_vendor = None
OPTIONS.build_vbmeta = False
OPTIONS.framework_misc_info_keys = None
+OPTIONS.avb_resolve_rollback_index_location_conflict = False
def CreateImageSymlinks():
@@ -140,7 +145,8 @@
output_vbmeta_path = os.path.join(OPTIONS.product_out_vendor, "vbmeta.img")
OPTIONS.info_dict = merged_dict
common.BuildVBMeta(output_vbmeta_path, partitions, "vbmeta",
- vbmeta_partitions)
+ vbmeta_partitions,
+ OPTIONS.avb_resolve_rollback_index_location_conflict)
def MergeBuilds():
@@ -164,6 +170,8 @@
OPTIONS.build_vbmeta = True
elif o == "--framework_misc_info_keys":
OPTIONS.framework_misc_info_keys = a
+ elif o == "--avb_resolve_rollback_index_location_conflict":
+ OPTIONS.avb_resolve_rollback_index_location_conflict = True
else:
return False
return True
@@ -177,6 +185,7 @@
"product_out_vendor=",
"build_vbmeta",
"framework_misc_info_keys=",
+ "avb_resolve_rollback_index_location_conflict"
],
extra_option_handler=option_handler)
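
A sketch of the call that the new --avb_resolve_rollback_index_location_conflict flag feeds into. The partition map and paths below are made-up assumptions; the final positional argument is the new flag, matching the call added in the diff above.

import common

# Illustrative inputs; in merge_builds.py these come from the framework and
# vendor build outputs.
partitions = {"system": "out/IMAGES/system.img",
              "vendor": "out/IMAGES/vendor.img"}
vbmeta_partitions = ["system", "vendor"]

# When the last argument is True, conflicting AVB rollback index locations are
# reassigned instead of failing vbmeta generation.
common.BuildVBMeta("out/IMAGES/vbmeta.img", partitions, "vbmeta",
                   vbmeta_partitions, True)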
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index 6bf1b49..4619246 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -46,6 +46,10 @@
The optional path to a newline-separated config file of items that
are extracted as-is from the vendor target files package.
+ --boot-image-dir-path
+    The input boot image directory path. This directory is expected to
+    contain an IMAGES/boot.img file.
+
--output-target-files output-target-files-package
If provided, the output merged target files package. Also a zip archive.
@@ -136,6 +140,7 @@
OPTIONS.framework_misc_info_keys = []
OPTIONS.vendor_target_files = None
OPTIONS.vendor_item_list = []
+OPTIONS.boot_image_dir_path = None
OPTIONS.output_target_files = None
OPTIONS.output_dir = None
OPTIONS.output_item_list = []
@@ -210,6 +215,12 @@
output_dir=output_target_files_temp_dir,
item_list=OPTIONS.vendor_item_list)
+ if OPTIONS.boot_image_dir_path:
+ merge_utils.CollectTargetFiles(
+ input_zipfile_or_dir=OPTIONS.boot_image_dir_path,
+ output_dir=output_target_files_temp_dir,
+ item_list=['IMAGES/boot.img'])
+
# Perform special case processing on META/* items.
# After this function completes successfully, all the files we need to create
# the output target files package are in place.
@@ -539,6 +550,8 @@
OPTIONS.vendor_item_list = a
elif o == '--vendor-item-list':
OPTIONS.vendor_item_list = a
+ elif o == '--boot-image-dir-path':
+ OPTIONS.boot_image_dir_path = a
elif o == '--output-target-files':
OPTIONS.output_target_files = a
elif o == '--output-dir':
@@ -587,6 +600,7 @@
'vendor-target-files=',
'other-item-list=',
'vendor-item-list=',
+ 'boot-image-dir-path=',
'output-target-files=',
'output-dir=',
'output-item-list=',
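
A sketch of how the new --boot-image-dir-path input is folded into the merged package, mirroring the CollectTargetFiles call added above; the directory and output paths are placeholders.

import merge_utils

# Only IMAGES/boot.img is copied from the extra input into the temporary
# merged target-files directory.
merge_utils.CollectTargetFiles(
    input_zipfile_or_dir="/path/to/boot_image_dir",
    output_dir="/tmp/merged_target_files",
    item_list=["IMAGES/boot.img"])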
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 0a6ff39..ddd2d36 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -27,7 +27,8 @@
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
GetRamdiskFormat, ParseUpdateEngineConfig)
-from payload_signer import PayloadSigner
+import payload_signer
+from payload_signer import PayloadSigner, AddSigningArgumentParse, GeneratePayloadProperties
logger = logging.getLogger(__name__)
@@ -785,8 +786,8 @@
class PayloadGenerator(object):
"""Manages the creation and the signing of an A/B OTA Payload."""
- PAYLOAD_BIN = 'payload.bin'
- PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
+ PAYLOAD_BIN = payload_signer.PAYLOAD_BIN
+ PAYLOAD_PROPERTIES_TXT = payload_signer.PAYLOAD_PROPERTIES_TXT
SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
@@ -905,12 +906,7 @@
"""
assert self.payload_file is not None
# 4. Dump the signed payload properties.
- properties_file = common.MakeTempFile(prefix="payload-properties-",
- suffix=".txt")
- cmd = ["delta_generator",
- "--in_file=" + self.payload_file,
- "--properties_file=" + properties_file]
- self._Run(cmd)
+ properties_file = GeneratePayloadProperties(self.payload_file)
with open(properties_file, "a") as f:
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
index a5d09e1..e85d64c 100644
--- a/tools/releasetools/payload_signer.py
+++ b/tools/releasetools/payload_signer.py
@@ -17,7 +17,12 @@
import common
import logging
import shlex
+import argparse
+import tempfile
+import zipfile
+import shutil
from common import OPTIONS, OptionHandler
+from ota_signing_utils import AddSigningArgumentParse
logger = logging.getLogger(__name__)
@@ -26,6 +31,8 @@
OPTIONS.payload_signer_maximum_signature_size = None
OPTIONS.package_key = None
+PAYLOAD_BIN = 'payload.bin'
+PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
class SignerOptions(OptionHandler):
@@ -165,3 +172,52 @@
cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
common.RunAndCheckOutput(cmd)
return out_file
+
+def GeneratePayloadProperties(payload_file):
+ properties_file = common.MakeTempFile(prefix="payload-properties-",
+ suffix=".txt")
+ cmd = ["delta_generator",
+ "--in_file=" + payload_file,
+ "--properties_file=" + properties_file]
+ common.RunAndCheckOutput(cmd)
+ return properties_file
+
+def SignOtaPackage(input_path, output_path):
+ payload_signer = PayloadSigner(
+ OPTIONS.package_key, OPTIONS.private_key_suffix,
+ None, OPTIONS.payload_signer, OPTIONS.payload_signer_args)
+ common.ZipExclude(input_path, output_path, [PAYLOAD_BIN, PAYLOAD_PROPERTIES_TXT])
+ with tempfile.NamedTemporaryFile() as unsigned_payload, zipfile.ZipFile(input_path, "r", allowZip64=True) as zfp:
+    with zfp.open(PAYLOAD_BIN) as payload_fp:
+      shutil.copyfileobj(payload_fp, unsigned_payload)
+      # Flush so the signer reads the complete payload from disk by file name.
+      unsigned_payload.flush()
+ signed_payload = payload_signer.SignPayload(unsigned_payload.name)
+ properties_file = GeneratePayloadProperties(signed_payload)
+ with zipfile.ZipFile(output_path, "a", compression=zipfile.ZIP_STORED, allowZip64=True) as output_zfp:
+ common.ZipWrite(output_zfp, signed_payload, PAYLOAD_BIN)
+ common.ZipWrite(output_zfp, properties_file, PAYLOAD_PROPERTIES_TXT)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+      prog=argv[0], description="Given an A/B OTA package, re-signs the payload and payload properties with the given key")
+ parser.add_argument("input_ota", type=str,
+ help="Input OTA for signing")
+ parser.add_argument('output_ota', type=str,
+ help='Output OTA for the signed package')
+ parser.add_argument("-v", action="store_true",
+ help="Enable verbose logging", dest="verbose")
+ AddSigningArgumentParse(parser)
+ args = parser.parse_args(argv[1:])
+ input_ota = args.input_ota
+ output_ota = args.output_ota
+ if args.verbose:
+ OPTIONS.verbose = True
+ common.InitLogging()
+ if args.package_key:
+ OPTIONS.package_key = args.package_key
+ logger.info("Re-signing OTA package {}".format(input_ota))
+ SignOtaPackage(input_ota, output_ota)
+
+if __name__ == "__main__":
+ import sys
+ main(sys.argv)
\ No newline at end of file
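
With the additions above, payload_signer.py can also be driven programmatically. A minimal sketch, assuming the default openssl-based signer, the module-level option defaults, and a test key pair (testkey.pk8 / testkey.x509.pem) at the path shown; the OTA file names are placeholders.

import common
import payload_signer

# Equivalent to running payload_signer.py with -k on the command line; the
# default signer invokes openssl pkeyutl with the package private key.
common.OPTIONS.package_key = "build/make/target/product/security/testkey"
payload_signer.SignOtaPackage("ota_in.zip", "ota_resigned.zip")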
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 2b45825..4356394 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -83,9 +83,8 @@
--replace_verity_public_key <key>
Replace the certificate (public key) used for verity verification. The
- key file replaces the one at BOOT/RAMDISK/verity_key (or ROOT/verity_key
- for devices using system_root_image). It expects the key filename WITH
- the extension (e.g. verity_key.pub).
+ key file replaces the one at BOOT/RAMDISK/verity_key. It expects the key
+ filename WITH the extension (e.g. verity_key.pub).
--replace_verity_keyid <path_to_X509_PEM_cert_file>
Replace the veritykeyid in BOOT/cmdline of input_target_file_zip
@@ -147,6 +146,34 @@
--override_apex_keys <path>
Replace all APEX keys with this private key
+
+ -k (--package_key) <key>
+ Key to use to sign the package (default is the value of
+ default_system_dev_certificate from the input target-files's
+ META/misc_info.txt, or "build/make/target/product/security/testkey" if
+ that value is not specified).
+
+ For incremental OTAs, the default value is based on the source
+ target-file, not the target build.
+
+ --payload_signer <signer>
+ Specify the signer when signing the payload and metadata for A/B OTAs.
+ By default (i.e. without this flag), it calls 'openssl pkeyutl' to sign
+ with the package private key. If the private key cannot be accessed
+ directly, a payload signer that knows how to do that should be specified.
+ The signer will be supplied with "-inkey <path_to_key>",
+ "-in <input_file>" and "-out <output_file>" parameters.
+
+ --payload_signer_args <args>
+ Specify the arguments needed for payload signer.
+
+ --payload_signer_maximum_signature_size <signature_size>
+ The maximum signature size (in bytes) that would be generated by the given
+    payload signer. Only meaningful when a custom payload signer is specified
+ via '--payload_signer'.
+    If the signer uses an RSA key, this should be the number of bytes to
+ represent the modulus. If it uses an EC key, this is the size of a
+ DER-encoded ECDSA signature.
"""
from __future__ import print_function
@@ -162,7 +189,6 @@
import re
import shutil
import stat
-import subprocess
import sys
import tempfile
import zipfile
@@ -171,6 +197,8 @@
import add_img_to_target_files
import apex_utils
import common
+import payload_signer
+from payload_signer import SignOtaPackage, PAYLOAD_BIN
if sys.hexversion < 0x02070000:
@@ -241,6 +269,20 @@
return filename.endswith(".apex") or filename.endswith(".capex")
+def IsOtaPackage(fp):
+ with zipfile.ZipFile(fp) as zfp:
+    if PAYLOAD_BIN not in zfp.namelist():
+ return False
+ with zfp.open(PAYLOAD_BIN, "r") as payload:
+ magic = payload.read(4)
+ return magic == b"CrAU"
+
+
+def IsEntryOtaPackage(input_zip, filename):
+ with input_zip.open(filename, "r") as fp:
+ return IsOtaPackage(fp)
+
+
def GetApexFilename(filename):
name = os.path.basename(filename)
# Replace the suffix for compressed apex
@@ -515,6 +557,7 @@
return data
+
def IsBuildPropFile(filename):
return filename in (
"SYSTEM/etc/prop.default",
@@ -541,7 +584,7 @@
filename.endswith("/prop.default")
-def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
+def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info,
apk_keys, apex_keys, key_passwords,
platform_api_level, codename_to_api_level_map,
compressed_extension):
@@ -555,8 +598,6 @@
# Sets this to zero for targets without APK files, e.g., gki_arm64.
maxsize = 0
- system_root_image = misc_info.get("system_root_image") == "true"
-
for info in input_tf_zip.infolist():
filename = info.filename
if filename.startswith("IMAGES/"):
@@ -631,6 +672,15 @@
" (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
+ elif filename.endswith(".zip") and IsEntryOtaPackage(input_tf_zip, filename):
+ logger.info("Re-signing OTA package {}".format(filename))
+ with tempfile.NamedTemporaryFile() as input_ota, tempfile.NamedTemporaryFile() as output_ota:
+ with input_tf_zip.open(filename, "r") as in_fp:
+ shutil.copyfileobj(in_fp, input_ota)
+ input_ota.flush()
+ SignOtaPackage(input_ota.name, output_ota.name)
+ common.ZipWrite(output_tf_zip, output_ota.name, filename,
+ compress_type=zipfile.ZIP_STORED)
# System properties.
elif IsBuildPropFile(filename):
print("Rewriting %s:" % (filename,))
@@ -1504,7 +1554,7 @@
"override_apk_keys=",
"override_apex_keys=",
],
- extra_option_handler=option_handler)
+ extra_option_handler=[option_handler, payload_signer.signer_options])
if len(args) != 2:
common.Usage(__doc__)
@@ -1518,6 +1568,10 @@
allowZip64=True)
misc_info = common.LoadInfoDict(input_zip)
+ if OPTIONS.package_key is None:
+ OPTIONS.package_key = misc_info.get(
+ "default_system_dev_certificate",
+ "build/make/target/product/security/testkey")
BuildKeyMap(misc_info, key_mapping_options)
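
The --payload_signer contract documented in the help text above expects an external program invoked with -inkey, -in and -out. The following is only an illustrative conforming signer (the script name is hypothetical), a thin wrapper around openssl pkeyutl shown to demonstrate the calling convention, not the project's own tooling.

#!/usr/bin/env python3
# my_payload_signer.py: illustrative external payload signer.
import argparse
import subprocess

parser = argparse.ArgumentParser()
parser.add_argument("-inkey", required=True)              # private key path
parser.add_argument("-in", dest="infile", required=True)  # data to sign
parser.add_argument("-out", dest="outfile", required=True)
args = parser.parse_args()

# Sign the payload hash the same way the default openssl-based signer does.
subprocess.check_call([
    "openssl", "pkeyutl", "-sign", "-inkey", args.inkey,
    "-pkeyopt", "digest:sha256",
    "-in", args.infile, "-out", args.outfile])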
diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py
index cfae7a5..d4f7ccc 100644
--- a/tools/releasetools/test_build_image.py
+++ b/tools/releasetools/test_build_image.py
@@ -99,11 +99,10 @@
}
self.assertRaises(BuildImageError, CheckHeadroom, ext4fs_output, prop_dict)
- def test_SetUpInDirAndFsConfig_SystemRootImageTrue_NonSystem(self):
+ def test_SetUpInDirAndFsConfig_NonSystem(self):
prop_dict = {
'fs_config': 'fs-config',
'mount_point': 'vendor',
- 'system_root_image': 'true',
}
in_dir, fs_config = SetUpInDirAndFsConfig('/path/to/in_dir', prop_dict)
self.assertEqual('/path/to/in_dir', in_dir)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 14f0e88..9b2e667 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1348,7 +1348,6 @@
INFO_DICT_DEFAULT = {
'recovery_api_version': 3,
'fstab_version': 2,
- 'system_root_image': 'true',
'no_recovery': 'true',
'recovery_as_boot': 'true',
}
@@ -1377,14 +1376,8 @@
info_values = ''.join(
['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
-
- FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
- if info_dict.get('system_root_image') == 'true':
- fstab_values = FSTAB_TEMPLATE.format('/')
- else:
- fstab_values = FSTAB_TEMPLATE.format('/system')
- common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)
-
+ common.ZipWriteStr(target_files_zip, fstab_path,
+ "/dev/block/system /system ext4 ro,barrier=1 defaults")
common.ZipWriteStr(
target_files_zip, 'META/file_contexts', 'file-contexts')
return target_files
@@ -1397,7 +1390,6 @@
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
def test_LoadInfoDict_legacyRecoveryFstabPath(self):
@@ -1408,7 +1400,6 @@
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1420,7 +1411,6 @@
loaded_dict = common.LoadInfoDict(unzipped)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1432,15 +1422,11 @@
loaded_dict = common.LoadInfoDict(unzipped)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
- def test_LoadInfoDict_systemRootImageFalse(self):
- # Devices not using system-as-root nor recovery-as-boot. Non-A/B devices
- # launched prior to P will likely have this config.
+ def test_LoadInfoDict_recoveryAsBootFalse(self):
info_dict = copy.copy(self.INFO_DICT_DEFAULT)
del info_dict['no_recovery']
- del info_dict['system_root_image']
del info_dict['recovery_as_boot']
target_files = self._test_LoadInfoDict_createTargetFiles(
info_dict,
@@ -1452,22 +1438,6 @@
self.assertNotIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
- def test_LoadInfoDict_recoveryAsBootFalse(self):
- # Devices using system-as-root, but with standalone recovery image. Non-A/B
- # devices launched since P will likely have this config.
- info_dict = copy.copy(self.INFO_DICT_DEFAULT)
- del info_dict['no_recovery']
- del info_dict['recovery_as_boot']
- target_files = self._test_LoadInfoDict_createTargetFiles(
- info_dict,
- 'RECOVERY/RAMDISK/system/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
- loaded_dict = common.LoadInfoDict(target_files_zip)
- self.assertEqual(3, loaded_dict['recovery_api_version'])
- self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
- self.assertIn('/system', loaded_dict['fstab'])
-
def test_LoadInfoDict_noRecoveryTrue(self):
# Device doesn't have a recovery partition at all.
info_dict = copy.copy(self.INFO_DICT_DEFAULT)
@@ -1499,7 +1469,6 @@
loaded_dict = common.LoadInfoDict(unzipped, True)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
self.assertEqual(
os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index 48b563d..4d4b9e5 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -156,7 +156,6 @@
verity_key_mincrypt)
info_dict = {
- 'system_root_image' : 'true',
'verity' : 'true',
}
options = {
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 82b3107..8da4fa2 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -361,18 +361,15 @@
"Mismatching mincrypt verity key files"
logging.info('Verified the content of /verity_key')
- # For devices with a separate ramdisk (i.e. non-system-as-root), there must
- # be a copy in ramdisk.
- if info_dict.get("system_root_image") != "true":
- verity_key_ramdisk = os.path.join(
- input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
- assert os.path.exists(
- verity_key_ramdisk), 'Missing verity_key in ramdisk'
+ verity_key_ramdisk = os.path.join(
+ input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
+ assert os.path.exists(
+ verity_key_ramdisk), 'Missing verity_key in ramdisk'
- assert filecmp.cmp(
- verity_key_mincrypt, verity_key_ramdisk, shallow=False), \
- 'Mismatching verity_key files in root and ramdisk'
- logging.info('Verified the content of /verity_key in ramdisk')
+ assert filecmp.cmp(
+ verity_key_mincrypt, verity_key_ramdisk, shallow=False), \
+ 'Mismatching verity_key files in root and ramdisk'
+ logging.info('Verified the content of /verity_key in ramdisk')
# Then verify the verity signed system/vendor/product images, against the
# verity pubkey in mincrypt format.