Merge "Revert "Allow to override fastboot-info.txt for specific board"" into main
diff --git a/OWNERS b/OWNERS
index 97fda40..bd049e9 100644
--- a/OWNERS
+++ b/OWNERS
@@ -2,6 +2,6 @@
# Since this file affects all Android developers, lock it down. There is still
# round the world timzeone coverage.
-per-file envsetup.sh = joeo@google.com, jingwen@google.com, lberki@google.com
-per-file shell_utils.sh = joeo@google.com, jingwen@google.com, lberki@google.com
+per-file envsetup.sh = joeo@google.com, jingwen@google.com
+per-file shell_utils.sh = joeo@google.com, jingwen@google.com
diff --git a/core/Makefile b/core/Makefile
index 845f7c8..7d7457e 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -3781,7 +3781,7 @@
ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_dlkm_lib_modules_symlink)
endif
-# Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and STUB_LIBRARIES
+# Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and SOONG_STUB_VENDOR_LIBRARIES
vendor_linker_config_file := $(TARGET_OUT_VENDOR)/etc/linker.config.pb
$(vendor_linker_config_file): private_linker_config_fragments := $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)
$(vendor_linker_config_file): $(INTERNAL_VENDORIMAGE_FILES) $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
@@ -3792,7 +3792,7 @@
--source $(call normalize-path-list,$(private_linker_config_fragments)) \
--output $@
$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $@ \
- --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT_VENDOR)"
+ --output $@ --value "$(SOONG_STUB_VENDOR_LIBRARIES)" --system "$(TARGET_OUT_VENDOR)"
$(call define declare-0p-target,$(vendor_linker_config_file),)
INTERNAL_VENDORIMAGE_FILES += $(vendor_linker_config_file)
ALL_DEFAULT_INSTALLED_MODULES += $(vendor_linker_config_file)
@@ -4600,6 +4600,12 @@
--prop com.android.build.pvmfw.security_patch:$(PVMFW_SECURITY_PATCH)
endif
+# Append avbpubkey of microdroid-vendor partition into vendor_boot partition.
+ifdef MICRODROID_VENDOR_AVBKEY
+BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS += \
+ --prop_from_file com.android.build.microdroid-vendor.avbpubkey:$(MICRODROID_VENDOR_AVBKEY)
+endif
+
BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
INIT_BOOT_FOOTER_ARGS := BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS
VENDOR_BOOT_FOOTER_ARGS := BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index c74aa49..6af6f08 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -72,9 +72,9 @@
ifneq (,$(MODULE_BUILD_FROM_SOURCE))
# Keep an explicit setting.
-else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
+else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES))$(findstring com.google.android.go.conscrypt,$(PRODUCT_PACKAGES)))
# Prebuilt module SDKs require prebuilt modules to work, and currently
- # prebuilt modules are only provided for com.google.android.xxx. If we can't
+ # prebuilt modules are only provided for com.google.android(.go)?.xxx. If we can't
# find one of them in PRODUCT_PACKAGES then assume com.android.xxx are in use,
# and disable prebuilt SDKs. In particular this applies to AOSP builds.
#
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 8236dc9..f533358 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -670,8 +670,14 @@
copy_test_data_pairs :=
-my_installed_test_data := $(call copy-many-files,$(my_test_data_pairs))
-$(LOCAL_INSTALLED_MODULE): $(my_installed_test_data)
+ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ my_installed_test_data := $(call copy-many-files,$(my_test_data_pairs))
+ $(LOCAL_INSTALLED_MODULE): $(my_installed_test_data)
+else
+ # Skip installing test data for Soong modules, it's already been handled.
+ # Just compute my_installed_test_data.
+ my_installed_test_data := $(foreach f, $(my_test_data_pairs), $(call word-colon,2,$(f)))
+endif
endif
endif
@@ -1017,15 +1023,14 @@
ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES := \
$(ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES)
-ifdef LOCAL_TEST_DATA
+ifneq ($(my_test_data_file_pairs),)
# Export the list of targets that are handled as data inputs and required
- # by tests at runtime. The LOCAL_TEST_DATA format is generated from below
- # https://cs.android.com/android/platform/superproject/+/master:build/soong/android/androidmk.go;l=925-944;drc=master
- # which format is like $(path):$(relative_file) but for module-info, only
- # the string after ":" is needed.
+ # by tests at runtime. The format of my_test_data_file_pairs is
+ # $(path):$(relative_file) but for module-info, only the string after
+ # ":" is needed.
ALL_MODULES.$(my_register_name).TEST_DATA := \
$(strip $(ALL_MODULES.$(my_register_name).TEST_DATA) \
- $(foreach f, $(LOCAL_TEST_DATA),\
+ $(foreach f, $(my_test_data_file_pairs),\
$(call word-colon,2,$(f))))
endif
diff --git a/core/binary.mk b/core/binary.mk
index 4c68ba7..8c107bd 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -289,25 +289,20 @@
endif
ifneq ($(LOCAL_USE_VNDK),)
- # Required VNDK version for vendor modules is BOARD_VNDK_VERSION.
- my_api_level := $(BOARD_VNDK_VERSION)
- ifeq ($(my_api_level),current)
- # Build with current PLATFORM_VNDK_VERSION.
- # If PLATFORM_VNDK_VERSION has a CODENAME, it will return
- # __ANDROID_API_FUTURE__.
- my_api_level := $(call codename-or-sdk-to-sdk,$(PLATFORM_VNDK_VERSION))
- else
- # Build with current BOARD_VNDK_VERSION.
- my_api_level := $(call codename-or-sdk-to-sdk,$(BOARD_VNDK_VERSION))
- endif
my_cflags += -D__ANDROID_VNDK__
ifneq ($(LOCAL_USE_VNDK_VENDOR),)
- # Vendor modules have LOCAL_USE_VNDK_VENDOR when
- # BOARD_VNDK_VERSION is defined.
+ # Vendor modules have LOCAL_USE_VNDK_VENDOR
my_cflags += -D__ANDROID_VENDOR__
+
+ ifeq ($(BOARD_API_LEVEL),)
+ # TODO(b/314036847): This is a fallback for UDC targets.
+ # This must be a build failure when UDC is no longer built from this source tree.
+ my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION)
+ else
+ my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL)
+ endif
else ifneq ($(LOCAL_USE_VNDK_PRODUCT),)
- # Product modules have LOCAL_USE_VNDK_PRODUCT when
- # PRODUCT_PRODUCT_VNDK_VERSION is defined.
+ # Product modules have LOCAL_USE_VNDK_PRODUCT
my_cflags += -D__ANDROID_PRODUCT__
endif
endif
diff --git a/core/config.mk b/core/config.mk
index fbf6764..f8a9879 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -436,16 +436,16 @@
# Boolean variable determining if AOSP is page size agnostic. This means
# that AOSP can use a kernel configured with 4k/16k/64k PAGE SIZES.
-TARGET_PAGE_SIZE_AGNOSTIC := false
-ifdef PRODUCT_PAGE_SIZE_AGNOSTIC
- TARGET_PAGE_SIZE_AGNOSTIC := $(PRODUCT_PAGE_SIZE_AGNOSTIC)
- ifeq ($(TARGET_PAGE_SIZE_AGNOSTIC),true)
+TARGET_NO_BIONIC_PAGE_SIZE_MACRO := false
+ifdef PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
+ TARGET_NO_BIONIC_PAGE_SIZE_MACRO := $(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)
+ ifeq ($(TARGET_NO_BIONIC_PAGE_SIZE_MACRO),true)
ifneq ($(TARGET_MAX_PAGE_SIZE_SUPPORTED),65536)
$(error TARGET_MAX_PAGE_SIZE_SUPPORTED has to be 65536 to support page size agnostic)
endif
endif
endif
-.KATI_READONLY := TARGET_PAGE_SIZE_AGNOSTIC
+.KATI_READONLY := TARGET_NO_BIONIC_PAGE_SIZE_MACRO
# Pruned directory options used when using findleaves.py
# See envsetup.mk for a description of SCAN_EXCLUDE_DIRS
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 83be006..3507961 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -355,6 +355,12 @@
my_sanitize := $(filter-out cfi,$(my_sanitize))
my_cflags += -fno-lto
my_ldflags += -fno-lto
+
+ # TODO(b/142430592): Upstream linker scripts for sanitizer runtime libraries
+ # discard the sancov_lowest_stack symbol, because it's emulated TLS (and thus
+ # doesn't match the linker script due to the "__emutls_v." prefix).
+ my_cflags += -fno-sanitize-coverage=stack-depth
+ my_ldflags += -fno-sanitize-coverage=stack-depth
endif
ifneq ($(filter integer_overflow,$(my_sanitize)),)
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index a7e8d35..57df911 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -78,7 +78,7 @@
$(strip $(1)): $(ACONFIG) $(strip $(3))
mkdir -p $$(dir $$(PRIVATE_OUT))
$$(if $$(PRIVATE_IN), \
- $$(ACONFIG) dump --format protobuf --out $$(PRIVATE_OUT) \
+ $$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
$$(addprefix --cache ,$$(PRIVATE_IN)), \
echo -n > $$(PRIVATE_OUT) \
)
diff --git a/core/product.mk b/core/product.mk
index 969b506..5515a8a 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -35,7 +35,7 @@
# Indicates that AOSP can use a kernel configured with 4k/16k/64k page sizes.
# The possible values are true or false.
-_product_single_value_vars += PRODUCT_PAGE_SIZE_AGNOSTIC
+_product_single_value_vars += PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
# The resource configuration options to use for this product.
_product_list_vars += PRODUCT_LOCALES
@@ -447,6 +447,7 @@
_product_list_vars += PRODUCT_AFDO_PROFILES
_product_single_value_vars += PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API
+_product_single_value_vars += PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE
_product_list_vars += PRODUCT_RELEASE_CONFIG_MAPS
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 30acbba..193ac18 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -147,6 +147,7 @@
$(call add_json_bool, ArtUseReadBarrier, $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
$(call add_json_str, BtConfigIncludeDir, $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
$(call add_json_list, DeviceKernelHeaders, $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
+$(call add_json_str, VendorApiLevel, $(BOARD_API_LEVEL))
$(call add_json_str, DeviceVndkVersion, $(BOARD_VNDK_VERSION))
$(call add_json_str, Platform_vndk_version, $(PLATFORM_VNDK_VERSION))
$(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VERSIONS))
@@ -158,7 +159,7 @@
$(call add_json_bool, Malloc_pattern_fill_contents, $(MALLOC_PATTERN_FILL_CONTENTS))
$(call add_json_str, Override_rs_driver, $(OVERRIDE_RS_DRIVER))
$(call add_json_str, DeviceMaxPageSizeSupported, $(TARGET_MAX_PAGE_SIZE_SUPPORTED))
-$(call add_json_bool, DevicePageSizeAgnostic, $(filter true,$(TARGET_PAGE_SIZE_AGNOSTIC)))
+$(call add_json_bool, DeviceNoBionicPageSizeMacro, $(filter true,$(TARGET_NO_BIONIC_PAGE_SIZE_MACRO)))
$(call add_json_bool, UncompressPrivAppDex, $(call invert_bool,$(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))))
$(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 8e2d58e..eb5c63c 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -5,37 +5,45 @@
COMMA := ,
_NEWLINE := '\n'
+define write-optional-json-list
+$(if $(strip $(2)),'$(COMMA)$(strip $(1)): [$(KATI_foreach_sep w,$(COMMA) ,$(2),"$(w)")]')
+endef
+
+define write-optional-json-bool
+$(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"')
+endef
+
$(MODULE_INFO_JSON):
@echo Generating $@
$(hide) echo -ne '{\n ' > $@
$(hide) echo -ne $(KATI_foreach_sep m,$(COMMA)$(_NEWLINE), $(sort $(ALL_MODULES)),\
'"$(m)": {' \
- '"class": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).CLASS)),"$(w)")],' \
- '"path": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).PATH)),"$(w)")],' \
- '"tags": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TAGS)),"$(w)")],' \
- '"installed": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).INSTALLED)),"$(w)")],' \
- '"compatibility_suites": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES)),"$(w)")],' \
- '"auto_test_config": [$(ALL_MODULES.$(m).auto_test_config)],' \
- '"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)"$(COMMA)' \
- '"test_config": [$(KATI_foreach_sep w,$(COMMA) ,$(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS)),"$(w)")],' \
- '"dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).ALL_DEPS)),"$(w)")],' \
- '"shared_libs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SHARED_LIBS)),"$(w)")],' \
- '"static_libs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).STATIC_LIBS)),"$(w)")],' \
- '"system_shared_libs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SYSTEM_SHARED_LIBS)),"$(w)")],' \
- '"srcs": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SRCS)),"$(w)")],' \
- '"srcjars": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SRCJARS)),"$(w)")],' \
- '"classes_jar": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).CLASSES_JAR)),"$(w)")],' \
- '"test_mainline_modules": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES)),"$(w)")],' \
- '"is_unit_test": "$(ALL_MODULES.$(m).IS_UNIT_TEST)"$(COMMA)' \
- '"test_options_tags": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_OPTIONS_TAGS)),"$(w)")],' \
- '"data": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_DATA)),"$(w)")],' \
- '"runtime_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES)),"$(w)")],' \
- '"static_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).LOCAL_STATIC_LIBRARIES)),"$(w)")],' \
- '"data_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TEST_DATA_BINS)),"$(w)")],' \
- '"supported_variants": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS)),"$(w)")],' \
- '"host_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET)),"$(w)")],' \
- '"target_dependencies": [$(KATI_foreach_sep w,$(COMMA) ,$(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST)),"$(w)")]' \
- '}')'\n}\n' >> $@
+ '"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)"' \
+ $(call write-optional-json-list, "class", $(sort $(ALL_MODULES.$(m).CLASS))) \
+ $(call write-optional-json-list, "path", $(sort $(ALL_MODULES.$(m).PATH))) \
+ $(call write-optional-json-list, "tags", $(sort $(ALL_MODULES.$(m).TAGS))) \
+ $(call write-optional-json-list, "installed", $(sort $(ALL_MODULES.$(m).INSTALLED))) \
+ $(call write-optional-json-list, "compatibility_suites", $(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES))) \
+ $(call write-optional-json-list, "auto_test_config", $(sort $(ALL_MODULES.$(m).auto_test_config))) \
+ $(call write-optional-json-list, "test_config", $(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS))) \
+ $(call write-optional-json-list, "dependencies", $(sort $(ALL_MODULES.$(m).ALL_DEPS))) \
+ $(call write-optional-json-list, "shared_libs", $(sort $(ALL_MODULES.$(m).SHARED_LIBS))) \
+ $(call write-optional-json-list, "static_libs", $(sort $(ALL_MODULES.$(m).STATIC_LIBS))) \
+ $(call write-optional-json-list, "system_shared_libs", $(sort $(ALL_MODULES.$(m).SYSTEM_SHARED_LIBS))) \
+ $(call write-optional-json-list, "srcs", $(sort $(ALL_MODULES.$(m).SRCS))) \
+ $(call write-optional-json-list, "srcjars", $(sort $(ALL_MODULES.$(m).SRCJARS))) \
+ $(call write-optional-json-list, "classes_jar", $(sort $(ALL_MODULES.$(m).CLASSES_JAR))) \
+ $(call write-optional-json-list, "test_mainline_modules", $(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES))) \
+ $(call write-optional-json-bool, "is_unit_test", $(ALL_MODULES.$(m).IS_UNIT_TEST)) \
+ $(call write-optional-json-list, "test_options_tags", $(sort $(ALL_MODULES.$(m).TEST_OPTIONS_TAGS))) \
+ $(call write-optional-json-list, "data", $(sort $(ALL_MODULES.$(m).TEST_DATA))) \
+ $(call write-optional-json-list, "runtime_dependencies", $(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES))) \
+ $(call write-optional-json-list, "static_dependencies", $(sort $(ALL_MODULES.$(m).LOCAL_STATIC_LIBRARIES))) \
+ $(call write-optional-json-list, "data_dependencies", $(sort $(ALL_MODULES.$(m).TEST_DATA_BINS))) \
+ $(call write-optional-json-list, "supported_variants", $(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS))) \
+ $(call write-optional-json-list, "host_dependencies", $(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET))) \
+ $(call write-optional-json-list, "target_dependencies", $(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST))) \
+ '}')'\n}\n' >> $@
droidcore-unbundled: $(MODULE_INFO_JSON)
diff --git a/target/product/aosp_product.mk b/target/product/aosp_product.mk
index a4c3a91..f72f2df 100644
--- a/target/product/aosp_product.mk
+++ b/target/product/aosp_product.mk
@@ -33,6 +33,7 @@
messaging \
PhotoTable \
preinstalled-packages-platform-aosp-product.xml \
+ ThemePicker \
WallpaperPicker \
# Telephony:
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index f31749b..098ed27 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -94,6 +94,7 @@
framework-graphics \
framework-minus-apex \
framework-minus-apex-install-dependencies \
+ framework-nfc \
framework-res \
framework-sysconfig.xml \
fsck.erofs \
@@ -125,7 +126,6 @@
IntentResolver \
ip \
iptables \
- ip-up-vpn \
javax.obex \
keystore2 \
credstore \
@@ -223,7 +223,6 @@
mkfs.erofs \
monkey \
mtectrl \
- mtpd \
ndc \
netd \
NetworkStack \
@@ -237,13 +236,11 @@
ping6 \
platform.xml \
pm \
- pppd \
preinstalled-packages-asl-files.xml \
preinstalled-packages-platform.xml \
printflags \
privapp-permissions-platform.xml \
prng_seeder \
- racoon \
recovery-persist \
resize2fs \
rss_hwm_reset \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index b02a583..55fcf2f 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -50,6 +50,7 @@
PRODUCT_BOOT_JARS += \
framework-minus-apex \
framework-graphics \
+ framework-nfc \
ext \
telephony-common \
voip-common \
diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk
index 5726c06..5e2a694 100644
--- a/target/product/fullmte.mk
+++ b/target/product/fullmte.mk
@@ -25,3 +25,4 @@
SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap)
endif
PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync
+PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE := 131072
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 6c93dd7..3acf1e6 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -44,6 +44,7 @@
CaptivePortalLogin \
CertInstaller \
CredentialManager \
+ DeviceAsWebcam \
DocumentsUI \
DownloadProviderUi \
EasterEgg \
@@ -56,7 +57,6 @@
MmsService \
MtpService \
MusicFX \
- NfcNci \
PacProcessor \
preinstalled-packages-platform-handheld-system.xml \
PrintRecommendationService \
@@ -73,6 +73,7 @@
UserDictionaryProvider \
VpnDialogs \
vr \
+ $(RELEASE_PACKAGE_NFC_STACK)
PRODUCT_SYSTEM_SERVER_APPS += \
diff --git a/tools/OWNERS b/tools/OWNERS
deleted file mode 100644
index 7d666f1..0000000
--- a/tools/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-per-file warn.py,checkowners.py = chh@google.com
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
index e2fadb0..82bfa7e 100644
--- a/tools/aconfig/Android.bp
+++ b/tools/aconfig/Android.bp
@@ -80,6 +80,7 @@
rustlibs: [
"libitertools",
],
+ test_suites: ["general-tests"],
}
// integration tests: general
@@ -90,6 +91,12 @@
srcs: ["tests/test.aconfig"],
}
+aconfig_declarations {
+ name: "aconfig.test.exported.flags",
+ package: "com.android.aconfig.test.exported",
+ srcs: ["tests/test_exported.aconfig"],
+}
+
aconfig_values {
name: "aconfig.test.flag.values",
package: "com.android.aconfig.test",
@@ -113,6 +120,12 @@
aconfig_declarations: "aconfig.test.flags",
}
+java_aconfig_library {
+ name: "aconfig_test_java_library_exported",
+ aconfig_declarations: "aconfig.test.exported.flags",
+ mode: "exported",
+}
+
android_test {
name: "aconfig.test.java",
srcs: [
@@ -122,10 +135,11 @@
certificate: "platform",
static_libs: [
"aconfig_test_java_library",
+ "aconfig_test_java_library_exported",
"androidx.test.rules",
"testng",
],
- test_suites: ["device-tests"],
+ test_suites: ["general-tests"],
}
java_aconfig_library {
@@ -154,6 +168,12 @@
aconfig_declarations: "aconfig.test.flags",
}
+cc_aconfig_library {
+ name: "aconfig_test_cpp_library_test_variant",
+ aconfig_declarations: "aconfig.test.flags",
+ mode: "test",
+}
+
cc_test {
name: "aconfig.test.cpp",
srcs: [
@@ -166,6 +186,22 @@
shared_libs: [
"server_configurable_flags",
],
+ test_suites: ["general-tests"],
+}
+
+cc_test {
+ name: "aconfig.test.cpp.test_mode",
+ srcs: [
+ "tests/aconfig_test_test_variant.cpp",
+ ],
+ static_libs: [
+ "aconfig_test_cpp_library_test_variant",
+ "libgmock",
+ ],
+ shared_libs: [
+ "server_configurable_flags",
+ ],
+ test_suites: ["general-tests"],
}
rust_aconfig_library {
@@ -182,6 +218,7 @@
rustlibs: [
"libaconfig_test_rust_library",
],
+ test_suites: ["general-tests"],
}
rust_aconfig_library {
@@ -199,4 +236,5 @@
rustlibs: [
"libaconfig_test_rust_library_with_test_mode",
],
+ test_suites: ["general-tests"],
}
diff --git a/tools/aconfig/OWNERS b/tools/aconfig/OWNERS
index 4e05b00..9a76279 100644
--- a/tools/aconfig/OWNERS
+++ b/tools/aconfig/OWNERS
@@ -1,5 +1,7 @@
amhk@google.com
+dzshen@google.com
jham@google.com
joeo@google.com
opg@google.com
+tedbauer@google.com
zhidou@google.com
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
index 74ac5ec..e29918f 100644
--- a/tools/aconfig/TEST_MAPPING
+++ b/tools/aconfig/TEST_MAPPING
@@ -17,5 +17,35 @@
// that using the flag macros to do filtering will get affected.
"name": "FlagMacrosTests"
}
+ ],
+ "postsubmit": [
+ {
+ // aconfig unit tests
+ "name": "aconfig.test"
+ },
+ {
+ // aconfig Java integration tests
+ "name": "aconfig.test.java"
+ },
+ {
+ // aconfig C++ integration tests (production mode auto-generated code)
+ "name": "aconfig.test.cpp"
+ },
+ {
+ // aconfig C++ integration tests (test mode auto-generated code)
+ "name": "aconfig.test.cpp.test_mode"
+ },
+ {
+ // aconfig Rust integration tests (production mode auto-generated code)
+ "name": "aconfig.prod_mode.test.rust"
+ },
+ {
+ // aconfig Rust integration tests (test mode auto-generated code)
+ "name": "aconfig.test_mode.test.rust"
+ },
+ {
+ // printflags unit tests
+ "name": "printflags.test"
+ }
]
}
diff --git a/tools/aconfig/printflags/Android.bp b/tools/aconfig/printflags/Android.bp
index da18cdc..d50a77d 100644
--- a/tools/aconfig/printflags/Android.bp
+++ b/tools/aconfig/printflags/Android.bp
@@ -24,4 +24,5 @@
rust_test_host {
name: "printflags.test",
defaults: ["printflags.defaults"],
+ test_suites: ["general-tests"],
}
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/protos/aconfig.proto
index 9e193ec..ed4b24c 100644
--- a/tools/aconfig/protos/aconfig.proto
+++ b/tools/aconfig/protos/aconfig.proto
@@ -41,11 +41,27 @@
repeated string bug = 4;
optional bool is_fixed_read_only = 5;
optional bool is_exported = 6;
+ optional flag_metadata metadata = 7;
};
+// Optional metadata about the flag, such as its purpose and its intended form factors.
+// Can influence the applied policies and testing strategy.
+message flag_metadata {
+ enum flag_purpose {
+ PURPOSE_UNSPECIFIED = 0;
+ PURPOSE_FEATURE = 1;
+ PURPOSE_BUGFIX = 2;
+ }
+
+ optional flag_purpose purpose = 1;
+
+ // TODO(b/315025930): Add field to designate intended target device form factor(s), such as phone, watch or other.
+}
+
message flag_declarations {
optional string package = 1;
repeated flag_declaration flag = 2;
+ optional string container = 3;
};
message flag_value {
@@ -79,7 +95,8 @@
repeated tracepoint trace = 8;
optional bool is_fixed_read_only = 9;
optional bool is_exported = 10;
-
+ optional string container = 11;
+ optional flag_metadata metadata = 12;
}
message parsed_flags {
diff --git a/tools/aconfig/src/codegen_cpp.rs b/tools/aconfig/src/codegen/cpp.rs
similarity index 92%
rename from tools/aconfig/src/codegen_cpp.rs
rename to tools/aconfig/src/codegen/cpp.rs
index c536260..000581b 100644
--- a/tools/aconfig/src/codegen_cpp.rs
+++ b/tools/aconfig/src/codegen/cpp.rs
@@ -56,12 +56,12 @@
let files = [
FileSpec {
name: &format!("{}.h", header),
- template: include_str!("../templates/cpp_exported_header.template"),
+ template: include_str!("../../templates/cpp_exported_header.template"),
dir: "include",
},
FileSpec {
name: &format!("{}.cc", header),
- template: include_str!("../templates/cpp_source_file.template"),
+ template: include_str!("../../templates/cpp_source_file.template"),
dir: "",
},
];
@@ -170,6 +170,8 @@
virtual bool enabled_ro() = 0;
+ virtual bool enabled_ro_exported() = 0;
+
virtual bool enabled_rw() = 0;
};
@@ -199,6 +201,10 @@
return true;
}
+inline bool enabled_ro_exported() {
+ return true;
+}
+
inline bool enabled_rw() {
return provider_->enabled_rw();
}
@@ -220,6 +226,8 @@
bool com_android_aconfig_test_enabled_ro();
+bool com_android_aconfig_test_enabled_ro_exported();
+
bool com_android_aconfig_test_enabled_rw();
#ifdef __cplusplus
@@ -265,6 +273,10 @@
virtual void enabled_ro(bool val) = 0;
+ virtual bool enabled_ro_exported() = 0;
+
+ virtual void enabled_ro_exported(bool val) = 0;
+
virtual bool enabled_rw() = 0;
virtual void enabled_rw(bool val) = 0;
@@ -322,6 +334,14 @@
provider_->enabled_ro(val);
}
+inline bool enabled_ro_exported() {
+ return provider_->enabled_ro_exported();
+}
+
+inline void enabled_ro_exported(bool val) {
+ provider_->enabled_ro_exported(val);
+}
+
inline bool enabled_rw() {
return provider_->enabled_rw();
}
@@ -363,6 +383,10 @@
void set_com_android_aconfig_test_enabled_ro(bool val);
+bool com_android_aconfig_test_enabled_ro_exported();
+
+void set_com_android_aconfig_test_enabled_ro_exported(bool val);
+
bool com_android_aconfig_test_enabled_rw();
void set_com_android_aconfig_test_enabled_rw(bool val);
@@ -429,6 +453,10 @@
return true;
}
+ virtual bool enabled_ro_exported() override {
+ return true;
+ }
+
virtual bool enabled_rw() override {
if (cache_[3] == -1) {
cache_[3] = server_configurable_flags::GetServerConfigurableFlag(
@@ -471,6 +499,10 @@
return true;
}
+bool com_android_aconfig_test_enabled_ro_exported() {
+ return true;
+}
+
bool com_android_aconfig_test_enabled_rw() {
return com::android::aconfig::test::enabled_rw();
}
@@ -581,6 +613,19 @@
overrides_["enabled_ro"] = val;
}
+ virtual bool enabled_ro_exported() override {
+ auto it = overrides_.find("enabled_ro_exported");
+ if (it != overrides_.end()) {
+ return it->second;
+ } else {
+ return true;
+ }
+ }
+
+ virtual void enabled_ro_exported(bool val) override {
+ overrides_["enabled_ro_exported"] = val;
+ }
+
virtual bool enabled_rw() override {
auto it = overrides_.find("enabled_rw");
if (it != overrides_.end()) {
@@ -661,6 +706,17 @@
com::android::aconfig::test::enabled_ro(val);
}
+
+bool com_android_aconfig_test_enabled_ro_exported() {
+ return com::android::aconfig::test::enabled_ro_exported();
+}
+
+
+void set_com_android_aconfig_test_enabled_ro_exported(bool val) {
+ com::android::aconfig::test::enabled_ro_exported(val);
+}
+
+
bool com_android_aconfig_test_enabled_rw() {
return com::android::aconfig::test::enabled_rw();
}
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen/java.rs
similarity index 91%
rename from tools/aconfig/src/codegen_java.rs
rename to tools/aconfig/src/codegen/java.rs
index b3e5e6c..ae3f274 100644
--- a/tools/aconfig/src/codegen_java.rs
+++ b/tools/aconfig/src/codegen/java.rs
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-use anyhow::Result;
+use anyhow::{anyhow, Result};
use serde::Serialize;
use std::collections::{BTreeMap, BTreeSet};
use std::path::PathBuf;
@@ -34,34 +34,43 @@
{
let flag_elements: Vec<FlagElement> =
parsed_flags_iter.map(|pf| create_flag_element(package, pf)).collect();
+ let exported_flag_elements: Vec<FlagElement> =
+ flag_elements.iter().filter(|elem| elem.exported).cloned().collect();
let namespace_flags = gen_flags_by_namespace(&flag_elements);
let properties_set: BTreeSet<String> =
flag_elements.iter().map(|fe| format_property_name(&fe.device_config_namespace)).collect();
- let is_read_write = flag_elements.iter().any(|elem| elem.is_read_write);
let is_test_mode = codegen_mode == CodegenMode::Test;
let library_exported = codegen_mode == CodegenMode::Exported;
+ let runtime_lookup_required =
+ flag_elements.iter().any(|elem| elem.is_read_write) || library_exported;
+
+ if library_exported && exported_flag_elements.is_empty() {
+ return Err(anyhow!("exported library contains no exported flags"));
+ }
+
let context = Context {
flag_elements,
+ exported_flag_elements,
namespace_flags,
is_test_mode,
- is_read_write,
+ runtime_lookup_required,
properties_set,
package_name: package.to_string(),
library_exported,
};
let mut template = TinyTemplate::new();
- template.add_template("Flags.java", include_str!("../templates/Flags.java.template"))?;
+ template.add_template("Flags.java", include_str!("../../templates/Flags.java.template"))?;
template.add_template(
"FeatureFlagsImpl.java",
- include_str!("../templates/FeatureFlagsImpl.java.template"),
+ include_str!("../../templates/FeatureFlagsImpl.java.template"),
)?;
template.add_template(
"FeatureFlags.java",
- include_str!("../templates/FeatureFlags.java.template"),
+ include_str!("../../templates/FeatureFlags.java.template"),
)?;
template.add_template(
"FakeFeatureFlagsImpl.java",
- include_str!("../templates/FakeFeatureFlagsImpl.java.template"),
+ include_str!("../../templates/FakeFeatureFlagsImpl.java.template"),
)?;
let path: PathBuf = package.split('.').collect();
@@ -100,9 +109,10 @@
#[derive(Serialize)]
struct Context {
pub flag_elements: Vec<FlagElement>,
+ pub exported_flag_elements: Vec<FlagElement>,
pub namespace_flags: Vec<NamespaceFlags>,
pub is_test_mode: bool,
- pub is_read_write: bool,
+ pub runtime_lookup_required: bool,
pub properties_set: BTreeSet<String>,
pub package_name: String,
pub library_exported: bool,
@@ -193,6 +203,9 @@
@com.android.aconfig.annotations.AssumeTrueForR8
@UnsupportedAppUsage
boolean enabledRo();
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ boolean enabledRoExported();
@UnsupportedAppUsage
boolean enabledRw();
}
@@ -217,6 +230,8 @@
/** @hide */
public static final String FLAG_ENABLED_RO = "com.android.aconfig.test.enabled_ro";
/** @hide */
+ public static final String FLAG_ENABLED_RO_EXPORTED = "com.android.aconfig.test.enabled_ro_exported";
+ /** @hide */
public static final String FLAG_ENABLED_RW = "com.android.aconfig.test.enabled_rw";
@com.android.aconfig.annotations.AssumeFalseForR8
@@ -246,6 +261,11 @@
public static boolean enabledRo() {
return FEATURE_FLAGS.enabledRo();
}
+ @com.android.aconfig.annotations.AssumeTrueForR8
+ @UnsupportedAppUsage
+ public static boolean enabledRoExported() {
+ return FEATURE_FLAGS.enabledRoExported();
+ }
@UnsupportedAppUsage
public static boolean enabledRw() {
return FEATURE_FLAGS.enabledRw();
@@ -295,6 +315,11 @@
}
@Override
@UnsupportedAppUsage
+ public boolean enabledRoExported() {
+ return getValue(Flags.FLAG_ENABLED_RO_EXPORTED);
+ }
+ @Override
+ @UnsupportedAppUsage
public boolean enabledRw() {
return getValue(Flags.FLAG_ENABLED_RW);
}
@@ -324,6 +349,7 @@
Map.entry(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false),
Map.entry(Flags.FLAG_ENABLED_FIXED_RO, false),
Map.entry(Flags.FLAG_ENABLED_RO, false),
+ Map.entry(Flags.FLAG_ENABLED_RO_EXPORTED, false),
Map.entry(Flags.FLAG_ENABLED_RW, false)
)
);
@@ -442,6 +468,11 @@
}
@Override
@UnsupportedAppUsage
+ public boolean enabledRoExported() {
+ return true;
+ }
+ @Override
+ @UnsupportedAppUsage
public boolean enabledRw() {
if (!aconfig_test_is_cached) {
load_overrides_aconfig_test();
@@ -495,18 +526,18 @@
/** @hide */
public final class Flags {
/** @hide */
- public static final String FLAG_DISABLED_RW = "com.android.aconfig.test.disabled_rw";
- /** @hide */
public static final String FLAG_DISABLED_RW_EXPORTED = "com.android.aconfig.test.disabled_rw_exported";
+ /** @hide */
+ public static final String FLAG_ENABLED_RO_EXPORTED = "com.android.aconfig.test.enabled_ro_exported";
@UnsupportedAppUsage
- public static boolean disabledRw() {
- return FEATURE_FLAGS.disabledRw();
- }
- @UnsupportedAppUsage
public static boolean disabledRwExported() {
return FEATURE_FLAGS.disabledRwExported();
}
+ @UnsupportedAppUsage
+ public static boolean enabledRoExported() {
+ return FEATURE_FLAGS.enabledRoExported();
+ }
private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
}
"#;
@@ -518,9 +549,9 @@
/** @hide */
public interface FeatureFlags {
@UnsupportedAppUsage
- boolean disabledRw();
- @UnsupportedAppUsage
boolean disabledRwExported();
+ @UnsupportedAppUsage
+ boolean enabledRoExported();
}
"#;
@@ -534,17 +565,17 @@
public final class FeatureFlagsImpl implements FeatureFlags {
private static boolean aconfig_test_is_cached = false;
private static boolean other_namespace_is_cached = false;
- private static boolean disabledRw = false;
private static boolean disabledRwExported = false;
+ private static boolean enabledRoExported = false;
private void load_overrides_aconfig_test() {
try {
Properties properties = DeviceConfig.getProperties("aconfig_test");
- disabledRw =
- properties.getBoolean("com.android.aconfig.test.disabled_rw", false);
disabledRwExported =
properties.getBoolean("com.android.aconfig.test.disabled_rw_exported", false);
+ enabledRoExported =
+ properties.getBoolean("com.android.aconfig.test.enabled_ro_exported", false);
} catch (NullPointerException e) {
throw new RuntimeException(
"Cannot read value from namespace aconfig_test "
@@ -576,21 +607,21 @@
@Override
@UnsupportedAppUsage
- public boolean disabledRw() {
- if (!aconfig_test_is_cached) {
- load_overrides_aconfig_test();
- }
- return disabledRw;
- }
-
- @Override
- @UnsupportedAppUsage
public boolean disabledRwExported() {
if (!aconfig_test_is_cached) {
load_overrides_aconfig_test();
}
return disabledRwExported;
}
+
+ @Override
+ @UnsupportedAppUsage
+ public boolean enabledRoExported() {
+ if (!aconfig_test_is_cached) {
+ load_overrides_aconfig_test();
+ }
+ return enabledRoExported;
+ }
}"#;
let expect_fake_feature_flags_impl_content = r#"
@@ -606,13 +637,13 @@
}
@Override
@UnsupportedAppUsage
- public boolean disabledRw() {
- return getValue(Flags.FLAG_DISABLED_RW);
+ public boolean disabledRwExported() {
+ return getValue(Flags.FLAG_DISABLED_RW_EXPORTED);
}
@Override
@UnsupportedAppUsage
- public boolean disabledRwExported() {
- return getValue(Flags.FLAG_DISABLED_RW_EXPORTED);
+ public boolean enabledRoExported() {
+ return getValue(Flags.FLAG_ENABLED_RO_EXPORTED);
}
public void setFlag(String flagName, boolean value) {
if (!this.mFlagMap.containsKey(flagName)) {
@@ -634,13 +665,8 @@
}
private Map<String, Boolean> mFlagMap = new HashMap<>(
Map.ofEntries(
- Map.entry(Flags.FLAG_DISABLED_RO, false),
- Map.entry(Flags.FLAG_DISABLED_RW, false),
Map.entry(Flags.FLAG_DISABLED_RW_EXPORTED, false),
- Map.entry(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false),
- Map.entry(Flags.FLAG_ENABLED_FIXED_RO, false),
- Map.entry(Flags.FLAG_ENABLED_RO, false),
- Map.entry(Flags.FLAG_ENABLED_RW, false)
+ Map.entry(Flags.FLAG_ENABLED_RO_EXPORTED, false)
)
);
}
@@ -739,6 +765,12 @@
}
@Override
@UnsupportedAppUsage
+ public boolean enabledRoExported() {
+ throw new UnsupportedOperationException(
+ "Method is not implemented.");
+ }
+ @Override
+ @UnsupportedAppUsage
public boolean enabledRw() {
throw new UnsupportedOperationException(
"Method is not implemented.");
diff --git a/tools/aconfig/src/codegen.rs b/tools/aconfig/src/codegen/mod.rs
similarity index 72%
rename from tools/aconfig/src/codegen.rs
rename to tools/aconfig/src/codegen/mod.rs
index b7fb08f..abc27c6 100644
--- a/tools/aconfig/src/codegen.rs
+++ b/tools/aconfig/src/codegen/mod.rs
@@ -14,6 +14,10 @@
* limitations under the License.
*/
+pub mod cpp;
+pub mod java;
+pub mod rust;
+
use anyhow::{ensure, Result};
pub fn is_valid_name_ident(s: &str) -> bool {
@@ -38,6 +42,10 @@
s.split('.').all(is_valid_name_ident)
}
+pub fn is_valid_container_ident(s: &str) -> bool {
+ is_valid_name_ident(s) || s.split('.').all(is_valid_name_ident)
+}
+
pub fn create_device_config_ident(package: &str, flag_name: &str) -> Result<String> {
ensure!(is_valid_package_ident(package), "bad package");
ensure!(is_valid_name_ident(flag_name), "bad flag name");
@@ -85,6 +93,29 @@
}
#[test]
+ fn test_is_valid_container_ident() {
+ assert!(is_valid_container_ident("foo.bar"));
+ assert!(is_valid_container_ident("foo.bar_baz"));
+ assert!(is_valid_container_ident("foo.bar.a123"));
+ assert!(is_valid_container_ident("foo"));
+ assert!(is_valid_container_ident("foo_bar_123"));
+
+ assert!(!is_valid_container_ident(""));
+ assert!(!is_valid_container_ident("foo._bar"));
+ assert!(!is_valid_container_ident("_foo"));
+ assert!(!is_valid_container_ident("123_foo"));
+ assert!(!is_valid_container_ident("foo-bar"));
+ assert!(!is_valid_container_ident("foo-b\u{00e5}r"));
+ assert!(!is_valid_container_ident("foo.bar.123"));
+ assert!(!is_valid_container_ident(".foo.bar"));
+ assert!(!is_valid_container_ident("foo.bar."));
+ assert!(!is_valid_container_ident("."));
+ assert!(!is_valid_container_ident(".."));
+ assert!(!is_valid_container_ident("foo..bar"));
+ assert!(!is_valid_container_ident("foo.__bar"));
+ }
+
+ #[test]
fn test_create_device_config_ident() {
assert_eq!(
"com.foo.bar.some_flag",
diff --git a/tools/aconfig/src/codegen_rust.rs b/tools/aconfig/src/codegen/rust.rs
similarity index 91%
rename from tools/aconfig/src/codegen_rust.rs
rename to tools/aconfig/src/codegen/rust.rs
index 502cec8..04be93b 100644
--- a/tools/aconfig/src/codegen_rust.rs
+++ b/tools/aconfig/src/codegen/rust.rs
@@ -43,8 +43,8 @@
template.add_template(
"rust_code_gen",
match codegen_mode {
- CodegenMode::Production => include_str!("../templates/rust_prod.template"),
- CodegenMode::Test => include_str!("../templates/rust_test.template"),
+ CodegenMode::Production => include_str!("../../templates/rust_prod.template"),
+ CodegenMode::Test => include_str!("../../templates/rust_test.template"),
CodegenMode::Exported => {
todo!("exported mode not yet supported for rust, see b/313894653.")
}
@@ -158,6 +158,11 @@
true
}
+ /// query flag enabled_ro_exported
+ pub fn enabled_ro_exported(&self) -> bool {
+ true
+ }
+
/// query flag enabled_rw
pub fn enabled_rw(&self) -> bool {
*CACHED_enabled_rw
@@ -203,6 +208,12 @@
true
}
+/// query flag enabled_ro_exported
+#[inline(always)]
+pub fn enabled_ro_exported() -> bool {
+ true
+}
+
/// query flag enabled_rw
#[inline(always)]
pub fn enabled_rw() -> bool {
@@ -303,6 +314,18 @@
self.overrides.insert("enabled_ro", val);
}
+ /// query flag enabled_ro_exported
+ pub fn enabled_ro_exported(&self) -> bool {
+ self.overrides.get("enabled_ro_exported").copied().unwrap_or(
+ true
+ )
+ }
+
+ /// set flag enabled_ro_exported
+ pub fn set_enabled_ro_exported(&mut self, val: bool) {
+ self.overrides.insert("enabled_ro_exported", val);
+ }
+
/// query flag enabled_rw
pub fn enabled_rw(&self) -> bool {
self.overrides.get("enabled_rw").copied().unwrap_or(
@@ -401,6 +424,18 @@
PROVIDER.lock().unwrap().set_enabled_ro(val);
}
+/// query flag enabled_ro_exported
+#[inline(always)]
+pub fn enabled_ro_exported() -> bool {
+ PROVIDER.lock().unwrap().enabled_ro_exported()
+}
+
+/// set flag enabled_ro_exported
+#[inline(always)]
+pub fn set_enabled_ro_exported(val: bool) {
+ PROVIDER.lock().unwrap().set_enabled_ro_exported(val);
+}
+
/// query flag enabled_rw
#[inline(always)]
pub fn enabled_rw() -> bool {
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
index 47e90ac..2a3fe27 100644
--- a/tools/aconfig/src/commands.rs
+++ b/tools/aconfig/src/commands.rs
@@ -20,11 +20,14 @@
use std::io::Read;
use std::path::PathBuf;
-use crate::codegen_cpp::generate_cpp_code;
-use crate::codegen_java::generate_java_code;
-use crate::codegen_rust::generate_rust_code;
+use crate::codegen::cpp::generate_cpp_code;
+use crate::codegen::java::generate_java_code;
+use crate::codegen::rust::generate_rust_code;
+use crate::storage::generate_storage_files;
+
use crate::protos::{
- ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint,
+ ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag,
+ ProtoParsedFlags, ProtoTracepoint,
};
pub struct Input {
@@ -57,6 +60,7 @@
pub fn parse_flags(
package: &str,
+ container: Option<&str>,
declarations: Vec<Input>,
values: Vec<Input>,
default_permission: ProtoFlagPermission,
@@ -79,12 +83,24 @@
package,
flag_declarations.package()
);
+ if let Some(c) = container {
+ ensure!(
+ c == flag_declarations.container(),
+ "failed to parse {}: expected container {}, got {}",
+ input.source,
+ c,
+ flag_declarations.container()
+ );
+ }
for mut flag_declaration in flag_declarations.flag.into_iter() {
crate::protos::flag_declaration::verify_fields(&flag_declaration)
.with_context(|| input.error_context())?;
// create ParsedFlag using FlagDeclaration and default values
let mut parsed_flag = ProtoParsedFlag::new();
+ if let Some(c) = container {
+ parsed_flag.set_container(c.to_string());
+ }
parsed_flag.set_package(package.to_string());
parsed_flag.set_name(flag_declaration.take_name());
parsed_flag.set_namespace(flag_declaration.take_namespace());
@@ -105,6 +121,11 @@
tracepoint.set_permission(flag_permission);
parsed_flag.trace.push(tracepoint);
+ let mut metadata = ProtoFlagMetadata::new();
+ let purpose = flag_declaration.metadata.purpose();
+ metadata.set_purpose(purpose);
+ parsed_flag.metadata = Some(metadata).into();
+
// verify ParsedFlag looks reasonable
crate::protos::parsed_flag::verify_fields(&parsed_flag)?;
@@ -198,6 +219,17 @@
generate_rust_code(package, parsed_flags.parsed_flag.iter(), codegen_mode)
}
+pub fn create_storage(caches: Vec<Input>, container: &str) -> Result<Vec<OutputFile>> {
+ let parsed_flags_vec: Vec<ProtoParsedFlags> = caches
+ .into_iter()
+ .map(|mut input| input.try_parse_flags())
+ .collect::<Result<Vec<_>>>()?
+ .into_iter()
+ .filter(|pfs| find_unique_container(pfs) == Some(container))
+ .collect();
+ generate_storage_files(container, parsed_flags_vec.iter())
+}
+
pub fn create_device_config_defaults(mut input: Input) -> Result<Vec<u8>> {
let parsed_flags = input.try_parse_flags()?;
let mut output = Vec::new();
@@ -207,10 +239,9 @@
.filter(|pf| pf.permission() == ProtoFlagPermission::READ_WRITE)
{
let line = format!(
- "{}:{}.{}={}\n",
+ "{}:{}={}\n",
parsed_flag.namespace(),
- parsed_flag.package(),
- parsed_flag.name(),
+ parsed_flag.fully_qualified_name(),
match parsed_flag.state() {
ProtoFlagState::ENABLED => "enabled",
ProtoFlagState::DISABLED => "disabled",
@@ -230,9 +261,8 @@
.filter(|pf| pf.permission() == ProtoFlagPermission::READ_WRITE)
{
let line = format!(
- "persist.device_config.{}.{}={}\n",
- parsed_flag.package(),
- parsed_flag.name(),
+ "persist.device_config.{}={}\n",
+ parsed_flag.fully_qualified_name(),
match parsed_flag.state() {
ProtoFlagState::ENABLED => "true",
ProtoFlagState::DISABLED => "false",
@@ -249,22 +279,27 @@
Verbose,
Protobuf,
Textproto,
+ Bool,
}
-pub fn dump_parsed_flags(mut input: Vec<Input>, format: DumpFormat) -> Result<Vec<u8>> {
+pub fn dump_parsed_flags(
+ mut input: Vec<Input>,
+ format: DumpFormat,
+ dedup: bool,
+) -> Result<Vec<u8>> {
let individually_parsed_flags: Result<Vec<ProtoParsedFlags>> =
input.iter_mut().map(|i| i.try_parse_flags()).collect();
let parsed_flags: ProtoParsedFlags =
- crate::protos::parsed_flags::merge(individually_parsed_flags?)?;
+ crate::protos::parsed_flags::merge(individually_parsed_flags?, dedup)?;
let mut output = Vec::new();
match format {
DumpFormat::Text => {
for parsed_flag in parsed_flags.parsed_flag.into_iter() {
let line = format!(
- "{}.{}: {:?} + {:?}\n",
- parsed_flag.package(),
- parsed_flag.name(),
+ "{} [{}]: {:?} + {:?}\n",
+ parsed_flag.fully_qualified_name(),
+ parsed_flag.container(),
parsed_flag.permission(),
parsed_flag.state()
);
@@ -276,9 +311,9 @@
let sources: Vec<_> =
parsed_flag.trace.iter().map(|tracepoint| tracepoint.source()).collect();
let line = format!(
- "{}.{}: {:?} + {:?} ({})\n",
- parsed_flag.package(),
- parsed_flag.name(),
+ "{} [{}]: {:?} + {:?} ({})\n",
+ parsed_flag.fully_qualified_name(),
+ parsed_flag.container(),
parsed_flag.permission(),
parsed_flag.state(),
sources.join(", ")
@@ -293,6 +328,16 @@
let s = protobuf::text_format::print_to_string_pretty(&parsed_flags);
output.extend_from_slice(s.as_bytes());
}
+ DumpFormat::Bool => {
+ for parsed_flag in parsed_flags.parsed_flag.into_iter() {
+ let line = format!(
+ "{}={:?}\n",
+ parsed_flag.fully_qualified_name(),
+ parsed_flag.state() == ProtoFlagState::ENABLED
+ );
+ output.extend_from_slice(line.as_bytes());
+ }
+ }
}
Ok(output)
}
@@ -307,9 +352,20 @@
Some(package)
}
+fn find_unique_container(parsed_flags: &ProtoParsedFlags) -> Option<&str> {
+ let Some(container) = parsed_flags.parsed_flag.first().map(|pf| pf.container()) else {
+ return None;
+ };
+ if parsed_flags.parsed_flag.iter().any(|pf| pf.container() != container) {
+ return None;
+ }
+ Some(container)
+}
+
#[cfg(test)]
mod tests {
use super::*;
+ use crate::protos::ProtoFlagPurpose;
#[test]
fn test_parse_flags() {
@@ -324,6 +380,7 @@
assert_eq!("This flag is ENABLED + READ_ONLY", enabled_ro.description());
assert_eq!(ProtoFlagState::ENABLED, enabled_ro.state());
assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.permission());
+ assert_eq!(ProtoFlagPurpose::PURPOSE_BUGFIX, enabled_ro.metadata.purpose());
assert_eq!(3, enabled_ro.trace.len());
assert!(!enabled_ro.is_fixed_read_only());
assert_eq!("tests/test.aconfig", enabled_ro.trace[0].source());
@@ -336,7 +393,7 @@
assert_eq!(ProtoFlagState::ENABLED, enabled_ro.trace[2].state());
assert_eq!(ProtoFlagPermission::READ_ONLY, enabled_ro.trace[2].permission());
- assert_eq!(7, parsed_flags.parsed_flag.len());
+ assert_eq!(8, parsed_flags.parsed_flag.len());
for pf in parsed_flags.parsed_flag.iter() {
if pf.name() == "enabled_fixed_ro" {
continue;
@@ -377,6 +434,7 @@
let flags_bytes = crate::commands::parse_flags(
"com.first",
+ None,
declaration,
value,
ProtoFlagPermission::READ_ONLY,
@@ -391,9 +449,72 @@
}
#[test]
+ fn test_parse_flags_package_mismatch_between_declaration_and_command_line() {
+ let first_flag = r#"
+ package: "com.declaration.package"
+ container: "first.container"
+ flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+ }
+ "#;
+ let declaration =
+ vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+ let value: Vec<Input> = vec![];
+
+ let error = crate::commands::parse_flags(
+ "com.argument.package",
+ Some("first.container"),
+ declaration,
+ value,
+ ProtoFlagPermission::READ_WRITE,
+ )
+ .unwrap_err();
+ assert_eq!(
+ format!("{:?}", error),
+ "failed to parse memory: expected package com.argument.package, got com.declaration.package"
+ );
+ }
+
+ #[test]
+ fn test_parse_flags_container_mismatch_between_declaration_and_command_line() {
+ let first_flag = r#"
+ package: "com.first"
+ container: "declaration.container"
+ flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+ }
+ "#;
+ let declaration =
+ vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+ let value: Vec<Input> = vec![];
+
+ let error = crate::commands::parse_flags(
+ "com.first",
+ Some("argument.container"),
+ declaration,
+ value,
+ ProtoFlagPermission::READ_WRITE,
+ )
+ .unwrap_err();
+ assert_eq!(
+ format!("{:?}", error),
+ "failed to parse memory: expected container argument.container, got declaration.container"
+ );
+ }
+
+ #[test]
fn test_parse_flags_override_fixed_read_only() {
let first_flag = r#"
package: "com.first"
+ container: "com.first.container"
flag {
name: "first"
namespace: "first_ns"
@@ -419,6 +540,7 @@
}];
let error = crate::commands::parse_flags(
"com.first",
+ Some("com.first.container"),
declaration,
value,
ProtoFlagPermission::READ_WRITE,
@@ -431,6 +553,41 @@
}
#[test]
+ fn test_parse_flags_metadata() {
+ let metadata_flag = r#"
+ package: "com.first"
+ flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of this feature flag."
+ bug: "123"
+ metadata {
+ purpose: PURPOSE_FEATURE
+ }
+ }
+ "#;
+ let declaration = vec![Input {
+ source: "memory".to_string(),
+ reader: Box::new(metadata_flag.as_bytes()),
+ }];
+ let value: Vec<Input> = vec![];
+
+ let flags_bytes = crate::commands::parse_flags(
+ "com.first",
+ None,
+ declaration,
+ value,
+ ProtoFlagPermission::READ_ONLY,
+ )
+ .unwrap();
+ let parsed_flags =
+ crate::protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
+ assert_eq!(1, parsed_flags.parsed_flag.len());
+ let parsed_flag = parsed_flags.parsed_flag.first().unwrap();
+ assert_eq!(ProtoFlagPurpose::PURPOSE_FEATURE, parsed_flag.metadata.purpose());
+ }
+
+ #[test]
fn test_create_device_config_defaults() {
let input = parse_test_flags_as_input();
let bytes = create_device_config_defaults(input).unwrap();
@@ -449,9 +606,11 @@
#[test]
fn test_dump_text_format() {
let input = parse_test_flags_as_input();
- let bytes = dump_parsed_flags(vec![input], DumpFormat::Text).unwrap();
+ let bytes = dump_parsed_flags(vec![input], DumpFormat::Text, false).unwrap();
let text = std::str::from_utf8(&bytes).unwrap();
- assert!(text.contains("com.android.aconfig.test.disabled_ro: READ_ONLY + DISABLED"));
+ assert!(
+ text.contains("com.android.aconfig.test.disabled_ro [system]: READ_ONLY + DISABLED")
+ );
}
#[test]
@@ -464,7 +623,7 @@
.unwrap();
let input = parse_test_flags_as_input();
- let actual = dump_parsed_flags(vec![input], DumpFormat::Protobuf).unwrap();
+ let actual = dump_parsed_flags(vec![input], DumpFormat::Protobuf, false).unwrap();
assert_eq!(expected, actual);
}
@@ -472,7 +631,16 @@
#[test]
fn test_dump_textproto_format() {
let input = parse_test_flags_as_input();
- let bytes = dump_parsed_flags(vec![input], DumpFormat::Textproto).unwrap();
+ let bytes = dump_parsed_flags(vec![input], DumpFormat::Textproto, false).unwrap();
+ let text = std::str::from_utf8(&bytes).unwrap();
+ assert_eq!(crate::test::TEST_FLAGS_TEXTPROTO.trim(), text.trim());
+ }
+
+ #[test]
+ fn test_dump_textproto_format_dedup() {
+ let input = parse_test_flags_as_input();
+ let input2 = parse_test_flags_as_input();
+ let bytes = dump_parsed_flags(vec![input, input2], DumpFormat::Textproto, true).unwrap();
let text = std::str::from_utf8(&bytes).unwrap();
assert_eq!(crate::test::TEST_FLAGS_TEXTPROTO.trim(), text.trim());
}
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
index 7e44baf..63a50c8 100644
--- a/tools/aconfig/src/main.rs
+++ b/tools/aconfig/src/main.rs
@@ -25,11 +25,9 @@
use std::path::{Path, PathBuf};
mod codegen;
-mod codegen_cpp;
-mod codegen_java;
-mod codegen_rust;
mod commands;
mod protos;
+mod storage;
#[cfg(test)]
mod test;
@@ -42,6 +40,8 @@
.subcommand(
Command::new("create-cache")
.arg(Arg::new("package").long("package").required(true))
+ // TODO(b/312769710): Make this argument required.
+ .arg(Arg::new("container").long("container"))
.arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
.arg(Arg::new("values").long("values").action(ArgAction::Append))
.arg(
@@ -99,15 +99,27 @@
)
.subcommand(
Command::new("dump")
- .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
+ .arg(Arg::new("cache").long("cache").action(ArgAction::Append))
.arg(
Arg::new("format")
.long("format")
.value_parser(EnumValueParser::<commands::DumpFormat>::new())
.default_value("text"),
)
+ .arg(Arg::new("dedup").long("dedup").num_args(0).action(ArgAction::SetTrue))
.arg(Arg::new("out").long("out").default_value("-")),
)
+ .subcommand(
+ Command::new("create-storage")
+ .arg(
+ Arg::new("container")
+ .long("container")
+ .required(true)
+ .help("The target container for the generated storage file."),
+ )
+ .arg(Arg::new("cache").long("cache").required(true))
+ .arg(Arg::new("out").long("out").required(true)),
+ )
}
fn get_required_arg<'a, T>(matches: &'a ArgMatches, arg_name: &str) -> Result<&'a T>
@@ -119,6 +131,13 @@
.ok_or(anyhow!("internal error: required argument '{}' not found", arg_name))
}
+fn get_optional_arg<'a, T>(matches: &'a ArgMatches, arg_name: &str) -> Option<&'a T>
+where
+ T: Any + Clone + Send + Sync + 'static,
+{
+ matches.get_one::<T>(arg_name)
+}
+
fn open_zero_or_more_files(matches: &ArgMatches, arg_name: &str) -> Result<Vec<Input>> {
let mut opened_files = vec![];
for path in matches.get_many::<String>(arg_name).unwrap_or_default() {
@@ -167,12 +186,20 @@
match matches.subcommand() {
Some(("create-cache", sub_matches)) => {
let package = get_required_arg::<String>(sub_matches, "package")?;
+ let container =
+ get_optional_arg::<String>(sub_matches, "container").map(|c| c.as_str());
let declarations = open_zero_or_more_files(sub_matches, "declarations")?;
let values = open_zero_or_more_files(sub_matches, "values")?;
let default_permission =
get_required_arg::<protos::ProtoFlagPermission>(sub_matches, "default-permission")?;
- let output = commands::parse_flags(package, declarations, values, *default_permission)
- .context("failed to create cache")?;
+ let output = commands::parse_flags(
+ package,
+ container,
+ declarations,
+ values,
+ *default_permission,
+ )
+ .context("failed to create cache")?;
let path = get_required_arg::<String>(sub_matches, "cache")?;
write_output_to_file_or_stdout(path, &output)?;
}
@@ -222,10 +249,21 @@
let input = open_zero_or_more_files(sub_matches, "cache")?;
let format = get_required_arg::<DumpFormat>(sub_matches, "format")
.context("failed to dump previously parsed flags")?;
- let output = commands::dump_parsed_flags(input, *format)?;
+ let dedup = get_required_arg::<bool>(sub_matches, "dedup")?;
+ let output = commands::dump_parsed_flags(input, *format, *dedup)?;
let path = get_required_arg::<String>(sub_matches, "out")?;
write_output_to_file_or_stdout(path, &output)?;
}
+ Some(("create-storage", sub_matches)) => {
+ let cache = open_zero_or_more_files(sub_matches, "cache")?;
+ let container = get_required_arg::<String>(sub_matches, "container")?;
+ let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
+ let generated_files = commands::create_storage(cache, container)
+ .context("failed to create storage files")?;
+ generated_files
+ .iter()
+ .try_for_each(|file| write_output_file_realtive_to_dir(&dir, file))?;
+ }
_ => unreachable!(),
}
Ok(())
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/src/protos.rs
index a5a5342..2684d20 100644
--- a/tools/aconfig/src/protos.rs
+++ b/tools/aconfig/src/protos.rs
@@ -29,8 +29,10 @@
// ---- When building with the Android tool-chain ----
#[cfg(not(feature = "cargo"))]
mod auto_generated {
+ pub use aconfig_protos::aconfig::flag_metadata::Flag_purpose as ProtoFlagPurpose;
pub use aconfig_protos::aconfig::Flag_declaration as ProtoFlagDeclaration;
pub use aconfig_protos::aconfig::Flag_declarations as ProtoFlagDeclarations;
+ pub use aconfig_protos::aconfig::Flag_metadata as ProtoFlagMetadata;
pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
pub use aconfig_protos::aconfig::Flag_value as ProtoFlagValue;
@@ -47,8 +49,10 @@
// because this is only used during local development, and only if using cargo instead of the
// Android tool-chain, we allow it
include!(concat!(env!("OUT_DIR"), "/aconfig_proto/mod.rs"));
+ pub use aconfig::flag_metadata::Flag_purpose as ProtoFlagPurpose;
pub use aconfig::Flag_declaration as ProtoFlagDeclaration;
pub use aconfig::Flag_declarations as ProtoFlagDeclarations;
+ pub use aconfig::Flag_metadata as ProtoFlagMetadata;
pub use aconfig::Flag_permission as ProtoFlagPermission;
pub use aconfig::Flag_state as ProtoFlagState;
pub use aconfig::Flag_value as ProtoFlagValue;
@@ -111,11 +115,16 @@
pub fn verify_fields(pdf: &ProtoFlagDeclarations) -> Result<()> {
ensure_required_fields!("flag declarations", pdf, "package");
+ // TODO(b/312769710): Make the container field required.
ensure!(
codegen::is_valid_package_ident(pdf.package()),
"bad flag declarations: bad package"
);
+ ensure!(
+ !pdf.has_container() || codegen::is_valid_container_ident(pdf.container()),
+ "bad flag declarations: bad container"
+ );
for flag_declaration in pdf.flag.iter() {
super::flag_declaration::verify_fields(flag_declaration)?;
}
@@ -207,6 +216,10 @@
);
ensure!(codegen::is_valid_package_ident(pf.package()), "bad parsed flag: bad package");
+ ensure!(
+ !pf.has_container() || codegen::is_valid_container_ident(pf.container()),
+ "bad parsed flag: bad container"
+ );
ensure!(codegen::is_valid_name_ident(pf.name()), "bad parsed flag: bad name");
ensure!(codegen::is_valid_name_ident(pf.namespace()), "bad parsed flag: bad namespace");
ensure!(!pf.description().is_empty(), "bad parsed flag: empty description");
@@ -274,12 +287,18 @@
Ok(())
}
- pub fn merge(parsed_flags: Vec<ProtoParsedFlags>) -> Result<ProtoParsedFlags> {
+ pub fn merge(parsed_flags: Vec<ProtoParsedFlags>, dedup: bool) -> Result<ProtoParsedFlags> {
let mut merged = ProtoParsedFlags::new();
for mut pfs in parsed_flags.into_iter() {
merged.parsed_flag.append(&mut pfs.parsed_flag);
}
merged.parsed_flag.sort_by_cached_key(create_sorting_key);
+ if dedup {
+ // Deduplicate identical protobuf messages. Messages with the same sorting key but
+ // different fields (including the path to the original source file) will not be
+ // deduplicated and trigger an error in verify_fields.
+ merged.parsed_flag.dedup();
+ }
verify_fields(&merged)?;
Ok(merged)
}
@@ -289,7 +308,17 @@
}
fn create_sorting_key(pf: &ProtoParsedFlag) -> String {
- format!("{}.{}", pf.package(), pf.name())
+ pf.fully_qualified_name()
+ }
+}
+
+pub trait ParsedFlagExt {
+ fn fully_qualified_name(&self) -> String;
+}
+
+impl ParsedFlagExt for ProtoParsedFlag {
+ fn fully_qualified_name(&self) -> String {
+ format!("{}.{}", self.package(), self.name())
}
}
@@ -303,6 +332,7 @@
let flag_declarations = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -321,6 +351,7 @@
)
.unwrap();
assert_eq!(flag_declarations.package(), "com.foo.bar");
+ assert_eq!(flag_declarations.container(), "system");
let first = flag_declarations.flag.iter().find(|pf| pf.name() == "first").unwrap();
assert_eq!(first.name(), "first");
assert_eq!(first.namespace(), "first_ns");
@@ -336,9 +367,26 @@
assert!(second.is_fixed_read_only());
assert!(!second.is_exported());
+ // valid input: missing container in flag declarations is supported
+ let flag_declarations = flag_declarations::try_from_text_proto(
+ r#"
+package: "com.foo.bar"
+flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+}
+"#,
+ )
+ .unwrap();
+ assert_eq!(flag_declarations.container(), "");
+ assert!(!flag_declarations.has_container());
+
// bad input: missing package in flag declarations
let error = flag_declarations::try_from_text_proto(
r#"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -358,6 +406,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
description: "This is the description of the first flag."
@@ -376,6 +425,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "_com.FOO__BAR"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -395,6 +445,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "FIRST"
namespace: "first_ns"
@@ -414,6 +465,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -428,6 +480,7 @@
let error = flag_declarations::try_from_text_proto(
r#"
package: "com.foo.bar"
+container: "system"
flag {
name: "first"
namespace: "first_ns"
@@ -439,6 +492,25 @@
)
.unwrap_err();
assert!(format!("{:?}", error).contains("bad flag declaration: exactly one bug required"));
+
+ // bad input: invalid container name in flag declaration
+ let error = flag_declarations::try_from_text_proto(
+ r#"
+package: "com.foo.bar"
+container: "__bad_bad_container.com"
+flag {
+ name: "first"
+ namespace: "first_ns"
+ description: "This is the description of the first flag."
+ bug: "123"
+ bug: "abc"
+}
+"#,
+ )
+ .unwrap_err();
+ assert!(format!("{:?}", error).contains("bad flag declarations: bad container"));
+
+ // TODO(b/312769710): Verify error when container is missing.
}
#[test]
@@ -553,6 +625,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.second"
@@ -573,6 +646,7 @@
permission: READ_ONLY
}
is_fixed_read_only: true
+ container: "system"
}
"#;
let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -607,6 +681,7 @@
description: "This is the description of the first flag."
state: DISABLED
permission: READ_ONLY
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -625,6 +700,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -645,6 +721,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "aaa.aaa"
@@ -659,6 +736,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -682,6 +760,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.foo"
@@ -696,6 +775,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -719,6 +799,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.foo"
@@ -733,6 +814,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let error = try_from_binary_proto_from_text_proto(text_proto).unwrap_err();
@@ -760,6 +842,7 @@
state: ENABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let parsed_flags = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -786,6 +869,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
parsed_flag {
package: "com.second"
@@ -800,6 +884,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let expected = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -818,6 +903,7 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let first = try_from_binary_proto_from_text_proto(text_proto).unwrap();
@@ -836,18 +922,68 @@
state: DISABLED
permission: READ_ONLY
}
+ container: "system"
}
"#;
let second = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+ let text_proto = r#"
+parsed_flag {
+ package: "com.second"
+ name: "second"
+ namespace: "second_ns"
+ bug: "b"
+ description: "This is the description of the second flag."
+ state: ENABLED
+ permission: READ_WRITE
+ trace {
+ source: "duplicate/flags.declarations"
+ state: DISABLED
+ permission: READ_ONLY
+ }
+}
+"#;
+ let second_duplicate = try_from_binary_proto_from_text_proto(text_proto).unwrap();
+
// bad cases
- let error = parsed_flags::merge(vec![first.clone(), first.clone()]).unwrap_err();
+
+ // two of the same flag with dedup disabled
+ let error = parsed_flags::merge(vec![first.clone(), first.clone()], false).unwrap_err();
assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.first.first (defined in flags.declarations and flags.declarations)");
+ // two conflicting flags with dedup disabled
+ let error =
+ parsed_flags::merge(vec![second.clone(), second_duplicate.clone()], false).unwrap_err();
+ assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.second.second (defined in flags.declarations and duplicate/flags.declarations)");
+
+ // two conflicting flags with dedup enabled
+ let error =
+ parsed_flags::merge(vec![second.clone(), second_duplicate.clone()], true).unwrap_err();
+ assert_eq!(format!("{:?}", error), "bad parsed flags: duplicate flag com.second.second (defined in flags.declarations and duplicate/flags.declarations)");
+
// valid cases
- assert!(parsed_flags::merge(vec![]).unwrap().parsed_flag.is_empty());
- assert_eq!(first, parsed_flags::merge(vec![first.clone()]).unwrap());
- assert_eq!(expected, parsed_flags::merge(vec![first.clone(), second.clone()]).unwrap());
- assert_eq!(expected, parsed_flags::merge(vec![second, first]).unwrap());
+ assert!(parsed_flags::merge(vec![], false).unwrap().parsed_flag.is_empty());
+ assert!(parsed_flags::merge(vec![], true).unwrap().parsed_flag.is_empty());
+ assert_eq!(first, parsed_flags::merge(vec![first.clone()], false).unwrap());
+ assert_eq!(first, parsed_flags::merge(vec![first.clone()], true).unwrap());
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![first.clone(), second.clone()], false).unwrap()
+ );
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![first.clone(), second.clone()], true).unwrap()
+ );
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![second.clone(), first.clone()], false).unwrap()
+ );
+ assert_eq!(
+ expected,
+ parsed_flags::merge(vec![second.clone(), first.clone()], true).unwrap()
+ );
+
+ // two identical flags with dedup enabled
+ assert_eq!(first, parsed_flags::merge(vec![first.clone(), first.clone()], true).unwrap());
}
}
diff --git a/tools/aconfig/src/storage/mod.rs b/tools/aconfig/src/storage/mod.rs
new file mode 100644
index 0000000..90e05f5
--- /dev/null
+++ b/tools/aconfig/src/storage/mod.rs
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::Result;
+use std::collections::{HashMap, HashSet};
+
+use crate::commands::OutputFile;
+use crate::protos::{ProtoParsedFlag, ProtoParsedFlags};
+
+pub struct FlagPackage<'a> {
+ pub package_name: &'a str,
+ pub package_id: u32,
+ pub flag_names: HashSet<&'a str>,
+ pub boolean_flags: Vec<&'a ProtoParsedFlag>,
+ pub boolean_offset: u32,
+}
+
+impl<'a> FlagPackage<'a> {
+ fn new(package_name: &'a str, package_id: u32) -> Self {
+ FlagPackage {
+ package_name,
+ package_id,
+ flag_names: HashSet::new(),
+ boolean_flags: vec![],
+ boolean_offset: 0,
+ }
+ }
+
+ fn insert(&mut self, pf: &'a ProtoParsedFlag) {
+ if self.flag_names.insert(pf.name()) {
+ self.boolean_flags.push(pf);
+ }
+ }
+}
+
+pub fn group_flags_by_package<'a, I>(parsed_flags_vec_iter: I) -> Vec<FlagPackage<'a>>
+where
+ I: Iterator<Item = &'a ProtoParsedFlags>,
+{
+ // group flags by package
+ let mut packages: Vec<FlagPackage<'a>> = Vec::new();
+ let mut package_index: HashMap<&'a str, usize> = HashMap::new();
+ for parsed_flags in parsed_flags_vec_iter {
+ for parsed_flag in parsed_flags.parsed_flag.iter() {
+ let index = *(package_index.entry(parsed_flag.package()).or_insert(packages.len()));
+ if index == packages.len() {
+ packages.push(FlagPackage::new(parsed_flag.package(), index as u32));
+ }
+ packages[index].insert(parsed_flag);
+ }
+ }
+
+ // calculate package flag value start offset, in flag value file, each boolean
+ // is stored as two bytes, the first byte will be the flag value. the second
+ // byte is flag info byte, which is a bitmask to indicate the status of a flag
+ let mut boolean_offset = 0;
+ for p in packages.iter_mut() {
+ p.boolean_offset = boolean_offset;
+ boolean_offset += 2 * p.boolean_flags.len() as u32;
+ }
+
+ packages
+}
+
+pub fn generate_storage_files<'a, I>(
+ _containser: &str,
+ parsed_flags_vec_iter: I,
+) -> Result<Vec<OutputFile>>
+where
+ I: Iterator<Item = &'a ProtoParsedFlags>,
+{
+ let _packages = group_flags_by_package(parsed_flags_vec_iter);
+ Ok(vec![])
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::Input;
+
+ pub fn parse_all_test_flags() -> Vec<ProtoParsedFlags> {
+ let aconfig_files = [
+ (
+ "com.android.aconfig.storage.test_1",
+ "storage_test_1_part_1.aconfig",
+ include_bytes!("../../tests/storage_test_1_part_1.aconfig").as_slice(),
+ ),
+ (
+ "com.android.aconfig.storage.test_1",
+ "storage_test_1_part_2.aconfig",
+ include_bytes!("../../tests/storage_test_1_part_2.aconfig").as_slice(),
+ ),
+ (
+ "com.android.aconfig.storage.test_2",
+ "storage_test_2.aconfig",
+ include_bytes!("../../tests/storage_test_2.aconfig").as_slice(),
+ ),
+ ];
+
+ aconfig_files
+ .into_iter()
+ .map(|(pkg, file, content)| {
+ let bytes = crate::commands::parse_flags(
+ pkg,
+ Some("system"),
+ vec![Input {
+ source: format!("tests/{}", file).to_string(),
+ reader: Box::new(content),
+ }],
+ vec![],
+ crate::commands::DEFAULT_FLAG_PERMISSION,
+ )
+ .unwrap();
+ crate::protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
+ })
+ .collect()
+ }
+
+ #[test]
+ fn test_flag_package() {
+ let caches = parse_all_test_flags();
+ let packages = group_flags_by_package(caches.iter());
+
+ for pkg in packages.iter() {
+ let pkg_name = pkg.package_name;
+ assert_eq!(pkg.flag_names.len(), pkg.boolean_flags.len());
+ for pf in pkg.boolean_flags.iter() {
+ assert!(pkg.flag_names.contains(pf.name()));
+ assert_eq!(pf.package(), pkg_name);
+ }
+ }
+
+ assert_eq!(packages.len(), 2);
+
+ assert_eq!(packages[0].package_name, "com.android.aconfig.storage.test_1");
+ assert_eq!(packages[0].package_id, 0);
+ assert_eq!(packages[0].flag_names.len(), 5);
+ assert!(packages[0].flag_names.contains("enabled_rw"));
+ assert!(packages[0].flag_names.contains("disabled_rw"));
+ assert!(packages[0].flag_names.contains("enabled_ro"));
+ assert!(packages[0].flag_names.contains("disabled_ro"));
+ assert!(packages[0].flag_names.contains("enabled_fixed_ro"));
+ assert_eq!(packages[0].boolean_offset, 0);
+
+ assert_eq!(packages[1].package_name, "com.android.aconfig.storage.test_2");
+ assert_eq!(packages[1].package_id, 1);
+ assert_eq!(packages[1].flag_names.len(), 3);
+ assert!(packages[1].flag_names.contains("enabled_ro"));
+ assert!(packages[1].flag_names.contains("disabled_ro"));
+ assert!(packages[1].flag_names.contains("enabled_fixed_ro"));
+ assert_eq!(packages[1].boolean_offset, 10);
+ }
+}
diff --git a/tools/aconfig/src/test.rs b/tools/aconfig/src/test.rs
index 9f598d0..71de57e 100644
--- a/tools/aconfig/src/test.rs
+++ b/tools/aconfig/src/test.rs
@@ -43,6 +43,10 @@
}
is_fixed_read_only: false
is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -58,13 +62,17 @@
permission: READ_WRITE
}
is_fixed_read_only: false
- is_exported: true
+ is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
name: "disabled_rw_exported"
namespace: "aconfig_test"
- description: "This flag is exported"
+ description: "This flag is DISABLED + READ_WRITE and exported"
bug: "111"
state: DISABLED
permission: READ_WRITE
@@ -80,6 +88,10 @@
}
is_fixed_read_only: false
is_exported: true
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -101,6 +113,10 @@
}
is_fixed_read_only: false
is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -122,6 +138,10 @@
}
is_fixed_read_only: true
is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -148,6 +168,35 @@
}
is_fixed_read_only: false
is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+parsed_flag {
+ package: "com.android.aconfig.test"
+ name: "enabled_ro_exported"
+ namespace: "aconfig_test"
+ description: "This flag is ENABLED + READ_ONLY and exported"
+ bug: "111"
+ state: ENABLED
+ permission: READ_ONLY
+ trace {
+ source: "tests/test.aconfig"
+ state: DISABLED
+ permission: READ_WRITE
+ }
+ trace {
+ source: "tests/first.values"
+ state: ENABLED
+ permission: READ_ONLY
+ }
+ is_fixed_read_only: false
+ is_exported: true
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
parsed_flag {
package: "com.android.aconfig.test"
@@ -169,12 +218,17 @@
}
is_fixed_read_only: false
is_exported: false
+ container: "system"
+ metadata {
+ purpose: PURPOSE_UNSPECIFIED
+ }
}
"#;
pub fn parse_test_flags() -> ProtoParsedFlags {
let bytes = crate::commands::parse_flags(
"com.android.aconfig.test",
+ Some("system"),
vec![Input {
source: "tests/test.aconfig".to_string(),
reader: Box::new(include_bytes!("../tests/test.aconfig").as_slice()),
diff --git a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
index fd2e26a..8010b88 100644
--- a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
+++ b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
@@ -52,11 +52,20 @@
}
private Map<String, Boolean> mFlagMap = new HashMap<>(
+ {{ if library_exported }}
+ Map.ofEntries(
+ {{-for item in exported_flag_elements}}
+ Map.entry(Flags.FLAG_{item.flag_name_constant_suffix}, false)
+ {{ -if not @last }},{{ endif }}
+ {{ -endfor }}
+ )
+ {{ else }}
Map.ofEntries(
{{-for item in flag_elements}}
Map.entry(Flags.FLAG_{item.flag_name_constant_suffix}, false)
{{ -if not @last }},{{ endif }}
{{ -endfor }}
)
+ {{ endif }}
);
}
diff --git a/tools/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/templates/FeatureFlagsImpl.java.template
index a15c859..7a52ceb 100644
--- a/tools/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/templates/FeatureFlagsImpl.java.template
@@ -2,13 +2,13 @@
// TODO(b/303773055): Remove the annotation after access issue is resolved.
import android.compat.annotation.UnsupportedAppUsage;
{{ if not is_test_mode }}
-{{ if is_read_write- }}
+{{ if runtime_lookup_required- }}
import android.provider.DeviceConfig;
import android.provider.DeviceConfig.Properties;
{{ endif }}
/** @hide */
public final class FeatureFlagsImpl implements FeatureFlags \{
-{{- if is_read_write }}
+{{- if runtime_lookup_required }}
{{- for namespace_with_flags in namespace_flags }}
private static boolean {namespace_with_flags.namespace}_is_cached = false;
{{- endfor- }}
@@ -71,14 +71,10 @@
@Override
@UnsupportedAppUsage
public boolean {flag.method_name}() \{
- {{ -if flag.is_read_write }}
if (!{flag.device_config_namespace}_is_cached) \{
load_overrides_{flag.device_config_namespace}();
}
return {flag.method_name};
- {{ else }}
- return {flag.default_value};
- {{ endif- }}
}
{{ endif }}
diff --git a/tools/aconfig/tests/AconfigTest.java b/tools/aconfig/tests/AconfigTest.java
index 958b02e..bb993c4 100644
--- a/tools/aconfig/tests/AconfigTest.java
+++ b/tools/aconfig/tests/AconfigTest.java
@@ -8,6 +8,8 @@
import static com.android.aconfig.test.Flags.enabledFixedRo;
import static com.android.aconfig.test.Flags.enabledRo;
import static com.android.aconfig.test.Flags.enabledRw;
+import static com.android.aconfig.test.exported.Flags.exportedFlag;
+import static com.android.aconfig.test.exported.Flags.FLAG_EXPORTED_FLAG;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThrows;
@@ -64,4 +66,10 @@
fakeFeatureFlags.setFlag(FLAG_ENABLED_RW, false);
assertFalse(fakeFeatureFlags.enabledRw());
}
+
+ @Test
+ public void testExportedFlag() {
+ assertEquals("com.android.aconfig.test.exported.exported_flag", FLAG_EXPORTED_FLAG);
+ assertFalse(exportedFlag());
+ }
}
diff --git a/tools/aconfig/tests/aconfig_test.cpp b/tools/aconfig/tests/aconfig_test.cpp
index 10de347..52651e4 100644
--- a/tools/aconfig/tests/aconfig_test.cpp
+++ b/tools/aconfig/tests/aconfig_test.cpp
@@ -17,24 +17,42 @@
#include "com_android_aconfig_test.h"
#include "gtest/gtest.h"
+using namespace com::android::aconfig::test;
+
TEST(AconfigTest, TestDisabledReadOnlyFlag) {
ASSERT_FALSE(com_android_aconfig_test_disabled_ro());
+ ASSERT_FALSE(provider_->disabled_ro());
+ ASSERT_FALSE(disabled_ro());
}
TEST(AconfigTest, TestEnabledReadOnlyFlag) {
// TODO: change to assertTrue(enabledRo()) when the build supports reading tests/*.values
// (currently all flags are assigned the default READ_ONLY + DISABLED)
ASSERT_FALSE(com_android_aconfig_test_enabled_ro());
+ ASSERT_FALSE(provider_->enabled_ro());
+ ASSERT_FALSE(enabled_ro());
}
TEST(AconfigTest, TestDisabledReadWriteFlag) {
ASSERT_FALSE(com_android_aconfig_test_disabled_rw());
+ ASSERT_FALSE(provider_->disabled_rw());
+ ASSERT_FALSE(disabled_rw());
}
TEST(AconfigTest, TestEnabledReadWriteFlag) {
// TODO: change to assertTrue(enabledRo()) when the build supports reading tests/*.values
// (currently all flags are assigned the default READ_ONLY + DISABLED)
ASSERT_FALSE(com_android_aconfig_test_enabled_rw());
+ ASSERT_FALSE(provider_->enabled_rw());
+ ASSERT_FALSE(enabled_rw());
+}
+
+TEST(AconfigTest, TestEnabledFixedReadOnlyFlag) {
+ // TODO: change to assertTrue(enabledFixedRo()) when the build supports reading tests/*.values
+ // (currently all flags are assigned the default READ_ONLY + DISABLED)
+ ASSERT_FALSE(com_android_aconfig_test_enabled_fixed_ro());
+ ASSERT_FALSE(provider_->enabled_fixed_ro());
+ ASSERT_FALSE(enabled_fixed_ro());
}
int main(int argc, char** argv) {
diff --git a/tools/aconfig/tests/aconfig_test_test_variant.cpp b/tools/aconfig/tests/aconfig_test_test_variant.cpp
new file mode 100644
index 0000000..8a745c5
--- /dev/null
+++ b/tools/aconfig/tests/aconfig_test_test_variant.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "com_android_aconfig_test.h"
+#include "gtest/gtest.h"
+
+using namespace com::android::aconfig::test;
+
+class AconfigTest : public ::testing::Test {
+ protected:
+ void SetUp() override {
+ reset_flags();
+ }
+};
+
+TEST_F(AconfigTest, TestDisabledReadOnlyFlag) {
+ ASSERT_FALSE(com_android_aconfig_test_disabled_ro());
+ ASSERT_FALSE(provider_->disabled_ro());
+ ASSERT_FALSE(disabled_ro());
+}
+
+TEST_F(AconfigTest, TestEnabledReadOnlyFlag) {
+ // TODO: change to assertTrue(enabledRo()) when the build supports reading tests/*.values
+ // (currently all flags are assigned the default READ_ONLY + DISABLED)
+ ASSERT_FALSE(com_android_aconfig_test_enabled_ro());
+ ASSERT_FALSE(provider_->enabled_ro());
+ ASSERT_FALSE(enabled_ro());
+}
+
+TEST_F(AconfigTest, TestDisabledReadWriteFlag) {
+ ASSERT_FALSE(com_android_aconfig_test_disabled_rw());
+ ASSERT_FALSE(provider_->disabled_rw());
+ ASSERT_FALSE(disabled_rw());
+}
+
+TEST_F(AconfigTest, TestEnabledReadWriteFlag) {
+ // TODO: change to assertTrue(enabledRo()) when the build supports reading tests/*.values
+ // (currently all flags are assigned the default READ_ONLY + DISABLED)
+ ASSERT_FALSE(com_android_aconfig_test_enabled_rw());
+ ASSERT_FALSE(provider_->enabled_rw());
+ ASSERT_FALSE(enabled_rw());
+}
+
+TEST_F(AconfigTest, TestEnabledFixedReadOnlyFlag) {
+ // TODO: change to assertTrue(enabledFixedRo()) when the build supports reading tests/*.values
+ // (currently all flags are assigned the default READ_ONLY + DISABLED)
+ ASSERT_FALSE(com_android_aconfig_test_enabled_fixed_ro());
+ ASSERT_FALSE(provider_->enabled_fixed_ro());
+ ASSERT_FALSE(enabled_fixed_ro());
+}
+
+TEST_F(AconfigTest, OverrideFlagValue) {
+ ASSERT_FALSE(disabled_ro());
+ disabled_ro(true);
+ ASSERT_TRUE(disabled_ro());
+}
+
+TEST_F(AconfigTest, ResetFlagValue) {
+ ASSERT_FALSE(disabled_ro());
+ ASSERT_FALSE(enabled_ro());
+ disabled_ro(true);
+ enabled_ro(true);
+ ASSERT_TRUE(disabled_ro());
+ ASSERT_TRUE(enabled_ro());
+ reset_flags();
+ ASSERT_FALSE(disabled_ro());
+ ASSERT_FALSE(enabled_ro());
+}
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/tools/aconfig/tests/first.values b/tools/aconfig/tests/first.values
index b248d43..731ce84 100644
--- a/tools/aconfig/tests/first.values
+++ b/tools/aconfig/tests/first.values
@@ -30,6 +30,12 @@
}
flag_value {
package: "com.android.aconfig.test"
+ name: "enabled_ro_exported"
+ state: ENABLED
+ permission: READ_ONLY
+}
+flag_value {
+ package: "com.android.aconfig.test"
name: "disabled_rw_exported"
state: DISABLED
permission: READ_WRITE
diff --git a/tools/aconfig/tests/storage_test_1_part_1.aconfig b/tools/aconfig/tests/storage_test_1_part_1.aconfig
new file mode 100644
index 0000000..70462cd
--- /dev/null
+++ b/tools/aconfig/tests/storage_test_1_part_1.aconfig
@@ -0,0 +1,17 @@
+package: "com.android.aconfig.storage.test_1"
+container: "system"
+
+flag {
+ name: "enabled_rw"
+ namespace: "aconfig_test"
+ description: "This flag is ENABLED + READ_WRITE"
+ bug: ""
+}
+
+flag {
+ name: "disabled_rw"
+ namespace: "aconfig_test"
+ description: "This flag is DISABLED + READ_WRITE"
+ bug: "456"
+ is_exported: true
+}
diff --git a/tools/aconfig/tests/storage_test_1_part_2.aconfig b/tools/aconfig/tests/storage_test_1_part_2.aconfig
new file mode 100644
index 0000000..5eb0c0c
--- /dev/null
+++ b/tools/aconfig/tests/storage_test_1_part_2.aconfig
@@ -0,0 +1,24 @@
+package: "com.android.aconfig.storage.test_1"
+container: "system"
+
+flag {
+ name: "enabled_ro"
+ namespace: "aconfig_test"
+ description: "This flag is ENABLED + READ_ONLY"
+ bug: "abc"
+}
+
+flag {
+ name: "disabled_ro"
+ namespace: "aconfig_test"
+ description: "This flag is DISABLED + READ_ONLY"
+ bug: "123"
+}
+
+flag {
+ name: "enabled_fixed_ro"
+ namespace: "aconfig_test"
+ description: "This flag is fixed READ_ONLY + ENABLED"
+ bug: ""
+ is_fixed_read_only: true
+}
diff --git a/tools/aconfig/tests/storage_test_2.aconfig b/tools/aconfig/tests/storage_test_2.aconfig
new file mode 100644
index 0000000..bb14fd1
--- /dev/null
+++ b/tools/aconfig/tests/storage_test_2.aconfig
@@ -0,0 +1,24 @@
+package: "com.android.aconfig.storage.test_2"
+container: "system"
+
+flag {
+ name: "enabled_ro"
+ namespace: "aconfig_test"
+ description: "This flag is ENABLED + READ_ONLY"
+ bug: "abc"
+}
+
+flag {
+ name: "disabled_ro"
+ namespace: "aconfig_test"
+ description: "This flag is DISABLED + READ_ONLY"
+ bug: "123"
+}
+
+flag {
+ name: "enabled_fixed_ro"
+ namespace: "aconfig_test"
+ description: "This flag is fixed READ_ONLY + ENABLED"
+ bug: ""
+ is_fixed_read_only: true
+}
diff --git a/tools/aconfig/tests/test.aconfig b/tools/aconfig/tests/test.aconfig
index 8a1a913..014bced 100644
--- a/tools/aconfig/tests/test.aconfig
+++ b/tools/aconfig/tests/test.aconfig
@@ -1,4 +1,5 @@
package: "com.android.aconfig.test"
+container: "system"
# This flag's final value is calculated from:
# - test.aconfig: DISABLED + READ_WRITE (default)
@@ -9,6 +10,9 @@
namespace: "aconfig_test"
description: "This flag is ENABLED + READ_ONLY"
bug: "abc"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
}
# This flag's final value is calculated from:
@@ -39,7 +43,6 @@
namespace: "aconfig_test"
description: "This flag is DISABLED + READ_WRITE"
bug: "456"
- is_exported: true
}
# This flag's final value calculated from:
@@ -61,9 +64,17 @@
}
flag {
- name: "disabled_rw_exported"
+ name: "enabled_ro_exported"
namespace: "aconfig_test"
- description: "This flag is exported"
+ description: "This flag is ENABLED + READ_ONLY and exported"
bug: "111"
is_exported: true
-}
\ No newline at end of file
+}
+
+flag {
+ name: "disabled_rw_exported"
+ namespace: "aconfig_test"
+ description: "This flag is DISABLED + READ_WRITE and exported"
+ bug: "111"
+ is_exported: true
+}
diff --git a/tools/aconfig/tests/test_exported.aconfig b/tools/aconfig/tests/test_exported.aconfig
new file mode 100644
index 0000000..20f23a3
--- /dev/null
+++ b/tools/aconfig/tests/test_exported.aconfig
@@ -0,0 +1,17 @@
+package: "com.android.aconfig.test.exported"
+container: "system"
+
+flag {
+ name: "exported_flag"
+ namespace: "aconfig_test"
+ description: "This is an exported flag"
+ is_exported: true
+ bug: "888"
+}
+
+flag {
+ name: "not_exported_flag"
+ namespace: "aconfig_test"
+ description: "This flag is not exported"
+ bug: "777"
+}
diff --git a/tools/checkowners.py b/tools/checkowners.py
deleted file mode 100755
index f037321..0000000
--- a/tools/checkowners.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/python
-
-"""Parse and check syntax errors of a given OWNERS file."""
-
-import argparse
-import re
-import sys
-import urllib.request, urllib.parse, urllib.error
-import urllib.request, urllib.error, urllib.parse
-
-parser = argparse.ArgumentParser(description='Check OWNERS file syntax')
-parser.add_argument('-v', '--verbose', dest='verbose',
- action='store_true', default=False,
- help='Verbose output to debug')
-parser.add_argument('-c', '--check_address', dest='check_address',
- action='store_true', default=False,
- help='Check email addresses')
-parser.add_argument(dest='owners', metavar='OWNERS', nargs='+',
- help='Path to OWNERS file')
-args = parser.parse_args()
-
-gerrit_server = 'https://android-review.googlesource.com'
-checked_addresses = {}
-
-
-def echo(msg):
- if args.verbose:
- print(msg)
-
-
-def find_address(address):
- if address not in checked_addresses:
- request = (gerrit_server + '/accounts/?n=1&q=email:'
- + urllib.parse.quote(address))
- echo('Checking email address: ' + address)
- result = urllib.request.urlopen(request).read()
- checked_addresses[address] = result.find('"_account_id":') >= 0
- if checked_addresses[address]:
- echo('Found email address: ' + address)
- return checked_addresses[address]
-
-
-def check_address(fname, num, address):
- if find_address(address):
- return 0
- print('%s:%d: ERROR: unknown email address: %s' % (fname, num, address))
- return 1
-
-
-def main():
- # One regular expression to check all valid lines.
- noparent = 'set +noparent'
- email = '([^@ ]+@[^ @]+|\\*)'
- emails = '(%s( *, *%s)*)' % (email, email)
- file_directive = 'file: *([^ :]+ *: *)?[^ ]+'
- directive = '(%s|%s|%s)' % (emails, noparent, file_directive)
- glob = '[a-zA-Z0-9_\\.\\-\\*\\?]+'
- globs = '(%s( *, *%s)*)' % (glob, glob)
- perfile = 'per-file +' + globs + ' *= *' + directive
- include = 'include +([^ :]+ *: *)?[^ ]+'
- pats = '(|%s|%s|%s|%s|%s)$' % (noparent, email, perfile, include, file_directive)
- patterns = re.compile(pats)
- address_pattern = re.compile('([^@ ]+@[^ @]+)')
- perfile_pattern = re.compile('per-file +.*=(.*)')
-
- error = 0
- for fname in args.owners:
- echo('Checking file: ' + fname)
- num = 0
- for line in open(fname, 'r'):
- num += 1
- stripped_line = re.sub('#.*$', '', line).strip()
- if not patterns.match(stripped_line):
- error += 1
- print('%s:%d: ERROR: unknown line [%s]' % (fname, num, line.strip()))
- elif args.check_address:
- if perfile_pattern.match(stripped_line):
- for addr in perfile_pattern.match(stripped_line).group(1).split(','):
- a = addr.strip()
- if a and a != '*':
- error += check_address(fname, num, addr.strip())
- elif address_pattern.match(stripped_line):
- error += check_address(fname, num, stripped_line)
- sys.exit(error)
-
-if __name__ == '__main__':
- main()
diff --git a/tools/finalization/README.md b/tools/finalization/README.md
index 501f260..cc97d1f 100644
--- a/tools/finalization/README.md
+++ b/tools/finalization/README.md
@@ -12,10 +12,8 @@
## CI:
Performed in build targets in Finalization branches.
-1. [Finalization Step 1 for Main, git_main-fina-1-release](https://android-build.googleplex.com/builds/branches/git_main-fina-1-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
-2. [Finalization Step 1 for UDC, git_udc-fina-1-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-1-release/grid). Same but for udc-dev.
-3. [Finalization Step 2 for Main, git_main-fina-2-release](https://android-build.googleplex.com/builds/branches/git_main-fina-2-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
-4. [Finalization Step 2 for UDC, git_udc-fina-2-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-2-release/grid). Same but for udc-dev.
+1. [Finalization Step 1, git_main-fina-1-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-1-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
+3. [Finalization Step 2, git_main-fina-2-release](https://android-build.corp.google.com/build_explorer/branch/git_main-fina-2-release). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
5. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
## Utility:
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
index 6d13325..37c0011 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -137,6 +137,13 @@
local version_codes="$top/platform_testing/libraries/compatibility-common-util/src/com/android/compatibility/common/util/VersionCodes.java"
sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1));/a \\ ${SDK_VERSION}" $version_codes
+ # tools/platform-compat
+ local class2nonsdklist="$top/tools/platform-compat/java/com/android/class2nonsdklist/Class2NonSdkList.java"
+ if ! grep -q "\.*map.put($((${FINAL_PLATFORM_SDK_VERSION}))" $class2nonsdklist ; then
+ local sdk_version="map.put(${FINAL_PLATFORM_SDK_VERSION}, FLAG_UNSUPPORTED);"
+ sed -i -e "/.*map.put($((${FINAL_PLATFORM_SDK_VERSION}-1))/a \\ ${sdk_version}" $class2nonsdklist
+ fi
+
# Finalize resources
"$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
"$top/frameworks/base/core/res/res/values/public-staging.xml" \
diff --git a/tools/metadata/Android.bp b/tools/metadata/Android.bp
index b2fabec..77d106d 100644
--- a/tools/metadata/Android.bp
+++ b/tools/metadata/Android.bp
@@ -6,6 +6,8 @@
name: "metadata",
deps: [
"soong-testing-test_spec_proto",
+ "soong-testing-code_metadata_proto",
+ "soong-testing-code_metadata_internal_proto",
"golang-protobuf-proto",
],
srcs: [
diff --git a/tools/metadata/generator.go b/tools/metadata/generator.go
index e970e17..d328876 100644
--- a/tools/metadata/generator.go
+++ b/tools/metadata/generator.go
@@ -10,6 +10,8 @@
"strings"
"sync"
+ "android/soong/testing/code_metadata_internal_proto"
+ "android/soong/testing/code_metadata_proto"
"android/soong/testing/test_spec_proto"
"google.golang.org/protobuf/proto"
)
@@ -23,6 +25,13 @@
return mutex.(*sync.Mutex)
}
+// Define a struct to hold the combination of team ID and multi-ownership flag for validation
+type sourceFileAttributes struct {
+ TeamID string
+ MultiOwnership bool
+ Path string
+}
+
func getSortedKeys(syncMap *sync.Map) []string {
var allKeys []string
syncMap.Range(
@@ -36,14 +45,9 @@
return allKeys
}
-func writeOutput(
- outputFile string,
- allMetadata []*test_spec_proto.TestSpec_OwnershipMetadata,
-) {
- testSpec := &test_spec_proto.TestSpec{
- OwnershipMetadataList: allMetadata,
- }
- data, err := proto.Marshal(testSpec)
+// writeProtoToFile marshals a protobuf message and writes it to a file
+func writeProtoToFile(outputFile string, message proto.Message) {
+ data, err := proto.Marshal(message)
if err != nil {
log.Fatal(err)
}
@@ -88,8 +92,8 @@
}
func processTestSpecProtobuf(
- filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
- errCh chan error, wg *sync.WaitGroup,
+ filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
+ errCh chan error, wg *sync.WaitGroup,
) {
defer wg.Done()
@@ -117,7 +121,7 @@
if metadata.GetTrendyTeamId() != existing.GetTrendyTeamId() {
errCh <- fmt.Errorf(
"Conflicting trendy team IDs found for %s at:\n%s with teamId"+
- ": %s,\n%s with teamId: %s",
+ ": %s,\n%s with teamId: %s",
key,
metadata.GetPath(), metadata.GetTrendyTeamId(), existing.GetPath(),
existing.GetTrendyTeamId(),
@@ -141,10 +145,86 @@
}
}
+// processCodeMetadataProtobuf processes CodeMetadata protobuf files
+func processCodeMetadataProtobuf(
+ filePath string, ownershipMetadataMap *sync.Map, sourceFileMetadataMap *sync.Map, keyLocks *keyToLocksMap,
+ errCh chan error, wg *sync.WaitGroup,
+) {
+ defer wg.Done()
+
+ fileContent := strings.TrimRight(readFileToString(filePath), "\n")
+ internalCodeData := code_metadata_internal_proto.CodeMetadataInternal{}
+ err := proto.Unmarshal([]byte(fileContent), &internalCodeData)
+ if err != nil {
+ errCh <- err
+ return
+ }
+
+ // Process each TargetOwnership entry
+ for _, internalMetadata := range internalCodeData.GetTargetOwnershipList() {
+ key := internalMetadata.GetTargetName()
+ lock := keyLocks.GetLockForKey(key)
+ lock.Lock()
+
+ for _, srcFile := range internalMetadata.GetSourceFiles() {
+ srcFileKey := srcFile
+ srcFileLock := keyLocks.GetLockForKey(srcFileKey)
+ srcFileLock.Lock()
+ attributes := sourceFileAttributes{
+ TeamID: internalMetadata.GetTrendyTeamId(),
+ MultiOwnership: internalMetadata.GetMultiOwnership(),
+ Path: internalMetadata.GetPath(),
+ }
+
+ existingAttributes, exists := sourceFileMetadataMap.Load(srcFileKey)
+ if exists {
+ existing := existingAttributes.(sourceFileAttributes)
+ if attributes.TeamID != existing.TeamID && (!attributes.MultiOwnership || !existing.MultiOwnership) {
+ errCh <- fmt.Errorf(
+ "Conflict found for source file %s covered at %s with team ID: %s. Existing team ID: %s and path: %s."+
+ " If multi-ownership is required, multiOwnership should be set to true in all test_spec modules using this target. "+
+ "Multiple-ownership in general is discouraged though as it make infrastructure around android relying on this information pick up a random value when it needs only one.",
+ srcFile, internalMetadata.GetPath(), attributes.TeamID, existing.TeamID, existing.Path,
+ )
+ srcFileLock.Unlock()
+ lock.Unlock()
+ return
+ }
+ } else {
+ // Store the metadata if no conflict
+ sourceFileMetadataMap.Store(srcFileKey, attributes)
+ }
+ srcFileLock.Unlock()
+ }
+
+ value, loaded := ownershipMetadataMap.LoadOrStore(
+ key, []*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership{internalMetadata},
+ )
+ if loaded {
+ existingMetadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
+ isDuplicate := false
+ for _, existing := range existingMetadata {
+ if internalMetadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && internalMetadata.GetPath() == existing.GetPath() {
+ isDuplicate = true
+ break
+ }
+ }
+ if !isDuplicate {
+ existingMetadata = append(existingMetadata, internalMetadata)
+ ownershipMetadataMap.Store(key, existingMetadata)
+ }
+ }
+
+ lock.Unlock()
+ }
+}
+
func main() {
inputFile := flag.String("inputFile", "", "Input file path")
outputFile := flag.String("outputFile", "", "Output file path")
- rule := flag.String("rule", "", "Metadata rule (Hint: test_spec or code_metadata)")
+ rule := flag.String(
+ "rule", "", "Metadata rule (Hint: test_spec or code_metadata)",
+ )
flag.Parse()
if *inputFile == "" || *outputFile == "" || *rule == "" {
@@ -167,7 +247,9 @@
case "test_spec":
for _, filePath := range filePaths {
wg.Add(1)
- go processTestSpecProtobuf(filePath, ownershipMetadataMap, keyLocks, errCh, &wg)
+ go processTestSpecProtobuf(
+ filePath, ownershipMetadataMap, keyLocks, errCh, &wg,
+ )
}
wg.Wait()
@@ -186,9 +268,51 @@
allMetadata = append(allMetadata, metadataList...)
}
- writeOutput(*outputFile, allMetadata)
+ testSpec := &test_spec_proto.TestSpec{
+ OwnershipMetadataList: allMetadata,
+ }
+ writeProtoToFile(*outputFile, testSpec)
break
case "code_metadata":
+ sourceFileMetadataMap := &sync.Map{}
+ for _, filePath := range filePaths {
+ wg.Add(1)
+ go processCodeMetadataProtobuf(
+ filePath, ownershipMetadataMap, sourceFileMetadataMap, keyLocks, errCh, &wg,
+ )
+ }
+
+ wg.Wait()
+ close(errCh)
+
+ for err := range errCh {
+ log.Fatal(err)
+ }
+
+ sortedKeys := getSortedKeys(ownershipMetadataMap)
+ allMetadata := make([]*code_metadata_proto.CodeMetadata_TargetOwnership, 0)
+ for _, key := range sortedKeys {
+ value, _ := ownershipMetadataMap.Load(key)
+ metadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
+ for _, m := range metadata {
+ targetName := m.GetTargetName()
+ path := m.GetPath()
+ trendyTeamId := m.GetTrendyTeamId()
+
+ allMetadata = append(allMetadata, &code_metadata_proto.CodeMetadata_TargetOwnership{
+ TargetName: &targetName,
+ Path: &path,
+ TrendyTeamId: &trendyTeamId,
+ SourceFiles: m.GetSourceFiles(),
+ })
+ }
+ }
+
+ finalMetadata := &code_metadata_proto.CodeMetadata{
+ TargetOwnershipList: allMetadata,
+ }
+ writeProtoToFile(*outputFile, finalMetadata)
+ break
default:
log.Fatalf("No specific processing implemented for rule '%s'.\n", *rule)
}
diff --git a/tools/metadata/go.work b/tools/metadata/go.work
index 23875da..f2cdf8e 100644
--- a/tools/metadata/go.work
+++ b/tools/metadata/go.work
@@ -4,7 +4,8 @@
.
../../../../external/golang-protobuf
../../../soong/testing/test_spec_proto
-
+ ../../../soong/testing/code_metadata_proto
+ ../../../soong/testing/code_metadata_proto_internal
)
replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
diff --git a/tools/metadata/testdata/expectedCodeMetadataOutput.txt b/tools/metadata/testdata/expectedCodeMetadataOutput.txt
new file mode 100644
index 0000000..755cf40
--- /dev/null
+++ b/tools/metadata/testdata/expectedCodeMetadataOutput.txt
@@ -0,0 +1,7 @@
+
+
+bar
+Android.bp12346"b.java
+
+foo
+Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/file5.txt b/tools/metadata/testdata/file5.txt
new file mode 100644
index 0000000..d8de064
--- /dev/null
+++ b/tools/metadata/testdata/file5.txt
@@ -0,0 +1,4 @@
+
+
+foo
+Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file6.txt b/tools/metadata/testdata/file6.txt
new file mode 100644
index 0000000..9c7cdcd
--- /dev/null
+++ b/tools/metadata/testdata/file6.txt
@@ -0,0 +1,4 @@
+
+
+bar
+Android.bp12346"b.java
diff --git a/tools/metadata/testdata/file7.txt b/tools/metadata/testdata/file7.txt
new file mode 100644
index 0000000..d8de064
--- /dev/null
+++ b/tools/metadata/testdata/file7.txt
@@ -0,0 +1,4 @@
+
+
+foo
+Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file8.txt b/tools/metadata/testdata/file8.txt
new file mode 100644
index 0000000..a931690
--- /dev/null
+++ b/tools/metadata/testdata/file8.txt
@@ -0,0 +1,4 @@
+
+
+foo
+Android.gp12346"a.java
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutput.txt b/tools/metadata/testdata/generatedCodeMetadataOutput.txt
new file mode 100644
index 0000000..755cf40
--- /dev/null
+++ b/tools/metadata/testdata/generatedCodeMetadataOutput.txt
@@ -0,0 +1,7 @@
+
+
+bar
+Android.bp12346"b.java
+
+foo
+Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt b/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
new file mode 100644
index 0000000..755cf40
--- /dev/null
+++ b/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
@@ -0,0 +1,7 @@
+
+
+bar
+Android.bp12346"b.java
+
+foo
+Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadata.txt b/tools/metadata/testdata/inputCodeMetadata.txt
new file mode 100644
index 0000000..7a81b7d
--- /dev/null
+++ b/tools/metadata/testdata/inputCodeMetadata.txt
@@ -0,0 +1 @@
+file5.txt file6.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadataNegative.txt b/tools/metadata/testdata/inputCodeMetadataNegative.txt
new file mode 100644
index 0000000..26668e4
--- /dev/null
+++ b/tools/metadata/testdata/inputCodeMetadataNegative.txt
@@ -0,0 +1 @@
+file7.txt file8.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/metadata_test.go b/tools/metadata/testdata/metadata_test.go
index 71856fe..314add3 100644
--- a/tools/metadata/testdata/metadata_test.go
+++ b/tools/metadata/testdata/metadata_test.go
@@ -87,3 +87,33 @@
t.Errorf("Generated file contents do not match the expected output")
}
}
+
+func TestCodeMetadata(t *testing.T) {
+ cmd := exec.Command(
+ "metadata", "-rule", "code_metadata", "-inputFile", "./inputCodeMetadata.txt", "-outputFile",
+ "./generatedCodeMetadataOutputFile.txt",
+ )
+ stderr, err := cmd.CombinedOutput()
+ if err != nil {
+ t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
+ }
+
+ // Read the contents of the expected output file
+ expectedOutput, err := ioutil.ReadFile("./expectedCodeMetadataOutput.txt")
+ if err != nil {
+ t.Fatalf("Error reading expected output file: %s", err)
+ }
+
+ // Read the contents of the generated output file
+ generatedOutput, err := ioutil.ReadFile("./generatedCodeMetadataOutputFile.txt")
+ if err != nil {
+ t.Fatalf("Error reading generated output file: %s", err)
+ }
+
+ fmt.Println()
+
+ // Compare the contents
+ if string(expectedOutput) != string(generatedOutput) {
+ t.Errorf("Generated file contents do not match the expected output")
+ }
+}
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 5f99f6c..bd8ce14 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -168,6 +168,7 @@
"apexd_host",
"brillo_update_payload",
"checkvintf",
+ "generate_gki_certificate",
"lz4",
"toybox",
"unpack_bootimg",
@@ -244,6 +245,7 @@
"boot_signer",
"brotli",
"bsdiff",
+ "generate_gki_certificate",
"imgdiff",
"lz4",
"mkbootfs",
@@ -308,6 +310,7 @@
"brotli",
"bsdiff",
"deapexer",
+ "generate_gki_certificate",
"imgdiff",
"lz4",
"mkbootfs",
@@ -330,6 +333,7 @@
srcs: [
"ota_utils.py",
"payload_signer.py",
+ "ota_signing_utils.py",
],
libs: [
"releasetools_common",
@@ -345,7 +349,6 @@
},
srcs: [
"merge_ota.py",
- "ota_signing_utils.py",
],
libs: [
"ota_metadata_proto",
@@ -498,7 +501,6 @@
name: "ota_from_raw_img",
srcs: [
"ota_from_raw_img.py",
- "ota_signing_utils.py",
],
main: "ota_from_raw_img.py",
defaults: [
@@ -549,6 +551,8 @@
defaults: ["releasetools_binary_defaults"],
srcs: [
"sign_target_files_apks.py",
+ "payload_signer.py",
+ "ota_signing_utils.py",
],
libs: [
"releasetools_add_img_to_target_files",
@@ -612,7 +616,6 @@
"sign_target_files_apks.py",
"validate_target_files.py",
"merge_ota.py",
- "ota_signing_utils.py",
":releasetools_merge_sources",
":releasetools_merge_tests",
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 8571d74..bde152f 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -821,7 +821,6 @@
d["mount_point"] = mount_point
if mount_point == "system":
copy_prop("system_headroom", "partition_headroom")
- copy_prop("system_root_image", "system_root_image")
copy_prop("root_dir", "root_dir")
copy_prop("root_fs_config", "root_fs_config")
elif mount_point == "data":
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index a4c92ae..7451ccc 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1156,8 +1156,7 @@
return self.build_props.get(prop)
-def LoadRecoveryFSTab(read_helper, fstab_version, recovery_fstab_path,
- system_root_image=False):
+def LoadRecoveryFSTab(read_helper, fstab_version, recovery_fstab_path):
class Partition(object):
def __init__(self, mount_point, fs_type, device, length, context, slotselect):
self.mount_point = mount_point
@@ -1216,12 +1215,6 @@
device=pieces[0], length=length, context=context,
slotselect=slotselect)
- # / is used for the system mount point when the root directory is included in
- # system. Other areas assume system is always at "/system" so point /system
- # at /.
- if system_root_image:
- assert '/system' not in d and '/' in d
- d["/system"] = d["/"]
return d
@@ -1237,22 +1230,19 @@
# ../RAMDISK/system/etc/recovery.fstab. This function has to handle both
# cases, since it may load the info_dict from an old build (e.g. when
# generating incremental OTAs from that build).
- system_root_image = info_dict.get('system_root_image') == 'true'
if info_dict.get('no_recovery') != 'true':
recovery_fstab_path = 'RECOVERY/RAMDISK/system/etc/recovery.fstab'
if not DoesInputFileContain(input_file, recovery_fstab_path):
recovery_fstab_path = 'RECOVERY/RAMDISK/etc/recovery.fstab'
return LoadRecoveryFSTab(
- read_helper, info_dict['fstab_version'], recovery_fstab_path,
- system_root_image)
+ read_helper, info_dict['fstab_version'], recovery_fstab_path)
if info_dict.get('recovery_as_boot') == 'true':
recovery_fstab_path = 'BOOT/RAMDISK/system/etc/recovery.fstab'
if not DoesInputFileContain(input_file, recovery_fstab_path):
recovery_fstab_path = 'BOOT/RAMDISK/etc/recovery.fstab'
return LoadRecoveryFSTab(
- read_helper, info_dict['fstab_version'], recovery_fstab_path,
- system_root_image)
+ read_helper, info_dict['fstab_version'], recovery_fstab_path)
return None
@@ -1575,6 +1565,50 @@
pubkey_path=pubkey_path)
+def _HasGkiCertificationArgs():
+ return ("gki_signing_key_path" in OPTIONS.info_dict and
+ "gki_signing_algorithm" in OPTIONS.info_dict)
+
+
+def _GenerateGkiCertificate(image, image_name):
+ key_path = OPTIONS.info_dict.get("gki_signing_key_path")
+ algorithm = OPTIONS.info_dict.get("gki_signing_algorithm")
+
+ key_path = ResolveAVBSigningPathArgs(key_path)
+
+ # Checks key_path exists, before processing --gki_signing_* args.
+ if not os.path.exists(key_path):
+ raise ExternalError(
+ 'gki_signing_key_path: "{}" not found'.format(key_path))
+
+ output_certificate = tempfile.NamedTemporaryFile()
+ cmd = [
+ "generate_gki_certificate",
+ "--name", image_name,
+ "--algorithm", algorithm,
+ "--key", key_path,
+ "--output", output_certificate.name,
+ image,
+ ]
+
+ signature_args = OPTIONS.info_dict.get("gki_signing_signature_args", "")
+ signature_args = signature_args.strip()
+ if signature_args:
+ cmd.extend(["--additional_avb_args", signature_args])
+
+ args = OPTIONS.info_dict.get("avb_boot_add_hash_footer_args", "")
+ args = args.strip()
+ if args:
+ cmd.extend(["--additional_avb_args", args])
+
+ RunAndCheckOutput(cmd)
+
+ output_certificate.seek(os.SEEK_SET, 0)
+ data = output_certificate.read()
+ output_certificate.close()
+ return data
+
+
def BuildVBMeta(image_path, partitions, name, needed_partitions,
resolve_rollback_index_location_conflict=False):
"""Creates a VBMeta image.
@@ -1797,6 +1831,29 @@
RunAndCheckOutput(cmd)
+ if _HasGkiCertificationArgs():
+ if not os.path.exists(img.name):
+ raise ValueError("Cannot find GKI boot.img")
+ if kernel_path is None or not os.path.exists(kernel_path):
+ raise ValueError("Cannot find GKI kernel.img")
+
+ # Certify GKI images.
+ boot_signature_bytes = b''
+ boot_signature_bytes += _GenerateGkiCertificate(img.name, "boot")
+ boot_signature_bytes += _GenerateGkiCertificate(
+ kernel_path, "generic_kernel")
+
+ BOOT_SIGNATURE_SIZE = 16 * 1024
+ if len(boot_signature_bytes) > BOOT_SIGNATURE_SIZE:
+ raise ValueError(
+ f"GKI boot_signature size must be <= {BOOT_SIGNATURE_SIZE}")
+ boot_signature_bytes += (
+ b'\0' * (BOOT_SIGNATURE_SIZE - len(boot_signature_bytes)))
+ assert len(boot_signature_bytes) == BOOT_SIGNATURE_SIZE
+
+ with open(img.name, 'ab') as f:
+ f.write(boot_signature_bytes)
+
# Sign the image if vboot is non-empty.
if info_dict.get("vboot"):
path = "/" + partition_name
@@ -1910,10 +1967,8 @@
if info_dict.get("recovery_as_boot") == "true":
return True # the recovery-as-boot boot.img has a RECOVERY ramdisk.
- if info_dict.get("system_root_image") == "true":
- # The ramdisk content is merged into the system.img, so there is NO
- # ramdisk in the boot.img or boot-<kernel version>.img.
- return False
+ if info_dict.get("gki_boot_image_without_ramdisk") == "true":
+ return False # A GKI boot.img has no ramdisk since Android-13.
if info_dict.get("init_boot") == "true":
# The ramdisk is moved to the init_boot.img, so there is NO
@@ -3050,6 +3105,34 @@
zip_file.writestr(zinfo, data)
zipfile.ZIP64_LIMIT = saved_zip64_limit
+def ZipExclude(input_zip, output_zip, entries, force=False):
+  """Copies input_zip to output_zip with the given entries excluded.
+
+  Args:
+    input_zip: Path of the input ZIP file. output_zip: Path of the output.
+    entries: The name of the entry, or the list of names to be excluded.
+ """
+ if isinstance(entries, str):
+ entries = [entries]
+ # If list is empty, nothing to do
+ if not entries:
+ shutil.copy(input_zip, output_zip)
+ return
+
+ with zipfile.ZipFile(input_zip, 'r') as zin:
+ if not force and len(set(zin.namelist()).intersection(entries)) == 0:
+ raise ExternalError(
+ "Failed to delete zip entries, name not matched: %s" % entries)
+
+ fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(input_zip))
+ os.close(fd)
+ cmd = ["zip2zip", "-i", input_zip, "-o", new_zipfile]
+ for entry in entries:
+ cmd.append("-x")
+ cmd.append(entry)
+ RunAndCheckOutput(cmd)
+ os.replace(new_zipfile, output_zip)
+
def ZipDelete(zip_filename, entries, force=False):
"""Deletes entries from a ZIP file.
@@ -3064,20 +3147,7 @@
if not entries:
return
- with zipfile.ZipFile(zip_filename, 'r') as zin:
- if not force and len(set(zin.namelist()).intersection(entries)) == 0:
- raise ExternalError(
- "Failed to delete zip entries, name not matched: %s" % entries)
-
- fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(zip_filename))
- os.close(fd)
- cmd = ["zip2zip", "-i", zip_filename, "-o", new_zipfile]
- for entry in entries:
- cmd.append("-x")
- cmd.append(entry)
- RunAndCheckOutput(cmd)
-
- os.replace(new_zipfile, zip_filename)
+ ZipExclude(zip_filename, zip_filename, entries, force)
def ZipClose(zip_file):
@@ -3783,14 +3853,11 @@
output_sink(recovery_img_path, recovery_img.data)
else:
- system_root_image = info_dict.get("system_root_image") == "true"
include_recovery_dtbo = info_dict.get("include_recovery_dtbo") == "true"
include_recovery_acpio = info_dict.get("include_recovery_acpio") == "true"
path = os.path.join(input_dir, recovery_resource_dat_path)
- # With system-root-image, boot and recovery images will have mismatching
- # entries (only recovery has the ramdisk entry) (Bug: 72731506). Use bsdiff
- # to handle such a case.
- if system_root_image or include_recovery_dtbo or include_recovery_acpio:
+ # Use bsdiff to handle mismatching entries (Bug: 72731506)
+ if include_recovery_dtbo or include_recovery_acpio:
diff_program = ["bsdiff"]
bonus_args = ""
assert not os.path.exists(path)
diff --git a/tools/releasetools/create_brick_ota.py b/tools/releasetools/create_brick_ota.py
index 44f0a95..f290323 100644
--- a/tools/releasetools/create_brick_ota.py
+++ b/tools/releasetools/create_brick_ota.py
@@ -59,9 +59,9 @@
parser.add_argument('otafile', metavar='PAYLOAD', type=str,
help='The output OTA package file.')
parser.add_argument('--product', type=str,
- help='The product name of the device, for example, bramble, redfin. This can be a comma separated list.', required=True)
+ help='The product name of the device, for example, bramble, redfin.', required=True)
parser.add_argument('--serialno', type=str,
- help='The serial number of devices that are allowed to install this OTA package. This can be a comma separated list.')
+ help='The serial number of devices that are allowed to install this OTA package. This can be a | separated list.')
parser.add_argument('--extra_wipe_partitions', type=str,
help='Additional partitions on device which should be wiped.')
parser.add_argument('-v', action="store_true",
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index fa4ed09..4a5facd 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -256,6 +256,9 @@
--max_threads
Specify max number of threads allowed when generating A/B OTA
+
+ --vabc_cow_version
+ Specify the VABC cow version to be used
"""
from __future__ import print_function
@@ -327,10 +330,12 @@
OPTIONS.vabc_compression_param = None
OPTIONS.security_patch_level = None
OPTIONS.max_threads = None
+OPTIONS.vabc_cow_version = None
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
+MISC_INFO = 'META/misc_info.txt'
AB_PARTITIONS = 'META/ab_partitions.txt'
# Files to be unzipped for target diffing purpose.
@@ -357,6 +362,25 @@
oem_dicts.append(common.LoadDictionaryFromFile(oem_file))
return oem_dicts
+def ModifyKeyvalueList(content: str, key: str, value: str):
+  """ Update the key value list with the specified key and value
+  Args:
+    content: The string content of dynamic_partitions_info.txt. Each line
+      should be a key value pair, where the string before the first '=' is the key,
+ remaining parts are values.
+ key: the key of the key value pair to modify
+ value: the new value to replace with
+
+ Returns:
+ Updated content of the key value list
+ """
+ output_list = []
+ for line in content.splitlines():
+ if line.startswith(key+"="):
+ continue
+ output_list.append(line)
+ output_list.append("{}={}".format(key, value))
+ return "\n".join(output_list)
def ModifyVABCCompressionParam(content, algo):
""" Update update VABC Compression Param in dynamic_partitions_info.txt
@@ -367,13 +391,18 @@
Returns:
Updated content of dynamic_partitions_info.txt , with custom compression algo
"""
- output_list = []
- for line in content.splitlines():
- if line.startswith("virtual_ab_compression_method="):
- continue
- output_list.append(line)
- output_list.append("virtual_ab_compression_method="+algo)
- return "\n".join(output_list)
+ return ModifyKeyvalueList(content, "virtual_ab_compression_method", algo)
+
+def SetVABCCowVersion(content, cow_version):
+ """ Update virtual_ab_cow_version in dynamic_partitions_info.txt
+ Args:
+ content: The string content of dynamic_partitions_info.txt
+    cow_version: The cow version to be used for VABC. See
+      https://cs.android.com/android/platform/superproject/main/+/main:system/core/fs_mgr/libsnapshot/include/libsnapshot/cow_format.h;l=36
+  Returns:
+    Updated content of dynamic_partitions_info.txt with the updated cow version
+ """
+ return ModifyKeyvalueList(content, "virtual_ab_cow_version", cow_version)
def UpdatesInfoForSpecialUpdates(content, partitions_filter,
@@ -533,8 +562,7 @@
def ParseInfoDict(target_file_path):
return common.LoadInfoDict(target_file_path)
-
-def GetTargetFilesZipForCustomVABCCompression(input_file, vabc_compression_param):
+def ModifyTargetFilesDynamicPartitionInfo(input_file, key, value):
"""Returns a target-files.zip with a custom VABC compression param.
Args:
input_file: The input target-files.zip path
@@ -545,11 +573,11 @@
"""
if os.path.isdir(input_file):
dynamic_partition_info_path = os.path.join(
- input_file, "META", "dynamic_partitions_info.txt")
+ input_file, *DYNAMIC_PARTITION_INFO.split("/"))
with open(dynamic_partition_info_path, "r") as fp:
dynamic_partition_info = fp.read()
- dynamic_partition_info = ModifyVABCCompressionParam(
- dynamic_partition_info, vabc_compression_param)
+ dynamic_partition_info = ModifyKeyvalueList(
+ dynamic_partition_info, key, value)
with open(dynamic_partition_info_path, "w") as fp:
fp.write(dynamic_partition_info)
return input_file
@@ -559,12 +587,23 @@
common.ZipDelete(target_file, DYNAMIC_PARTITION_INFO)
with zipfile.ZipFile(input_file, 'r', allowZip64=True) as zfp:
dynamic_partition_info = zfp.read(DYNAMIC_PARTITION_INFO).decode()
- dynamic_partition_info = ModifyVABCCompressionParam(
- dynamic_partition_info, vabc_compression_param)
+ dynamic_partition_info = ModifyKeyvalueList(
+ dynamic_partition_info, key, value)
with zipfile.ZipFile(target_file, "a", allowZip64=True) as output_zip:
output_zip.writestr(DYNAMIC_PARTITION_INFO, dynamic_partition_info)
return target_file
+def GetTargetFilesZipForCustomVABCCompression(input_file, vabc_compression_param):
+ """Returns a target-files.zip with a custom VABC compression param.
+ Args:
+ input_file: The input target-files.zip path
+ vabc_compression_param: Custom Virtual AB Compression algorithm
+
+ Returns:
+ The path to modified target-files.zip
+ """
+ return ModifyTargetFilesDynamicPartitionInfo(input_file, "virtual_ab_compression_method", vabc_compression_param)
+
def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions):
"""Returns a target-files.zip for partial ota update package generation.
@@ -979,6 +1018,8 @@
if vabc_compression_param != target_info.vabc_compression_param:
target_file = GetTargetFilesZipForCustomVABCCompression(
target_file, vabc_compression_param)
+ if OPTIONS.vabc_cow_version:
+ target_file = ModifyTargetFilesDynamicPartitionInfo(target_file, "virtual_ab_cow_version", OPTIONS.vabc_cow_version)
if OPTIONS.skip_postinstall:
target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
# Target_file may have been modified, reparse ab_partitions
@@ -1235,6 +1276,12 @@
else:
raise ValueError("Cannot parse value %r for option %r - only "
"integers are allowed." % (a, o))
+ elif o == "--vabc_cow_version":
+ if a.isdigit():
+ OPTIONS.vabc_cow_version = a
+ else:
+ raise ValueError("Cannot parse value %r for option %r - only "
+ "integers are allowed." % (a, o))
else:
return False
return True
@@ -1283,6 +1330,7 @@
"vabc_compression_param=",
"security_patch_level=",
"max_threads=",
+ "vabc_cow_version=",
], extra_option_handler=[option_handler, payload_signer.signer_options])
common.InitLogging()
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 0a6ff39..ddd2d36 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -27,7 +27,8 @@
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
GetRamdiskFormat, ParseUpdateEngineConfig)
-from payload_signer import PayloadSigner
+import payload_signer
+from payload_signer import PayloadSigner, AddSigningArgumentParse, GeneratePayloadProperties
logger = logging.getLogger(__name__)
@@ -785,8 +786,8 @@
class PayloadGenerator(object):
"""Manages the creation and the signing of an A/B OTA Payload."""
- PAYLOAD_BIN = 'payload.bin'
- PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
+ PAYLOAD_BIN = payload_signer.PAYLOAD_BIN
+ PAYLOAD_PROPERTIES_TXT = payload_signer.PAYLOAD_PROPERTIES_TXT
SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
@@ -905,12 +906,7 @@
"""
assert self.payload_file is not None
# 4. Dump the signed payload properties.
- properties_file = common.MakeTempFile(prefix="payload-properties-",
- suffix=".txt")
- cmd = ["delta_generator",
- "--in_file=" + self.payload_file,
- "--properties_file=" + properties_file]
- self._Run(cmd)
+ properties_file = GeneratePayloadProperties(self.payload_file)
with open(properties_file, "a") as f:
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
index a5d09e1..e85d64c 100644
--- a/tools/releasetools/payload_signer.py
+++ b/tools/releasetools/payload_signer.py
@@ -17,7 +17,12 @@
import common
import logging
import shlex
+import argparse
+import tempfile
+import zipfile
+import shutil
from common import OPTIONS, OptionHandler
+from ota_signing_utils import AddSigningArgumentParse
logger = logging.getLogger(__name__)
@@ -26,6 +31,8 @@
OPTIONS.payload_signer_maximum_signature_size = None
OPTIONS.package_key = None
+PAYLOAD_BIN = 'payload.bin'
+PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
class SignerOptions(OptionHandler):
@@ -165,3 +172,52 @@
cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
common.RunAndCheckOutput(cmd)
return out_file
+
+def GeneratePayloadProperties(payload_file):
+ properties_file = common.MakeTempFile(prefix="payload-properties-",
+ suffix=".txt")
+ cmd = ["delta_generator",
+ "--in_file=" + payload_file,
+ "--properties_file=" + properties_file]
+ common.RunAndCheckOutput(cmd)
+ return properties_file
+
+def SignOtaPackage(input_path, output_path):
+ payload_signer = PayloadSigner(
+ OPTIONS.package_key, OPTIONS.private_key_suffix,
+ None, OPTIONS.payload_signer, OPTIONS.payload_signer_args)
+ common.ZipExclude(input_path, output_path, [PAYLOAD_BIN, PAYLOAD_PROPERTIES_TXT])
+ with tempfile.NamedTemporaryFile() as unsigned_payload, zipfile.ZipFile(input_path, "r", allowZip64=True) as zfp:
+ with zfp.open("payload.bin") as payload_fp:
+ shutil.copyfileobj(payload_fp, unsigned_payload)
+ signed_payload = payload_signer.SignPayload(unsigned_payload.name)
+ properties_file = GeneratePayloadProperties(signed_payload)
+ with zipfile.ZipFile(output_path, "a", compression=zipfile.ZIP_STORED, allowZip64=True) as output_zfp:
+ common.ZipWrite(output_zfp, signed_payload, PAYLOAD_BIN)
+ common.ZipWrite(output_zfp, properties_file, PAYLOAD_PROPERTIES_TXT)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+        prog=argv[0], description="Given an OTA package, re-signs the payload and produces a signed OTA package")
+ parser.add_argument("input_ota", type=str,
+ help="Input OTA for signing")
+ parser.add_argument('output_ota', type=str,
+ help='Output OTA for the signed package')
+ parser.add_argument("-v", action="store_true",
+ help="Enable verbose logging", dest="verbose")
+ AddSigningArgumentParse(parser)
+ args = parser.parse_args(argv[1:])
+ input_ota = args.input_ota
+ output_ota = args.output_ota
+ if args.verbose:
+ OPTIONS.verbose = True
+ common.InitLogging()
+ if args.package_key:
+ OPTIONS.package_key = args.package_key
+ logger.info("Re-signing OTA package {}".format(input_ota))
+ SignOtaPackage(input_ota, output_ota)
+
+if __name__ == "__main__":
+ import sys
+ main(sys.argv)
\ No newline at end of file
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 2fbb3b0..4356394 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -83,9 +83,8 @@
--replace_verity_public_key <key>
Replace the certificate (public key) used for verity verification. The
- key file replaces the one at BOOT/RAMDISK/verity_key (or ROOT/verity_key
- for devices using system_root_image). It expects the key filename WITH
- the extension (e.g. verity_key.pub).
+ key file replaces the one at BOOT/RAMDISK/verity_key. It expects the key
+ filename WITH the extension (e.g. verity_key.pub).
--replace_verity_keyid <path_to_X509_PEM_cert_file>
Replace the veritykeyid in BOOT/cmdline of input_target_file_zip
@@ -123,6 +122,17 @@
mounted on the partition (e.g. "--signing_helper /path/to/helper"). The
args will be appended to the existing ones in info dict.
+ --gki_signing_algorithm <algorithm>
+ --gki_signing_key <key>
+ Use the specified algorithm (e.g. SHA256_RSA4096) and the key to generate
+ 'boot signature' in a v4 boot.img. Otherwise it uses the existing values
+ in info dict.
+
+ --gki_signing_extra_args <args>
+ Specify any additional args that are needed to generate 'boot signature'
+ (e.g. --prop foo:bar). The args will be appended to the existing ones
+ in info dict.
+
--android_jar_path <path>
Path to the android.jar to repack the apex file.
@@ -136,6 +146,34 @@
--override_apex_keys <path>
Replace all APEX keys with this private key
+
+ -k (--package_key) <key>
+ Key to use to sign the package (default is the value of
+ default_system_dev_certificate from the input target-files's
+ META/misc_info.txt, or "build/make/target/product/security/testkey" if
+ that value is not specified).
+
+ For incremental OTAs, the default value is based on the source
+ target-file, not the target build.
+
+ --payload_signer <signer>
+ Specify the signer when signing the payload and metadata for A/B OTAs.
+ By default (i.e. without this flag), it calls 'openssl pkeyutl' to sign
+ with the package private key. If the private key cannot be accessed
+ directly, a payload signer that knows how to do that should be specified.
+ The signer will be supplied with "-inkey <path_to_key>",
+ "-in <input_file>" and "-out <output_file>" parameters.
+
+ --payload_signer_args <args>
+ Specify the arguments needed for payload signer.
+
+ --payload_signer_maximum_signature_size <signature_size>
+ The maximum signature size (in bytes) that would be generated by the given
+ payload signer. Only meaningful when custom payload signer is specified
+ via '--payload_signer'.
+ If the signer uses a RSA key, this should be the number of bytes to
+ represent the modulus. If it uses an EC key, this is the size of a
+ DER-encoded ECDSA signature.
"""
from __future__ import print_function
@@ -151,7 +189,6 @@
import re
import shutil
import stat
-import subprocess
import sys
import tempfile
import zipfile
@@ -160,6 +197,8 @@
import add_img_to_target_files
import apex_utils
import common
+import payload_signer
+from payload_signer import SignOtaPackage, PAYLOAD_BIN
if sys.hexversion < 0x02070000:
@@ -182,6 +221,9 @@
OPTIONS.avb_keys = {}
OPTIONS.avb_algorithms = {}
OPTIONS.avb_extra_args = {}
+OPTIONS.gki_signing_key = None
+OPTIONS.gki_signing_algorithm = None
+OPTIONS.gki_signing_extra_args = None
OPTIONS.android_jar_path = None
OPTIONS.vendor_partitions = set()
OPTIONS.vendor_otatools = None
@@ -227,6 +269,20 @@
return filename.endswith(".apex") or filename.endswith(".capex")
+def IsOtaPackage(fp):
+ with zipfile.ZipFile(fp) as zfp:
+ if not PAYLOAD_BIN in zfp.namelist():
+ return False
+ with zfp.open(PAYLOAD_BIN, "r") as payload:
+ magic = payload.read(4)
+ return magic == b"CrAU"
+
+
+def IsEntryOtaPackage(input_zip, filename):
+ with input_zip.open(filename, "r") as fp:
+ return IsOtaPackage(fp)
+
+
def GetApexFilename(filename):
name = os.path.basename(filename)
# Replace the suffix for compressed apex
@@ -501,6 +557,7 @@
return data
+
def IsBuildPropFile(filename):
return filename in (
"SYSTEM/etc/prop.default",
@@ -527,7 +584,7 @@
filename.endswith("/prop.default")
-def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
+def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info,
apk_keys, apex_keys, key_passwords,
platform_api_level, codename_to_api_level_map,
compressed_extension):
@@ -538,11 +595,9 @@
[len(os.path.basename(i.filename)) for i in input_tf_zip.infolist()
if GetApkFileInfo(i.filename, compressed_extension, [])[0]])
except ValueError:
- # Sets this to zero for targets without APK files.
+ # Sets this to zero for targets without APK files, e.g., gki_arm64.
maxsize = 0
- system_root_image = misc_info.get("system_root_image") == "true"
-
for info in input_tf_zip.infolist():
filename = info.filename
if filename.startswith("IMAGES/"):
@@ -617,6 +672,15 @@
" (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
+ elif filename.endswith(".zip") and IsEntryOtaPackage(input_tf_zip, filename):
+ logger.info("Re-signing OTA package {}".format(filename))
+ with tempfile.NamedTemporaryFile() as input_ota, tempfile.NamedTemporaryFile() as output_ota:
+ with input_tf_zip.open(filename, "r") as in_fp:
+ shutil.copyfileobj(in_fp, input_ota)
+ input_ota.flush()
+ SignOtaPackage(input_ota.name, output_ota.name)
+ common.ZipWrite(output_tf_zip, output_ota.name, filename,
+ compress_type=zipfile.ZIP_STORED)
# System properties.
elif IsBuildPropFile(filename):
print("Rewriting %s:" % (filename,))
@@ -754,6 +818,9 @@
if misc_info.get('avb_enable') == 'true':
RewriteAvbProps(misc_info)
+ # Replace the GKI signing key for boot.img, if any.
+ ReplaceGkiSigningKey(misc_info)
+
# Write back misc_info with the latest values.
ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
@@ -1035,6 +1102,27 @@
misc_info[args_key] = result
+def ReplaceGkiSigningKey(misc_info):
+ """Replaces the GKI signing key."""
+
+ key = OPTIONS.gki_signing_key
+ if not key:
+ return
+
+ algorithm = OPTIONS.gki_signing_algorithm
+ if not algorithm:
+ raise ValueError("Missing --gki_signing_algorithm")
+
+ print('Replacing GKI signing key with "%s" (%s)' % (key, algorithm))
+ misc_info["gki_signing_algorithm"] = algorithm
+ misc_info["gki_signing_key_path"] = key
+
+ extra_args = OPTIONS.gki_signing_extra_args
+ if extra_args:
+ print('Setting GKI signing args: "%s"' % (extra_args))
+ misc_info["gki_signing_signature_args"] = extra_args
+
+
def BuildKeyMap(misc_info, key_mapping_options):
for s, d in key_mapping_options:
if s is None: # -d option
@@ -1388,6 +1476,12 @@
# 'oem=--signing_helper_with_files=/tmp/avbsigner.sh'.
partition, extra_args = a.split("=", 1)
OPTIONS.avb_extra_args[partition] = extra_args
+ elif o == "--gki_signing_key":
+ OPTIONS.gki_signing_key = a
+ elif o == "--gki_signing_algorithm":
+ OPTIONS.gki_signing_algorithm = a
+ elif o == "--gki_signing_extra_args":
+ OPTIONS.gki_signing_extra_args = a
elif o == "--vendor_otatools":
OPTIONS.vendor_otatools = a
elif o == "--vendor_partitions":
@@ -1451,13 +1545,16 @@
"avb_extra_custom_image_key=",
"avb_extra_custom_image_algorithm=",
"avb_extra_custom_image_extra_args=",
+ "gki_signing_key=",
+ "gki_signing_algorithm=",
+ "gki_signing_extra_args=",
"vendor_partitions=",
"vendor_otatools=",
"allow_gsi_debug_sepolicy",
"override_apk_keys=",
"override_apex_keys=",
],
- extra_option_handler=option_handler)
+ extra_option_handler=[option_handler, payload_signer.signer_options])
if len(args) != 2:
common.Usage(__doc__)
@@ -1471,6 +1568,10 @@
allowZip64=True)
misc_info = common.LoadInfoDict(input_zip)
+ if OPTIONS.package_key is None:
+ OPTIONS.package_key = misc_info.get(
+ "default_system_dev_certificate",
+ "build/make/target/product/security/testkey")
BuildKeyMap(misc_info, key_mapping_options)
diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py
index cfae7a5..d4f7ccc 100644
--- a/tools/releasetools/test_build_image.py
+++ b/tools/releasetools/test_build_image.py
@@ -99,11 +99,10 @@
}
self.assertRaises(BuildImageError, CheckHeadroom, ext4fs_output, prop_dict)
- def test_SetUpInDirAndFsConfig_SystemRootImageTrue_NonSystem(self):
+ def test_SetUpInDirAndFsConfig_NonSystem(self):
prop_dict = {
'fs_config': 'fs-config',
'mount_point': 'vendor',
- 'system_root_image': 'true',
}
in_dir, fs_config = SetUpInDirAndFsConfig('/path/to/in_dir', prop_dict)
self.assertEqual('/path/to/in_dir', in_dir)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index c61c290..9b2e667 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1348,7 +1348,6 @@
INFO_DICT_DEFAULT = {
'recovery_api_version': 3,
'fstab_version': 2,
- 'system_root_image': 'true',
'no_recovery': 'true',
'recovery_as_boot': 'true',
}
@@ -1377,14 +1376,8 @@
info_values = ''.join(
['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
-
- FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
- if info_dict.get('system_root_image') == 'true':
- fstab_values = FSTAB_TEMPLATE.format('/')
- else:
- fstab_values = FSTAB_TEMPLATE.format('/system')
- common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)
-
+ common.ZipWriteStr(target_files_zip, fstab_path,
+ "/dev/block/system /system ext4 ro,barrier=1 defaults")
common.ZipWriteStr(
target_files_zip, 'META/file_contexts', 'file-contexts')
return target_files
@@ -1397,7 +1390,6 @@
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
def test_LoadInfoDict_legacyRecoveryFstabPath(self):
@@ -1408,7 +1400,6 @@
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1420,7 +1411,6 @@
loaded_dict = common.LoadInfoDict(unzipped)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1432,15 +1422,11 @@
loaded_dict = common.LoadInfoDict(unzipped)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
- def test_LoadInfoDict_systemRootImageFalse(self):
- # Devices not using system-as-root nor recovery-as-boot. Non-A/B devices
- # launched prior to P will likely have this config.
+ def test_LoadInfoDict_recoveryAsBootFalse(self):
info_dict = copy.copy(self.INFO_DICT_DEFAULT)
del info_dict['no_recovery']
- del info_dict['system_root_image']
del info_dict['recovery_as_boot']
target_files = self._test_LoadInfoDict_createTargetFiles(
info_dict,
@@ -1452,22 +1438,6 @@
self.assertNotIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
- def test_LoadInfoDict_recoveryAsBootFalse(self):
- # Devices using system-as-root, but with standalone recovery image. Non-A/B
- # devices launched since P will likely have this config.
- info_dict = copy.copy(self.INFO_DICT_DEFAULT)
- del info_dict['no_recovery']
- del info_dict['recovery_as_boot']
- target_files = self._test_LoadInfoDict_createTargetFiles(
- info_dict,
- 'RECOVERY/RAMDISK/system/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
- loaded_dict = common.LoadInfoDict(target_files_zip)
- self.assertEqual(3, loaded_dict['recovery_api_version'])
- self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
- self.assertIn('/system', loaded_dict['fstab'])
-
def test_LoadInfoDict_noRecoveryTrue(self):
# Device doesn't have a recovery partition at all.
info_dict = copy.copy(self.INFO_DICT_DEFAULT)
@@ -1499,7 +1469,6 @@
loaded_dict = common.LoadInfoDict(unzipped, True)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
- self.assertIn('/', loaded_dict['fstab'])
self.assertIn('/system', loaded_dict['fstab'])
self.assertEqual(
os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
@@ -1636,6 +1605,40 @@
self.assertEqual(3, chained_partition_args.rollback_index_location)
self.assertTrue(os.path.exists(chained_partition_args.pubkey_path))
+ def test_GenerateGkiCertificate_KeyPathNotFound(self):
+ pubkey = os.path.join(self.testdata_dir, 'no_testkey_gki.pem')
+ self.assertFalse(os.path.exists(pubkey))
+
+ common.OPTIONS.info_dict = {
+ 'gki_signing_key_path': pubkey,
+ 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+ common.OPTIONS.search_path = None
+ test_file = tempfile.NamedTemporaryFile()
+ self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
+ test_file.name, 'generic_kernel')
+
+ def test_GenerateGkiCertificate_SearchKeyPathNotFound(self):
+ pubkey = 'no_testkey_gki.pem'
+ self.assertFalse(os.path.exists(pubkey))
+
+ # Tests it should raise ExternalError if no key found under
+ # OPTIONS.search_path.
+ search_path_dir = common.MakeTempDir()
+ search_pubkey = os.path.join(search_path_dir, pubkey)
+ self.assertFalse(os.path.exists(search_pubkey))
+
+ common.OPTIONS.search_path = search_path_dir
+ common.OPTIONS.info_dict = {
+ 'gki_signing_key_path': pubkey,
+ 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+ test_file = tempfile.NamedTemporaryFile()
+ self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
+ test_file.name, 'generic_kernel')
+
class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 9cc6df4..0cd7dac 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -23,7 +23,8 @@
import test_utils
from sign_target_files_apks import (
CheckApkAndApexKeysAvailable, EditTags, GetApkFileInfo, ReadApexKeysInfo,
- ReplaceCerts, RewriteAvbProps, RewriteProps, WriteOtacerts)
+ ReplaceCerts, ReplaceGkiSigningKey, RewriteAvbProps, RewriteProps,
+ WriteOtacerts)
class SignTargetFilesApksTest(test_utils.ReleaseToolsTestCase):
@@ -535,3 +536,52 @@
'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
'build/make/target/product/security/testkey', None),
}, keys_info)
+
+ def test_ReplaceGkiSigningKey(self):
+ common.OPTIONS.gki_signing_key = 'release_gki_key'
+ common.OPTIONS.gki_signing_algorithm = 'release_gki_algorithm'
+ common.OPTIONS.gki_signing_extra_args = 'release_gki_signature_extra_args'
+
+ misc_info = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ expected_dict = {
+ 'gki_signing_key_path': 'release_gki_key',
+ 'gki_signing_algorithm': 'release_gki_algorithm',
+ 'gki_signing_signature_args': 'release_gki_signature_extra_args',
+ }
+ ReplaceGkiSigningKey(misc_info)
+ self.assertDictEqual(expected_dict, misc_info)
+
+ def test_ReplaceGkiSigningKey_MissingSigningAlgorithm(self):
+ common.OPTIONS.gki_signing_key = 'release_gki_key'
+ common.OPTIONS.gki_signing_algorithm = None
+ common.OPTIONS.gki_signing_extra_args = 'release_gki_signature_extra_args'
+
+ misc_info = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ self.assertRaises(ValueError, ReplaceGkiSigningKey, misc_info)
+
+ def test_ReplaceGkiSigningKey_MissingSigningKeyNop(self):
+ common.OPTIONS.gki_signing_key = None
+ common.OPTIONS.gki_signing_algorithm = 'release_gki_algorithm'
+ common.OPTIONS.gki_signing_extra_args = 'release_gki_signature_extra_args'
+
+ # No change to misc_info if common.OPTIONS.gki_signing_key is missing.
+ misc_info = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ expected_dict = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ ReplaceGkiSigningKey(misc_info)
+ self.assertDictEqual(expected_dict, misc_info)
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index 48b563d..4d4b9e5 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -156,7 +156,6 @@
verity_key_mincrypt)
info_dict = {
- 'system_root_image' : 'true',
'verity' : 'true',
}
options = {
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 84a2f7e..8da4fa2 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -132,7 +132,7 @@
return
# Verify IMAGES/system.img if applicable.
- # Some targets are system.img-less.
+ # Some targets, e.g., gki_arm64, gki_x86_64, etc., are system.img-less.
if 'IMAGES/system.img' in input_zip.namelist():
CheckAllFiles('system')
@@ -361,18 +361,15 @@
"Mismatching mincrypt verity key files"
logging.info('Verified the content of /verity_key')
- # For devices with a separate ramdisk (i.e. non-system-as-root), there must
- # be a copy in ramdisk.
- if info_dict.get("system_root_image") != "true":
- verity_key_ramdisk = os.path.join(
- input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
- assert os.path.exists(
- verity_key_ramdisk), 'Missing verity_key in ramdisk'
+ verity_key_ramdisk = os.path.join(
+ input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
+ assert os.path.exists(
+ verity_key_ramdisk), 'Missing verity_key in ramdisk'
- assert filecmp.cmp(
- verity_key_mincrypt, verity_key_ramdisk, shallow=False), \
- 'Mismatching verity_key files in root and ramdisk'
- logging.info('Verified the content of /verity_key in ramdisk')
+ assert filecmp.cmp(
+ verity_key_mincrypt, verity_key_ramdisk, shallow=False), \
+ 'Mismatching verity_key files in root and ramdisk'
+ logging.info('Verified the content of /verity_key in ramdisk')
# Then verify the verity signed system/vendor/product images, against the
# verity pubkey in mincrypt format.
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
index 5eae262..72f896b 100755
--- a/tools/sbom/generate-sbom.py
+++ b/tools/sbom/generate-sbom.py
@@ -347,7 +347,7 @@
sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
if sbom_url and sbom_checksum and upstream_element_id:
- doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(name)}'
+ doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{sbom_data.encode_for_spdxid(name)}'
external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
uri=sbom_url,
checksum=sbom_checksum)