Merge "Revert "Revert "Create a product variable for skipping apex cont..."" into main
diff --git a/core/Makefile b/core/Makefile
index 94fc88e..d4e241e 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -5109,8 +5109,10 @@
$(error EMPTY_VENDOR_SKU_PLACEHOLDER is an internal variable and cannot be used for DEIVCE_MANIFEST_SKUS)
endif
-# -- Check system manifest / matrix including fragments (excluding other framework manifests / matrices, e.g. product);
-check_vintf_system_deps := $(filter $(TARGET_OUT)/etc/vintf/%, $(check_vintf_common_srcs))
+# -- Check system and system_ext manifests / matrices including fragments (excluding other framework manifests / matrices, e.g. product);
+check_vintf_system_deps := $(filter $(TARGET_OUT)/etc/vintf/% \
+ $(TARGET_OUT_SYSTEM_EXT)/etc/vintf/%, \
+ $(check_vintf_common_srcs))
ifneq ($(check_vintf_system_deps),)
check_vintf_has_system := true
@@ -5138,8 +5140,9 @@
check_vintf_system_deps :=
# -- Check vendor manifest / matrix including fragments (excluding other device manifests / matrices)
-check_vintf_vendor_deps := $(filter $(TARGET_OUT_VENDOR)/etc/vintf/%, $(check_vintf_common_srcs))
-check_vintf_vendor_deps += $(filter $(TARGET_OUT_VENDOR)/apex/%, $(check_vintf_common_srcs))
+check_vintf_vendor_deps := $(filter $(TARGET_OUT_VENDOR)/etc/vintf/% \
+ $(TARGET_OUT_VENDOR)/apex/%, \
+ $(check_vintf_common_srcs))
ifneq ($(strip $(check_vintf_vendor_deps)),)
check_vintf_has_vendor := true
check_vintf_vendor_log := $(intermediates)/check_vintf_vendor.log
diff --git a/core/OWNERS b/core/OWNERS
index c98196a..36951a9 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -7,3 +7,6 @@
# For sdk extensions version updates
per-file version_defaults.mk = amhk@google.com,gurpreetgs@google.com,mkhokhlova@google.com,robertogil@google.com
+
+# For Ravenwood test configs
+per-file ravenwood_test_config_template.xml = jsharkey@google.com,omakoto@google.com
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index ff49262..7f46903 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -51,6 +51,9 @@
my_target_sdk_version := $(my_target_sdk_version).$$(cat $(API_FINGERPRINT))
my_min_sdk_version := $(my_min_sdk_version).$$(cat $(API_FINGERPRINT))
$(fixed_android_manifest): $(API_FINGERPRINT)
+ else ifdef UNBUNDLED_BUILD_TARGET_SDK_WITH_DESSERT_SHA
+ my_target_sdk_version := $(UNBUNDLED_BUILD_TARGET_SDK_WITH_DESSERT_SHA)
+ my_min_sdk_version := $(UNBUNDLED_BUILD_TARGET_SDK_WITH_DESSERT_SHA)
endif
endif
endif
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 18d955c..90ac75f 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -152,10 +152,6 @@
$(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
endif
-ifdef PRODUCT_AVF_KERNEL_MODULES_ENABLED
-$(call add_soong_config_var_value,ANDROID,avf_kernel_modules_enabled,$(PRODUCT_AVF_KERNEL_MODULES_ENABLED))
-endif
-
$(call add_soong_config_var_value,ANDROID,release_avf_allow_preinstalled_apps,$(RELEASE_AVF_ALLOW_PREINSTALLED_APPS))
$(call add_soong_config_var_value,ANDROID,release_avf_enable_device_assignment,$(RELEASE_AVF_ENABLE_DEVICE_ASSIGNMENT))
$(call add_soong_config_var_value,ANDROID,release_avf_enable_dice_changes,$(RELEASE_AVF_ENABLE_DICE_CHANGES))
@@ -164,6 +160,7 @@
$(call add_soong_config_var_value,ANDROID,release_avf_enable_remote_attestation,$(RELEASE_AVF_ENABLE_REMOTE_ATTESTATION))
$(call add_soong_config_var_value,ANDROID,release_avf_enable_vendor_modules,$(RELEASE_AVF_ENABLE_VENDOR_MODULES))
$(call add_soong_config_var_value,ANDROID,release_avf_enable_virt_cpufreq,$(RELEASE_AVF_ENABLE_VIRT_CPUFREQ))
+$(call add_soong_config_var_value,ANDROID,release_avf_microdroid_kernel_version,$(RELEASE_AVF_MICRODROID_KERNEL_VERSION))
$(call add_soong_config_var_value,ANDROID,release_binder_death_recipient_weak_from_jni,$(RELEASE_BINDER_DEATH_RECIPIENT_WEAK_FROM_JNI))
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index 137b118..b69f694 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -29,7 +29,7 @@
ifeq (true,$(LOCAL_VENDOR_MODULE))
autogen_test_install_base = /data/local/tests/vendor
endif
- ifeq (true,$(LOCAL_USE_VNDK))
+ ifeq (true,$(call module-in-vendor-or-product))
autogen_test_install_base = /data/local/tests/vendor
endif
endif
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 44e1c7a..b8aa5fe 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -120,7 +120,7 @@
$(LOCAL_VENDOR_MODULE) \
$(LOCAL_PROPRIETARY_MODULE))
-include $(BUILD_SYSTEM)/local_vndk.mk
+include $(BUILD_SYSTEM)/local_vendor_product.mk
# local_current_sdk needs to run before local_systemsdk because the former may override
# LOCAL_SDK_VERSION which is used by the latter.
@@ -806,7 +806,7 @@
ifneq (,$(test_config))
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
- $(test_config):$(dir)/$(LOCAL_MODULE).config)))
+ $(test_config):$(dir)/$(LOCAL_MODULE).config$(LOCAL_TEST_CONFIG_SUFFIX))))
endif
ifneq (,$(LOCAL_EXTRA_FULL_TEST_CONFIGS))
@@ -1095,10 +1095,10 @@
## When compiling against API imported module, use API import stub
## libraries.
##########################################################################
-ifneq ($(LOCAL_USE_VNDK),)
+ifneq ($(call module-in-vendor-or-product),)
ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
apiimport_postfix := .apiimport
- ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ ifeq ($(LOCAL_IN_PRODUCT),true)
apiimport_postfix := .apiimport.product
else
apiimport_postfix := .apiimport.vendor
@@ -1113,7 +1113,7 @@
## When compiling against the VNDK, add the .vendor or .product suffix to
## required modules.
##########################################################################
-ifneq ($(LOCAL_USE_VNDK),)
+ifneq ($(call module-in-vendor-or-product),)
#####################################################
## Soong modules may be built three times, once for
## /system, once for /vendor and once for /product.
@@ -1124,7 +1124,7 @@
# We don't do this renaming for soong-defined modules since they already
# have correct names (with .vendor or .product suffix when necessary) in
# their LOCAL_*_LIBRARIES.
- ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ ifeq ($(LOCAL_IN_PRODUCT),true)
my_required_modules := $(foreach l,$(my_required_modules),\
$(if $(SPLIT_PRODUCT.SHARED_LIBRARIES.$(l)),$(l).product,$(l)))
else
diff --git a/core/binary.mk b/core/binary.mk
index d872b66..6dab49c 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -332,10 +332,10 @@
endif
endif
-ifneq ($(LOCAL_USE_VNDK),)
+ifneq ($(call module-in-vendor-or-product),)
my_cflags += -D__ANDROID_VNDK__
- ifneq ($(LOCAL_USE_VNDK_VENDOR),)
- # Vendor modules have LOCAL_USE_VNDK_VENDOR
+ ifneq ($(LOCAL_IN_VENDOR),)
+ # Vendor modules have LOCAL_IN_VENDOR
my_cflags += -D__ANDROID_VENDOR__
ifeq ($(BOARD_API_LEVEL),)
@@ -345,8 +345,8 @@
else
my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL)
endif
- else ifneq ($(LOCAL_USE_VNDK_PRODUCT),)
- # Product modules have LOCAL_USE_VNDK_PRODUCT
+ else ifneq ($(LOCAL_IN_PRODUCT),)
+ # Product modules have LOCAL_IN_PRODUCT
my_cflags += -D__ANDROID_PRODUCT__
endif
endif
@@ -1174,8 +1174,8 @@
apiimport_postfix := .apiimport
-ifneq ($(LOCAL_USE_VNDK),)
- ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ifneq ($(call module-in-vendor-or-product),)
+ ifeq ($(LOCAL_IN_PRODUCT),true)
apiimport_postfix := .apiimport.product
else
apiimport_postfix := .apiimport.vendor
@@ -1192,14 +1192,14 @@
###########################################################
## When compiling against the VNDK, use LL-NDK libraries
###########################################################
-ifneq ($(LOCAL_USE_VNDK),)
+ifneq ($(call module-in-vendor-or-product),)
#####################################################
## Soong modules may be built three times, once for
## /system, once for /vendor and once for /product.
## If we're using the VNDK, switch all soong
## libraries over to the /vendor or /product variant.
#####################################################
- ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ ifeq ($(LOCAL_IN_PRODUCT),true)
my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
$(if $(SPLIT_PRODUCT.STATIC_LIBRARIES.$(l)),$(l).product,$(l)))
my_static_libraries := $(foreach l,$(my_static_libraries),\
@@ -1226,7 +1226,7 @@
# Platform can use vendor public libraries. If a required shared lib is one of
# the vendor public libraries, the lib is switched to the stub version of the lib.
-ifeq ($(LOCAL_USE_VNDK),)
+ifeq ($(call module-in-vendor-or-product),)
my_shared_libraries := $(foreach l,$(my_shared_libraries),\
$(if $(filter $(l),$(VENDOR_PUBLIC_LIBRARIES)),$(l).vendorpublic,$(l)))
endif
@@ -1278,7 +1278,7 @@
my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
my_warn_types := $(my_warn_ndk_types)
my_allowed_types := $(my_allowed_ndk_types)
-else ifdef LOCAL_USE_VNDK
+else ifeq ($(call module-in-vendor-or-product),true)
_name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
_name := $(patsubst %.product,%,$(LOCAL_MODULE))
ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
@@ -1289,7 +1289,7 @@
endif
my_warn_types :=
my_allowed_types := native:vndk native:vndk_private
- else ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ else ifeq ($(LOCAL_IN_PRODUCT),true)
# Modules installed to /product cannot directly depend on modules marked
# with vendor_available: false
my_link_type := native:product
@@ -1592,7 +1592,7 @@
###########################################################
ifndef LOCAL_IS_HOST_MODULE
-ifdef LOCAL_USE_VNDK
+ifeq ($(call module-in-vendor-or-product),true)
my_target_global_c_includes :=
my_target_global_c_system_includes := $(TARGET_OUT_HEADERS)
else ifdef LOCAL_SDK_VERSION
@@ -1686,7 +1686,7 @@
####################################################
imported_includes :=
-ifdef LOCAL_USE_VNDK
+ifeq (true,$(call module-in-vendor-or-product))
imported_includes += $(call intermediates-dir-for,HEADER_LIBRARIES,device_kernel_headers,$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))
else
# everything else should manually specify headers
diff --git a/core/board_config.mk b/core/board_config.mk
index ac9a34f..8c23f93 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -965,12 +965,15 @@
$(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
endef
+ifeq ($(KEEP_VNDK),true)
ifeq ($(BOARD_VNDK_VERSION),$(PLATFORM_VNDK_VERSION))
$(error BOARD_VNDK_VERSION is equal to PLATFORM_VNDK_VERSION; use BOARD_VNDK_VERSION := current)
endif
ifneq ($(BOARD_VNDK_VERSION),current)
$(call check_vndk_version,$(BOARD_VNDK_VERSION))
endif
+endif
+
TARGET_VENDOR_TEST_SUFFIX := /vendor
ifeq (,$(TARGET_BUILD_UNBUNDLED))
diff --git a/core/cc_prebuilt_internal.mk b/core/cc_prebuilt_internal.mk
index 000159a..e34e110 100644
--- a/core/cc_prebuilt_internal.mk
+++ b/core/cc_prebuilt_internal.mk
@@ -80,7 +80,7 @@
ifdef LOCAL_SDK_VERSION
my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
-else ifdef LOCAL_USE_VNDK
+else ifeq ($(call module-in-vendor-or-product),true)
_name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
_name := $(patsubst %.product,%,$(LOCAL_MODULE))
ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
@@ -90,7 +90,7 @@
my_link_type := native:vndk_private
endif
else
- ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ ifeq ($(LOCAL_IN_PRODUCT),true)
my_link_type := native:product
else
my_link_type := native:vendor
@@ -139,8 +139,8 @@
# When compiling against API imported module, use API import stub libraries.
apiimport_postfix := .apiimport
-ifneq ($(LOCAL_USE_VNDK),)
- ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ifeq ($(call module-in-vendor-or-product),true)
+ ifeq ($(LOCAL_IN_PRODUCT),true)
apiimport_postfix := .apiimport.product
else
apiimport_postfix := .apiimport.vendor
@@ -158,8 +158,8 @@
endif #my_system_shared_libraries
ifdef my_shared_libraries
-ifdef LOCAL_USE_VNDK
- ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+ifeq ($(call module-in-vendor-or-product),true)
+ ifeq ($(LOCAL_IN_PRODUCT),true)
my_shared_libraries := $(foreach l,$(my_shared_libraries),\
$(if $(SPLIT_PRODUCT.SHARED_LIBRARIES.$(l)),$(l).product,$(l)))
else
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 8d99176..5481d50 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -291,6 +291,7 @@
LOCAL_SYSTEM_SHARED_LIBRARIES:=none
LOCAL_TARGET_REQUIRED_MODULES:=
LOCAL_TEST_CONFIG:=
+LOCAL_TEST_CONFIG_SUFFIX:=
LOCAL_TEST_DATA:=
LOCAL_TEST_DATA_BINS:=
LOCAL_TEST_MAINLINE_MODULES:=
@@ -304,7 +305,8 @@
LOCAL_USE_AAPT2:=
LOCAL_USE_CLANG_LLD:=
LOCAL_USE_VNDK:=
-LOCAL_USE_VNDK_PRODUCT:=
+LOCAL_IN_VENDOR:=
+LOCAL_IN_PRODUCT:=
LOCAL_USES_LIBRARIES:=
LOCAL_VENDOR_MODULE:=
LOCAL_VINTF_FRAGMENTS:=
diff --git a/core/config.mk b/core/config.mk
index fc11405..dbee0a0 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -374,16 +374,6 @@
endif
-include $(ANDROID_BUILDSPEC)
-# Starting in Android U, non-VNDK devices not supported
-# WARNING: DO NOT CHANGE: if you are downstream of AOSP, and you change this, without
-# letting upstream know it's important to you, we may do cleanup which breaks this
-# significantly. Please let us know if you are changing this.
-ifndef BOARD_VNDK_VERSION
-# READ WARNING - DO NOT CHANGE
-BOARD_VNDK_VERSION := current
-# READ WARNING - DO NOT CHANGE
-endif
-
# ---------------------------------------------------------------
# Define most of the global variables. These are the ones that
# are specific to the user's build configuration.
@@ -813,13 +803,6 @@
requirements :=
-# Set default value of KEEP_VNDK.
-ifeq ($(RELEASE_DEPRECATE_VNDK),true)
- KEEP_VNDK ?= false
-else
- KEEP_VNDK ?= true
-endif
-
# BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED can be true only if early-mount of
# partitions is supported. But the early-mount must be supported for full
# treble products, and so BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED should be set
@@ -888,21 +871,9 @@
# SEPolicy versions
-# PLATFORM_SEPOLICY_VERSION is a number of the form "YYYYMM.0" with "YYYYMM"
-# mapping to vFRC version. This value will be set to 1000000.0 to represent
-# tip-of-tree development that is inherently unstable and thus designed not to
-# work with any shipping vendor policy. This is similar in spirit to how
-# DEFAULT_APP_TARGET_SDK is set.
-sepolicy_vers := $(BOARD_API_LEVEL).0
-
-TOT_SEPOLICY_VERSION := 1000000.0
-ifeq (true,$(RELEASE_BOARD_API_LEVEL_FROZEN))
- PLATFORM_SEPOLICY_VERSION := $(sepolicy_vers)
-else
- PLATFORM_SEPOLICY_VERSION := $(TOT_SEPOLICY_VERSION)
-endif
-sepolicy_vers :=
-
+# PLATFORM_SEPOLICY_VERSION is a number of the form "YYYYMM" with "YYYYMM"
+# mapping to vFRC version.
+PLATFORM_SEPOLICY_VERSION := $(BOARD_API_LEVEL)
BOARD_SEPOLICY_VERS := $(PLATFORM_SEPOLICY_VERSION)
.KATI_READONLY := PLATFORM_SEPOLICY_VERSION BOARD_SEPOLICY_VERS
@@ -919,7 +890,6 @@
.KATI_READONLY := \
PLATFORM_SEPOLICY_COMPAT_VERSIONS \
PLATFORM_SEPOLICY_VERSION \
- TOT_SEPOLICY_VERSION \
ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
ifneq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
@@ -1293,6 +1263,15 @@
include $(BUILD_SYSTEM)/dumpvar.mk
+ifneq ($(KEEP_VNDK),true)
+ifdef BOARD_VNDK_VERSION
+BOARD_VNDK_VERSION=
+endif
+ifdef PLATFORM_VNDK_VERSION
+PLATFORM_VNDK_VERSION=
+endif
+endif
+
ifeq (true,$(FULL_SYSTEM_OPTIMIZE_JAVA))
ifeq (,$(SYSTEM_OPTIMIZE_JAVA))
$(error SYSTEM_OPTIMIZE_JAVA must be enabled when FULL_SYSTEM_OPTIMIZE_JAVA is enabled)
diff --git a/core/copy_headers.mk b/core/copy_headers.mk
index c457eb0..397ea62 100644
--- a/core/copy_headers.mk
+++ b/core/copy_headers.mk
@@ -13,13 +13,12 @@
$(call pretty-error,Modules using LOCAL_SDK_VERSION may not use LOCAL_COPY_HEADERS)
endif
-include $(BUILD_SYSTEM)/local_vndk.mk
+include $(BUILD_SYSTEM)/local_vendor_product.mk
-# If we're using the VNDK, only vendor modules using the VNDK may use
-# LOCAL_COPY_HEADERS. Platform libraries will not have the include path
-# present.
-ifndef LOCAL_USE_VNDK
- $(call pretty-error,Only vendor modules using LOCAL_USE_VNDK may use LOCAL_COPY_HEADERS)
+# Modules in vendor or product may use LOCAL_COPY_HEADERS.
+# Platform libraries will not have the include path present.
+ifeq ($(call module-in-vendor-or-product),)
+ $(call pretty-error,Only modules in vendor or product may use LOCAL_COPY_HEADERS)
endif
# Clean up LOCAL_COPY_HEADERS_TO, since soong_ui will be comparing cleaned
diff --git a/core/definitions.mk b/core/definitions.mk
index ed842bc..40b7980 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2774,6 +2774,10 @@
$(if $(LOCAL_MIN_SDK_VERSION),$(LOCAL_MIN_SDK_VERSION),$(call module-target-sdk-version))
endef
+# Checks if module is in vendor or product
+define module-in-vendor-or-product
+$(if $(filter true,$(LOCAL_IN_VENDOR) $(LOCAL_IN_PRODUCT)),true)
+endef
define transform-classes.jar-to-dex
@echo "target Dex: $(PRIVATE_MODULE)"
diff --git a/core/envsetup.mk b/core/envsetup.mk
index cfb8a66..30a6c06 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -50,6 +50,25 @@
# Release config
include $(BUILD_SYSTEM)/release_config.mk
+# Set default value of KEEP_VNDK.
+ifeq ($(RELEASE_DEPRECATE_VNDK),true)
+ KEEP_VNDK ?= false
+else
+ KEEP_VNDK ?= true
+endif
+
+ifeq ($(KEEP_VNDK),true)
+ # Starting in Android U, non-VNDK devices not supported
+ # WARNING: DO NOT CHANGE: if you are downstream of AOSP, and you change this, without
+ # letting upstream know it's important to you, we may do cleanup which breaks this
+ # significantly. Please let us know if you are changing this.
+ ifndef BOARD_VNDK_VERSION
+ # READ WARNING - DO NOT CHANGE
+ BOARD_VNDK_VERSION := current
+ # READ WARNING - DO NOT CHANGE
+ endif
+endif
+
# ---------------------------------------------------------------
# Set up version information
include $(BUILD_SYSTEM)/version_util.mk
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index fb14cce..fecf4f6 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -45,7 +45,7 @@
my_target_crtbegin_dynamic_o :=
my_target_crtbegin_static_o :=
my_target_crtend_o :=
-else ifdef LOCAL_USE_VNDK
+else ifeq (true,$(call module-in-vendor-or-product))
my_target_crtbegin_dynamic_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_dynamic.vendor)
my_target_crtbegin_static_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_static.vendor)
my_target_crtend_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_android.vendor)
diff --git a/core/local_vendor_product.mk b/core/local_vendor_product.mk
new file mode 100644
index 0000000..75982cd
--- /dev/null
+++ b/core/local_vendor_product.mk
@@ -0,0 +1,22 @@
+# LOCAL_USE_VNDK is not a variable that modules should set directly, but some modules still do so.
+# Treat those modules as LOCAL_IN_VENDOR so that they keep working as expected.
+ifeq (true,$(LOCAL_USE_VNDK))
+ $(warning LOCAL_USE_VNDK must not be used. Please use LOCAL_VENDOR_MODULE or LOCAL_PRODUCT_MODULE instead.)
+ LOCAL_IN_VENDOR:=true
+endif
+
+# Set LOCAL_IN_VENDOR for modules going into the vendor or odm partition, and LOCAL_IN_PRODUCT for the
+# product partition, except for host modules. If LOCAL_SDK_VERSION is set, that's a more restrictive set,
+# so those modules don't need LOCAL_IN_VENDOR or LOCAL_IN_PRODUCT.
+ifndef LOCAL_IS_HOST_MODULE
+ifndef LOCAL_SDK_VERSION
+ ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_OEM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+ LOCAL_IN_VENDOR:=true
+ # Note: no need to check LOCAL_MODULE_PATH* since LOCAL_[VENDOR|ODM|OEM]_MODULE is already
+ # set correctly before this is included.
+ endif
+ ifeq (true,$(LOCAL_PRODUCT_MODULE))
+ LOCAL_IN_PRODUCT:=true
+ endif
+endif
+endif
diff --git a/core/local_vndk.mk b/core/local_vndk.mk
deleted file mode 100644
index eb8f2c0..0000000
--- a/core/local_vndk.mk
+++ /dev/null
@@ -1,41 +0,0 @@
-
-#Set LOCAL_USE_VNDK for modules going into product, vendor or odm partition, except for host modules
-#If LOCAL_SDK_VERSION is set, thats a more restrictive set, so they dont need LOCAL_USE_VNDK
-ifndef LOCAL_IS_HOST_MODULE
-ifndef LOCAL_SDK_VERSION
- ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_OEM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
- LOCAL_USE_VNDK:=true
- LOCAL_USE_VNDK_VENDOR:=true
- # Note: no need to check LOCAL_MODULE_PATH* since LOCAL_[VENDOR|ODM|OEM]_MODULE is already
- # set correctly before this is included.
- endif
- ifdef PRODUCT_PRODUCT_VNDK_VERSION
- # Product modules also use VNDK when PRODUCT_PRODUCT_VNDK_VERSION is defined.
- ifeq (true,$(LOCAL_PRODUCT_MODULE))
- LOCAL_USE_VNDK:=true
- LOCAL_USE_VNDK_PRODUCT:=true
- endif
- endif
-endif
-endif
-
-# Verify LOCAL_USE_VNDK usage, and set LOCAL_SDK_VERSION if necessary
-
-ifdef LOCAL_IS_HOST_MODULE
- ifdef LOCAL_USE_VNDK
- $(shell echo $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): Do not use LOCAL_USE_VNDK with host modules >&2)
- $(error done)
- endif
-endif
-ifdef LOCAL_USE_VNDK
- ifneq ($(LOCAL_USE_VNDK),true)
- $(shell echo '$(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): LOCAL_USE_VNDK must be "true" or empty, not "$(LOCAL_USE_VNDK)"' >&2)
- $(error done)
- endif
-
- ifdef LOCAL_SDK_VERSION
- $(shell echo $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): LOCAL_USE_VNDK must not be used with LOCAL_SDK_VERSION >&2)
- $(error done)
- endif
-endif
-
diff --git a/core/main.mk b/core/main.mk
index f5dbad8..a05f757 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -225,20 +225,14 @@
# ADDITIONAL_VENDOR_PROPERTIES will be installed in vendor/build.prop if
# property_overrides_split_enabled is true. Otherwise it will be installed in
# /system/build.prop
+ifeq ($(KEEP_VNDK),true)
ifdef BOARD_VNDK_VERSION
- ifeq ($(KEEP_VNDK),true)
ifeq ($(BOARD_VNDK_VERSION),current)
ADDITIONAL_VENDOR_PROPERTIES := ro.vndk.version=$(PLATFORM_VNDK_VERSION)
else
ADDITIONAL_VENDOR_PROPERTIES := ro.vndk.version=$(BOARD_VNDK_VERSION)
endif
- endif
-
- # TODO(b/290159430): ro.vndk.deprecate is a temporal variable for deprecating VNDK.
- # This variable will be removed once ro.vndk.version can be removed.
- ifneq ($(KEEP_VNDK),true)
- ADDITIONAL_SYSTEM_PROPERTIES += ro.vndk.deprecate=true
- endif
+endif
endif
# Add cpu properties for bionic and ART.
@@ -440,6 +434,8 @@
# To speedup startup of non-preopted builds, don't verify or compile the boot image.
ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.image-dex2oat-filter=extract
endif
+# b/323566535
+ADDITIONAL_SYSTEM_PROPERTIES += init.svc_debug.no_fatal.zygote=true
endif
## asan ##
diff --git a/core/proguard.flags b/core/proguard.flags
index 9cbba0f..aa406b9 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -15,6 +15,13 @@
@com.android.internal.annotations.VisibleForTesting *;
}
+# Keep classes and members with platform @TestApi annotations, similar to
+# @VisibleForTesting.
+-keep @android.annotation.TestApi class *
+-keepclassmembers class * {
+ @android.annotation.TestApi *;
+}
+
# Keep classes and members with non-platform @VisibleForTesting annotations, but
# only within platform-defined packages. This avoids keeping external, library-specific
# test code that isn't actually needed for platform testing.
diff --git a/core/python_binary_host_mobly_test_config_template.xml b/core/python_binary_host_mobly_test_config_template.xml
index a6576cd..a986df2 100644
--- a/core/python_binary_host_mobly_test_config_template.xml
+++ b/core/python_binary_host_mobly_test_config_template.xml
@@ -13,13 +13,9 @@
<configuration description="Config for {MODULE} mobly test">
{EXTRA_CONFIGS}
- <device name="device1"></device>
- <device name="device2"></device>
+ <device name="AndroidRealDevice"></device>
+ <device name="AndroidRealDevice"></device>
- <test class="com.android.tradefed.testtype.mobly.MoblyBinaryHostTest">
- <!-- The mobly-par-file-name should match the module name -->
- <option name="mobly-par-file-name" value="{MODULE}" />
- <!-- Timeout limit in milliseconds for all test cases of the python binary -->
- <option name="mobly-test-timeout" value="300000" />
- </test>
+ <option name="mobly_pkg" key="file" value="{MODULE}" />
+ <test class="MoblyAospPackageTest" />
</configuration>
diff --git a/core/ravenwood_test_config_template.xml b/core/ravenwood_test_config_template.xml
new file mode 100644
index 0000000..16a22c0
--- /dev/null
+++ b/core/ravenwood_test_config_template.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {MODULE}">
+ <option name="test-suite-tag" value="ravenwood" />
+ <option name="test-suite-tag" value="ravenwood-tests" />
+
+ <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" />
+ <option name="use-ravenwood-resources" value="true" />
+ <option name="exclude-paths" value="java" />
+ <option name="socket-timeout" value="10000" />
+ <option name="null-device" value="true" />
+
+ {EXTRA_CONFIGS}
+
+ <test class="com.android.tradefed.testtype.IsolatedHostTest" >
+ <option name="jar" value="{MODULE}.jar" />
+ <option name="java-flags" value="--add-modules=jdk.compiler"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED"/>
+ <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED"/>
+
+ <!-- Needed for supporting ParcelFileDescriptor internals -->
+ <option name="java-flags" value="--add-exports=java.base/jdk.internal.access=ALL-UNNAMED"/>
+ </test>
+</configuration>
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 139de10..2f510d9 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -42,7 +42,7 @@
ifeq ($(LOCAL_NO_CRT),true)
my_target_crtbegin_so_o :=
my_target_crtend_so_o :=
-else ifdef LOCAL_USE_VNDK
+else ifeq ($(call module-in-vendor-or-product),true)
my_target_crtbegin_so_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtbegin_so.vendor)
my_target_crtend_so_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_so.vendor)
else
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index 94e1115..943ed30 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -99,7 +99,7 @@
include $(BUILD_SYSTEM)/link_type.mk
endif
-ifdef LOCAL_USE_VNDK
+ifeq ($(call module-in-vendor-or-product),true)
ifneq ($(LOCAL_VNDK_DEPEND_ON_CORE_VARIANT),true)
name_without_suffix := $(patsubst %.vendor,%,$(LOCAL_MODULE))
ifneq ($(name_without_suffix),$(LOCAL_MODULE))
@@ -128,8 +128,8 @@
ifdef LOCAL_INSTALLED_MODULE
ifdef LOCAL_SHARED_LIBRARIES
my_shared_libraries := $(LOCAL_SHARED_LIBRARIES)
- ifdef LOCAL_USE_VNDK
- ifdef LOCAL_USE_VNDK_PRODUCT
+ ifeq ($(call module-in-vendor-or-product),true)
+ ifdef LOCAL_IN_PRODUCT
my_shared_libraries := $(foreach l,$(my_shared_libraries),\
$(if $(SPLIT_PRODUCT.SHARED_LIBRARIES.$(l)),$(l).product,$(l)))
else
@@ -143,8 +143,8 @@
ifdef LOCAL_DYLIB_LIBRARIES
my_dylibs := $(LOCAL_DYLIB_LIBRARIES)
# Treat these as shared library dependencies for installation purposes.
- ifdef LOCAL_USE_VNDK
- ifdef LOCAL_USE_VNDK_PRODUCT
+ ifeq ($(call module-in-vendor-or-product),true)
+ ifdef LOCAL_IN_PRODUCT
my_dylibs := $(foreach l,$(my_dylibs),\
$(if $(SPLIT_PRODUCT.SHARED_LIBRARIES.$(l)),$(l).product,$(l)))
else
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 843b03c..ec0c70e 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -147,8 +147,10 @@
$(call add_json_str, BtConfigIncludeDir, $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
$(call add_json_list, DeviceKernelHeaders, $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
$(call add_json_str, VendorApiLevel, $(BOARD_API_LEVEL))
+ifeq ($(KEEP_VNDK),true)
$(call add_json_str, DeviceVndkVersion, $(BOARD_VNDK_VERSION))
$(call add_json_str, Platform_vndk_version, $(PLATFORM_VNDK_VERSION))
+endif
$(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VERSIONS))
$(call add_json_list, DeviceSystemSdkVersions, $(BOARD_SYSTEMSDK_VERSIONS))
$(call add_json_str, RecoverySnapshotVersion, $(RECOVERY_SNAPSHOT_VERSION))
@@ -228,7 +230,6 @@
$(call add_json_str, ProductSepolicyPrebuiltApiDir, $(BOARD_PRODUCT_PREBUILT_DIR))
$(call add_json_str, PlatformSepolicyVersion, $(PLATFORM_SEPOLICY_VERSION))
-$(call add_json_str, TotSepolicyVersion, $(TOT_SEPOLICY_VERSION))
$(call add_json_list, PlatformSepolicyCompatVersions, $(PLATFORM_SEPOLICY_COMPAT_VERSIONS))
$(call add_json_bool, ForceApexSymlinkOptimization, $(filter true,$(TARGET_FORCE_APEX_SYMLINK_OPTIMIZATION)))
diff --git a/core/version_util.mk b/core/version_util.mk
index dfa0277..0ed4499 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -157,21 +157,23 @@
endif
.KATI_READONLY := DEFAULT_APP_TARGET_SDK
-ifndef PLATFORM_VNDK_VERSION
- # This is the definition of the VNDK version for the current VNDK libraries.
- # With trunk stable, VNDK will not be frozen but deprecated.
- # This version will be removed with the VNDK deprecation.
- ifeq (REL,$(PLATFORM_VERSION_CODENAME))
- ifdef RELEASE_PLATFORM_VNDK_VERSION
- PLATFORM_VNDK_VERSION := $(RELEASE_PLATFORM_VNDK_VERSION)
+ifeq ($(KEEP_VNDK),true)
+ ifndef PLATFORM_VNDK_VERSION
+ # This is the definition of the VNDK version for the current VNDK libraries.
+ # With trunk stable, VNDK will not be frozen but deprecated.
+ # This version will be removed with the VNDK deprecation.
+ ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ ifdef RELEASE_PLATFORM_VNDK_VERSION
+ PLATFORM_VNDK_VERSION := $(RELEASE_PLATFORM_VNDK_VERSION)
+ else
+ PLATFORM_VNDK_VERSION := $(PLATFORM_SDK_VERSION)
+ endif
else
- PLATFORM_VNDK_VERSION := $(PLATFORM_SDK_VERSION)
+ PLATFORM_VNDK_VERSION := $(PLATFORM_VERSION_CODENAME)
endif
- else
- PLATFORM_VNDK_VERSION := $(PLATFORM_VERSION_CODENAME)
endif
+ .KATI_READONLY := PLATFORM_VNDK_VERSION
endif
-.KATI_READONLY := PLATFORM_VNDK_VERSION
ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
# This is the oldest version of system SDK that the platform supports. Contrary
diff --git a/envsetup.sh b/envsetup.sh
index 6111952..db21188 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -367,7 +367,7 @@
# And in with the new...
ANDROID_GLOBAL_BUILD_PATHS=$T/build/soong/bin
- ANDROID_GLOBAL_BUILD_PATHS+=:$T/bazel/bin
+ ANDROID_GLOBAL_BUILD_PATHS+=:$T/build/bazel/bin
ANDROID_GLOBAL_BUILD_PATHS+=:$T/development/scripts
ANDROID_GLOBAL_BUILD_PATHS+=:$T/prebuilts/devtools/tools
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index 01ebe56..c3878b8 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -21,8 +21,10 @@
# the devices with metadata parition
BOARD_USES_METADATA_PARTITION := true
+ifeq ($(KEEP_VNDK),true)
# Default is current, but allow devices to override vndk version if needed.
BOARD_VNDK_VERSION ?= current
+endif
# 64 bit mediadrmserver
TARGET_ENABLE_MEDIADRM_64 := true
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 7d2b3ba..277223e 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -126,6 +126,7 @@
ip \
iptables \
javax.obex \
+ kcmdlinectrl \
keystore2 \
credstore \
ld.mc \
@@ -319,6 +320,11 @@
com.android.nfcservices
endif
+ifeq ($(RELEASE_USE_WEBVIEW_BOOTSTRAP_MODULE),true)
+ PRODUCT_PACKAGES += \
+ com.android.webview.bootstrap
+endif
+
# VINTF data for system image
PRODUCT_PACKAGES += \
system_manifest.xml \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 2fd7209..3e3918c 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -131,6 +131,10 @@
endif
+ifeq ($(RELEASE_AVF_ENABLE_LLPVM_CHANGES),true)
+ PRODUCT_APEX_SYSTEM_SERVER_JARS += com.android.virt:service-virtualization
+endif
+
# Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
# art project.
ifneq (,$(wildcard art))
diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk
index 5e2a694..b622496 100644
--- a/target/product/fullmte.mk
+++ b/target/product/fullmte.mk
@@ -20,8 +20,7 @@
# For more details, see:
# https://source.android.com/docs/security/test/memory-safety/arm-mte
ifeq ($(filter memtag_heap,$(SANITIZE_TARGET)),)
- # TODO(b/292478827): Re-enable memtag_stack when new toolchain rolls.
- SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap)
+ SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap memtag_stack)
SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap)
endif
PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index ba0912c..5218f29 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -49,6 +49,3 @@
# use the go specific handheld_core_hardware.xml from frameworks
PRODUCT_COPY_FILES += \
frameworks/native/data/etc/go_handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml
-
-# Dedupe VNDK libraries with identical core variants.
-TARGET_VNDK_USE_CORE_VARIANT := true
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 007aabd..54c84ea 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -7,6 +7,7 @@
#####################################################################
# This is the up-to-date list of vndk libs.
LATEST_VNDK_LIB_LIST := $(LOCAL_PATH)/current.txt
+ifeq ($(KEEP_VNDK),true)
UNFROZEN_VNDK := true
ifeq (REL,$(PLATFORM_VERSION_CODENAME))
# Use frozen vndk lib list only if "34 >= PLATFORM_VNDK_VERSION"
@@ -18,6 +19,7 @@
UNFROZEN_VNDK :=
endif
endif
+endif
#####################################################################
# Check the generate list against the latest list stored in the
@@ -35,6 +37,8 @@
check-vndk-list: ;
else ifeq ($(TARGET_SKIP_CURRENT_VNDK),true)
check-vndk-list: ;
+else ifeq ($(BOARD_VNDK_VERSION),)
+check-vndk-list: ;
else
check-vndk-list: $(check-vndk-list-timestamp)
ifneq ($(SKIP_ABI_CHECKS),true)
@@ -199,25 +203,14 @@
include $(BUILD_PHONY_PACKAGE)
include $(CLEAR_VARS)
-_vndk_versions :=
-ifeq ($(filter com.android.vndk.current.on_vendor, $(PRODUCT_PACKAGES)),)
- _vndk_versions += $(if $(call math_is_number,$(PLATFORM_VNDK_VERSION)),\
- $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),\
- $(if $(call math_lt,$(vndk_ver),$(PLATFORM_VNDK_VERSION)),$(vndk_ver))),\
- $(PRODUCT_EXTRA_VNDK_VERSIONS))
-endif
-ifneq ($(BOARD_VNDK_VERSION),current)
- _vndk_versions += $(BOARD_VNDK_VERSION)
-endif
+
LOCAL_MODULE := vndk_apex_snapshot_package
LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
LOCAL_LICENSE_CONDITIONS := notice
LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(_vndk_versions),com.android.vndk.v$(vndk_ver))
+LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver))
include $(BUILD_PHONY_PACKAGE)
-_vndk_versions :=
-
#####################################################################
# Define Phony module to install LLNDK modules which are installed in
# the system image
diff --git a/teams/Android.bp b/teams/Android.bp
index 89c719f..bae8e80 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -4335,3 +4335,32 @@
// go/trendy/manage/engineers/5093014696525824
trendy_team_id: "5093014696525824",
}
+
+team {
+ name: "trendy_team_media_framework_drm",
+
+ // go/trendy/manage/engineers/5311752690335744
+ trendy_team_id: "5311752690335744",
+}
+
+team {
+ name: "trendy_team_media_framework_audio",
+
+ // go/trendy/manage/engineers/5823575353065472
+ trendy_team_id: "5823575353065472",
+}
+
+team {
+ name: "trendy_team_ar_sensors_context_hub",
+
+ // go/trendy/manage/engineers/4776371090259968
+ trendy_team_id: "4776371090259968",
+}
+
+
+team {
+ name: "trendy_team_media_codec_framework",
+
+ // go/trendy/manage/engineers/4943966050844672
+ trendy_team_id: "4943966050844672",
+}
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
index 650c8c0..398da06 100644
--- a/tools/aconfig/TEST_MAPPING
+++ b/tools/aconfig/TEST_MAPPING
@@ -1,25 +1,6 @@
{
"presubmit": [
{
- // Ensure changes on aconfig auto generated library is compatible with
- // test testing filtering logic. Breakage on this test means all tests
- // that using the flag annotations to do filtering will get affected.
- "name": "FlagAnnotationTests",
- "options": [
- {
- "include-filter": "android.cts.flags.tests.FlagAnnotationTest"
- }
- ]
- },
- {
- // Ensure changes on aconfig auto generated library is compatible with
- // test testing filtering logic. Breakage on this test means all tests
- // that using the flag macros to do filtering will get affected.
- "name": "FlagMacrosTests"
- }
- ],
- "postsubmit": [
- {
// aconfig unit tests
"name": "aconfig.test"
},
@@ -66,6 +47,23 @@
{
// aconfig_storage_file unit tests
"name": "aconfig_storage_file.test"
+ },
+ {
+ // Ensure changes on aconfig auto generated library is compatible with
+ // test testing filtering logic. Breakage on this test means all tests
+ // that using the flag annotations to do filtering will get affected.
+ "name": "FlagAnnotationTests",
+ "options": [
+ {
+ "include-filter": "android.cts.flags.tests.FlagAnnotationTest"
+ }
+ ]
+ },
+ {
+ // Ensure changes on aconfig auto generated library is compatible with
+ // test testing filtering logic. Breakage on this test means all tests
+ // that using the flag macros to do filtering will get affected.
+ "name": "FlagMacrosTests"
}
]
}
diff --git a/tools/aconfig/aconfig/Android.bp b/tools/aconfig/aconfig/Android.bp
index 3152d35..164bfe7 100644
--- a/tools/aconfig/aconfig/Android.bp
+++ b/tools/aconfig/aconfig/Android.bp
@@ -40,18 +40,21 @@
aconfig_declarations {
name: "aconfig.test.flags",
package: "com.android.aconfig.test",
+ container: "system",
srcs: ["tests/test.aconfig"],
}
aconfig_declarations {
name: "aconfig.test.exported.flags",
package: "com.android.aconfig.test.exported",
+ container: "system",
srcs: ["tests/test_exported.aconfig"],
}
aconfig_declarations {
name: "aconfig.test.forcereadonly.flags",
package: "com.android.aconfig.test.forcereadonly",
+ container: "system",
srcs: ["tests/test_force_read_only.aconfig"],
}
@@ -220,7 +223,7 @@
rust_test {
name: "aconfig.prod_mode.test.rust",
srcs: [
- "tests/aconfig_prod_mode_test.rs"
+ "tests/aconfig_prod_mode_test.rs",
],
rustlibs: [
"libaconfig_test_rust_library",
@@ -238,7 +241,7 @@
rust_test {
name: "aconfig.test_mode.test.rust",
srcs: [
- "tests/aconfig_test_mode_test.rs"
+ "tests/aconfig_test_mode_test.rs",
],
rustlibs: [
"libaconfig_test_rust_library_with_test_mode",
@@ -256,7 +259,7 @@
rust_test {
name: "aconfig.exported_mode.test.rust",
srcs: [
- "tests/aconfig_exported_mode_test.rs"
+ "tests/aconfig_exported_mode_test.rs",
],
rustlibs: [
"libaconfig_test_rust_library_with_exported_mode",
@@ -274,7 +277,7 @@
rust_test {
name: "aconfig.force_read_only_mode.test.rust",
srcs: [
- "tests/aconfig_force_read_only_mode_test.rs"
+ "tests/aconfig_force_read_only_mode_test.rs",
],
rustlibs: [
"libaconfig_test_rust_library_with_force_read_only_mode",
diff --git a/tools/aconfig/aconfig/src/test.rs b/tools/aconfig/aconfig/src/test.rs
index 7b5318d..7409cda 100644
--- a/tools/aconfig/aconfig/src/test.rs
+++ b/tools/aconfig/aconfig/src/test.rs
@@ -15,6 +15,9 @@
*/
#[cfg(test)]
+pub use test_utils::*;
+
+#[cfg(test)]
pub mod test_utils {
use crate::commands::Input;
use aconfig_protos::ProtoParsedFlags;
@@ -340,6 +343,3 @@
);
}
}
-
-#[cfg(test)]
-pub use test_utils::*;
diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp
index 53b693f..8922ba4 100644
--- a/tools/aconfig/aconfig_storage_file/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/Android.bp
@@ -13,6 +13,9 @@
"libonce_cell",
"libprotobuf",
"libtempfile",
+ "libmemmap2",
+ "libcxx",
+ "libthiserror",
],
}
@@ -23,10 +26,36 @@
defaults: ["aconfig_storage_file.defaults"],
}
+genrule {
+ name: "ro.package.map",
+ out: ["tests/tmp.ro.package.map"],
+ srcs: ["tests/package.map"],
+ cmd: "rm -f $(out);cp -f $(in) $(out);chmod -w $(out)",
+}
+
+genrule {
+ name: "ro.flag.map",
+ out: ["tests/tmp.ro.flag.map"],
+ srcs: ["tests/flag.map"],
+ cmd: "rm -f $(out);cp -f $(in) $(out);chmod -w $(out)",
+}
+
+genrule {
+ name: "ro.flag.val",
+ out: ["tests/tmp.ro.flag.val"],
+ srcs: ["tests/flag.val"],
+ cmd: "rm -f $(out);cp -f $(in) $(out);chmod -w $(out)",
+}
+
rust_test_host {
name: "aconfig_storage_file.test",
test_suites: ["general-tests"],
defaults: ["aconfig_storage_file.defaults"],
+ data: [
+ "tests/package.map",
+ "tests/flag.map",
+ "tests/flag.val",
+ ],
}
rust_protobuf {
@@ -36,3 +65,52 @@
source_stem: "aconfig_storage_protos",
host_supported: true,
}
+
+cc_library_static {
+ name: "libaconfig_storage_protos_cc",
+ proto: {
+ export_proto_headers: true,
+ type: "lite",
+ },
+ srcs: ["protos/aconfig_storage_metadata.proto"],
+ apex_available: [
+ "//apex_available:platform",
+ "//apex_available:anyapex",
+ ],
+ host_supported: true,
+}
+
+genrule {
+ name: "libcxx_aconfig_storage_bridge_code",
+ tools: ["cxxbridge"],
+ cmd: "$(location cxxbridge) $(in) > $(out)",
+ srcs: ["src/lib.rs"],
+ out: ["aconfig_storage/lib.rs.cc"],
+}
+
+genrule {
+ name: "libcxx_aconfig_storage_bridge_header",
+ tools: ["cxxbridge"],
+ cmd: "$(location cxxbridge) $(in) --header > $(out)",
+ srcs: ["src/lib.rs"],
+ out: ["aconfig_storage/lib.rs.h"],
+}
+
+rust_ffi_static {
+ name: "libaconfig_storage_cxx_bridge",
+ crate_name: "aconfig_storage_cxx_bridge",
+ host_supported: true,
+ defaults: ["aconfig_storage_file.defaults"],
+}
+
+cc_library_static {
+ name: "libaconfig_storage_cc",
+ srcs: ["aconfig_storage.cpp"],
+ generated_headers: [
+ "cxx-bridge-header",
+ "libcxx_aconfig_storage_bridge_header"
+ ],
+ generated_sources: ["libcxx_aconfig_storage_bridge_code"],
+ whole_static_libs: ["libaconfig_storage_cxx_bridge"],
+ export_include_dirs: ["include"],
+}
diff --git a/tools/aconfig/aconfig_storage_file/Cargo.toml b/tools/aconfig/aconfig_storage_file/Cargo.toml
index 54ba6c7..c4e2670 100644
--- a/tools/aconfig/aconfig_storage_file/Cargo.toml
+++ b/tools/aconfig/aconfig_storage_file/Cargo.toml
@@ -13,6 +13,9 @@
protobuf = "3.2.0"
once_cell = "1.19.0"
tempfile = "3.9.0"
+cxx = "1.0"
+thiserror = "1.0.56"
[build-dependencies]
protobuf-codegen = "3.2.0"
+cxx-build = "1.0"
diff --git a/tools/aconfig/aconfig_storage_file/aconfig_storage.cpp b/tools/aconfig/aconfig_storage_file/aconfig_storage.cpp
new file mode 100644
index 0000000..ac64093
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/aconfig_storage.cpp
@@ -0,0 +1,106 @@
+#include "aconfig_storage/aconfig_storage.hpp"
+
+#include "rust/cxx.h"
+#include "aconfig_storage/lib.rs.h"
+
+namespace aconfig_storage {
+
+/// Get package offset
+PackageOffsetQuery get_package_offset(
+ std::string const& container,
+ std::string const& package) {
+ auto offset_cxx = get_package_offset_cxx(
+ rust::Str(container.c_str()),
+ rust::Str(package.c_str()));
+ auto offset = PackageOffsetQuery();
+ offset.query_success = offset_cxx.query_success;
+ offset.error_message = std::string(offset_cxx.error_message.c_str());
+ offset.package_exists = offset_cxx.package_exists;
+ offset.package_id = offset_cxx.package_id;
+ offset.boolean_offset = offset_cxx.boolean_offset;
+ return offset;
+}
+
+/// Get flag offset
+FlagOffsetQuery get_flag_offset(
+ std::string const& container,
+ uint32_t package_id,
+ std::string const& flag_name) {
+ auto offset_cxx = get_flag_offset_cxx(
+ rust::Str(container.c_str()),
+ package_id,
+ rust::Str(flag_name.c_str()));
+ auto offset = FlagOffsetQuery();
+ offset.query_success = offset_cxx.query_success;
+ offset.error_message = std::string(offset_cxx.error_message.c_str());
+ offset.flag_exists = offset_cxx.flag_exists;
+ offset.flag_offset = offset_cxx.flag_offset;
+ return offset;
+}
+
+/// Get boolean flag value
+BooleanFlagValueQuery get_boolean_flag_value(
+ std::string const& container,
+ uint32_t offset) {
+ auto value_cxx = get_boolean_flag_value_cxx(
+ rust::Str(container.c_str()),
+ offset);
+ auto value = BooleanFlagValueQuery();
+ value.query_success = value_cxx.query_success;
+ value.error_message = std::string(value_cxx.error_message.c_str());
+ value.flag_value = value_cxx.flag_value;
+ return value;
+}
+
+namespace test_only_api {
+PackageOffsetQuery get_package_offset_impl(
+ std::string const& pb_file,
+ std::string const& container,
+ std::string const& package) {
+ auto offset_cxx = get_package_offset_cxx_impl(
+ rust::Str(pb_file.c_str()),
+ rust::Str(container.c_str()),
+ rust::Str(package.c_str()));
+ auto offset = PackageOffsetQuery();
+ offset.query_success = offset_cxx.query_success;
+ offset.error_message = std::string(offset_cxx.error_message.c_str());
+ offset.package_exists = offset_cxx.package_exists;
+ offset.package_id = offset_cxx.package_id;
+ offset.boolean_offset = offset_cxx.boolean_offset;
+ return offset;
+}
+
+FlagOffsetQuery get_flag_offset_impl(
+ std::string const& pb_file,
+ std::string const& container,
+ uint32_t package_id,
+ std::string const& flag_name) {
+ auto offset_cxx = get_flag_offset_cxx_impl(
+ rust::Str(pb_file.c_str()),
+ rust::Str(container.c_str()),
+ package_id,
+ rust::Str(flag_name.c_str()));
+ auto offset = FlagOffsetQuery();
+ offset.query_success = offset_cxx.query_success;
+ offset.error_message = std::string(offset_cxx.error_message.c_str());
+ offset.flag_exists = offset_cxx.flag_exists;
+ offset.flag_offset = offset_cxx.flag_offset;
+ return offset;
+}
+
+BooleanFlagValueQuery get_boolean_flag_value_impl(
+ std::string const& pb_file,
+ std::string const& container,
+ uint32_t offset) {
+ auto value_cxx = get_boolean_flag_value_cxx_impl(
+ rust::Str(pb_file.c_str()),
+ rust::Str(container.c_str()),
+ offset);
+ auto value = BooleanFlagValueQuery();
+ value.query_success = value_cxx.query_success;
+ value.error_message = std::string(value_cxx.error_message.c_str());
+ value.flag_value = value_cxx.flag_value;
+ return value;
+}
+} // namespace test_only_api
+} // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_file/build.rs b/tools/aconfig/aconfig_storage_file/build.rs
index 1feeb60..894b71c 100644
--- a/tools/aconfig/aconfig_storage_file/build.rs
+++ b/tools/aconfig/aconfig_storage_file/build.rs
@@ -14,4 +14,7 @@
.inputs(proto_files)
.cargo_out_dir("aconfig_storage_protos")
.run_from_script();
+
+ let _ = cxx_build::bridge("src/lib.rs");
+ println!("cargo:rerun-if-changed=src/lib.rs");
}
diff --git a/tools/aconfig/aconfig_storage_file/include/aconfig_storage/aconfig_storage.hpp b/tools/aconfig/aconfig_storage_file/include/aconfig_storage/aconfig_storage.hpp
new file mode 100644
index 0000000..636fb7e
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/include/aconfig_storage/aconfig_storage.hpp
@@ -0,0 +1,76 @@
+#pragma once
+
+#include <stdint.h>
+#include <string>
+
+namespace aconfig_storage {
+
+/// Package offset query result
+struct PackageOffsetQuery {
+ bool query_success;
+ std::string error_message;
+ bool package_exists;
+ uint32_t package_id;
+ uint32_t boolean_offset;
+};
+
+/// Flag offset query result
+struct FlagOffsetQuery {
+ bool query_success;
+ std::string error_message;
+ bool flag_exists;
+ uint16_t flag_offset;
+};
+
+/// Boolean flag value query result
+struct BooleanFlagValueQuery {
+ bool query_success;
+ std::string error_message;
+ bool flag_value;
+};
+
+/// Get package offset
+/// \input container: the flag container name
+/// \input package: the flag package name
+/// \returns a PackageOffsetQuery
+PackageOffsetQuery get_package_offset(
+ std::string const& container,
+ std::string const& package);
+
+/// Get flag offset
+/// \input container: the flag container name
+/// \input package_id: the flag package id obtained from package offset query
+/// \input flag_name: flag name
+/// \returns a FlagOffsetQuery
+FlagOffsetQuery get_flag_offset(
+ std::string const& container,
+ uint32_t package_id,
+ std::string const& flag_name);
+
+/// Get boolean flag value
+/// \input container: the flag container name
+/// \input offset: the boolean flag value byte offset in the file
+/// \returns a BooleanFlagValueQuery
+BooleanFlagValueQuery get_boolean_flag_value(
+ std::string const& container,
+ uint32_t offset);
+
+/// DO NOT USE APIS IN THE FOLLOWING NAMESPACE, TEST ONLY
+namespace test_only_api {
+PackageOffsetQuery get_package_offset_impl(
+ std::string const& pb_file,
+ std::string const& container,
+ std::string const& package);
+
+FlagOffsetQuery get_flag_offset_impl(
+ std::string const& pb_file,
+ std::string const& container,
+ uint32_t package_id,
+ std::string const& flag_name);
+
+BooleanFlagValueQuery get_boolean_flag_value_impl(
+ std::string const& pb_file,
+ std::string const& container,
+ uint32_t offset);
+} // namespace test_only_api
+} // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
index dfbd9de..108804e 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
@@ -17,8 +17,10 @@
//! flag table module defines the flag table file format and methods for serialization
//! and deserialization
+use crate::AconfigStorageError::{self, BytesParseFail, HigherStorageFileVersion};
use crate::{get_bucket_index, read_str_from_bytes, read_u16_from_bytes, read_u32_from_bytes};
-use anyhow::{anyhow, Result};
+use anyhow::anyhow;
+pub type FlagOffset = u16;
/// Flag table header struct
#[derive(PartialEq, Debug)]
@@ -47,7 +49,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let mut head = 0;
Ok(Self {
version: read_u32_from_bytes(bytes, &mut head)?,
@@ -85,7 +87,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let mut head = 0;
let node = Self {
package_id: read_u32_from_bytes(bytes, &mut head)?,
@@ -127,7 +129,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let header = FlagTableHeader::from_bytes(bytes)?;
let num_flags = header.num_flags;
let num_buckets = crate::get_table_size(num_flags)?;
@@ -144,7 +146,8 @@
head += node.as_bytes().len();
Ok(node)
})
- .collect::<Result<Vec<_>>>()?;
+ .collect::<Result<Vec<_>, AconfigStorageError>>()
+ .map_err(|errmsg| BytesParseFail(anyhow!("fail to parse flag table: {}", errmsg)))?;
let table = Self { header, buckets, nodes };
Ok(table)
@@ -152,14 +155,18 @@
}
/// Query flag within package offset
-pub fn find_flag_offset(buf: &[u8], package_id: u32, flag: &str) -> Result<Option<u16>> {
+pub fn find_flag_offset(
+ buf: &[u8],
+ package_id: u32,
+ flag: &str,
+) -> Result<Option<FlagOffset>, AconfigStorageError> {
let interpreted_header = FlagTableHeader::from_bytes(buf)?;
if interpreted_header.version > crate::FILE_VERSION {
- return Err(anyhow!(
+ return Err(HigherStorageFileVersion(anyhow!(
"Cannot read storage file with a higher version of {} with lib version {}",
interpreted_header.version,
crate::FILE_VERSION
- ));
+ )));
}
let num_buckets = (interpreted_header.node_offset - interpreted_header.bucket_offset) / 4;
@@ -202,7 +209,7 @@
}
}
- pub fn create_test_flag_table() -> Result<FlagTable> {
+ pub fn create_test_flag_table() -> FlagTable {
let header = FlagTableHeader {
version: crate::FILE_VERSION,
container: String::from("system"),
@@ -240,13 +247,13 @@
FlagTableNode::new_expected(2, "enabled_fixed_ro", 1, 0, None),
FlagTableNode::new_expected(0, "disabled_rw", 1, 0, None),
];
- Ok(FlagTable { header, buckets, nodes })
+ FlagTable { header, buckets, nodes }
}
#[test]
// this test point locks down the table serialization
fn test_serialization() {
- let flag_table = create_test_flag_table().unwrap();
+ let flag_table = create_test_flag_table();
let header: &FlagTableHeader = &flag_table.header;
let reinterpreted_header = FlagTableHeader::from_bytes(&header.as_bytes());
@@ -267,7 +274,7 @@
#[test]
// this test point locks down table query
fn test_flag_query() {
- let flag_table = create_test_flag_table().unwrap().as_bytes();
+ let flag_table = create_test_flag_table().as_bytes();
let baseline = vec![
(0, "enabled_ro", 1u16),
(0, "enabled_rw", 2u16),
@@ -288,7 +295,7 @@
#[test]
// this test point locks down table query of a non exist flag
fn test_not_existed_flag_query() {
- let flag_table = create_test_flag_table().unwrap().as_bytes();
+ let flag_table = create_test_flag_table().as_bytes();
let flag_offset = find_flag_offset(&flag_table[..], 1, "disabled_fixed_ro").unwrap();
assert_eq!(flag_offset, None);
let flag_offset = find_flag_offset(&flag_table[..], 2, "disabled_rw").unwrap();
@@ -298,14 +305,14 @@
#[test]
// this test point locks down query error when file has a higher version
fn test_higher_version_storage_file() {
- let mut table = create_test_flag_table().unwrap();
+ let mut table = create_test_flag_table();
table.header.version = crate::FILE_VERSION + 1;
let flag_table = table.as_bytes();
let error = find_flag_offset(&flag_table[..], 0, "enabled_ro").unwrap_err();
assert_eq!(
format!("{:?}", error),
format!(
- "Cannot read storage file with a higher version of {} with lib version {}",
+ "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
crate::FILE_VERSION + 1,
crate::FILE_VERSION
)
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_value.rs b/tools/aconfig/aconfig_storage_file/src/flag_value.rs
index bb8892d..0a6a37f 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_value.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_value.rs
@@ -17,8 +17,9 @@
//! flag value module defines the flag value file format and methods for serialization
//! and deserialization
+use crate::AconfigStorageError::{self, HigherStorageFileVersion, InvalidStorageFileOffset};
use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes};
-use anyhow::{anyhow, Result};
+use anyhow::anyhow;
/// Flag value header struct
#[derive(PartialEq, Debug)]
@@ -45,7 +46,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let mut head = 0;
Ok(Self {
version: read_u32_from_bytes(bytes, &mut head)?,
@@ -75,7 +76,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let header = FlagValueHeader::from_bytes(bytes)?;
let num_flags = header.num_flags;
let mut head = header.as_bytes().len();
@@ -87,14 +88,14 @@
}
/// Query flag value
-pub fn get_boolean_flag_value(buf: &[u8], flag_offset: u32) -> Result<bool> {
+pub fn find_boolean_flag_value(buf: &[u8], flag_offset: u32) -> Result<bool, AconfigStorageError> {
let interpreted_header = FlagValueHeader::from_bytes(buf)?;
if interpreted_header.version > crate::FILE_VERSION {
- return Err(anyhow!(
+ return Err(HigherStorageFileVersion(anyhow!(
"Cannot read storage file with a higher version of {} with lib version {}",
interpreted_header.version,
crate::FILE_VERSION
- ));
+ )));
}
let mut head = (interpreted_header.boolean_value_offset + flag_offset) as usize;
@@ -102,7 +103,9 @@
// TODO: right now, there is only boolean flags, with more flag value types added
// later, the end of boolean flag value section should be updated (b/322826265).
if head >= interpreted_header.file_size as usize {
- return Err(anyhow!("Flag value offset goes beyond the end of the file."));
+ return Err(InvalidStorageFileOffset(anyhow!(
+ "Flag value offset goes beyond the end of the file."
+ )));
}
let val = read_u8_from_bytes(buf, &mut head)?;
@@ -113,7 +116,7 @@
mod tests {
use super::*;
- pub fn create_test_flag_value_list() -> Result<FlagValueList> {
+ pub fn create_test_flag_value_list() -> FlagValueList {
let header = FlagValueHeader {
version: crate::FILE_VERSION,
container: String::from("system"),
@@ -122,13 +125,13 @@
boolean_value_offset: 26,
};
let booleans: Vec<bool> = vec![false, true, false, false, true, true, false, true];
- Ok(FlagValueList { header, booleans })
+ FlagValueList { header, booleans }
}
#[test]
// this test point locks down the value list serialization
fn test_serialization() {
- let flag_value_list = create_test_flag_value_list().unwrap();
+ let flag_value_list = create_test_flag_value_list();
let header: &FlagValueHeader = &flag_value_list.header;
let reinterpreted_header = FlagValueHeader::from_bytes(&header.as_bytes());
@@ -143,10 +146,10 @@
#[test]
// this test point locks down flag value query
fn test_flag_value_query() {
- let flag_value_list = create_test_flag_value_list().unwrap().as_bytes();
+ let flag_value_list = create_test_flag_value_list().as_bytes();
let baseline: Vec<bool> = vec![false, true, false, false, true, true, false, true];
for (offset, expected_value) in baseline.into_iter().enumerate() {
- let flag_value = get_boolean_flag_value(&flag_value_list[..], offset as u32).unwrap();
+ let flag_value = find_boolean_flag_value(&flag_value_list[..], offset as u32).unwrap();
assert_eq!(flag_value, expected_value);
}
}
@@ -154,22 +157,25 @@
#[test]
// this test point locks down query beyond the end of boolean section
fn test_boolean_out_of_range() {
- let flag_value_list = create_test_flag_value_list().unwrap().as_bytes();
- let error = get_boolean_flag_value(&flag_value_list[..], 8).unwrap_err();
- assert_eq!(format!("{:?}", error), "Flag value offset goes beyond the end of the file.");
+ let flag_value_list = create_test_flag_value_list().as_bytes();
+ let error = find_boolean_flag_value(&flag_value_list[..], 8).unwrap_err();
+ assert_eq!(
+ format!("{:?}", error),
+ "InvalidStorageFileOffset(Flag value offset goes beyond the end of the file.)"
+ );
}
#[test]
// this test point locks down query error when file has a higher version
fn test_higher_version_storage_file() {
- let mut value_list = create_test_flag_value_list().unwrap();
+ let mut value_list = create_test_flag_value_list();
value_list.header.version = crate::FILE_VERSION + 1;
let flag_value = value_list.as_bytes();
- let error = get_boolean_flag_value(&flag_value[..], 4).unwrap_err();
+ let error = find_boolean_flag_value(&flag_value[..], 4).unwrap_err();
assert_eq!(
format!("{:?}", error),
format!(
- "Cannot read storage file with a higher version of {} with lib version {}",
+ "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
crate::FILE_VERSION + 1,
crate::FILE_VERSION
)
diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs
index a9f5e21..84e0e90 100644
--- a/tools/aconfig/aconfig_storage_file/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_file/src/lib.rs
@@ -15,37 +15,56 @@
*/
//! `aconfig_storage_file` is a crate that defines aconfig storage file format, it
-//! also includes apis to read flags from storage files
+//! also includes apis to read flags from storage files. It provides three apis to
+//! interface with storage files:
+//!
+//! 1, function to get package flag value start offset
+//! pub fn get_package_offset(container: &str, package: &str) -> `Result<Option<PackageOffset>>>`
+//!
+//! 2, function to get flag offset within a specific package
+//! pub fn get_flag_offset(container: &str, package_id: u32, flag: &str) -> `Result<Option<u16>>>`
+//!
+//! 3, function to get the actual flag value given the global offset (combined package and
+//! flag offset).
+//! pub fn get_boolean_flag_value(container: &str, offset: u32) -> `Result<bool>`
+//!
+//! Note these are low level apis that are expected to be only used in auto generated flag
+//! apis. DO NOT DIRECTLY USE THESE APIS IN YOUR SOURCE CODE. For auto generated flag apis
+//! please refer to the g3doc go/android-flags
pub mod flag_table;
pub mod flag_value;
-pub mod package_table;
-
-#[cfg(feature = "cargo")]
pub mod mapped_file;
+pub mod package_table;
+pub mod protos;
-mod protos;
#[cfg(test)]
mod test_utils;
-use anyhow::{anyhow, Result};
+use anyhow::anyhow;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
-pub use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode};
+pub use crate::flag_table::{FlagOffset, FlagTable, FlagTableHeader, FlagTableNode};
pub use crate::flag_value::{FlagValueHeader, FlagValueList};
-pub use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode};
+pub use crate::package_table::{PackageOffset, PackageTable, PackageTableHeader, PackageTableNode};
+pub use crate::protos::ProtoStorageFiles;
+
+use crate::AconfigStorageError::{BytesParseFail, HashTableSizeLimit};
/// Storage file version
pub const FILE_VERSION: u32 = 1;
/// Good hash table prime number
-pub const HASH_PRIMES: [u32; 29] = [
+pub(crate) const HASH_PRIMES: [u32; 29] = [
7, 17, 29, 53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593, 49157, 98317, 196613, 393241,
786433, 1572869, 3145739, 6291469, 12582917, 25165843, 50331653, 100663319, 201326611,
402653189, 805306457, 1610612741,
];
+/// Storage file location pb file
+pub const STORAGE_LOCATION_FILE: &str = "/metadata/aconfig/storage_files.pb";
+
/// Storage file type enum
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum StorageFileSelection {
@@ -69,46 +88,483 @@
/// Get the right hash table size given number of entries in the table. Use a
/// load factor of 0.5 for performance.
-pub fn get_table_size(entries: u32) -> Result<u32> {
+pub fn get_table_size(entries: u32) -> Result<u32, AconfigStorageError> {
HASH_PRIMES
.iter()
.find(|&&num| num >= 2 * entries)
.copied()
- .ok_or(anyhow!("Number of packages is too large"))
+ .ok_or(HashTableSizeLimit(anyhow!("Number of items in a hash table exceeds limit")))
}
/// Get the corresponding bucket index given the key and number of buckets
-pub fn get_bucket_index<T: Hash>(val: &T, num_buckets: u32) -> u32 {
+pub(crate) fn get_bucket_index<T: Hash>(val: &T, num_buckets: u32) -> u32 {
let mut s = DefaultHasher::new();
val.hash(&mut s);
(s.finish() % num_buckets as u64) as u32
}
/// Read and parse bytes as u8
-pub fn read_u8_from_bytes(buf: &[u8], head: &mut usize) -> Result<u8> {
- let val = u8::from_le_bytes(buf[*head..*head + 1].try_into()?);
+pub(crate) fn read_u8_from_bytes(buf: &[u8], head: &mut usize) -> Result<u8, AconfigStorageError> {
+ let val =
+ u8::from_le_bytes(buf[*head..*head + 1].try_into().map_err(|errmsg| {
+ BytesParseFail(anyhow!("fail to parse u8 from bytes: {}", errmsg))
+ })?);
*head += 1;
Ok(val)
}
/// Read and parse bytes as u16
-pub fn read_u16_from_bytes(buf: &[u8], head: &mut usize) -> Result<u16> {
- let val = u16::from_le_bytes(buf[*head..*head + 2].try_into()?);
+pub(crate) fn read_u16_from_bytes(
+ buf: &[u8],
+ head: &mut usize,
+) -> Result<u16, AconfigStorageError> {
+ let val =
+ u16::from_le_bytes(buf[*head..*head + 2].try_into().map_err(|errmsg| {
+ BytesParseFail(anyhow!("fail to parse u16 from bytes: {}", errmsg))
+ })?);
*head += 2;
Ok(val)
}
/// Read and parse bytes as u32
-pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32> {
- let val = u32::from_le_bytes(buf[*head..*head + 4].try_into()?);
+pub(crate) fn read_u32_from_bytes(
+ buf: &[u8],
+ head: &mut usize,
+) -> Result<u32, AconfigStorageError> {
+ let val =
+ u32::from_le_bytes(buf[*head..*head + 4].try_into().map_err(|errmsg| {
+ BytesParseFail(anyhow!("fail to parse u32 from bytes: {}", errmsg))
+ })?);
*head += 4;
Ok(val)
}
/// Read and parse bytes as string
-pub fn read_str_from_bytes(buf: &[u8], head: &mut usize) -> Result<String> {
+pub(crate) fn read_str_from_bytes(
+ buf: &[u8],
+ head: &mut usize,
+) -> Result<String, AconfigStorageError> {
let num_bytes = read_u32_from_bytes(buf, head)? as usize;
- let val = String::from_utf8(buf[*head..*head + num_bytes].to_vec())?;
+ let val = String::from_utf8(buf[*head..*head + num_bytes].to_vec())
+ .map_err(|errmsg| BytesParseFail(anyhow!("fail to parse string from bytes: {}", errmsg)))?;
*head += num_bytes;
Ok(val)
}
+
+/// Storage query api error
+#[non_exhaustive]
+#[derive(thiserror::Error, Debug)]
+pub enum AconfigStorageError {
+ #[error("failed to read the file")]
+ FileReadFail(#[source] anyhow::Error),
+
+ #[error("fail to parse protobuf")]
+ ProtobufParseFail(#[source] anyhow::Error),
+
+ #[error("storage files not found for this container")]
+ StorageFileNotFound(#[source] anyhow::Error),
+
+ #[error("fail to map storage file")]
+ MapFileFail(#[source] anyhow::Error),
+
+ #[error("number of items in hash table exceed limit")]
+ HashTableSizeLimit(#[source] anyhow::Error),
+
+ #[error("failed to parse bytes into data")]
+ BytesParseFail(#[source] anyhow::Error),
+
+ #[error("cannot parse storage files with a higher version")]
+ HigherStorageFileVersion(#[source] anyhow::Error),
+
+ #[error("invalid storage file byte offset")]
+ InvalidStorageFileOffset(#[source] anyhow::Error),
+}
+
+/// Get package start offset implementation
+pub fn get_package_offset_impl(
+ pb_file: &str,
+ container: &str,
+ package: &str,
+) -> Result<Option<PackageOffset>, AconfigStorageError> {
+ let mapped_file =
+ crate::mapped_file::get_mapped_file(pb_file, container, StorageFileSelection::PackageMap)?;
+ crate::package_table::find_package_offset(&mapped_file, package)
+}
+
+/// Get flag offset implementation
+pub fn get_flag_offset_impl(
+ pb_file: &str,
+ container: &str,
+ package_id: u32,
+ flag: &str,
+) -> Result<Option<FlagOffset>, AconfigStorageError> {
+ let mapped_file =
+ crate::mapped_file::get_mapped_file(pb_file, container, StorageFileSelection::FlagMap)?;
+ crate::flag_table::find_flag_offset(&mapped_file, package_id, flag)
+}
+
+/// Get boolean flag value implementation
+pub fn get_boolean_flag_value_impl(
+ pb_file: &str,
+ container: &str,
+ offset: u32,
+) -> Result<bool, AconfigStorageError> {
+ let mapped_file =
+ crate::mapped_file::get_mapped_file(pb_file, container, StorageFileSelection::FlagVal)?;
+ crate::flag_value::find_boolean_flag_value(&mapped_file, offset)
+}
+
+/// Get package start offset for flags given the container and package name.
+///
+/// This function would map the corresponding package map file if it has not been mapped yet,
+/// and then look for the target package in this mapped file.
+///
+/// If a package is found, it returns Ok(Some(PackageOffset))
+/// If a package is not found, it returns Ok(None)
+/// If errors out such as no such package map file is found, it returns an Err(errmsg)
+pub fn get_package_offset(
+ container: &str,
+ package: &str,
+) -> Result<Option<PackageOffset>, AconfigStorageError> {
+ get_package_offset_impl(STORAGE_LOCATION_FILE, container, package)
+}
+
+/// Get flag offset within a package given the container name, package id and flag name.
+///
+/// This function would map the corresponding flag map file if it has not been mapped yet,
+/// and then look for the target flag in this mapped file.
+///
+/// If a flag is found, it returns Ok(Some(u16))
+/// If a flag is not found, it returns Ok(None)
+/// If errors out such as no such flag map file is found, it returns an Err(errmsg)
+pub fn get_flag_offset(
+ container: &str,
+ package_id: u32,
+ flag: &str,
+) -> Result<Option<FlagOffset>, AconfigStorageError> {
+ get_flag_offset_impl(STORAGE_LOCATION_FILE, container, package_id, flag)
+}
+
+/// Get the boolean flag value given the container name and flag global offset
+///
+/// This function would map the corresponding flag value file if it has not been mapped yet,
+/// and then look for the target flag value at the specified offset.
+///
+/// If the flag value file is successfully mapped and the provided offset is valid, it returns
+/// the boolean flag value, otherwise it returns the error message.
+pub fn get_boolean_flag_value(container: &str, offset: u32) -> Result<bool, AconfigStorageError> {
+ get_boolean_flag_value_impl(STORAGE_LOCATION_FILE, container, offset)
+}
+
+#[cxx::bridge]
+mod ffi {
+    // Package table query return for cc interop
+ pub struct PackageOffsetQueryCXX {
+ pub query_success: bool,
+ pub error_message: String,
+ pub package_exists: bool,
+ pub package_id: u32,
+ pub boolean_offset: u32,
+ }
+
+    // Flag table query return for cc interop
+ pub struct FlagOffsetQueryCXX {
+ pub query_success: bool,
+ pub error_message: String,
+ pub flag_exists: bool,
+ pub flag_offset: u16,
+ }
+
+    // Flag value query return for cc interop
+ pub struct BooleanFlagValueQueryCXX {
+ pub query_success: bool,
+ pub error_message: String,
+ pub flag_value: bool,
+ }
+
+ // Rust export to c++
+ extern "Rust" {
+ pub fn get_package_offset_cxx_impl(
+ pb_file: &str,
+ container: &str,
+ package: &str,
+ ) -> PackageOffsetQueryCXX;
+
+ pub fn get_flag_offset_cxx_impl(
+ pb_file: &str,
+ container: &str,
+ package_id: u32,
+ flag: &str,
+ ) -> FlagOffsetQueryCXX;
+
+ pub fn get_boolean_flag_value_cxx_impl(
+ pb_file: &str,
+ container: &str,
+ offset: u32,
+ ) -> BooleanFlagValueQueryCXX;
+
+ pub fn get_package_offset_cxx(container: &str, package: &str) -> PackageOffsetQueryCXX;
+
+ pub fn get_flag_offset_cxx(
+ container: &str,
+ package_id: u32,
+ flag: &str,
+ ) -> FlagOffsetQueryCXX;
+
+ pub fn get_boolean_flag_value_cxx(container: &str, offset: u32)
+ -> BooleanFlagValueQueryCXX;
+ }
+}
+
+/// Get package start offset impl cc interop
+pub fn get_package_offset_cxx_impl(
+ pb_file: &str,
+ container: &str,
+ package: &str,
+) -> ffi::PackageOffsetQueryCXX {
+ ffi::PackageOffsetQueryCXX::new(get_package_offset_impl(pb_file, container, package))
+}
+
+/// Get flag start offset impl cc interop
+pub fn get_flag_offset_cxx_impl(
+ pb_file: &str,
+ container: &str,
+ package_id: u32,
+ flag: &str,
+) -> ffi::FlagOffsetQueryCXX {
+ ffi::FlagOffsetQueryCXX::new(get_flag_offset_impl(pb_file, container, package_id, flag))
+}
+
+/// Get boolean flag value impl cc interop
+pub fn get_boolean_flag_value_cxx_impl(
+ pb_file: &str,
+ container: &str,
+ offset: u32,
+) -> ffi::BooleanFlagValueQueryCXX {
+ ffi::BooleanFlagValueQueryCXX::new(get_boolean_flag_value_impl(pb_file, container, offset))
+}
+
+/// Get package start offset cc interop
+pub fn get_package_offset_cxx(container: &str, package: &str) -> ffi::PackageOffsetQueryCXX {
+ ffi::PackageOffsetQueryCXX::new(get_package_offset(container, package))
+}
+
+/// Get flag start offset cc interop
+pub fn get_flag_offset_cxx(
+ container: &str,
+ package_id: u32,
+ flag: &str,
+) -> ffi::FlagOffsetQueryCXX {
+ ffi::FlagOffsetQueryCXX::new(get_flag_offset(container, package_id, flag))
+}
+
+/// Get boolean flag value cc interop
+pub fn get_boolean_flag_value_cxx(container: &str, offset: u32) -> ffi::BooleanFlagValueQueryCXX {
+ ffi::BooleanFlagValueQueryCXX::new(get_boolean_flag_value(container, offset))
+}
+
+impl ffi::PackageOffsetQueryCXX {
+ pub(crate) fn new(offset_result: Result<Option<PackageOffset>, AconfigStorageError>) -> Self {
+ match offset_result {
+ Ok(offset_opt) => match offset_opt {
+ Some(offset) => Self {
+ query_success: true,
+ error_message: String::from(""),
+ package_exists: true,
+ package_id: offset.package_id,
+ boolean_offset: offset.boolean_offset,
+ },
+ None => Self {
+ query_success: true,
+ error_message: String::from(""),
+ package_exists: false,
+ package_id: 0,
+ boolean_offset: 0,
+ },
+ },
+ Err(errmsg) => Self {
+ query_success: false,
+ error_message: format!("{:?}", errmsg),
+ package_exists: false,
+ package_id: 0,
+ boolean_offset: 0,
+ },
+ }
+ }
+}
+
+impl ffi::FlagOffsetQueryCXX {
+ pub(crate) fn new(offset_result: Result<Option<FlagOffset>, AconfigStorageError>) -> Self {
+ match offset_result {
+ Ok(offset_opt) => match offset_opt {
+ Some(offset) => Self {
+ query_success: true,
+ error_message: String::from(""),
+ flag_exists: true,
+ flag_offset: offset,
+ },
+ None => Self {
+ query_success: true,
+ error_message: String::from(""),
+ flag_exists: false,
+ flag_offset: 0,
+ },
+ },
+ Err(errmsg) => Self {
+ query_success: false,
+ error_message: format!("{:?}", errmsg),
+ flag_exists: false,
+ flag_offset: 0,
+ },
+ }
+ }
+}
+
+impl ffi::BooleanFlagValueQueryCXX {
+ pub(crate) fn new(value_result: Result<bool, AconfigStorageError>) -> Self {
+ match value_result {
+ Ok(value) => {
+ Self { query_success: true, error_message: String::from(""), flag_value: value }
+ }
+ Err(errmsg) => Self {
+ query_success: false,
+ error_message: format!("{:?}", errmsg),
+ flag_value: false,
+ },
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::test_utils::{write_storage_text_to_temp_file, TestStorageFileSet};
+
+ fn create_test_storage_files(read_only: bool) -> TestStorageFileSet {
+ TestStorageFileSet::new(
+ "./tests/package.map",
+ "./tests/flag.map",
+ "./tests/flag.val",
+ read_only,
+ )
+ .unwrap()
+ }
+
+ #[test]
+ // this test point locks down flag package offset query
+ fn test_package_offset_query() {
+ let ro_files = create_test_storage_files(true);
+ let text_proto = format!(
+ r#"
+files {{
+ version: 0
+ container: "system"
+ package_map: "{}"
+ flag_map: "{}"
+ flag_val: "{}"
+ timestamp: 12345
+}}
+"#,
+ ro_files.package_map.name, ro_files.flag_map.name, ro_files.flag_val.name
+ );
+
+ let file = write_storage_text_to_temp_file(&text_proto).unwrap();
+ let file_full_path = file.path().display().to_string();
+ let package_offset = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_1",
+ )
+ .unwrap()
+ .unwrap();
+ let expected_package_offset = PackageOffset { package_id: 0, boolean_offset: 0 };
+ assert_eq!(package_offset, expected_package_offset);
+
+ let package_offset = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_2",
+ )
+ .unwrap()
+ .unwrap();
+ let expected_package_offset = PackageOffset { package_id: 1, boolean_offset: 3 };
+ assert_eq!(package_offset, expected_package_offset);
+
+ let package_offset = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_4",
+ )
+ .unwrap()
+ .unwrap();
+ let expected_package_offset = PackageOffset { package_id: 2, boolean_offset: 6 };
+ assert_eq!(package_offset, expected_package_offset);
+ }
+
+ #[test]
+ // this test point locks down flag offset query
+ fn test_flag_offset_query() {
+ let ro_files = create_test_storage_files(true);
+ let text_proto = format!(
+ r#"
+files {{
+ version: 0
+ container: "system"
+ package_map: "{}"
+ flag_map: "{}"
+ flag_val: "{}"
+ timestamp: 12345
+}}
+"#,
+ ro_files.package_map.name, ro_files.flag_map.name, ro_files.flag_val.name
+ );
+
+ let file = write_storage_text_to_temp_file(&text_proto).unwrap();
+ let file_full_path = file.path().display().to_string();
+ let baseline = vec![
+ (0, "enabled_ro", 1u16),
+ (0, "enabled_rw", 2u16),
+ (1, "disabled_ro", 0u16),
+ (2, "enabled_ro", 1u16),
+ (1, "enabled_fixed_ro", 1u16),
+ (1, "enabled_ro", 2u16),
+ (2, "enabled_fixed_ro", 0u16),
+ (0, "disabled_rw", 0u16),
+ ];
+ for (package_id, flag_name, expected_offset) in baseline.into_iter() {
+ let flag_offset =
+ get_flag_offset_impl(&file_full_path, "system", package_id, flag_name)
+ .unwrap()
+ .unwrap();
+ assert_eq!(flag_offset, expected_offset);
+ }
+ }
+
+ #[test]
+    // this test point locks down flag value query
+ fn test_flag_value_query() {
+ let ro_files = create_test_storage_files(true);
+ let text_proto = format!(
+ r#"
+files {{
+ version: 0
+ container: "system"
+ package_map: "{}"
+ flag_map: "{}"
+ flag_val: "{}"
+ timestamp: 12345
+}}
+"#,
+ ro_files.package_map.name, ro_files.flag_map.name, ro_files.flag_val.name
+ );
+
+ let file = write_storage_text_to_temp_file(&text_proto).unwrap();
+ let file_full_path = file.path().display().to_string();
+ let baseline: Vec<bool> = vec![false; 8];
+ for (offset, expected_value) in baseline.into_iter().enumerate() {
+ let flag_value =
+ get_boolean_flag_value_impl(&file_full_path, "system", offset as u32).unwrap();
+ assert_eq!(flag_value, expected_value);
+ }
+ }
+}
diff --git a/tools/aconfig/aconfig_storage_file/src/mapped_file.rs b/tools/aconfig/aconfig_storage_file/src/mapped_file.rs
index 4f65df0..d8f2570 100644
--- a/tools/aconfig/aconfig_storage_file/src/mapped_file.rs
+++ b/tools/aconfig/aconfig_storage_file/src/mapped_file.rs
@@ -19,13 +19,16 @@
use std::io::{BufReader, Read};
use std::sync::{Arc, Mutex};
-use anyhow::{bail, ensure, Result};
+use anyhow::anyhow;
use memmap2::Mmap;
use once_cell::sync::Lazy;
use crate::protos::{
storage_files::try_from_binary_proto, ProtoStorageFileInfo, ProtoStorageFiles,
};
+use crate::AconfigStorageError::{
+ self, FileReadFail, MapFileFail, ProtobufParseFail, StorageFileNotFound,
+};
use crate::StorageFileSelection;
/// Cache for already mapped files
@@ -46,30 +49,43 @@
fn find_container_storage_location(
location_pb_file: &str,
container: &str,
-) -> Result<ProtoStorageFileInfo> {
- let file = File::open(location_pb_file)?;
+) -> Result<ProtoStorageFileInfo, AconfigStorageError> {
+ let file = File::open(location_pb_file).map_err(|errmsg| {
+ FileReadFail(anyhow!("Failed to open file {}: {}", location_pb_file, errmsg))
+ })?;
let mut reader = BufReader::new(file);
let mut bytes = Vec::new();
- reader.read_to_end(&mut bytes)?;
-
- let storage_locations: ProtoStorageFiles = try_from_binary_proto(&bytes)?;
+ reader.read_to_end(&mut bytes).map_err(|errmsg| {
+ FileReadFail(anyhow!("Failed to read file {}: {}", location_pb_file, errmsg))
+ })?;
+ let storage_locations: ProtoStorageFiles = try_from_binary_proto(&bytes).map_err(|errmsg| {
+ ProtobufParseFail(anyhow!(
+ "Failed to parse storage location pb file {}: {}",
+ location_pb_file,
+ errmsg
+ ))
+ })?;
for location_info in storage_locations.files.iter() {
if location_info.container() == container {
return Ok(location_info.clone());
}
}
- bail!("Storage file does not exist for {}", container)
+ Err(StorageFileNotFound(anyhow!("Storage file does not exist for {}", container)))
}
/// Verify the file is read only and then map it
-fn verify_read_only_and_map(file_path: &str) -> Result<Mmap> {
- let file = File::open(file_path)?;
- let metadata = file.metadata()?;
- ensure!(
- metadata.permissions().readonly(),
- "Cannot mmap file {} as it is not read only",
- file_path
- );
+fn verify_read_only_and_map(file_path: &str) -> Result<Mmap, AconfigStorageError> {
+ let file = File::open(file_path)
+ .map_err(|errmsg| FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg)))?;
+ let metadata = file.metadata().map_err(|errmsg| {
+ FileReadFail(anyhow!("Failed to find metadata for {}: {}", file_path, errmsg))
+ })?;
+
+ // ensure storage file is read only
+ if !metadata.permissions().readonly() {
+ return Err(MapFileFail(anyhow!("fail to map non read only storage file {}", file_path)));
+ }
+
// SAFETY:
//
// Mmap constructors are unsafe as it would have undefined behaviors if the file
@@ -83,14 +99,19 @@
// We should remove this restriction if we need to support mmap non read only file in
// the future (by making this api unsafe). But for now, all flags are boot stable, so
// the boot flag file copy should be readonly.
- unsafe { Ok(Mmap::map(&file)?) }
+ unsafe {
+ let mapped_file = Mmap::map(&file).map_err(|errmsg| {
+ MapFileFail(anyhow!("fail to map storage file {}: {}", file_path, errmsg))
+ })?;
+ Ok(mapped_file)
+ }
}
/// Map all storage files for a particular container
fn map_container_storage_files(
location_pb_file: &str,
container: &str,
-) -> Result<MappedStorageFileSet> {
+) -> Result<MappedStorageFileSet, AconfigStorageError> {
let files_location = find_container_storage_location(location_pb_file, container)?;
let package_map = Arc::new(verify_read_only_and_map(files_location.package_map())?);
let flag_map = Arc::new(verify_read_only_and_map(files_location.flag_map())?);
@@ -99,11 +120,11 @@
}
/// Get a mapped storage file given the container and file type
-pub fn get_mapped_file(
+pub(crate) fn get_mapped_file(
location_pb_file: &str,
container: &str,
file_selection: StorageFileSelection,
-) -> Result<Arc<Mmap>> {
+) -> Result<Arc<Mmap>, AconfigStorageError> {
let mut all_mapped_files = ALL_MAPPED_FILES.lock().unwrap();
match all_mapped_files.get(container) {
Some(mapped_files) => Ok(match file_selection {
@@ -127,7 +148,7 @@
#[cfg(test)]
mod tests {
use super::*;
- use crate::test_utils::{get_binary_storage_proto_bytes, write_bytes_to_temp_file};
+ use crate::test_utils::{write_storage_text_to_temp_file, TestStorageFileSet};
#[test]
fn test_find_storage_file_location() {
@@ -149,10 +170,8 @@
timestamp: 54321
}
"#;
- let binary_proto_bytes = get_binary_storage_proto_bytes(text_proto).unwrap();
- let file = write_bytes_to_temp_file(&binary_proto_bytes).unwrap();
+ let file = write_storage_text_to_temp_file(text_proto).unwrap();
let file_full_path = file.path().display().to_string();
-
let file_info = find_container_storage_location(&file_full_path, "system").unwrap();
assert_eq!(file_info.version(), 0);
assert_eq!(file_info.container(), "system");
@@ -170,7 +189,10 @@
assert_eq!(file_info.timestamp(), 54321);
let err = find_container_storage_location(&file_full_path, "vendor").unwrap_err();
- assert_eq!(format!("{:?}", err), "Storage file does not exist for vendor");
+ assert_eq!(
+ format!("{:?}", err),
+ "StorageFileNotFound(Storage file does not exist for vendor)"
+ );
}
fn map_and_verify(
@@ -186,89 +208,121 @@
assert_eq!(mmaped_file[..], content[..]);
}
+ fn create_test_storage_files(read_only: bool) -> TestStorageFileSet {
+ TestStorageFileSet::new(
+ "./tests/package.map",
+ "./tests/flag.map",
+ "./tests/flag.val",
+ read_only,
+ )
+ .unwrap()
+ }
+
#[test]
fn test_mapped_file_contents() {
- let text_proto = r#"
-files {
+ let ro_files = create_test_storage_files(true);
+ let text_proto = format!(
+ r#"
+files {{
version: 0
container: "system"
- package_map: "./tests/package.map"
- flag_map: "./tests/flag.map"
- flag_val: "./tests/flag.val"
+ package_map: "{}"
+ flag_map: "{}"
+ flag_val: "{}"
timestamp: 12345
-}
-"#;
- let binary_proto_bytes = get_binary_storage_proto_bytes(text_proto).unwrap();
- let file = write_bytes_to_temp_file(&binary_proto_bytes).unwrap();
+}}
+"#,
+ ro_files.package_map.name, ro_files.flag_map.name, ro_files.flag_val.name
+ );
+
+ let file = write_storage_text_to_temp_file(&text_proto).unwrap();
let file_full_path = file.path().display().to_string();
-
- map_and_verify(&file_full_path, StorageFileSelection::PackageMap, "./tests/package.map");
-
- map_and_verify(&file_full_path, StorageFileSelection::FlagMap, "./tests/flag.map");
-
- map_and_verify(&file_full_path, StorageFileSelection::FlagVal, "./tests/flag.val");
+ map_and_verify(
+ &file_full_path,
+ StorageFileSelection::PackageMap,
+ &ro_files.package_map.name,
+ );
+ map_and_verify(&file_full_path, StorageFileSelection::FlagMap, &ro_files.flag_map.name);
+ map_and_verify(&file_full_path, StorageFileSelection::FlagVal, &ro_files.flag_val.name);
}
#[test]
fn test_map_non_read_only_file() {
- let text_proto = r#"
-files {
+ let ro_files = create_test_storage_files(true);
+ let rw_files = create_test_storage_files(false);
+ let text_proto = format!(
+ r#"
+files {{
version: 0
container: "system"
- package_map: "./tests/rw.package.map"
- flag_map: "./tests/rw.flag.map"
- flag_val: "./tests/rw.flag.val"
+ package_map: "{}"
+ flag_map: "{}"
+ flag_val: "{}"
timestamp: 12345
-}
-"#;
- let binary_proto_bytes = get_binary_storage_proto_bytes(text_proto).unwrap();
- let file = write_bytes_to_temp_file(&binary_proto_bytes).unwrap();
- let file_full_path = file.path().display().to_string();
-
- let error = map_container_storage_files(&file_full_path, "system").unwrap_err();
- assert_eq!(
- format!("{:?}", error),
- "Cannot mmap file ./tests/rw.package.map as it is not read only"
+}}
+"#,
+ rw_files.package_map.name, ro_files.flag_map.name, ro_files.flag_val.name
);
- let text_proto = r#"
-files {
- version: 0
- container: "system"
- package_map: "./tests/package.map"
- flag_map: "./tests/rw.flag.map"
- flag_val: "./tests/rw.flag.val"
- timestamp: 12345
-}
-"#;
- let binary_proto_bytes = get_binary_storage_proto_bytes(text_proto).unwrap();
- let file = write_bytes_to_temp_file(&binary_proto_bytes).unwrap();
+ let file = write_storage_text_to_temp_file(&text_proto).unwrap();
let file_full_path = file.path().display().to_string();
-
let error = map_container_storage_files(&file_full_path, "system").unwrap_err();
assert_eq!(
format!("{:?}", error),
- "Cannot mmap file ./tests/rw.flag.map as it is not read only"
+ format!(
+ "MapFileFail(fail to map non read only storage file {})",
+ rw_files.package_map.name
+ )
);
- let text_proto = r#"
-files {
+ let text_proto = format!(
+ r#"
+files {{
version: 0
container: "system"
- package_map: "./tests/package.map"
- flag_map: "./tests/flag.map"
- flag_val: "./tests/rw.flag.val"
+ package_map: "{}"
+ flag_map: "{}"
+ flag_val: "{}"
timestamp: 12345
-}
-"#;
- let binary_proto_bytes = get_binary_storage_proto_bytes(text_proto).unwrap();
- let file = write_bytes_to_temp_file(&binary_proto_bytes).unwrap();
- let file_full_path = file.path().display().to_string();
+}}
+"#,
+ ro_files.package_map.name, rw_files.flag_map.name, ro_files.flag_val.name
+ );
+ let file = write_storage_text_to_temp_file(&text_proto).unwrap();
+ let file_full_path = file.path().display().to_string();
let error = map_container_storage_files(&file_full_path, "system").unwrap_err();
assert_eq!(
format!("{:?}", error),
- "Cannot mmap file ./tests/rw.flag.val as it is not read only"
+ format!(
+ "MapFileFail(fail to map non read only storage file {})",
+ rw_files.flag_map.name
+ )
+ );
+
+ let text_proto = format!(
+ r#"
+files {{
+ version: 0
+ container: "system"
+ package_map: "{}"
+ flag_map: "{}"
+ flag_val: "{}"
+ timestamp: 12345
+}}
+"#,
+ ro_files.package_map.name, ro_files.flag_map.name, rw_files.flag_val.name
+ );
+
+ let file = write_storage_text_to_temp_file(&text_proto).unwrap();
+ let file_full_path = file.path().display().to_string();
+ let error = map_container_storage_files(&file_full_path, "system").unwrap_err();
+ assert_eq!(
+ format!("{:?}", error),
+ format!(
+ "MapFileFail(fail to map non read only storage file {})",
+ rw_files.flag_val.name
+ )
);
}
}
diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs
index a3ad6ec..7308d7b 100644
--- a/tools/aconfig/aconfig_storage_file/src/package_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs
@@ -17,8 +17,9 @@
//! package table module defines the package table file format and methods for serialization
//! and deserialization
+use crate::AconfigStorageError::{self, BytesParseFail, HigherStorageFileVersion};
use crate::{get_bucket_index, read_str_from_bytes, read_u32_from_bytes};
-use anyhow::{anyhow, Result};
+use anyhow::anyhow;
/// Package table header struct
#[derive(PartialEq, Debug)]
@@ -47,7 +48,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let mut head = 0;
Ok(Self {
version: read_u32_from_bytes(bytes, &mut head)?,
@@ -85,7 +86,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let mut head = 0;
let node = Self {
package_name: read_str_from_bytes(bytes, &mut head)?,
@@ -127,7 +128,7 @@
}
/// Deserialize from bytes
- pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+ pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
let header = PackageTableHeader::from_bytes(bytes)?;
let num_packages = header.num_packages;
let num_buckets = crate::get_table_size(num_packages)?;
@@ -144,7 +145,8 @@
head += node.as_bytes().len();
Ok(node)
})
- .collect::<Result<Vec<_>>>()?;
+ .collect::<Result<Vec<_>, AconfigStorageError>>()
+ .map_err(|errmsg| BytesParseFail(anyhow!("fail to parse package table: {}", errmsg)))?;
let table = Self { header, buckets, nodes };
Ok(table)
@@ -159,18 +161,21 @@
}
/// Query package id and start offset
-pub fn find_package_offset(buf: &[u8], package: &str) -> Result<Option<PackageOffset>> {
+pub fn find_package_offset(
+ buf: &[u8],
+ package: &str,
+) -> Result<Option<PackageOffset>, AconfigStorageError> {
let interpreted_header = PackageTableHeader::from_bytes(buf)?;
if interpreted_header.version > crate::FILE_VERSION {
- return Err(anyhow!(
+ return Err(HigherStorageFileVersion(anyhow!(
"Cannot read storage file with a higher version of {} with lib version {}",
interpreted_header.version,
crate::FILE_VERSION
- ));
+ )));
}
let num_buckets = (interpreted_header.node_offset - interpreted_header.bucket_offset) / 4;
- let bucket_index = PackageTableNode::find_bucket_index(&package, num_buckets);
+ let bucket_index = PackageTableNode::find_bucket_index(package, num_buckets);
let mut pos = (interpreted_header.bucket_offset + 4 * bucket_index) as usize;
let mut package_node_offset = read_u32_from_bytes(buf, &mut pos)? as usize;
@@ -199,7 +204,7 @@
mod tests {
use super::*;
- pub fn create_test_package_table() -> Result<PackageTable> {
+ pub fn create_test_package_table() -> PackageTable {
let header = PackageTableHeader {
version: crate::FILE_VERSION,
container: String::from("system"),
@@ -228,14 +233,13 @@
next_offset: None,
};
let nodes = vec![first_node, second_node, third_node];
- Ok(PackageTable { header, buckets, nodes })
+ PackageTable { header, buckets, nodes }
}
#[test]
// this test point locks down the table serialization
fn test_serialization() {
- let package_table = create_test_package_table().unwrap();
-
+ let package_table = create_test_package_table();
let header: &PackageTableHeader = &package_table.header;
let reinterpreted_header = PackageTableHeader::from_bytes(&header.as_bytes());
assert!(reinterpreted_header.is_ok());
@@ -255,7 +259,7 @@
#[test]
// this test point locks down table query
fn test_package_query() {
- let package_table = create_test_package_table().unwrap().as_bytes();
+ let package_table = create_test_package_table().as_bytes();
let package_offset =
find_package_offset(&package_table[..], "com.android.aconfig.storage.test_1")
.unwrap()
@@ -280,7 +284,7 @@
// this test point locks down table query of a non exist package
fn test_not_existed_package_query() {
// this will land at an empty bucket
- let package_table = create_test_package_table().unwrap().as_bytes();
+ let package_table = create_test_package_table().as_bytes();
let package_offset =
find_package_offset(&package_table[..], "com.android.aconfig.storage.test_3").unwrap();
assert_eq!(package_offset, None);
@@ -293,7 +297,7 @@
#[test]
// this test point locks down query error when file has a higher version
fn test_higher_version_storage_file() {
- let mut table = create_test_package_table().unwrap();
+ let mut table = create_test_package_table();
table.header.version = crate::FILE_VERSION + 1;
let package_table = table.as_bytes();
let error = find_package_offset(&package_table[..], "com.android.aconfig.storage.test_1")
@@ -301,7 +305,7 @@
assert_eq!(
format!("{:?}", error),
format!(
- "Cannot read storage file with a higher version of {} with lib version {}",
+ "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
crate::FILE_VERSION + 1,
crate::FILE_VERSION
)
diff --git a/tools/aconfig/aconfig_storage_file/src/test_utils.rs b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
index c468683..7905d51 100644
--- a/tools/aconfig/aconfig_storage_file/src/test_utils.rs
+++ b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
@@ -17,18 +17,76 @@
use crate::protos::ProtoStorageFiles;
use anyhow::Result;
use protobuf::Message;
+use std::fs;
use std::io::Write;
use tempfile::NamedTempFile;
-pub fn get_binary_storage_proto_bytes(text_proto: &str) -> Result<Vec<u8>> {
+pub(crate) fn get_binary_storage_proto_bytes(text_proto: &str) -> Result<Vec<u8>> {
let storage_files: ProtoStorageFiles = protobuf::text_format::parse_from_str(text_proto)?;
let mut binary_proto = Vec::new();
storage_files.write_to_vec(&mut binary_proto)?;
Ok(binary_proto)
}
-pub fn write_bytes_to_temp_file(bytes: &[u8]) -> Result<NamedTempFile> {
+pub(crate) fn write_storage_text_to_temp_file(text_proto: &str) -> Result<NamedTempFile> {
+ let bytes = get_binary_storage_proto_bytes(text_proto).unwrap();
let mut file = NamedTempFile::new()?;
let _ = file.write_all(&bytes);
Ok(file)
}
+
+fn set_file_read_only(file: &NamedTempFile) {
+ let mut perms = fs::metadata(file.path()).unwrap().permissions();
+ if !perms.readonly() {
+ perms.set_readonly(true);
+ fs::set_permissions(file.path(), perms).unwrap();
+ }
+}
+
+fn set_file_read_write(file: &NamedTempFile) {
+ let mut perms = fs::metadata(file.path()).unwrap().permissions();
+ if perms.readonly() {
+ perms.set_readonly(false);
+ fs::set_permissions(file.path(), perms).unwrap();
+ }
+}
+
+pub(crate) struct TestStorageFile {
+ pub file: NamedTempFile,
+ pub name: String,
+}
+
+impl TestStorageFile {
+ pub(crate) fn new(source_file: &str, read_only: bool) -> Result<Self> {
+ let file = NamedTempFile::new()?;
+ fs::copy(source_file, file.path())?;
+ if read_only {
+ set_file_read_only(&file);
+ } else {
+ set_file_read_write(&file);
+ }
+ let name = file.path().display().to_string();
+ Ok(Self { file, name })
+ }
+}
+
+pub(crate) struct TestStorageFileSet {
+ pub package_map: TestStorageFile,
+ pub flag_map: TestStorageFile,
+ pub flag_val: TestStorageFile,
+}
+
+impl TestStorageFileSet {
+ pub(crate) fn new(
+ package_map_path: &str,
+ flag_map_path: &str,
+ flag_val_path: &str,
+ read_only: bool,
+ ) -> Result<Self> {
+ Ok(Self {
+ package_map: TestStorageFile::new(package_map_path, read_only)?,
+ flag_map: TestStorageFile::new(flag_map_path, read_only)?,
+ flag_val: TestStorageFile::new(flag_val_path, read_only)?,
+ })
+ }
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp
new file mode 100644
index 0000000..b951273
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp
@@ -0,0 +1,42 @@
+rust_test {
+ name: "aconfig_storage.test.rust",
+ srcs: [
+ "storage_lib_rust_test.rs"
+ ],
+ rustlibs: [
+ "libanyhow",
+ "libaconfig_storage_file",
+ "libprotobuf",
+ "libtempfile",
+ ],
+ data: [
+ ":ro.package.map",
+ ":ro.flag.map",
+ ":ro.flag.val",
+ ],
+ test_suites: ["general-tests"],
+}
+
+cc_test {
+ name: "aconfig_storage.test.cpp",
+ srcs: [
+ "storage_lib_cc_test.cpp",
+ ],
+ static_libs: [
+ "libgmock",
+ "libaconfig_storage_protos_cc",
+ "libprotobuf-cpp-lite",
+ "libaconfig_storage_cc",
+ "libbase",
+ "liblog",
+ ],
+ data: [
+ ":ro.package.map",
+ ":ro.flag.map",
+ ":ro.flag.val",
+ ],
+ test_suites: [
+ "device-tests",
+ "general-tests",
+ ],
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/rw.flag.map b/tools/aconfig/aconfig_storage_file/tests/rw.flag.map
deleted file mode 100644
index 43b6f9a..0000000
--- a/tools/aconfig/aconfig_storage_file/tests/rw.flag.map
+++ /dev/null
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/rw.flag.val b/tools/aconfig/aconfig_storage_file/tests/rw.flag.val
deleted file mode 100644
index f39f8d3..0000000
--- a/tools/aconfig/aconfig_storage_file/tests/rw.flag.val
+++ /dev/null
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/rw.package.map b/tools/aconfig/aconfig_storage_file/tests/rw.package.map
deleted file mode 100644
index 8ed4767..0000000
--- a/tools/aconfig/aconfig_storage_file/tests/rw.package.map
+++ /dev/null
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/storage_lib_cc_test.cpp b/tools/aconfig/aconfig_storage_file/tests/storage_lib_cc_test.cpp
new file mode 100644
index 0000000..7d5ba0a
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/storage_lib_cc_test.cpp
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string>
+#include <vector>
+
+#include "aconfig_storage/aconfig_storage.hpp"
+#include <gtest/gtest.h>
+#include <protos/aconfig_storage_metadata.pb.h>
+#include <android-base/file.h>
+
+using android::aconfig_storage_metadata::storage_files;
+using ::android::base::WriteStringToFile;
+using ::aconfig_storage::test_only_api::get_package_offset_impl;
+using ::aconfig_storage::test_only_api::get_flag_offset_impl;
+using ::aconfig_storage::test_only_api::get_boolean_flag_value_impl;
+
+void write_storage_location_pb_to_file(std::string const& file_path) {
+ auto const test_dir = android::base::GetExecutableDirectory();
+ auto proto = storage_files();
+ auto* info = proto.add_files();
+ info->set_version(0);
+ info->set_container("system");
+ info->set_package_map(test_dir + "/tests/tmp.ro.package.map");
+ info->set_flag_map(test_dir + "/tests/tmp.ro.flag.map");
+ info->set_flag_val(test_dir + "/tests/tmp.ro.flag.val");
+ info->set_timestamp(12345);
+
+ auto content = std::string();
+ proto.SerializeToString(&content);
+ ASSERT_TRUE(WriteStringToFile(content, file_path))
+ << "Failed to write a file: " << file_path;
+}
+
+TEST(AconfigStorageTest, test_package_offset_query) {
+ auto pb_file = std::string("/tmp/test_package_offset_query.pb");
+ write_storage_location_pb_to_file(pb_file);
+
+ auto query = get_package_offset_impl(
+ pb_file, "system", "com.android.aconfig.storage.test_1");
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_TRUE(query.package_exists);
+ ASSERT_EQ(query.package_id, 0);
+ ASSERT_EQ(query.boolean_offset, 0);
+
+ query = get_package_offset_impl(
+ pb_file, "system", "com.android.aconfig.storage.test_2");
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_TRUE(query.package_exists);
+ ASSERT_EQ(query.package_id, 1);
+ ASSERT_EQ(query.boolean_offset, 3);
+
+ query = get_package_offset_impl(
+ pb_file, "system", "com.android.aconfig.storage.test_4");
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_TRUE(query.package_exists);
+ ASSERT_EQ(query.package_id, 2);
+ ASSERT_EQ(query.boolean_offset, 6);
+}
+
+TEST(AconfigStorageTest, test_invalid_package_offset_query) {
+ auto pb_file = std::string("/tmp/test_package_offset_query.pb");
+ write_storage_location_pb_to_file(pb_file);
+
+ auto query = get_package_offset_impl(
+ pb_file, "system", "com.android.aconfig.storage.test_3");
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_FALSE(query.package_exists);
+
+ query = get_package_offset_impl(
+ pb_file, "vendor", "com.android.aconfig.storage.test_1");
+ ASSERT_EQ(query.error_message,
+ std::string("StorageFileNotFound(Storage file does not exist for vendor)"));
+ ASSERT_FALSE(query.query_success);
+}
+
+TEST(AconfigStorageTest, test_flag_offset_query) {
+ auto pb_file = std::string("/tmp/test_package_offset_query.pb");
+ write_storage_location_pb_to_file(pb_file);
+
+ auto baseline = std::vector<std::tuple<int, std::string, int>>{
+ {0, "enabled_ro", 1},
+ {0, "enabled_rw", 2},
+ {1, "disabled_ro", 0},
+ {2, "enabled_ro", 1},
+ {1, "enabled_fixed_ro", 1},
+ {1, "enabled_ro", 2},
+ {2, "enabled_fixed_ro", 0},
+ {0, "disabled_rw", 0},
+ };
+ for (auto const&[package_id, flag_name, expected_offset] : baseline) {
+ auto query = get_flag_offset_impl(pb_file, "system", package_id, flag_name);
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_TRUE(query.flag_exists);
+ ASSERT_EQ(query.flag_offset, expected_offset);
+ }
+}
+
+TEST(AconfigStorageTest, test_invalid_flag_offset_query) {
+ auto pb_file = std::string("/tmp/test_invalid_package_offset_query.pb");
+ write_storage_location_pb_to_file(pb_file);
+
+ auto query = get_flag_offset_impl(pb_file, "system", 0, "none_exist");
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_FALSE(query.flag_exists);
+
+ query = get_flag_offset_impl(pb_file, "system", 3, "enabled_ro");
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_FALSE(query.flag_exists);
+
+ query = get_flag_offset_impl(pb_file, "vendor", 0, "enabled_ro");
+ ASSERT_EQ(query.error_message,
+ std::string("StorageFileNotFound(Storage file does not exist for vendor)"));
+ ASSERT_FALSE(query.query_success);
+}
+
+TEST(AconfigStorageTest, test_boolean_flag_value_query) {
+ auto pb_file = std::string("/tmp/test_boolean_flag_value_query.pb");
+ write_storage_location_pb_to_file(pb_file);
+ for (int offset = 0; offset < 8; ++offset) {
+ auto query = get_boolean_flag_value_impl(pb_file, "system", offset);
+ ASSERT_EQ(query.error_message, std::string());
+ ASSERT_TRUE(query.query_success);
+ ASSERT_FALSE(query.flag_value);
+ }
+}
+
+TEST(AconfigStorageTest, test_invalid_boolean_flag_value_query) {
+ auto pb_file = std::string("/tmp/test_invalid_boolean_flag_value_query.pb");
+ write_storage_location_pb_to_file(pb_file);
+
+ auto query = get_boolean_flag_value_impl(pb_file, "vendor", 0);
+ ASSERT_EQ(query.error_message,
+ std::string("StorageFileNotFound(Storage file does not exist for vendor)"));
+ ASSERT_FALSE(query.query_success);
+
+ query = get_boolean_flag_value_impl(pb_file, "system", 8);
+ ASSERT_EQ(query.error_message,
+ std::string("InvalidStorageFileOffset(Flag value offset goes beyond the end of the file.)"));
+ ASSERT_FALSE(query.query_success);
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/storage_lib_rust_test.rs b/tools/aconfig/aconfig_storage_file/tests/storage_lib_rust_test.rs
new file mode 100644
index 0000000..9916915
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/storage_lib_rust_test.rs
@@ -0,0 +1,174 @@
+#[cfg(not(feature = "cargo"))]
+mod aconfig_storage_rust_test {
+ use aconfig_storage_file::{
+ get_boolean_flag_value_impl, get_flag_offset_impl, get_package_offset_impl, PackageOffset,
+ ProtoStorageFiles,
+ };
+ use protobuf::Message;
+ use std::io::Write;
+ use tempfile::NamedTempFile;
+
+ fn write_storage_location_file() -> NamedTempFile {
+ let text_proto = r#"
+files {
+ version: 0
+ container: "system"
+ package_map: "./tests/tmp.ro.package.map"
+ flag_map: "./tests/tmp.ro.flag.map"
+ flag_val: "./tests/tmp.ro.flag.val"
+ timestamp: 12345
+}
+"#;
+ let storage_files: ProtoStorageFiles =
+ protobuf::text_format::parse_from_str(text_proto).unwrap();
+ let mut binary_proto_bytes = Vec::new();
+ storage_files.write_to_vec(&mut binary_proto_bytes).unwrap();
+ let mut file = NamedTempFile::new().unwrap();
+ file.write_all(&binary_proto_bytes).unwrap();
+ file
+ }
+
+ #[test]
+ fn test_package_offset_query() {
+ let file = write_storage_location_file();
+ let file_full_path = file.path().display().to_string();
+
+ let package_offset = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_1",
+ )
+ .unwrap()
+ .unwrap();
+ let expected_package_offset = PackageOffset { package_id: 0, boolean_offset: 0 };
+ assert_eq!(package_offset, expected_package_offset);
+
+ let package_offset = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_2",
+ )
+ .unwrap()
+ .unwrap();
+ let expected_package_offset = PackageOffset { package_id: 1, boolean_offset: 3 };
+ assert_eq!(package_offset, expected_package_offset);
+
+ let package_offset = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_4",
+ )
+ .unwrap()
+ .unwrap();
+ let expected_package_offset = PackageOffset { package_id: 2, boolean_offset: 6 };
+ assert_eq!(package_offset, expected_package_offset);
+
+ let package_offset = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_3",
+ )
+ .unwrap();
+ assert_eq!(package_offset, None);
+ }
+
+ #[test]
+ fn test_invalid_package_offset_query() {
+ let file = write_storage_location_file();
+ let file_full_path = file.path().display().to_string();
+
+ let package_offset_option = get_package_offset_impl(
+ &file_full_path,
+ "system",
+ "com.android.aconfig.storage.test_3",
+ )
+ .unwrap();
+ assert_eq!(package_offset_option, None);
+
+ let err = get_package_offset_impl(
+ &file_full_path,
+ "vendor",
+ "com.android.aconfig.storage.test_1",
+ )
+ .unwrap_err();
+ assert_eq!(
+ format!("{:?}", err),
+ "StorageFileNotFound(Storage file does not exist for vendor)"
+ );
+ }
+
+ #[test]
+ fn test_flag_offset_query() {
+ let file = write_storage_location_file();
+ let file_full_path = file.path().display().to_string();
+
+ let baseline = vec![
+ (0, "enabled_ro", 1u16),
+ (0, "enabled_rw", 2u16),
+ (1, "disabled_ro", 0u16),
+ (2, "enabled_ro", 1u16),
+ (1, "enabled_fixed_ro", 1u16),
+ (1, "enabled_ro", 2u16),
+ (2, "enabled_fixed_ro", 0u16),
+ (0, "disabled_rw", 0u16),
+ ];
+ for (package_id, flag_name, expected_offset) in baseline.into_iter() {
+ let flag_offset =
+ get_flag_offset_impl(&file_full_path, "system", package_id, flag_name)
+ .unwrap()
+ .unwrap();
+ assert_eq!(flag_offset, expected_offset);
+ }
+ }
+
+ #[test]
+ fn test_invalid_flag_offset_query() {
+ let file = write_storage_location_file();
+ let file_full_path = file.path().display().to_string();
+
+ let flag_offset_option =
+ get_flag_offset_impl(&file_full_path, "system", 0, "none_exist").unwrap();
+ assert_eq!(flag_offset_option, None);
+
+ let flag_offset_option =
+ get_flag_offset_impl(&file_full_path, "system", 3, "enabled_ro").unwrap();
+ assert_eq!(flag_offset_option, None);
+
+ let err = get_flag_offset_impl(&file_full_path, "vendor", 0, "enabled_ro").unwrap_err();
+ assert_eq!(
+ format!("{:?}", err),
+ "StorageFileNotFound(Storage file does not exist for vendor)"
+ );
+ }
+
+ #[test]
+ fn test_boolean_flag_value_query() {
+ let file = write_storage_location_file();
+ let file_full_path = file.path().display().to_string();
+
+ let baseline: Vec<bool> = vec![false; 8];
+ for (offset, expected_value) in baseline.into_iter().enumerate() {
+ let flag_value =
+ get_boolean_flag_value_impl(&file_full_path, "system", offset as u32).unwrap();
+ assert_eq!(flag_value, expected_value);
+ }
+ }
+
+ #[test]
+ fn test_invalid_boolean_flag_value_query() {
+ let file = write_storage_location_file();
+ let file_full_path = file.path().display().to_string();
+
+ let err = get_boolean_flag_value_impl(&file_full_path, "vendor", 0u32).unwrap_err();
+ assert_eq!(
+ format!("{:?}", err),
+ "StorageFileNotFound(Storage file does not exist for vendor)"
+ );
+
+ let err = get_boolean_flag_value_impl(&file_full_path, "system", 8u32).unwrap_err();
+ assert_eq!(
+ format!("{:?}", err),
+ "InvalidStorageFileOffset(Flag value offset goes beyond the end of the file.)"
+ );
+ }
+}
diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp
index 7420aa8..4566bf9 100644
--- a/tools/aconfig/fake_device_config/Android.bp
+++ b/tools/aconfig/fake_device_config/Android.bp
@@ -15,7 +15,8 @@
java_library {
name: "fake_device_config",
srcs: ["src/**/*.java"],
- sdk_version: "core_current",
+ sdk_version: "none",
+ system_modules: "core-all-system-modules",
host_supported: true,
}
diff --git a/tools/characteristics_rro_generator.py b/tools/characteristics_rro_generator.py
index 6489673..cf873ee 100644
--- a/tools/characteristics_rro_generator.py
+++ b/tools/characteristics_rro_generator.py
@@ -1,22 +1,14 @@
#!/usr/bin/env python3
import sys
-from xml.dom.minidom import parseString
-
-def parse_package(manifest):
- with open(manifest, 'r') as f:
- data = f.read()
- dom = parseString(data)
- return dom.documentElement.getAttribute('package')
if __name__ == '__main__':
if len(sys.argv) != 3:
- sys.exit(f"usage: {sys_argv[0]} target_package_manifest output\n")
- package_name = parse_package(sys.argv[1])
+        sys.exit(f"usage: {sys.argv[0]} target_package_name output\n")
with open(sys.argv[2], "w") as f:
f.write(f'''<?xml version="1.0" encoding="utf-8"?>
-<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="{package_name}.auto_generated_characteristics_rro">
+ <manifest xmlns:android="http://schemas.android.com/apk/res/android" package="{sys.argv[1]}.auto_generated_characteristics_rro">
<application android:hasCode="false" />
- <overlay android:targetPackage="{package_name}"
+ <overlay android:targetPackage="{sys.argv[1]}"
android:isStatic="true"
android:priority="0" />
</manifest>
diff --git a/tools/ide_query/go.mod b/tools/ide_query/go.mod
new file mode 100644
index 0000000..f9d727f
--- /dev/null
+++ b/tools/ide_query/go.mod
@@ -0,0 +1,7 @@
+module ide_query
+
+go 1.21
+
+require (
+ google.golang.org/protobuf v0.0.0
+)
diff --git a/tools/ide_query/go.work b/tools/ide_query/go.work
new file mode 100644
index 0000000..851f352
--- /dev/null
+++ b/tools/ide_query/go.work
@@ -0,0 +1,9 @@
+go 1.21
+
+use (
+ .
+)
+
+replace (
+ google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
+)
\ No newline at end of file
diff --git a/tools/ide_query/go.work.sum b/tools/ide_query/go.work.sum
new file mode 100644
index 0000000..cf42b48
--- /dev/null
+++ b/tools/ide_query/go.work.sum
@@ -0,0 +1,5 @@
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/protobuf v1.26.0-rc.1 h1:7QnIQpGRHE5RnLKnESfDoxm2dTapTZua5a0kS0A+VXQ=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go
new file mode 100644
index 0000000..c1c4da0
--- /dev/null
+++ b/tools/ide_query/ide_query.go
@@ -0,0 +1,265 @@
+// Binary ide_query generates and analyzes build artifacts.
+// The produced result can be consumed by IDEs to provide language features.
+package main
+
+import (
+ "container/list"
+ "context"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "log"
+ "os"
+ "os/exec"
+ "path"
+ "slices"
+ "strings"
+
+ "google.golang.org/protobuf/proto"
+ pb "ide_query/ide_query_proto"
+)
+
+// Env contains information about the current environment.
+type Env struct {
+ LunchTarget LunchTarget
+ RepoDir string
+ OutDir string
+}
+
+// LunchTarget is a parsed Android lunch target.
+// Input format: <product_name>-<release_type>-<build_variant>
+type LunchTarget struct {
+ Product string
+ Release string
+ Variant string
+}
+
+var _ flag.Value = (*LunchTarget)(nil)
+
+// // Get implements flag.Value.
+// func (l *LunchTarget) Get() any {
+// return l
+// }
+
+// Set implements flag.Value.
+func (l *LunchTarget) Set(s string) error {
+ parts := strings.Split(s, "-")
+ if len(parts) != 3 {
+ return fmt.Errorf("invalid lunch target: %q, must have form <product_name>-<release_type>-<build_variant>", s)
+ }
+ *l = LunchTarget{
+ Product: parts[0],
+ Release: parts[1],
+ Variant: parts[2],
+ }
+ return nil
+}
+
+// String implements flag.Value.
+func (l *LunchTarget) String() string {
+ return fmt.Sprintf("%s-%s-%s", l.Product, l.Release, l.Variant)
+}
+
+func main() {
+ var env Env
+ env.OutDir = os.Getenv("OUT_DIR")
+ env.RepoDir = os.Getenv("ANDROID_BUILD_TOP")
+ flag.Var(&env.LunchTarget, "lunch_target", "The lunch target to query")
+ flag.Parse()
+ files := flag.Args()
+ if len(files) == 0 {
+ fmt.Println("No files provided.")
+ os.Exit(1)
+ return
+ }
+
+ var javaFiles []string
+ for _, f := range files {
+ switch {
+ case strings.HasSuffix(f, ".java") || strings.HasSuffix(f, ".kt"):
+ javaFiles = append(javaFiles, f)
+ default:
+			log.Printf("File %q is not supported - will be skipped.", f)
+ }
+ }
+
+ ctx := context.Background()
+ javaDepsPath := path.Join(env.RepoDir, env.OutDir, "soong/module_bp_java_deps.json")
+ // TODO(michaelmerg): Figure out if module_bp_java_deps.json is outdated.
+ runMake(ctx, env, "nothing")
+
+ javaModules, err := loadJavaModules(javaDepsPath)
+ if err != nil {
+ log.Fatalf("Failed to load java modules: %v", err)
+ }
+
+ fileToModule := make(map[string]*javaModule) // file path -> module
+ for _, f := range javaFiles {
+ for _, m := range javaModules {
+ if !slices.Contains(m.Srcs, f) {
+ continue
+ }
+ if fileToModule[f] != nil {
+ // TODO(michaelmerg): Handle the case where a file is covered by multiple modules.
+ log.Printf("File %q found in module %q but is already covered by module %q", f, m.Name, fileToModule[f].Name)
+ continue
+ }
+ fileToModule[f] = m
+ }
+ }
+
+ var toMake []string
+ for _, m := range fileToModule {
+ toMake = append(toMake, m.Name)
+ }
+ fmt.Printf("Running make for modules: %v\n", strings.Join(toMake, ", "))
+ if err := runMake(ctx, env, toMake...); err != nil {
+ log.Fatalf("Failed to run make: %v", err)
+ }
+
+ var sources []*pb.SourceFile
+ type depsAndGenerated struct {
+ Deps []string
+ Generated []*pb.GeneratedFile
+ }
+ moduleToDeps := make(map[string]*depsAndGenerated)
+ for _, f := range files {
+ file := &pb.SourceFile{
+ Path: f,
+ WorkingDir: env.RepoDir,
+ }
+ sources = append(sources, file)
+
+ m := fileToModule[f]
+ if m == nil {
+ file.Status = &pb.Status{
+ Code: pb.Status_FAILURE,
+ Message: proto.String("File not found in any module."),
+ }
+ continue
+ }
+
+ file.Status = &pb.Status{Code: pb.Status_OK}
+ if moduleToDeps[m.Name] != nil {
+ file.Generated = moduleToDeps[m.Name].Generated
+ file.Deps = moduleToDeps[m.Name].Deps
+ continue
+ }
+
+ deps := transitiveDeps(m, javaModules)
+ var generated []*pb.GeneratedFile
+ outPrefix := env.OutDir + "/"
+ for _, d := range deps {
+ if relPath, ok := strings.CutPrefix(d, outPrefix); ok {
+ contents, err := os.ReadFile(d)
+ if err != nil {
+ fmt.Printf("Generated file %q not found - will be skipped.\n", d)
+ continue
+ }
+
+ generated = append(generated, &pb.GeneratedFile{
+ Path: relPath,
+ Contents: contents,
+ })
+ }
+ }
+ moduleToDeps[m.Name] = &depsAndGenerated{deps, generated}
+ file.Generated = generated
+ file.Deps = deps
+ }
+
+ res := &pb.IdeAnalysis{
+ BuildArtifactRoot: env.OutDir,
+ Sources: sources,
+ Status: &pb.Status{Code: pb.Status_OK},
+ }
+ data, err := proto.Marshal(res)
+ if err != nil {
+ log.Fatalf("Failed to marshal result proto: %v", err)
+ }
+
+ err = os.WriteFile(path.Join(env.OutDir, "ide_query.pb"), data, 0644)
+ if err != nil {
+ log.Fatalf("Failed to write result proto: %v", err)
+ }
+
+ for _, s := range sources {
+ fmt.Printf("%s: %v (Deps: %d, Generated: %d)\n", s.GetPath(), s.GetStatus(), len(s.GetDeps()), len(s.GetGenerated()))
+ }
+}
+
+// runMake runs Soong build for the given modules.
+func runMake(ctx context.Context, env Env, modules ...string) error {
+ args := []string{
+ "--make-mode",
+ "ANDROID_BUILD_ENVIRONMENT_CONFIG=googler-cog",
+ "TARGET_PRODUCT=" + env.LunchTarget.Product,
+ "TARGET_RELEASE=" + env.LunchTarget.Release,
+ "TARGET_BUILD_VARIANT=" + env.LunchTarget.Variant,
+ }
+ args = append(args, modules...)
+ cmd := exec.CommandContext(ctx, "build/soong/soong_ui.bash", args...)
+ cmd.Dir = env.RepoDir
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+ return cmd.Run()
+}
+
+type javaModule struct {
+ Name string
+ Path []string `json:"path,omitempty"`
+ Deps []string `json:"dependencies,omitempty"`
+ Srcs []string `json:"srcs,omitempty"`
+ Jars []string `json:"jars,omitempty"`
+ SrcJars []string `json:"srcjars,omitempty"`
+}
+
+func loadJavaModules(path string) (map[string]*javaModule, error) {
+ data, err := os.ReadFile(path)
+ if err != nil {
+ return nil, err
+ }
+
+ var ret map[string]*javaModule // module name -> module
+ if err = json.Unmarshal(data, &ret); err != nil {
+ return nil, err
+ }
+
+ for name, module := range ret {
+ if strings.HasSuffix(name, "-jarjar") || strings.HasSuffix(name, ".impl") {
+ delete(ret, name)
+ continue
+ }
+
+ module.Name = name
+ }
+ return ret, nil
+}
+
+func transitiveDeps(m *javaModule, modules map[string]*javaModule) []string {
+ var ret []string
+ q := list.New()
+ q.PushBack(m.Name)
+ seen := make(map[string]bool) // module names -> true
+ for q.Len() > 0 {
+ name := q.Remove(q.Front()).(string)
+ mod := modules[name]
+ if mod == nil {
+ continue
+ }
+
+ ret = append(ret, mod.Srcs...)
+ ret = append(ret, mod.SrcJars...)
+ ret = append(ret, mod.Jars...)
+ for _, d := range mod.Deps {
+ if seen[d] {
+ continue
+ }
+ seen[d] = true
+ q.PushBack(d)
+ }
+ }
+ slices.Sort(ret)
+ ret = slices.Compact(ret)
+ return ret
+}
diff --git a/tools/ide_query/ide_query.sh b/tools/ide_query/ide_query.sh
new file mode 100755
index 0000000..663c4dc
--- /dev/null
+++ b/tools/ide_query/ide_query.sh
@@ -0,0 +1,12 @@
+#!/bin/bash -e
+
+cd $(dirname $BASH_SOURCE)
+source $(pwd)/../../shell_utils.sh
+require_top
+
+# Ensure cogsetup (out/ will be symlink outside the repo)
+. ${TOP}/build/make/cogsetup.sh
+
+export ANDROID_BUILD_TOP=$TOP
+export OUT_DIR=${OUT_DIR}
+exec "${TOP}/prebuilts/go/linux-x86/bin/go" "run" "ide_query" "$@"
diff --git a/tools/ide_query/ide_query_proto/ide_query.pb.go b/tools/ide_query/ide_query_proto/ide_query.pb.go
new file mode 100644
index 0000000..30571cc
--- /dev/null
+++ b/tools/ide_query/ide_query_proto/ide_query.pb.go
@@ -0,0 +1,522 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// protoc-gen-go v1.30.0
+// protoc v3.21.12
+// source: ide_query.proto
+
+package ide_query_proto
+
+import (
+ protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+ protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+ reflect "reflect"
+ sync "sync"
+)
+
+const (
+ // Verify that this generated code is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+ // Verify that runtime/protoimpl is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+type Status_Code int32
+
+const (
+ Status_OK Status_Code = 0
+ Status_FAILURE Status_Code = 1
+)
+
+// Enum value maps for Status_Code.
+var (
+ Status_Code_name = map[int32]string{
+ 0: "OK",
+ 1: "FAILURE",
+ }
+ Status_Code_value = map[string]int32{
+ "OK": 0,
+ "FAILURE": 1,
+ }
+)
+
+func (x Status_Code) Enum() *Status_Code {
+ p := new(Status_Code)
+ *p = x
+ return p
+}
+
+func (x Status_Code) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (Status_Code) Descriptor() protoreflect.EnumDescriptor {
+ return file_ide_query_proto_enumTypes[0].Descriptor()
+}
+
+func (Status_Code) Type() protoreflect.EnumType {
+ return &file_ide_query_proto_enumTypes[0]
+}
+
+func (x Status_Code) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use Status_Code.Descriptor instead.
+func (Status_Code) EnumDescriptor() ([]byte, []int) {
+ return file_ide_query_proto_rawDescGZIP(), []int{0, 0}
+}
+
+// Indicates the success/failure for analysis.
+type Status struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Code Status_Code `protobuf:"varint,1,opt,name=code,proto3,enum=cider.build.companion.Status_Code" json:"code,omitempty"`
+ // Details about the status, might be displayed to user.
+ Message *string `protobuf:"bytes,2,opt,name=message,proto3,oneof" json:"message,omitempty"`
+}
+
+func (x *Status) Reset() {
+ *x = Status{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_ide_query_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *Status) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Status) ProtoMessage() {}
+
+func (x *Status) ProtoReflect() protoreflect.Message {
+ mi := &file_ide_query_proto_msgTypes[0]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use Status.ProtoReflect.Descriptor instead.
+func (*Status) Descriptor() ([]byte, []int) {
+ return file_ide_query_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *Status) GetCode() Status_Code {
+ if x != nil {
+ return x.Code
+ }
+ return Status_OK
+}
+
+func (x *Status) GetMessage() string {
+ if x != nil && x.Message != nil {
+ return *x.Message
+ }
+ return ""
+}
+
+type GeneratedFile struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ // Path to the file relative to IdeAnalysis.build_artifact_root.
+ Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
+ // The text of the generated file, if not provided contents will be read
+ //
+ // from the path above in user's workstation.
+ Contents []byte `protobuf:"bytes,2,opt,name=contents,proto3,oneof" json:"contents,omitempty"`
+}
+
+func (x *GeneratedFile) Reset() {
+ *x = GeneratedFile{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_ide_query_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *GeneratedFile) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*GeneratedFile) ProtoMessage() {}
+
+func (x *GeneratedFile) ProtoReflect() protoreflect.Message {
+ mi := &file_ide_query_proto_msgTypes[1]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use GeneratedFile.ProtoReflect.Descriptor instead.
+func (*GeneratedFile) Descriptor() ([]byte, []int) {
+ return file_ide_query_proto_rawDescGZIP(), []int{1}
+}
+
+func (x *GeneratedFile) GetPath() string {
+ if x != nil {
+ return x.Path
+ }
+ return ""
+}
+
+func (x *GeneratedFile) GetContents() []byte {
+ if x != nil {
+ return x.Contents
+ }
+ return nil
+}
+
+type SourceFile struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ // Repo root relative path to the source file in the tree.
+ Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
+ // Working directory used by the build system. All the relative
+ // paths in compiler_arguments should be relative to this path.
+ // Relative to workspace root.
+ WorkingDir string `protobuf:"bytes,2,opt,name=working_dir,json=workingDir,proto3" json:"working_dir,omitempty"`
+ // Compiler arguments to compile the source file. If multiple variants
+ // of the module being compiled are possible, the query script will choose
+ // one.
+ CompilerArguments []string `protobuf:"bytes,3,rep,name=compiler_arguments,json=compilerArguments,proto3" json:"compiler_arguments,omitempty"`
+ // Any generated files that are used in compiling the file.
+ Generated []*GeneratedFile `protobuf:"bytes,4,rep,name=generated,proto3" json:"generated,omitempty"`
+ // Paths to all of the sources, like build files, code generators,
+ // proto files etc. that were used during analysis. Used to figure
+ // out when a set of build artifacts are stale and the query tool
+ // must be re-run.
+ // Relative to workspace root.
+ Deps []string `protobuf:"bytes,5,rep,name=deps,proto3" json:"deps,omitempty"`
+ // Represensts analysis status for this particular file. e.g. not part
+ // of the build graph.
+ Status *Status `protobuf:"bytes,6,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+func (x *SourceFile) Reset() {
+ *x = SourceFile{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_ide_query_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *SourceFile) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*SourceFile) ProtoMessage() {}
+
+func (x *SourceFile) ProtoReflect() protoreflect.Message {
+ mi := &file_ide_query_proto_msgTypes[2]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use SourceFile.ProtoReflect.Descriptor instead.
+func (*SourceFile) Descriptor() ([]byte, []int) {
+ return file_ide_query_proto_rawDescGZIP(), []int{2}
+}
+
+func (x *SourceFile) GetPath() string {
+ if x != nil {
+ return x.Path
+ }
+ return ""
+}
+
+func (x *SourceFile) GetWorkingDir() string {
+ if x != nil {
+ return x.WorkingDir
+ }
+ return ""
+}
+
+func (x *SourceFile) GetCompilerArguments() []string {
+ if x != nil {
+ return x.CompilerArguments
+ }
+ return nil
+}
+
+func (x *SourceFile) GetGenerated() []*GeneratedFile {
+ if x != nil {
+ return x.Generated
+ }
+ return nil
+}
+
+func (x *SourceFile) GetDeps() []string {
+ if x != nil {
+ return x.Deps
+ }
+ return nil
+}
+
+func (x *SourceFile) GetStatus() *Status {
+ if x != nil {
+ return x.Status
+ }
+ return nil
+}
+
+type IdeAnalysis struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ // Path relative to workspace root, containing all the artifacts
+ // generated by the build system. GeneratedFile.path are always
+ // relative to this directory.
+ BuildArtifactRoot string `protobuf:"bytes,1,opt,name=build_artifact_root,json=buildArtifactRoot,proto3" json:"build_artifact_root,omitempty"`
+ Sources []*SourceFile `protobuf:"bytes,2,rep,name=sources,proto3" json:"sources,omitempty"`
+ // Status representing overall analysis.
+ // Should fail only when no analysis can be performed, e.g. workspace
+ // isn't setup.
+ Status *Status `protobuf:"bytes,3,opt,name=status,proto3,oneof" json:"status,omitempty"`
+}
+
+func (x *IdeAnalysis) Reset() {
+ *x = IdeAnalysis{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_ide_query_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *IdeAnalysis) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*IdeAnalysis) ProtoMessage() {}
+
+func (x *IdeAnalysis) ProtoReflect() protoreflect.Message {
+ mi := &file_ide_query_proto_msgTypes[3]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use IdeAnalysis.ProtoReflect.Descriptor instead.
+func (*IdeAnalysis) Descriptor() ([]byte, []int) {
+ return file_ide_query_proto_rawDescGZIP(), []int{3}
+}
+
+func (x *IdeAnalysis) GetBuildArtifactRoot() string {
+ if x != nil {
+ return x.BuildArtifactRoot
+ }
+ return ""
+}
+
+func (x *IdeAnalysis) GetSources() []*SourceFile {
+ if x != nil {
+ return x.Sources
+ }
+ return nil
+}
+
+func (x *IdeAnalysis) GetStatus() *Status {
+ if x != nil {
+ return x.Status
+ }
+ return nil
+}
+
+var File_ide_query_proto protoreflect.FileDescriptor
+
+var file_ide_query_proto_rawDesc = []byte{
+ 0x0a, 0x0f, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74,
+ 0x6f, 0x12, 0x15, 0x63, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x2e, 0x63,
+ 0x6f, 0x6d, 0x70, 0x61, 0x6e, 0x69, 0x6f, 0x6e, 0x22, 0x88, 0x01, 0x0a, 0x06, 0x53, 0x74, 0x61,
+ 0x74, 0x75, 0x73, 0x12, 0x36, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x0e, 0x32, 0x22, 0x2e, 0x63, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x2e,
+ 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x6e, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73,
+ 0x2e, 0x43, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x1d, 0x0a, 0x07, 0x6d,
+ 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07,
+ 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x22, 0x1b, 0x0a, 0x04, 0x43, 0x6f,
+ 0x64, 0x65, 0x12, 0x06, 0x0a, 0x02, 0x4f, 0x4b, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x41,
+ 0x49, 0x4c, 0x55, 0x52, 0x45, 0x10, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6d, 0x65, 0x73, 0x73,
+ 0x61, 0x67, 0x65, 0x22, 0x51, 0x0a, 0x0d, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64,
+ 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74,
+ 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f,
+ 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x63, 0x6f,
+ 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x8f, 0x02, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72,
+ 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a,
+ 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e, 0x67, 0x44, 0x69, 0x72, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6f,
+ 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73,
+ 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x11, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72,
+ 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x42, 0x0a, 0x09, 0x67, 0x65, 0x6e,
+ 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x63,
+ 0x69, 0x64, 0x65, 0x72, 0x2e, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x61,
+ 0x6e, 0x69, 0x6f, 0x6e, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x46, 0x69,
+ 0x6c, 0x65, 0x52, 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x12, 0x12, 0x0a,
+ 0x04, 0x64, 0x65, 0x70, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x64, 0x65, 0x70,
+ 0x73, 0x12, 0x3a, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x2e,
+ 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x6e, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73,
+ 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a,
+ 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0xc1, 0x01, 0x0a, 0x0b, 0x49, 0x64, 0x65,
+ 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x62, 0x75, 0x69, 0x6c,
+ 0x64, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x74, 0x69,
+ 0x66, 0x61, 0x63, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x3b, 0x0a, 0x07, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x63, 0x69, 0x64, 0x65,
+ 0x72, 0x2e, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x6e, 0x69, 0x6f,
+ 0x6e, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x07, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18,
+ 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x62, 0x75,
+ 0x69, 0x6c, 0x64, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x6e, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x74,
+ 0x61, 0x74, 0x75, 0x73, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x88, 0x01,
+ 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x42, 0x1b, 0x5a, 0x19,
+ 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75, 0x65, 0x72, 0x79, 0x2f, 0x69, 0x64, 0x65, 0x5f, 0x71, 0x75,
+ 0x65, 0x72, 0x79, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f,
+ 0x33,
+}
+
+var (
+ file_ide_query_proto_rawDescOnce sync.Once
+ file_ide_query_proto_rawDescData = file_ide_query_proto_rawDesc
+)
+
+func file_ide_query_proto_rawDescGZIP() []byte {
+ file_ide_query_proto_rawDescOnce.Do(func() {
+ file_ide_query_proto_rawDescData = protoimpl.X.CompressGZIP(file_ide_query_proto_rawDescData)
+ })
+ return file_ide_query_proto_rawDescData
+}
+
+var file_ide_query_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
+var file_ide_query_proto_msgTypes = make([]protoimpl.MessageInfo, 4)
+var file_ide_query_proto_goTypes = []interface{}{
+ (Status_Code)(0), // 0: cider.build.companion.Status.Code
+ (*Status)(nil), // 1: cider.build.companion.Status
+ (*GeneratedFile)(nil), // 2: cider.build.companion.GeneratedFile
+ (*SourceFile)(nil), // 3: cider.build.companion.SourceFile
+ (*IdeAnalysis)(nil), // 4: cider.build.companion.IdeAnalysis
+}
+var file_ide_query_proto_depIdxs = []int32{
+ 0, // 0: cider.build.companion.Status.code:type_name -> cider.build.companion.Status.Code
+ 2, // 1: cider.build.companion.SourceFile.generated:type_name -> cider.build.companion.GeneratedFile
+ 1, // 2: cider.build.companion.SourceFile.status:type_name -> cider.build.companion.Status
+ 3, // 3: cider.build.companion.IdeAnalysis.sources:type_name -> cider.build.companion.SourceFile
+ 1, // 4: cider.build.companion.IdeAnalysis.status:type_name -> cider.build.companion.Status
+ 5, // [5:5] is the sub-list for method output_type
+ 5, // [5:5] is the sub-list for method input_type
+ 5, // [5:5] is the sub-list for extension type_name
+ 5, // [5:5] is the sub-list for extension extendee
+ 0, // [0:5] is the sub-list for field type_name
+}
+
+func init() { file_ide_query_proto_init() }
+func file_ide_query_proto_init() {
+ if File_ide_query_proto != nil {
+ return
+ }
+ if !protoimpl.UnsafeEnabled {
+ file_ide_query_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*Status); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_ide_query_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*GeneratedFile); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_ide_query_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*SourceFile); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_ide_query_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*IdeAnalysis); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
+ file_ide_query_proto_msgTypes[0].OneofWrappers = []interface{}{}
+ file_ide_query_proto_msgTypes[1].OneofWrappers = []interface{}{}
+ file_ide_query_proto_msgTypes[2].OneofWrappers = []interface{}{}
+ file_ide_query_proto_msgTypes[3].OneofWrappers = []interface{}{}
+ type x struct{}
+ out := protoimpl.TypeBuilder{
+ File: protoimpl.DescBuilder{
+ GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+ RawDescriptor: file_ide_query_proto_rawDesc,
+ NumEnums: 1,
+ NumMessages: 4,
+ NumExtensions: 0,
+ NumServices: 0,
+ },
+ GoTypes: file_ide_query_proto_goTypes,
+ DependencyIndexes: file_ide_query_proto_depIdxs,
+ EnumInfos: file_ide_query_proto_enumTypes,
+ MessageInfos: file_ide_query_proto_msgTypes,
+ }.Build()
+ File_ide_query_proto = out.File
+ file_ide_query_proto_rawDesc = nil
+ file_ide_query_proto_goTypes = nil
+ file_ide_query_proto_depIdxs = nil
+}
diff --git a/tools/ide_query/ide_query_proto/ide_query.proto b/tools/ide_query/ide_query_proto/ide_query.proto
new file mode 100644
index 0000000..63eea39
--- /dev/null
+++ b/tools/ide_query/ide_query_proto/ide_query.proto
@@ -0,0 +1,66 @@
+syntax = "proto3";
+
+package ide_query;
+option go_package = "ide_query/ide_query_proto";
+
+// Indicates the success/failure for analysis.
+message Status {
+ enum Code {
+ OK = 0;
+ FAILURE = 1;
+ }
+ Code code = 1;
+ // Details about the status, might be displayed to user.
+ optional string message = 2;
+}
+
+message GeneratedFile {
+ // Path to the file relative to IdeAnalysis.build_artifact_root.
+ string path = 1;
+
+ // The text of the generated file, if not provided contents will be read
+ // from the path above on the user's workstation.
+ optional bytes contents = 2;
+}
+
+message SourceFile {
+ // Path to the source file relative to repository root.
+ string path = 1;
+
+ // Working directory used by the build system. All the relative
+ // paths in compiler_arguments should be relative to this path.
+ // Relative to repository root.
+ string working_dir = 2;
+
+ // Compiler arguments to compile the source file. If multiple variants
+ // of the module being compiled are possible, the query script will choose
+ // one.
+ repeated string compiler_arguments = 3;
+
+ // Any generated files that are used in compiling the file.
+ repeated GeneratedFile generated = 4;
+
+ // Paths to all of the sources, like build files, code generators,
+ // proto files etc. that were used during analysis. Used to figure
+ // out when a set of build artifacts are stale and the query tool
+ // must be re-run.
+ // Relative to repository root.
+ repeated string deps = 5;
+
+ // Represents analysis status for this particular file. e.g. not part
+ // of the build graph.
+ optional Status status = 6;
+}
+
+message IdeAnalysis {
+ // Path relative to repository root, containing all the artifacts
+ // generated by the build system. GeneratedFile.path are always
+ // relative to this directory.
+ string build_artifact_root = 1;
+
+ repeated SourceFile sources = 2;
+
+ // Status representing overall analysis.
+ // Should fail only when no analysis can be performed.
+ optional Status status = 3;
+}
diff --git a/tools/ide_query/ide_query_proto/regen.sh b/tools/ide_query/ide_query_proto/regen.sh
new file mode 100755
index 0000000..eec4f37
--- /dev/null
+++ b/tools/ide_query/ide_query_proto/regen.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+aprotoc --go_out=paths=source_relative:. ide_query.proto
diff --git a/tools/perf/benchmarks b/tools/perf/benchmarks
index ad34586..6998ecd 100755
--- a/tools/perf/benchmarks
+++ b/tools/perf/benchmarks
@@ -249,6 +249,21 @@
undo=lambda: orig.write()
)
+def ChangePublicApi():
+ change = AddJavaField("frameworks/base/core/java/android/provider/Settings.java",
+ "@android.annotation.SuppressLint(\"UnflaggedApi\") public")
+ orig_current_text = Snapshot("frameworks/base/core/api/current.txt")
+
+ def undo():
+ change.undo()
+ orig_current_text.write()
+
+ return Change(
+ label=change.label,
+ change=change.change,
+ undo=lambda: undo()
+ )
+
def AddJavaField(filename, prefix):
return Modify(filename,
lambda: f"{prefix} static final int BENCHMARK = {random.randint(0, 1000000)};\n",
@@ -740,9 +755,8 @@
),
Benchmark(id="framework_api",
title="Add API to Settings.java",
- change=AddJavaField("frameworks/base/core/java/android/provider/Settings.java",
- "@android.annotation.SuppressLint(\"UnflaggedApi\") public"),
- modules=["framework-minus-apex"],
+ change=ChangePublicApi(),
+ modules=["api-stubs-docs-non-updatable-update-current-api", "framework-minus-apex"],
preroll=1,
postroll=2,
),
diff --git a/tools/releasetools/merge/merge_meta.py b/tools/releasetools/merge/merge_meta.py
index 198c973..76582c0 100644
--- a/tools/releasetools/merge/merge_meta.py
+++ b/tools/releasetools/merge/merge_meta.py
@@ -53,23 +53,31 @@
MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
-def MergeUpdateEngineConfig(input_metadir1, input_metadir2, merged_meta_dir):
- UPDATE_ENGINE_CONFIG_NAME = "update_engine_config.txt"
- config1_path = os.path.join(
- input_metadir1, UPDATE_ENGINE_CONFIG_NAME)
- config2_path = os.path.join(
- input_metadir2, UPDATE_ENGINE_CONFIG_NAME)
- config1 = ParseUpdateEngineConfig(config1_path)
- config2 = ParseUpdateEngineConfig(config2_path)
- # Copy older config to merged target files for maximum compatibility
- # update_engine in system partition is from system side, but
- # update_engine_sideload in recovery is from vendor side.
- if config1 < config2:
- shutil.copy(config1_path, os.path.join(
- merged_meta_dir, UPDATE_ENGINE_CONFIG_NAME))
+def MergeUpdateEngineConfig(framework_meta_dir, vendor_meta_dir,
+ merged_meta_dir):
+ """Merges META/update_engine_config.txt.
+
+ The output is the configuration for maximum compatibility.
+ """
+ _CONFIG_NAME = 'update_engine_config.txt'
+ framework_config_path = os.path.join(framework_meta_dir, _CONFIG_NAME)
+ vendor_config_path = os.path.join(vendor_meta_dir, _CONFIG_NAME)
+ merged_config_path = os.path.join(merged_meta_dir, _CONFIG_NAME)
+
+ if os.path.exists(framework_config_path):
+ framework_config = ParseUpdateEngineConfig(framework_config_path)
+ vendor_config = ParseUpdateEngineConfig(vendor_config_path)
+ # Copy older config to merged target files for maximum compatibility
+ # update_engine in system partition is from system side, but
+ # update_engine_sideload in recovery is from vendor side.
+ if framework_config < vendor_config:
+ shutil.copy(framework_config_path, merged_config_path)
+ else:
+ shutil.copy(vendor_config_path, merged_config_path)
else:
- shutil.copy(config2_path, os.path.join(
- merged_meta_dir, UPDATE_ENGINE_CONFIG_NAME))
+ if not OPTIONS.allow_partial_ab:
+ raise FileNotFoundError(framework_config_path)
+ shutil.copy(vendor_config_path, merged_config_path)
def MergeMetaFiles(temp_dir, merged_dir, framework_partitions):
@@ -125,8 +133,7 @@
if OPTIONS.merged_misc_info.get('ab_update') == 'true':
MergeUpdateEngineConfig(
- framework_meta_dir,
- vendor_meta_dir, merged_meta_dir)
+ framework_meta_dir, vendor_meta_dir, merged_meta_dir)
# Write the now-finalized OPTIONS.merged_misc_info.
merge_utils.WriteSortedData(
@@ -140,16 +147,24 @@
The output contains the union of the partition names.
"""
- with open(os.path.join(framework_meta_dir, 'ab_partitions.txt')) as f:
- # Filter out some partitions here to support the case that the
- # ab_partitions.txt of framework-target-files has non-framework partitions.
- # This case happens when we use a complete merged target files package as
- # the framework-target-files.
- framework_ab_partitions = [
- partition
- for partition in f.read().splitlines()
- if partition in framework_partitions
- ]
+ framework_ab_partitions = []
+ framework_ab_config = os.path.join(framework_meta_dir, 'ab_partitions.txt')
+ if os.path.exists(framework_ab_config):
+ with open(framework_ab_config) as f:
+ # Filter out some partitions here to support the case that the
+ # ab_partitions.txt of framework-target-files has non-framework
+ # partitions. This case happens when we use a complete merged target
+ # files package as the framework-target-files.
+ framework_ab_partitions.extend([
+ partition
+ for partition in f.read().splitlines()
+ if partition in framework_partitions
+ ])
+ else:
+ if not OPTIONS.allow_partial_ab:
+ raise FileNotFoundError(framework_ab_config)
+ logger.info('Use partial AB because framework ab_partitions.txt does not '
+ 'exist.')
with open(os.path.join(vendor_meta_dir, 'ab_partitions.txt')) as f:
vendor_ab_partitions = f.read().splitlines()
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index 4619246..fdba927 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -98,6 +98,10 @@
If provided, resolve the conflict AVB rollback index location when
necessary.
+ --allow-partial-ab
+ If provided, allow merging non-AB framework target files with AB vendor
+ target files, which means that only the vendor has AB partitions.
+
The following only apply when using the VSDK to perform dexopt on vendor apps:
--framework-dexpreopt-config
@@ -154,6 +158,7 @@
OPTIONS.rebuild_sepolicy = False
OPTIONS.keep_tmp = False
OPTIONS.avb_resolve_rollback_index_location_conflict = False
+OPTIONS.allow_partial_ab = False
OPTIONS.framework_dexpreopt_config = None
OPTIONS.framework_dexpreopt_tools = None
OPTIONS.vendor_dexpreopt_config = None
@@ -576,6 +581,8 @@
OPTIONS.keep_tmp = True
elif o == '--avb-resolve-rollback-index-location-conflict':
OPTIONS.avb_resolve_rollback_index_location_conflict = True
+ elif o == '--allow-partial-ab':
+ OPTIONS.allow_partial_ab = True
elif o == '--framework-dexpreopt-config':
OPTIONS.framework_dexpreopt_config = a
elif o == '--framework-dexpreopt-tools':
@@ -617,6 +624,7 @@
'rebuild-sepolicy',
'keep-tmp',
'avb-resolve-rollback-index-location-conflict',
+ 'allow-partial-ab',
],
extra_option_handler=option_handler)