Merge "Generalize vts config module for other Suites"
diff --git a/CleanSpec.mk b/CleanSpec.mk
index ddee654..8219afc 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -673,6 +673,14 @@
$(call add-clean-step, rm -rf $(HOST_OUT)/fuzz/*)
$(call add-clean-step, rm -rf $(SOONG_OUT_DIR)/host/*/fuzz/*)
+# Change file layout of system_other
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system_other)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/apex)
+
+# Migrate preopt files to system_other for some devices
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/*/*app/*/oat)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/core/Makefile b/core/Makefile
index 951761c..f02eb19 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1924,7 +1924,8 @@
ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT) \
$(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
# Named '.dat' so we don't attempt to use imgdiff for patching it.
-RECOVERY_RESOURCE_ZIP := $(TARGET_OUT)/etc/recovery-resource.dat
+RECOVERY_RESOURCE_ZIP := $(TARGET_OUT_VENDOR)/etc/recovery-resource.dat
+ALL_DEFAULT_INSTALLED_MODULES += $(RECOVERY_RESOURCE_ZIP)
else
RECOVERY_RESOURCE_ZIP :=
endif
@@ -2153,6 +2154,7 @@
$(error MTD device is no longer supported and thus BOARD_NAND_SPARE_SIZE is deprecated.)
endif
+ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
# -----------------------------------------------------------------
# the debug ramdisk, which is the original ramdisk plus additional
# files: force_debuggable, adb_debug.prop and userdebug sepolicy.
@@ -2249,41 +2251,116 @@
# Using a test key to sign boot-debug.img to continue booting with the mismatched
# public key, if the device is unlocked.
ifneq ($(BOARD_AVB_BOOT_KEY_PATH),)
-BOARD_AVB_DEBUG_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
-$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): PRIVATE_AVB_DEBUG_BOOT_SIGNING_ARGS := \
- --algorithm SHA256_RSA2048 --key $(BOARD_AVB_DEBUG_BOOT_KEY_PATH)
-$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_DEBUG_BOOT_KEY_PATH)
+$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_BOOT_TEST_KEY_PATH)
endif
+BOARD_AVB_BOOT_TEST_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
+INTERNAL_AVB_BOOT_TEST_SIGNING_ARGS := --algorithm SHA256_RSA2048 --key $(BOARD_AVB_BOOT_TEST_KEY_PATH)
+# $(1): the bootimage to sign
+define test-key-sign-bootimage
+$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
+$(AVBTOOL) add_hash_footer \
+ --image $(1) \
+ --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
+ --partition_name boot $(INTERNAL_AVB_BOOT_TEST_SIGNING_ARGS) \
+ $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+$(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+endef
+
# Depends on original boot.img and ramdisk-debug.img, to build the new boot-debug.img
$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
$(call pretty,"Target boot debug image: $@")
$(MKBOOTIMG) $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(if $(BOARD_AVB_BOOT_KEY_PATH),\
- $(call assert-max-image-size,$@,$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))); \
- $(AVBTOOL) add_hash_footer \
- --image $@ \
- --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
- --partition_name boot $(PRIVATE_AVB_DEBUG_BOOT_SIGNING_ARGS) \
- $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS), \
- $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$@))
.PHONY: bootimage_debug-nodeps
bootimage_debug-nodeps: $(MKBOOTIMG)
echo "make $@: ignoring dependencies"
$(MKBOOTIMG) $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
- $(if $(BOARD_AVB_BOOT_KEY_PATH),\
- $(call assert-max-image-size,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE))); \
- $(AVBTOOL) add_hash_footer \
- --image $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
- --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
- --partition_name boot $(PRIVATE_AVB_DEBUG_BOOT_SIGNING_ARGS) \
- $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS), \
- $(call assert-max-image-size,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET)))
endif # TARGET_NO_KERNEL
# -----------------------------------------------------------------
+# The test harness ramdisk, which is based off debug_ramdisk, plus a
+# few additional test-harness-specific properties in adb_debug.prop.
+
+ifdef BUILDING_RAMDISK_IMAGE
+BUILT_TEST_HARNESS_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk-test-harness.img
+INSTALLED_TEST_HARNESS_RAMDISK_TARGET := $(BUILT_TEST_HARNESS_RAMDISK_TARGET)
+
+# rsync the content from ramdisk-debug.img to ramdisk-test-harness.img, then
+# appends a few test harness specific properties into the adb_debug.prop.
+TEST_HARNESS_RAMDISK_SYNC_DIR := $(PRODUCT_OUT)/debug_ramdisk
+TEST_HARNESS_RAMDISK_ROOT_DIR := $(PRODUCT_OUT)/test_harness_ramdisk
+
+# The following TARGET_TEST_HARNESS_RAMDISK_OUT will be $(PRODUCT_OUT)/test_harness_ramdisk/first_stage_ramdisk,
+# if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/test_harness_ramdisk.
+TEST_HARNESS_PROP_TARGET := $(TARGET_TEST_HARNESS_RAMDISK_OUT)/adb_debug.prop
+ADDITIONAL_TEST_HARNESS_PROPERTIES := ro.audio.silent=1
+ADDITIONAL_TEST_HARNESS_PROPERTIES += ro.test_harness=1
+
+# $(1): a list of key=value pairs for additional property assignments
+# $(2): the target .prop file to append the properties from $(1)
+define append-test-harness-props
+ echo "#" >> $(2); \
+ echo "# ADDITIONAL TEST HARNESS_PROPERTIES" >> $(2); \
+ echo "#" >> $(2);
+ $(foreach line,$(1), echo "$(line)" >> $(2);)
+endef
+
+$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_TEST_HARNESS_RAMDISK_FILES) | $(MINIGZIP)
+ $(call pretty,"Target test harness ram disk: $@")
+ rsync -a $(TEST_HARNESS_RAMDISK_SYNC_DIR)/ $(TEST_HARNESS_RAMDISK_ROOT_DIR)
+ $(call append-test-harness-props,$(ADDITIONAL_TEST_HARNESS_PROPERTIES),$(TEST_HARNESS_PROP_TARGET))
+ $(MKBOOTFS) -d $(TARGET_OUT) $(TEST_HARNESS_RAMDISK_ROOT_DIR) | $(MINIGZIP) > $@
+
+.PHONY: ramdisk_test_harness-nodeps
+ramdisk_test_harness-nodeps: $(MKBOOTFS) | $(MINIGZIP)
+ echo "make $@: ignoring dependencies"
+ rsync -a $(TEST_HARNESS_RAMDISK_SYNC_DIR)/ $(TEST_HARNESS_RAMDISK_ROOT_DIR)
+ $(call append-test-harness-props,$(ADDITIONAL_TEST_HARNESS_PROPERTIES),$(TEST_HARNESS_PROP_TARGET))
+ $(MKBOOTFS) -d $(TARGET_OUT) $(TEST_HARNESS_RAMDISK_ROOT_DIR) | $(MINIGZIP) > $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+
+endif # BUILDING_RAMDISK_IMAGE
+
+# -----------------------------------------------------------------
+# the boot-test-harness.img, which is the kernel plus ramdisk-test-harness.img
+#
+# Note: it's intentional to skip signing for boot-test-harness.img, because it
+# can only be used if the device is unlocked with verification error.
+ifneq ($(strip $(TARGET_NO_KERNEL)),true)
+
+INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot-test-harness.img
+
+# Replace ramdisk-debug.img in $(MKBOOTIMG) ARGS with ramdisk-test-harness.img to build boot-test-harness.img
+INTERNAL_TEST_HARNESS_BOOTIMAGE_ARGS := $(subst $(INSTALLED_DEBUG_RAMDISK_TARGET),$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET),$(INTERNAL_DEBUG_BOOTIMAGE_ARGS))
+
+# If boot.img is chained but boot-test-harness.img is not signed, libavb in bootloader
+# will fail to find valid AVB metadata from the end of /boot, thus stop booting.
+# Using a test key to sign boot-test-harness.img to continue booting with the mismatched
+# public key, if the device is unlocked.
+ifneq ($(BOARD_AVB_BOOT_KEY_PATH),)
+$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_BOOT_TEST_KEY_PATH)
+endif
+
+# Build the new boot-test-harness.img, based on boot-debug.img and ramdisk-test-harness.img.
+$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+ $(call pretty,"Target boot test harness image: $@")
+ $(MKBOOTIMG) $(INTERNAL_TEST_HARNESS_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$@))
+
+.PHONY: bootimage_test_harness-nodeps
+bootimage_test_harness-nodeps: $(MKBOOTIMG)
+ echo "make $@: ignoring dependencies"
+ $(MKBOOTIMG) $(INTERNAL_TEST_HARNESS_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET)
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET)))
+
+endif # TARGET_NO_KERNEL
+endif # BOARD_BUILD_SYSTEM_ROOT_IMAGE is not true
+
+# -----------------------------------------------------------------
# system image
#
# Remove overridden packages from $(ALL_PDK_FUSION_FILES)
@@ -2294,8 +2371,7 @@
INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
$(ALL_GENERATED_SOURCES) \
$(ALL_DEFAULT_INSTALLED_MODULES) \
- $(PDK_FUSION_SYSIMG_FILES) \
- $(RECOVERY_RESOURCE_ZIP)) \
+ $(PDK_FUSION_SYSIMG_FILES)) \
$(PDK_FUSION_SYMLINK_STAMP))
FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
@@ -3213,29 +3289,55 @@
$(error BOARD_AVB_VBMETA_SYSTEM and BOARD_AVB_VBMETA_VENDOR cannot have duplicates)
endif
+# When building a standalone recovery image for non-A/B devices, recovery image must be self-signed
+# to be verified independently, and cannot be chained into vbmeta.img. See the link below for
+# details.
+ifneq ($(AB_OTA_UPDATER),true)
+ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
+$(if $(BOARD_AVB_RECOVERY_KEY_PATH),,\
+ $(error BOARD_AVB_RECOVERY_KEY_PATH must be defined for non-A/B devices. \
+ See https://android.googlesource.com/platform/external/avb/+/master/README.md#booting-into-recovery))
+endif
+endif
+
# Appends os version and security patch level as a AVB property descriptor
BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.system.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
--prop com.android.build.system.os_version:$(PLATFORM_VERSION) \
--prop com.android.build.system.security_patch:$(PLATFORM_SECURITY_PATCH)
BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.product.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
--prop com.android.build.product.os_version:$(PLATFORM_VERSION) \
--prop com.android.build.product.security_patch:$(PLATFORM_SECURITY_PATCH)
BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.system_ext.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
--prop com.android.build.system_ext.os_version:$(PLATFORM_VERSION) \
--prop com.android.build.system_ext.security_patch:$(PLATFORM_SECURITY_PATCH)
BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
--prop com.android.build.boot.os_version:$(PLATFORM_VERSION)
+BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.vendor_boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
+
+BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.recovery.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE)
+
BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.vendor.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
--prop com.android.build.vendor.os_version:$(PLATFORM_VERSION)
BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS += \
+ --prop com.android.build.odm.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
--prop com.android.build.odm.os_version:$(PLATFORM_VERSION)
+BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.dtbo.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE)
+
# The following vendor- and odm-specific images needs explicit SPL set per board.
ifdef BOOT_SECURITY_PATCH
BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
@@ -3282,8 +3384,11 @@
$(eval $(_signing_args) := \
--algorithm $($(_signing_algorithm)) --key $($(_key_path)))
-$(eval INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --chain_partition $(part):$($(_rollback_index_location)):$(AVB_CHAIN_KEY_DIR)/$(part).avbpubkey)
+# The recovery partition in non-A/B devices should be verified separately. Skip adding the chain
+# partition descriptor for recovery partition into vbmeta.img.
+$(if $(or $(filter true,$(AB_OTA_UPDATER)),$(filter-out recovery,$(part))),\
+ $(eval INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+ --chain_partition $(part):$($(_rollback_index_location)):$(AVB_CHAIN_KEY_DIR)/$(part).avbpubkey))
# Set rollback_index via footer args for non-chained vbmeta image. Chained vbmeta image will pick up
# the index via a separate flag (e.g. BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX).
@@ -3893,6 +3998,9 @@
ifeq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
$(hide) echo "full_recovery_image=true" >> $@
endif
+ifdef BOARD_USES_VENDORIMAGE
+ $(hide) echo "board_uses_vendorimage=true" >> $@
+endif
ifeq ($(BOARD_AVB_ENABLE),true)
$(hide) echo "avb_enable=true" >> $@
$(hide) echo "avb_vbmeta_key_path=$(BOARD_AVB_KEY_PATH)" >> $@
@@ -4032,6 +4140,18 @@
$(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
$(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
endif
+
+ # Not checking in board_config.mk, since AB_OTA_PARTITIONS may be updated in Android.mk (e.g. to
+ # additionally include radio or bootloader partitions).
+ ifeq ($(AB_OTA_PARTITIONS),)
+ $(error AB_OTA_PARTITIONS must be defined when using AB_OTA_UPDATER)
+ endif
+endif
+
+ifneq ($(AB_OTA_PARTITIONS),)
+ ifneq ($(AB_OTA_UPDATER),true)
+ $(error AB_OTA_UPDATER must be true when defining AB_OTA_PARTITIONS)
+ endif
endif
# Run fs_config while creating the target files package
@@ -4299,10 +4419,8 @@
$(zip_root)/META/$(notdir $(PRODUCT_ODM_BASE_FS_PATH))
endif
ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
-ifdef BUILDING_SYSTEM_IMAGE
$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
$(MAKE_RECOVERY_PATCH) $(zip_root) $(zip_root)
-endif # BUILDING_SYSTEM_IMAGE
endif
ifeq ($(AB_OTA_UPDATER),true)
@# When using the A/B updater, include the updater config files in the zip.
@@ -4420,9 +4538,8 @@
ifeq ($(PRODUCT_VIRTUAL_AB_OTA),true)
echo "virtual_ab=true" >> $(zip_root)/META/dynamic_partitions_info.txt
endif # PRODUCT_VIRTUAL_AB_OTA
- @# TODO(b/134525174): Remove `-r` after addressing the issue with recovery patch generation.
- $(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
- $(ADD_IMG_TO_TARGET_FILES) -a -r -v -p $(HOST_OUT) $(zip_root)
+ PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
+ $(ADD_IMG_TO_TARGET_FILES) -a -v -p $(HOST_OUT) $(zip_root)
ifeq ($(BUILD_QEMU_IMAGES),true)
$(hide) AVBTOOL=$(AVBTOOL) $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH) $(zip_root)/IMAGES/vbmeta.img \
$(zip_root)/IMAGES/system.img $(zip_root)/IMAGES/VerifiedBootParams.textproto
diff --git a/core/binary.mk b/core/binary.mk
index d9763f9..51259b2 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -120,8 +120,6 @@
$(error $(LOCAL_PATH): LOCAL_SDK_VERSION cannot be used in host module)
endif
- my_cflags += -D__ANDROID_NDK__
-
# Make sure we've built the NDK.
my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
diff --git a/core/board_config.mk b/core/board_config.mk
index 242012f..4c128f1 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -86,6 +86,8 @@
_build_broken_var_list := \
BUILD_BROKEN_DUP_RULES \
+ BUILD_BROKEN_PREBUILT_ELF_FILES \
+ BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
BUILD_BROKEN_USES_NETWORK \
_build_broken_var_list += \
@@ -262,6 +264,7 @@
# Now we can substitute with the real value of TARGET_COPY_OUT_DEBUG_RAMDISK
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk/first_stage_ramdisk
+TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk/first_stage_ramdisk
endif
###########################################
@@ -504,9 +507,9 @@
###########################################
# Now we can substitute with the real value of TARGET_COPY_OUT_ODM
ifeq ($(TARGET_COPY_OUT_ODM),$(_odm_path_placeholder))
- TARGET_COPY_OUT_ODM := vendor/odm
-else ifeq ($(filter odm vendor/odm,$(TARGET_COPY_OUT_ODM)),)
- $(error TARGET_COPY_OUT_ODM must be either 'odm' or 'vendor/odm', seeing '$(TARGET_COPY_OUT_ODM)'.)
+ TARGET_COPY_OUT_ODM := $(TARGET_COPY_OUT_VENDOR)/odm
+else ifeq ($(filter odm system/vendor/odm vendor/odm,$(TARGET_COPY_OUT_ODM)),)
+ $(error TARGET_COPY_OUT_ODM must be either 'odm', 'system/vendor/odm' or 'vendor/odm', seeing '$(TARGET_COPY_OUT_ODM)'.)
endif
PRODUCT_COPY_FILES := $(subst $(_odm_path_placeholder),$(TARGET_COPY_OUT_ODM),$(PRODUCT_COPY_FILES))
@@ -583,8 +586,16 @@
# APEXes are by default flattened, i.e. non-updatable.
# It can be unflattened (and updatable) by inheriting from
# updatable_apex.mk
-ifeq (,$(TARGET_FLATTEN_APEX))
-TARGET_FLATTEN_APEX := true
+#
+# APEX flattening can also be forcibly enabled (resp. disabled) by
+# setting OVERRIDE_TARGET_FLATTEN_APEX to true (resp. false), e.g. by
+# setting the OVERRIDE_TARGET_FLATTEN_APEX environment variable.
+ifdef OVERRIDE_TARGET_FLATTEN_APEX
+ TARGET_FLATTEN_APEX := $(OVERRIDE_TARGET_FLATTEN_APEX)
+else
+ ifeq (,$(TARGET_FLATTEN_APEX))
+ TARGET_FLATTEN_APEX := true
+ endif
endif
ifeq (,$(TARGET_BUILD_APPS))
diff --git a/core/check_elf_file.mk b/core/check_elf_file.mk
index 0faaadd..7a5de67 100644
--- a/core/check_elf_file.mk
+++ b/core/check_elf_file.mk
@@ -38,12 +38,18 @@
$<
$(hide) touch $@
-ifneq ($(PRODUCT_CHECK_ELF_FILES)$(CHECK_ELF_FILES),)
ifneq ($(strip $(LOCAL_CHECK_ELF_FILES)),false)
+ifneq ($(strip $(BUILD_BROKEN_PREBUILT_ELF_FILES)),true)
+# TODO(b/141176116): Remove the PRODUCT_CHECK_ELF_FILES condition below and
+# cover `make droid` targets after everything goes well with `make checkbuild`
+# targets.
+ifneq ($(PRODUCT_CHECK_ELF_FILES)$(CHECK_ELF_FILES),)
$(LOCAL_BUILT_MODULE): $(check_elf_files_stamp)
-check-elf-files: $(check_elf_files_stamp)
-endif # LOCAL_CHECK_ELF_FILES
endif # PRODUCT_CHECK_ELF_FILES or CHECK_ELF_FILES
+check-elf-files: $(check_elf_files_stamp)
+endif # BUILD_BROKEN_PREBUILT_ELF_FILES
+endif # LOCAL_CHECK_ELF_FILES
+
endif # SHARED_LIBRARIES, EXECUTABLES, NATIVE_TESTS
endif # !LOCAL_IS_HOST_MODULE
diff --git a/core/config.mk b/core/config.mk
index d120d61..241ac8d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -630,9 +630,11 @@
USE_OPENJDK9 := true
ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),)
-TARGET_OPENJDK9 :=
+TARGET_OPENJDK9 := true
else ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),true)
TARGET_OPENJDK9 := true
+else ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),false)
+TARGET_OPENJDK9 :=
endif
# Path to tools.jar
@@ -1171,8 +1173,10 @@
systemotherimage-nodeps \
ramdisk-nodeps \
ramdisk_debug-nodeps \
+ ramdisk_test_harness-nodeps \
bootimage-nodeps \
bootimage_debug-nodeps \
+ bootimage_test_harness-nodeps \
recoveryimage-nodeps \
vbmetaimage-nodeps \
product-graph dump-products
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index 8e4a46c..95b1090 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -33,12 +33,6 @@
endif
endif
-# Yes, this is actually what the clang driver does.
-linux_dynamic_gcclibs := -lgcc_s -lgcc -lc -lgcc_s -lgcc
-linux_static_gcclibs := -Wl,--start-group -lgcc -lgcc_eh -lc -Wl,--end-group
-darwin_dynamic_gcclibs := -lc -lSystem
-darwin_static_gcclibs := NO_STATIC_HOST_BINARIES_ON_DARWIN
-
my_link_type := dynamic
ifdef LOCAL_IS_HOST_MODULE
ifneq (,$(BUILD_HOST_static))
@@ -79,8 +73,7 @@
ifdef LOCAL_IS_HOST_MODULE
my_cppflags += -nostdinc++
- my_ldflags += -nodefaultlibs
- my_cxx_ldlibs += $($($(my_prefix)OS)_$(my_link_type)_gcclibs)
+ my_ldflags += -nostdlib++
else
my_static_libraries += libc++demangle
ifeq (arm,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
@@ -99,8 +92,7 @@
else ifeq ($(my_cxx_stl),none)
ifdef LOCAL_IS_HOST_MODULE
my_cppflags += -nostdinc++
- my_ldflags += -nodefaultlibs
- my_cxx_ldlibs += $($($(my_prefix)OS)_$(my_link_type)_gcclibs)
+ my_ldflags += -nostdlib++
endif
else
$(error $(LOCAL_PATH): $(LOCAL_MODULE): $(my_cxx_stl) is not a supported STL.)
diff --git a/core/definitions.mk b/core/definitions.mk
index 7450d4f..a8bf4d5 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2279,7 +2279,7 @@
# Align STORED entries of a package on 4-byte boundaries to make them easier to mmap.
#
define align-package
-$(hide) if ! $(ZIPALIGN) -c $(ZIPALIGN_PAGE_ALIGN_FLAGS) 4 $@ >/dev/null ; then \
+$(hide) if ! $(ZIPALIGN) -c -p 4 $@ >/dev/null ; then \
mv $@ $@.unaligned; \
$(ZIPALIGN) \
-f \
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 69eaea1..cfe918f 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -4,7 +4,8 @@
DEX_PREOPT_DEFAULT ?= true
# The default filter for which files go into the system_other image (if it is
-# being used). To bundle everything one should set this to '%'
+# being used). Note that each pattern p here matches both '/<p>' and /system/<p>'.
+# To bundle everything one should set this to '%'.
SYSTEM_OTHER_ODEX_FILTER ?= \
app/% \
priv-app/% \
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 5a615ac..0c58cd6 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -228,6 +228,7 @@
TARGET_COPY_OUT_OEM := oem
TARGET_COPY_OUT_RAMDISK := ramdisk
TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk
+TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk
TARGET_COPY_OUT_ROOT := root
TARGET_COPY_OUT_RECOVERY := recovery
# The directory used for optional partitions depend on the BoardConfig, so
@@ -825,6 +826,7 @@
TARGET_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_RAMDISK)
TARGET_RAMDISK_OUT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)
TARGET_DEBUG_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DEBUG_RAMDISK)
+TARGET_TEST_HARNESS_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_TEST_HARNESS_RAMDISK)
TARGET_VENDOR_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR_RAMDISK)
diff --git a/core/main.mk b/core/main.mk
index d2cc47b..29c5a4c 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -193,17 +193,7 @@
# The pdk (Platform Development Kit) build
include build/make/core/pdk_config.mk
-#
# -----------------------------------------------------------------
-# Enable dynamic linker warnings for userdebug, eng and non-REL builds
-ifneq ($(TARGET_BUILD_VARIANT),user)
- ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
-else
-# Enable it for user builds as long as they are not final.
-ifneq ($(PLATFORM_VERSION_CODENAME),REL)
- ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
-endif
-endif
ADDITIONAL_BUILD_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
@@ -230,6 +220,9 @@
ADDITIONAL_BUILD_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
endif
+# Define ro.sanitize.<name> properties for all global sanitizers.
+ADDITIONAL_BUILD_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
+
# Sets the default value of ro.postinstall.fstab.prefix to /system.
# Device board config should override the value to /product when needed by:
#
@@ -1148,6 +1141,7 @@
libdt_socket.so \
libicui18n.so \
libicuuc.so \
+ libicu_jni.so \
libjavacore.so \
libjdwp.so \
libm.so \
@@ -1500,6 +1494,9 @@
.PHONY: ramdisk_debug
ramdisk_debug: $(INSTALLED_DEBUG_RAMDISK_TARGET)
+.PHONY: ramdisk_test_harness
+ramdisk_test_harness: $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+
.PHONY: userdataimage
userdataimage: $(INSTALLED_USERDATAIMAGE_TARGET)
@@ -1540,6 +1537,9 @@
.PHONY: bootimage_debug
bootimage_debug: $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
+.PHONY: bootimage_test_harness
+bootimage_test_harness: $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET)
+
.PHONY: vbmetaimage
vbmetaimage: $(INSTALLED_VBMETAIMAGE_TARGET)
@@ -1707,6 +1707,10 @@
$(INSTALLED_DEBUG_RAMDISK_TARGET) \
$(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
)
+ $(call dist-for-goals, bootimage_test_harness, \
+ $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
+ $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET) \
+ )
endif
ifeq ($(EMMA_INSTRUMENT),true)
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index 09eb419..9e3f0d3 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -136,7 +136,7 @@
ifndef LOCAL_IS_HOST_MODULE
ifdef LOCAL_SOONG_UNSTRIPPED_BINARY
- ifneq ($(LOCAL_VNDK_DEPEND_ON_CORE_VARIANT),true)
+ ifneq ($(LOCAL_UNINSTALLABLE_MODULE),true)
my_symbol_path := $(if $(LOCAL_SOONG_SYMBOL_PATH),$(LOCAL_SOONG_SYMBOL_PATH),$(my_module_path))
# Store a copy with symbols for symbolic debugging
my_unstripped_path := $(TARGET_OUT_UNSTRIPPED)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_symbol_path))
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 30890c0..ff5fb42 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -250,7 +250,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2019-09-05
+ PLATFORM_SECURITY_PATCH := 2019-10-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
diff --git a/envsetup.sh b/envsetup.sh
index f91b820..f0c6b9b 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -524,7 +524,7 @@
export TARGET_BUILD_VARIANT=$default_value
elif (echo -n $ANSWER | grep -q -e "^[0-9][0-9]*$") ; then
if [ "$ANSWER" -le "${#VARIANT_CHOICES[@]}" ] ; then
- export TARGET_BUILD_VARIANT=${VARIANT_CHOICES[$(($ANSWER-1))]}
+ export TARGET_BUILD_VARIANT=${VARIANT_CHOICES[@]:$(($ANSWER-1)):1}
fi
else
if check_variant $ANSWER
@@ -580,7 +580,7 @@
local i=1
local choice
- for choice in $choices
+ for choice in $(echo $choices)
do
echo " $i. $choice"
i=$(($i+1))
@@ -1295,10 +1295,10 @@
echo "Invalid choice"
continue
fi
- pathname=${lines[$(($choice-1))]}
+ pathname=${lines[@]:$(($choice-1)):1}
done
else
- pathname=${lines[0]}
+ pathname=${lines[@]:0:1}
fi
\cd $T/$pathname
}
diff --git a/target/board/mainline_arm64/BoardConfig.mk b/target/board/mainline_arm64/BoardConfig.mk
index 70505f4..7cb2609 100644
--- a/target/board/mainline_arm64/BoardConfig.mk
+++ b/target/board/mainline_arm64/BoardConfig.mk
@@ -32,5 +32,11 @@
AB_OTA_UPDATER := true
AB_OTA_PARTITIONS := system
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+
+# Mainline devices support apex
+# TODO: move this to BoardConfigMainlineCommon. Currently, GSI wants flattened
+# apexes, but emulator wants .apex files, preventing this.
+TARGET_FLATTEN_APEX := false
diff --git a/target/board/mainline_x86/BoardConfig.mk b/target/board/mainline_x86/BoardConfig.mk
new file mode 100644
index 0000000..a20d17c
--- /dev/null
+++ b/target/board/mainline_x86/BoardConfig.mk
@@ -0,0 +1,35 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+TARGET_ARCH := x86
+TARGET_ARCH_VARIANT := x86
+TARGET_CPU_ABI := x86
+
+include build/make/target/board/BoardConfigMainlineCommon.mk
+
+TARGET_NO_KERNEL := true
+
+# Build generic A/B format system-only OTA.
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS := system
+
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+
+# Mainline devices support apex
+# TODO: move this to product makefile and use updatable_apex.mk
+TARGET_FLATTEN_APEX := false
diff --git a/target/board/mainline_x86_arm/BoardConfig.mk b/target/board/mainline_x86_arm/BoardConfig.mk
new file mode 100644
index 0000000..6b282c2
--- /dev/null
+++ b/target/board/mainline_x86_arm/BoardConfig.mk
@@ -0,0 +1,40 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+TARGET_ARCH := x86
+TARGET_ARCH_VARIANT := x86
+TARGET_CPU_ABI := x86
+
+TARGET_NATIVE_BRIDGE_ARCH := arm
+TARGET_NATIVE_BRIDGE_ARCH_VARIANT := armv7-a-neon
+TARGET_NATIVE_BRIDGE_CPU_VARIANT := generic
+TARGET_NATIVE_BRIDGE_ABI := armeabi-v7a armeabi
+
+include build/make/target/board/BoardConfigMainlineCommon.mk
+
+TARGET_NO_KERNEL := true
+
+# Build generic A/B format system-only OTA.
+AB_OTA_UPDATER := true
+AB_OTA_PARTITIONS := system
+
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+
+# Mainline devices support apex
+# TODO: move this to product makefile and use updatable_apex.mk
+TARGET_FLATTEN_APEX := false
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 174916d..ca4ed2c 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -55,6 +55,8 @@
$(LOCAL_DIR)/gsi_arm64.mk \
$(LOCAL_DIR)/mainline_arm64.mk \
$(LOCAL_DIR)/mainline_system_arm64.mk \
+ $(LOCAL_DIR)/mainline_system_x86.mk \
+ $(LOCAL_DIR)/mainline_system_x86_arm.mk \
$(LOCAL_DIR)/sdk_arm64.mk \
$(LOCAL_DIR)/sdk.mk \
$(LOCAL_DIR)/sdk_phone_arm64.mk \
diff --git a/target/product/aosp_product.mk b/target/product/aosp_product.mk
index 8c87983..aefad82 100644
--- a/target/product/aosp_product.mk
+++ b/target/product/aosp_product.mk
@@ -21,21 +21,6 @@
# Default AOSP sounds
$(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk)
-# TODO(b/133643923): Clean up the mainline whitelist
-PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
- system/app/messaging/messaging.apk \
- system/app/messaging/oat/% \
- system/app/WAPPushManager/WAPPushManager.apk \
- system/app/WAPPushManager/oat/% \
- system/bin/healthd \
- system/etc/init/healthd.rc \
- system/etc/vintf/manifest/manifest_healthd.xml \
- system/lib/libframesequence.so \
- system/lib/libgiftranscode.so \
- system/lib64/libframesequence.so \
- system/lib64/libgiftranscode.so \
-
-
# Additional settings used in all AOSP builds
PRODUCT_PRODUCT_PROPERTIES += \
ro.config.ringtone=Ring_Synth_04.ogg \
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 749d2c2..2ed550c 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -17,7 +17,6 @@
# Base modules and settings for the product partition.
PRODUCT_PACKAGES += \
group_product \
- healthd \
ModuleMetadata \
passwd_product \
product_compatibility_matrix.xml \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 4e33d23..7cc3270 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -29,7 +29,6 @@
android.test.mock \
android.test.runner \
apexd \
- applypatch \
appops \
app_process \
appwidget \
@@ -49,6 +48,7 @@
cgroups.json \
charger \
cmd \
+ com.android.apex.cts.shim.v1_prebuilt \
com.android.conscrypt \
com.android.i18n \
com.android.location.provider \
@@ -174,8 +174,6 @@
libspeexresampler \
libsqlite \
libstagefright \
- libstagefright_amrnb_common \
- libstagefright_enc_common \
libstagefright_foundation \
libstagefright_omx \
libstdc++ \
@@ -366,6 +364,7 @@
logpersist.start \
logtagd.rc \
procrank \
+ remount \
showmap \
sqlite3 \
ss \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 9d79e0f..1657e71 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -75,3 +75,8 @@
# VINTF data for vendor image
PRODUCT_PACKAGES += \
device_compatibility_matrix.xml \
+
+# Packages to update the recovery partition, which will be installed on
+# /vendor. TODO(b/141648565): Don't install these unless they're needed.
+PRODUCT_PACKAGES += \
+ applypatch
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 7683389..8fed53c 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -42,8 +42,6 @@
VNDK-SP: libhardware.so
VNDK-SP: libhidlbase.so
VNDK-SP: libhidlmemory.so
-VNDK-SP: libhidltransport.so
-VNDK-SP: libhwbinder.so
VNDK-SP: libion.so
VNDK-SP: libjsoncpp.so
VNDK-SP: liblzma.so
@@ -79,6 +77,7 @@
VNDK-core: android.hardware.bluetooth.audio@2.0.so
VNDK-core: android.hardware.bluetooth@1.0.so
VNDK-core: android.hardware.boot@1.0.so
+VNDK-core: android.hardware.boot@1.1.so
VNDK-core: android.hardware.broadcastradio@1.0.so
VNDK-core: android.hardware.broadcastradio@1.1.so
VNDK-core: android.hardware.broadcastradio@2.0.so
@@ -138,6 +137,7 @@
VNDK-core: android.hardware.neuralnetworks@1.0.so
VNDK-core: android.hardware.neuralnetworks@1.1.so
VNDK-core: android.hardware.neuralnetworks@1.2.so
+VNDK-core: android.hardware.neuralnetworks@1.3.so
VNDK-core: android.hardware.nfc@1.0.so
VNDK-core: android.hardware.nfc@1.1.so
VNDK-core: android.hardware.nfc@1.2.so
@@ -181,6 +181,7 @@
VNDK-core: android.hardware.vibrator@1.1.so
VNDK-core: android.hardware.vibrator@1.2.so
VNDK-core: android.hardware.vibrator@1.3.so
+VNDK-core: android.hardware.vibrator@1.4.so
VNDK-core: android.hardware.vr@1.0.so
VNDK-core: android.hardware.weaver@1.0.so
VNDK-core: android.hardware.wifi.hostapd@1.0.so
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index 43bc45f..cd6a0f7 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -64,8 +64,8 @@
# For ringtones that rely on forward lock encryption
PRODUCT_PACKAGES += libfwdlockengine
-# System libraries commonly depended on by things on the product partition.
-# This list will be pruned periodically.
+# System libraries commonly depended on by things on the system_ext or product partitions.
+# These lists will be pruned periodically.
PRODUCT_PACKAGES += \
android.hardware.biometrics.fingerprint@2.1 \
android.hardware.radio@1.0 \
@@ -78,6 +78,7 @@
android.hardware.secure_element@1.0 \
android.hardware.wifi@1.0 \
libaudio-resampler \
+ libaudiohal \
libdrm \
liblogwrap \
liblz4 \
@@ -85,6 +86,13 @@
libnl \
libprotobuf-cpp-full \
+# These libraries are empty and have been combined into libhidlbase, but are still depended
+# on by things off /system.
+# TODO(b/135686713): remove these
+PRODUCT_PACKAGES += \
+ libhidltransport \
+ libhwbinder \
+
# Camera service uses 'libdepthphoto' for adding dynamic depth
# metadata inside depth jpegs.
PRODUCT_PACKAGES += \
@@ -105,8 +113,7 @@
# Enable dynamic partition size
PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
-PRODUCT_PACKAGES += \
- com.android.apex.cts.shim.v1_prebuilt
+PRODUCT_ENFORCE_RRO_TARGETS := *
PRODUCT_NAME := mainline_system
PRODUCT_BRAND := generic
diff --git a/target/product/mainline_system_x86.mk b/target/product/mainline_system_x86.mk
new file mode 100644
index 0000000..ac33068
--- /dev/null
+++ b/target/product/mainline_system_x86.mk
@@ -0,0 +1,40 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call enforce-product-packages-exist,)
+
+# Enable mainline checking
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
+
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+
+PRODUCT_NAME := mainline_system_x86
+PRODUCT_DEVICE := mainline_x86
+PRODUCT_BRAND := generic
+PRODUCT_SHIPPING_API_LEVEL := 28
+PRODUCT_RESTRICT_VENDOR_FILES := all
diff --git a/target/product/mainline_system_x86_arm.mk b/target/product/mainline_system_x86_arm.mk
new file mode 100644
index 0000000..0ed86cc
--- /dev/null
+++ b/target/product/mainline_system_x86_arm.mk
@@ -0,0 +1,40 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call enforce-product-packages-exist,)
+
+# Enable mainline checking
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
+
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+
+PRODUCT_NAME := mainline_system_x86_arm
+PRODUCT_DEVICE := mainline_x86_arm
+PRODUCT_BRAND := generic
+PRODUCT_SHIPPING_API_LEVEL := 28
+PRODUCT_RESTRICT_VENDOR_FILES := all
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 3631cfd..d6a8b53 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -80,30 +80,3 @@
$(extra_recovery_keys)
$(SOONG_ZIP) -o $@ -j \
$(foreach key_file, $(PRIVATE_CERT) $(PRIVATE_EXTRA_RECOVERY_KEYS), -f $(key_file))
-
-
-#######################################
-# update_engine_payload_key, used by update_engine. We use the same key as otacerts but in RSA
-# public key format.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := update_engine_payload_key
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_STEM := update-payload-key.pub.pem
-LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/update_engine
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
- openssl x509 -pubkey -noout -in $< > $@
-
-
-#######################################
-# update_engine_payload_key for recovery image, used by update_engine.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := update_engine_payload_key.recovery
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_STEM := update-payload-key.pub.pem
-LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/update_engine
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
- openssl x509 -pubkey -noout -in $< > $@
diff --git a/target/product/updatable_apex.mk b/target/product/updatable_apex.mk
index a9f4baf..bdaf545 100644
--- a/target/product/updatable_apex.mk
+++ b/target/product/updatable_apex.mk
@@ -16,5 +16,7 @@
# Inherit this when the target needs to support updating APEXes
-PRODUCT_PROPERTY_OVERRIDES := ro.apex.updatable=true
-TARGET_FLATTEN_APEX := false
+ifneq ($(OVERRIDE_TARGET_FLATTEN_APEX),true)
+ PRODUCT_PROPERTY_OVERRIDES := ro.apex.updatable=true
+ TARGET_FLATTEN_APEX := false
+endif
diff --git a/tools/releasetools/OWNERS b/tools/releasetools/OWNERS
index 766adb4..a8295d4 100644
--- a/tools/releasetools/OWNERS
+++ b/tools/releasetools/OWNERS
@@ -1,2 +1,3 @@
-tbao@google.com
+nhdo@google.com
xunchang@google.com
+zhaojiac@google.com
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index a5816bc..8249915 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -165,9 +165,12 @@
else:
common.ZipWrite(output_zip, output_file, arc_name)
- if (OPTIONS.rebuild_recovery and recovery_img is not None and
- boot_img is not None):
- logger.info("Building new recovery patch")
+ board_uses_vendorimage = OPTIONS.info_dict.get(
+ "board_uses_vendorimage") == "true"
+
+ if (OPTIONS.rebuild_recovery and not board_uses_vendorimage and
+ recovery_img is not None and boot_img is not None):
+ logger.info("Building new recovery patch on system at system/vendor")
common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
boot_img, info_dict=OPTIONS.info_dict)
@@ -190,7 +193,7 @@
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system_other", img)
-def AddVendor(output_zip):
+def AddVendor(output_zip, recovery_img=None, boot_img=None):
"""Turn the contents of VENDOR into a vendor image and store in it
output_zip."""
@@ -199,6 +202,27 @@
logger.info("vendor.img already exists; no need to rebuild...")
return img.name
+ def output_sink(fn, data):
+ ofile = open(os.path.join(OPTIONS.input_tmp, "VENDOR", fn), "w")
+ ofile.write(data)
+ ofile.close()
+
+ if output_zip:
+ arc_name = "VENDOR/" + fn
+ if arc_name in output_zip.namelist():
+ OPTIONS.replace_updated_files_list.append(arc_name)
+ else:
+ common.ZipWrite(output_zip, ofile.name, arc_name)
+
+ board_uses_vendorimage = OPTIONS.info_dict.get(
+ "board_uses_vendorimage") == "true"
+
+ if (OPTIONS.rebuild_recovery and board_uses_vendorimage and
+ recovery_img is not None and boot_img is not None):
+ logger.info("Building new recovery patch on vendor")
+ common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
+ boot_img, info_dict=OPTIONS.info_dict)
+
block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.map")
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "vendor", img,
block_list=block_list)
@@ -293,11 +317,6 @@
logger.info("creating %s.img...", what)
image_props = build_image.ImagePropFromGlobalDict(info_dict, what)
- fstab = info_dict["fstab"]
- mount_point = "/" + what
- if fstab and mount_point in fstab:
- image_props["fs_type"] = fstab[mount_point].fs_type
-
image_props["timestamp"] = FIXED_FILE_TIMESTAMP
if what == "system":
@@ -318,13 +337,8 @@
# Use repeatable ext4 FS UUID and hash_seed UUID (based on partition name and
# build fingerprint).
- uuid_seed = what + "-"
- if "build.prop" in info_dict:
- build_prop = info_dict["build.prop"]
- if "ro.build.fingerprint" in build_prop:
- uuid_seed += build_prop["ro.build.fingerprint"]
- elif "ro.build.thumbprint" in build_prop:
- uuid_seed += build_prop["ro.build.thumbprint"]
+ build_info = common.BuildInfo(info_dict)
+ uuid_seed = what + "-" + build_info.fingerprint
image_props["uuid"] = str(uuid.uuid5(uuid.NAMESPACE_URL, uuid_seed))
hash_seed = "hash_seed-" + uuid_seed
image_props["hash_seed"] = str(uuid.uuid5(uuid.NAMESPACE_URL, hash_seed))
@@ -382,9 +396,6 @@
else:
user_dir = common.MakeTempDir()
- fstab = OPTIONS.info_dict["fstab"]
- if fstab:
- image_props["fs_type"] = fstab["/data"].fs_type
build_image.BuildImage(user_dir, image_props, img.name)
common.CheckSize(img.name, "userdata.img", OPTIONS.info_dict)
@@ -471,10 +482,6 @@
image_props["timestamp"] = FIXED_FILE_TIMESTAMP
user_dir = common.MakeTempDir()
-
- fstab = OPTIONS.info_dict["fstab"]
- if fstab:
- image_props["fs_type"] = fstab["/cache"].fs_type
build_image.BuildImage(user_dir, image_props, img.name)
common.CheckSize(img.name, "cache.img", OPTIONS.info_dict)
@@ -716,7 +723,7 @@
# A map between partition names and their paths, which could be used when
# generating AVB vbmeta image.
- partitions = dict()
+ partitions = {}
def banner(s):
logger.info("\n\n++++ %s ++++\n\n", s)
@@ -781,7 +788,8 @@
if has_vendor:
banner("vendor")
- partitions['vendor'] = AddVendor(output_zip)
+ partitions['vendor'] = AddVendor(
+ output_zip, recovery_img=recovery_image, boot_img=boot_image)
if has_product:
banner("product")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 974d4b0..031db1d 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -47,22 +47,23 @@
class Options(object):
+
def __init__(self):
- base_out_path = os.getenv('OUT_DIR_COMMON_BASE')
- if base_out_path is None:
- base_search_path = "out"
- else:
- base_search_path = os.path.join(base_out_path,
- os.path.basename(os.getcwd()))
+ # Set up search path, in order to find framework/ and lib64/. At the time of
+ # running this function, user-supplied search path (`--path`) hasn't been
+ # available. So the value set here is the default, which might be overridden
+ # by commandline flag later.
+ exec_path = sys.argv[0]
+ if exec_path.endswith('.py'):
+ script_name = os.path.basename(exec_path)
+ # logger hasn't been initialized yet at this point. Use print to output
+ # warnings.
+ print(
+ 'Warning: releasetools script should be invoked as hermetic Python '
+ 'executable -- build and run `{}` directly.'.format(script_name[:-3]),
+ file=sys.stderr)
+ self.search_path = os.path.realpath(os.path.join(exec_path, '..'))
- # Python >= 3.3 returns 'linux', whereas Python 2.7 gives 'linux2'.
- platform_search_path = {
- "linux": os.path.join(base_search_path, "host/linux-x86"),
- "linux2": os.path.join(base_search_path, "host/linux-x86"),
- "darwin": os.path.join(base_search_path, "host/darwin-x86"),
- }
-
- self.search_path = platform_search_path.get(sys.platform)
self.signapk_path = "framework/signapk.jar" # Relative to search_path
self.signapk_shared_library_path = "lib64" # Relative to search_path
self.extra_signapk_args = []
@@ -284,6 +285,225 @@
pass
+class BuildInfo(object):
+ """A class that holds the information for a given build.
+
+ This class wraps up the property querying for a given source or target build.
+ It abstracts away the logic of handling OEM-specific properties, and caches
+ the commonly used properties such as fingerprint.
+
+ There are two types of info dicts: a) build-time info dict, which is generated
+ at build time (i.e. included in a target_files zip); b) OEM info dict that is
+ specified at package generation time (via command line argument
+ '--oem_settings'). If a build doesn't use OEM-specific properties (i.e. not
+ having "oem_fingerprint_properties" in build-time info dict), all the queries
+ would be answered based on build-time info dict only. Otherwise if using
+ OEM-specific properties, some of them will be calculated from two info dicts.
+
+  Users can query properties in the same way as with a dict() (e.g.
+  info['fstab']), or query build properties via GetBuildProp() or
+
+ Attributes:
+ info_dict: The build-time info dict.
+ is_ab: Whether it's a build that uses A/B OTA.
+ oem_dicts: A list of OEM dicts.
+ oem_props: A list of OEM properties that should be read from OEM dicts; None
+ if the build doesn't use any OEM-specific property.
+ fingerprint: The fingerprint of the build, which would be calculated based
+ on OEM properties if applicable.
+ device: The device name, which could come from OEM dicts if applicable.
+ """
+
+ _RO_PRODUCT_RESOLVE_PROPS = ["ro.product.brand", "ro.product.device",
+ "ro.product.manufacturer", "ro.product.model",
+ "ro.product.name"]
+ _RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER = ["product", "odm", "vendor",
+ "system_ext", "system"]
+
+ def __init__(self, info_dict, oem_dicts=None):
+ """Initializes a BuildInfo instance with the given dicts.
+
+ Note that it only wraps up the given dicts, without making copies.
+
+ Arguments:
+ info_dict: The build-time info dict.
+ oem_dicts: A list of OEM dicts (which is parsed from --oem_settings). Note
+ that it always uses the first dict to calculate the fingerprint or the
+ device name. The rest would be used for asserting OEM properties only
+ (e.g. one package can be installed on one of these devices).
+
+ Raises:
+ ValueError: On invalid inputs.
+ """
+ self.info_dict = info_dict
+ self.oem_dicts = oem_dicts
+
+ self._is_ab = info_dict.get("ab_update") == "true"
+ self._oem_props = info_dict.get("oem_fingerprint_properties")
+
+ if self._oem_props:
+ assert oem_dicts, "OEM source required for this build"
+
+ # These two should be computed only after setting self._oem_props.
+ self._device = self.GetOemProperty("ro.product.device")
+ self._fingerprint = self.CalculateFingerprint()
+
+ # Sanity check the build fingerprint.
+ if (' ' in self._fingerprint or
+ any(ord(ch) > 127 for ch in self._fingerprint)):
+ raise ValueError(
+ 'Invalid build fingerprint: "{}". See the requirement in Android CDD '
+ '3.2.2. Build Parameters.'.format(self._fingerprint))
+
+ @property
+ def is_ab(self):
+ return self._is_ab
+
+ @property
+ def device(self):
+ return self._device
+
+ @property
+ def fingerprint(self):
+ return self._fingerprint
+
+ @property
+ def vendor_fingerprint(self):
+ return self._fingerprint_of("vendor")
+
+ @property
+ def product_fingerprint(self):
+ return self._fingerprint_of("product")
+
+ @property
+ def odm_fingerprint(self):
+ return self._fingerprint_of("odm")
+
+ def _fingerprint_of(self, partition):
+ if partition + ".build.prop" not in self.info_dict:
+ return None
+ build_prop = self.info_dict[partition + ".build.prop"]
+ if "ro." + partition + ".build.fingerprint" in build_prop:
+ return build_prop["ro." + partition + ".build.fingerprint"]
+ if "ro." + partition + ".build.thumbprint" in build_prop:
+ return build_prop["ro." + partition + ".build.thumbprint"]
+ return None
+
+ @property
+ def oem_props(self):
+ return self._oem_props
+
+ def __getitem__(self, key):
+ return self.info_dict[key]
+
+ def __setitem__(self, key, value):
+ self.info_dict[key] = value
+
+ def get(self, key, default=None):
+ return self.info_dict.get(key, default)
+
+ def items(self):
+ return self.info_dict.items()
+
+ def GetBuildProp(self, prop):
+ """Returns the inquired build property."""
+ if prop in BuildInfo._RO_PRODUCT_RESOLVE_PROPS:
+ return self._ResolveRoProductBuildProp(prop)
+
+ try:
+ return self.info_dict.get("build.prop", {})[prop]
+ except KeyError:
+ raise ExternalError("couldn't find %s in build.prop" % (prop,))
+
+ def _ResolveRoProductBuildProp(self, prop):
+ """Resolves the inquired ro.product.* build property"""
+ prop_val = self.info_dict.get("build.prop", {}).get(prop)
+ if prop_val:
+ return prop_val
+
+ source_order_val = self.info_dict.get("build.prop", {}).get(
+ "ro.product.property_source_order")
+ if source_order_val:
+ source_order = source_order_val.split(",")
+ else:
+ source_order = BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
+
+ # Check that all sources in ro.product.property_source_order are valid
+ if any([x not in BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
+ for x in source_order]):
+ raise ExternalError(
+ "Invalid ro.product.property_source_order '{}'".format(source_order))
+
+ for source in source_order:
+ source_prop = prop.replace(
+ "ro.product", "ro.product.{}".format(source), 1)
+ prop_val = self.info_dict.get(
+ "{}.build.prop".format(source), {}).get(source_prop)
+ if prop_val:
+ return prop_val
+
+ raise ExternalError("couldn't resolve {}".format(prop))
+
+ def GetVendorBuildProp(self, prop):
+ """Returns the inquired vendor build property."""
+ try:
+ return self.info_dict.get("vendor.build.prop", {})[prop]
+ except KeyError:
+ raise ExternalError(
+ "couldn't find %s in vendor.build.prop" % (prop,))
+
+ def GetOemProperty(self, key):
+ if self.oem_props is not None and key in self.oem_props:
+ return self.oem_dicts[0][key]
+ return self.GetBuildProp(key)
+
+ def CalculateFingerprint(self):
+ if self.oem_props is None:
+ try:
+ return self.GetBuildProp("ro.build.fingerprint")
+ except ExternalError:
+ return "{}/{}/{}:{}/{}/{}:{}/{}".format(
+ self.GetBuildProp("ro.product.brand"),
+ self.GetBuildProp("ro.product.name"),
+ self.GetBuildProp("ro.product.device"),
+ self.GetBuildProp("ro.build.version.release"),
+ self.GetBuildProp("ro.build.id"),
+ self.GetBuildProp("ro.build.version.incremental"),
+ self.GetBuildProp("ro.build.type"),
+ self.GetBuildProp("ro.build.tags"))
+ return "%s/%s/%s:%s" % (
+ self.GetOemProperty("ro.product.brand"),
+ self.GetOemProperty("ro.product.name"),
+ self.GetOemProperty("ro.product.device"),
+ self.GetBuildProp("ro.build.thumbprint"))
+
+ def WriteMountOemScript(self, script):
+ assert self.oem_props is not None
+ recovery_mount_options = self.info_dict.get("recovery_mount_options")
+ script.Mount("/oem", recovery_mount_options)
+
+ def WriteDeviceAssertions(self, script, oem_no_mount):
+ # Read the property directly if not using OEM properties.
+ if not self.oem_props:
+ script.AssertDevice(self.device)
+ return
+
+ # Otherwise assert OEM properties.
+ if not self.oem_dicts:
+ raise ExternalError(
+ "No OEM file provided to answer expected assertions")
+
+ for prop in self.oem_props.split():
+ values = []
+ for oem_dict in self.oem_dicts:
+ if prop in oem_dict:
+ values.append(oem_dict[prop])
+ if not values:
+ raise ExternalError(
+ "The OEM file is missing the property %s" % (prop,))
+ script.AssertOemProperty(prop, values, oem_no_mount)
+
+
def LoadInfoDict(input_file, repacking=False):
"""Loads the key/value pairs from the given input target_files.
@@ -393,37 +613,8 @@
makeint("boot_size")
makeint("fstab_version")
- # We changed recovery.fstab path in Q, from ../RAMDISK/etc/recovery.fstab to
- # ../RAMDISK/system/etc/recovery.fstab. LoadInfoDict() has to handle both
- # cases, since it may load the info_dict from an old build (e.g. when
- # generating incremental OTAs from that build).
- system_root_image = d.get("system_root_image") == "true"
- if d.get("no_recovery") != "true":
- recovery_fstab_path = "RECOVERY/RAMDISK/system/etc/recovery.fstab"
- if isinstance(input_file, zipfile.ZipFile):
- if recovery_fstab_path not in input_file.namelist():
- recovery_fstab_path = "RECOVERY/RAMDISK/etc/recovery.fstab"
- else:
- path = os.path.join(input_file, *recovery_fstab_path.split("/"))
- if not os.path.exists(path):
- recovery_fstab_path = "RECOVERY/RAMDISK/etc/recovery.fstab"
- d["fstab"] = LoadRecoveryFSTab(
- read_helper, d["fstab_version"], recovery_fstab_path, system_root_image)
-
- elif d.get("recovery_as_boot") == "true":
- recovery_fstab_path = "BOOT/RAMDISK/system/etc/recovery.fstab"
- if isinstance(input_file, zipfile.ZipFile):
- if recovery_fstab_path not in input_file.namelist():
- recovery_fstab_path = "BOOT/RAMDISK/etc/recovery.fstab"
- else:
- path = os.path.join(input_file, *recovery_fstab_path.split("/"))
- if not os.path.exists(path):
- recovery_fstab_path = "BOOT/RAMDISK/etc/recovery.fstab"
- d["fstab"] = LoadRecoveryFSTab(
- read_helper, d["fstab_version"], recovery_fstab_path, system_root_image)
-
- else:
- d["fstab"] = None
+ # Load recovery fstab if applicable.
+ d["fstab"] = _FindAndLoadRecoveryFstab(d, input_file, read_helper)
# Tries to load the build props for all partitions with care_map, including
# system and vendor.
@@ -438,18 +629,11 @@
read_helper, "{}/etc/build.prop".format(partition.upper()))
d["build.prop"] = d["system.build.prop"]
- # Set up the salt (based on fingerprint or thumbprint) that will be used when
- # adding AVB footer.
+ # Set up the salt (based on fingerprint) that will be used when adding AVB
+ # hash / hashtree footers.
if d.get("avb_enable") == "true":
- fp = None
- if "build.prop" in d:
- build_prop = d["build.prop"]
- if "ro.build.fingerprint" in build_prop:
- fp = build_prop["ro.build.fingerprint"]
- elif "ro.build.thumbprint" in build_prop:
- fp = build_prop["ro.build.thumbprint"]
- if fp:
- d["avb_salt"] = sha256(fp).hexdigest()
+ build_info = BuildInfo(d)
+ d["avb_salt"] = sha256(build_info.fingerprint).hexdigest()
return d
@@ -549,6 +733,47 @@
return d
+def _FindAndLoadRecoveryFstab(info_dict, input_file, read_helper):
+ """Finds the path to recovery fstab and loads its contents."""
+ # recovery fstab is only meaningful when installing an update via recovery
+ # (i.e. non-A/B OTA). Skip loading fstab if device used A/B OTA.
+ if info_dict.get('ab_update') == 'true':
+ return None
+
+ # We changed recovery.fstab path in Q, from ../RAMDISK/etc/recovery.fstab to
+ # ../RAMDISK/system/etc/recovery.fstab. This function has to handle both
+ # cases, since it may load the info_dict from an old build (e.g. when
+ # generating incremental OTAs from that build).
+ system_root_image = info_dict.get('system_root_image') == 'true'
+ if info_dict.get('no_recovery') != 'true':
+ recovery_fstab_path = 'RECOVERY/RAMDISK/system/etc/recovery.fstab'
+ if isinstance(input_file, zipfile.ZipFile):
+ if recovery_fstab_path not in input_file.namelist():
+ recovery_fstab_path = 'RECOVERY/RAMDISK/etc/recovery.fstab'
+ else:
+ path = os.path.join(input_file, *recovery_fstab_path.split('/'))
+ if not os.path.exists(path):
+ recovery_fstab_path = 'RECOVERY/RAMDISK/etc/recovery.fstab'
+ return LoadRecoveryFSTab(
+ read_helper, info_dict['fstab_version'], recovery_fstab_path,
+ system_root_image)
+
+ if info_dict.get('recovery_as_boot') == 'true':
+ recovery_fstab_path = 'BOOT/RAMDISK/system/etc/recovery.fstab'
+ if isinstance(input_file, zipfile.ZipFile):
+ if recovery_fstab_path not in input_file.namelist():
+ recovery_fstab_path = 'BOOT/RAMDISK/etc/recovery.fstab'
+ else:
+ path = os.path.join(input_file, *recovery_fstab_path.split('/'))
+ if not os.path.exists(path):
+ recovery_fstab_path = 'BOOT/RAMDISK/etc/recovery.fstab'
+ return LoadRecoveryFSTab(
+ read_helper, info_dict['fstab_version'], recovery_fstab_path,
+ system_root_image)
+
+ return None
+
+
def DumpInfoDict(d):
for k, v in sorted(d.items()):
logger.info("%-25s = (%s) %s", k, type(v).__name__, v)
@@ -629,7 +854,7 @@
cmd.extend(["--salt", avb_salt])
-def GetAvbPartitionArg(partition, image, info_dict = None):
+def GetAvbPartitionArg(partition, image, info_dict=None):
"""Returns the VBMeta arguments for partition.
It sets up the VBMeta argument by including the partition descriptor from the
@@ -649,12 +874,21 @@
# Check if chain partition is used.
key_path = info_dict.get("avb_" + partition + "_key_path")
- if key_path:
- chained_partition_arg = GetAvbChainedPartitionArg(partition, info_dict)
- return ["--chain_partition", chained_partition_arg]
- else:
+ if not key_path:
return ["--include_descriptors_from_image", image]
+ # For a non-A/B device, we don't chain /recovery nor include its descriptor
+ # into vbmeta.img. The recovery image will be configured on an independent
+ # boot chain, to be verified with AVB_SLOT_VERIFY_FLAGS_NO_VBMETA_PARTITION.
+ # See details at
+ # https://android.googlesource.com/platform/external/avb/+/master/README.md#booting-into-recovery.
+ if info_dict.get("ab_update") != "true" and partition == "recovery":
+ return []
+
+ # Otherwise chain the partition into vbmeta.
+ chained_partition_arg = GetAvbChainedPartitionArg(partition, info_dict)
+ return ["--chain_partition", chained_partition_arg]
+
def GetAvbChainedPartitionArg(partition, info_dict, key=None):
"""Constructs and returns the arg to build or verify a chained partition.
@@ -2535,13 +2769,25 @@
info_dict = OPTIONS.info_dict
full_recovery_image = info_dict.get("full_recovery_image") == "true"
+ board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true"
+
+ if board_uses_vendorimage:
+ # In this case, the output sink is rooted at VENDOR
+ recovery_img_path = "etc/recovery.img"
+ recovery_resource_dat_path = "VENDOR/etc/recovery-resource.dat"
+ sh_dir = "bin"
+ else:
+ # In this case the output sink is rooted at SYSTEM
+ recovery_img_path = "vendor/etc/recovery.img"
+ recovery_resource_dat_path = "SYSTEM/vendor/etc/recovery-resource.dat"
+ sh_dir = "vendor/bin"
if full_recovery_image:
- output_sink("etc/recovery.img", recovery_img.data)
+ output_sink(recovery_img_path, recovery_img.data)
else:
system_root_image = info_dict.get("system_root_image") == "true"
- path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
+ path = os.path.join(input_dir, recovery_resource_dat_path)
# With system-root-image, boot and recovery images will have mismatching
# entries (only recovery has the ramdisk entry) (Bug: 72731506). Use bsdiff
# to handle such a case.
@@ -2554,7 +2800,7 @@
if os.path.exists(path):
diff_program.append("-b")
diff_program.append(path)
- bonus_args = "--bonus /system/etc/recovery-resource.dat"
+ bonus_args = "--bonus /vendor/etc/recovery-resource.dat"
else:
bonus_args = ""
@@ -2571,10 +2817,16 @@
return
if full_recovery_image:
- sh = """#!/system/bin/sh
+
+ # Note that we use /vendor to refer to the recovery resources. This will
+ # work for a separate vendor partition mounted at /vendor or a
+ # /system/vendor subdirectory on the system partition, for which init will
+ # create a symlink from /vendor to /system/vendor.
+
+ sh = """#!/vendor/bin/sh
if ! applypatch --check %(type)s:%(device)s:%(size)d:%(sha1)s; then
applypatch \\
- --flash /system/etc/recovery.img \\
+ --flash /vendor/etc/recovery.img \\
--target %(type)s:%(device)s:%(size)d:%(sha1)s && \\
log -t recovery "Installing new recovery image: succeeded" || \\
log -t recovery "Installing new recovery image: failed"
@@ -2586,10 +2838,10 @@
'sha1': recovery_img.sha1,
'size': recovery_img.size}
else:
- sh = """#!/system/bin/sh
+ sh = """#!/vendor/bin/sh
if ! applypatch --check %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
applypatch %(bonus_args)s \\
- --patch /system/recovery-from-boot.p \\
+ --patch /vendor/recovery-from-boot.p \\
--source %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s \\
--target %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s && \\
log -t recovery "Installing new recovery image: succeeded" || \\
@@ -2607,9 +2859,9 @@
'recovery_device': recovery_device,
'bonus_args': bonus_args}
- # The install script location moved from /system/etc to /system/bin
- # in the L release.
- sh_location = "bin/install-recovery.sh"
+ # The install script location moved from /system/etc to /system/bin in the L
+ # release. In the R release it is in VENDOR/bin or SYSTEM/vendor/bin.
+ sh_location = os.path.join(sh_dir, "install-recovery.sh")
logger.info("putting script in %s", sh_location)
diff --git a/tools/releasetools/make_recovery_patch.py b/tools/releasetools/make_recovery_patch.py
index 725b355..1497d69 100644
--- a/tools/releasetools/make_recovery_patch.py
+++ b/tools/releasetools/make_recovery_patch.py
@@ -47,8 +47,17 @@
if not recovery_img or not boot_img:
sys.exit(0)
+ board_uses_vendorimage = OPTIONS.info_dict.get(
+ "board_uses_vendorimage") == "true"
+
+ if board_uses_vendorimage:
+ target_files_dir = "VENDOR"
+ else:
+ target_files_dir = "SYSTEM"
+
def output_sink(fn, data):
- with open(os.path.join(output_dir, "SYSTEM", *fn.split("/")), "wb") as f:
+ with open(os.path.join(output_dir, target_files_dir,
+ *fn.split("/")), "wb") as f:
f.write(data)
common.MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img)
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index ba70986..544f996 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -68,8 +68,7 @@
files package and saves it at this path.
--rebuild_recovery
- Rebuild the recovery patch used by non-A/B devices and write it to the
- system image.
+ Deprecated; does nothing.
--keep-tmp
Keep tempoary files for debugging purposes.
@@ -106,6 +105,7 @@
OPTIONS.output_ota = None
OPTIONS.output_img = None
OPTIONS.output_super_empty = None
+# TODO(b/132730255): Remove this option.
OPTIONS.rebuild_recovery = False
OPTIONS.keep_tmp = False
@@ -372,32 +372,6 @@
write_sorted_data(data=output_ab_partitions, path=output_ab_partitions_txt)
-def append_recovery_to_filesystem_config(output_target_files_temp_dir):
- """Performs special processing for META/filesystem_config.txt.
-
- This function appends recovery information to META/filesystem_config.txt so
- that recovery patch regeneration will succeed.
-
- Args:
- output_target_files_temp_dir: The name of a directory that will be used to
- create the output target files package after all the special cases are
- processed. We find filesystem_config.txt here.
- """
-
- filesystem_config_txt = os.path.join(output_target_files_temp_dir, 'META',
- 'filesystem_config.txt')
-
- with open(filesystem_config_txt, 'a') as f:
- # TODO(bpeckham) this data is hard coded. It should be generated
- # programmatically.
- f.write('system/bin/install-recovery.sh 0 0 750 '
- 'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
- f.write('system/recovery-from-boot.p 0 0 644 '
- 'selabel=u:object_r:system_file:s0 capabilities=0x0\n')
- f.write('system/etc/recovery.img 0 0 440 '
- 'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
-
-
def process_misc_info_txt(framework_target_files_temp_dir,
vendor_target_files_temp_dir,
output_target_files_temp_dir,
@@ -594,7 +568,7 @@
def process_special_cases(framework_target_files_temp_dir,
vendor_target_files_temp_dir,
output_target_files_temp_dir,
- framework_misc_info_keys, rebuild_recovery):
+ framework_misc_info_keys):
"""Performs special-case processing for certain target files items.
Certain files in the output target files package require special-case
@@ -611,8 +585,6 @@
framework_misc_info_keys: A list of keys to obtain from the framework
instance of META/misc_info.txt. The remaining keys from the vendor
instance.
- rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
- devices and write it to the system image.
"""
if 'ab_update' in framework_misc_info_keys:
@@ -621,10 +593,6 @@
vendor_target_files_temp_dir=vendor_target_files_temp_dir,
output_target_files_temp_dir=output_target_files_temp_dir)
- if rebuild_recovery:
- append_recovery_to_filesystem_config(
- output_target_files_temp_dir=output_target_files_temp_dir)
-
copy_file_contexts(
framework_target_files_dir=framework_target_files_temp_dir,
vendor_target_files_dir=vendor_target_files_temp_dir,
@@ -757,8 +725,7 @@
framework_target_files_temp_dir=framework_target_files_temp_dir,
vendor_target_files_temp_dir=vendor_target_files_temp_dir,
output_target_files_temp_dir=output_target_files_temp_dir,
- framework_misc_info_keys=framework_misc_info_keys,
- rebuild_recovery=rebuild_recovery)
+ framework_misc_info_keys=framework_misc_info_keys)
return output_target_files_temp_dir
@@ -779,6 +746,7 @@
add_img_args = ['--verbose']
add_img_args.append('--add_missing')
+ # TODO(b/132730255): Remove this if statement.
if rebuild_recovery:
add_img_args.append('--rebuild_recovery')
add_img_args.append(target_files_dir)
@@ -1016,7 +984,7 @@
OPTIONS.output_img = a
elif o == '--output-super-empty':
OPTIONS.output_super_empty = a
- elif o == '--rebuild_recovery':
+ elif o == '--rebuild_recovery': # TODO(b/132730255): Warn
OPTIONS.rebuild_recovery = True
elif o == '--keep-tmp':
OPTIONS.keep_tmp = True
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index de947f3..dfcfb49 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -171,8 +171,16 @@
--payload_signer_args <args>
Specify the arguments needed for payload signer.
+ --payload_signer_maximum_signature_size <signature_size>
+ The maximum signature size (in bytes) that would be generated by the given
+ payload signer. Only meaningful when custom payload signer is specified
+ via '--payload_signer'.
+ If the signer uses a RSA key, this should be the number of bytes to
+ represent the modulus. If it uses an EC key, this is the size of a
+ DER-encoded ECDSA signature.
+
--payload_signer_key_size <key_size>
- Specify the key size in bytes of the payload signer.
+ Deprecated. Use '--payload_signer_maximum_signature_size' instead.
--skip_postinstall
Skip the postinstall hooks when generating an A/B OTA package (default:
@@ -231,7 +239,7 @@
OPTIONS.log_diff = None
OPTIONS.payload_signer = None
OPTIONS.payload_signer_args = []
-OPTIONS.payload_signer_key_size = None
+OPTIONS.payload_signer_maximum_signature_size = None
OPTIONS.extracted_input = None
OPTIONS.key_passwords = []
OPTIONS.skip_postinstall = False
@@ -258,225 +266,6 @@
'system_ext', 'vbmeta', 'vbmeta_system', 'vbmeta_vendor', 'vendor']
-class BuildInfo(object):
- """A class that holds the information for a given build.
-
- This class wraps up the property querying for a given source or target build.
- It abstracts away the logic of handling OEM-specific properties, and caches
- the commonly used properties such as fingerprint.
-
- There are two types of info dicts: a) build-time info dict, which is generated
- at build time (i.e. included in a target_files zip); b) OEM info dict that is
- specified at package generation time (via command line argument
- '--oem_settings'). If a build doesn't use OEM-specific properties (i.e. not
- having "oem_fingerprint_properties" in build-time info dict), all the queries
- would be answered based on build-time info dict only. Otherwise if using
- OEM-specific properties, some of them will be calculated from two info dicts.
-
- Users can query properties similarly as using a dict() (e.g. info['fstab']),
- or to query build properties via GetBuildProp() or GetVendorBuildProp().
-
- Attributes:
- info_dict: The build-time info dict.
- is_ab: Whether it's a build that uses A/B OTA.
- oem_dicts: A list of OEM dicts.
- oem_props: A list of OEM properties that should be read from OEM dicts; None
- if the build doesn't use any OEM-specific property.
- fingerprint: The fingerprint of the build, which would be calculated based
- on OEM properties if applicable.
- device: The device name, which could come from OEM dicts if applicable.
- """
-
- _RO_PRODUCT_RESOLVE_PROPS = ["ro.product.brand", "ro.product.device",
- "ro.product.manufacturer", "ro.product.model",
- "ro.product.name"]
- _RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER = ["product", "odm", "vendor",
- "system_ext", "system"]
-
- def __init__(self, info_dict, oem_dicts):
- """Initializes a BuildInfo instance with the given dicts.
-
- Note that it only wraps up the given dicts, without making copies.
-
- Arguments:
- info_dict: The build-time info dict.
- oem_dicts: A list of OEM dicts (which is parsed from --oem_settings). Note
- that it always uses the first dict to calculate the fingerprint or the
- device name. The rest would be used for asserting OEM properties only
- (e.g. one package can be installed on one of these devices).
-
- Raises:
- ValueError: On invalid inputs.
- """
- self.info_dict = info_dict
- self.oem_dicts = oem_dicts
-
- self._is_ab = info_dict.get("ab_update") == "true"
- self._oem_props = info_dict.get("oem_fingerprint_properties")
-
- if self._oem_props:
- assert oem_dicts, "OEM source required for this build"
-
- # These two should be computed only after setting self._oem_props.
- self._device = self.GetOemProperty("ro.product.device")
- self._fingerprint = self.CalculateFingerprint()
-
- # Sanity check the build fingerprint.
- if (' ' in self._fingerprint or
- any(ord(ch) > 127 for ch in self._fingerprint)):
- raise ValueError(
- 'Invalid build fingerprint: "{}". See the requirement in Android CDD '
- '3.2.2. Build Parameters.'.format(self._fingerprint))
-
- @property
- def is_ab(self):
- return self._is_ab
-
- @property
- def device(self):
- return self._device
-
- @property
- def fingerprint(self):
- return self._fingerprint
-
- @property
- def vendor_fingerprint(self):
- return self._fingerprint_of("vendor")
-
- @property
- def product_fingerprint(self):
- return self._fingerprint_of("product")
-
- @property
- def odm_fingerprint(self):
- return self._fingerprint_of("odm")
-
- def _fingerprint_of(self, partition):
- if partition + ".build.prop" not in self.info_dict:
- return None
- build_prop = self.info_dict[partition + ".build.prop"]
- if "ro." + partition + ".build.fingerprint" in build_prop:
- return build_prop["ro." + partition + ".build.fingerprint"]
- if "ro." + partition + ".build.thumbprint" in build_prop:
- return build_prop["ro." + partition + ".build.thumbprint"]
- return None
-
- @property
- def oem_props(self):
- return self._oem_props
-
- def __getitem__(self, key):
- return self.info_dict[key]
-
- def __setitem__(self, key, value):
- self.info_dict[key] = value
-
- def get(self, key, default=None):
- return self.info_dict.get(key, default)
-
- def items(self):
- return self.info_dict.items()
-
- def GetBuildProp(self, prop):
- """Returns the inquired build property."""
- if prop in BuildInfo._RO_PRODUCT_RESOLVE_PROPS:
- return self._ResolveRoProductBuildProp(prop)
-
- try:
- return self.info_dict.get("build.prop", {})[prop]
- except KeyError:
- raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
-
- def _ResolveRoProductBuildProp(self, prop):
- """Resolves the inquired ro.product.* build property"""
- prop_val = self.info_dict.get("build.prop", {}).get(prop)
- if prop_val:
- return prop_val
-
- source_order_val = self.info_dict.get("build.prop", {}).get(
- "ro.product.property_source_order")
- if source_order_val:
- source_order = source_order_val.split(",")
- else:
- source_order = BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
-
- # Check that all sources in ro.product.property_source_order are valid
- if any([x not in BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
- for x in source_order]):
- raise common.ExternalError(
- "Invalid ro.product.property_source_order '{}'".format(source_order))
-
- for source in source_order:
- source_prop = prop.replace(
- "ro.product", "ro.product.{}".format(source), 1)
- prop_val = self.info_dict.get(
- "{}.build.prop".format(source), {}).get(source_prop)
- if prop_val:
- return prop_val
-
- raise common.ExternalError("couldn't resolve {}".format(prop))
-
- def GetVendorBuildProp(self, prop):
- """Returns the inquired vendor build property."""
- try:
- return self.info_dict.get("vendor.build.prop", {})[prop]
- except KeyError:
- raise common.ExternalError(
- "couldn't find %s in vendor.build.prop" % (prop,))
-
- def GetOemProperty(self, key):
- if self.oem_props is not None and key in self.oem_props:
- return self.oem_dicts[0][key]
- return self.GetBuildProp(key)
-
- def CalculateFingerprint(self):
- if self.oem_props is None:
- try:
- return self.GetBuildProp("ro.build.fingerprint")
- except common.ExternalError:
- return "{}/{}/{}:{}/{}/{}:{}/{}".format(
- self.GetBuildProp("ro.product.brand"),
- self.GetBuildProp("ro.product.name"),
- self.GetBuildProp("ro.product.device"),
- self.GetBuildProp("ro.build.version.release"),
- self.GetBuildProp("ro.build.id"),
- self.GetBuildProp("ro.build.version.incremental"),
- self.GetBuildProp("ro.build.type"),
- self.GetBuildProp("ro.build.tags"))
- return "%s/%s/%s:%s" % (
- self.GetOemProperty("ro.product.brand"),
- self.GetOemProperty("ro.product.name"),
- self.GetOemProperty("ro.product.device"),
- self.GetBuildProp("ro.build.thumbprint"))
-
- def WriteMountOemScript(self, script):
- assert self.oem_props is not None
- recovery_mount_options = self.info_dict.get("recovery_mount_options")
- script.Mount("/oem", recovery_mount_options)
-
- def WriteDeviceAssertions(self, script, oem_no_mount):
- # Read the property directly if not using OEM properties.
- if not self.oem_props:
- script.AssertDevice(self.device)
- return
-
- # Otherwise assert OEM properties.
- if not self.oem_dicts:
- raise common.ExternalError(
- "No OEM file provided to answer expected assertions")
-
- for prop in self.oem_props.split():
- values = []
- for oem_dict in self.oem_dicts:
- if prop in oem_dict:
- values.append(oem_dict[prop])
- if not values:
- raise common.ExternalError(
- "The OEM file is missing the property %s" % (prop,))
- script.AssertOemProperty(prop, values, oem_no_mount)
-
-
class PayloadSigner(object):
"""A class that wraps the payload signing works.
@@ -507,35 +296,31 @@
self.signer = "openssl"
self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
"-pkeyopt", "digest:sha256"]
- self.key_size = self._GetKeySizeInBytes(signing_key)
+ self.maximum_signature_size = self._GetMaximumSignatureSizeInBytes(
+ signing_key)
else:
self.signer = OPTIONS.payload_signer
self.signer_args = OPTIONS.payload_signer_args
- if OPTIONS.payload_signer_key_size:
- self.key_size = int(OPTIONS.payload_signer_key_size)
- assert self.key_size == 256 or self.key_size == 512, \
- "Unsupported key size {}".format(OPTIONS.payload_signer_key_size)
+ if OPTIONS.payload_signer_maximum_signature_size:
+ self.maximum_signature_size = int(
+ OPTIONS.payload_signer_maximum_signature_size)
else:
- self.key_size = 256
+ # The legacy config uses RSA2048 keys.
+ logger.warning("The maximum signature size for payload signer is not"
+ " set, default to 256 bytes.")
+ self.maximum_signature_size = 256
@staticmethod
- def _GetKeySizeInBytes(signing_key):
- modulus_file = common.MakeTempFile(prefix="modulus-")
- cmd = ["openssl", "rsa", "-inform", "PEM", "-in", signing_key, "-modulus",
- "-noout", "-out", modulus_file]
- common.RunAndCheckOutput(cmd, verbose=False)
-
- with open(modulus_file) as f:
- modulus_string = f.read()
- # The modulus string has the format "Modulus=$data", where $data is the
- # concatenation of hex dump of the modulus.
- MODULUS_PREFIX = "Modulus="
- assert modulus_string.startswith(MODULUS_PREFIX)
- modulus_string = modulus_string[len(MODULUS_PREFIX):]
- key_size = len(modulus_string) // 2
- assert key_size == 256 or key_size == 512, \
- "Unsupported key size {}".format(key_size)
- return key_size
+ def _GetMaximumSignatureSizeInBytes(signing_key):
+ out_signature_size_file = common.MakeTempFile(prefix="signature_size")
+ cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
+ out_signature_size_file), "--private_key={}".format(signing_key)]
+ common.RunAndCheckOutput(cmd)
+ with open(out_signature_size_file) as f:
+ signature_size = f.read().rstrip()
+ logger.info("%s outputs the maximum signature size: %s", cmd[0],
+ signature_size)
+ return int(signature_size)
def Sign(self, in_file):
"""Signs the given input file. Returns the output filename."""
@@ -615,7 +400,7 @@
metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
cmd = ["brillo_update_payload", "hash",
"--unsigned_payload", self.payload_file,
- "--signature_size", str(payload_signer.key_size),
+ "--signature_size", str(payload_signer.maximum_signature_size),
"--metadata_hash_file", metadata_sig_file,
"--payload_hash_file", payload_sig_file]
self._Run(cmd)
@@ -630,7 +415,7 @@
cmd = ["brillo_update_payload", "sign",
"--unsigned_payload", self.payload_file,
"--payload", signed_payload_file,
- "--signature_size", str(payload_signer.key_size),
+ "--signature_size", str(payload_signer.maximum_signature_size),
"--metadata_signature_file", signed_metadata_sig_file,
"--payload_signature_file", signed_payload_sig_file]
self._Run(cmd)
@@ -731,10 +516,19 @@
script.WriteRawImage("/boot", "recovery.img")
-def HasRecoveryPatch(target_files_zip):
+def HasRecoveryPatch(target_files_zip, info_dict):
+ board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true"
+
+ if board_uses_vendorimage:
+ target_files_dir = "VENDOR"
+ else:
+ target_files_dir = "SYSTEM/vendor"
+
+ patch = "%s/recovery-from-boot.p" % target_files_dir
+ img = "%s/etc/recovery.img" % target_files_dir
+
namelist = [name for name in target_files_zip.namelist()]
- return ("SYSTEM/recovery-from-boot.p" in namelist or
- "SYSTEM/etc/recovery.img" in namelist)
+ return (patch in namelist or img in namelist)
def HasPartition(target_files_zip, partition):
@@ -895,7 +689,7 @@
def WriteFullOTAPackage(input_zip, output_file):
- target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+ target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
# We don't know what version it will be installed on top of. We expect the API
# just won't change very often. Similarly for fstab, it might have changed in
@@ -925,7 +719,7 @@
metadata=metadata,
info_dict=OPTIONS.info_dict)
- assert HasRecoveryPatch(input_zip)
+ assert HasRecoveryPatch(input_zip, info_dict=OPTIONS.info_dict)
# Assertions (e.g. downgrade check, device properties check).
ts = target_info.GetBuildProp("ro.build.date.utc")
@@ -1121,8 +915,8 @@
Returns:
A dict to be written into package metadata entry.
"""
- assert isinstance(target_info, BuildInfo)
- assert source_info is None or isinstance(source_info, BuildInfo)
+ assert isinstance(target_info, common.BuildInfo)
+ assert source_info is None or isinstance(source_info, common.BuildInfo)
metadata = {
'post-build' : target_info.fingerprint,
@@ -1535,8 +1329,8 @@
def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
- target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
- source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+ target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+ source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
target_api_version = target_info["recovery_api_version"]
source_api_version = source_info["recovery_api_version"]
@@ -2015,10 +1809,10 @@
compression=zipfile.ZIP_DEFLATED)
if source_file is not None:
- target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
- source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+ target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+ source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
else:
- target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+ target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
source_info = None
# Metadata to comply with Android OTA package format.
@@ -2215,8 +2009,13 @@
OPTIONS.payload_signer = a
elif o == "--payload_signer_args":
OPTIONS.payload_signer_args = shlex.split(a)
+ elif o == "--payload_signer_maximum_signature_size":
+ OPTIONS.payload_signer_maximum_signature_size = a
elif o == "--payload_signer_key_size":
- OPTIONS.payload_signer_key_size = a
+ # TODO(Xunchang) remove this option after cleaning up the callers.
+ logger.warning("The option '--payload_signer_key_size' is deprecated."
+ " Use '--payload_signer_maximum_signature_size' instead.")
+ OPTIONS.payload_signer_maximum_signature_size = a
elif o == "--extracted_input_target_files":
OPTIONS.extracted_input = a
elif o == "--skip_postinstall":
@@ -2257,6 +2056,7 @@
"log_diff=",
"payload_signer=",
"payload_signer_args=",
+ "payload_signer_maximum_signature_size=",
"payload_signer_key_size=",
"extracted_input_target_files=",
"skip_postinstall",
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 710147b..0f4f1da 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -153,6 +153,20 @@
OPTIONS.avb_extra_args = {}
+AVB_FOOTER_ARGS_BY_PARTITION = {
+ 'boot' : 'avb_boot_add_hash_footer_args',
+ 'dtbo' : 'avb_dtbo_add_hash_footer_args',
+ 'recovery' : 'avb_recovery_add_hash_footer_args',
+ 'system' : 'avb_system_add_hashtree_footer_args',
+ 'system_other' : 'avb_system_other_add_hashtree_footer_args',
+ 'vendor' : 'avb_vendor_add_hashtree_footer_args',
+ 'vendor_boot' : 'avb_vendor_boot_add_hash_footer_args',
+ 'vbmeta' : 'avb_vbmeta_args',
+ 'vbmeta_system' : 'avb_vbmeta_system_args',
+ 'vbmeta_vendor' : 'avb_vbmeta_vendor_args',
+}
+
+
def GetApkCerts(certmap):
# apply the key remapping to the contents of the file
for apk, cert in certmap.items():
@@ -543,14 +557,13 @@
OPTIONS.rebuild_recovery = True
# Don't copy OTA certs if we're replacing them.
+ # Replacement of update-payload-key.pub.pem was removed in b/116660991.
elif (
OPTIONS.replace_ota_keys and
filename in (
"BOOT/RAMDISK/system/etc/security/otacerts.zip",
- "BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
"RECOVERY/RAMDISK/system/etc/security/otacerts.zip",
- "SYSTEM/etc/security/otacerts.zip",
- "SYSTEM/etc/update_engine/update-payload-key.pub.pem")):
+ "SYSTEM/etc/security/otacerts.zip")):
pass
# Skip META/misc_info.txt since we will write back the new values later.
@@ -622,6 +635,10 @@
# Replace the AVB signing keys, if any.
ReplaceAvbSigningKeys(misc_info)
+ # Rewrite the props in AVB signing args.
+ if misc_info.get('avb_enable') == 'true':
+ RewriteAvbProps(misc_info)
+
# Write back misc_info with the latest values.
ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
@@ -814,24 +831,6 @@
# We DO NOT include the extra_recovery_keys (if any) here.
WriteOtacerts(output_tf_zip, "SYSTEM/etc/security/otacerts.zip", mapped_keys)
- # For A/B devices, update the payload verification key.
- if misc_info.get("ab_update") == "true":
- # Unlike otacerts.zip that may contain multiple keys, we can only specify
- # ONE payload verification key.
- if len(mapped_keys) > 1:
- print("\n WARNING: Found more than one OTA keys; Using the first one"
- " as payload verification key.\n\n")
-
- print("Using %s for payload verification." % (mapped_keys[0],))
- pubkey = common.ExtractPublicKey(mapped_keys[0])
- common.ZipWriteStr(
- output_tf_zip,
- "SYSTEM/etc/update_engine/update-payload-key.pub.pem",
- pubkey)
- common.ZipWriteStr(
- output_tf_zip,
- "BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
- pubkey)
def ReplaceVerityPublicKey(output_zip, filename, key_path):
@@ -910,18 +909,6 @@
def ReplaceAvbSigningKeys(misc_info):
"""Replaces the AVB signing keys."""
- AVB_FOOTER_ARGS_BY_PARTITION = {
- 'boot' : 'avb_boot_add_hash_footer_args',
- 'dtbo' : 'avb_dtbo_add_hash_footer_args',
- 'recovery' : 'avb_recovery_add_hash_footer_args',
- 'system' : 'avb_system_add_hashtree_footer_args',
- 'system_other' : 'avb_system_other_add_hashtree_footer_args',
- 'vendor' : 'avb_vendor_add_hashtree_footer_args',
- 'vbmeta' : 'avb_vbmeta_args',
- 'vbmeta_system' : 'avb_vbmeta_system_args',
- 'vbmeta_vendor' : 'avb_vbmeta_vendor_args',
- }
-
def ReplaceAvbPartitionSigningKey(partition):
key = OPTIONS.avb_keys.get(partition)
if not key:
@@ -946,6 +933,32 @@
ReplaceAvbPartitionSigningKey(partition)
+def RewriteAvbProps(misc_info):
+ """Rewrites the props in AVB signing args."""
+ for partition, args_key in AVB_FOOTER_ARGS_BY_PARTITION.items():
+ args = misc_info.get(args_key)
+ if not args:
+ continue
+
+ tokens = []
+ changed = False
+ for token in args.split(' '):
+ fingerprint_key = 'com.android.build.{}.fingerprint'.format(partition)
+ if not token.startswith(fingerprint_key):
+ tokens.append(token)
+ continue
+ prefix, tag = token.rsplit('/', 1)
+ tokens.append('{}/{}'.format(prefix, EditTags(tag)))
+ changed = True
+
+ if changed:
+ result = ' '.join(tokens)
+ print('Rewriting AVB prop for {}:\n'.format(partition))
+ print(' replace: {}'.format(args))
+ print(' with: {}'.format(result))
+ misc_info[args_key] = result
+
+
def BuildKeyMap(misc_info, key_mapping_options):
for s, d in key_mapping_options:
if s is None: # -d option
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 3367896..a249081 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -249,8 +249,9 @@
with open(fn) as f:
for line in f:
- fn, ranges = line.split(None, 1)
- ranges = rangelib.RangeSet.parse(ranges)
+ fn, ranges_text = line.rstrip().split(None, 1)
+ ranges = rangelib.RangeSet.parse(ranges_text)
+ ranges.extra['text_str'] = ranges_text
if allow_shared_blocks:
# Find the shared blocks that have been claimed by others. If so, tag
@@ -261,9 +262,6 @@
if not non_shared:
continue
- # There shouldn't anything in the extra dict yet.
- assert not ranges.extra, "Non-empty RangeSet.extra"
-
# Put the non-shared RangeSet as the value in the block map, which
# has a copy of the original RangeSet.
non_shared.extra['uses_shared_blocks'] = ranges
diff --git a/tools/releasetools/test_check_target_files_vintf.py b/tools/releasetools/test_check_target_files_vintf.py
index a1328c2..79f9018 100644
--- a/tools/releasetools/test_check_target_files_vintf.py
+++ b/tools/releasetools/test_check_target_files_vintf.py
@@ -78,6 +78,8 @@
for root, _, files in os.walk(test_delta_dir):
rel_root = os.path.relpath(root, test_delta_dir)
for f in files:
+ if not f.endswith('.xml'):
+ continue
output_file = os.path.join(test_dir, rel_root, f)
with open(os.path.join(root, f)) as inp:
write_string_to_file(inp.read(), output_file)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index ceb023f..8a52419 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -44,6 +44,210 @@
yield b'\0' * (step_size - block_size)
+class BuildInfoTest(test_utils.ReleaseToolsTestCase):
+
+ TEST_INFO_DICT = {
+ 'build.prop' : {
+ 'ro.product.device' : 'product-device',
+ 'ro.product.name' : 'product-name',
+ 'ro.build.fingerprint' : 'build-fingerprint',
+ 'ro.build.foo' : 'build-foo',
+ },
+ 'vendor.build.prop' : {
+ 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+ },
+ 'property1' : 'value1',
+ 'property2' : 4096,
+ }
+
+ TEST_INFO_DICT_USES_OEM_PROPS = {
+ 'build.prop' : {
+ 'ro.product.name' : 'product-name',
+ 'ro.build.thumbprint' : 'build-thumbprint',
+ 'ro.build.bar' : 'build-bar',
+ },
+ 'vendor.build.prop' : {
+ 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+ },
+ 'property1' : 'value1',
+ 'property2' : 4096,
+ 'oem_fingerprint_properties' : 'ro.product.device ro.product.brand',
+ }
+
+ TEST_OEM_DICTS = [
+ {
+ 'ro.product.brand' : 'brand1',
+ 'ro.product.device' : 'device1',
+ },
+ {
+ 'ro.product.brand' : 'brand2',
+ 'ro.product.device' : 'device2',
+ },
+ {
+ 'ro.product.brand' : 'brand3',
+ 'ro.product.device' : 'device3',
+ },
+ ]
+
+ def test_init(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('product-device', target_info.device)
+ self.assertEqual('build-fingerprint', target_info.fingerprint)
+ self.assertFalse(target_info.is_ab)
+ self.assertIsNone(target_info.oem_props)
+
+ def test_init_with_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('device1', target_info.device)
+ self.assertEqual('brand1/product-name/device1:build-thumbprint',
+ target_info.fingerprint)
+
+ # Swap the order in oem_dicts, which would lead to different BuildInfo.
+ oem_dicts = copy.copy(self.TEST_OEM_DICTS)
+ oem_dicts[0], oem_dicts[2] = oem_dicts[2], oem_dicts[0]
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ oem_dicts)
+ self.assertEqual('device3', target_info.device)
+ self.assertEqual('brand3/product-name/device3:build-thumbprint',
+ target_info.fingerprint)
+
+ # Missing oem_dict should be rejected.
+ self.assertRaises(AssertionError, common.BuildInfo,
+ self.TEST_INFO_DICT_USES_OEM_PROPS, None)
+
+ def test_init_badFingerprint(self):
+ info_dict = copy.deepcopy(self.TEST_INFO_DICT)
+ info_dict['build.prop']['ro.build.fingerprint'] = 'bad fingerprint'
+ self.assertRaises(ValueError, common.BuildInfo, info_dict, None)
+
+ info_dict['build.prop']['ro.build.fingerprint'] = 'bad\x80fingerprint'
+ self.assertRaises(ValueError, common.BuildInfo, info_dict, None)
+
+ def test___getitem__(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('value1', target_info['property1'])
+ self.assertEqual(4096, target_info['property2'])
+ self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
+
+ def test___getitem__with_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('value1', target_info['property1'])
+ self.assertEqual(4096, target_info['property2'])
+ self.assertRaises(KeyError,
+ lambda: target_info['build.prop']['ro.build.foo'])
+
+ def test___setitem__(self):
+ target_info = common.BuildInfo(copy.deepcopy(self.TEST_INFO_DICT), None)
+ self.assertEqual('value1', target_info['property1'])
+ target_info['property1'] = 'value2'
+ self.assertEqual('value2', target_info['property1'])
+
+ self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
+ target_info['build.prop']['ro.build.foo'] = 'build-bar'
+ self.assertEqual('build-bar', target_info['build.prop']['ro.build.foo'])
+
+ def test_get(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('value1', target_info.get('property1'))
+ self.assertEqual(4096, target_info.get('property2'))
+ self.assertEqual(4096, target_info.get('property2', 1024))
+ self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+ self.assertEqual('build-foo', target_info.get('build.prop')['ro.build.foo'])
+
+ def test_get_with_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('value1', target_info.get('property1'))
+ self.assertEqual(4096, target_info.get('property2'))
+ self.assertEqual(4096, target_info.get('property2', 1024))
+ self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+ self.assertIsNone(target_info.get('build.prop').get('ro.build.foo'))
+ self.assertRaises(KeyError,
+ lambda: target_info.get('build.prop')['ro.build.foo'])
+
+ def test_items(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ items = target_info.items()
+ self.assertIn(('property1', 'value1'), items)
+ self.assertIn(('property2', 4096), items)
+
+ def test_GetBuildProp(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
+ self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_GetBuildProp_with_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('build-bar', target_info.GetBuildProp('ro.build.bar'))
+ self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_GetVendorBuildProp(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('vendor-build-fingerprint',
+ target_info.GetVendorBuildProp(
+ 'ro.vendor.build.fingerprint'))
+ self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_GetVendorBuildProp_with_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('vendor-build-fingerprint',
+ target_info.GetVendorBuildProp(
+ 'ro.vendor.build.fingerprint'))
+ self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_vendor_fingerprint(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('vendor-build-fingerprint',
+ target_info.vendor_fingerprint)
+
+ def test_vendor_fingerprint_blacklisted(self):
+ target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+ del target_info_dict['vendor.build.prop']['ro.vendor.build.fingerprint']
+ target_info = common.BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
+ self.assertIsNone(target_info.vendor_fingerprint)
+
+ def test_vendor_fingerprint_without_vendor_build_prop(self):
+ target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+ del target_info_dict['vendor.build.prop']
+ target_info = common.BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
+ self.assertIsNone(target_info.vendor_fingerprint)
+
+ def test_WriteMountOemScript(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ script_writer = test_utils.MockScriptWriter()
+ target_info.WriteMountOemScript(script_writer)
+ self.assertEqual([('Mount', '/oem', None)], script_writer.lines)
+
+ def test_WriteDeviceAssertions(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
+ script_writer = test_utils.MockScriptWriter()
+ target_info.WriteDeviceAssertions(script_writer, False)
+ self.assertEqual([('AssertDevice', 'product-device')], script_writer.lines)
+
+ def test_WriteDeviceAssertions_with_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ script_writer = test_utils.MockScriptWriter()
+ target_info.WriteDeviceAssertions(script_writer, False)
+ self.assertEqual(
+ [
+ ('AssertOemProperty', 'ro.product.device',
+ ['device1', 'device2', 'device3'], False),
+ ('AssertOemProperty', 'ro.product.brand',
+ ['brand1', 'brand2', 'brand3'], False),
+ ],
+ script_writer.lines)
+
+
class CommonZipTest(test_utils.ReleaseToolsTestCase):
def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
@@ -749,10 +953,12 @@
self.assertNotIn(
'incomplete', sparse_image.file_map['/system/file2'].extra)
- # All other entries should look normal without any tags.
+ # '/system/file1' will only contain one field -- a copy of the input text.
+ self.assertEqual(1, len(sparse_image.file_map['/system/file1'].extra))
+
+ # Meta entries should not have any extra tag.
self.assertFalse(sparse_image.file_map['__COPY'].extra)
self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
- self.assertFalse(sparse_image.file_map['/system/file1'].extra)
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetSparseImage_incompleteRanges(self):
@@ -775,7 +981,9 @@
with zipfile.ZipFile(target_files, 'r') as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
- self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+ self.assertEqual(
+ '1-5 9-10',
+ sparse_image.file_map['/system/file1'].extra['text_str'])
self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])
@test_utils.SkipIfExternalToolsUnavailable()
@@ -801,7 +1009,9 @@
with zipfile.ZipFile(target_files, 'r') as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
- self.assertFalse(sparse_image.file_map['//system/file1'].extra)
+ self.assertEqual(
+ '1-5 9-10',
+ sparse_image.file_map['//system/file1'].extra['text_str'])
self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
self.assertTrue(
sparse_image.file_map['/system/app/file3'].extra['incomplete'])
@@ -826,7 +1036,9 @@
with zipfile.ZipFile(target_files, 'r') as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
- self.assertFalse(sparse_image.file_map['//system/file1'].extra)
+ self.assertEqual(
+ '1-5 9-10',
+ sparse_image.file_map['//system/file1'].extra['text_str'])
self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1161,6 +1373,39 @@
self.assertEqual('5', chained_partition_args[1])
self.assertTrue(os.path.exists(chained_partition_args[2]))
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendVBMetaArgsForPartition_recoveryAsChainedPartition_nonAb(self):
+ testdata_dir = test_utils.get_testdata_dir()
+ pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
+ info_dict = {
+ 'avb_avbtool': 'avbtool',
+ 'avb_recovery_key_path': pubkey,
+ 'avb_recovery_rollback_index_location': 3,
+ }
+ cmd = common.GetAvbPartitionArg(
+ 'recovery', '/path/to/recovery.img', info_dict)
+ self.assertFalse(cmd)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendVBMetaArgsForPartition_recoveryAsChainedPartition_ab(self):
+ testdata_dir = test_utils.get_testdata_dir()
+ pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
+ info_dict = {
+ 'ab_update': 'true',
+ 'avb_avbtool': 'avbtool',
+ 'avb_recovery_key_path': pubkey,
+ 'avb_recovery_rollback_index_location': 3,
+ }
+ cmd = common.GetAvbPartitionArg(
+ 'recovery', '/path/to/recovery.img', info_dict)
+ self.assertEqual(2, len(cmd))
+ self.assertEqual('--chain_partition', cmd[0])
+ chained_partition_args = cmd[1].split(':')
+ self.assertEqual(3, len(chained_partition_args))
+ self.assertEqual('recovery', chained_partition_args[0])
+ self.assertEqual('3', chained_partition_args[1])
+ self.assertTrue(os.path.exists(chained_partition_args[2]))
+
class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
@@ -1224,24 +1469,6 @@
self._info)
-class MockScriptWriter(object):
- """A class that mocks edify_generator.EdifyGenerator."""
-
- def __init__(self, enable_comments=False):
- self.lines = []
- self.enable_comments = enable_comments
-
- def Comment(self, comment):
- if self.enable_comments:
- self.lines.append('# {}'.format(comment))
-
- def AppendExtra(self, extra):
- self.lines.append(extra)
-
- def __str__(self):
- return '\n'.join(self.lines)
-
-
class MockBlockDifference(object):
def __init__(self, partition, tgt, src=None):
@@ -1279,7 +1506,7 @@
if not line.startswith(b'#')]
def setUp(self):
- self.script = MockScriptWriter()
+ self.script = test_utils.MockScriptWriter()
self.output_path = common.MakeTempFile(suffix='.zip')
def test_full(self):
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 9825a5e..38faf64 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -22,7 +22,7 @@
import common
import test_utils
from ota_from_target_files import (
- _LoadOemDicts, AbOtaPropertyFiles, BuildInfo, FinalizeMetadata,
+ _LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata,
GetPackageMetadata, GetTargetFilesZipForSecondaryImages,
GetTargetFilesZipWithoutPostinstallConfig, NonAbOtaPropertyFiles,
Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
@@ -74,291 +74,6 @@
return target_files
-class MockScriptWriter(object):
- """A class that mocks edify_generator.EdifyGenerator.
-
- It simply pushes the incoming arguments onto script stack, which is to assert
- the calls to EdifyGenerator functions.
- """
-
- def __init__(self):
- self.script = []
-
- def Mount(self, *args):
- self.script.append(('Mount',) + args)
-
- def AssertDevice(self, *args):
- self.script.append(('AssertDevice',) + args)
-
- def AssertOemProperty(self, *args):
- self.script.append(('AssertOemProperty',) + args)
-
- def AssertFingerprintOrThumbprint(self, *args):
- self.script.append(('AssertFingerprintOrThumbprint',) + args)
-
- def AssertSomeFingerprint(self, *args):
- self.script.append(('AssertSomeFingerprint',) + args)
-
- def AssertSomeThumbprint(self, *args):
- self.script.append(('AssertSomeThumbprint',) + args)
-
-
-class BuildInfoTest(test_utils.ReleaseToolsTestCase):
-
- TEST_INFO_DICT = {
- 'build.prop' : {
- 'ro.product.device' : 'product-device',
- 'ro.product.name' : 'product-name',
- 'ro.build.fingerprint' : 'build-fingerprint',
- 'ro.build.foo' : 'build-foo',
- },
- 'vendor.build.prop' : {
- 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
- },
- 'property1' : 'value1',
- 'property2' : 4096,
- }
-
- TEST_INFO_DICT_USES_OEM_PROPS = {
- 'build.prop' : {
- 'ro.product.name' : 'product-name',
- 'ro.build.thumbprint' : 'build-thumbprint',
- 'ro.build.bar' : 'build-bar',
- },
- 'vendor.build.prop' : {
- 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
- },
- 'property1' : 'value1',
- 'property2' : 4096,
- 'oem_fingerprint_properties' : 'ro.product.device ro.product.brand',
- }
-
- TEST_OEM_DICTS = [
- {
- 'ro.product.brand' : 'brand1',
- 'ro.product.device' : 'device1',
- },
- {
- 'ro.product.brand' : 'brand2',
- 'ro.product.device' : 'device2',
- },
- {
- 'ro.product.brand' : 'brand3',
- 'ro.product.device' : 'device3',
- },
- ]
-
- def test_init(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('product-device', target_info.device)
- self.assertEqual('build-fingerprint', target_info.fingerprint)
- self.assertFalse(target_info.is_ab)
- self.assertIsNone(target_info.oem_props)
-
- def test_init_with_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- self.assertEqual('device1', target_info.device)
- self.assertEqual('brand1/product-name/device1:build-thumbprint',
- target_info.fingerprint)
-
- # Swap the order in oem_dicts, which would lead to different BuildInfo.
- oem_dicts = copy.copy(self.TEST_OEM_DICTS)
- oem_dicts[0], oem_dicts[2] = oem_dicts[2], oem_dicts[0]
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, oem_dicts)
- self.assertEqual('device3', target_info.device)
- self.assertEqual('brand3/product-name/device3:build-thumbprint',
- target_info.fingerprint)
-
- # Missing oem_dict should be rejected.
- self.assertRaises(AssertionError, BuildInfo,
- self.TEST_INFO_DICT_USES_OEM_PROPS, None)
-
- def test_init_badFingerprint(self):
- info_dict = copy.deepcopy(self.TEST_INFO_DICT)
- info_dict['build.prop']['ro.build.fingerprint'] = 'bad fingerprint'
- self.assertRaises(ValueError, BuildInfo, info_dict, None)
-
- info_dict['build.prop']['ro.build.fingerprint'] = 'bad\x80fingerprint'
- self.assertRaises(ValueError, BuildInfo, info_dict, None)
-
- def test___getitem__(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('value1', target_info['property1'])
- self.assertEqual(4096, target_info['property2'])
- self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
-
- def test___getitem__with_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- self.assertEqual('value1', target_info['property1'])
- self.assertEqual(4096, target_info['property2'])
- self.assertRaises(KeyError,
- lambda: target_info['build.prop']['ro.build.foo'])
-
- def test___setitem__(self):
- target_info = BuildInfo(copy.deepcopy(self.TEST_INFO_DICT), None)
- self.assertEqual('value1', target_info['property1'])
- target_info['property1'] = 'value2'
- self.assertEqual('value2', target_info['property1'])
-
- self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
- target_info['build.prop']['ro.build.foo'] = 'build-bar'
- self.assertEqual('build-bar', target_info['build.prop']['ro.build.foo'])
-
- def test_get(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('value1', target_info.get('property1'))
- self.assertEqual(4096, target_info.get('property2'))
- self.assertEqual(4096, target_info.get('property2', 1024))
- self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
- self.assertEqual('build-foo', target_info.get('build.prop')['ro.build.foo'])
-
- def test_get_with_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- self.assertEqual('value1', target_info.get('property1'))
- self.assertEqual(4096, target_info.get('property2'))
- self.assertEqual(4096, target_info.get('property2', 1024))
- self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
- self.assertIsNone(target_info.get('build.prop').get('ro.build.foo'))
- self.assertRaises(KeyError,
- lambda: target_info.get('build.prop')['ro.build.foo'])
-
- def test_items(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- items = target_info.items()
- self.assertIn(('property1', 'value1'), items)
- self.assertIn(('property2', 4096), items)
-
- def test_GetBuildProp(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
- self.assertRaises(common.ExternalError, target_info.GetBuildProp,
- 'ro.build.nonexistent')
-
- def test_GetBuildProp_with_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- self.assertEqual('build-bar', target_info.GetBuildProp('ro.build.bar'))
- self.assertRaises(common.ExternalError, target_info.GetBuildProp,
- 'ro.build.nonexistent')
-
- def test_GetVendorBuildProp(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('vendor-build-fingerprint',
- target_info.GetVendorBuildProp(
- 'ro.vendor.build.fingerprint'))
- self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
- 'ro.build.nonexistent')
-
- def test_GetVendorBuildProp_with_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- self.assertEqual('vendor-build-fingerprint',
- target_info.GetVendorBuildProp(
- 'ro.vendor.build.fingerprint'))
- self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
- 'ro.build.nonexistent')
-
- def test_vendor_fingerprint(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- self.assertEqual('vendor-build-fingerprint',
- target_info.vendor_fingerprint)
-
- def test_vendor_fingerprint_blacklisted(self):
- target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
- del target_info_dict['vendor.build.prop']['ro.vendor.build.fingerprint']
- target_info = BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
- self.assertIsNone(target_info.vendor_fingerprint)
-
- def test_vendor_fingerprint_without_vendor_build_prop(self):
- target_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
- del target_info_dict['vendor.build.prop']
- target_info = BuildInfo(target_info_dict, self.TEST_OEM_DICTS)
- self.assertIsNone(target_info.vendor_fingerprint)
-
- def test_WriteMountOemScript(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- script_writer = MockScriptWriter()
- target_info.WriteMountOemScript(script_writer)
- self.assertEqual([('Mount', '/oem', None)], script_writer.script)
-
- def test_WriteDeviceAssertions(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- script_writer = MockScriptWriter()
- target_info.WriteDeviceAssertions(script_writer, False)
- self.assertEqual([('AssertDevice', 'product-device')], script_writer.script)
-
- def test_WriteDeviceAssertions_with_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- script_writer = MockScriptWriter()
- target_info.WriteDeviceAssertions(script_writer, False)
- self.assertEqual(
- [
- ('AssertOemProperty', 'ro.product.device',
- ['device1', 'device2', 'device3'], False),
- ('AssertOemProperty', 'ro.product.brand',
- ['brand1', 'brand2', 'brand3'], False),
- ],
- script_writer.script)
-
- def test_WriteFingerprintAssertion_without_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- source_info_dict = copy.deepcopy(self.TEST_INFO_DICT)
- source_info_dict['build.prop']['ro.build.fingerprint'] = (
- 'source-build-fingerprint')
- source_info = BuildInfo(source_info_dict, None)
-
- script_writer = MockScriptWriter()
- WriteFingerprintAssertion(script_writer, target_info, source_info)
- self.assertEqual(
- [('AssertSomeFingerprint', 'source-build-fingerprint',
- 'build-fingerprint')],
- script_writer.script)
-
- def test_WriteFingerprintAssertion_with_source_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT, None)
- source_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
-
- script_writer = MockScriptWriter()
- WriteFingerprintAssertion(script_writer, target_info, source_info)
- self.assertEqual(
- [('AssertFingerprintOrThumbprint', 'build-fingerprint',
- 'build-thumbprint')],
- script_writer.script)
-
- def test_WriteFingerprintAssertion_with_target_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- source_info = BuildInfo(self.TEST_INFO_DICT, None)
-
- script_writer = MockScriptWriter()
- WriteFingerprintAssertion(script_writer, target_info, source_info)
- self.assertEqual(
- [('AssertFingerprintOrThumbprint', 'build-fingerprint',
- 'build-thumbprint')],
- script_writer.script)
-
- def test_WriteFingerprintAssertion_with_both_oem_props(self):
- target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
- self.TEST_OEM_DICTS)
- source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
- source_info_dict['build.prop']['ro.build.thumbprint'] = (
- 'source-build-thumbprint')
- source_info = BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
-
- script_writer = MockScriptWriter()
- WriteFingerprintAssertion(script_writer, target_info, source_info)
- self.assertEqual(
- [('AssertSomeThumbprint', 'build-thumbprint',
- 'source-build-thumbprint')],
- script_writer.script)
-
-
class LoadOemDictsTest(test_utils.ReleaseToolsTestCase):
def test_NoneDict(self):
@@ -416,6 +131,35 @@
},
}
+ TEST_INFO_DICT_USES_OEM_PROPS = {
+ 'build.prop' : {
+ 'ro.product.name' : 'product-name',
+ 'ro.build.thumbprint' : 'build-thumbprint',
+ 'ro.build.bar' : 'build-bar',
+ },
+ 'vendor.build.prop' : {
+ 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+ },
+ 'property1' : 'value1',
+ 'property2' : 4096,
+ 'oem_fingerprint_properties' : 'ro.product.device ro.product.brand',
+ }
+
+ TEST_OEM_DICTS = [
+ {
+ 'ro.product.brand' : 'brand1',
+ 'ro.product.device' : 'device1',
+ },
+ {
+ 'ro.product.brand' : 'brand2',
+ 'ro.product.device' : 'device2',
+ },
+ {
+ 'ro.product.brand' : 'brand3',
+ 'ro.product.device' : 'device3',
+ },
+ ]
+
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
self.assertTrue(os.path.exists(self.testdata_dir))
@@ -437,7 +181,7 @@
def test_GetPackageMetadata_abOta_full(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
- target_info = BuildInfo(target_info_dict, None)
+ target_info = common.BuildInfo(target_info_dict, None)
metadata = GetPackageMetadata(target_info)
self.assertDictEqual(
{
@@ -455,8 +199,8 @@
def test_GetPackageMetadata_abOta_incremental(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
- target_info = BuildInfo(target_info_dict, None)
- source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+ target_info = common.BuildInfo(target_info_dict, None)
+ source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
metadata = GetPackageMetadata(target_info, source_info)
self.assertDictEqual(
@@ -475,7 +219,7 @@
metadata)
def test_GetPackageMetadata_nonAbOta_full(self):
- target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
metadata = GetPackageMetadata(target_info)
self.assertDictEqual(
{
@@ -490,8 +234,8 @@
metadata)
def test_GetPackageMetadata_nonAbOta_incremental(self):
- target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
- source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+ target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
metadata = GetPackageMetadata(target_info, source_info)
self.assertDictEqual(
@@ -509,7 +253,7 @@
metadata)
def test_GetPackageMetadata_wipe(self):
- target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.wipe_user_data = True
metadata = GetPackageMetadata(target_info)
self.assertDictEqual(
@@ -526,7 +270,7 @@
metadata)
def test_GetPackageMetadata_retrofitDynamicPartitions(self):
- target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.retrofit_dynamic_partitions = True
metadata = GetPackageMetadata(target_info)
self.assertDictEqual(
@@ -555,8 +299,8 @@
self._test_GetPackageMetadata_swapBuildTimestamps(
target_info_dict, source_info_dict)
- target_info = BuildInfo(target_info_dict, None)
- source_info = BuildInfo(source_info_dict, None)
+ target_info = common.BuildInfo(target_info_dict, None)
+ source_info = common.BuildInfo(source_info_dict, None)
common.OPTIONS.incremental_source = ''
self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
source_info)
@@ -567,8 +311,8 @@
self._test_GetPackageMetadata_swapBuildTimestamps(
target_info_dict, source_info_dict)
- target_info = BuildInfo(target_info_dict, None)
- source_info = BuildInfo(source_info_dict, None)
+ target_info = common.BuildInfo(target_info_dict, None)
+ source_info = common.BuildInfo(source_info_dict, None)
common.OPTIONS.incremental_source = ''
common.OPTIONS.downgrade = True
common.OPTIONS.wipe_user_data = True
@@ -596,7 +340,7 @@
with zipfile.ZipFile(target_file) as verify_zip:
namelist = verify_zip.namelist()
- ab_partitions = verify_zip.read('META/ab_partitions.txt')
+ ab_partitions = verify_zip.read('META/ab_partitions.txt').decode()
self.assertIn('META/ab_partitions.txt', namelist)
self.assertIn('IMAGES/system.img', namelist)
@@ -676,9 +420,9 @@
with zipfile.ZipFile(target_file) as verify_zip:
namelist = verify_zip.namelist()
- updated_misc_info = verify_zip.read('META/misc_info.txt')
+ updated_misc_info = verify_zip.read('META/misc_info.txt').decode()
updated_dynamic_partitions_info = verify_zip.read(
- 'META/dynamic_partitions_info.txt')
+ 'META/dynamic_partitions_info.txt').decode()
self.assertIn('META/ab_partitions.txt', namelist)
self.assertIn('IMAGES/system.img', namelist)
@@ -781,6 +525,59 @@
FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
self.assertIn('ota-test-property-files', metadata)
+ def test_WriteFingerprintAssertion_without_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ source_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+ source_info_dict['build.prop']['ro.build.fingerprint'] = (
+ 'source-build-fingerprint')
+ source_info = common.BuildInfo(source_info_dict, None)
+
+ script_writer = test_utils.MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertSomeFingerprint', 'source-build-fingerprint',
+ 'build-fingerprint-target')],
+ script_writer.lines)
+
+ def test_WriteFingerprintAssertion_with_source_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ source_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+
+ script_writer = test_utils.MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
+ 'build-thumbprint')],
+ script_writer.lines)
+
+ def test_WriteFingerprintAssertion_with_target_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ source_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+
+ script_writer = test_utils.MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
+ 'build-thumbprint')],
+ script_writer.lines)
+
+ def test_WriteFingerprintAssertion_with_both_oem_props(self):
+ target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+ source_info_dict['build.prop']['ro.build.thumbprint'] = (
+ 'source-build-thumbprint')
+ source_info = common.BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
+
+ script_writer = test_utils.MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertSomeThumbprint', 'build-thumbprint',
+ 'source-build-thumbprint')],
+ script_writer.lines)
+
class TestPropertyFiles(PropertyFiles):
"""A class that extends PropertyFiles for testing purpose."""
@@ -1088,10 +885,28 @@
payload_offset, metadata_total = (
property_files._GetPayloadMetadataOffsetAndSize(input_zip))
- # Read in the metadata signature directly.
+ # The signature proto has the following format (details in
+ # /platform/system/update_engine/update_metadata.proto):
+ # message Signature {
+ # optional uint32 version = 1;
+ # optional bytes data = 2;
+ # optional fixed32 unpadded_signature_size = 3;
+ # }
+ #
+ # According to the protobuf encoding, the tail of the signature message will
+ # be [signature string(256 bytes) + encoding of the fixed32 number 256]. And
+  # 256 is encoded as '\x1d\x00\x01\x00\x00':
+  # [(3 (field number) << 3) | 5 (wire type), then little-endian bytes of 0x100 (256)].
+ # Details in (https://developers.google.com/protocol-buffers/docs/encoding)
+ signature_tail_length = self.SIGNATURE_SIZE + 5
+ self.assertGreater(metadata_total, signature_tail_length)
with open(output_file, 'rb') as verify_fp:
- verify_fp.seek(payload_offset + metadata_total - self.SIGNATURE_SIZE)
- metadata_signature = verify_fp.read(self.SIGNATURE_SIZE)
+ verify_fp.seek(payload_offset + metadata_total - signature_tail_length)
+ metadata_signature_proto_tail = verify_fp.read(signature_tail_length)
+
+ self.assertEqual(b'\x1d\x00\x01\x00\x00',
+ metadata_signature_proto_tail[-5:])
+ metadata_signature = metadata_signature_proto_tail[:-5]
# Now we extract the metadata hash via brillo_update_payload script, which
# will serve as the oracle result.
@@ -1253,11 +1068,13 @@
with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2:
self.assertEqual(fp1.read(), fp2.read())
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_init(self):
payload_signer = PayloadSigner()
self.assertEqual('openssl', payload_signer.signer)
- self.assertEqual(256, payload_signer.key_size)
+ self.assertEqual(256, payload_signer.maximum_signature_size)
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_init_withPassword(self):
common.OPTIONS.package_key = os.path.join(
self.testdata_dir, 'testkey_with_passwd')
@@ -1270,18 +1087,27 @@
def test_init_withExternalSigner(self):
common.OPTIONS.payload_signer = 'abc'
common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
- common.OPTIONS.payload_signer_key_size = '512'
+ common.OPTIONS.payload_signer_maximum_signature_size = '512'
payload_signer = PayloadSigner()
self.assertEqual('abc', payload_signer.signer)
self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
- self.assertEqual(512, payload_signer.key_size)
+ self.assertEqual(512, payload_signer.maximum_signature_size)
- def test_GetKeySizeInBytes_512Bytes(self):
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetMaximumSignatureSizeInBytes_512Bytes(self):
signing_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
# pylint: disable=protected-access
- key_size = PayloadSigner._GetKeySizeInBytes(signing_key)
- self.assertEqual(512, key_size)
+ signature_size = PayloadSigner._GetMaximumSignatureSizeInBytes(signing_key)
+ self.assertEqual(512, signature_size)
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetMaximumSignatureSizeInBytes_ECKey(self):
+ signing_key = os.path.join(self.testdata_dir, 'testkey_EC.key')
+ # pylint: disable=protected-access
+ signature_size = PayloadSigner._GetMaximumSignatureSizeInBytes(signing_key)
+ self.assertEqual(72, signature_size)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_Sign(self):
payload_signer = PayloadSigner()
input_file = os.path.join(self.testdata_dir, self.SIGFILE)
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index e0a635a..2b84413 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -23,7 +23,8 @@
import test_utils
from sign_target_files_apks import (
CheckApkAndApexKeysAvailable, EditTags, GetApkFileInfo, ReadApexKeysInfo,
- ReplaceCerts, ReplaceVerityKeyId, RewriteProps, WriteOtacerts)
+ ReplaceCerts, ReplaceVerityKeyId, RewriteAvbProps, RewriteProps,
+ WriteOtacerts)
class SignTargetFilesApksTest(test_utils.ReleaseToolsTestCase):
@@ -52,6 +53,40 @@
# Tags are sorted.
self.assertEqual(EditTags('xyz,abc,dev-keys,xyz'), ('abc,release-keys,xyz'))
+ def test_RewriteAvbProps(self):
+ misc_info = {
+ 'avb_boot_add_hash_footer_args':
+ ('--prop com.android.build.boot.os_version:R '
+ '--prop com.android.build.boot.security_patch:2019-09-05'),
+ 'avb_system_add_hashtree_footer_args':
+ ('--prop com.android.build.system.os_version:R '
+ '--prop com.android.build.system.security_patch:2019-09-05 '
+ '--prop com.android.build.system.fingerprint:'
+ 'Android/aosp_taimen/taimen:R/QT/foo:userdebug/test-keys'),
+ 'avb_vendor_add_hashtree_footer_args':
+ ('--prop com.android.build.vendor.os_version:R '
+ '--prop com.android.build.vendor.security_patch:2019-09-05 '
+ '--prop com.android.build.vendor.fingerprint:'
+ 'Android/aosp_taimen/taimen:R/QT/foo:userdebug/dev-keys'),
+ }
+ expected_dict = {
+ 'avb_boot_add_hash_footer_args':
+ ('--prop com.android.build.boot.os_version:R '
+ '--prop com.android.build.boot.security_patch:2019-09-05'),
+ 'avb_system_add_hashtree_footer_args':
+ ('--prop com.android.build.system.os_version:R '
+ '--prop com.android.build.system.security_patch:2019-09-05 '
+ '--prop com.android.build.system.fingerprint:'
+ 'Android/aosp_taimen/taimen:R/QT/foo:userdebug/release-keys'),
+ 'avb_vendor_add_hashtree_footer_args':
+ ('--prop com.android.build.vendor.os_version:R '
+ '--prop com.android.build.vendor.security_patch:2019-09-05 '
+ '--prop com.android.build.vendor.fingerprint:'
+ 'Android/aosp_taimen/taimen:R/QT/foo:userdebug/release-keys'),
+ }
+ RewriteAvbProps(misc_info)
+ self.assertDictEqual(expected_dict, misc_info)
+
def test_RewriteProps(self):
props = (
('', ''),
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index 2445671..e999757 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -145,6 +145,47 @@
return sparse_image
+class MockScriptWriter(object):
+ """A class that mocks edify_generator.EdifyGenerator.
+
+  It simply pushes the incoming arguments onto the script stack, so that tests
+  can assert the calls made to EdifyGenerator functions.
+ """
+
+ def __init__(self, enable_comments=False):
+ self.lines = []
+ self.enable_comments = enable_comments
+
+ def Mount(self, *args):
+ self.lines.append(('Mount',) + args)
+
+ def AssertDevice(self, *args):
+ self.lines.append(('AssertDevice',) + args)
+
+ def AssertOemProperty(self, *args):
+ self.lines.append(('AssertOemProperty',) + args)
+
+ def AssertFingerprintOrThumbprint(self, *args):
+ self.lines.append(('AssertFingerprintOrThumbprint',) + args)
+
+ def AssertSomeFingerprint(self, *args):
+ self.lines.append(('AssertSomeFingerprint',) + args)
+
+ def AssertSomeThumbprint(self, *args):
+ self.lines.append(('AssertSomeThumbprint',) + args)
+
+ def Comment(self, comment):
+ if not self.enable_comments:
+ return
+ self.lines.append('# {}'.format(comment))
+
+ def AppendExtra(self, extra):
+ self.lines.append(extra)
+
+ def __str__(self):
+ return '\n'.join(self.lines)
+
+
class ReleaseToolsTestCase(unittest.TestCase):
"""A common base class for all the releasetools unittests."""
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index 9c816eb..6504515 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -238,14 +238,14 @@
system_root = os.path.join(input_tmp, "SYSTEM")
os.mkdir(system_root)
- # Write the test file that contain multiple blocks of zeros, and these
- # zero blocks will be omitted by kernel. And the test files will occupy one
- # block range each in the final system image.
+    # Write test files that contain multiple blocks of zeros, and these zero
+    # blocks will be omitted by the kernel. Each test file will occupy one
+    # block in the final system image.
with open(os.path.join(system_root, 'a'), 'w') as f:
- f.write("aaa")
+ f.write('aaa')
f.write('\0' * 4096 * 3)
with open(os.path.join(system_root, 'b'), 'w') as f:
- f.write("bbb")
+ f.write('bbb')
f.write('\0' * 4096 * 3)
raw_file_map = os.path.join(input_tmp, 'IMAGES', 'raw_system.map')
@@ -254,7 +254,7 @@
# Parse the generated file map and update the block ranges for each file.
file_map_list = {}
image_ranges = RangeSet()
- with open(raw_file_map, 'r') as f:
+ with open(raw_file_map) as f:
for line in f.readlines():
info = line.split()
self.assertEqual(2, len(info))
@@ -265,7 +265,7 @@
mock_shared_block = RangeSet("10-20").subtract(image_ranges).first(1)
with open(os.path.join(input_tmp, 'IMAGES', 'system.map'), 'w') as f:
for key in sorted(file_map_list.keys()):
- line = "{} {}\n".format(
+ line = '{} {}\n'.format(
key, file_map_list[key].union(mock_shared_block))
f.write(line)
@@ -277,9 +277,55 @@
for name in all_entries:
input_zip.write(os.path.join(input_tmp, name), arcname=name)
- input_zip = zipfile.ZipFile(input_file, 'r')
- info_dict = {'extfs_sparse_flag': '-s'}
-
# Expect the validation to pass and both files are skipped due to
# 'incomplete' block range.
- ValidateFileConsistency(input_zip, input_tmp, info_dict)
+ with zipfile.ZipFile(input_file) as input_zip:
+ info_dict = {'extfs_sparse_flag': '-s'}
+ ValidateFileConsistency(input_zip, input_tmp, info_dict)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_ValidateFileConsistency_nonMonotonicRanges(self):
+ input_tmp = common.MakeTempDir()
+ os.mkdir(os.path.join(input_tmp, 'IMAGES'))
+ system_image = os.path.join(input_tmp, 'IMAGES', 'system.img')
+ system_root = os.path.join(input_tmp, "SYSTEM")
+ os.mkdir(system_root)
+
+ # Write the test file that contains three blocks of 'a', 'b', 'c'.
+ with open(os.path.join(system_root, 'abc'), 'w') as f:
+ f.write('a' * 4096 + 'b' * 4096 + 'c' * 4096)
+ raw_file_map = os.path.join(input_tmp, 'IMAGES', 'raw_system.map')
+ self._generate_system_image(system_image, system_root, raw_file_map)
+
+ # Parse the generated file map and manipulate the block ranges of 'abc' to
+ # be 'cba'.
+ file_map_list = {}
+ with open(raw_file_map) as f:
+ for line in f.readlines():
+ info = line.split()
+ self.assertEqual(2, len(info))
+ ranges = RangeSet(info[1])
+ self.assertTrue(ranges.monotonic)
+ blocks = reversed(list(ranges.next_item()))
+ file_map_list[info[0]] = ' '.join([str(block) for block in blocks])
+
+ # Update the contents of 'abc' to be 'cba'.
+ with open(os.path.join(system_root, 'abc'), 'w') as f:
+ f.write('c' * 4096 + 'b' * 4096 + 'a' * 4096)
+
+ # Update the system.map.
+ with open(os.path.join(input_tmp, 'IMAGES', 'system.map'), 'w') as f:
+ for key in sorted(file_map_list.keys()):
+ f.write('{} {}\n'.format(key, file_map_list[key]))
+
+ # Get the target zip file.
+ input_file = common.MakeTempFile()
+ all_entries = ['SYSTEM/', 'SYSTEM/abc', 'IMAGES/',
+ 'IMAGES/system.map', 'IMAGES/system.img']
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
+ for name in all_entries:
+ input_zip.write(os.path.join(input_tmp, name), arcname=name)
+
+ with zipfile.ZipFile(input_file) as input_zip:
+ info_dict = {'extfs_sparse_flag': '-s'}
+ ValidateFileConsistency(input_zip, input_tmp, info_dict)
diff --git a/tools/releasetools/testdata/testkey_EC.key b/tools/releasetools/testdata/testkey_EC.key
new file mode 100644
index 0000000..9e65a68
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_EC.key
@@ -0,0 +1,5 @@
+-----BEGIN PRIVATE KEY-----
+MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgGaguGj8Yb1KkqKHd
+ISblUsjtOCbzAuVpX81i02sm8FWhRANCAARBnuotwKOsuvjH6iwTDhOAi7Q5pLWz
+xDkZjg2pcfbfi9FFTvLYETas7B2W6fx9PUezUmHTFTDV2JZuMYYFdZOw
+-----END PRIVATE KEY-----
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index c299a48..9c2bc51 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -36,20 +36,21 @@
import os.path
import re
import zipfile
+from hashlib import sha1
import common
+import rangelib
def _ReadFile(file_name, unpacked_name, round_up=False):
"""Constructs and returns a File object. Rounds up its size if needed."""
-
assert os.path.exists(unpacked_name)
with open(unpacked_name, 'rb') as f:
file_data = f.read()
file_size = len(file_data)
if round_up:
file_size_rounded_up = common.RoundUpTo4K(file_size)
- file_data += '\0' * (file_size_rounded_up - file_size)
+ file_data += b'\0' * (file_size_rounded_up - file_size)
return common.File(file_name, file_data)
@@ -96,13 +97,15 @@
logging.warning('Skipping %s that has incomplete block list', entry)
continue
- # TODO(b/79951650): Handle files with non-monotonic ranges.
+ # If the file has non-monotonic ranges, read each range in order.
if not file_ranges.monotonic:
- logging.warning(
- 'Skipping %s that has non-monotonic ranges: %s', entry, file_ranges)
- continue
-
- blocks_sha1 = image.RangeSha1(file_ranges)
+ h = sha1()
+ for file_range in file_ranges.extra['text_str'].split(' '):
+ for data in image.ReadRangeSet(rangelib.RangeSet(file_range)):
+ h.update(data)
+ blocks_sha1 = h.hexdigest()
+ else:
+ blocks_sha1 = image.RangeSha1(file_ranges)
# The filename under unpacked directory, such as SYSTEM/bin/sh.
unpacked_name = os.path.join(
@@ -138,7 +141,7 @@
1. full recovery:
...
if ! applypatch --check type:device:size:sha1; then
- applypatch --flash /system/etc/recovery.img \\
+ applypatch --flash /vendor/etc/recovery.img \\
type:device:size:sha1 && \\
...
@@ -146,18 +149,26 @@
...
if ! applypatch --check type:recovery_device:recovery_size:recovery_sha1; then
applypatch [--bonus bonus_args] \\
- --patch /system/recovery-from-boot.p \\
+ --patch /vendor/recovery-from-boot.p \\
--source type:boot_device:boot_size:boot_sha1 \\
--target type:recovery_device:recovery_size:recovery_sha1 && \\
...
- For full recovery, we want to calculate the SHA-1 of /system/etc/recovery.img
+ For full recovery, we want to calculate the SHA-1 of /vendor/etc/recovery.img
and compare it against the one embedded in the script. While for recovery
from boot, we want to check the SHA-1 for both recovery.img and boot.img
under IMAGES/.
"""
- script_path = 'SYSTEM/bin/install-recovery.sh'
+ board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true"
+
+ if board_uses_vendorimage:
+ script_path = 'VENDOR/bin/install-recovery.sh'
+ recovery_img = 'VENDOR/etc/recovery.img'
+ else:
+ script_path = 'SYSTEM/vendor/bin/install-recovery.sh'
+ recovery_img = 'SYSTEM/vendor/etc/recovery.img'
+
if not os.path.exists(os.path.join(input_tmp, script_path)):
logging.info('%s does not exist in input_tmp', script_path)
return
@@ -188,7 +199,7 @@
# Validate the SHA-1 of the recovery image.
recovery_sha1 = flash_partition.split(':')[3]
ValidateFileAgainstSha1(
- input_tmp, 'recovery.img', 'SYSTEM/etc/recovery.img', recovery_sha1)
+ input_tmp, 'recovery.img', recovery_img, recovery_sha1)
else:
assert len(lines) == 11, "Invalid line count: {}".format(lines)
@@ -335,20 +346,25 @@
key = info_dict['avb_vbmeta_key_path']
# avbtool verifies all the images that have descriptors listed in vbmeta.
+ # Using `--follow_chain_partitions` so it would additionally verify chained
+ # vbmeta partitions (e.g. vbmeta_system).
image = os.path.join(input_tmp, 'IMAGES', 'vbmeta.img')
cmd = [info_dict['avb_avbtool'], 'verify_image', '--image', image,
- '--key', key]
+ '--key', key, '--follow_chain_partitions']
# Append the args for chained partitions if any.
for partition in common.AVB_PARTITIONS + common.AVB_VBMETA_PARTITIONS:
key_name = 'avb_' + partition + '_key_path'
if info_dict.get(key_name) is not None:
+ if info_dict.get('ab_update') != 'true' and partition == 'recovery':
+ continue
+
# Use the key file from command line if specified; otherwise fall back
# to the one in info dict.
key_file = options.get(key_name, info_dict[key_name])
chained_partition_arg = common.GetAvbChainedPartitionArg(
partition, info_dict, key_file)
- cmd.extend(["--expected_chain_partition", chained_partition_arg])
+ cmd.extend(['--expected_chain_partition', chained_partition_arg])
proc = common.Run(cmd)
stdoutdata, _ = proc.communicate()
@@ -360,6 +376,22 @@
'Verified %s with avbtool (key: %s):\n%s', image, key,
stdoutdata.rstrip())
+ # avbtool verifies recovery image for non-A/B devices.
+ if (info_dict.get('ab_update') != 'true' and
+ info_dict.get('no_recovery') != 'true'):
+ image = os.path.join(input_tmp, 'IMAGES', 'recovery.img')
+ key = info_dict['avb_recovery_key_path']
+ cmd = [info_dict['avb_avbtool'], 'verify_image', '--image', image,
+ '--key', key]
+ proc = common.Run(cmd)
+ stdoutdata, _ = proc.communicate()
+ assert proc.returncode == 0, \
+ 'Failed to verify {} with avbtool (key: {}):\n{}'.format(
+ image, key, stdoutdata)
+ logging.info(
+ 'Verified %s with avbtool (key: %s):\n%s', image, key,
+ stdoutdata.rstrip())
+
def main():
parser = argparse.ArgumentParser(
diff --git a/tools/warn.py b/tools/warn.py
index 5994124..6218f93 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -2264,7 +2264,7 @@
i['compiled_patterns'].append(re.compile(pat))
-def find_android_root(path):
+def find_warn_py_and_android_root(path):
"""Set and return android_root path if it is found."""
global android_root
parts = path.split('/')
@@ -2273,8 +2273,36 @@
# Android root directory should contain this script.
if os.path.exists(root_path + '/build/make/tools/warn.py'):
android_root = root_path
- return root_path
- return ''
+ return True
+ return False
+
+
+def find_android_root():
+ """Guess android_root from common prefix of file paths."""
+ # Use the longest common prefix of the absolute file paths
+ # of the first 10000 warning messages as the android_root.
+ global android_root
+ warning_lines = set()
+ warning_pattern = re.compile('^/[^ ]*/[^ ]*: warning: .*')
+ count = 0
+ infile = io.open(args.buildlog, mode='r', encoding='utf-8')
+ for line in infile:
+ if warning_pattern.match(line):
+ warning_lines.add(line)
+ count += 1
+ if count > 9999:
+ break
+ # Try to find warn.py and use its location to find
+ # the source tree root.
+ if count < 100:
+ path = os.path.normpath(re.sub(':.*$', '', line))
+ if find_warn_py_and_android_root(path):
+ return
+ # Do not use common prefix of a small number of paths.
+ if count > 10:
+ root_path = os.path.commonprefix(warning_lines)
+ if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
+ android_root = root_path[:-1]
def remove_android_root_prefix(path):
@@ -2289,13 +2317,10 @@
"""Normalize file path relative to android_root."""
# If path is not an absolute path, just normalize it.
path = os.path.normpath(path)
- if path[0] != '/':
- return path
# Remove known prefix of root path and normalize the suffix.
- if android_root or find_android_root(path):
+ if path[0] == '/' and android_root:
return remove_android_root_prefix(path)
- else:
- return path
+ return path
def normalize_warning_line(line):
@@ -2670,6 +2695,7 @@
def main():
+ find_android_root()
# We must use 'utf-8' codec to parse some non-ASCII code in warnings.
warning_lines = parse_input_file(
io.open(args.buildlog, mode='r', encoding='utf-8'))