Merge "Add test_mainline_modules option support in mk"
diff --git a/core/Makefile b/core/Makefile
index e099a72..1f9bd14 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -584,6 +584,10 @@
# #################################################################
ifneq ($(strip $(TARGET_NO_BOOTLOADER)),true)
INSTALLED_BOOTLOADER_MODULE := $(PRODUCT_OUT)/bootloader
+ ifdef BOARD_PREBUILT_BOOTLOADER
+ $(eval $(call copy-one-file,$(BOARD_PREBUILT_BOOTLOADER),$(INSTALLED_BOOTLOADER_MODULE)))
+ $(call dist-for-goals,dist_files,$(INSTALLED_BOOTLOADER_MODULE))
+ endif # BOARD_PREBUILT_BOOTLOADER
ifeq ($(strip $(TARGET_BOOTLOADER_IS_2ND)),true)
INSTALLED_2NDBOOTLOADER_TARGET := $(PRODUCT_OUT)/2ndbootloader
else
@@ -2286,6 +2290,26 @@
endif # TARGET_NO_KERNEL
endif # BOARD_BUILD_SYSTEM_ROOT_IMAGE is not true
+# Creates a compatibility symlink between two partitions, e.g. /system/vendor to /vendor
+# $1: from location (e.g. $(TARGET_OUT)/vendor)
+# $2: destination location (e.g. /vendor)
+# $3: partition image name (e.g. vendor.img)
+define create-partition-compat-symlink
+$(eval \
+$1:
+ @echo Symlink $(patsubst $(PRODUCT_OUT)/%,%,$1) to $2
+ mkdir -p $(dir $1)
+ if [ -d $1 ] && [ ! -h $1 ]; then \
+ echo 'Non-symlink $1 detected!' 1>&2; \
+ echo 'You cannot install files to $1 while building a separate $3!' 1>&2; \
+ exit 1; \
+ fi
+ ln -sfn $2 $1
+)
+$1
+endef
+
+
# -----------------------------------------------------------------
# system image
@@ -2293,6 +2317,21 @@
$(ALL_GENERATED_SOURCES) \
$(ALL_DEFAULT_INSTALLED_MODULES)))
+# Create symlink /system/vendor to /vendor if necessary.
+ifdef BOARD_USES_VENDORIMAGE
+ INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+endif
+
+# Create symlink /system/product to /product if necessary.
+ifdef BOARD_USES_PRODUCTIMAGE
+ INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+endif
+
+# Create symlink /system/system_ext to /system_ext if necessary.
+ifdef BOARD_USES_SYSTEM_EXTIMAGE
+ INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+endif
+
FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
# ASAN libraries in the system image - add dependency.
@@ -2332,57 +2371,9 @@
$(call intermediates-dir-for,PACKAGING,systemimage)
BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
-# Create symlink /system/vendor to /vendor if necessary.
-ifdef BOARD_USES_VENDORIMAGE
-define create-system-vendor-symlink
-$(hide) if [ -d $(TARGET_OUT)/vendor ] && [ ! -h $(TARGET_OUT)/vendor ]; then \
- echo 'Non-symlink $(TARGET_OUT)/vendor detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT)/vendor while building a separate vendor.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /vendor $(TARGET_OUT)/vendor
-endef
-else
-define create-system-vendor-symlink
-endef
-endif
-
-# Create symlink /system/product to /product if necessary.
-ifdef BOARD_USES_PRODUCTIMAGE
-define create-system-product-symlink
-$(hide) if [ -d $(TARGET_OUT)/product ] && [ ! -h $(TARGET_OUT)/product ]; then \
- echo 'Non-symlink $(TARGET_OUT)/product detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT)/product while building a separate product.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /product $(TARGET_OUT)/product
-endef
-else
-define create-system-product-symlink
-endef
-endif
-
-# Create symlink /system/system_ext to /system_ext if necessary.
-ifdef BOARD_USES_SYSTEM_EXTIMAGE
-define create-system-system_ext-symlink
-$(hide) if [ -d $(TARGET_OUT)/system_ext ] && [ ! -h $(TARGET_OUT)/system_ext ]; then \
- echo 'Non-symlink $(TARGET_OUT)/system_ext detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT)/system_ext while building a separate system_ext.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /system_ext $(TARGET_OUT)/system_ext
-endef
-else
-define create-system-system_ext-symlink
-endef
-endif
-
# $(1): output file
define build-systemimage-target
@echo "Target system fs image: $(1)"
- $(call create-system-vendor-symlink)
- $(call create-system-product-symlink)
- $(call create-system-system_ext-symlink)
@mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
$(call generate-image-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt,system, \
skip_fsck=true)
@@ -2665,29 +2656,10 @@
$(filter $(TARGET_OUT_VENDOR)/%,\
$(ALL_DEFAULT_INSTALLED_MODULES))
-INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
-INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
-$(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
-$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
- @echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
# Create symlink /vendor/odm to /odm if necessary.
ifdef BOARD_USES_ODMIMAGE
-define create-vendor-odm-symlink
-$(hide) if [ -d $(TARGET_OUT_VENDOR)/odm ] && [ ! -h $(TARGET_OUT_VENDOR)/odm ]; then \
- echo 'Non-symlink $(TARGET_OUT_VENDOR)/odm detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT_VENDOR)/odm while building a separate odm.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /odm $(TARGET_OUT_VENDOR)/odm
-endef
-else
-define create-vendor-odm-symlink
-endef
+ INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
endif
# Create symlinks for vendor_dlkm on devices with a vendor_dlkm partition:
@@ -2705,48 +2677,18 @@
# The vendor DLKMs and other vendor_dlkm files must not be accessed using other paths because they
# are not guaranteed to exist on all devices.
ifdef BOARD_USES_VENDOR_DLKMIMAGE
-define create-vendor-vendor_dlkm-symlink
-$(hide) mkdir -p $(TARGET_OUT_VENDOR)/lib
-$(hide) if [ -d $(TARGET_OUT_VENDOR)/lib/modules ] && [ ! -h $(TARGET_OUT_VENDOR)/lib/modules ]; then \
- echo 'Non-symlink $(TARGET_OUT_VENDOR)/lib/modules detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT_VENDOR)/lib/modules while building a separate vendor_dlkm.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /vendor_dlkm/lib/modules $(TARGET_OUT_VENDOR)/lib/modules
-endef
-else
-define create-vendor-vendor_dlkm-symlink
-endef
+ INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
endif
-# Create symlinks for odm_dlkm on devices with a odm_dlkm partition:
-# /odm/lib/modules -> /odm_dlkm/lib/modules
-#
-# On devices with a odm_dlkm partition,
-# - /odm/lib/modules is a symlink to a directory that stores odm DLKMs.
-# - /odm_dlkm/{etc,...} store other odm_dlkm files directly. The odm_dlkm partition is
-# mounted at /odm_dlkm at runtime and the symlinks created in system/core/rootdir/Android.mk
-# are hidden.
-# On devices without a odm_dlkm partition,
-# - /odm/lib/modules stores odm DLKMs directly.
-# - /odm_dlkm/{etc,...} are symlinks to directories that store other odm_dlkm files.
-# See system/core/rootdir/Android.mk for a list of created symlinks.
-# The odm DLKMs and other odm_dlkm files must not be accessed using other paths because they
-# are not guaranteed to exist on all devices.
-ifdef BOARD_USES_ODM_DLKMIMAGE
-define create-odm-odm_dlkm-symlink
-$(hide) mkdir -p $(TARGET_OUT_ODM)/lib
-$(hide) if [ -d $(TARGET_OUT_ODM)/lib/modules ] && [ ! -h $(TARGET_OUT_ODM)/lib/modules ]; then \
- echo 'Non-symlink $(TARGET_OUT_ODM)/lib/modules detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT_ODM)/lib/modules while building a separate odm_dlkm.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /odm_dlkm/lib/modules $(TARGET_OUT_ODM)/lib/modules
-endef
-else
-define create-odm-odm_dlkm-symlink
-endef
-endif
+INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
+INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
+$(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
+$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
+ @echo Installed file list: $@
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
+ $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
vendorimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,vendor)
@@ -2754,9 +2696,6 @@
define build-vendorimage-target
$(call pretty,"Target vendor fs image: $(INSTALLED_VENDORIMAGE_TARGET)")
@mkdir -p $(TARGET_OUT_VENDOR)
- $(call create-vendor-odm-symlink)
- $(call create-vendor-vendor_dlkm-symlink)
- $(call create-odm-odm_dlkm-symlink)
@mkdir -p $(vendorimage_intermediates) && rm -rf $(vendorimage_intermediates)/vendor_image_info.txt
$(call generate-image-prop-dictionary, $(vendorimage_intermediates)/vendor_image_info.txt,vendor,skip_fsck=true)
PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
@@ -2897,6 +2836,24 @@
$(filter $(TARGET_OUT_ODM)/%,\
$(ALL_DEFAULT_INSTALLED_MODULES))
+# Create symlinks for odm_dlkm on devices with an odm_dlkm partition:
+# /odm/lib/modules -> /odm_dlkm/lib/modules
+#
+# On devices with an odm_dlkm partition,
+# - /odm/lib/modules is a symlink to a directory that stores odm DLKMs.
+# - /odm_dlkm/{etc,...} store other odm_dlkm files directly. The odm_dlkm partition is
+# mounted at /odm_dlkm at runtime and the symlinks created in system/core/rootdir/Android.mk
+# are hidden.
+# On devices without an odm_dlkm partition,
+# - /odm/lib/modules stores odm DLKMs directly.
+# - /odm_dlkm/{etc,...} are symlinks to directories that store other odm_dlkm files.
+# See system/core/rootdir/Android.mk for a list of created symlinks.
+# The odm DLKMs and other odm_dlkm files must not be accessed using other paths because they
+# are not guaranteed to exist on all devices.
+ifdef BOARD_USES_ODM_DLKMIMAGE
+ INTERNAL_ODMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+endif
+
INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
INSTALLED_FILES_JSON_ODM := $(INSTALLED_FILES_FILE_ODM:.txt=.json)
$(INSTALLED_FILES_FILE_ODM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM)
@@ -4156,6 +4113,9 @@
ifdef DEVICE_MANIFEST_FILE
$(hide) echo "vintf_include_empty_vendor_sku=true" >> $@
endif
+ifeq ($(BOARD_BOOTLOADER_IN_UPDATE_PACKAGE),true)
+ $(hide) echo "bootloader_in_update_package=true" >> $@
+endif
.PHONY: misc_info
misc_info: $(INSTALLED_MISC_INFO_TARGET)
@@ -4304,34 +4264,98 @@
# full system image deps, we speed up builds that do not build the system
# image.
ifdef BUILDING_SYSTEM_IMAGE
-$(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+ $(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+endif
+
+ifdef BUILDING_USERDATA_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_USERDATAIMAGE_FILES)
+endif
+
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+endif
+
+ifdef BUILDING_VENDOR_BOOT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
+endif
+
+ifdef BUILDING_RECOVERY_IMAGE
+ # TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other
+ # BUILT_TARGET_FILES_PACKAGE dependencies because currently there are cp/rsync/rm
+ # commands in build-recoveryimage-target, which would touch the files under
+ # TARGET_RECOVERY_OUT and race with packaging target-files.zip.
+ ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+ else
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_RECOVERYIMAGE_TARGET)
+ endif
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERYIMAGE_FILES)
+endif
+
+# Conditionally depend on the image files if the image is being built, so the
+# target-files.zip rule doesn't wait on the image creation rule, or on the
+# installed image if it comes from a prebuilt.
+
+ifdef BUILDING_VENDOR_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDORIMAGE_FILES)
+else ifdef BOARD_PREBUILT_VENDORIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDORIMAGE_TARGET)
+endif
+
+ifdef BUILDING_PRODUCT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_PRODUCTIMAGE_FILES)
+else ifdef BOARD_PREBUILT_PRODUCTIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_PRODUCTIMAGE_TARGET)
+endif
+
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+endif
+
+ifdef BUILDING_BOOT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RAMDISK_FILES)
+else ifdef BOARD_PREBUILT_BOOTIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+endif
+
+ifdef BUILDING_ODM_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODMIMAGE_FILES)
+else ifdef BOARD_PREBUILT_ODMIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODMIMAGE_TARGET)
+endif
+
+ifdef BUILDING_VENDOR_DLKM_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
+endif
+
+ifdef BUILDING_ODM_DLKM_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODM_DLKMIMAGE_FILES)
+else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
endif
ifeq ($(BUILD_QEMU_IMAGES),true)
-MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
-$(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
+ MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
+ $(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
+endif
+
+ifdef BOARD_PREBUILT_BOOTLOADER
+$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTLOADER_MODULE)
+droidcore: $(INSTALLED_BOOTLOADER_MODULE)
endif
# Depending on the various images guarantees that the underlying
# directories are up-to-date.
$(BUILT_TARGET_FILES_PACKAGE): \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) \
$(INSTALLED_RADIOIMAGE_TARGET) \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
$(INSTALLED_CACHEIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
- $(INSTALLED_VBMETAIMAGE_TARGET) \
- $(INSTALLED_ODMIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
- $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
$(INSTALLED_DTBOIMAGE_TARGET) \
$(INSTALLED_CUSTOMIMAGES_TARGET) \
- $(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
$(INSTALLED_KERNEL_TARGET) \
$(INSTALLED_DTBIMAGE_TARGET) \
@@ -4359,12 +4383,6 @@
$(BUILT_KERNEL_VERSION_FILE) \
| $(ACP)
@echo "Package target files: $@"
- $(call create-system-vendor-symlink)
- $(call create-system-product-symlink)
- $(call create-system-system_ext-symlink)
- $(call create-vendor-odm-symlink)
- $(call create-vendor-vendor_dlkm-symlink)
- $(call create-odm-odm_dlkm-symlink)
$(hide) rm -rf $@ $@.list $(zip_root)
$(hide) mkdir -p $(dir $@) $(zip_root)
ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
@@ -4407,7 +4425,7 @@
ifdef BOARD_KERNEL_PAGESIZE
echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/pagesize
endif
-endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET not defined
+endif # not (BUILDING_VENDOR_BOOT_IMAGE and AB_OTA_UPDATER)
endif # INSTALLED_RECOVERYIMAGE_TARGET defined or BOARD_USES_RECOVERY_AS_BOOT is true
@# Components of the boot image
$(hide) mkdir -p $(zip_root)/BOOT
@@ -4597,14 +4615,22 @@
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_ODMIMAGE_TARGET) $(zip_root)/IMAGES/
endif
-ifdef BOARD_PREBUILT_VENDOR_DLKIMMAGE
+ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
$(hide) mkdir -p $(zip_root)/IMAGES
- $(hide) cp $(INSTALLED_VENDOR_DLKIMMAGE_TARGET) $(zip_root)/IMAGES/
+ $(hide) cp $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
+ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_ODM_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
endif
ifdef BOARD_PREBUILT_DTBOIMAGE
$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
endif # BOARD_PREBUILT_DTBOIMAGE
+ifdef BOARD_PREBUILT_BOOTLOADER
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_BOOTLOADER_MODULE) $(zip_root)/IMAGES/
+endif
ifneq ($(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)),)
$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
$(hide) $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
@@ -4683,9 +4709,7 @@
.PHONY: target-files-package
target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
-ifneq ($(filter $(MAKECMDGOALS),target-files-package),)
$(call dist-for-goals, target-files-package, $(BUILT_TARGET_FILES_PACKAGE))
-endif
# -----------------------------------------------------------------
# NDK Sysroot Package
@@ -4759,13 +4783,12 @@
APPCOMPAT_ZIP := $(PRODUCT_OUT)/appcompat.zip
# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(APPCOMPAT_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+$(APPCOMPAT_ZIP): $(FULL_SYSTEMIMAGE_DEPS) \
+ $(INTERNAL_RAMDISK_FILES) \
+ $(INTERNAL_USERDATAIMAGE_FILES) \
+ $(INTERNAL_VENDORIMAGE_FILES) \
+ $(INTERNAL_PRODUCTIMAGE_FILES) \
+ $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
endif
$(APPCOMPAT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,appcompat)/filelist
$(APPCOMPAT_ZIP): $(SOONG_ZIP)
@@ -4788,16 +4811,15 @@
SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
- $(INSTALLED_ODMIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
- $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+$(SYMBOLS_ZIP): $(FULL_SYSTEMIMAGE_DEPS) \
+ $(INTERNAL_RAMDISK_FILES) \
+ $(INTERNAL_USERDATAIMAGE_FILES) \
+ $(INTERNAL_VENDORIMAGE_FILES) \
+ $(INTERNAL_PRODUCTIMAGE_FILES) \
+ $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
+ $(INTERNAL_ODMIMAGE_FILES) \
+ $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
+ $(INTERNAL_ODM_DLKMIMAGE_FILES) \
$(updater_dep)
endif
$(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
@@ -4816,16 +4838,15 @@
endif
COVERAGE_ZIP := $(PRODUCT_OUT)/$(name).zip
ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
- $(INSTALLED_ODMIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
- $(INSTALLED_ODM_DLKMIMAGE_TARGET)
+$(COVERAGE_ZIP): $(FULL_SYSTEMIMAGE_DEPS) \
+ $(INTERNAL_RAMDISK_FILES) \
+ $(INTERNAL_USERDATAIMAGE_FILES) \
+ $(INTERNAL_VENDORIMAGE_FILES) \
+ $(INTERNAL_PRODUCTIMAGE_FILES) \
+ $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
+ $(INTERNAL_ODMIMAGE_FILES) \
+ $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
+ $(INTERNAL_ODM_DLKMIMAGE_FILES)
endif
$(COVERAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,coverage)/filelist
$(COVERAGE_ZIP): $(SOONG_ZIP)
@@ -4859,7 +4880,7 @@
name := $(name)-apps-$(FILE_NAME_TAG)
APPS_ZIP := $(PRODUCT_OUT)/$(name).zip
-$(APPS_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET)
+$(APPS_ZIP): $(FULL_SYSTEMIMAGE_DEPS)
@echo "Package apps: $@"
$(hide) rm -rf $@
$(hide) mkdir -p $(dir $@)
@@ -4894,16 +4915,15 @@
# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
ifeq (,$(TARGET_BUILD_UNBUNDLED))
$(PROGUARD_DICT_ZIP): \
- $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
- $(INSTALLED_ODMIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
- $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+ $(FULL_SYSTEMIMAGE_DEPS) \
+ $(INTERNAL_RAMDISK_FILES) \
+ $(INTERNAL_USERDATAIMAGE_FILES) \
+ $(INTERNAL_VENDORIMAGE_FILES) \
+ $(INTERNAL_PRODUCTIMAGE_FILES) \
+ $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
+ $(INTERNAL_ODMIMAGE_FILES) \
+ $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
+ $(INTERNAL_ODM_DLKMIMAGE_FILES) \
$(updater_dep)
endif
$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard)/filelist
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index d4ca56f..137b118 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -22,6 +22,17 @@
# autogen_test_config_file: Path to the test config file generated.
autogen_test_config_file := $(dir $(LOCAL_BUILT_MODULE))$(LOCAL_MODULE).config
+# TODO: (b/167308193) Switch to /data/local/tests/unrestricted as the default install base.
+autogen_test_install_base := /data/local/tmp
+# Automatically setup test root for native test.
+ifeq (true,$(is_native))
+ ifeq (true,$(LOCAL_VENDOR_MODULE))
+ autogen_test_install_base = /data/local/tests/vendor
+ endif
+ ifeq (true,$(LOCAL_USE_VNDK))
+ autogen_test_install_base = /data/local/tests/vendor
+ endif
+endif
ifeq (true,$(is_native))
ifeq ($(LOCAL_NATIVE_BENCHMARK),true)
autogen_test_config_template := $(NATIVE_BENCHMARK_TEST_CONFIG_TEMPLATE)
@@ -33,10 +44,11 @@
endif
endif
# Auto generating test config file for native test
+$(autogen_test_config_file): PRIVATE_TEST_INSTALL_BASE := $(autogen_test_install_base)
$(autogen_test_config_file): PRIVATE_MODULE_NAME := $(LOCAL_MODULE)
$(autogen_test_config_file) : $(autogen_test_config_template)
@echo "Auto generating test config $(notdir $@)"
- $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE_NAME)&g;s&{EXTRA_CONFIGS}&&g' $< > $@
+ $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE_NAME)&g;s&{TEST_INSTALL_BASE}&$(PRIVATE_TEST_INSTALL_BASE)&g;s&{EXTRA_CONFIGS}&&g' $< > $@
my_auto_generate_config := true
else
# Auto generating test config file for instrumentation test
diff --git a/core/base_rules.mk b/core/base_rules.mk
index da6991e..1b41898 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -115,6 +115,7 @@
include $(BUILD_SYSTEM)/local_vndk.mk
include $(BUILD_SYSTEM)/local_systemsdk.mk
+include $(BUILD_SYSTEM)/local_current_sdk.mk
my_module_tags := $(LOCAL_MODULE_TAGS)
ifeq ($(my_host_cross),true)
@@ -515,7 +516,11 @@
$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
$(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
@echo "Install: $@"
+ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ $(copy-file-or-link-to-new-target)
+else
$(copy-file-to-new-target)
+endif
$(PRIVATE_POST_INSTALL_CMD)
endif
diff --git a/core/board_config.mk b/core/board_config.mk
index 12b26c6..95d8af8 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -20,6 +20,7 @@
# ###############################################################
_board_strip_readonly_list := \
+ BOARD_BOOTLOADER_IN_UPDATE_PACKAGE \
BOARD_EGL_CFG \
BOARD_HAVE_BLUETOOTH \
BOARD_INSTALLER_CMDLINE \
diff --git a/core/config.mk b/core/config.mk
index 57296d8..1bbb78c 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -678,33 +678,22 @@
PRODUCT_USE_VNDK := $(PRODUCT_FULL_TREBLE)
endif
-# Define PRODUCT_PRODUCT_VNDK_VERSION if PRODUCT_USE_VNDK is true and
-# PRODUCT_SHIPPING_API_LEVEL is greater than 29.
-PRODUCT_USE_PRODUCT_VNDK := false
ifeq ($(PRODUCT_USE_VNDK),true)
- ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
- PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
- else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
- # No shipping level defined
- else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
- PRODUCT_USE_PRODUCT_VNDK := true
- endif
-
ifndef BOARD_VNDK_VERSION
BOARD_VNDK_VERSION := current
endif
-
- ifeq ($(PRODUCT_USE_PRODUCT_VNDK),true)
- ifndef PRODUCT_PRODUCT_VNDK_VERSION
- PRODUCT_PRODUCT_VNDK_VERSION := current
- endif
- endif
endif
$(KATI_obsolete_var PRODUCT_USE_VNDK,Use BOARD_VNDK_VERSION instead)
$(KATI_obsolete_var PRODUCT_USE_VNDK_OVERRIDE,Use BOARD_VNDK_VERSION instead)
-$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
-$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+
+ifdef PRODUCT_PRODUCT_VNDK_VERSION
+ ifndef BOARD_VNDK_VERSION
+ # VNDK for the product partition is not available unless BOARD_VNDK_VERSION
+ # is defined.
+ $(error PRODUCT_PRODUCT_VNDK_VERSION cannot be defined without defining BOARD_VNDK_VERSION)
+ endif
+endif
# Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
# devices if unset.
@@ -720,6 +709,16 @@
endif
endif
+ifndef BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
+ BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES := current
+else
+ ifdef PRODUCT_SHIPPING_API_LEVEL
+ ifneq ($(call math_lt,$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES),$(PRODUCT_SHIPPING_API_LEVEL)),)
+ $(error BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES ($(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)) must be greater than or equal to PRODUCT_SHIPPING_API_LEVEL ($(PRODUCT_SHIPPING_API_LEVEL)))
+ endif
+ endif
+endif
+.KATI_READONLY := BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
ifdef PRODUCT_SHIPPING_API_LEVEL
ifneq ($(call numbers_less_than,$(PRODUCT_SHIPPING_API_LEVEL),$(BOARD_SYSTEMSDK_VERSIONS)),)
diff --git a/core/definitions.mk b/core/definitions.mk
index 2bf1ba6..ace3ff8 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -110,6 +110,9 @@
# All compatibility suites mentioned in LOCAL_COMPATIBILITY_SUITES
ALL_COMPATIBILITY_SUITES :=
+# All compatibility suite files to dist.
+ALL_COMPATIBILITY_DIST_FILES :=
+
# All LINK_TYPE entries
ALL_LINK_TYPES :=
@@ -2551,6 +2554,18 @@
$(hide) cp $< $@
endef
+# The same as copy-file-to-new-target, but preserves symlinks. Symlinks are
+# converted to absolute paths so they do not break when the link is copied.
+define copy-file-or-link-to-new-target
+@mkdir -p $(dir $@)
+$(hide) rm -f $@
+$(hide) if [ -h $< ]; then \
+ ln -s $$(realpath $<) $@; \
+else \
+ cp $< $@; \
+fi
+endef
+
# Copy a prebuilt file to a target location.
define transform-prebuilt-to-target
@echo "$($(PRIVATE_PREFIX)DISPLAY) Prebuilt: $(PRIVATE_MODULE) ($@)"
@@ -2563,6 +2578,13 @@
$(copy-file-to-target-strip-comments)
endef
+# Copy a prebuilt file to a target location, but preserve symlinks rather than
+# dereference them.
+define copy-or-link-prebuilt-to-target
+@echo "$($(PRIVATE_PREFIX)DISPLAY) Prebuilt: $(PRIVATE_MODULE) ($@)"
+$(copy-file-or-link-to-new-target)
+endef
+
# Copy a list of files/directories to target location, with sub dir structure preserved.
# For example $(HOST_OUT_EXECUTABLES)/aapt -> $(staging)/bin/aapt .
# $(1): the source list of files/directories.
@@ -2812,6 +2834,7 @@
# 2. Add all the files to each suite's dependent files list.
# 3. Do the dependency addition to my_all_targets.
# 4. Save the module name to COMPATIBILITY.$(suite).MODULES for each suite.
+# 5. Collect files to dist to ALL_COMPATIBILITY_DIST_FILES.
# Requires for each suite: use my_compat_dist_config_$(suite) to define the test config.
# and use my_compat_dist_$(suite) to define the others.
define create-suite-dependencies
@@ -2824,9 +2847,11 @@
$$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
$$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
$$(my_compat_dist_test_data_$(suite))) \
+ $(eval ALL_COMPATIBILITY_DIST_FILES += $$(my_compat_dist_$(suite))) \
$(eval COMPATIBILITY.$(suite).MODULES += $$(my_register_name))) \
-$(eval $(my_all_targets) : $(call copy-many-files, \
- $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))) \
+$(eval $(my_all_targets) : \
+ $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE), \
+ $(foreach f,$(my_compat_dist_$(suite)), $(call word-colon,2,$(f))))) \
$(call copy-many-xml-files-checked, \
$(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_config_$(suite))))))
endef
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 167fed9..76e7dd3 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -328,7 +328,7 @@
HOST_OUT := $(HOST_OUT_ROOT)/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
SOONG_HOST_OUT := $(SOONG_OUT_DIR)/host/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
-HOST_CROSS_OUT := $(HOST_OUT_ROOT)/windows-$(HOST_PREBUILT_ARCH)
+HOST_CROSS_OUT := $(HOST_OUT_ROOT)/$(HOST_CROSS_OS)-$(HOST_CROSS_ARCH)
.KATI_READONLY := HOST_OUT SOONG_HOST_OUT HOST_CROSS_OUT
diff --git a/core/local_current_sdk.mk b/core/local_current_sdk.mk
new file mode 100644
index 0000000..ea7da8a
--- /dev/null
+++ b/core/local_current_sdk.mk
@@ -0,0 +1,26 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+ifdef BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
+ ifneq (current,$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
+ ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+ ifeq (current,$(LOCAL_SDK_VERSION))
+ LOCAL_SDK_VERSION := $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+ else ifeq (system_current,$(LOCAL_SDK_VERSION))
+ LOCAL_SDK_VERSION := system_$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+ endif
+ endif
+ endif
+endif
diff --git a/core/main.mk b/core/main.mk
index bf07b49..36071b8 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -419,7 +419,7 @@
sdk_repo_goal := $(strip $(filter sdk_repo,$(MAKECMDGOALS)))
MAKECMDGOALS := $(strip $(filter-out sdk_repo,$(MAKECMDGOALS)))
-ifneq ($(words $(sort $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests target-files-package,$(MAKECMDGOALS)))),1)
+ifneq ($(words $(sort $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests,$(MAKECMDGOALS)))),1)
$(error The 'sdk' target may not be specified with any other targets)
endif
@@ -1406,6 +1406,10 @@
test_files :=
endif
+# Deduplicate compatibility suite dist files across modules and packages before
+# copying them to their requested locations. Assign the eval result to an unused
+# var to prevent Make from trying to make sense of it.
+_unused := $(call copy-many-files, $(sort $(ALL_COMPATIBILITY_DIST_FILES)))
# Don't include any GNU General Public License shared objects or static
# libraries in SDK images. GPL executables (not static/dynamic libraries)
diff --git a/core/native_test_config_template.xml b/core/native_test_config_template.xml
index ef1818f..ea982cf 100644
--- a/core/native_test_config_template.xml
+++ b/core/native_test_config_template.xml
@@ -22,11 +22,11 @@
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="true" />
- <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+ <option name="push" value="{MODULE}->{TEST_INSTALL_BASE}/{MODULE}" />
</target_preparer>
<test class="com.android.tradefed.testtype.GTest" >
- <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="native-test-device-path" value="{TEST_INSTALL_BASE}" />
<option name="module-name" value="{MODULE}" />
</test>
</configuration>
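As a rough illustration (not part of the build) of the sed substitution that core/autogen_test_config.mk now applies to this template, the sketch below replaces the {MODULE}, {TEST_INSTALL_BASE}, and {EXTRA_CONFIGS} placeholders; the abbreviated template string and module name are hypothetical, while the install-base values mirror the ones set above (/data/local/tmp by default, /data/local/tests/vendor for vendor or VNDK native tests).

    # Illustrative stand-in for the sed command in core/autogen_test_config.mk.
    # The template string is abbreviated and the module name is hypothetical.
    TEMPLATE = (
        '<option name="push" value="{MODULE}->{TEST_INSTALL_BASE}/{MODULE}" />\n'
        '<option name="native-test-device-path" value="{TEST_INSTALL_BASE}" />'
    )

    def autogen_config(module, vendor_or_vndk=False):
        # Vendor/VNDK native tests land in /data/local/tests/vendor; everything
        # else keeps the /data/local/tmp default.
        base = "/data/local/tests/vendor" if vendor_or_vndk else "/data/local/tmp"
        return (TEMPLATE.replace("{MODULE}", module)
                        .replace("{TEST_INSTALL_BASE}", base)
                        .replace("{EXTRA_CONFIGS}", ""))

    print(autogen_config("hello_world_vendor_test", vendor_or_vndk=True))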
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 4d1009f..6fccacd 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -35,7 +35,6 @@
sdk_addon \
sdk_repo \
stnod \
- target-files-package \
test-art% \
user \
userdataimage \
diff --git a/core/product.mk b/core/product.mk
index 324010c..624501e 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -323,7 +323,7 @@
# VNDK version of product partition. It can be 'current' if the product
# partitions uses PLATFORM_VNDK_VERSION.
-_product_single_value_var += PRODUCT_PRODUCT_VNDK_VERSION
+_product_single_value_vars += PRODUCT_PRODUCT_VNDK_VERSION
# Whether the list of allowed of actionable compatible properties should be disabled or not
_product_single_value_vars += PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE
diff --git a/core/product_config.mk b/core/product_config.mk
index 38926c2..6170b5b 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -358,6 +358,12 @@
$(KATI_obsolete_var OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS \
,Use PRODUCT_EXTRA_VNDK_VERSIONS instead)
+# If the build command defines OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE,
+# override PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE with it unless it is
+# defined as `false`. If the value is `false`, clear
+# PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE.
+# OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE can be used for
+# testing only.
ifdef OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE
ifeq (false,$(OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))
PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE :=
@@ -367,11 +373,35 @@
else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
# No shipping level defined
else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
+ # Enforce product interface if PRODUCT_SHIPPING_API_LEVEL is greater than 29.
PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := true
endif
$(KATI_obsolete_var OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE,Use PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE instead)
+# If the build command defines PRODUCT_USE_PRODUCT_VNDK_OVERRIDE as `false`,
+# PRODUCT_PRODUCT_VNDK_VERSION will not be defined automatically.
+# PRODUCT_USE_PRODUCT_VNDK_OVERRIDE can be used for testing only.
+PRODUCT_USE_PRODUCT_VNDK := false
+ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
+ PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+ # No shipping level defined
+else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
+ # Enforce product interface for VNDK if PRODUCT_SHIPPING_API_LEVEL is greater
+ # than 29.
+ PRODUCT_USE_PRODUCT_VNDK := true
+endif
+
+ifeq ($(PRODUCT_USE_PRODUCT_VNDK),true)
+ ifndef PRODUCT_PRODUCT_VNDK_VERSION
+ PRODUCT_PRODUCT_VNDK_VERSION := current
+ endif
+endif
+
+$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+
define product-overrides-config
$$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
$$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index c9b742a..a0315a5 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -142,8 +142,16 @@
$(LOCAL_BUILT_MODULE): $(same_vndk_variants_stamp)
endif
+# Use copy-or-link-prebuilt-to-target for host executables and shared libraries,
+# to preserve symlinks to the source trees. They can then run directly from the
+# prebuilt directories where the linker can load their dependencies using
+# relative RUNPATHs.
$(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
+ $(copy-or-link-prebuilt-to-target)
+else
$(transform-prebuilt-to-target)
+endif
ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
$(hide) chmod +x $@
endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 4731250..ad2e816 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -210,6 +210,7 @@
$(call end_json_map)
$(call add_json_bool, EnforceProductPartitionInterface, $(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))
+$(call add_json_str, DeviceCurrentApiLevelForVendorModules, $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
$(call add_json_bool, InstallExtraFlattenedApexes, $(PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES))
diff --git a/core/soong_rust_prebuilt.mk b/core/soong_rust_prebuilt.mk
index 804e37e..de6bafd 100644
--- a/core/soong_rust_prebuilt.mk
+++ b/core/soong_rust_prebuilt.mk
@@ -57,7 +57,11 @@
endif
$(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
+ $(copy-or-link-prebuilt-to-target)
+else
$(transform-prebuilt-to-target)
+endif
ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
$(hide) chmod +x $@
endif
diff --git a/core/tasks/platform_availability_check.mk b/core/tasks/platform_availability_check.mk
index 043d130..f252ff5 100644
--- a/core/tasks/platform_availability_check.mk
+++ b/core/tasks/platform_availability_check.mk
@@ -26,11 +26,31 @@
$(if $(filter true,$(ALL_MODULES.$(m).NOT_AVAILABLE_FOR_PLATFORM)),\
$(m))))))
-_violators_with_path := $(foreach m,$(sort $(_modules_not_available_for_platform)),\
+ifndef ALLOW_MISSING_DEPENDENCIES
+ _violators_with_path := $(foreach m,$(sort $(_modules_not_available_for_platform)),\
$(m):$(word 1,$(ALL_MODULES.$(m).PATH))\
-)
+ )
-$(call maybe-print-list-and-error,$(_violators_with_path),\
+ $(call maybe-print-list-and-error,$(_violators_with_path),\
Following modules are requested to be installed. But are not available \
for platform because they do not have "//apex_available:platform" or \
they depend on other modules that are not available for platform)
+
+else
+
+# Don't error out immediately when ALLOW_MISSING_DEPENDENCIES is set.
+# Instead, add a dependency on a rule that prints the error message.
+ define not_available_for_platform_rule
+ not_installable_file := $(patsubst $(OUT_DIR)/%,$(OUT_DIR)/NOT_AVAILABLE_FOR_PLATFORM/%,$(1))
+ $(1): $$(not_installable_file)
+ $$(not_installable_file):
+ $(call echo-error,$(2),Module is requested to be installed but is not \
+available for platform because it does not have "//apex_available:platform" or \
+it depends on other modules that are not available for platform.)
+ exit 1
+ endef
+
+ $(foreach m,$(_modules_not_available_for_platform),\
+ $(foreach i,$(ALL_MODULES.$(m).INSTALLED),\
+ $(eval $(call not_available_for_platform_rule,$(i),$(m)))))
+endif
diff --git a/rbesetup.sh b/rbesetup.sh
index adcf081..724ad7d 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -36,6 +36,7 @@
# This function sets RBE specific environment variables needed for the build to
# be executed by RBE. This file should be sourced once per checkout of Android code.
function _set_rbe_vars() {
+ unset USE_GOMA
export USE_RBE="true"
export RBE_CXX_EXEC_STRATEGY="remote_local_fallback"
export RBE_JAVAC=1
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index f6770fb..cf32977 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -188,6 +188,7 @@
libwilhelm \
linker \
linkerconfig \
+ llkd \
lmkd \
LocalTransport \
locksettings \
@@ -352,6 +353,8 @@
PRODUCT_SYSTEM_PROPERTIES += debug.atrace.tags.enableflags=0
+PRODUCT_PROPERTY_OVERRIDES += ro.gfx.angle.supported=true
+
# Packages included only for eng or userdebug builds, previously debug tagged
PRODUCT_PACKAGES_DEBUG := \
adb_keys \
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index bf74c4a..81ac600 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -39,9 +39,6 @@
Tag \
TimeZoneUpdater \
-# Binaries
-PRODUCT_PACKAGES += llkd
-
# OTA support
PRODUCT_PACKAGES += \
recovery-refresh \
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index ace00ac..7f727fb 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -49,6 +49,7 @@
required: [
"blk_alloc_to_base_fs",
"e2fsck",
+ "mkuserimg_mke2fs",
"simg2img",
"tune2fs",
],
@@ -88,16 +89,35 @@
],
}
+python_library_host {
+ name: "ota_metadata_proto",
+ version: {
+ py2: {
+ enabled: true,
+ },
+ py3: {
+ enabled: true,
+ },
+ },
+ srcs: [
+ "ota_metadata.proto",
+ ],
+ proto: {
+ canonical_path_from_root: false,
+ },
+}
+
python_defaults {
name: "releasetools_ota_from_target_files_defaults",
srcs: [
"edify_generator.py",
- "ota_from_target_files.py",
"non_ab_ota.py",
- "target_files_diff.py",
+ "ota_from_target_files.py",
"ota_utils.py",
+ "target_files_diff.py",
],
libs: [
+ "ota_metadata_proto",
"releasetools_check_target_files_vintf",
"releasetools_common",
"releasetools_verity_utils",
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 56785d6..c77d8c6 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -118,7 +118,7 @@
AVB_VBMETA_PARTITIONS = ('vbmeta_system', 'vbmeta_vendor')
# Partitions that should have their care_map added to META/care_map.pb
-PARTITIONS_WITH_CARE_MAP = (
+PARTITIONS_WITH_CARE_MAP = [
'system',
'vendor',
'product',
@@ -126,7 +126,7 @@
'odm',
'vendor_dlkm',
'odm_dlkm',
-)
+]
class ErrorCode(object):
@@ -729,10 +729,14 @@
fingerprint = build_info.GetPartitionFingerprint(partition)
if fingerprint:
d["avb_{}_salt".format(partition)] = sha256(fingerprint.encode()).hexdigest()
-
+ try:
+ d["ab_partitions"] = read_helper("META/ab_partitions.txt").split("\n")
+ except KeyError:
+ logger.warning("Can't find META/ab_partitions.txt")
return d
+
def LoadListFromFile(file_path):
with open(file_path) as f:
return f.read().splitlines()
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index ab38d0d..5409194 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -58,6 +58,7 @@
OPTIONS.additional_entries = []
OPTIONS.bootable_only = False
OPTIONS.put_super = None
+OPTIONS.put_bootloader = None
OPTIONS.dynamic_partition_list = None
OPTIONS.super_device_list = None
OPTIONS.retrofit_dap = None
@@ -75,6 +76,7 @@
info = OPTIONS.info_dict = common.LoadInfoDict(input_zip)
OPTIONS.put_super = info.get('super_image_in_update_package') == 'true'
+ OPTIONS.put_bootloader = info.get('bootloader_in_update_package') == 'true'
OPTIONS.dynamic_partition_list = info.get('dynamic_partition_list',
'').strip().split()
OPTIONS.super_device_list = info.get('super_block_devices',
@@ -122,9 +124,11 @@
for image_path in [name for name in namelist if name.startswith('IMAGES/')]:
image = os.path.basename(image_path)
- if OPTIONS.bootable_only and image not in ('boot.img', 'recovery.img'):
+ if OPTIONS.bootable_only and image not in ('boot.img', 'recovery.img', 'bootloader'):
continue
- if not image.endswith('.img'):
+ if not image.endswith('.img') and image != 'bootloader':
+ continue
+ if image == 'bootloader' and not OPTIONS.put_bootloader:
continue
# Filter out super_empty and the images that are already in super partition.
if OPTIONS.put_super:
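A condensed sketch of the filtering above (super-partition handling omitted, entry names hypothetical), showing that the prebuilt 'bootloader' entry is only packaged when bootloader_in_update_package was written to misc_info:

    # Condensed sketch of the IMAGES/ filter in img_from_target_files.py;
    # super-partition handling is omitted and the entry names are hypothetical.
    import os

    def keep(image_path, bootable_only, put_bootloader):
        image = os.path.basename(image_path)
        if bootable_only and image not in ('boot.img', 'recovery.img', 'bootloader'):
            return False
        if not image.endswith('.img') and image != 'bootloader':
            return False
        if image == 'bootloader' and not put_bootloader:
            return False
        return True

    names = ['IMAGES/boot.img', 'IMAGES/vendor.img', 'IMAGES/bootloader', 'IMAGES/care_map.pb']
    print([n for n in names if keep(n, bootable_only=True, put_bootloader=True)])
    # ['IMAGES/boot.img', 'IMAGES/bootloader']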
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 3a87957..471ef25 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -276,7 +276,7 @@
script.SetProgress(1)
script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
- metadata["ota-required-cache"] = str(script.required_cache)
+ metadata.required_cache = script.required_cache
# We haven't written the metadata entry, which will be done in
# FinalizeMetadata.
@@ -530,7 +530,7 @@
script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
else:
script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
- metadata["ota-required-cache"] = str(script.required_cache)
+ metadata.required_cache = script.required_cache
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index f42974f..2833397 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -216,6 +216,7 @@
import zipfile
import common
+import ota_utils
import target_files_diff
from check_target_files_vintf import CheckVintfIfTrebleEnabled
from non_ab_ota import GenerateNonAbOtaPackage
@@ -228,20 +229,16 @@
logger = logging.getLogger(__name__)
-OPTIONS = common.OPTIONS
-OPTIONS.package_key = None
-OPTIONS.incremental_source = None
+OPTIONS = ota_utils.OPTIONS
OPTIONS.verify = False
OPTIONS.patch_threshold = 0.95
OPTIONS.wipe_user_data = False
-OPTIONS.downgrade = False
OPTIONS.extra_script = None
OPTIONS.worker_threads = multiprocessing.cpu_count() // 2
if OPTIONS.worker_threads == 0:
OPTIONS.worker_threads = 1
OPTIONS.two_step = False
OPTIONS.include_secondary = False
-OPTIONS.no_signing = False
OPTIONS.block_based = True
OPTIONS.updater_binary = None
OPTIONS.oem_dicts = None
@@ -257,14 +254,9 @@
OPTIONS.payload_signer_args = []
OPTIONS.payload_signer_maximum_signature_size = None
OPTIONS.extracted_input = None
-OPTIONS.key_passwords = []
OPTIONS.skip_postinstall = False
-OPTIONS.retrofit_dynamic_partitions = False
OPTIONS.skip_compatibility_check = False
-OPTIONS.output_metadata_path = None
OPTIONS.disable_fec_computation = False
-OPTIONS.force_non_ab = False
-OPTIONS.boot_variable_file = None
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
@@ -786,7 +778,7 @@
with open(new_ab_partitions, 'w') as f:
for partition in ab_partitions:
if (partition in dynamic_partition_list and
- partition not in super_block_devices):
+ partition not in super_block_devices):
logger.info("Dropping %s from ab_partitions.txt", partition)
continue
f.write(partition + "\n")
@@ -833,31 +825,49 @@
compression=zipfile.ZIP_DEFLATED)
if source_file is not None:
+ assert "ab_partitions" in OPTIONS.source_info_dict, \
+ "META/ab_partitions.txt is required for ab_update."
+ assert "ab_partitions" in OPTIONS.target_info_dict, \
+ "META/ab_partitions.txt is required for ab_update."
target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
else:
+ assert "ab_partitions" in OPTIONS.info_dict, \
+ "META/ab_partitions.txt is required for ab_update."
target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
source_info = None
- # Metadata to comply with Android OTA package format.
- metadata = GetPackageMetadata(target_info, source_info)
-
if OPTIONS.retrofit_dynamic_partitions:
target_file = GetTargetFilesZipForRetrofitDynamicPartitions(
target_file, target_info.get("super_block_devices").strip().split(),
target_info.get("dynamic_partition_list").strip().split())
elif OPTIONS.skip_postinstall:
target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
+ # target_file may have been modified; reparse ab_partitions.
+ with zipfile.ZipFile(target_file, allowZip64=True) as zfp:
+ target_info.info_dict['ab_partitions'] = zfp.read(
+ AB_PARTITIONS).strip().split("\n")
+ # Metadata to comply with Android OTA package format.
+ metadata = GetPackageMetadata(target_info, source_info)
# Generate payload.
payload = Payload()
+ partition_timestamps = []
# Enforce a max timestamp this payload can be applied on top of.
if OPTIONS.downgrade:
max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
else:
- max_timestamp = metadata["post-timestamp"]
+ max_timestamp = str(metadata.postcondition.timestamp)
+ partition_timestamps = [
+ part.partition_name + ":" + part.version
+ for part in metadata.postcondition.partition_state]
additional_args = ["--max_timestamp", max_timestamp]
+ if partition_timestamps:
+ additional_args.extend(
+ ["--partition_timestamps", ",".join(
+ partition_timestamps)]
+ )
payload.Generate(target_file, source_file, additional_args)
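For a concrete sense of the argument shape (all values below are hypothetical), a postcondition with a boot partition carrying a kmi version and a vendor partition carrying a timestamp would produce:

    # Hypothetical values illustrating the args handed to payload.Generate().
    partition_timestamps = ["boot:5.4.42-android12-0", "vendor:1601234567"]
    additional_args = ["--max_timestamp", "1601234567"]
    additional_args.extend(["--partition_timestamps", ",".join(partition_timestamps)])
    # additional_args ==
    #   ['--max_timestamp', '1601234567',
    #    '--partition_timestamps', 'boot:5.4.42-android12-0,vendor:1601234567']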
@@ -885,7 +895,7 @@
# into A/B OTA package.
target_zip = zipfile.ZipFile(target_file, "r")
if (target_info.get("verity") == "true" or
- target_info.get("avb_enable") == "true"):
+ target_info.get("avb_enable") == "true"):
care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
"META/" + x in target_zip.namelist()]
@@ -1081,7 +1091,7 @@
# use_dynamic_partitions but target build does.
if (OPTIONS.source_info_dict and
OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
- OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
+ OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
raise common.ExternalError(
"Expect to generate incremental OTA for retrofitting dynamic "
diff --git a/tools/releasetools/ota_metadata.proto b/tools/releasetools/ota_metadata.proto
new file mode 100644
index 0000000..20d3091
--- /dev/null
+++ b/tools/releasetools/ota_metadata.proto
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// If you change this file,
+// please update ota_metadata_pb2.py by executing
+// protoc ota_metadata.proto --python_out $ANDROID_BUILD_TOP/build/tools/releasetools
+
+
+syntax = "proto3";
+
+package build.tools.releasetools;
+option optimize_for = LITE_RUNTIME;
+
+// The build information of a particular partition on the device.
+message PartitionState {
+ string partition_name = 1;
+ repeated string device = 2;
+ repeated string build = 3;
+ // The version string of the partition. It's usually the timestamp if present.
+ // One known exception is the boot image, which uses the kmi version, e.g.
+ // 5.4.42-android12-0
+ string version = 4;
+
+ // TODO(xunchang), revisit other necessary fields, e.g. security_patch_level.
+}
+
+// The build information on the device. The bytes of the running images are thus
+// inferred from the device state. For more information on the meaning of each
+// subfield, check
+// https://source.android.com/compatibility/android-cdd#3_2_2_build_parameters
+message DeviceState {
+ // The device name, i.e. ro.product.device; if the field has multiple values, it
+ // means the OTA package supports multiple devices. This usually happens when
+ // we use the same image to support multiple SKUs.
+ repeated string device = 1;
+ // The device fingerprint. Up to the R build, the value is read from
+ // ro.build.fingerprint.
+ repeated string build = 2;
+ // A value that specifies a version of the Android build.
+ string build_incremental = 3;
+ // The timestamp when the build is generated.
+ int64 timestamp = 4;
+ // The version of the currently-executing Android system.
+ string sdk_level = 5;
+ // A value indicating the security patch level of a build.
+ string security_patch_level = 6;
+
+ // The detailed state of each partition. For partial updates or devices with
+ // mixed builds of partitions, some of the above fields may be left empty, and
+ // the client will rely on the information of specific partitions to target the
+ // update.
+ repeated PartitionState partition_state = 7;
+}
+
+// The metadata of an OTA package. It contains the information of the package
+// and the prerequisites to install the update correctly.
+message OtaMetadata {
+ enum OtaType {
+ UNKNOWN = 0;
+ AB = 1;
+ BLOCK = 2;
+ BRICK = 3;
+ };
+ OtaType type = 1;
+ // True if we need to wipe after the update.
+ bool wipe = 2;
+ // True if the timestamp of the post build is older than the pre build.
+ bool downgrade = 3;
+ // A map of name:content of property files, e.g. ota-property-files.
+ map<string, string> property_files = 4;
+
+ // The required device state in order to install the package.
+ DeviceState precondition = 5;
+ // The expected device state after the update.
+ DeviceState postcondition = 6;
+
+ // True if the OTA updates a device to support dynamic partitions while the
+ // source build doesn't support them.
+ bool retrofit_dynamic_partitions = 7;
+ // The required size of the cache partition, only valid for non-A/B update.
+ int64 required_cache = 8;
+}
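The generated ota_metadata_pb2 module (checked in below) exposes these messages through the standard protobuf-python API; a minimal, hypothetical usage sketch, assuming only that API and the fields declared in the proto above:

    # Minimal, hypothetical sketch using the generated bindings; it relies only on
    # the standard protobuf message API and the fields declared in ota_metadata.proto.
    import ota_metadata_pb2

    metadata = ota_metadata_pb2.OtaMetadata()
    metadata.type = ota_metadata_pb2.OtaMetadata.AB
    metadata.required_cache = 0
    metadata.postcondition.timestamp = 1601234567
    part = metadata.postcondition.partition_state.add()
    part.partition_name = "boot"
    part.version = "5.4.42-android12-0"

    # Serialize into the OTA package, then read it back on the other side.
    blob = metadata.SerializeToString()
    restored = ota_metadata_pb2.OtaMetadata.FromString(blob)
    assert restored.postcondition.partition_state[0].version == "5.4.42-android12-0"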
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
new file mode 100644
index 0000000..ff2b2c5
--- /dev/null
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -0,0 +1,343 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: ota_metadata.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='ota_metadata.proto',
+ package='build.tools.releasetools',
+ syntax='proto3',
+ serialized_options=b'H\003',
+ serialized_pb=b'\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"\xe1\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3'
+)
+
+
+
+_OTAMETADATA_OTATYPE = _descriptor.EnumDescriptor(
+ name='OtaType',
+ full_name='build.tools.releasetools.OtaMetadata.OtaType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='AB', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BLOCK', index=2, number=2,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BRICK', index=3, number=3,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=777,
+ serialized_end=829,
+)
+_sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
+
+
+_PARTITIONSTATE = _descriptor.Descriptor(
+ name='PartitionState',
+ full_name='build.tools.releasetools.PartitionState',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='partition_name', full_name='build.tools.releasetools.PartitionState.partition_name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='device', full_name='build.tools.releasetools.PartitionState.device', index=1,
+ number=2, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='build', full_name='build.tools.releasetools.PartitionState.build', index=2,
+ number=3, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='version', full_name='build.tools.releasetools.PartitionState.version', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=48,
+ serialized_end=136,
+)
+
+
+_DEVICESTATE = _descriptor.Descriptor(
+ name='DeviceState',
+ full_name='build.tools.releasetools.DeviceState',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='device', full_name='build.tools.releasetools.DeviceState.device', index=0,
+ number=1, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='build', full_name='build.tools.releasetools.DeviceState.build', index=1,
+ number=2, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='build_incremental', full_name='build.tools.releasetools.DeviceState.build_incremental', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='timestamp', full_name='build.tools.releasetools.DeviceState.timestamp', index=3,
+ number=4, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='sdk_level', full_name='build.tools.releasetools.DeviceState.sdk_level', index=4,
+ number=5, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='security_patch_level', full_name='build.tools.releasetools.DeviceState.security_patch_level', index=5,
+ number=6, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='partition_state', full_name='build.tools.releasetools.DeviceState.partition_state', index=6,
+ number=7, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=139,
+ serialized_end=345,
+)
+
+
+_OTAMETADATA_PROPERTYFILESENTRY = _descriptor.Descriptor(
+ name='PropertyFilesEntry',
+ full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=b'8\001',
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=723,
+ serialized_end=775,
+)
+
+_OTAMETADATA = _descriptor.Descriptor(
+ name='OtaMetadata',
+ full_name='build.tools.releasetools.OtaMetadata',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='build.tools.releasetools.OtaMetadata.type', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='wipe', full_name='build.tools.releasetools.OtaMetadata.wipe', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='downgrade', full_name='build.tools.releasetools.OtaMetadata.downgrade', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='property_files', full_name='build.tools.releasetools.OtaMetadata.property_files', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='precondition', full_name='build.tools.releasetools.OtaMetadata.precondition', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='postcondition', full_name='build.tools.releasetools.OtaMetadata.postcondition', index=5,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='retrofit_dynamic_partitions', full_name='build.tools.releasetools.OtaMetadata.retrofit_dynamic_partitions', index=6,
+ number=7, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='required_cache', full_name='build.tools.releasetools.OtaMetadata.required_cache', index=7,
+ number=8, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_OTAMETADATA_PROPERTYFILESENTRY, ],
+ enum_types=[
+ _OTAMETADATA_OTATYPE,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=348,
+ serialized_end=829,
+)
+
+_DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
+_OTAMETADATA_PROPERTYFILESENTRY.containing_type = _OTAMETADATA
+_OTAMETADATA.fields_by_name['type'].enum_type = _OTAMETADATA_OTATYPE
+_OTAMETADATA.fields_by_name['property_files'].message_type = _OTAMETADATA_PROPERTYFILESENTRY
+_OTAMETADATA.fields_by_name['precondition'].message_type = _DEVICESTATE
+_OTAMETADATA.fields_by_name['postcondition'].message_type = _DEVICESTATE
+_OTAMETADATA_OTATYPE.containing_type = _OTAMETADATA
+DESCRIPTOR.message_types_by_name['PartitionState'] = _PARTITIONSTATE
+DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE
+DESCRIPTOR.message_types_by_name['OtaMetadata'] = _OTAMETADATA
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+PartitionState = _reflection.GeneratedProtocolMessageType('PartitionState', (_message.Message,), {
+ 'DESCRIPTOR' : _PARTITIONSTATE,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.PartitionState)
+ })
+_sym_db.RegisterMessage(PartitionState)
+
+DeviceState = _reflection.GeneratedProtocolMessageType('DeviceState', (_message.Message,), {
+ 'DESCRIPTOR' : _DEVICESTATE,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.DeviceState)
+ })
+_sym_db.RegisterMessage(DeviceState)
+
+OtaMetadata = _reflection.GeneratedProtocolMessageType('OtaMetadata', (_message.Message,), {
+
+ 'PropertyFilesEntry' : _reflection.GeneratedProtocolMessageType('PropertyFilesEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _OTAMETADATA_PROPERTYFILESENTRY,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.OtaMetadata.PropertyFilesEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _OTAMETADATA,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.OtaMetadata)
+ })
+_sym_db.RegisterMessage(OtaMetadata)
+_sym_db.RegisterMessage(OtaMetadata.PropertyFilesEntry)
+
+
+DESCRIPTOR._options = None
+_OTAMETADATA_PROPERTYFILESENTRY._options = None
+# @@protoc_insertion_point(module_scope)
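
The generated module above is imported by releasetools like any other Python module. A minimal sketch of building and serializing an OtaMetadata message with it, using only field names visible in the descriptors above; the concrete values are illustrative, not taken from any real build:

    import ota_metadata_pb2

    metadata = ota_metadata_pb2.OtaMetadata()
    metadata.type = ota_metadata_pb2.OtaMetadata.AB   # enum defined above
    metadata.required_cache = 0
    metadata.wipe = False
    # property_files is a map<string, string> field.
    metadata.property_files['ota-streaming-property-files'] = 'payload.bin:123:456'
    # postcondition is a DeviceState message with repeated device/build fields.
    metadata.postcondition.device.append('product-device')
    metadata.postcondition.build.append('brand/name/device:11/ID/1:user/release-keys')
    metadata.postcondition.timestamp = 1500000000

    serialized = metadata.SerializeToString()   # bytes, as written to metadata.pb
    roundtrip = ota_metadata_pb2.OtaMetadata.FromString(serialized)
    assert roundtrip.postcondition.timestamp == 1500000000
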
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 874ab95..d444d41 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -17,11 +17,25 @@
import os
import zipfile
+import ota_metadata_pb2
from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps)
+
+OPTIONS.no_signing = False
+OPTIONS.force_non_ab = False
+OPTIONS.wipe_user_data = False
+OPTIONS.downgrade = False
+OPTIONS.key_passwords = {}
+OPTIONS.package_key = None
+OPTIONS.incremental_source = None
+OPTIONS.retrofit_dynamic_partitions = False
+OPTIONS.output_metadata_path = None
+OPTIONS.boot_variable_file = None
+
METADATA_NAME = 'META-INF/com/android/metadata'
+METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
@@ -50,11 +64,12 @@
# Write the current metadata entry with placeholders.
with zipfile.ZipFile(input_file) as input_zip:
for property_files in needed_property_files:
- metadata[property_files.name] = property_files.Compute(input_zip)
+ metadata.property_files[property_files.name] = property_files.Compute(
+ input_zip)
namelist = input_zip.namelist()
- if METADATA_NAME in namelist:
- ZipDelete(input_file, METADATA_NAME)
+ if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
+ ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
output_zip = zipfile.ZipFile(input_file, 'a')
WriteMetadata(metadata, output_zip)
ZipClose(output_zip)
@@ -69,8 +84,9 @@
def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
for property_files in needed_property_files:
- metadata[property_files.name] = property_files.Finalize(
- prelim_signing_zip, len(metadata[property_files.name]))
+ metadata.property_files[property_files.name] = property_files.Finalize(
+ prelim_signing_zip,
+ len(metadata.property_files[property_files.name]))
# SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP
# entries, as well as padding the entry headers. We do a preliminary signing
@@ -91,7 +107,7 @@
FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
# Replace the METADATA entry.
- ZipDelete(prelim_signing, METADATA_NAME)
+ ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
output_zip = zipfile.ZipFile(prelim_signing, 'a')
WriteMetadata(metadata, output_zip)
ZipClose(output_zip)
@@ -105,7 +121,8 @@
# Reopen the final signed zip to double check the streaming metadata.
with zipfile.ZipFile(output_file) as output_zip:
for property_files in needed_property_files:
- property_files.Verify(output_zip, metadata[property_files.name].strip())
+ property_files.Verify(
+ output_zip, metadata.property_files[property_files.name].strip())
# If requested, dump the metadata to a separate file.
output_metadata_path = OPTIONS.output_metadata_path
@@ -113,30 +130,101 @@
WriteMetadata(metadata, output_metadata_path)
-def WriteMetadata(metadata, output):
+def WriteMetadata(metadata_proto, output):
"""Writes the metadata to the zip archive or a file.
Args:
- metadata: The metadata dict for the package.
- output: A ZipFile object or a string of the output file path.
+ metadata_proto: The metadata protobuf for the package.
+ output: A ZipFile object or a string of the output file path. If a string
+ path is given, the metadata in the protobuf format will be written to
+ {output}.pb, e.g. ota_metadata.pb
"""
- value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
+ metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
+ legacy_metadata = "".join(["%s=%s\n" % kv for kv in
+ sorted(metadata_dict.items())])
if isinstance(output, zipfile.ZipFile):
- ZipWriteStr(output, METADATA_NAME, value,
+ ZipWriteStr(output, METADATA_PROTO_NAME, metadata_proto.SerializeToString(),
+ compress_type=zipfile.ZIP_STORED)
+ ZipWriteStr(output, METADATA_NAME, legacy_metadata,
compress_type=zipfile.ZIP_STORED)
return
+ with open('{}.pb'.format(output), 'wb') as f:
+ f.write(metadata_proto.SerializeToString())
with open(output, 'w') as f:
- f.write(value)
+ f.write(legacy_metadata)
+
+
+def UpdateDeviceState(device_state, build_info, boot_variable_values,
+ is_post_build):
+ """Update the fields of the DeviceState proto with build info."""
+
+ def UpdatePartitionStates(partition_states):
+ """Update the per-partition state according to its build.prop"""
+ if not build_info.is_ab:
+ return
+ build_info_set = ComputeRuntimeBuildInfos(build_info,
+ boot_variable_values)
+ assert "ab_partitions" in build_info.info_dict,\
+ "ab_partitions property required for ab update."
+ ab_partitions = set(build_info.info_dict.get("ab_partitions"))
+
+ # delta_generator will error out on unused timestamps,
+ # so only generate timestamps for dynamic partitions
+ # used in OTA update.
+ for partition in sorted(set(PARTITIONS_WITH_CARE_MAP) & ab_partitions):
+ partition_prop = build_info.info_dict.get(
+ '{}.build.prop'.format(partition))
+ # Skip if the partition is missing, or it doesn't have a build.prop
+ if not partition_prop or not partition_prop.build_props:
+ continue
+
+ partition_state = partition_states.add()
+ partition_state.partition_name = partition
+ # Update the partition's runtime device names and fingerprints
+ partition_devices = set()
+ partition_fingerprints = set()
+ for runtime_build_info in build_info_set:
+ partition_devices.add(
+ runtime_build_info.GetPartitionBuildProp('ro.product.device',
+ partition))
+ partition_fingerprints.add(
+ runtime_build_info.GetPartitionFingerprint(partition))
+
+ partition_state.device.extend(sorted(partition_devices))
+ partition_state.build.extend(sorted(partition_fingerprints))
+
+ # TODO(xunchang) set the boot image's version with kmi. Note the boot
+ # image doesn't have a file map.
+ partition_state.version = build_info.GetPartitionBuildProp(
+ 'ro.build.date.utc', partition)
+
+ # TODO(xunchang), we can save a call to ComputeRuntimeBuildInfos.
+ build_devices, build_fingerprints = \
+ CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values)
+ device_state.device.extend(sorted(build_devices))
+ device_state.build.extend(sorted(build_fingerprints))
+ device_state.build_incremental = build_info.GetBuildProp(
+ 'ro.build.version.incremental')
+
+ UpdatePartitionStates(device_state.partition_state)
+
+ if is_post_build:
+ device_state.sdk_level = build_info.GetBuildProp(
+ 'ro.build.version.sdk')
+ device_state.security_patch_level = build_info.GetBuildProp(
+ 'ro.build.version.security_patch')
+ # Use the actual post-timestamp, even for a downgrade case.
+ device_state.timestamp = int(build_info.GetBuildProp('ro.build.date.utc'))
def GetPackageMetadata(target_info, source_info=None):
- """Generates and returns the metadata dict.
+ """Generates and returns the metadata proto.
- It generates a dict() that contains the info to be written into an OTA
- package (META-INF/com/android/metadata). It also handles the detection of
- downgrade / data wipe based on the global options.
+ It generates a ota_metadata protobuf that contains the info to be written
+ into an OTA package (META-INF/com/android/metadata.pb). It also handles the
+ detection of downgrade / data wipe based on the global options.
Args:
target_info: The BuildInfo instance that holds the target build info.
@@ -144,66 +232,96 @@
None if generating full OTA.
Returns:
- A dict to be written into package metadata entry.
+ A protobuf to be written into package metadata entry.
"""
assert isinstance(target_info, BuildInfo)
assert source_info is None or isinstance(source_info, BuildInfo)
- separator = '|'
-
boot_variable_values = {}
if OPTIONS.boot_variable_file:
d = LoadDictionaryFromFile(OPTIONS.boot_variable_file)
for key, values in d.items():
boot_variable_values[key] = [val.strip() for val in values.split(',')]
- post_build_devices, post_build_fingerprints = \
- CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values)
- metadata = {
- 'post-build': separator.join(sorted(post_build_fingerprints)),
- 'post-build-incremental': target_info.GetBuildProp(
- 'ro.build.version.incremental'),
- 'post-sdk-level': target_info.GetBuildProp(
- 'ro.build.version.sdk'),
- 'post-security-patch-level': target_info.GetBuildProp(
- 'ro.build.version.security_patch'),
- }
+ metadata_proto = ota_metadata_pb2.OtaMetadata()
+ # TODO(xunchang) some fields, e.g. post-device isn't necessary. We can
+ # consider skipping them if they aren't used by clients.
+ UpdateDeviceState(metadata_proto.postcondition, target_info,
+ boot_variable_values, True)
if target_info.is_ab and not OPTIONS.force_non_ab:
- metadata['ota-type'] = 'AB'
- metadata['ota-required-cache'] = '0'
+ metadata_proto.type = ota_metadata_pb2.OtaMetadata.AB
+ metadata_proto.required_cache = 0
else:
- metadata['ota-type'] = 'BLOCK'
+ metadata_proto.type = ota_metadata_pb2.OtaMetadata.BLOCK
+ # cache requirement will be updated by the non-A/B code path.
if OPTIONS.wipe_user_data:
- metadata['ota-wipe'] = 'yes'
+ metadata_proto.wipe = True
if OPTIONS.retrofit_dynamic_partitions:
- metadata['ota-retrofit-dynamic-partitions'] = 'yes'
+ metadata_proto.retrofit_dynamic_partitions = True
is_incremental = source_info is not None
if is_incremental:
- pre_build_devices, pre_build_fingerprints = \
- CalculateRuntimeDevicesAndFingerprints(source_info,
- boot_variable_values)
- metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints))
- metadata['pre-build-incremental'] = source_info.GetBuildProp(
- 'ro.build.version.incremental')
- metadata['pre-device'] = separator.join(sorted(pre_build_devices))
+ UpdateDeviceState(metadata_proto.precondition, source_info,
+ boot_variable_values, False)
else:
- metadata['pre-device'] = separator.join(sorted(post_build_devices))
-
- # Use the actual post-timestamp, even for a downgrade case.
- metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+ metadata_proto.precondition.device.extend(
+ metadata_proto.postcondition.device)
# Detect downgrades and set up downgrade flags accordingly.
if is_incremental:
- HandleDowngradeMetadata(metadata, target_info, source_info)
+ HandleDowngradeMetadata(metadata_proto, target_info, source_info)
- return metadata
+ return metadata_proto
-def HandleDowngradeMetadata(metadata, target_info, source_info):
+def BuildLegacyOtaMetadata(metadata_proto):
+ """Converts the metadata proto to a legacy metadata dict.
+
+ This metadata dict is used to build the legacy metadata text file for
+ backward compatibility. We won't add new keys to the legacy metadata format.
+ If new information is needed, we should add it as a new field in the
+ OtaMetadata proto definition.
+ """
+
+ separator = '|'
+
+ metadata_dict = {}
+ if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+ metadata_dict['ota-type'] = 'AB'
+ elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+ metadata_dict['ota-type'] = 'BLOCK'
+ if metadata_proto.wipe:
+ metadata_dict['ota-wipe'] = 'yes'
+ if metadata_proto.retrofit_dynamic_partitions:
+ metadata_dict['ota-retrofit-dynamic-partitions'] = 'yes'
+ if metadata_proto.downgrade:
+ metadata_dict['ota-downgrade'] = 'yes'
+
+ metadata_dict['ota-required-cache'] = str(metadata_proto.required_cache)
+
+ post_build = metadata_proto.postcondition
+ metadata_dict['post-build'] = separator.join(post_build.build)
+ metadata_dict['post-build-incremental'] = post_build.build_incremental
+ metadata_dict['post-sdk-level'] = post_build.sdk_level
+ metadata_dict['post-security-patch-level'] = post_build.security_patch_level
+ metadata_dict['post-timestamp'] = str(post_build.timestamp)
+
+ pre_build = metadata_proto.precondition
+ metadata_dict['pre-device'] = separator.join(pre_build.device)
+ # incremental updates
+ if len(pre_build.build) != 0:
+ metadata_dict['pre-build'] = separator.join(pre_build.build)
+ metadata_dict['pre-build-incremental'] = pre_build.build_incremental
+
+ metadata_dict.update(metadata_proto.property_files)
+
+ return metadata_dict
+
+
+def HandleDowngradeMetadata(metadata_proto, target_info, source_info):
# Only incremental OTAs are allowed to reach here.
assert OPTIONS.incremental_source is not None
@@ -216,7 +334,7 @@
raise RuntimeError(
"--downgrade or --override_timestamp specified but no downgrade "
"detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
- metadata["ota-downgrade"] = "yes"
+ metadata_proto.downgrade = True
else:
if is_downgrade:
raise RuntimeError(
@@ -225,14 +343,12 @@
"building the incremental." % (pre_timestamp, post_timestamp))
-def CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values):
- """Returns a tuple of sets for runtime devices and fingerprints"""
+def ComputeRuntimeBuildInfos(default_build_info, boot_variable_values):
+ """Returns a set of build info objects that may exist during runtime."""
- device_names = {build_info.device}
- fingerprints = {build_info.fingerprint}
-
+ build_info_set = {default_build_info}
if not boot_variable_values:
- return device_names, fingerprints
+ return build_info_set
# Calculate all possible combinations of the values for the boot variables.
keys = boot_variable_values.keys()
@@ -242,7 +358,7 @@
for placeholder_values in combinations:
# Reload the info_dict as some build properties may change their values
# based on the value of ro.boot* properties.
- info_dict = copy.deepcopy(build_info.info_dict)
+ info_dict = copy.deepcopy(default_build_info.info_dict)
for partition in PARTITIONS_WITH_CARE_MAP:
partition_prop_key = "{}.build.prop".format(partition)
input_file = info_dict[partition_prop_key].input_file
@@ -256,10 +372,22 @@
PartitionBuildProps.FromInputFile(input_file, partition,
placeholder_values)
info_dict["build.prop"] = info_dict["system.build.prop"]
+ build_info_set.add(BuildInfo(info_dict, default_build_info.oem_dicts))
- new_build_info = BuildInfo(info_dict, build_info.oem_dicts)
- device_names.add(new_build_info.device)
- fingerprints.add(new_build_info.fingerprint)
+ return build_info_set
+
+
+def CalculateRuntimeDevicesAndFingerprints(default_build_info,
+ boot_variable_values):
+ """Returns a tuple of sets for runtime devices and fingerprints"""
+
+ device_names = set()
+ fingerprints = set()
+ build_info_set = ComputeRuntimeBuildInfos(default_build_info,
+ boot_variable_values)
+ for runtime_build_info in build_info_set:
+ device_names.add(runtime_build_info.device)
+ fingerprints.add(runtime_build_info.fingerprint)
return device_names, fingerprints
@@ -403,8 +531,10 @@
# reserved space serves the metadata entry only.
if reserve_space:
tokens.append('metadata:' + ' ' * 15)
+ tokens.append('metadata.pb:' + ' ' * 15)
else:
tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
+ tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
return ','.join(tokens)
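
With this change an OTA package carries the metadata twice: the legacy key=value text at META-INF/com/android/metadata and the serialized proto at META-INF/com/android/metadata.pb. A minimal sketch, assuming ota_metadata_pb2 is the generated module above and the package path is hypothetical, of how a client could read both entries back:

    import zipfile
    import ota_metadata_pb2

    METADATA_NAME = 'META-INF/com/android/metadata'
    METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'

    def read_package_metadata(package_path):
      """Returns (legacy_dict, metadata_proto) parsed from an OTA package."""
      with zipfile.ZipFile(package_path) as package:
        # Legacy entry: one key=value pair per line.
        legacy_text = package.read(METADATA_NAME).decode('utf-8')
        legacy_dict = dict(line.split('=', 1)
                           for line in legacy_text.splitlines() if line)
        # Proto entry: a serialized OtaMetadata message.
        metadata_proto = ota_metadata_pb2.OtaMetadata.FromString(
            package.read(METADATA_PROTO_NAME))
      return legacy_dict, metadata_proto

    # Usage (path is hypothetical):
    # legacy, proto = read_package_metadata('ota_update.zip')
    # ab = proto.type == ota_metadata_pb2.OtaMetadata.AB
    # assert legacy['ota-type'] == ('AB' if ab else 'BLOCK')
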
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
index 7b4a4b0..339ddc7 100644
--- a/tools/releasetools/test_apex_utils.py
+++ b/tools/releasetools/test_apex_utils.py
@@ -160,7 +160,7 @@
self.payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
apex_file = signer.ProcessApexFile(apk_keys, self.payload_key)
- package_name_extract_cmd = ['aapt', 'dump', 'badging', apex_file]
+ package_name_extract_cmd = ['aapt2', 'dump', 'badging', apex_file]
output = common.RunAndCheckOutput(package_name_extract_cmd)
for line in output.splitlines():
# Sample output from aapt: "package: name='com.google.android.wifi'
diff --git a/tools/releasetools/test_non_ab_ota.py b/tools/releasetools/test_non_ab_ota.py
index ee1b411..5207e2f 100644
--- a/tools/releasetools/test_non_ab_ota.py
+++ b/tools/releasetools/test_non_ab_ota.py
@@ -42,12 +42,13 @@
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(1, len(tokens))
+ self.assertEqual(2, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Finalize(self):
entries = [
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()
@@ -57,14 +58,16 @@
property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(1, len(tokens))
+ self.assertEqual(2, len(tokens))
# 'META-INF/com/android/metadata' will be key'd as 'metadata'.
entries[0] = 'metadata'
+ entries[1] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
def test_Verify(self):
entries = (
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 52aa487..84cd4c8 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -20,17 +20,20 @@
import zipfile
import common
+import ota_metadata_pb2
import test_utils
-from ota_utils import CalculateRuntimeDevicesAndFingerprints
+from ota_utils import (
+ BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
+ FinalizeMetadata, GetPackageMetadata, PropertyFiles)
from ota_from_target_files import (
- _LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata,
- GetPackageMetadata, GetTargetFilesZipForSecondaryImages,
+ _LoadOemDicts, AbOtaPropertyFiles,
+ GetTargetFilesZipForSecondaryImages,
GetTargetFilesZipWithoutPostinstallConfig,
- Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
- StreamingPropertyFiles)
-from non_ab_ota import NonAbOtaPropertyFiles
+ Payload, PayloadSigner, POSTINSTALL_CONFIG,
+ StreamingPropertyFiles, AB_PARTITIONS)
from test_utils import PropertyFilesTestCase
+
def construct_target_files(secondary=False):
"""Returns a target-files.zip file for generating OTA packages."""
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -143,14 +146,13 @@
),
'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
'vendor', {
- 'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
+ 'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
),
'property1': 'value1',
'property2': 4096,
'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
}
-
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
self.assertTrue(os.path.exists(self.testdata_dir))
@@ -164,63 +166,71 @@
common.OPTIONS.no_signing = False
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
common.OPTIONS.search_path = test_utils.get_search_path()
+ @staticmethod
+ def GetLegacyOtaMetadata(target_info, source_info=None):
+ metadata_proto = GetPackageMetadata(target_info, source_info)
+ return BuildLegacyOtaMetadata(metadata_proto)
+
def test_GetPackageMetadata_abOta_full(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
+ target_info_dict['ab_partitions'] = []
target_info = common.BuildInfo(target_info_dict, None)
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-type' : 'AB',
- 'ota-required-cache' : '0',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-type': 'AB',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
def test_GetPackageMetadata_abOta_incremental(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
+ target_info_dict['ab_partitions'] = []
target_info = common.BuildInfo(target_info_dict, None)
source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata = self.GetLegacyOtaMetadata(target_info, source_info)
self.assertDictEqual(
{
- 'ota-type' : 'AB',
- 'ota-required-cache' : '0',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
- 'pre-build' : 'build-fingerprint-source',
- 'pre-build-incremental' : 'build-version-incremental-source',
+ 'ota-type': 'AB',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
+ 'pre-build': 'build-fingerprint-source',
+ 'pre-build-incremental': 'build-version-incremental-source',
},
metadata)
def test_GetPackageMetadata_nonAbOta_full(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-type' : 'BLOCK',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
@@ -228,52 +238,55 @@
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata = self.GetLegacyOtaMetadata(target_info, source_info)
self.assertDictEqual(
{
- 'ota-type' : 'BLOCK',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
- 'pre-build' : 'build-fingerprint-source',
- 'pre-build-incremental' : 'build-version-incremental-source',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
+ 'pre-build': 'build-fingerprint-source',
+ 'pre-build-incremental': 'build-version-incremental-source',
},
metadata)
def test_GetPackageMetadata_wipe(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.wipe_user_data = True
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-type' : 'BLOCK',
- 'ota-wipe' : 'yes',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'ota-wipe': 'yes',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
def test_GetPackageMetadata_retrofitDynamicPartitions(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.retrofit_dynamic_partitions = True
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-retrofit-dynamic-partitions' : 'yes',
- 'ota-type' : 'BLOCK',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-retrofit-dynamic-partitions': 'yes',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
@@ -293,7 +306,7 @@
target_info = common.BuildInfo(target_info_dict, None)
source_info = common.BuildInfo(source_info_dict, None)
common.OPTIONS.incremental_source = ''
- self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+ self.assertRaises(RuntimeError, self.GetLegacyOtaMetadata, target_info,
source_info)
def test_GetPackageMetadata_downgrade(self):
@@ -307,20 +320,22 @@
common.OPTIONS.incremental_source = ''
common.OPTIONS.downgrade = True
common.OPTIONS.wipe_user_data = True
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+
self.assertDictEqual(
{
- 'ota-downgrade' : 'yes',
- 'ota-type' : 'BLOCK',
- 'ota-wipe' : 'yes',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1400000000',
- 'pre-device' : 'product-device',
- 'pre-build' : 'build-fingerprint-source',
- 'pre-build-incremental' : 'build-version-incremental-source',
+ 'ota-downgrade': 'yes',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'ota-wipe': 'yes',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1400000000',
+ 'pre-device': 'product-device',
+ 'pre-build': 'build-fingerprint-source',
+ 'pre-build-incremental': 'build-version-incremental-source',
},
metadata)
@@ -464,13 +479,13 @@
'A' * 1024 * 1024 * 1024,
zipfile.ZIP_STORED)
- metadata = {}
+ metadata = ota_metadata_pb2.OtaMetadata()
output_file = common.MakeTempFile(suffix='.zip')
needed_property_files = (
TestPropertyFiles(),
)
FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
- self.assertIn('ota-test-property-files', metadata)
+ self.assertIn('ota-test-property-files', metadata.property_files)
@test_utils.SkipIfExternalToolsUnavailable()
def test_FinalizeMetadata(self):
@@ -508,13 +523,13 @@
'A' * 1024 * 1024,
zipfile.ZIP_STORED)
- metadata = {}
+ metadata = ota_metadata_pb2.OtaMetadata()
needed_property_files = (
TestPropertyFiles(),
)
output_file = common.MakeTempFile(suffix='.zip')
FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
- self.assertIn('ota-test-property-files', metadata)
+ self.assertIn('ota-test-property-files', metadata.property_files)
class TestPropertyFiles(PropertyFiles):
@@ -532,8 +547,8 @@
'optional-entry2',
)
-class PropertyFilesTest(PropertyFilesTestCase):
+class PropertyFilesTest(PropertyFilesTestCase):
@test_utils.SkipIfExternalToolsUnavailable()
def test_Compute(self):
@@ -547,7 +562,7 @@
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(3, len(tokens))
+ self.assertEqual(4, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Compute_withOptionalEntries(self):
@@ -563,7 +578,7 @@
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(5, len(tokens))
+ self.assertEqual(6, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Compute_missingRequiredEntry(self):
@@ -581,6 +596,7 @@
'required-entry1',
'required-entry2',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
@@ -590,10 +606,11 @@
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
- self.assertEqual(3, len(tokens))
+ self.assertEqual(4, len(tokens))
# 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
# streaming metadata.
entries[2] = 'metadata'
+ entries[3] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
@test_utils.SkipIfExternalToolsUnavailable()
@@ -604,6 +621,7 @@
'optional-entry1',
'optional-entry2',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
@@ -638,6 +656,7 @@
'optional-entry1',
'optional-entry2',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
@@ -687,7 +706,7 @@
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(5, len(tokens))
+ self.assertEqual(6, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Finalize(self):
@@ -697,6 +716,7 @@
'care_map.txt',
'compatibility.zip',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
@@ -706,10 +726,11 @@
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
- self.assertEqual(5, len(tokens))
+ self.assertEqual(6, len(tokens))
# 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
# streaming metadata.
entries[4] = 'metadata'
+ entries[5] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
def test_Verify(self):
@@ -719,6 +740,7 @@
'care_map.txt',
'compatibility.zip',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
@@ -750,7 +772,7 @@
common.OPTIONS.payload_signer_args = None
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
def test_init(self):
@@ -855,6 +877,7 @@
# Put META-INF/com/android/metadata if needed.
if with_metadata:
entries.append('META-INF/com/android/metadata')
+ entries.append('META-INF/com/android/metadata.pb')
for entry in entries:
zip_fp.writestr(
@@ -870,9 +893,9 @@
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- # "6" indcludes the four entries above, one metadata entry, and one entry
+ # "7" indcludes the four entries above, two metadata entries, and one entry
# for payload-metadata.bin.
- self.assertEqual(6, len(tokens))
+ self.assertEqual(7, len(tokens))
self._verify_entries(
zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
@@ -883,12 +906,13 @@
with zipfile.ZipFile(zip_file, 'r') as zip_fp:
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
- property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
+ property_files_string = property_files.Finalize(
+ zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
- # "6" indcludes the four entries above, one metadata entry, and one entry
+ # "7" includes the four entries above, two metadata entries, and one entry
# for payload-metadata.bin.
- self.assertEqual(6, len(tokens))
+ self.assertEqual(7, len(tokens))
self._verify_entries(
zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
@@ -916,7 +940,7 @@
common.OPTIONS.payload_signer_args = []
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
def _assertFilesEqual(self, file1, file2):
@@ -934,7 +958,7 @@
common.OPTIONS.package_key = os.path.join(
self.testdata_dir, 'testkey_with_passwd')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : 'foo',
+ common.OPTIONS.package_key: 'foo',
}
payload_signer = PayloadSigner()
self.assertEqual('openssl', payload_signer.signer)
@@ -1011,7 +1035,7 @@
common.OPTIONS.payload_signer_args = None
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
@staticmethod
@@ -1166,8 +1190,8 @@
# Then assert these entries are stored.
for entry_info in verify_zip.infolist():
if entry_info.filename not in (
- Payload.SECONDARY_PAYLOAD_BIN,
- Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
+ Payload.SECONDARY_PAYLOAD_BIN,
+ Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
continue
self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
@@ -1177,6 +1201,7 @@
'recovery_api_version=3',
'fstab_version=2',
'recovery_as_boot=true',
+ 'ab_update=true',
]
BUILD_PROP = [
@@ -1187,10 +1212,29 @@
'ro.build.tags=build-tags',
'ro.build.version.sdk=30',
'ro.build.version.security_patch=2020',
- 'ro.build.date.utc=12345678'
+ 'ro.build.date.utc=12345678',
+ 'ro.system.build.version.release=version-release',
+ 'ro.system.build.id=build-id',
+ 'ro.system.build.version.incremental=version-incremental',
+ 'ro.system.build.type=build-type',
+ 'ro.system.build.tags=build-tags',
+ 'ro.system.build.version.sdk=30',
+ 'ro.system.build.version.security_patch=2020',
+ 'ro.system.build.date.utc=12345678',
+ 'ro.product.system.brand=generic',
+ 'ro.product.system.name=generic',
+ 'ro.product.system.device=generic',
]
VENDOR_BUILD_PROP = [
+ 'ro.vendor.build.version.release=version-release',
+ 'ro.vendor.build.id=build-id',
+ 'ro.vendor.build.version.incremental=version-incremental',
+ 'ro.vendor.build.type=build-type',
+ 'ro.vendor.build.tags=build-tags',
+ 'ro.vendor.build.version.sdk=30',
+ 'ro.vendor.build.version.security_patch=2020',
+ 'ro.vendor.build.date.utc=12345678',
'ro.product.vendor.brand=vendor-product-brand',
'ro.product.vendor.name=vendor-product-name',
'ro.product.vendor.device=vendor-product-device'
@@ -1319,6 +1363,7 @@
'ro.product.vendor.name=vendor-product-std',
'VENDOR/etc/build_pro.prop':
'ro.product.vendor.name=vendor-product-pro',
+ AB_PARTITIONS: '\n'.join(['system', 'vendor']),
}, self.test_dir)
common.OPTIONS.boot_variable_file = common.MakeTempFile()
@@ -1326,8 +1371,8 @@
f.write('ro.boot.sku_name=std,pro')
build_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
- metadata = GetPackageMetadata(build_info)
- self.assertEqual('vendor-product-device', metadata['pre-device'])
+ metadata_dict = BuildLegacyOtaMetadata(GetPackageMetadata(build_info))
+ self.assertEqual('vendor-product-device', metadata_dict['pre-device'])
fingerprints = [
self.constructFingerprint(
'vendor-product-brand/vendor-product-name/vendor-product-device'),
@@ -1336,7 +1381,33 @@
self.constructFingerprint(
'vendor-product-brand/vendor-product-std/vendor-product-device'),
]
- self.assertEqual('|'.join(fingerprints), metadata['post-build'])
+ self.assertEqual('|'.join(fingerprints), metadata_dict['post-build'])
+
+ def CheckMetadataEqual(self, metadata_dict, metadata_proto):
+ post_build = metadata_proto.postcondition
+ self.assertEqual('|'.join(post_build.build),
+ metadata_dict['post-build'])
+ self.assertEqual(post_build.build_incremental,
+ metadata_dict['post-build-incremental'])
+ self.assertEqual(post_build.sdk_level,
+ metadata_dict['post-sdk-level'])
+ self.assertEqual(post_build.security_patch_level,
+ metadata_dict['post-security-patch-level'])
+
+ if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+ ota_type = 'AB'
+ elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+ ota_type = 'BLOCK'
+ else:
+ ota_type = ''
+ self.assertEqual(ota_type, metadata_dict['ota-type'])
+ self.assertEqual(metadata_proto.wipe,
+ metadata_dict.get('ota-wipe') == 'yes')
+ self.assertEqual(metadata_proto.required_cache,
+ int(metadata_dict.get('ota-required-cache', 0)))
+ self.assertEqual(metadata_proto.retrofit_dynamic_partitions,
+ metadata_dict.get(
+ 'ota-retrofit-dynamic-partitions') == 'yes')
def test_GetPackageMetadata_incremental_package(self):
vendor_build_prop = copy.deepcopy(self.VENDOR_BUILD_PROP)
@@ -1344,6 +1415,8 @@
'import /vendor/etc/build_${ro.boot.sku_name}.prop',
])
self.writeFiles({
+ 'META/misc_info.txt': '\n'.join(self.MISC_INFO),
+ 'META/ab_partitions.txt': '\n'.join(['system', 'vendor', 'product']),
'SYSTEM/build.prop': '\n'.join(self.BUILD_PROP),
'VENDOR/build.prop': '\n'.join(vendor_build_prop),
'VENDOR/etc/build_std.prop':
@@ -1365,10 +1438,22 @@
'ro.build.tags=build-tags',
'ro.build.version.sdk=29',
'ro.build.version.security_patch=2020',
- 'ro.build.date.utc=12340000'
+ 'ro.build.date.utc=12340000',
+ 'ro.system.build.version.release=source-version-release',
+ 'ro.system.build.id=source-build-id',
+ 'ro.system.build.version.incremental=source-version-incremental',
+ 'ro.system.build.type=build-type',
+ 'ro.system.build.tags=build-tags',
+ 'ro.system.build.version.sdk=29',
+ 'ro.system.build.version.security_patch=2020',
+ 'ro.system.build.date.utc=12340000',
+ 'ro.product.system.brand=generic',
+ 'ro.product.system.name=generic',
+ 'ro.product.system.device=generic',
]
self.writeFiles({
'META/misc_info.txt': '\n'.join(self.MISC_INFO),
+ 'META/ab_partitions.txt': '\n'.join(['system', 'vendor', 'product']),
'SYSTEM/build.prop': '\n'.join(source_build_prop),
'VENDOR/build.prop': '\n'.join(vendor_build_prop),
'VENDOR/etc/build_std.prop':
@@ -1381,21 +1466,22 @@
target_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
source_info = common.BuildInfo(common.LoadInfoDict(source_dir))
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata_proto = GetPackageMetadata(target_info, source_info)
+ metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
self.assertEqual(
'vendor-device-pro|vendor-device-std|vendor-product-device',
- metadata['pre-device'])
- suffix = ':source-version-release/source-build-id/' \
- 'source-version-incremental:build-type/build-tags'
+ metadata_dict['pre-device'])
+ source_suffix = ':source-version-release/source-build-id/' \
+ 'source-version-incremental:build-type/build-tags'
pre_fingerprints = [
'vendor-product-brand/vendor-product-name/vendor-device-pro'
- '{}'.format(suffix),
+ '{}'.format(source_suffix),
'vendor-product-brand/vendor-product-name/vendor-device-std'
- '{}'.format(suffix),
+ '{}'.format(source_suffix),
'vendor-product-brand/vendor-product-name/vendor-product-device'
- '{}'.format(suffix),
+ '{}'.format(source_suffix),
]
- self.assertEqual('|'.join(pre_fingerprints), metadata['pre-build'])
+ self.assertEqual('|'.join(pre_fingerprints), metadata_dict['pre-build'])
post_fingerprints = [
self.constructFingerprint(
@@ -1405,4 +1491,31 @@
self.constructFingerprint(
'vendor-product-brand/vendor-product-name/vendor-product-device'),
]
- self.assertEqual('|'.join(post_fingerprints), metadata['post-build'])
+ self.assertEqual('|'.join(post_fingerprints), metadata_dict['post-build'])
+
+ self.CheckMetadataEqual(metadata_dict, metadata_proto)
+
+ pre_partition_states = metadata_proto.precondition.partition_state
+ self.assertEqual(2, len(pre_partition_states))
+ self.assertEqual('system', pre_partition_states[0].partition_name)
+ self.assertEqual(['generic'], pre_partition_states[0].device)
+ self.assertEqual(['generic/generic/generic{}'.format(source_suffix)],
+ pre_partition_states[0].build)
+
+ self.assertEqual('vendor', pre_partition_states[1].partition_name)
+ self.assertEqual(['vendor-device-pro', 'vendor-device-std',
+ 'vendor-product-device'], pre_partition_states[1].device)
+ vendor_fingerprints = post_fingerprints
+ self.assertEqual(vendor_fingerprints, pre_partition_states[1].build)
+
+ post_partition_states = metadata_proto.postcondition.partition_state
+ self.assertEqual(2, len(post_partition_states))
+ self.assertEqual('system', post_partition_states[0].partition_name)
+ self.assertEqual(['generic'], post_partition_states[0].device)
+ self.assertEqual([self.constructFingerprint('generic/generic/generic')],
+ post_partition_states[0].build)
+
+ self.assertEqual('vendor', post_partition_states[1].partition_name)
+ self.assertEqual(['vendor-device-pro', 'vendor-device-std',
+ 'vendor-product-device'], post_partition_states[1].device)
+ self.assertEqual(vendor_fingerprints, post_partition_states[1].build)
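
The runtime-fingerprint tests above feed a boot variable file (e.g. ro.boot.sku_name=std,pro) into GetPackageMetadata, which in turn expands every combination of boot variable values into a separate runtime BuildInfo. A minimal sketch of that expansion step, assuming only the comma-splitting shown in ota_utils.py; the helper name and the second variable are illustrative:

    import itertools

    def expand_boot_variables(boot_variable_values):
      """Yields one placeholder dict per combination of boot variable values."""
      keys = list(boot_variable_values.keys())
      for combination in itertools.product(*boot_variable_values.values()):
        yield dict(zip(keys, combination))

    # Two variables with two values each give four runtime combinations.
    values = {'ro.boot.sku_name': ['std', 'pro'],
              'ro.boot.device_region': ['us', 'eu']}
    assert len(list(expand_boot_variables(values))) == 4
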
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index 65092d8..7b7f22a 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -22,6 +22,7 @@
import logging
import os
import os.path
+import re
import struct
import sys
import unittest
@@ -224,13 +225,26 @@
input_fp.seek(offset)
if entry == 'metadata':
expected = b'META-INF/COM/ANDROID/METADATA'
+ elif entry == 'metadata.pb':
+ expected = b'META-INF/COM/ANDROID/METADATA-PB'
else:
expected = entry.replace('.', '-').upper().encode()
self.assertEqual(expected, input_fp.read(size))
if __name__ == '__main__':
- testsuite = unittest.TestLoader().discover(
- os.path.dirname(os.path.realpath(__file__)))
+ # We only want to run tests from the top level directory. Unfortunately the
+ # pattern option of unittest.discover, internally using fnmatch, doesn't
+ # provide a good API to filter the test files based on directory. So we do an
+ # os walk and load them manually.
+ test_modules = []
+ base_path = os.path.dirname(os.path.realpath(__file__))
+ for dirpath, _, files in os.walk(base_path):
+ for fn in files:
+ if dirpath == base_path and re.match('test_.*\\.py$', fn):
+ test_modules.append(fn[:-3])
+
+ test_suite = unittest.TestLoader().loadTestsFromNames(test_modules)
+
# atest needs a verbosity level of >= 2 to correctly parse the result.
- unittest.TextTestRunner(verbosity=2).run(testsuite)
+ unittest.TextTestRunner(verbosity=2).run(test_suite)