Merge "Disable BLE_VND_INCLUDED in GSI"
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 6352e38..41defb2 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -505,9 +505,9 @@
# Remove *_OUT_INTERMEDIATE_LIBRARIES
$(call add-clean-step, rm -rf $(addsuffix /lib,\
- $(HOST_OUT_INTERMEDIATES) $(2ND_HOST_OUT_INTERMEDIATES) \
- $(HOST_CROSS_OUT_INTERMEDIATES) $(2ND_HOST_CROSS_OUT_INTERMEDIATES) \
- $(TARGET_OUT_INTERMEDIATES) $(2ND_TARGET_OUT_INTERMEDIATES)))
+$(HOST_OUT_INTERMEDIATES) $(2ND_HOST_OUT_INTERMEDIATES) \
+$(HOST_CROSS_OUT_INTERMEDIATES) $(2ND_HOST_CROSS_OUT_INTERMEDIATES) \
+$(TARGET_OUT_INTERMEDIATES) $(2ND_TARGET_OUT_INTERMEDIATES)))
# Remove strip.sh intermediates to save space
$(call add-clean-step, find $(OUT_DIR) \( -name "*.so.debug" -o -name "*.so.dynsyms" -o -name "*.so.funcsyms" -o -name "*.so.keep_symbols" -o -name "*.so.mini_debuginfo.xz" \) -print0 | xargs -0 rm -f)
@@ -646,6 +646,8 @@
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/odm/build.prop)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/odm/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/apex)
+
# Remove libcameraservice and libcamera_client from base_system
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libcameraservice.so)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libcamera_client.so)
@@ -685,6 +687,9 @@
# Migrate preopt files to system_other for some devices
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/*/*app/*/oat)
+# Migrate preopt files from system_other for some devices
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system_other)
+
# Remove Android Core Library artifacts from the system partition, now
# that they live in the ART APEX (b/142944799).
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/framework/*.jar)
@@ -699,9 +704,27 @@
# again, as the original change removing them was reverted.
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/framework/*.jar)
+# Remove cas@1.1 from the vendor partition
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/bin/hw/android.hardware.cas@1.1*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/init/android.hardware.cas@1.1*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/vintf/manifest/android.hardware.cas@1.1*)
+
+# Remove com.android.cellbroadcast apex for Go devices
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/apex/com.android.cellbroadcast.apex)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/apex/com.android.cellbroadcast)
+
+# Remove CellBroadcastLegacyApp for Go devices
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/priv-app/CellBroadcastLegacyApp)
+
+# Remove MediaProvider after moving into APEX
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/priv-app/MediaProvider)
+
# The core image variant has been renamed to ""
$(call add-clean-step, find $(SOONG_OUT_DIR)/.intermediates -type d -name "android_*_core*" -print0 | xargs -0 rm -rf)
+# Remove 'media' command
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/media)
+
# Remove CtsShim apks from system partition, since the have been moved inside
# the cts shim apex. Also remove the cts shim apex prebuilt since it has been
# removed in flattened apexs configurations.
@@ -713,6 +736,9 @@
$(call add-clean-step, find $(SOONG_OUT_DIR)/.intermediates -type d -name "android_*_recovery*" -print0 | xargs -0 rm -rf)
$(call add-clean-step, find $(SOONG_OUT_DIR)/.intermediates -type d -name "android_*_vendor*" -print0 | xargs -0 rm -rf)
+# Remove PermissionController after moving into APEX
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/priv-app/*PermissionController)
+
# Clean up VTS-Core and VTS10 related artifacts.
$(call add-clean-step, rm -rf $(HOST_OUT)/vts-core/*)
$(call add-clean-step, rm -rf $(HOST_OUT)/framework/vts-core-tradefed.jar)
@@ -725,6 +751,10 @@
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/default.prop)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/prop.default)
+# Workaround for Soong not being able to rebuild the host binary if its
+# JNI dependencies change: b/170389375
+$(call add-clean-step, rm -rf $(OUT_DIR)/soong/host/*/lib*/libconscrypt_openjdk_jni.so)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/OWNERS b/OWNERS
index 05f8b3d..4cac0f5 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,11 +1 @@
-# Core build team (MTV)
-ccross@android.com
-dwillemsen@google.com
-asmundak@google.com
-jungjw@google.com
-
-# To expedite LON reviews
-hansson@google.com
-
-# For version updates
-per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
+include platform/build/soong:/OWNERS
diff --git a/common/math.mk b/common/math.mk
index 83f2218..ec15f88 100644
--- a/common/math.mk
+++ b/common/math.mk
@@ -181,6 +181,22 @@
$(call math-expect,(call numbers_less_than,4,0 2 1 3),0 2 1 3)
$(call math-expect,(call numbers_less_than,3,0 2 1 3 2),0 2 1 2)
+# Returns the words in $2 that are numbers and are greater than or equal to $1
+define numbers_greater_or_equal_to
+$(strip \
+ $(foreach n,$2, \
+ $(if $(call math_is_number,$(n)), \
+ $(if $(call math_gt_or_eq,$(n),$(1)), \
+ $(n)))))
+endef
+
+$(call math-expect,(call numbers_greater_or_equal_to,4,0 1 2 3),)
+$(call math-expect,(call numbers_greater_or_equal_to,3,0 2 1 3),3)
+$(call math-expect,(call numbers_greater_or_equal_to,2,0 2 1 3),2 3)
+$(call math-expect,(call numbers_greater_or_equal_to,1,0 2 1 3),2 1 3)
+$(call math-expect,(call numbers_greater_or_equal_to,0,0 2 1 3),0 2 1 3)
+$(call math-expect,(call numbers_greater_or_equal_to,1,0 2 1 3 2),2 1 3 2)
+
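A hypothetical usage sketch (the variable name below is illustrative, not from this change): the helper keeps only the numeric words at or above the threshold, so it can prune mixed word lists.

    # MY_SHIPPING_API_LEVELS := 28 29 qcom 30   (hypothetical value)
    # $(call numbers_greater_or_equal_to,29,$(MY_SHIPPING_API_LEVELS)) expands to "29 30";
    # "28" is below the threshold and "qcom" fails math_is_number, so both are dropped.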
_INT_LIMIT_WORDS := $(foreach a,x x,$(foreach b,x x x x x x x x x x x x x x x x,\
$(foreach c,x x x x x x x x x x x x x x x x,x x x x x x x x x x x x x x x x)))
diff --git a/core/Makefile b/core/Makefile
index 92723d3..79255ca 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -40,6 +40,10 @@
)
endef
+# Phony target to check that PRODUCT_COPY_FILES copy pairs don't contain ELF files
+.PHONY: check-elf-prebuilt-product-copy-files
+check-elf-prebuilt-product-copy-files:
+
check_elf_prebuilt_product_copy_files := true
ifneq (,$(filter true,$(BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES)))
check_elf_prebuilt_product_copy_files :=
@@ -162,11 +166,19 @@
$(call dist-for-goals,sdk,$(API_FINGERPRINT))
INSTALLED_RECOVERYIMAGE_TARGET :=
+# Build the recovery image only if
+# BUILDING_RECOVERY_IMAGE && !BOARD_USES_RECOVERY_AS_BOOT && !BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT.
+# If BOARD_USES_RECOVERY_AS_BOOT is true, leave INSTALLED_RECOVERYIMAGE_TARGET empty because
+# INSTALLED_BOOTIMAGE_TARGET is built with the recovery resources.
+# If BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT is true, leave it empty so the recovery
+# resources are still built but the final recovery image is not.
ifdef BUILDING_RECOVERY_IMAGE
ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ifneq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
INSTALLED_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img
endif
endif
+endif
include $(BUILD_SYSTEM)/sysprop.mk
@@ -378,10 +390,31 @@
VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR :=
endif
+# Create the "kernel module directory" to "vendor ramdisk fragment" inverse mapping.
+$(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
+ $(if $(and $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS), \
+ $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).PREBUILT)), \
+ $(error Must not specify KERNEL_MODULE_DIRS for prebuilt vendor ramdisk fragment "$(vendor_ramdisk_fragment)": $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS))) \
+ $(eval VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR := $(call intermediates-dir-for,PACKAGING,vendor_ramdisk_fragment-dlkm-$(vendor_ramdisk_fragment))) \
+ $(eval VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).FILES :=) \
+ $(foreach dir,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS), \
+ $(eval kmd_vrf := KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(dir)) \
+    $(if $($(kmd_vrf)),$(error Kernel module directory "$(dir)" belongs to multiple vendor ramdisk fragments: "$($(kmd_vrf))" "$(vendor_ramdisk_fragment)", each kernel module directory should belong to at most one vendor ramdisk fragment)) \
+ $(eval $(kmd_vrf) := $(vendor_ramdisk_fragment)) \
+ ) \
+)
+
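For context, a hypothetical BoardConfig.mk sketch of the variables this loop consumes (the fragment names and prebuilt path are made up for illustration):

    BOARD_VENDOR_RAMDISK_FRAGMENTS := dlkm firmware
    # A fragment built from a kernel module directory; mutually exclusive with PREBUILT.
    BOARD_VENDOR_RAMDISK_FRAGMENT.dlkm.KERNEL_MODULE_DIRS := top
    # A fragment taken as-is from a prebuilt cpio archive (hypothetical path).
    BOARD_VENDOR_RAMDISK_FRAGMENT.firmware.PREBUILT := device/acme/rocket/firmware-ramdisk.cpio.lz4

With these values the loop records KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_top := dlkm, the inverse mapping consulted below when distributing kernel modules to staging directories.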
BOARD_KERNEL_MODULE_DIRS += top
$(foreach dir,$(BOARD_KERNEL_MODULE_DIRS), \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,RECOVERY,$(TARGET_RECOVERY_ROOT_OUT),,modules.load.recovery,,$(dir))) \
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR_RAMDISK,$(TARGET_VENDOR_RAMDISK_OUT),,modules.load,$(VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(dir))) \
+ $(eval vendor_ramdisk_fragment := $(KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(dir))) \
+ $(if $(vendor_ramdisk_fragment), \
+ $(eval output_dir := $(VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR)) \
+ $(eval result_var := VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).FILES) \
+ $(eval ### else ###), \
+ $(eval output_dir := $(TARGET_VENDOR_RAMDISK_OUT)) \
+ $(eval result_var := ALL_DEFAULT_INSTALLED_MODULES)) \
+ $(eval $(result_var) += $(call build-image-kernel-modules-dir,VENDOR_RAMDISK,$(output_dir),,modules.load,$(VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(dir))) \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-recovery-load,$(dir))) \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(dir))) \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(dir))) \
@@ -437,7 +470,7 @@
$(if $(PACKAGES.$(p).APKCERTS_FILE),\
$(call _apkcerts_merge,$(PACKAGES.$(p).APKCERTS_FILE), $@),\
$(if $(PACKAGES.$(p).EXTERNAL_KEY),\
- $(call _apkcerts_write_line,$(PACKAGES.$(p).STEM),"EXTERNAL","",$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@),\
+ $(call _apkcerts_write_line,$(PACKAGES.$(p).STEM),EXTERNAL,,$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@),\
$(call _apkcerts_write_line,$(PACKAGES.$(p).STEM),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@))))
# In case value of PACKAGES is empty.
$(hide) touch $@
@@ -495,9 +528,20 @@
--title="Remaining Android.mk files for $(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)" \
--codesearch=$(PRIVATE_CODE_SEARCH_BASE_URL) \
--out_dir="$(OUT_DIR)" \
+ --mode=html \
> $@
$(call dist-for-goals,droidcore,$(MK2BP_REMAINING_HTML))
+MK2BP_REMAINING_CSV := $(PRODUCT_OUT)/mk2bp_remaining.csv
+$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+ @rm -f $@
+ $(hide) $(MK2BP_CATALOG_SCRIPT) \
+ --device=$(TARGET_DEVICE) \
+ --out_dir="$(OUT_DIR)" \
+ --mode=csv \
+ > $@
+$(call dist-for-goals,droidcore,$(MK2BP_REMAINING_CSV))
+
# -----------------------------------------------------------------
# Modules use -Wno-error, or added default -Wall -Werror
WALL_WERROR := $(PRODUCT_OUT)/wall_werror.txt
@@ -524,6 +568,12 @@
$(call dist-for-goals,droidcore,$(PGO_PROFILE_MISSING))
+CERTIFICATE_VIOLATION_MODULES_FILENAME := $(PRODUCT_OUT)/certificate_violation_modules.txt
+$(CERTIFICATE_VIOLATION_MODULES_FILENAME):
+ rm -f $@
+ $(foreach m,$(sort $(CERTIFICATE_VIOLATION_MODULES)), echo $(m) >> $@;)
+$(call dist-for-goals,droidcore,$(CERTIFICATE_VIOLATION_MODULES_FILENAME))
+
# -----------------------------------------------------------------
# The dev key is used to sign this package, and as the key required
# for future OTA packages installed by this system. Actual product
@@ -584,6 +634,10 @@
# #################################################################
ifneq ($(strip $(TARGET_NO_BOOTLOADER)),true)
INSTALLED_BOOTLOADER_MODULE := $(PRODUCT_OUT)/bootloader
+ ifdef BOARD_PREBUILT_BOOTLOADER
+ $(eval $(call copy-one-file,$(BOARD_PREBUILT_BOOTLOADER),$(INSTALLED_BOOTLOADER_MODULE)))
+ $(call dist-for-goals,dist_files,$(INSTALLED_BOOTLOADER_MODULE))
+ endif # BOARD_PREBUILT_BOOTLOADER
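A minimal sketch of how a board opts in (the path is hypothetical): setting BOARD_PREBUILT_BOOTLOADER makes the build copy the prebuilt to $(PRODUCT_OUT)/bootloader and dist it with dist_files.

    # BoardConfig.mk (hypothetical)
    BOARD_PREBUILT_BOOTLOADER := device/acme/rocket/prebuilts/bootloader.img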
ifeq ($(strip $(TARGET_BOOTLOADER_IS_2ND)),true)
INSTALLED_2NDBOOTLOADER_TARGET := $(PRODUCT_OUT)/2ndbootloader
else
@@ -688,6 +742,34 @@
BUILT_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
endif
+INTERNAL_PREBUILT_BOOTIMAGE :=
+
+my_installed_prebuilt_gki_apex := $(strip $(foreach package,$(PRODUCT_PACKAGES),$(if $(ALL_MODULES.$(package).EXTRACTED_BOOT_IMAGE),$(package))))
+ifdef my_installed_prebuilt_gki_apex
+ ifneq (1,$(words $(my_installed_prebuilt_gki_apex))) # len(my_installed_prebuilt_gki_apex) > 1
+    $(error More than one prebuilt GKI APEX is installed: $(my_installed_prebuilt_gki_apex))
+ endif # len(my_installed_prebuilt_gki_apex) > 1
+
+ ifdef BOARD_PREBUILT_BOOTIMAGE
+ $(error Must not define BOARD_PREBUILT_BOOTIMAGE because a prebuilt GKI APEX is installed: $(my_installed_prebuilt_gki_apex))
+ endif # BOARD_PREBUILT_BOOTIMAGE defined
+
+ my_apex_extracted_boot_image := $(ALL_MODULES.$(my_installed_prebuilt_gki_apex).EXTRACTED_BOOT_IMAGE)
+ INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
+ $(eval $(call copy-one-file,$(my_apex_extracted_boot_image),$(INSTALLED_BOOTIMAGE_TARGET)))
+
+ INTERNAL_PREBUILT_BOOTIMAGE := $(my_apex_extracted_boot_image)
+
+else # my_installed_prebuilt_gki_apex not defined
+
+# $1: boot image target
+# returns the kernel used to make the bootimage
+define bootimage-to-kernel
+ $(if $(BOARD_KERNEL_BINARIES),\
+ $(PRODUCT_OUT)/$(subst .img,,$(subst boot,kernel,$(notdir $(1)))),\
+ $(INSTALLED_KERNEL_TARGET))
+endef
+
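A worked example of the mapping (the kernel names are illustrative): when a board builds multiple kernels, bootimage-to-kernel derives the kernel path from the boot image name; otherwise it falls back to the single INSTALLED_KERNEL_TARGET.

    # BOARD_KERNEL_BINARIES := kernel-5.4 kernel-5.10   (hypothetical)
    # $(call bootimage-to-kernel,$(PRODUCT_OUT)/boot-5.4.img)  -> $(PRODUCT_OUT)/kernel-5.4
    # $(call bootimage-to-kernel,$(PRODUCT_OUT)/boot-5.10.img) -> $(PRODUCT_OUT)/kernel-5.10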
ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
BOARD_KERNEL_BOOTIMAGE_PARTITION_SIZE := $(BOARD_BOOTIMAGE_PARTITION_SIZE)
endif
@@ -700,8 +782,7 @@
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
INTERNAL_BOOTIMAGE_ARGS := \
- $(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
- --kernel $(INSTALLED_KERNEL_TARGET)
+ $(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
@@ -724,25 +805,30 @@
INTERNAL_KERNEL_CMDLINE := $(strip $(INTERNAL_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT) $(VERITY_KEYID))
-ifndef BUILDING_VENDOR_BOOT_IMAGE
-ifdef BOARD_KERNEL_BASE
- INTERNAL_BOOTIMAGE_ARGS += --base $(BOARD_KERNEL_BASE)
-endif
-ifdef BOARD_KERNEL_PAGESIZE
- INTERNAL_BOOTIMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
-endif
-ifdef INTERNAL_KERNEL_CMDLINE
- INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(INTERNAL_KERNEL_CMDLINE)"
-endif
-else
-# building vendor boot image, dtb/base/pagesize go there
-ifdef GENERIC_KERNEL_CMDLINE
- INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(GENERIC_KERNEL_CMDLINE)"
-endif
-endif
+# kernel cmdline/base/pagesize in boot.
+# - If using GKI, use GENERIC_KERNEL_CMDLINE. Remove kernel base and pagesize because they are
+# device-specific.
+# - If not using GKI:
+#   - If building vendor_boot, INTERNAL_KERNEL_CMDLINE, base and pagesize go in vendor_boot.
+# - Otherwise, put them in boot.
+ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE))
+ ifdef GENERIC_KERNEL_CMDLINE
+ INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(GENERIC_KERNEL_CMDLINE)"
+ endif
+else ifndef BUILDING_VENDOR_BOOT_IMAGE # && BOARD_USES_GENERIC_KERNEL_IMAGE != true
+ ifdef INTERNAL_KERNEL_CMDLINE
+ INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(INTERNAL_KERNEL_CMDLINE)"
+ endif
+ ifdef BOARD_KERNEL_BASE
+ INTERNAL_BOOTIMAGE_ARGS += --base $(BOARD_KERNEL_BASE)
+ endif
+ ifdef BOARD_KERNEL_PAGESIZE
+ INTERNAL_BOOTIMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
+ endif
+endif # BUILDING_VENDOR_BOOT_IMAGE == "" && BOARD_USES_GENERIC_KERNEL_IMAGE != true
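A concrete illustration of the three cases (values are hypothetical):

    # BOARD_USES_GENERIC_KERNEL_IMAGE := true, GENERIC_KERNEL_CMDLINE := console=ttyS0
    #   -> boot gets only --cmdline "console=ttyS0"; no --base/--pagesize, and
    #      INTERNAL_KERNEL_CMDLINE is left to the vendor_boot rules.
    # No GKI, building vendor_boot -> none of the three flags is added to INTERNAL_BOOTIMAGE_ARGS here.
    # No GKI, no vendor_boot       -> --cmdline/--base/--pagesize all go into INTERNAL_BOOTIMAGE_ARGS.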
INTERNAL_MKBOOTIMG_VERSION_ARGS := \
- --os_version $(PLATFORM_VERSION) \
+ --os_version $(PLATFORM_VERSION_LAST_STABLE) \
--os_patch_level $(PLATFORM_SECURITY_PATCH)
# Define these only if we are building boot
@@ -751,74 +837,86 @@
ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
$(error TARGET_BOOTIMAGE_USE_EXT2 is not supported anymore)
+endif # TARGET_BOOTIMAGE_USE_EXT2
-else ifeq (true,$(BOARD_AVB_ENABLE)) # TARGET_BOOTIMAGE_USE_EXT2 != true
+$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET), $(eval $(call add-dependency,$(b),$(call bootimage-to-kernel,$(b)))))
+
+ifeq (true,$(BOARD_AVB_ENABLE))
+
+# $1: boot image target
+define build_boot_board_avb_enabled
+ $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+ $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot)))
+ $(AVBTOOL) add_hash_footer \
+ --image $(1) \
+ --partition_size $(call get-bootimage-partition-size,$(1),boot) \
+ --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
+ $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+endef
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH)
$(call pretty,"Target boot image: $@")
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(hide) $(call assert-max-image-size,$@,$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
- $(hide) $(AVBTOOL) add_hash_footer \
- --image $@ \
- --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
- --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
- $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+ $(call build_boot_board_avb_enabled,$@)
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
@echo "make $@: ignoring dependencies"
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
- $(hide) $(AVBTOOL) add_hash_footer \
- --image $(INSTALLED_BOOTIMAGE_TARGET) \
- --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
- --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
- $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+ $(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
else ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
+# $1: boot image target
+define build_boot_supports_boot_signer
+ $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+  $(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)
+ $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot))
+endef
+
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
$(call pretty,"Target boot image: $@")
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(BOOT_SIGNER) /boot $@ $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
- $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+ $(call build_boot_supports_boot_signer,$@)
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
@echo "make $@: ignoring dependencies"
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
- $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+ $(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_boot_signer,$(b)))
else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true
+# $1: boot image target
+define build_boot_supports_vboot
+ $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned
+ $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1)
+ $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot))
+endef
+
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY)
$(call pretty,"Target boot image: $@")
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@.unsigned
- $(VBOOT_SIGNER) $(FUTILITY) $@.unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $@.keyblock $@
- $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+ $(call build_boot_supports_vboot,$@)
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(FUTILITY)
@echo "make $@: ignoring dependencies"
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET).unsigned
- $(VBOOT_SIGNER) $(FUTILITY) $(INSTALLED_BOOTIMAGE_TARGET).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(INSTALLED_BOOTIMAGE_TARGET).keyblock $(INSTALLED_BOOTIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+ $(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_vboot,$(b)))
else # PRODUCT_SUPPORTS_VBOOT != true
+# $1: boot image target
+define build_boot_novboot
+ $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+ $(call assert-max-image-size,$1,$(call get-bootimage-partition-size,$(1),boot))
+endef
+
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES)
$(call pretty,"Target boot image: $@")
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+ $(call build_boot_novboot,$@)
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG)
@echo "make $@: ignoring dependencies"
- $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+ $(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_novboot,$(b)))
-endif # TARGET_BOOTIMAGE_USE_EXT2
+endif # BOARD_AVB_ENABLE
endif # BUILDING_BOOT_IMAGE
else # TARGET_NO_KERNEL == "true"
@@ -827,12 +925,23 @@
# Remove when b/63676296 is resolved.
$(error Prebuilt bootimage is only supported for AB targets)
endif
+INTERNAL_PREBUILT_BOOTIMAGE := $(BOARD_PREBUILT_BOOTIMAGE)
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
-$(eval $(call copy-one-file,$(BOARD_PREBUILT_BOOTIMAGE),$(INSTALLED_BOOTIMAGE_TARGET)))
+$(eval $(call copy-one-file,$(INTERNAL_PREBUILT_BOOTIMAGE),$(INSTALLED_BOOTIMAGE_TARGET)))
else # BOARD_PREBUILT_BOOTIMAGE not defined
INSTALLED_BOOTIMAGE_TARGET :=
endif # BOARD_PREBUILT_BOOTIMAGE
endif # TARGET_NO_KERNEL
+endif # my_installed_prebuilt_gki_apex not defined
+
+my_apex_extracted_boot_image :=
+my_installed_prebuilt_gki_apex :=
+
+# -----------------------------------------------------------------
+# declare recovery ramdisk files
+ifeq ($(BUILDING_RECOVERY_IMAGE),true)
+INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP := $(call intermediates-dir-for,PACKAGING,recovery)/ramdisk_files-timestamp
+endif
# -----------------------------------------------------------------
# vendor boot image
@@ -847,8 +956,25 @@
$(ALL_DEFAULT_INSTALLED_MODULES))
INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor-boot)/vendor-ramdisk.cpio$(RAMDISK_EXT)
+
+ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+$(INTERNAL_VENDOR_RAMDISK_TARGET): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
+$(INTERNAL_VENDOR_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+endif
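In effect, a board that sets the following (sketch) gets no standalone recovery.img; the recovery files are still staged under $(TARGET_RECOVERY_ROOT_OUT) via INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP, and that directory is passed to mkbootfs below as PRIVATE_ADDITIONAL_DIR so the resources land in the vendor ramdisk instead.

    # BoardConfig.mk (hypothetical device)
    BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT := true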
+
$(INTERNAL_VENDOR_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
- $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_VENDOR_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
+ $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_VENDOR_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
+
+INSTALLED_FILES_FILE_VENDOR_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk.txt
+INSTALLED_FILES_JSON_VENDOR_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_RAMDISK:.txt=.json)
+$(INSTALLED_FILES_FILE_VENDOR_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_RAMDISK)
+$(INSTALLED_FILES_FILE_VENDOR_RAMDISK): $(INTERNAL_VENDOR_RAMDISK_TARGET)
+$(INSTALLED_FILES_FILE_VENDOR_RAMDISK): $(INTERNAL_VENDOR_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
+ echo Installed file list: $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_VENDOR_RAMDISK_OUT) > $(@:.txt=.json)
+ $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
INTERNAL_VENDOR_BOOTIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET)
@@ -863,12 +989,58 @@
INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_cmdline "$(INTERNAL_KERNEL_CMDLINE)"
endif
+# $(1): Build target name
+# $(2): Staging dir to be compressed
+# $(3): Build dependencies
+define build-vendor-ramdisk-fragment-target
+$(1): $(3) $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
+ $(MKBOOTFS) -d $(TARGET_OUT) $(2) | $(COMPRESSION_COMMAND) > $$@
+endef
+
+# $(1): Ramdisk name
+define build-vendor-ramdisk-fragment
+$(strip \
+ $(eval build_target := $(call intermediates-dir-for,PACKAGING,vendor_ramdisk_fragments)/$(1).cpio$(RAMDISK_EXT)) \
+ $(eval $(call build-vendor-ramdisk-fragment-target,$(build_target),$(VENDOR_RAMDISK_FRAGMENT.$(1).STAGING_DIR),$(VENDOR_RAMDISK_FRAGMENT.$(1).FILES))) \
+ $(build_target) \
+)
+endef
+
+# $(1): Ramdisk name
+# $(2): Prebuilt file path
+define build-prebuilt-vendor-ramdisk-fragment
+$(strip \
+ $(eval build_target := $(call intermediates-dir-for,PACKAGING,prebuilt_vendor_ramdisk_fragments)/$(1)) \
+ $(eval $(call copy-one-file,$(2),$(build_target))) \
+ $(build_target) \
+)
+endef
+
+INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS :=
+INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS :=
+
+$(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
+ $(eval prebuilt_vendor_ramdisk_fragment_file := $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).PREBUILT)) \
+ $(if $(prebuilt_vendor_ramdisk_fragment_file), \
+ $(eval vendor_ramdisk_fragment_target := $(call build-prebuilt-vendor-ramdisk-fragment,$(vendor_ramdisk_fragment),$(prebuilt_vendor_ramdisk_fragment_file))) \
+ $(eval ### else ###), \
+ $(eval vendor_ramdisk_fragment_target := $(call build-vendor-ramdisk-fragment,$(vendor_ramdisk_fragment))) \
+ $(if $(filter --ramdisk_type,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)),, \
+ $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_type DLKM))) \
+ $(if $(filter --ramdisk_name,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)), \
+ $(error Must not specify --ramdisk_name for vendor ramdisk fragment: $(vendor_ramdisk_fragment))) \
+ $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_name $(vendor_ramdisk_fragment)) \
+ $(eval INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS += $(vendor_ramdisk_fragment_target)) \
+ $(eval INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS += $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS) --vendor_ramdisk_fragment $(vendor_ramdisk_fragment_target)) \
+)
+
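For a non-prebuilt fragment named dlkm (name chosen for illustration) with no board-provided MKBOOTIMG_ARGS, the loop above ends up contributing roughly:

    --ramdisk_type DLKM --ramdisk_name dlkm \
    --vendor_ramdisk_fragment $(call intermediates-dir-for,PACKAGING,vendor_ramdisk_fragments)/dlkm.cpio$(RAMDISK_EXT)

to INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS, which is then appended to the mkbootimg invocations below.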
INSTALLED_VENDOR_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot.img
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DTBIMAGE_TARGET)
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
ifeq ($(BOARD_AVB_ENABLE),true)
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOTIMAGE_KEY_PATH)
$(call pretty,"Target vendor_boot image: $@")
- $(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INTERNAL_VENDOR_RAMDISK_TARGET) --vendor_boot $@
+ $(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS) --vendor_boot $@
$(call assert-max-image-size,$@,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
$(AVBTOOL) add_hash_footer \
--image $@ \
@@ -878,7 +1050,7 @@
else
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET):
$(call pretty,"Target vendor_boot image: $@")
- $(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INTERNAL_VENDOR_RAMDISK_TARGET) --vendor_boot $@
+ $(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS) --vendor_boot $@
$(call assert-max-image-size,$@,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
endif
endif # BUILDING_VENDOR_BOOT_IMAGE
@@ -1020,6 +1192,8 @@
license_modules := $(filter-out $(TARGET_OUT_TESTCASES)/%,$(license_modules))
# filesystem images: system, vendor, product, system_ext, odm, vendor_dlkm, and odm_dlkm
license_modules_system := $(filter $(TARGET_OUT)/%,$(license_modules))
+# system_other is relevant to the system partition.
+license_modules_system += $(filter $(TARGET_OUT_SYSTEM_OTHER)/%,$(license_modules))
license_modules_vendor := $(filter $(TARGET_OUT_VENDOR)/%,$(license_modules))
license_modules_product := $(filter $(TARGET_OUT_PRODUCT)/%,$(license_modules))
license_modules_system_ext := $(filter $(TARGET_OUT_SYSTEM_EXT)/%,$(license_modules))
@@ -1211,6 +1385,9 @@
ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
endif
+ifneq (true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED))
+ INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG := -s
+endif
ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
endif
@@ -1235,6 +1412,18 @@
$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE) \
$(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE) \
$(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
+ ,erofs),)
+INTERNAL_USERIMAGES_DEPS += $(MKEROFSUSERIMG)
+endif
+
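For example (the choice of partition is arbitrary), a board selecting EROFS for system pulls $(MKEROFSUSERIMG) into INTERNAL_USERIMAGES_DEPS; sparse output can be disabled with TARGET_USERIMAGES_SPARSE_EROFS_DISABLED (see the earlier hunk).

    # BoardConfig.mk (hypothetical)
    BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE := erofs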
+ifneq ($(filter \
+ $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) \
+ $(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE) \
+ $(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE) \
+ $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) \
+ $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE) \
+ $(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE) \
+ $(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
,squashfs),)
INTERNAL_USERIMAGES_DEPS += $(MKSQUASHFSUSERIMG)
endif
@@ -1253,7 +1442,13 @@
# Get a colon-separated list of search paths.
INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
+# Collects file_contexts files from modules to be installed
+$(call merge-fc-files, \
+ $(sort $(foreach m,$(product_MODULES),$(ALL_MODULES.$(m).FILE_CONTEXTS))),\
+ $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.modules.tmp)
+
SELINUX_FC := $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.bin
+
INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
@@ -1272,6 +1467,8 @@
$(if $(BOARD_SYSTEMIMAGE_PARTITION_SIZE),$(hide) echo "system_size=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(INTERNAL_SYSTEM_OTHER_PARTITION_SIZE),$(hide) echo "system_other_size=$(INTERNAL_SYSTEM_OTHER_PARTITION_SIZE)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_COMPRESS),$(hide) echo "system_fs_compress=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_COMPRESS)" >> $(1))
+ $(if $(BOARD_SYSTEMIMAGE_F2FS_SLOAD_COMPRESS_FLAGS),$(hide) echo "system_f2fs_sldc_flags=$(BOARD_SYSTEMIMAGE_F2FS_SLOAD_COMPRESS_FLAGS)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_EXTFS_RSV_PCT),$(hide) echo "system_extfs_rsv_pct=$(BOARD_SYSTEMIMAGE_EXTFS_RSV_PCT)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
@@ -1293,6 +1490,7 @@
$(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(PRODUCT_FS_CASEFOLD),$(hide) echo "needs_casefold=$(PRODUCT_FS_CASEFOLD)" >> $(1))
$(if $(PRODUCT_QUOTA_PROJID),$(hide) echo "needs_projid=$(PRODUCT_QUOTA_PROJID)" >> $(1))
+ $(if $(PRODUCT_FS_COMPRESSION),$(hide) echo "needs_compress=$(PRODUCT_FS_COMPRESSION)" >> $(1))
$(hide) echo "userdata_selinux_fc=$(SELINUX_FC)" >> $(1)
$(hide) echo "building_userdata_image=$(BUILDING_USERDATA_IMAGE)" >> $(1)
)
@@ -1400,6 +1598,7 @@
$(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1))
$(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
+$(if $(INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG),$(hide) echo "erofs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG)" >> $(1))
$(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
$(if $(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG),$(hide) echo "f2fs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG)" >> $(1))
$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
@@ -1546,16 +1745,15 @@
INSTALLED_FILES_FILE_RECOVERY := $(PRODUCT_OUT)/installed-files-recovery.txt
INSTALLED_FILES_JSON_RECOVERY := $(INSTALLED_FILES_FILE_RECOVERY:.txt=.json)
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+INSTALLED_BOOTIMAGE_TARGET := $(BUILT_BOOTIMAGE_TARGET)
+endif
+
# TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other
# INSTALLED_FILES_FILE_* rules. Because currently there're cp/rsync/rm commands in
# build-recoveryimage-target, which would touch the files under TARGET_RECOVERY_OUT and race with
# the call to FILELIST.
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-INSTALLED_BOOTIMAGE_TARGET := $(BUILT_BOOTIMAGE_TARGET)
-$(INSTALLED_FILES_FILE_RECOVERY): $(INSTALLED_BOOTIMAGE_TARGET)
-else
-$(INSTALLED_FILES_FILE_RECOVERY): $(INSTALLED_RECOVERYIMAGE_TARGET)
-endif
+$(INSTALLED_FILES_FILE_RECOVERY): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
$(INSTALLED_FILES_FILE_RECOVERY): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RECOVERY)
$(INSTALLED_FILES_FILE_RECOVERY): $(INTERNAL_RECOVERYIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@@ -1582,7 +1780,9 @@
# SELinux files
IGNORE_RECOVERY_SEPOLICY := $(patsubst $(TARGET_RECOVERY_OUT)/%,--exclude=/%,$(recovery_sepolicy))
-recovery_kernel := $(INSTALLED_KERNEL_TARGET) # same as a non-recovery system
+# if building multiple boot images from multiple kernels, use the first kernel listed
+# for the recovery image
+recovery_kernel := $(firstword $(INSTALLED_KERNEL_TARGET))
recovery_ramdisk := $(PRODUCT_OUT)/ramdisk-recovery.img
recovery_resources_common := bootable/recovery/res
@@ -1752,9 +1952,13 @@
# e) We include the recovery ACPIO image within recovery - not needing the resource file as we
# do bsdiff because boot and recovery will contain different number of entries
# (BOARD_INCLUDE_RECOVERY_ACPIO = true).
+# f) We build a single image that contains both vendor_boot and recovery - no recovery image to
+# install
+# (BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT = true).
ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT) \
- $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
+ $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO) \
+ $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
# Named '.dat' so we don't attempt to use imgdiff for patching it.
RECOVERY_RESOURCE_ZIP := $(TARGET_OUT_VENDOR)/etc/recovery-resource.dat
ALL_DEFAULT_INSTALLED_MODULES += $(RECOVERY_RESOURCE_ZIP)
@@ -1807,19 +2011,25 @@
$(hide) cat $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET) >> $@
$(call append-recovery-ui-properties,$(PRIVATE_RECOVERY_UI_PROPERTIES),$@)
-ifeq (truetrue,$(strip $(BUILDING_VENDOR_BOOT_IMAGE))$(strip $(AB_OTA_UPDATER)))
- INTERNAL_RECOVERYIMAGE_ARGS := --ramdisk $(recovery_ramdisk)
-ifdef GENERIC_KERNEL_CMDLINE
- INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(GENERIC_KERNEL_CMDLINE)"
+# Only install boot/etc/build.prop to the recovery image when BOARD_USES_RECOVERY_AS_BOOT is true.
+# On devices with a dedicated recovery partition, the file should come from the boot
+# ramdisk.
+ifeq (true,$(BOARD_USES_RECOVERY_AS_BOOT))
+INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RECOVERY_ROOT_OUT)/$(RAMDISK_BUILD_PROP_REL_PATH)
+$(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET): $(INSTALLED_RAMDISK_BUILD_PROP_TARGET)
+ $(copy-file-to-target)
endif
-else # not (BUILDING_VENDOR_BOOT_IMAGE and AB_OTA_UPDATER)
- INTERNAL_RECOVERYIMAGE_ARGS := \
- $(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
- --ramdisk $(recovery_ramdisk)
+
+INTERNAL_RECOVERYIMAGE_ARGS := --ramdisk $(recovery_ramdisk)
+
+ifneq (truetrue,$(strip $(BUILDING_VENDOR_BOOT_IMAGE))$(strip $(BOARD_USES_RECOVERY_AS_BOOT)))
+INTERNAL_RECOVERYIMAGE_ARGS += $(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
# Assumes this has already been stripped
+ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE))
ifdef INTERNAL_KERNEL_CMDLINE
INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(INTERNAL_KERNEL_CMDLINE)"
-endif
+endif # INTERNAL_KERNEL_CMDLINE != ""
+endif # BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE != true
ifdef BOARD_KERNEL_BASE
INTERNAL_RECOVERYIMAGE_ARGS += --base $(BOARD_KERNEL_BASE)
endif
@@ -1839,18 +2049,19 @@
ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
INTERNAL_RECOVERYIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET)
endif
-endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET not defined
+endif # (BUILDING_VENDOR_BOOT_IMAGE and BOARD_USES_RECOVERY_AS_BOOT)
ifndef BOARD_RECOVERY_MKBOOTIMG_ARGS
BOARD_RECOVERY_MKBOOTIMG_ARGS := $(BOARD_MKBOOTIMG_ARGS)
endif
-$(recovery_ramdisk): $(MKBOOTFS) $(COMPRESSION_COMMAND_DEPS) \
+$(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP): $(MKBOOTFS) $(COMPRESSION_COMMAND_DEPS) \
$(INTERNAL_ROOT_FILES) \
$(INSTALLED_RAMDISK_TARGET) \
$(INTERNAL_RECOVERYIMAGE_FILES) \
$(recovery_sepolicy) \
$(INSTALLED_2NDBOOTLOADER_TARGET) \
$(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
+ $(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET) \
$(recovery_resource_deps) \
$(recovery_fstab)
# Making recovery image
@@ -1879,16 +2090,19 @@
cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.wipe)
ln -sf prop.default $(TARGET_RECOVERY_ROOT_OUT)/default.prop
$(BOARD_RECOVERY_IMAGE_PREPARE)
+ $(hide) touch $@
+
+$(recovery_ramdisk): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(COMPRESSION_COMMAND) > $(recovery_ramdisk)
# $(1): output file
-# $(2): kernel file
+# $(2): optional kernel file
define build-recoveryimage-target
$(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
- $(MKBOOTIMG) --kernel $(2) $(INTERNAL_RECOVERYIMAGE_ARGS) \
+ $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
$(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \
--output $(1).unsigned, \
- $(MKBOOTIMG) --kernel $(2) $(INTERNAL_RECOVERYIMAGE_ARGS) \
+ $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
$(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \
--output $(1))
$(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\
@@ -1933,13 +2147,15 @@
endif
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET), $(eval $(call add-dependency,$(b),$(call bootimage-to-kernel,$(b)))))
$(INSTALLED_BOOTIMAGE_TARGET): $(recoveryimage-deps)
$(call pretty,"Target boot image from recovery: $@")
$(call build-recoveryimage-target, $@, $(PRODUCT_OUT)/$(subst .img,,$(subst boot,kernel,$(notdir $@))))
endif # BOARD_USES_RECOVERY_AS_BOOT
$(INSTALLED_RECOVERYIMAGE_TARGET): $(recoveryimage-deps)
- $(call build-recoveryimage-target, $@, $(recovery_kernel))
+ $(call build-recoveryimage-target, $@, \
+ $(if $(filter true, $(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)),, $(recovery_kernel)))
ifdef RECOVERY_RESOURCE_ZIP
$(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ZIPTIME)
@@ -1951,7 +2167,8 @@
.PHONY: recoveryimage-nodeps
recoveryimage-nodeps:
@echo "make $@: ignoring dependencies"
- $(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET))
+ $(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET), \
+ $(if $(filter true, $(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)),, $(recovery_kernel)))
else # BUILDING_RECOVERY_IMAGE
RECOVERY_RESOURCE_ZIP :=
@@ -2049,6 +2266,7 @@
#
# Note: it's intentional to skip signing for boot-debug.img, because it
# can only be used if the device is unlocked with verification error.
+ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
INSTALLED_DEBUG_BOOTIMAGE_TARGET := $(foreach k,$(subst kernel,boot-debug,$(BOARD_KERNEL_BINARIES)), \
@@ -2103,15 +2321,20 @@
$(foreach b,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(call build-debug-bootimage-target,$b))
endif # TARGET_NO_KERNEL
+endif # INSTALLED_BOOTIMAGE_TARGET
ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
ifeq ($(BUILDING_RAMDISK_IMAGE),true)
# -----------------------------------------------------------------
# vendor debug ramdisk
# Combines vendor ramdisk files and debug ramdisk files to build the vendor debug ramdisk.
-INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor-ramdisk-debug.cpio$(RAMDISK_EXT)
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_FILES := $(INTERNAL_DEBUG_RAMDISK_FILES)
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_RAMDISK_DIR := $(TARGET_VENDOR_RAMDISK_OUT)
+INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot-debug)/vendor-ramdisk-debug.cpio$(RAMDISK_EXT)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_FILES := $(INTERNAL_DEBUG_RAMDISK_FILES)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_RAMDISK_DIR := $(TARGET_VENDOR_RAMDISK_OUT)
+
+ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+endif
INTERNAL_VENDOR_DEBUG_RAMDISK_FILES := $(filter $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/%, \
$(ALL_GENERATED_SOURCES) \
@@ -2121,16 +2344,16 @@
# if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/vendor_debug_ramdisk.
# But the path of $(VENDOR_DEBUG_RAMDISK_DIR) to build the vendor debug ramdisk, is always
# $(PRODUCT_OUT)/vendor_debug_ramdisk.
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/debug_ramdisk
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
$(call pretty,"Target vendor debug ram disk: $@")
mkdir -p $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)
touch $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/force_debuggable
$(foreach debug_file,$(DEBUG_RAMDISK_FILES), \
- cp -f $(debug_file) $(subst $(PRODUCT_OUT)/debug_ramdisk,$(PRODUCT_OUT)/vendor_debug_ramdisk,$(debug_file)) &&) true
- rsync -a $(VENDOR_RAMDISK_DIR)/ $(VENDOR_DEBUG_RAMDISK_DIR)
- $(MKBOOTFS) -d $(TARGET_OUT) $(VENDOR_DEBUG_RAMDISK_DIR) | $(COMPRESSION_COMMAND) > $@
+ cp -f $(debug_file) $(patsubst $(DEBUG_RAMDISK_DIR)/%,$(VENDOR_DEBUG_RAMDISK_DIR)/%,$(debug_file)) &&) true
+ $(MKBOOTFS) -d $(TARGET_OUT) $(VENDOR_RAMDISK_DIR) $(VENDOR_DEBUG_RAMDISK_DIR) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk-debug.txt
INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK:.txt=.json)
@@ -2139,7 +2362,7 @@
# The vendor debug ramdisk will rsync from $(TARGET_VENDOR_RAMDISK_OUT) and $(INTERNAL_DEBUG_RAMDISK_FILES),
# so we have to wait for the vendor debug ramdisk to be built before generating the installed file list.
-$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
echo Installed file list: $@
mkdir -p $(dir $@)
@@ -2170,9 +2393,10 @@
endif
# Depends on vendor_boot.img and vendor-ramdisk-debug.cpio.gz to build the new vendor_boot-debug.img
-$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
$(call pretty,"Target vendor_boot debug image: $@")
- $(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) --vendor_boot $@
+ $(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET) $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS) --vendor_boot $@
$(call assert-max-image-size,$@,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
$(if $(BOARD_AVB_VENDOR_BOOT_KEY_PATH),$(call test-key-sign-vendor-bootimage,$@))
@@ -2268,6 +2492,27 @@
endif # TARGET_NO_KERNEL
endif # BOARD_BUILD_SYSTEM_ROOT_IMAGE is not true
+# Creates a compatibility symlink between two partitions, e.g. /system/vendor to /vendor
+# $1: from location (e.g $(TARGET_OUT)/vendor)
+# $2: destination location (e.g. /vendor)
+# $3: partition image name (e.g. vendor.img)
+define create-partition-compat-symlink
+$(eval \
+$1:
+ @echo Symlink $(patsubst $(PRODUCT_OUT)/%,%,$1) to $2
+ mkdir -p $(dir $1)
+ if [ -d $1 ] && [ ! -h $1 ]; then \
+ echo 'Non-symlink $1 detected!' 1>&2; \
+ echo 'You cannot install files to $1 while building a separate $3!' 1>&2; \
+ exit 1; \
+ fi
+ ln -sfn $2 $1
+$1: .KATI_SYMLINK_OUTPUTS := $1
+)
+$1
+endef
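Roughly, a call like $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img) evals a rule along these lines (simplified sketch) and returns the symlink path so callers can append it to their image file lists:

    $(TARGET_OUT)/vendor:
        mkdir -p $(dir $@)
        # fail if a real (non-symlink) directory already exists at this path
        ln -sfn /vendor $@
    $(TARGET_OUT)/vendor: .KATI_SYMLINK_OUTPUTS := $(TARGET_OUT)/vendor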
+
+
# -----------------------------------------------------------------
# system image
@@ -2275,6 +2520,21 @@
$(ALL_GENERATED_SOURCES) \
$(ALL_DEFAULT_INSTALLED_MODULES)))
+# Create symlink /system/vendor to /vendor if necessary.
+ifdef BOARD_USES_VENDORIMAGE
+ INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+endif
+
+# Create symlink /system/product to /product if necessary.
+ifdef BOARD_USES_PRODUCTIMAGE
+ INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+endif
+
+# Create symlink /system/system_ext to /system_ext if necessary.
+ifdef BOARD_USES_SYSTEM_EXTIMAGE
+ INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+endif
+
FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
# ASAN libraries in the system image - add dependency.
@@ -2290,6 +2550,15 @@
# -----------------------------------------------------------------
ifdef BUILDING_SYSTEM_IMAGE
+# Install the system linker configuration.
+# Collect all available stub libraries installed in system and install them with the predefined linker configuration.
+SYSTEM_LINKER_CONFIG := $(TARGET_OUT)/etc/linker.config.pb
+$(SYSTEM_LINKER_CONFIG) : $(INTERNAL_SYSTEMIMAGE_FILES) $(LINKER_CONFIG_PATH_system_linker_config) | conv_linker_config
+ $(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $(LINKER_CONFIG_PATH_system_linker_config)\
+ --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
+
+FULL_SYSTEMIMAGE_DEPS += $(SYSTEM_LINKER_CONFIG)
+
# installed file list
# Depending on anything that $(BUILT_SYSTEMIMAGE) depends on.
# We put installed-files.txt ahead of image itself in the dependency graph
@@ -2314,57 +2583,9 @@
$(call intermediates-dir-for,PACKAGING,systemimage)
BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
-# Create symlink /system/vendor to /vendor if necessary.
-ifdef BOARD_USES_VENDORIMAGE
-define create-system-vendor-symlink
-$(hide) if [ -d $(TARGET_OUT)/vendor ] && [ ! -h $(TARGET_OUT)/vendor ]; then \
- echo 'Non-symlink $(TARGET_OUT)/vendor detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT)/vendor while building a separate vendor.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /vendor $(TARGET_OUT)/vendor
-endef
-else
-define create-system-vendor-symlink
-endef
-endif
-
-# Create symlink /system/product to /product if necessary.
-ifdef BOARD_USES_PRODUCTIMAGE
-define create-system-product-symlink
-$(hide) if [ -d $(TARGET_OUT)/product ] && [ ! -h $(TARGET_OUT)/product ]; then \
- echo 'Non-symlink $(TARGET_OUT)/product detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT)/product while building a separate product.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /product $(TARGET_OUT)/product
-endef
-else
-define create-system-product-symlink
-endef
-endif
-
-# Create symlink /system/system_ext to /system_ext if necessary.
-ifdef BOARD_USES_SYSTEM_EXTIMAGE
-define create-system-system_ext-symlink
-$(hide) if [ -d $(TARGET_OUT)/system_ext ] && [ ! -h $(TARGET_OUT)/system_ext ]; then \
- echo 'Non-symlink $(TARGET_OUT)/system_ext detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT)/system_ext while building a separate system_ext.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /system_ext $(TARGET_OUT)/system_ext
-endef
-else
-define create-system-system_ext-symlink
-endef
-endif
-
# $(1): output file
define build-systemimage-target
@echo "Target system fs image: $(1)"
- $(call create-system-vendor-symlink)
- $(call create-system-product-symlink)
- $(call create-system-system_ext-symlink)
@mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
$(call generate-image-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt,system, \
skip_fsck=true)
@@ -2407,11 +2628,11 @@
$(RECOVERY_FROM_BOOT_PATCH): PRIVATE_DIFF_TOOL := $(diff_tool)
$(RECOVERY_FROM_BOOT_PATCH): \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
+ $(firstword $(INSTALLED_BOOTIMAGE_TARGET)) \
$(diff_tool)
@echo "Construct recovery from boot"
mkdir -p $(dir $@)
- $(PRIVATE_DIFF_TOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
+ $(PRIVATE_DIFF_TOOL) $(firstword $(INSTALLED_BOOTIMAGE_TARGET)) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
else # $(BOARD_USES_FULL_RECOVERY_IMAGE) == true
RECOVERY_FROM_BOOT_PATCH := $(INSTALLED_RECOVERYIMAGE_TARGET)
endif # BOARD_USES_FULL_RECOVERY_IMAGE
@@ -2647,29 +2868,10 @@
$(filter $(TARGET_OUT_VENDOR)/%,\
$(ALL_DEFAULT_INSTALLED_MODULES))
-INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
-INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
-$(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
-$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
- @echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
# Create symlink /vendor/odm to /odm if necessary.
ifdef BOARD_USES_ODMIMAGE
-define create-vendor-odm-symlink
-$(hide) if [ -d $(TARGET_OUT_VENDOR)/odm ] && [ ! -h $(TARGET_OUT_VENDOR)/odm ]; then \
- echo 'Non-symlink $(TARGET_OUT_VENDOR)/odm detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT_VENDOR)/odm while building a separate odm.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /odm $(TARGET_OUT_VENDOR)/odm
-endef
-else
-define create-vendor-odm-symlink
-endef
+ INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
endif
# Create symlinks for vendor_dlkm on devices with a vendor_dlkm partition:
@@ -2687,48 +2889,18 @@
# The vendor DLKMs and other vendor_dlkm files must not be accessed using other paths because they
# are not guaranteed to exist on all devices.
ifdef BOARD_USES_VENDOR_DLKMIMAGE
-define create-vendor-vendor_dlkm-symlink
-$(hide) mkdir -p $(TARGET_OUT_VENDOR)/lib
-$(hide) if [ -d $(TARGET_OUT_VENDOR)/lib/modules ] && [ ! -h $(TARGET_OUT_VENDOR)/lib/modules ]; then \
- echo 'Non-symlink $(TARGET_OUT_VENDOR)/lib/modules detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT_VENDOR)/lib/modules while building a separate vendor_dlkm.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /vendor_dlkm/lib/modules $(TARGET_OUT_VENDOR)/lib/modules
-endef
-else
-define create-vendor-vendor_dlkm-symlink
-endef
+ INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
endif
-# Create symlinks for odm_dlkm on devices with a odm_dlkm partition:
-# /odm/lib/modules -> /odm_dlkm/lib/modules
-#
-# On devices with a odm_dlkm partition,
-# - /odm/lib/modules is a symlink to a directory that stores odm DLKMs.
-# - /odm_dlkm/{etc,...} store other odm_dlkm files directly. The odm_dlkm partition is
-# mounted at /odm_dlkm at runtime and the symlinks created in system/core/rootdir/Android.mk
-# are hidden.
-# On devices without a odm_dlkm partition,
-# - /odm/lib/modules stores odm DLKMs directly.
-# - /odm_dlkm/{etc,...} are symlinks to directories that store other odm_dlkm files.
-# See system/core/rootdir/Android.mk for a list of created symlinks.
-# The odm DLKMs and other odm_dlkm files must not be accessed using other paths because they
-# are not guaranteed to exist on all devices.
-ifdef BOARD_USES_ODM_DLKMIMAGE
-define create-odm-odm_dlkm-symlink
-$(hide) mkdir -p $(TARGET_OUT_ODM)/lib
-$(hide) if [ -d $(TARGET_OUT_ODM)/lib/modules ] && [ ! -h $(TARGET_OUT_ODM)/lib/modules ]; then \
- echo 'Non-symlink $(TARGET_OUT_ODM)/lib/modules detected!' 1>&2; \
- echo 'You cannot install files to $(TARGET_OUT_ODM)/lib/modules while building a separate odm_dlkm.img!' 1>&2; \
- exit 1; \
-fi
-$(hide) ln -sf /odm_dlkm/lib/modules $(TARGET_OUT_ODM)/lib/modules
-endef
-else
-define create-odm-odm_dlkm-symlink
-endef
-endif
+INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
+INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
+$(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
+$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
+ @echo Installed file list: $@
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
+ $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
vendorimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,vendor)
@@ -2736,9 +2908,6 @@
define build-vendorimage-target
$(call pretty,"Target vendor fs image: $(INSTALLED_VENDORIMAGE_TARGET)")
@mkdir -p $(TARGET_OUT_VENDOR)
- $(call create-vendor-odm-symlink)
- $(call create-vendor-vendor_dlkm-symlink)
- $(call create-odm-odm_dlkm-symlink)
@mkdir -p $(vendorimage_intermediates) && rm -rf $(vendorimage_intermediates)/vendor_image_info.txt
$(call generate-image-prop-dictionary, $(vendorimage_intermediates)/vendor_image_info.txt,vendor,skip_fsck=true)
PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
@@ -2879,6 +3048,24 @@
$(filter $(TARGET_OUT_ODM)/%,\
$(ALL_DEFAULT_INSTALLED_MODULES))
+# Create symlinks for odm_dlkm on devices with an odm_dlkm partition:
+# /odm/lib/modules -> /odm_dlkm/lib/modules
+#
+# On devices with an odm_dlkm partition,
+# - /odm/lib/modules is a symlink to a directory that stores odm DLKMs.
+# - /odm_dlkm/{etc,...} store other odm_dlkm files directly. The odm_dlkm partition is
+# mounted at /odm_dlkm at runtime and the symlinks created in system/core/rootdir/Android.mk
+# are hidden.
+# On devices without an odm_dlkm partition,
+# - /odm/lib/modules stores odm DLKMs directly.
+# - /odm_dlkm/{etc,...} are symlinks to directories that store other odm_dlkm files.
+# See system/core/rootdir/Android.mk for a list of created symlinks.
+# The odm DLKMs and other odm_dlkm files must not be accessed using other paths because they
+# are not guaranteed to exist on all devices.
+ifdef BOARD_USES_ODM_DLKMIMAGE
+ INTERNAL_ODMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+endif
+
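As a hedged illustration of the compat-symlink mechanism used just above (the variable names appear elsewhere in this change; the values are hypothetical), a board that ships its ODM kernel modules in a dedicated partition might set:

    # Hypothetical BoardConfig.mk fragment; file-system type and size are placeholders.
    BOARD_USES_ODM_DLKMIMAGE := true
    BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE := ext4
    BOARD_ODM_DLKMIMAGE_PARTITION_RESERVED_SIZE := 4194304
    # The build then emits the /odm/lib/modules -> /odm_dlkm/lib/modules compat symlink
    # via create-partition-compat-symlink and packages the modules into odm_dlkm.img.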
INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
INSTALLED_FILES_JSON_ODM := $(INSTALLED_FILES_FILE_ODM:.txt=.json)
$(INSTALLED_FILES_FILE_ODM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM)
@@ -3046,6 +3233,26 @@
endif # BOARD_PREBUILT_DTBOIMAGE
+# -----------------------------------------------------------------
+# Protected VM firmware image
+ifdef BOARD_PREBUILT_PVMFWIMAGE
+INSTALLED_PVMFWIMAGE_TARGET := $(PRODUCT_OUT)/pvmfw.img
+
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(INSTALLED_PVMFWIMAGE_TARGET): $(BOARD_PREBUILT_PVMFWIMAGE) $(AVBTOOL) $(BOARD_AVB_PVMFW_KEY_PATH)
+ cp $(BOARD_PREBUILT_PVMFWIMAGE) $@
+ $(AVBTOOL) add_hash_footer \
+ --image $@ \
+ --partition_size $(BOARD_PVMFWIMG_PARTITION_SIZE) \
+ --partition_name pvmfw $(INTERNAL_AVB_PVMFW_SIGNING_ARGS) \
+ $(BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS)
+else
+$(INSTALLED_PVMFWIMAGE_TARGET): $(BOARD_PREBUILT_PVMFWIMAGE)
+ cp $(BOARD_PREBUILT_PVMFWIMAGE) $@
+endif
+
+endif # BOARD_PREBUILT_PVMFWIMAGE
+
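For orientation, the AVB branch above amounts to a copy plus an avbtool add_hash_footer call, as sketched below; the paths, partition size, algorithm, and key are placeholders, and INTERNAL_AVB_PVMFW_SIGNING_ARGS normally carries the real --algorithm/--key pair derived from the BOARD_AVB_PVMFW_* variables.

    # Illustrative only; real values come from the board configuration.
    cp path/to/prebuilt/pvmfw.img $PRODUCT_OUT/pvmfw.img
    avbtool add_hash_footer \
        --image $PRODUCT_OUT/pvmfw.img \
        --partition_size 1048576 \
        --partition_name pvmfw \
        --algorithm SHA256_RSA4096 \
        --key path/to/pvmfw_signing_key.pem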
# Returns a list of image targets corresponding to the given list of partitions. For example, it
# returns "$(INSTALLED_PRODUCTIMAGE_TARGET)" for "product", or "$(INSTALLED_SYSTEMIMAGE_TARGET)
# $(INSTALLED_VENDORIMAGE_TARGET)" for "system vendor".
@@ -3151,22 +3358,22 @@
BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += \
--prop com.android.build.system.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
- --prop com.android.build.system.os_version:$(PLATFORM_VERSION) \
+ --prop com.android.build.system.os_version:$(PLATFORM_VERSION_LAST_STABLE) \
--prop com.android.build.system.security_patch:$(PLATFORM_SECURITY_PATCH)
BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += \
--prop com.android.build.product.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
- --prop com.android.build.product.os_version:$(PLATFORM_VERSION) \
+ --prop com.android.build.product.os_version:$(PLATFORM_VERSION_LAST_STABLE) \
--prop com.android.build.product.security_patch:$(PLATFORM_SECURITY_PATCH)
BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS += \
--prop com.android.build.system_ext.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
- --prop com.android.build.system_ext.os_version:$(PLATFORM_VERSION) \
+ --prop com.android.build.system_ext.os_version:$(PLATFORM_VERSION_LAST_STABLE) \
--prop com.android.build.system_ext.security_patch:$(PLATFORM_SECURITY_PATCH)
BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
--prop com.android.build.boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
- --prop com.android.build.boot.os_version:$(PLATFORM_VERSION)
+ --prop com.android.build.boot.os_version:$(PLATFORM_VERSION_LAST_STABLE)
BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS += \
--prop com.android.build.vendor_boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
@@ -3176,11 +3383,11 @@
BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS += \
--prop com.android.build.vendor.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
- --prop com.android.build.vendor.os_version:$(PLATFORM_VERSION)
+ --prop com.android.build.vendor.os_version:$(PLATFORM_VERSION_LAST_STABLE)
BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS += \
--prop com.android.build.odm.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
- --prop com.android.build.odm.os_version:$(PLATFORM_VERSION)
+ --prop com.android.build.odm.os_version:$(PLATFORM_VERSION_LAST_STABLE)
BOARD_AVB_VENDOR_DLKM_ADD_HASHTREE_FOOTER_ARGS += \
--prop com.android.build.vendor_dlkm.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
@@ -3193,6 +3400,9 @@
BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS += \
--prop com.android.build.dtbo.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE)
+BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.pvmfw.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE)
+
# The following vendor- and odm-specific images need an explicit SPL set per board.
ifdef BOOT_SECURITY_PATCH
BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
@@ -3219,9 +3429,15 @@
--prop com.android.build.odm_dlkm.security_patch:$(ODM_DLKM_SECURITY_PATCH)
endif
+ifdef PVMFW_SECURITY_PATCH
+BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS += \
+ --prop com.android.build.pvmfw.security_patch:$(PVMFW_SECURITY_PATCH)
+endif
+
BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
VENDOR_BOOT_FOOTER_ARGS := BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS
DTBO_FOOTER_ARGS := BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS
+PVMFW_FOOTER_ARGS := BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS
SYSTEM_FOOTER_ARGS := BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS
VENDOR_FOOTER_ARGS := BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS
RECOVERY_FOOTER_ARGS := BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS
@@ -3336,6 +3552,10 @@
$(eval $(call check-and-set-avb-args,dtbo))
endif
+ifdef INSTALLED_PVMFWIMAGE_TARGET
+$(eval $(call check-and-set-avb-args,pvmfw))
+endif
+
ifdef INSTALLED_RECOVERYIMAGE_TARGET
$(eval $(call check-and-set-avb-args,recovery))
endif
@@ -3421,6 +3641,9 @@
$(if $(BOARD_AVB_DTBO_KEY_PATH),\
$(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_DTBO_KEY_PATH) \
--output $(1)/dtbo.avbpubkey)
+ $(if $(BOARD_AVB_PVMFW_KEY_PATH),\
+ $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_PVMFW_KEY_PATH) \
+ --output $(1)/pvmfw.avbpubkey)
$(if $(BOARD_AVB_RECOVERY_KEY_PATH),\
$(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_RECOVERY_KEY_PATH) \
--output $(1)/recovery.avbpubkey)
@@ -3503,6 +3726,7 @@
$(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
$(INSTALLED_ODM_DLKMIMAGE_TARGET) \
$(INSTALLED_DTBOIMAGE_TARGET) \
+ $(INSTALLED_PVMFWIMAGE_TARGET) \
$(INSTALLED_CUSTOMIMAGES_TARGET) \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
$(INSTALLED_VBMETA_SYSTEMIMAGE_TARGET) \
@@ -3521,6 +3745,18 @@
endif # BOARD_AVB_ENABLE
+# List of files from all images
+INTERNAL_ALLIMAGES_FILES := \
+ $(FULL_SYSTEMIMAGE_DEPS) \
+ $(INTERNAL_RAMDISK_FILES) \
+ $(INTERNAL_USERDATAIMAGE_FILES) \
+ $(INTERNAL_VENDORIMAGE_FILES) \
+ $(INTERNAL_PRODUCTIMAGE_FILES) \
+ $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
+ $(INTERNAL_ODMIMAGE_FILES) \
+ $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
+ $(INTERNAL_ODM_DLKMIMAGE_FILES) \
+
# -----------------------------------------------------------------
# Check VINTF of build
@@ -3539,13 +3775,7 @@
$(TARGET_OUT_PRODUCT)/etc/vintf/% \
$(TARGET_OUT_SYSTEM_EXT)/etc/vintf/% \
-check_vintf_common_srcs := $(sort $(filter $(check_vintf_common_srcs_patterns), \
- $(INTERNAL_SYSTEMIMAGE_FILES) \
- $(INTERNAL_VENDORIMAGE_FILES) \
- $(INTERNAL_ODMIMAGE_FILES) \
- $(INTERNAL_PRODUCTIMAGE_FILES) \
- $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
-))
+check_vintf_common_srcs := $(sort $(filter $(check_vintf_common_srcs_patterns),$(INTERNAL_ALLIMAGES_FILES)))
check_vintf_common_srcs_patterns :=
check_vintf_has_system :=
@@ -3562,11 +3792,19 @@
check_vintf_system_deps := $(filter $(TARGET_OUT)/etc/vintf/%, $(check_vintf_common_srcs))
ifneq ($(check_vintf_system_deps),)
check_vintf_has_system := true
+
check_vintf_system_log := $(intermediates)/check_vintf_system_log
check_vintf_all_deps += $(check_vintf_system_log)
$(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
check_vintf_system_log :=
+
+vintffm_log := $(intermediates)/vintffm_log
+check_vintf_all_deps += $(vintffm_log)
+$(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps)
+ @( $< --check --dirmap /system:$(TARGET_OUT) \
+ $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
+
endif # check_vintf_system_deps
check_vintf_system_deps :=
@@ -3654,7 +3892,7 @@
$(BUILT_KERNEL_CONFIGS_FILE): $(EXTRACT_KERNEL) $(firstword $(INSTALLED_KERNEL_TARGET))
$< --tools $(PRIVATE_DECOMPRESS_TOOLS) --input $(firstword $(INSTALLED_KERNEL_TARGET)) \
--output-configs $@ \
- --output-version $(BUILT_KERNEL_VERSION_FILE)
+ --output-release $(BUILT_KERNEL_VERSION_FILE)
my_decompress_tools :=
@@ -3663,7 +3901,7 @@
endif # INSTALLED_KERNEL_TARGET
-check_vintf_compatible_args += --kernel $$(cat $(BUILT_KERNEL_VERSION_FILE)):$(BUILT_KERNEL_CONFIGS_FILE)
+check_vintf_compatible_args += --kernel $(BUILT_KERNEL_VERSION_FILE):$(BUILT_KERNEL_CONFIGS_FILE)
check_vintf_compatible_deps += $(BUILT_KERNEL_CONFIGS_FILE) $(BUILT_KERNEL_VERSION_FILE)
endif # PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
@@ -3848,6 +4086,7 @@
fec \
fs_config \
generate_verity_key \
+ host_init_verifier \
img2simg \
img_from_target_files \
imgdiff \
@@ -3864,6 +4103,8 @@
mkbootimg \
mke2fs \
mke2fs.conf \
+ mkfs.erofs \
+ mkerofsimage.sh \
mkf2fsuserimg.sh \
mksquashfs \
mksquashfsimage.sh \
@@ -3877,6 +4118,7 @@
signapk \
simg2img \
sload_f2fs \
+ toybox \
tune2fs \
unpack_bootimg \
update_host_simulator \
@@ -4045,6 +4287,9 @@
$(hide) echo "board_uses_vendorimage=true" >> $@
endif
ifeq ($(BOARD_AVB_ENABLE),true)
+ifeq ($(BUILDING_VBMETA_IMAGE),true)
+ $(hide) echo "avb_building_vbmeta_image=true" >> $@
+endif # BUILDING_VBMETA_IMAGE
$(hide) echo "avb_enable=true" >> $@
$(hide) echo "avb_vbmeta_key_path=$(BOARD_AVB_KEY_PATH)" >> $@
$(hide) echo "avb_vbmeta_algorithm=$(BOARD_AVB_ALGORITHM)" >> $@
@@ -4121,6 +4366,18 @@
endif # BOARD_AVB_DTBO_KEY_PATH
endif # BOARD_AVB_ENABLE
endif # BOARD_PREBUILT_DTBOIMAGE
+ifdef BOARD_PREBUILT_PVMFWIMAGE
+ $(hide) echo "has_pvmfw=true" >> $@
+ifeq ($(BOARD_AVB_ENABLE),true)
+ $(hide) echo "pvmfw_size=$(BOARD_PVMFWIMG_PARTITION_SIZE)" >> $@
+ $(hide) echo "avb_pvmfw_add_hash_footer_args=$(BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS)" >> $@
+ifdef BOARD_AVB_PVMFW_KEY_PATH
+ $(hide) echo "avb_pvmfw_key_path=$(BOARD_AVB_PVMFW_KEY_PATH)" >> $@
+ $(hide) echo "avb_pvmfw_algorithm=$(BOARD_AVB_PVMFW_ALGORITHM)" >> $@
+ $(hide) echo "avb_pvmfw_rollback_index_location=$(BOARD_AVB_PVMFW_ROLLBACK_INDEX_LOCATION)" >> $@
+endif # BOARD_AVB_PVMFW_KEY_PATH
+endif # BOARD_AVB_ENABLE
+endif # BOARD_PREBUILT_PVMFWIMAGE
$(call dump-dynamic-partitions-info,$@)
@# VINTF checks
ifeq ($(PRODUCT_ENFORCE_VINTF_MANIFEST),true)
@@ -4138,6 +4395,15 @@
ifdef DEVICE_MANIFEST_FILE
$(hide) echo "vintf_include_empty_vendor_sku=true" >> $@
endif
+ifeq ($(BOARD_BOOTLOADER_IN_UPDATE_PACKAGE),true)
+ $(hide) echo "bootloader_in_update_package=true" >> $@
+endif
+ifeq ($(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE),true)
+ $(hide) echo "exclude_kernel_from_recovery_image=true" >> $@
+endif
+ifneq ($(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST),)
+ $(hide) echo "partial_ota_update_partitions_list=$(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST)" >> $@
+endif
.PHONY: misc_info
misc_info: $(INSTALLED_MISC_INFO_TARGET)
@@ -4196,8 +4462,11 @@
$(BUILT_TARGET_FILES_PACKAGE): $(updater_dep)
# If we are using recovery as boot, output recovery files to BOOT/.
+# If we are moving recovery resources to vendor_boot, output recovery files to VENDOR_BOOT/.
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := BOOT
+else ifeq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
+$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := VENDOR_BOOT
else
$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
endif
@@ -4278,6 +4547,8 @@
echo "super_partition_error_limit=$(BOARD_SUPER_PARTITION_ERROR_LIMIT)" >> $(1))
$(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA)), \
echo "virtual_ab=true" >> $(1))
+ $(if $(filter true,$(PRODUCT_VIRTUAL_AB_COMPRESSION)), \
+ echo "virtual_ab_compression=true" >> $(1))
$(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT)), \
echo "virtual_ab_retrofit=true" >> $(1))
endef
@@ -4286,36 +4557,104 @@
# full system image deps, we speed up builds that do not build the system
# image.
ifdef BUILDING_SYSTEM_IMAGE
-$(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+ $(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+endif
+
+ifdef BUILDING_USERDATA_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_USERDATAIMAGE_FILES)
+endif
+
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+endif
+
+ifdef BUILDING_VENDOR_BOOT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+endif
+
+ifdef BUILDING_RECOVERY_IMAGE
+ # TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other
+ # BUILT_TARGET_FILES_PACKAGE dependencies because currently there are cp/rsync/rm
+ # commands in build-recoveryimage-target, which would touch the files under
+ # TARGET_RECOVERY_OUT and race with packaging target-files.zip.
+ ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+ else
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_RECOVERYIMAGE_TARGET)
+ endif
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERYIMAGE_FILES)
+endif
+
+# Conditionally depend on the image's file list when the image is being built, so the
+# target-files.zip rule doesn't have to wait for the image creation rule; otherwise
+# depend on the prebuilt image if one is provided.
+
+ifdef BUILDING_VENDOR_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDORIMAGE_FILES)
+else ifdef BOARD_PREBUILT_VENDORIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDORIMAGE_TARGET)
+endif
+
+ifdef BUILDING_PRODUCT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_PRODUCTIMAGE_FILES)
+else ifdef BOARD_PREBUILT_PRODUCTIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_PRODUCTIMAGE_TARGET)
+endif
+
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+endif
+
+ifdef BUILDING_BOOT_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RAMDISK_FILES)
+endif
+ifneq (,$(INTERNAL_PREBUILT_BOOTIMAGE) $(filter true,$(BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES)))
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+endif
+
+ifdef BUILDING_ODM_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODMIMAGE_FILES)
+else ifdef BOARD_PREBUILT_ODMIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODMIMAGE_TARGET)
+endif
+
+ifdef BUILDING_VENDOR_DLKM_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
+endif
+
+ifdef BUILDING_ODM_DLKM_IMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODM_DLKMIMAGE_FILES)
+else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
+ $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
endif
ifeq ($(BUILD_QEMU_IMAGES),true)
-MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
-$(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
+ MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
+ $(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
+endif
+
+ifdef BOARD_PREBUILT_BOOTLOADER
+$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTLOADER_MODULE)
+droidcore: $(INSTALLED_BOOTLOADER_MODULE)
endif
# Depending on the various images guarantees that the underlying
# directories are up-to-date.
$(BUILT_TARGET_FILES_PACKAGE): \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) \
$(INSTALLED_RADIOIMAGE_TARGET) \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
$(INSTALLED_CACHEIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
- $(INSTALLED_VBMETAIMAGE_TARGET) \
- $(INSTALLED_ODMIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
- $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
$(INSTALLED_DTBOIMAGE_TARGET) \
+ $(INSTALLED_PVMFWIMAGE_TARGET) \
$(INSTALLED_CUSTOMIMAGES_TARGET) \
- $(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
$(INSTALLED_KERNEL_TARGET) \
+ $(INSTALLED_RAMDISK_TARGET) \
$(INSTALLED_DTBIMAGE_TARGET) \
$(INSTALLED_2NDBOOTLOADER_TARGET) \
$(BOARD_PREBUILT_DTBOIMAGE) \
@@ -4341,25 +4680,21 @@
$(BUILT_KERNEL_VERSION_FILE) \
| $(ACP)
@echo "Package target files: $@"
- $(call create-system-vendor-symlink)
- $(call create-system-product-symlink)
- $(call create-system-system_ext-symlink)
- $(call create-vendor-odm-symlink)
- $(call create-vendor-vendor_dlkm-symlink)
- $(call create-odm-odm_dlkm-symlink)
$(hide) rm -rf $@ $@.list $(zip_root)
$(hide) mkdir -p $(dir $@) $(zip_root)
-ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
+ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))$(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
@# Components of the recovery image
$(hide) mkdir -p $(zip_root)/$(PRIVATE_RECOVERY_OUT)
$(hide) $(call package_files-copy-root, \
$(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
ifdef INSTALLED_KERNEL_TARGET
+ifneq (,$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/
+else ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE))
+ cp $(firstword $(INSTALLED_KERNEL_TARGET)) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
endif
-ifeq (truetrue,$(strip $(BUILDING_VENDOR_BOOT_IMAGE))$(strip $(AB_OTA_UPDATER)))
- echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
-else # not (BUILDING_VENDOR_BOOT_IMAGE and AB_OTA_UPDATER)
+endif
+ifneq (truetrue,$(strip $(BUILDING_VENDOR_BOOT_IMAGE))$(strip $(BOARD_USES_RECOVERY_AS_BOOT)))
ifdef INSTALLED_2NDBOOTLOADER_TARGET
cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second
endif
@@ -4376,16 +4711,18 @@
ifdef INSTALLED_DTBIMAGE_TARGET
cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/dtb
endif
+ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE))
ifdef INTERNAL_KERNEL_CMDLINE
echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
-endif
+endif # INTERNAL_KERNEL_CMDLINE != ""
+endif # BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE != true
ifdef BOARD_KERNEL_BASE
echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/base
endif
ifdef BOARD_KERNEL_PAGESIZE
echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/pagesize
endif
-endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET not defined
+endif # not (BUILDING_VENDOR_BOOT_IMAGE and BOARD_USES_RECOVERY_AS_BOOT)
endif # INSTALLED_RECOVERYIMAGE_TARGET defined or BOARD_USES_RECOVERY_AS_BOOT is true
@# Components of the boot image
$(hide) mkdir -p $(zip_root)/BOOT
@@ -4399,25 +4736,25 @@
$(TARGET_RAMDISK_OUT),$(zip_root)/BOOT/RAMDISK)
endif
ifdef INSTALLED_KERNEL_TARGET
- $(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/kernel
+ $(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/
endif
-ifndef INSTALLED_VENDOR_BOOTIMAGE_TARGET
+ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE))
+ echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline
+else ifndef INSTALLED_VENDOR_BOOTIMAGE_TARGET # && BOARD_USES_GENERIC_KERNEL_IMAGE != true
+ echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline
ifdef INSTALLED_2NDBOOTLOADER_TARGET
cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/BOOT/second
endif
ifdef INSTALLED_DTBIMAGE_TARGET
cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/BOOT/dtb
endif
- echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline
ifdef BOARD_KERNEL_BASE
echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/BOOT/base
endif
ifdef BOARD_KERNEL_PAGESIZE
echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/BOOT/pagesize
endif
-else # INSTALLED_VENDOR_BOOTIMAGE_TARGET defined
- echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline
-endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET defined
+endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET == "" && BOARD_USES_GENERIC_KERNEL_IMAGE != true
endif # BOARD_USES_RECOVERY_AS_BOOT not true
$(hide) $(foreach t,$(INSTALLED_RADIOIMAGE_TARGET),\
mkdir -p $(zip_root)/RADIO; \
@@ -4436,6 +4773,19 @@
echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/VENDOR_BOOT/pagesize
endif
echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/VENDOR_BOOT/vendor_cmdline
+ifdef BOARD_VENDOR_RAMDISK_FRAGMENTS
+ echo "$(BOARD_VENDOR_RAMDISK_FRAGMENTS)" > "$(zip_root)/VENDOR_BOOT/vendor_ramdisk_fragments"
+ $(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
+ mkdir -p $(zip_root)/VENDOR_BOOT/RAMDISK_FRAGMENTS/$(vendor_ramdisk_fragment); \
+ echo "$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)" > "$(zip_root)/VENDOR_BOOT/RAMDISK_FRAGMENTS/$(vendor_ramdisk_fragment)/mkbootimg_args"; \
+ $(eval prebuilt_ramdisk := $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).PREBUILT)) \
+ $(if $(prebuilt_ramdisk), \
+ cp "$(prebuilt_ramdisk)" "$(zip_root)/VENDOR_BOOT/RAMDISK_FRAGMENTS/$(vendor_ramdisk_fragment)/prebuilt_ramdisk";, \
+ $(call package_files-copy-root, \
+ $(VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR), \
+ $(zip_root)/VENDOR_BOOT/RAMDISK_FRAGMENTS/$(vendor_ramdisk_fragment)/RAMDISK); \
+ ))
+endif # BOARD_VENDOR_RAMDISK_FRAGMENTS != ""
endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET
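To make the fragment loop above concrete, a minimal sketch of how a board might declare vendor ramdisk fragments, using only the per-fragment properties this rule reads (.PREBUILT and .MKBOOTIMG_ARGS); the fragment names, path, and mkbootimg arguments are hypothetical:

    # Hypothetical BoardConfig.mk fragment:
    BOARD_VENDOR_RAMDISK_FRAGMENTS := dlkm recovery
    BOARD_VENDOR_RAMDISK_FRAGMENT.dlkm.MKBOOTIMG_ARGS := --ramdisk_type DLKM --ramdisk_name dlkm
    BOARD_VENDOR_RAMDISK_FRAGMENT.recovery.PREBUILT := device/acme/rock/recovery_ramdisk
    BOARD_VENDOR_RAMDISK_FRAGMENT.recovery.MKBOOTIMG_ARGS := --ramdisk_type RECOVERY --ramdisk_name recovery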
ifdef BUILDING_SYSTEM_IMAGE
@# Contents of the system image
@@ -4485,6 +4835,13 @@
@# Extra contents of the OTA package
$(hide) mkdir -p $(zip_root)/OTA
$(hide) cp $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(zip_root)/OTA/
+ifdef BUILDING_RAMDISK_IMAGE
+ifeq (true,$(BOARD_IMG_USE_RAMDISK))
+ @# Contents of the ramdisk image
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_RAMDISK_TARGET) $(zip_root)/IMAGES/
+endif
+endif
ifeq ($(TARGET_OTA_ALLOW_NON_AB),true)
ifneq ($(built_ota_tools),)
$(hide) mkdir -p $(zip_root)/OTA/bin
@@ -4567,22 +4924,36 @@
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) $(zip_root)/IMAGES/
endif
-ifdef BOARD_PREBUILT_BOOTIMAGE
+ifneq (,$(INTERNAL_PREBUILT_BOOTIMAGE) $(filter true,$(BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES)))
+ifdef INSTALLED_BOOTIMAGE_TARGET
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_BOOTIMAGE_TARGET) $(zip_root)/IMAGES/
-endif
+endif # INSTALLED_BOOTIMAGE_TARGET
+endif # INTERNAL_PREBUILT_BOOTIMAGE != "" || BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES == true
ifdef BOARD_PREBUILT_ODMIMAGE
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_ODMIMAGE_TARGET) $(zip_root)/IMAGES/
endif
-ifdef BOARD_PREBUILT_VENDOR_DLKIMMAGE
+ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
$(hide) mkdir -p $(zip_root)/IMAGES
- $(hide) cp $(INSTALLED_VENDOR_DLKIMMAGE_TARGET) $(zip_root)/IMAGES/
+ $(hide) cp $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
+ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_ODM_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
endif
ifdef BOARD_PREBUILT_DTBOIMAGE
$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
endif # BOARD_PREBUILT_DTBOIMAGE
+ifdef BOARD_PREBUILT_PVMFWIMAGE
+ $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
+ $(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
+endif # BOARD_PREBUILT_PVMFWIMAGE
+ifdef BOARD_PREBUILT_BOOTLOADER
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_BOOTLOADER_MODULE) $(zip_root)/IMAGES/
+endif
ifneq ($(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)),)
$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
$(hide) $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
@@ -4661,9 +5032,7 @@
.PHONY: target-files-package
target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
-ifneq ($(filter $(MAKECMDGOALS),target-files-package),)
$(call dist-for-goals, target-files-package, $(BUILT_TARGET_FILES_PACKAGE))
-endif
# -----------------------------------------------------------------
# NDK Sysroot Package
@@ -4681,7 +5050,7 @@
# $(1): output file
# $(2): additional args
define build-ota-package-target
-PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
+PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$(dir $(ZIP2ZIP)):$$PATH \
$(OTA_FROM_TARGET_FILES) \
--verbose \
--extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
@@ -4691,18 +5060,18 @@
$(BUILT_TARGET_FILES_PACKAGE) $(1)
endef
-name := $(TARGET_PRODUCT)
+product_name := $(TARGET_PRODUCT)
ifeq ($(TARGET_BUILD_TYPE),debug)
- name := $(name)_debug
+ product_name := $(product_name)_debug
endif
-name := $(name)-ota-$(FILE_NAME_TAG)
+name := $(product_name)-ota-$(FILE_NAME_TAG)
INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
INTERNAL_OTA_METADATA := $(PRODUCT_OUT)/ota_metadata
$(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
$(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
-$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
@echo "Package OTA: $@"
$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
@@ -4710,17 +5079,14 @@
otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
ifeq ($(BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE),true)
-name := $(TARGET_PRODUCT)
-ifeq ($(TARGET_BUILD_TYPE),debug)
- name := $(name)_debug
-endif
-name := $(name)-ota-retrofit-$(FILE_NAME_TAG)
+name := $(product_name)-ota-retrofit-$(FILE_NAME_TAG)
INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): \
$(BUILT_TARGET_FILES_PACKAGE) \
- $(OTA_FROM_TARGET_FILES)
+ $(OTA_FROM_TARGET_FILES) \
+ $(INTERNAL_OTATOOLS_FILES)
@echo "Package OTA (retrofit dynamic partitions): $@"
$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --retrofit_dynamic_partitions)
@@ -4730,6 +5096,20 @@
endif # BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE
+ifneq ($(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST),)
+name := $(product_name)-partial-ota-$(FILE_NAME_TAG)
+
+INTERNAL_OTA_PARTIAL_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
+$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
+$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
+ @echo "Package partial OTA: $@"
+ $(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --partial "$(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST)")
+
+.PHONY: partialotapackage
+partialotapackage: $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET)
+
+endif # BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST
+
endif # build_ota_package
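A hedged example of wiring up the new partial-OTA goal; the partition names are illustrative and must match partitions the board actually builds and lists for updates:

    # Hypothetical BoardConfig.mk fragment:
    BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST := product system_ext
    # Then build the package defined above:
    #   m partialotapackage   # produces <product_name>-partial-ota-<FILE_NAME_TAG>.zip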
# -----------------------------------------------------------------
@@ -4737,13 +5117,12 @@
APPCOMPAT_ZIP := $(PRODUCT_OUT)/appcompat.zip
# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(APPCOMPAT_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+$(APPCOMPAT_ZIP): $(FULL_SYSTEMIMAGE_DEPS) \
+ $(INTERNAL_RAMDISK_FILES) \
+ $(INTERNAL_USERDATAIMAGE_FILES) \
+ $(INTERNAL_VENDORIMAGE_FILES) \
+ $(INTERNAL_PRODUCTIMAGE_FILES) \
+ $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
endif
$(APPCOMPAT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,appcompat)/filelist
$(APPCOMPAT_ZIP): $(SOONG_ZIP)
@@ -4766,17 +5145,7 @@
SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
- $(INSTALLED_ODMIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
- $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
- $(updater_dep)
+$(SYMBOLS_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
endif
$(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
$(SYMBOLS_ZIP): $(SOONG_ZIP)
@@ -4794,16 +5163,7 @@
endif
COVERAGE_ZIP := $(PRODUCT_OUT)/$(name).zip
ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
- $(INSTALLED_RAMDISK_TARGET) \
- $(INSTALLED_BOOTIMAGE_TARGET) \
- $(INSTALLED_USERDATAIMAGE_TARGET) \
- $(INSTALLED_VENDORIMAGE_TARGET) \
- $(INSTALLED_PRODUCTIMAGE_TARGET) \
- $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
- $(INSTALLED_ODMIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
- $(INSTALLED_ODM_DLKMIMAGE_TARGET)
+$(COVERAGE_ZIP): $(INTERNAL_ALLIMAGES_FILES)
endif
$(COVERAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,coverage)/filelist
$(COVERAGE_ZIP): $(SOONG_ZIP)
@@ -4837,7 +5197,7 @@
name := $(name)-apps-$(FILE_NAME_TAG)
APPS_ZIP := $(PRODUCT_OUT)/$(name).zip
-$(APPS_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET)
+$(APPS_ZIP): $(FULL_SYSTEMIMAGE_DEPS)
@echo "Package apps: $@"
$(hide) rm -rf $@
$(hide) mkdir -p $(dir $@)
@@ -4859,9 +5219,12 @@
JACOCO_REPORT_CLASSES_ALL := $(PRODUCT_OUT)/jacoco-report-classes-all.jar
$(JACOCO_REPORT_CLASSES_ALL) :
@echo "Collecting uninstrumented classes"
- find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" 2>/dev/null | sort > $@.list
+ find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" -o -name "proguard_usage.zip" 2>/dev/null | sort > $@.list
$(SOONG_ZIP) -o $@ -L 0 -C $(OUT_DIR) -P out -l $@.list
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+ $(JACOCO_REPORT_CLASSES_ALL): $(INTERNAL_ALLIMAGES_FILES)
+endif
endif # EMMA_INSTRUMENT=true
@@ -4871,7 +5234,23 @@
PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-$(FILE_NAME_TAG).zip
# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(PROGUARD_DICT_ZIP): \
+$(PROGUARD_DICT_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
+endif
+$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard)/filelist
+$(PROGUARD_DICT_ZIP): $(SOONG_ZIP)
+ @echo "Packaging Proguard obfuscation dictionary files."
+ mkdir -p $(dir $@) $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS $(dir $(PRIVATE_LIST_FILE))
+ find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_dictionary | \
+ sed -e 's/\(.*\)\/proguard_dictionary/\0\n\1\/classes.jar/' > $(PRIVATE_LIST_FILE)
+ $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
+
+#------------------------------------------------------------------
+# A zip of Proguard usage files.
+#
+PROGUARD_USAGE_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-usage-$(FILE_NAME_TAG).zip
+# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(PROGUARD_USAGE_ZIP): \
$(INSTALLED_SYSTEMIMAGE_TARGET) \
$(INSTALLED_RAMDISK_TARGET) \
$(INSTALLED_BOOTIMAGE_TARGET) \
@@ -4884,14 +5263,12 @@
$(INSTALLED_ODM_DLKMIMAGE_TARGET) \
$(updater_dep)
endif
-$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard)/filelist
-$(PROGUARD_DICT_ZIP): $(SOONG_ZIP)
- @echo "Packaging Proguard obfuscation dictionary files."
+$(PROGUARD_USAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_usage)/filelist
+$(PROGUARD_USAGE_ZIP): $(MERGE_ZIPS)
+ @echo "Packaging Proguard usage files."
mkdir -p $(dir $@) $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS $(dir $(PRIVATE_LIST_FILE))
- find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_dictionary | \
- sed -e 's/\(.*\)\/proguard_dictionary/\0\n\1\/classes.jar/' > $(PRIVATE_LIST_FILE)
- $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
-
+ find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_usage.zip > $(PRIVATE_LIST_FILE)
+ $(MERGE_ZIPS) $@ @$(PRIVATE_LIST_FILE)
ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
@@ -5380,7 +5757,3 @@
.PHONY: haiku
haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
$(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
-
-# -----------------------------------------------------------------
-# The makefile for haiku line coverage.
-include $(BUILD_SYSTEM)/line_coverage.mk
diff --git a/core/OWNERS b/core/OWNERS
index 459683e..5456d4f 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,2 +1,6 @@
per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,dbrazdil@google.com
per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,mathieuc@google.com
+
+# For version updates
+per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
+
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index ee12c8c..883f92d 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -27,3 +27,10 @@
# Add variables to the namespace below:
$(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
+
+# TODO(b/172480615): Remove when platform uses ART Module prebuilts by default.
+ifeq (,$(filter art_module,$(SOONG_CONFIG_NAMESPACES)))
+ $(call add_soong_config_namespace,art_module)
+ SOONG_CONFIG_art_module += source_build
+endif
+SOONG_CONFIG_art_module_source_build ?= true
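Because the default above uses ?=, the switch can be flipped to prebuilts from product configuration or the environment; a minimal sketch, assuming ART Module prebuilts are actually available in the tree:

    # Illustrative override, e.g. in a product makefile or on the command line:
    SOONG_CONFIG_art_module_source_build := false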
diff --git a/core/artifact_path_requirements.mk b/core/artifact_path_requirements.mk
new file mode 100644
index 0000000..ceaefa2
--- /dev/null
+++ b/core/artifact_path_requirements.mk
@@ -0,0 +1,60 @@
+# This file contains logic to enforce artifact path requirements
+# defined in product makefiles.
+
+# Fakes don't get installed, and NDK stubs aren't installed to device.
+static_allowed_patterns := $(TARGET_OUT_FAKE)/% $(SOONG_OUT_DIR)/ndk/%
+# RROs become REQUIRED by the source module, but are always placed on the vendor partition.
+static_allowed_patterns += %__auto_generated_rro_product.apk
+static_allowed_patterns += %__auto_generated_rro_vendor.apk
+# Auto-included targets are not considered
+static_allowed_patterns += $(call product-installed-files,)
+# $(PRODUCT_OUT)/apex is where shared libraries in APEXes get installed.
+# The path can be considered as a fake path, as the shared libraries
+# are installed there just to have symbols files for them under
+# $(PRODUCT_OUT)/symbols/apex for debugging purpose. The /apex directory
+# is never compiled into a filesystem image.
+static_allowed_patterns += $(PRODUCT_OUT)/apex/%
+ifeq (true,$(BOARD_USES_SYSTEM_OTHER_ODEX))
+ # Allow system_other odex space optimization.
+ static_allowed_patterns += \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.odex \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.vdex \
+ $(TARGET_OUT_SYSTEM_OTHER)/%.art
+endif
+
+all_offending_files :=
+$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
+ $(eval requirements := $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENTS)) \
+ $(eval ### Verify that the product only produces files inside its path requirements.) \
+ $(eval allowed := $(PRODUCTS.$(makefile).ARTIFACT_PATH_ALLOWED_LIST)) \
+ $(eval path_patterns := $(call resolve-product-relative-paths,$(requirements),%)) \
+ $(eval allowed_patterns := $(call resolve-product-relative-paths,$(allowed))) \
+ $(eval files := $(call product-installed-files, $(makefile))) \
+ $(eval offending_files := $(filter-out $(path_patterns) $(allowed_patterns) $(static_allowed_patterns),$(files))) \
+ $(call maybe-print-list-and-error,$(offending_files),\
+ $(makefile) produces files outside its artifact path requirement. \
+ Allowed paths are $(subst $(space),$(comma)$(space),$(addsuffix *,$(requirements)))) \
+ $(eval unused_allowed := $(filter-out $(files),$(allowed_patterns))) \
+ $(if $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENT_IS_RELAXED),, \
+ $(call maybe-print-list-and-error,$(unused_allowed),$(makefile) includes redundant allowed entries in its artifact path requirement.) \
+ ) \
+ $(eval ### Optionally verify that nothing else produces files inside this artifact path requirement.) \
+ $(eval extra_files := $(filter-out $(files) $(HOST_OUT)/%,$(product_target_FILES))) \
+ $(eval files_in_requirement := $(filter $(path_patterns),$(extra_files))) \
+ $(eval all_offending_files += $(files_in_requirement)) \
+ $(eval allowed := $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST)) \
+ $(eval allowed_patterns := $(call resolve-product-relative-paths,$(allowed))) \
+ $(eval offending_files := $(filter-out $(allowed_patterns),$(files_in_requirement))) \
+ $(eval enforcement := $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS)) \
+ $(if $(enforcement),\
+ $(call maybe-print-list-and-error,$(offending_files),\
+ $(INTERNAL_PRODUCT) produces files inside $(makefile)s artifact path requirement. \
+ $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT)) \
+ $(eval unused_allowed := $(if $(filter true strict,$(enforcement)),\
+ $(foreach p,$(allowed_patterns),$(if $(filter $(p),$(extra_files)),,$(p))))) \
+ $(call maybe-print-list-and-error,$(unused_allowed),$(INTERNAL_PRODUCT) includes redundant artifact path requirement allowed list entries.) \
+ ) \
+)
+$(PRODUCT_OUT)/offending_artifacts.txt:
+ rm -f $@
+ $(foreach f,$(sort $(all_offending_files)),echo $(f) >> $@;)
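The per-product requirement lists consumed by this loop are normally declared in product makefiles through the require-artifacts-in-path macro; a hedged sketch (the path argument here is illustrative):

    # Illustrative product-makefile usage:
    $(call require-artifacts-in-path, $(TARGET_COPY_OUT_SYSTEM), )
    # First argument: the required artifact paths; second: an allowed-list of exceptions.
    # This populates PRODUCTS.<mk>.ARTIFACT_PATH_REQUIREMENTS and
    # PRODUCTS.<mk>.ARTIFACT_PATH_ALLOWED_LIST, which the foreach above checks.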
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index d4ca56f..137b118 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -22,6 +22,17 @@
# autogen_test_config_file: Path to the test config file generated.
autogen_test_config_file := $(dir $(LOCAL_BUILT_MODULE))$(LOCAL_MODULE).config
+# TODO: (b/167308193) Switch to /data/local/tests/unrestricted as the default install base.
+autogen_test_install_base := /data/local/tmp
+# Automatically setup test root for native test.
+ifeq (true,$(is_native))
+ ifeq (true,$(LOCAL_VENDOR_MODULE))
+ autogen_test_install_base = /data/local/tests/vendor
+ endif
+ ifeq (true,$(LOCAL_USE_VNDK))
+ autogen_test_install_base = /data/local/tests/vendor
+ endif
+endif
ifeq (true,$(is_native))
ifeq ($(LOCAL_NATIVE_BENCHMARK),true)
autogen_test_config_template := $(NATIVE_BENCHMARK_TEST_CONFIG_TEMPLATE)
@@ -33,10 +44,11 @@
endif
endif
# Auto generating test config file for native test
+$(autogen_test_config_file): PRIVATE_TEST_INSTALL_BASE := $(autogen_test_install_base)
$(autogen_test_config_file): PRIVATE_MODULE_NAME := $(LOCAL_MODULE)
$(autogen_test_config_file) : $(autogen_test_config_template)
@echo "Auto generating test config $(notdir $@)"
- $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE_NAME)&g;s&{EXTRA_CONFIGS}&&g' $< > $@
+ $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE_NAME)&g;s&{TEST_INSTALL_BASE}&$(PRIVATE_TEST_INSTALL_BASE)&g;s&{EXTRA_CONFIGS}&&g' $< > $@
my_auto_generate_config := true
else
# Auto generating test config file for instrumentation test
diff --git a/core/base_rules.mk b/core/base_rules.mk
index ddf736b..4fd8baa 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -115,6 +115,7 @@
include $(BUILD_SYSTEM)/local_vndk.mk
include $(BUILD_SYSTEM)/local_systemsdk.mk
+include $(BUILD_SYSTEM)/local_current_sdk.mk
my_module_tags := $(LOCAL_MODULE_TAGS)
ifeq ($(my_host_cross),true)
@@ -273,6 +274,12 @@
endif
endif
+ifeq ($(LOCAL_IS_UNIT_TEST),true)
+ ifeq ($(LOCAL_IS_HOST_MODULE),true)
+ LOCAL_COMPATIBILITY_SUITE += host-unit-tests
+ endif
+endif
+
ifeq ($(my_module_path),)
install_path_var := $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT$(partition_tag)_$(LOCAL_MODULE_CLASS)
ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
@@ -515,7 +522,11 @@
$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
$(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
@echo "Install: $@"
+ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ $(copy-file-or-link-to-new-target)
+else
$(copy-file-to-new-target)
+endif
$(PRIVATE_POST_INSTALL_CMD)
endif
@@ -757,6 +768,13 @@
$(test_config):$(dir)/$(LOCAL_MODULE).config)))
endif
+ ifneq (,$(LOCAL_EXTRA_FULL_TEST_CONFIGS))
+ $(foreach test_config_file, $(LOCAL_EXTRA_FULL_TEST_CONFIGS), \
+ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(test_config_file):$(dir)/$(basename $(notdir $(test_config_file))).config))))
+ endif
+
ifneq (,$(wildcard $(LOCAL_PATH)/DynamicConfig.xml))
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
@@ -973,6 +991,12 @@
ALL_MODULES.$(my_register_name).MODULE_NAME := $(LOCAL_MODULE)
ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES := $(LOCAL_COMPATIBILITY_SUITE)
ALL_MODULES.$(my_register_name).TEST_CONFIG := $(test_config)
+ALL_MODULES.$(my_register_name).EXTRA_TEST_CONFIGS := $(LOCAL_EXTRA_FULL_TEST_CONFIGS)
+ALL_MODULES.$(my_register_name).TEST_MAINLINE_MODULES := $(LOCAL_TEST_MAINLINE_MODULES)
+ifndef LOCAL_IS_HOST_MODULE
+ALL_MODULES.$(my_register_name).FILE_CONTEXTS := $(LOCAL_FILE_CONTEXTS)
+endif
+ALL_MODULES.$(my_register_name).IS_UNIT_TEST := $(LOCAL_IS_UNIT_TEST)
test_config :=
INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/binary.mk b/core/binary.mk
index be008e6..fa36d64 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1333,11 +1333,6 @@
my_c_includes += $(TOPDIR)$(LOCAL_PATH) $(intermediates) $(generated_sources_dir)
-# The platform JNI header is for platform modules only.
-ifeq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
- my_c_includes += $(JNI_H_INCLUDE)
-endif
-
my_c_includes := $(foreach inc,$(my_c_includes),$(call clean-path,$(inc)))
my_outside_includes := $(filter-out $(OUT_DIR)/%,$(filter /%,$(my_c_includes)) $(filter ../%,$(my_c_includes)))
diff --git a/core/board_config.mk b/core/board_config.mk
index b7d0178..725c0a5 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -19,87 +19,111 @@
# and checks the variable defined therein.
# ###############################################################
-_board_strip_readonly_list := \
- BOARD_EGL_CFG \
- BOARD_HAVE_BLUETOOTH \
- BOARD_INSTALLER_CMDLINE \
- BOARD_KERNEL_CMDLINE \
- BOARD_KERNEL_BASE \
- BOARD_USES_GENERIC_AUDIO \
- BOARD_USES_RECOVERY_AS_BOOT \
- BOARD_VENDOR_USE_AKMD \
- BOARD_WPA_SUPPLICANT_DRIVER \
- BOARD_WLAN_DEVICE \
- TARGET_ARCH \
- TARGET_ARCH_VARIANT \
- TARGET_CPU_ABI \
- TARGET_CPU_ABI2 \
- TARGET_CPU_VARIANT \
- TARGET_CPU_VARIANT_RUNTIME \
- TARGET_2ND_ARCH \
- TARGET_2ND_ARCH_VARIANT \
- TARGET_2ND_CPU_ABI \
- TARGET_2ND_CPU_ABI2 \
- TARGET_2ND_CPU_VARIANT \
- TARGET_2ND_CPU_VARIANT_RUNTIME \
- TARGET_BOARD_PLATFORM \
- TARGET_BOARD_PLATFORM_GPU \
- TARGET_BOOTLOADER_BOARD_NAME \
- TARGET_FS_CONFIG_GEN \
- TARGET_NO_BOOTLOADER \
- TARGET_NO_KERNEL \
- TARGET_NO_RECOVERY \
- TARGET_NO_RADIOIMAGE \
- TARGET_HARDWARE_3D \
- WITH_DEXPREOPT \
+_board_strip_readonly_list :=
+_board_strip_readonly_list += BOARD_BOOTLOADER_IN_UPDATE_PACKAGE
+_board_strip_readonly_list += BOARD_EGL_CFG
+_board_strip_readonly_list += BOARD_HAVE_BLUETOOTH
+_board_strip_readonly_list += BOARD_INSTALLER_CMDLINE
+_board_strip_readonly_list += BOARD_KERNEL_CMDLINE
+_board_strip_readonly_list += BOARD_KERNEL_BASE
+_board_strip_readonly_list += BOARD_USES_GENERIC_AUDIO
+_board_strip_readonly_list += BOARD_USES_RECOVERY_AS_BOOT
+_board_strip_readonly_list += BOARD_VENDOR_USE_AKMD
+_board_strip_readonly_list += BOARD_WPA_SUPPLICANT_DRIVER
+_board_strip_readonly_list += BOARD_WLAN_DEVICE
+_board_strip_readonly_list += TARGET_BOARD_PLATFORM
+_board_strip_readonly_list += TARGET_BOARD_PLATFORM_GPU
+_board_strip_readonly_list += TARGET_BOOTLOADER_BOARD_NAME
+_board_strip_readonly_list += TARGET_FS_CONFIG_GEN
+_board_strip_readonly_list += TARGET_NO_BOOTLOADER
+_board_strip_readonly_list += TARGET_NO_KERNEL
+_board_strip_readonly_list += TARGET_NO_RECOVERY
+_board_strip_readonly_list += TARGET_NO_RADIOIMAGE
+_board_strip_readonly_list += TARGET_HARDWARE_3D
+_board_strip_readonly_list += WITH_DEXPREOPT
+
+# Arch variables
+_board_strip_readonly_list += TARGET_ARCH
+_board_strip_readonly_list += TARGET_ARCH_VARIANT
+_board_strip_readonly_list += TARGET_CPU_ABI
+_board_strip_readonly_list += TARGET_CPU_ABI2
+_board_strip_readonly_list += TARGET_CPU_VARIANT
+_board_strip_readonly_list += TARGET_CPU_VARIANT_RUNTIME
+_board_strip_readonly_list += TARGET_2ND_ARCH
+_board_strip_readonly_list += TARGET_2ND_ARCH_VARIANT
+_board_strip_readonly_list += TARGET_2ND_CPU_ABI
+_board_strip_readonly_list += TARGET_2ND_CPU_ABI2
+_board_strip_readonly_list += TARGET_2ND_CPU_VARIANT
+_board_strip_readonly_list += TARGET_2ND_CPU_VARIANT_RUNTIME
+# TARGET_ARCH_SUITE is an alternative arch configuration to TARGET_ARCH (and related variables),
+# that can be used for soong-only builds to build for several architectures at once.
+# Allowed values currently are "ndk" and "mainline_sdk".
+_board_strip_readonly_list += TARGET_ARCH_SUITE
# File system variables
-_board_strip_readonly_list += \
- BOARD_FLASH_BLOCK_SIZE \
- BOARD_BOOTIMAGE_PARTITION_SIZE \
- BOARD_RECOVERYIMAGE_PARTITION_SIZE \
- BOARD_SYSTEMIMAGE_PARTITION_SIZE \
- BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE \
- BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE \
- BOARD_USERDATAIMAGE_PARTITION_SIZE \
- BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE \
- BOARD_CACHEIMAGE_PARTITION_SIZE \
- BOARD_VENDORIMAGE_PARTITION_SIZE \
- BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
- BOARD_PRODUCTIMAGE_PARTITION_SIZE \
- BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
- BOARD_SYSTEM_EXTIMAGE_PARTITION_SIZE \
- BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE \
- BOARD_ODMIMAGE_PARTITION_SIZE \
- BOARD_ODMIMAGE_FILE_SYSTEM_TYPE \
- BOARD_VENDOR_DLKMIMAGE_PARTITION_SIZE \
- BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE \
- BOARD_ODM_DLKMIMAGE_PARTITION_SIZE \
- BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE \
+_board_strip_readonly_list += BOARD_FLASH_BLOCK_SIZE
+_board_strip_readonly_list += BOARD_BOOTIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_RECOVERYIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_SYSTEMIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_USERDATAIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_CACHEIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_VENDORIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_PRODUCTIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_SYSTEM_EXTIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_ODMIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_ODMIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_VENDOR_DLKMIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_ODM_DLKMIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE
# Logical partitions related variables.
-_dynamic_partitions_var_list += \
- BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_VENDOR_DLKMIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_ODM_DLKMIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_SYSTEM_EXTIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_SUPER_PARTITION_SIZE \
- BOARD_SUPER_PARTITION_GROUPS \
-
-_board_strip_readonly_list += $(_dynamic_partitions_var_list)
+_board_strip_readonly_list += BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_VENDOR_DLKMIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_ODM_DLKMIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_SYSTEM_EXTIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_SUPER_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_SUPER_PARTITION_GROUPS
# Kernel related variables
-_board_strip_readonly_list += \
- BOARD_KERNEL_BINARIES \
- BOARD_KERNEL_MODULE_INTERFACE_VERSIONS \
+_board_strip_readonly_list += BOARD_KERNEL_BINARIES
+_board_strip_readonly_list += BOARD_KERNEL_MODULE_INTERFACE_VERSIONS
+
+# Variables related to generic kernel image (GKI) and generic boot image
+# - BOARD_USES_GENERIC_KERNEL_IMAGE is the global variable that defines if the
+# board uses GKI and generic boot image.
+# Update mechanism of the boot image is not enforced by this variable.
+# - BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE controls whether the recovery image
+# contains a kernel or not.
+# - BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT controls whether ramdisk
+# recovery resources are built to vendor_boot.
+# - BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT controls whether GSI AVB keys are
+# built to vendor_boot.
+# - BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES controls whether boot images in $OUT are added
+# to target files package directly.
+_board_strip_readonly_list += BOARD_USES_GENERIC_KERNEL_IMAGE
+_board_strip_readonly_list += BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE
+_board_strip_readonly_list += BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT
+_board_strip_readonly_list += BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT
+_board_strip_readonly_list += BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES
+
+# Defines the list of logical vendor ramdisk names to build or include in vendor_boot.
+_board_strip_readonly_list += BOARD_VENDOR_RAMDISK_FRAGMENTS
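For a rough picture of how these flags combine, a hypothetical BoardConfig.mk for a GKI device that builds vendor_boot (boot header version 3 or later) might contain the lines below; the consistency checks added at the end of this file constrain which combinations are legal:

    # Hypothetical BoardConfig.mk fragment:
    BOARD_USES_GENERIC_KERNEL_IMAGE := true
    BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT := true
    BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE := true
    BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES := true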
_build_broken_var_list := \
BUILD_BROKEN_DUP_RULES \
BUILD_BROKEN_DUP_SYSPROP \
BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES \
+ BUILD_BROKEN_ENFORCE_SYSPROP_OWNER \
BUILD_BROKEN_MISSING_REQUIRED_MODULES \
BUILD_BROKEN_OUTSIDE_INCLUDE_DIRS \
BUILD_BROKEN_PREBUILT_ELF_FILES \
@@ -153,10 +177,19 @@
TARGET_DEVICE_DIR := $(patsubst %/,%,$(dir $(board_config_mk)))
.KATI_READONLY := TARGET_DEVICE_DIR
endif
+
include $(board_config_mk)
-ifeq ($(TARGET_ARCH),)
- $(error TARGET_ARCH not defined by board config: $(board_config_mk))
+
+ifneq (,$(and $(TARGET_ARCH),$(TARGET_ARCH_SUITE)))
+ $(error $(board_config_mk) erroneously sets both TARGET_ARCH and TARGET_ARCH_SUITE)
endif
+ifeq ($(TARGET_ARCH)$(TARGET_ARCH_SUITE),)
+ $(error Target architectures not defined by board config: $(board_config_mk))
+endif
+ifeq ($(TARGET_CPU_ABI)$(TARGET_ARCH_SUITE),)
+ $(error TARGET_CPU_ABI not defined by board config: $(board_config_mk))
+endif
+
ifneq ($(MALLOC_IMPL),)
$(warning *** Unsupported option MALLOC_IMPL defined by board config: $(board_config_mk).)
$(error Use `MALLOC_SVELTE := true` to configure jemalloc for low-memory)
@@ -173,10 +206,12 @@
TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
-# The combo makefiles check and set defaults for various CPU configuration
-combo_target := TARGET_
-combo_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/combo/select.mk
+ifdef TARGET_ARCH
+ # The combo makefiles check and set defaults for various CPU configuration
+ combo_target := TARGET_
+ combo_2nd_arch_prefix :=
+ include $(BUILD_SYSTEM)/combo/select.mk
+endif
ifdef TARGET_2ND_ARCH
combo_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
@@ -186,9 +221,7 @@
.KATI_READONLY := $(_board_strip_readonly_list)
INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
-ifeq ($(TARGET_CPU_ABI),)
- $(error No TARGET_CPU_ABI defined by board config: $(board_config_mk))
-endif
+
ifneq ($(filter %64,$(TARGET_ARCH)),)
TARGET_IS_64_BIT := true
endif
@@ -289,7 +322,8 @@
###########################################
# Now we can substitute with the real value of TARGET_COPY_OUT_DEBUG_RAMDISK
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ifneq (,$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT) \
+ $(BOARD_GKI_NONAB_COMPAT) $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk/first_stage_ramdisk
TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK := vendor_debug_ramdisk/first_stage_ramdisk
TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk/first_stage_ramdisk
@@ -341,23 +375,34 @@
# Are we building a boot image
BUILDING_BOOT_IMAGE :=
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
- BUILDING_BOOT_IMAGE :=
-else ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),)
- ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
+ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),)
+ ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ BUILDING_BOOT_IMAGE :=
+ else ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
+ BUILDING_BOOT_IMAGE := true
+ else ifneq (,$(foreach kernel,$(BOARD_KERNEL_BINARIES),$(BOARD_$(call to-upper,$(kernel))_BOOTIMAGE_PARTITION_SIZE)))
BUILDING_BOOT_IMAGE := true
endif
else ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),true)
- BUILDING_BOOT_IMAGE := true
+ ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ $(warning *** PRODUCT_BUILD_BOOT_IMAGE is true, but so is BOARD_USES_RECOVERY_AS_BOOT.)
+ $(warning *** Skipping building boot image.)
+ BUILDING_BOOT_IMAGE :=
+ else
+ BUILDING_BOOT_IMAGE := true
+ endif
endif
.KATI_READONLY := BUILDING_BOOT_IMAGE
# Are we building a recovery image
BUILDING_RECOVERY_IMAGE :=
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
- BUILDING_RECOVERY_IMAGE := true
-else ifeq ($(PRODUCT_BUILD_RECOVERY_IMAGE),)
- ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
+ifeq ($(PRODUCT_BUILD_RECOVERY_IMAGE),)
+ ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ BUILDING_RECOVERY_IMAGE := true
+ else ifeq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
+ # Set to true to build recovery resources for vendor_boot
+ BUILDING_RECOVERY_IMAGE := true
+ else ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY)))
BUILDING_RECOVERY_IMAGE := true
endif
@@ -371,7 +416,9 @@
BUILDING_VENDOR_BOOT_IMAGE :=
ifdef BOARD_BOOT_HEADER_VERSION
ifneq ($(call math_gt_or_eq,$(BOARD_BOOT_HEADER_VERSION),3),)
- ifneq ($(TARGET_NO_VENDOR_BOOT),true)
+ ifeq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE),)
+ BUILDING_VENDOR_BOOT_IMAGE := true
+ else ifeq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE),true)
BUILDING_VENDOR_BOOT_IMAGE := true
endif
endif
@@ -686,6 +733,16 @@
TARGET_VENDOR_TEST_SUFFIX :=
endif
+# If PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is set,
+# BOARD_VNDK_VERSION must be set because PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY
+# is an enforcement of inter-partition dependencies, and it doesn't have any meaning
+# when BOARD_VNDK_VERSION isn't set.
+ifeq ($(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY),true)
+ ifeq ($(BOARD_VNDK_VERSION),)
+ $(error BOARD_VNDK_VERSION must be set when PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true)
+ endif
+endif
+
###########################################
# APEXes are by default flattened, i.e. non-updatable.
# It can be unflattened (and updatable) by inheriting from
@@ -731,3 +788,39 @@
$(if $(filter true,$(BUILD_BROKEN_USES_$(m))),\
$(KATI_deprecated_var $(m),Please convert to Soong),\
$(KATI_obsolete_var $(m),Please convert to Soong)))
+
+ifndef BUILDING_RECOVERY_IMAGE
+ ifeq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE))
+ $(error Should not set BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE if not building recovery image)
+ endif
+endif
+
+ifndef BUILDING_VENDOR_BOOT_IMAGE
+ ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+ $(error Should not set BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT if not building vendor_boot image)
+ endif
+ ifdef BOARD_VENDOR_RAMDISK_FRAGMENTS
+ $(error Should not set BOARD_VENDOR_RAMDISK_FRAGMENTS if not building vendor_boot image)
+ endif
+endif
+
+ifneq ($(words $(BOARD_VENDOR_RAMDISK_FRAGMENTS)),$(words $(sort $(BOARD_VENDOR_RAMDISK_FRAGMENTS))))
+ $(error BOARD_VENDOR_RAMDISK_FRAGMENTS has duplicate entries: $(BOARD_VENDOR_RAMDISK_FRAGMENTS))
+endif
+
+# If BOARD_USES_GENERIC_KERNEL_IMAGE is set, BOARD_USES_RECOVERY_AS_BOOT must not be set.
+# Devices without a dedicated recovery partition use BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT to
+# build recovery into vendor_boot.
+ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE))
+ ifeq (true,$(BOARD_USES_RECOVERY_AS_BOOT))
+ $(error BOARD_USES_RECOVERY_AS_BOOT cannot be true if BOARD_USES_GENERIC_KERNEL_IMAGE is true. \
+ Use BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT instead)
+ endif
+endif
+
+ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+ ifeq (true,$(BOARD_USES_RECOVERY_AS_BOOT))
+    $(error BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT and BOARD_USES_RECOVERY_AS_BOOT cannot \
+      both be true. Recovery resources should be installed to either boot or vendor_boot, but not both)
+ endif
+endif
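
For illustration, a BoardConfig.mk sketch that passes the new checks for a device using a generic kernel image and no dedicated recovery partition (values are assumptions, not part of this patch):

  BOARD_USES_GENERIC_KERNEL_IMAGE := true
  BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT := true
  # BOARD_USES_RECOVERY_AS_BOOT must stay unset in this configuration.
  BOARD_BOOT_HEADER_VERSION := 3    # header v3+ so the vendor_boot image gets built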
diff --git a/core/build-system.html b/core/build-system.html
index 9cd7b0b..b872909c 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -440,6 +440,33 @@
LOCAL_GENERATED_SOURCES += $(GEN)
</pre>
+<h3><a name="unbundled-build"/>Unbundled build</h3>
+<p>"Unbundled build" can mean several things depending on the context.
+The flags related to unbundled builds are described below.</p>
+<h4>TARGET_BUILD_UNBUNDLED</h4>
+<p>Indicates that the source tree might not have the full platform sources. It is always set if
+<code>TARGET_BUILD_APPS</code> or <code>TARGET_BUILD_UNBUNDLED_IMAGE</code> is set.</p>
+<h4>TARGET_BUILD_USE_PREBUILT_SDKS</h4>
+<p>An internal flag. If it is set, prebuilt SDKs are used even when a module's
+<code>LOCAL_SDK_VERSION</code> is <code>current</code> (including <code>system_current</code>,
+<code>core_current</code>, and so on). If it is unset, the current SDKs are built from
+source and used as usual.</p>
+<h4>DISABLE_PREOPT</h4>
+<p>Also an internal flag. If it is set, dexpreopt is disabled.
+It is always set if <code>TARGET_BUILD_APPS</code> or <code>TARGET_BUILD_UNBUNDLED_IMAGE</code> is set,
+because dexpreopt depends tightly on the platform.</p>
+<h4>TARGET_BUILD_APPS</h4>
+<p>Builds apps that can be distributed outside the platform, so it turns on
+<code>TARGET_BUILD_UNBUNDLED</code> and <code>DISABLE_PREOPT</code>.
+It also turns on <code>TARGET_BUILD_USE_PREBUILT_SDKS</code>, unless
+<code>UNBUNDLED_BUILD_SDKS_FROM_SOURCE</code> is set.</p>
+<h4>TARGET_BUILD_UNBUNDLED_IMAGE</h4>
+<p>Similar to <code>TARGET_BUILD_APPS</code>, but its target is an unbundled partition
+(such as the vendor partition), i.e. a partition that can be distributed outside the platform.
+Accordingly, it sets <code>TARGET_BUILD_UNBUNDLED</code> and <code>DISABLE_PREOPT</code>,
+and it also turns on <code>TARGET_BUILD_USE_PREBUILT_SDKS</code>, unless
+<code>UNBUNDLED_BUILD_SDKS_FROM_SOURCE</code> is set.</p>
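
A minimal sketch of how these flags combine; the app name is made up, and in practice the variables are usually passed on the build command line rather than set in a makefile:

<pre>
# Unbundled app build: implies TARGET_BUILD_UNBUNDLED and DISABLE_PREOPT,
# and TARGET_BUILD_USE_PREBUILT_SDKS unless SDKs are built from source.
TARGET_BUILD_APPS := ExampleApp
# Optional: build the current SDKs from source instead of using prebuilts.
UNBUNDLED_BUILD_SDKS_FROM_SOURCE := true
</pre>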
+
<h3><a name="platform-specific"/>Platform specific conditionals</h3>
<p>Sometimes you need to set flags specifically for different platforms. Here
is a list of which values the different build-system defined variables will be
diff --git a/core/check_elf_file.mk b/core/check_elf_file.mk
index d54a5b7..b5be81f 100644
--- a/core/check_elf_file.mk
+++ b/core/check_elf_file.mk
@@ -14,12 +14,14 @@
# - my_installed_module_stem
# - my_prebuilt_src_file
# - my_check_elf_file_shared_lib_files
+# - my_system_shared_libraries
ifndef LOCAL_IS_HOST_MODULE
ifneq ($(filter $(LOCAL_MODULE_CLASS),SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS),)
check_elf_files_stamp := $(intermediates)/check_elf_files.timestamp
$(check_elf_files_stamp): PRIVATE_SONAME := $(if $(filter $(LOCAL_MODULE_CLASS),SHARED_LIBRARIES),$(my_installed_module_stem))
$(check_elf_files_stamp): PRIVATE_ALLOW_UNDEFINED_SYMBOLS := $(LOCAL_ALLOW_UNDEFINED_SYMBOLS)
+$(check_elf_files_stamp): PRIVATE_SYSTEM_SHARED_LIBRARIES := $(my_system_shared_libraries)
# PRIVATE_SHARED_LIBRARY_FILES are file paths to built shared libraries.
# In addition to $(my_check_elf_file_shared_lib_files), some file paths are
# added by `resolve-shared-libs-for-elf-file-check` from `core/main.mk`.
@@ -33,6 +35,7 @@
--skip-unknown-elf-machine \
$(if $(PRIVATE_SONAME),--soname $(PRIVATE_SONAME)) \
$(foreach l,$(PRIVATE_SHARED_LIBRARY_FILES),--shared-lib $(l)) \
+ $(foreach l,$(PRIVATE_SYSTEM_SHARED_LIBRARIES),--system-shared-lib $(l)) \
$(if $(PRIVATE_ALLOW_UNDEFINED_SYMBOLS),--allow-undefined-symbols) \
--llvm-readobj=$(LLVM_READOBJ) \
$<
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 307c2c2..5effac7 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -74,7 +74,6 @@
LOCAL_DROIDDOC_CUSTOM_ASSET_DIR:=
LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR:=
LOCAL_DROIDDOC_DOC_ZIP :=
-LOCAL_DROIDDOC_JDIFF_DOC_ZIP :=
LOCAL_DROIDDOC_HTML_DIR:=
LOCAL_DROIDDOC_METADATA_ZIP:=
LOCAL_DROIDDOC_OPTIONS:=
@@ -98,9 +97,11 @@
LOCAL_EXPORT_SDK_LIBRARIES:=
LOCAL_EXPORT_SHARED_LIBRARY_HEADERS:=
LOCAL_EXPORT_STATIC_LIBRARY_HEADERS:=
+LOCAL_EXTRA_FULL_TEST_CONFIGS:=
LOCAL_EXTRACT_APK:=
LOCAL_EXTRACT_DPI_APK:=
LOCAL_FDO_SUPPORT:=
+LOCAL_FILE_CONTEXTS:=
LOCAL_FINDBUGS_FLAGS:=
LOCAL_FORCE_STATIC_EXECUTABLE:=
LOCAL_FULL_CLASSES_JACOCO_JAR:=
@@ -129,6 +130,7 @@
LOCAL_IS_FUZZ_TARGET:=
LOCAL_IS_HOST_MODULE:=
LOCAL_IS_RUNTIME_RESOURCE_OVERLAY:=
+LOCAL_IS_UNIT_TEST:=
LOCAL_JACK_CLASSPATH:=
LOCAL_JACK_COVERAGE_EXCLUDE_FILTER:=
LOCAL_JACK_COVERAGE_INCLUDE_FILTER:=
@@ -157,6 +159,10 @@
LOCAL_CERTIFICATE_LINEAGE:=
LOCAL_LDFLAGS:=
LOCAL_LDLIBS:=
+LOCAL_LICENSE_CONDITIONS:=
+LOCAL_LICENSE_KINDS:=
+LOCAL_LICENSE_INSTALL_MAP:=
+LOCAL_LICENSE_PACKAGE_NAME:=
LOCAL_LOGTAGS_FILES:=
LOCAL_MANIFEST_FILE:=
LOCAL_MANIFEST_INSTRUMENTATION_FOR:=
@@ -168,6 +174,7 @@
LOCAL_MODULE_HOST_ARCH_WARN:=
LOCAL_MODULE_HOST_CROSS_ARCH:=
LOCAL_MODULE_HOST_OS:=
+LOCAL_MODULE_IS_CONTAINER:=
LOCAL_MODULE_OWNER:=
LOCAL_MODULE_PATH:=
LOCAL_MODULE_RELATIVE_PATH :=
@@ -236,6 +243,7 @@
# lite(default),micro,nano,stream,full,nanopb-c,nanopb-c-enable_malloc,nanopb-c-16bit,nanopb-c-enable_malloc-16bit,nanopb-c-32bit,nanopb-c-enable_malloc-32bit
LOCAL_PROTOC_OPTIMIZE_TYPE:=
LOCAL_PROTO_JAVA_OUTPUT_PARAMS:=
+LOCAL_PROVIDES_USES_LIBRARY:=
LOCAL_R8_FLAG_FILES:=
LOCAL_RECORDED_MODULE_TYPE:=
LOCAL_RENDERSCRIPT_CC:=
@@ -275,6 +283,7 @@
LOCAL_SOONG_LINK_TYPE :=
LOCAL_SOONG_LINT_REPORTS :=
LOCAL_SOONG_PROGUARD_DICT :=
+LOCAL_SOONG_PROGUARD_USAGE_ZIP :=
LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
LOCAL_SOONG_DEVICE_RRO_DIRS :=
LOCAL_SOONG_PRODUCT_RRO_DIRS :=
@@ -298,6 +307,7 @@
LOCAL_TARGET_REQUIRED_MODULES:=
LOCAL_TEST_CONFIG:=
LOCAL_TEST_DATA:=
+LOCAL_TEST_MAINLINE_MODULES:=
LOCAL_TEST_MODULE_TO_PROGUARD_WITH:=
LOCAL_TIDY:=
LOCAL_TIDY_CHECKS:=
diff --git a/target/product/virtual_ab_ota_retrofit.mk b/core/combo/HOST_CROSS_linux_bionic-arm64.mk
similarity index 69%
copy from target/product/virtual_ab_ota_retrofit.mk
copy to core/combo/HOST_CROSS_linux_bionic-arm64.mk
index 3416a4f..df6865f 100644
--- a/target/product/virtual_ab_ota_retrofit.mk
+++ b/core/combo/HOST_CROSS_linux_bionic-arm64.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2019 The Android Open-Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +14,9 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
+# Configuration for builds hosted on linux_arm-arm64
+# Included by combo/select.mk
-PRODUCT_VIRTUAL_AB_OTA_RETROFIT := true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.retrofit=true
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
diff --git a/core/tasks/apidiff.mk b/core/combo/arch/arm64/armv8-2a-dotprod.mk
similarity index 71%
copy from core/tasks/apidiff.mk
copy to core/combo/arch/arm64/armv8-2a-dotprod.mk
index 76e4749..c775cf7 100644
--- a/core/tasks/apidiff.mk
+++ b/core/combo/arch/arm64/armv8-2a-dotprod.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2017 The Android Open Source Project
+#
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,11 +12,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-#
-# Rules for building API diffs.
#
-.PHONY: api-diff
-
-api-diff: api-stubs-docs-jdiff
+# .mk file required to support build for the new armv8-2a-dotprod Arm64 arch
+# variant. The file just needs to be present but does not need to contain
+# anything.
diff --git a/core/config.mk b/core/config.mk
index 57296d8..ed6429a 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -155,6 +155,9 @@
$(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported.)
$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.)
+$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead.)
+$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead.)
+$(KATI_obsolete_var TARGET_NO_VENDOR_BOOT,Use PRODUCT_BUILD_VENDOR_BOOT_IMAGE instead)
# Used to force goals to build. Only use for conditionally defined goals.
.PHONY: FORCE
@@ -552,6 +555,7 @@
FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX)
MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
+MKEROFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkerofsimage.sh
MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
@@ -601,13 +605,9 @@
APICHECK_COMMAND := $(JAVA) -Xmx4g -jar $(APICHECK) --no-banner --compatible-output=no
# Boolean variable determining if the allow list for compatible properties is enabled
-PRODUCT_COMPATIBLE_PROPERTY := false
-ifneq ($(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE),)
- PRODUCT_COMPATIBLE_PROPERTY := $(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE)
-else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
- #$(warning no product shipping level defined)
-else ifneq ($(call math_lt,27,$(PRODUCT_SHIPPING_API_LEVEL)),)
- PRODUCT_COMPATIBLE_PROPERTY := true
+PRODUCT_COMPATIBLE_PROPERTY := true
+ifeq ($(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE),false)
+ $(error PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE is obsolete)
endif
.KATI_READONLY := \
@@ -678,33 +678,22 @@
PRODUCT_USE_VNDK := $(PRODUCT_FULL_TREBLE)
endif
-# Define PRODUCT_PRODUCT_VNDK_VERSION if PRODUCT_USE_VNDK is true and
-# PRODUCT_SHIPPING_API_LEVEL is greater than 29.
-PRODUCT_USE_PRODUCT_VNDK := false
ifeq ($(PRODUCT_USE_VNDK),true)
- ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
- PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
- else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
- # No shipping level defined
- else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
- PRODUCT_USE_PRODUCT_VNDK := true
- endif
-
ifndef BOARD_VNDK_VERSION
BOARD_VNDK_VERSION := current
endif
-
- ifeq ($(PRODUCT_USE_PRODUCT_VNDK),true)
- ifndef PRODUCT_PRODUCT_VNDK_VERSION
- PRODUCT_PRODUCT_VNDK_VERSION := current
- endif
- endif
endif
$(KATI_obsolete_var PRODUCT_USE_VNDK,Use BOARD_VNDK_VERSION instead)
$(KATI_obsolete_var PRODUCT_USE_VNDK_OVERRIDE,Use BOARD_VNDK_VERSION instead)
-$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
-$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+
+ifdef PRODUCT_PRODUCT_VNDK_VERSION
+ ifndef BOARD_VNDK_VERSION
+    # VNDK for the product partition is not available unless BOARD_VNDK_VERSION
+    # is defined.
+ $(error PRODUCT_PRODUCT_VNDK_VERSION cannot be defined without defining BOARD_VNDK_VERSION)
+ endif
+endif
# Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
# devices if unset.
@@ -720,6 +709,16 @@
endif
endif
+ifndef BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
+ BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES := current
+else
+ ifdef PRODUCT_SHIPPING_API_LEVEL
+ ifneq ($(call math_lt,$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES),$(PRODUCT_SHIPPING_API_LEVEL)),)
+ $(error BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES ($(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)) must be greater than or equal to PRODUCT_SHIPPING_API_LEVEL ($(PRODUCT_SHIPPING_API_LEVEL)))
+ endif
+ endif
+endif
+.KATI_READONLY := BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
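
As a sketch (the API level is hypothetical), a board that pins vendor modules to a numbered SDK while shipping at the same level would set:

  PRODUCT_SHIPPING_API_LEVEL := 30                     # normally set in the product makefile
  BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES := 30     # must be >= PRODUCT_SHIPPING_API_LEVEL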
ifdef PRODUCT_SHIPPING_API_LEVEL
ifneq ($(call numbers_less_than,$(PRODUCT_SHIPPING_API_LEVEL),$(BOARD_SYSTEMSDK_VERSIONS)),)
@@ -769,7 +768,7 @@
# is made which breaks compatibility with the previous platform sepolicy version,
# not just on every increase in PLATFORM_SDK_VERSION. The minor version should
# be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
-sepolicy_major_vers := 29
+sepolicy_major_vers := 30
sepolicy_minor_vers := 0
ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
@@ -991,6 +990,13 @@
endif # PRODUCT_USE_DYNAMIC_PARTITIONS
+# By default, we build the hidden API csv files from source. You can use
+# prebuilt hiddenapi files by setting BOARD_PREBUILT_HIDDENAPI_DIR to the name
+# of a directory containing both prebuilt hiddenapi-flags.csv and
+# hiddenapi-index.csv.
+BOARD_PREBUILT_HIDDENAPI_DIR ?=
+.KATI_READONLY := BOARD_PREBUILT_HIDDENAPI_DIR
+
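A sketch of opting into prebuilt hidden API files (the directory path is hypothetical):

  # BoardConfig.mk: the directory must contain hiddenapi-flags.csv and hiddenapi-index.csv.
  BOARD_PREBUILT_HIDDENAPI_DIR := vendor/example/prebuilts/hiddenapi
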
# ###############################################################
# Set up final options.
# ###############################################################
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index eaab1b5..f39b84a 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -53,6 +53,18 @@
endif
endif
+# Disable global memtag_heap in excluded paths
+ifneq ($(filter memtag_heap, $(my_global_sanitize)),)
+ combined_exclude_paths := $(MEMTAG_HEAP_EXCLUDE_PATHS) \
+ $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS)
+
+ ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_exclude_paths)),\
+ $(filter $(dir)%,$(LOCAL_PATH)))),)
+ my_global_sanitize := $(filter-out memtag_heap,$(my_global_sanitize))
+ my_global_sanitize_diag := $(filter-out memtag_heap,$(my_global_sanitize_diag))
+ endif
+endif
+
ifneq ($(my_global_sanitize),)
my_sanitize := $(my_global_sanitize) $(my_sanitize)
endif
@@ -116,6 +128,25 @@
endif
endif
+# Enable memtag_heap in included paths (for Arm64 only).
+ifeq ($(filter memtag_heap, $(my_sanitize)),)
+ ifneq ($(filter arm64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
+ combined_sync_include_paths := $(MEMTAG_HEAP_SYNC_INCLUDE_PATHS) \
+ $(PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS)
+ combined_async_include_paths := $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) \
+ $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS)
+
+ ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_sync_include_paths)),\
+ $(filter $(dir)%,$(LOCAL_PATH)))),)
+ my_sanitize := memtag_heap $(my_sanitize)
+      my_sanitize_diag := memtag_heap $(my_sanitize_diag)
+ else ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_async_include_paths)),\
+ $(filter $(dir)%,$(LOCAL_PATH)))),)
+ my_sanitize := memtag_heap $(my_sanitize)
+ endif
+ endif
+endif
+
# If CFI is disabled globally, remove it from my_sanitize.
ifeq ($(strip $(ENABLE_CFI)),false)
my_sanitize := $(filter-out cfi,$(my_sanitize))
@@ -164,6 +195,7 @@
ifneq ($(filter arm x86 x86_64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
my_sanitize := $(filter-out hwaddress,$(my_sanitize))
+ my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
endif
ifneq ($(filter hwaddress,$(my_sanitize)),)
@@ -183,6 +215,20 @@
endif
endif
+ifneq ($(filter memtag_heap,$(my_sanitize)),)
+ # Add memtag ELF note.
+ ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
+ my_whole_static_libraries += note_memtag_heap_sync
+ else
+ my_whole_static_libraries += note_memtag_heap_async
+ endif
+ # This is all that memtag_heap does - it is not an actual -fsanitize argument.
+ # Remove it from the list.
+ my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
+endif
+
+my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+
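For reference, a product makefile sketch showing how paths are opted in or out of heap MTE via the lists consumed above (directory names are made up):

  PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS := system/example/critical
  PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS := vendor/example/services
  PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS := external/example/legacy
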
# TSAN is not supported on 32-bit architectures. For non-multilib cases, make
# its use an error. For multilib cases, don't use it for the 32-bit case.
ifneq ($(filter thread,$(my_sanitize)),)
@@ -441,3 +487,13 @@
endif
endif
endif
+
+# http://b/177566116: libc++ may crash with this sanitizer.
+# Disable this check unless it has been explicitly specified.
+ifneq ($(findstring fsanitize,$(my_cflags)),)
+ ifneq ($(findstring integer,$(my_cflags)),)
+ ifeq ($(findstring sanitize=unsigned-shift-base,$(my_cflags)),)
+ my_cflags += -fno-sanitize=unsigned-shift-base
+ endif
+ endif
+endif
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index a2abb1a..f71ef72 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -78,18 +78,12 @@
my_static_libraries += libc++demangle
ifeq ($(my_link_type),static)
- my_static_libraries += libm libc
- ifeq (arm,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
- my_static_libraries += libunwind_llvm
- my_ldflags += -Wl,--exclude-libs,libunwind_llvm.a
- else
- my_static_libraries += libgcc_stripped
- my_ldflags += -Wl,--exclude-libs,libgcc_stripped.a
- endif
+ my_static_libraries += libm libc libunwind
endif
endif
else ifeq ($(my_cxx_stl),ndk)
# Using an NDK STL. Handled in binary.mk, except for the unwinder.
+ # TODO: Switch the NDK over to the LLVM unwinder for non-arm32 architectures.
ifeq (arm,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
my_static_libraries += libunwind_llvm
my_ldflags += -Wl,--exclude-libs,libunwind_llvm.a
diff --git a/core/definitions.mk b/core/definitions.mk
index 2bf1ba6..4300efe 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -107,9 +107,12 @@
# All tests that should be skipped in presubmit check.
ALL_DISABLED_PRESUBMIT_TESTS :=
-# All compatibility suites mentioned in LOCAL_COMPATIBILITY_SUITES
+# All compatibility suites mentioned in LOCAL_COMPATIBILITY_SUITE
ALL_COMPATIBILITY_SUITES :=
+# All compatibility suite files to dist.
+ALL_COMPATIBILITY_DIST_FILES :=
+
# All LINK_TYPE entries
ALL_LINK_TYPES :=
@@ -522,6 +525,96 @@
endef
###########################################################
+## Sometimes a notice dependency will reference an unadorned
+## module name that only appears in ALL_MODULES adorned with
+## an ARCH suffix or a `host_cross_` prefix.
+##
+## After all of the modules are processed in base_rules.mk,
+## replace all such dependencies with every matching adorned
+## module name.
+###########################################################
+
+define fix-notice-deps
+$(strip \
+ $(eval _all_module_refs := \
+ $(sort \
+ $(foreach m,$(sort $(ALL_MODULES)), \
+ $(ALL_MODULES.$(m).NOTICE_DEPS) \
+ ) \
+ ) \
+ ) \
+ $(foreach m, $(_all_module_refs), \
+ $(eval _lookup.$(m) := \
+ $(sort \
+ $(if $(strip $(ALL_MODULES.$(m).PATH)), \
+ $(m), \
+ $(filter $(m)_32 $(m)_64 host_cross_$(m) host_cross_$(m)_32 host_cross_$(m)_64, $(ALL_MODULES)) \
+ ) \
+ ) \
+ ) \
+ ) \
+ $(foreach m, $(ALL_MODULES), \
+ $(eval ALL_MODULES.$(m).NOTICE_DEPS := \
+ $(sort \
+ $(foreach d,$(ALL_MODULES.$(m).NOTICE_DEPS), \
+ $(_lookup.$(d)) \
+ ) \
+ ) \
+ ) \
+ ) \
+)
+endef
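
To make the adornment concrete, a hypothetical trace (module names are made up): a NOTICE_DEPS entry naming a module that was only registered with arch suffixes is rewritten to the registered variants.

  # before: ALL_MODULES.foo.NOTICE_DEPS := libexample
  # after:  ALL_MODULES.foo.NOTICE_DEPS := libexample_32 libexample_64
  # (host_cross_libexample variants would be picked up the same way)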
+
+###########################################################
+## Target directory for license metadata files.
+###########################################################
+define license-metadata-dir
+$(call generated-sources-dir-for,META,lic,)
+endef
+
+###########################################################
+## License metadata build rule for my_register_name $1
+###########################################################
+define license-metadata-rule
+$(strip $(eval _dir := $(call license-metadata-dir)))
+$(strip $(eval _deps := $(sort $(filter-out $(_dir)/$(1).meta_lic,$(foreach d,$(ALL_MODULES.$(1).NOTICE_DEPS), $(_dir)/$(d).meta_lic)))))
+$(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)),
+$(_dir)/$(b).meta_module ::
+ mkdir -p $$(dir $$@)
+ echo $(_dir)/$(1).meta_lic >> $$@
+ sort -u $$@ -o $$@
+
+)
+$(_dir)/$(1).meta_lic: PRIVATE_KINDS := $(sort $(ALL_MODULES.$(1).LICENSE_KINDS))
+$(_dir)/$(1).meta_lic: PRIVATE_CONDITIONS := $(sort $(ALL_MODULES.$(1).LICENSE_CONDITIONS))
+$(_dir)/$(1).meta_lic: PRIVATE_NOTICES := $(sort $(ALL_MODULES.$(1).NOTICES))
+$(_dir)/$(1).meta_lic: PRIVATE_NOTICE_DEPS := $(_deps)
+$(_dir)/$(1).meta_lic: PRIVATE_TARGETS := $(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED))
+$(_dir)/$(1).meta_lic: PRIVATE_IS_CONTAINER := $(sort $(ALL_MODULES.$(1).IS_CONTAINER))
+$(_dir)/$(1).meta_lic: PRIVATE_PACKAGE_NAME := $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME)
+$(_dir)/$(1).meta_lic: PRIVATE_INSTALL_MAP := $(sort $(ALL_MODULES.$(1).LICENSE_INSTALL_MAP))
+$(_dir)/$(1).meta_lic : $(_deps) $(ALL_MODULES.$(1).NOTICES) $(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
+ rm -f $$@
+ mkdir -p $$(dir $$@)
+ build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(filter-out false,$$(PRIVATE_IS_CONTAINER)),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
+
+$(1) : $(_dir)/$(1).meta_lic
+
+$(if $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE),$(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE) : $(_dir)/$(1).meta_lic)
+
+.PHONY: $(1).meta_lic
+$(1).meta_lic : $(_dir)/$(1).meta_lic
+
+endef
+
+###########################################################
+## Declares a license metadata build rule for each module in ALL_MODULES
+###########################################################
+define build-license-metadata
+$(foreach m,$(ALL_MODULES),$(eval $(call license-metadata-rule,$(m))))
+endef
+
+###########################################################
## Returns correct _idfPrefix from the list:
## { HOST, HOST_CROSS, TARGET }
###########################################################
@@ -2401,6 +2494,7 @@
$(1) \
$(HOST_INIT_VERIFIER) \
$(call intermediates-dir-for,ETC,passwd_system)/passwd_system \
+ $(call intermediates-dir-for,ETC,passwd_system_ext)/passwd_system_ext \
$(call intermediates-dir-for,ETC,passwd_vendor)/passwd_vendor \
$(call intermediates-dir-for,ETC,passwd_odm)/passwd_odm \
$(call intermediates-dir-for,ETC,passwd_product)/passwd_product \
@@ -2411,6 +2505,7 @@
$(call intermediates-dir-for,ETC,odm_property_contexts)/odm_property_contexts
$(hide) $(HOST_INIT_VERIFIER) \
-p $(call intermediates-dir-for,ETC,passwd_system)/passwd_system \
+ -p $(call intermediates-dir-for,ETC,passwd_system_ext)/passwd_system_ext \
-p $(call intermediates-dir-for,ETC,passwd_vendor)/passwd_vendor \
-p $(call intermediates-dir-for,ETC,passwd_odm)/passwd_odm \
-p $(call intermediates-dir-for,ETC,passwd_product)/passwd_product \
@@ -2488,15 +2583,25 @@
# $(2): destination file
# $(3): message to print on error
define copy-non-elf-file-checked
-$(2): $(1) $(LLVM_READOBJ)
- @echo "Copy non-ELF: $$@"
+$(eval check_non_elf_file_timestamp := \
+ $(call intermediates-dir-for,FAKE,check-non-elf-file-timestamps)/$(2).timestamp)
+$(check_non_elf_file_timestamp): $(1) $(LLVM_READOBJ)
+ @echo "Check non-ELF: $$<"
+ $(hide) mkdir -p "$$(dir $$@)"
+ $(hide) rm -f "$$@"
$(hide) \
- if $(LLVM_READOBJ) -h $$< >/dev/null 2>&1; then \
- $(call echo-error,$$@,$(3)); \
- $(call echo-error,$$@,found ELF file: $$<); \
+ if $(LLVM_READOBJ) -h "$$<" >/dev/null 2>&1; then \
+ $(call echo-error,$(2),$(3)); \
+ $(call echo-error,$(2),found ELF file: $$<); \
false; \
fi
+ $(hide) touch "$$@"
+
+$(2): $(1) $(check_non_elf_file_timestamp)
+ @echo "Copy non-ELF: $$@"
$$(copy-file-to-target)
+
+check-elf-prebuilt-product-copy-files: $(check_non_elf_file_timestamp)
endef
# The -t option to acp and the -p option to cp is
@@ -2551,6 +2656,18 @@
$(hide) cp $< $@
endef
+# The same as copy-file-to-new-target, but preserves symlinks. Symlinks are
+# converted to absolute paths so that they do not break when copied elsewhere.
+define copy-file-or-link-to-new-target
+@mkdir -p $(dir $@)
+$(hide) rm -f $@
+$(hide) if [ -h $< ]; then \
+ ln -s $$(realpath $<) $@; \
+else \
+ cp $< $@; \
+fi
+endef
+
# Copy a prebuilt file to a target location.
define transform-prebuilt-to-target
@echo "$($(PRIVATE_PREFIX)DISPLAY) Prebuilt: $(PRIVATE_MODULE) ($@)"
@@ -2563,6 +2680,13 @@
$(copy-file-to-target-strip-comments)
endef
+# Copy a prebuilt file to a target location, but preserve symlinks rather than
+# dereference them.
+define copy-or-link-prebuilt-to-target
+@echo "$($(PRIVATE_PREFIX)DISPLAY) Prebuilt: $(PRIVATE_MODULE) ($@)"
+$(copy-file-or-link-to-new-target)
+endef
+
# Copy a list of files/directories to target location, with sub dir structure preserved.
# For example $(HOST_OUT_EXECUTABLES)/aapt -> $(staging)/bin/aapt .
# $(1): the source list of files/directories.
@@ -2588,6 +2712,7 @@
@mkdir -p $(dir $$@)
@rm -rf $$@
$(hide) ln -sf $(2) $$@
+$(3): .KATI_SYMLINK_OUTPUTS := $(3)
endef
# Copy an apk to a target location while removing classes*.dex
@@ -2812,6 +2937,7 @@
# 2. Add all the files to each suite's dependent files list.
# 3. Do the dependency addition to my_all_targets.
# 4. Save the module name to COMPATIBILITY.$(suite).MODULES for each suite.
+# 5. Collect files to dist into ALL_COMPATIBILITY_DIST_FILES.
# Requires for each suite: use my_compat_dist_config_$(suite) to define the test config.
# and use my_compat_dist_$(suite) to define the others.
define create-suite-dependencies
@@ -2824,9 +2950,11 @@
$$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
$$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
$$(my_compat_dist_test_data_$(suite))) \
+ $(eval ALL_COMPATIBILITY_DIST_FILES += $$(my_compat_dist_$(suite))) \
$(eval COMPATIBILITY.$(suite).MODULES += $$(my_register_name))) \
-$(eval $(my_all_targets) : $(call copy-many-files, \
- $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))) \
+$(eval $(my_all_targets) : \
+ $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE), \
+ $(foreach f,$(my_compat_dist_$(suite)), $(call word-colon,2,$(f))))) \
$(call copy-many-xml-files-checked, \
$(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_config_$(suite))))))
endef
@@ -3162,11 +3290,12 @@
###########################################################
## Find system_$(VER) in LOCAL_SDK_VERSION
+## note: system_server_* is excluded. It's a different API surface
##
## $(1): LOCAL_SDK_VERSION
###########################################################
define has-system-sdk-version
-$(filter system_%,$(1))
+$(filter-out system_server_%,$(filter system_%,$(1)))
endef
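
Illustrative expansions of the updated macro (results shown in comments):

  # $(call has-system-sdk-version,system_29)             -> system_29
  # $(call has-system-sdk-version,system_server_current) -> (empty)
  # $(call has-system-sdk-version,current)               -> (empty)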
###########################################################
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 41a2be9..06e2fb7 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -1,16 +1,24 @@
DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config
ENABLE_PREOPT := true
+ENABLE_PREOPT_BOOT_IMAGES := true
ifneq (true,$(filter true,$(WITH_DEXPREOPT)))
+ # Disable dexpreopt for libraries/apps and for boot images.
ENABLE_PREOPT :=
+ ENABLE_PREOPT_BOOT_IMAGES :=
else ifneq (true,$(filter true,$(PRODUCT_USES_DEFAULT_ART_CONFIG)))
+ # Disable dexpreopt for libraries/apps and for boot images: not having default
+ # ART config means that some important system properties are not set, which
+ # would result in passing bad arguments to dex2oat and failing the build.
ENABLE_PREOPT :=
+ ENABLE_PREOPT_BOOT_IMAGES :=
else ifeq (true,$(DISABLE_PREOPT))
+ # Disable dexpreopt for libraries/apps, but do compile boot images.
ENABLE_PREOPT :=
endif
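
A rough summary of what this chain produces, as a sketch (WITH_DEXPREOPT is normally set in a board/product config, and DISABLE_PREOPT is set internally for unbundled builds):

  # WITH_DEXPREOPT := false                  -> no dexpreopt, no boot images
  # PRODUCT_USES_DEFAULT_ART_CONFIG != true  -> no dexpreopt, no boot images
  # DISABLE_PREOPT := true                   -> apps/libraries skipped, boot images still compiled
  # otherwise                                -> ENABLE_PREOPT and ENABLE_PREOPT_BOOT_IMAGES stay true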
# The default value for LOCAL_DEX_PREOPT
-DEX_PREOPT_DEFAULT ?= true
+DEX_PREOPT_DEFAULT ?= $(ENABLE_PREOPT)
# The default filter for which files go into the system_other image (if it is
# being used). Note that each pattern p here matches both '/<p>' and /system/<p>'.
@@ -46,14 +54,6 @@
endif
endif
-# Use the first preloaded-classes file in PRODUCT_COPY_FILES.
-PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
- $(filter %system/etc/preloaded-classes,$(PRODUCT_COPY_FILES))))
-
-# Use the first dirty-image-objects file in PRODUCT_COPY_FILES.
-DIRTY_IMAGE_OBJECTS := $(call word-colon,1,$(firstword \
- $(filter %system/etc/dirty-image-objects,$(PRODUCT_COPY_FILES))))
-
# Get value of a property. It is first searched from PRODUCT_VENDOR_PROPERTIES
# and then falls back to PRODUCT_SYSTEM_PROPERTIES
# $1: name of the property
@@ -73,6 +73,7 @@
$(call json_start)
$(call add_json_bool, DisablePreopt, $(call invert_bool,$(ENABLE_PREOPT)))
+ $(call add_json_bool, DisablePreoptBootImages, $(call invert_bool,$(ENABLE_PREOPT_BOOT_IMAGES)))
$(call add_json_list, DisablePreoptModules, $(DEXPREOPT_DISABLED_MODULES))
$(call add_json_bool, OnlyPreoptBootImageAndSystemServer, $(filter true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY)))
$(call add_json_bool, UseArtImage, $(filter true,$(DEXPREOPT_USE_ART_IMAGE)))
@@ -108,6 +109,7 @@
$(call add_json_str, Dex2oatXms, $(DEX2OAT_XMS))
$(call add_json_str, EmptyDirectory, $(OUT_DIR)/empty)
+ifdef TARGET_ARCH
$(call add_json_map, CpuVariant)
$(call add_json_str, $(TARGET_ARCH), $(DEX2OAT_TARGET_CPU_VARIANT))
ifdef TARGET_2ND_ARCH
@@ -121,8 +123,8 @@
$(call add_json_str, $(TARGET_2ND_ARCH), $($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES))
endif
$(call end_json_map)
+endif
- $(call add_json_str, DirtyImageObjects, $(DIRTY_IMAGE_OBJECTS))
$(call add_json_list, BootImageProfiles, $(PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION))
$(call add_json_str, BootFlags, $(PRODUCT_DEX_PREOPT_BOOT_FLAGS))
$(call add_json_str, Dex2oatImageXmx, $(DEX2OAT_IMAGE_XMX))
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 799b623..b74e047 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -189,20 +189,43 @@
my_filtered_optional_uses_libraries := $(filter-out $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES), \
$(LOCAL_OPTIONAL_USES_LIBRARIES))
- # dexpreopt needs the paths to the dex jars of these libraries in order to
- # construct class loader context for dex2oat.
- my_extra_dexpreopt_libs := \
- org.apache.http.legacy \
+  # Compatibility libraries are added to the class loader context of an app only
+  # if the targetSdkVersion in the app's manifest is lower than the given SDK version.
+
+ my_dexpreopt_libs_compat_28 := \
+ org.apache.http.legacy
+
+ my_dexpreopt_libs_compat_29 := \
android.hidl.base-V1.0-java \
- android.hidl.manager-V1.0-java \
+ android.hidl.manager-V1.0-java
+
+ my_dexpreopt_libs_compat_30 := \
android.test.base \
+ android.test.mock
+
+ my_dexpreopt_libs_compat := \
+ $(my_dexpreopt_libs_compat_28) \
+ $(my_dexpreopt_libs_compat_29) \
+ $(my_dexpreopt_libs_compat_30)
my_dexpreopt_libs := $(sort \
$(LOCAL_USES_LIBRARIES) \
$(my_filtered_optional_uses_libraries) \
- $(my_extra_dexpreopt_libs) \
)
+ # 1: SDK version
+ # 2: list of libraries
+ add_json_class_loader_context = \
+ $(call add_json_map, $(1)) \
+ $(foreach lib, $(2),\
+ $(call add_json_map, $(lib)) \
+ $(eval file := $(filter %/$(lib).jar, $(call module-installed-files,$(lib)))) \
+ $(call add_json_str, Host, $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar) \
+ $(call add_json_str, Device, $(call install-path-to-on-device-path,$(file))) \
+ $(call add_json_map, Subcontexts, ${$}) $(call end_json_map) \
+ $(call end_json_map)) \
+ $(call end_json_map)
+
# Record dex-preopt config.
DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
DEXPREOPT.$(LOCAL_MODULE).MULTILIB := $(LOCAL_MULTILIB)
@@ -230,15 +253,12 @@
$(call add_json_str, ProfileClassListing, $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE)))
$(call add_json_bool, ProfileIsTextListing, $(my_profile_is_text_listing))
$(call add_json_bool, EnforceUsesLibraries, $(LOCAL_ENFORCE_USES_LIBRARIES))
- $(call add_json_list, OptionalUsesLibraries, $(my_filtered_optional_uses_libraries))
- $(call add_json_list, UsesLibraries, $(LOCAL_USES_LIBRARIES))
- $(call add_json_map, LibraryPaths)
- $(foreach lib,$(my_dexpreopt_libs),\
- $(call add_json_map, $(lib)) \
- $(eval file := $(filter %/$(lib).jar, $(call module-installed-files,$(lib)))) \
- $(call add_json_str, Host, $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar) \
- $(call add_json_str, Device, $(call install-path-to-on-device-path,$(file))) \
- $(call end_json_map))
+ $(call add_json_str, ProvidesUsesLibrary, $(firstword $(LOCAL_PROVIDES_USES_LIBRARY) $(LOCAL_MODULE)))
+ $(call add_json_map, ClassLoaderContexts)
+ $(call add_json_class_loader_context, any, $(my_dexpreopt_libs))
+ $(call add_json_class_loader_context, 28, $(my_dexpreopt_libs_compat_28))
+ $(call add_json_class_loader_context, 29, $(my_dexpreopt_libs_compat_29))
+ $(call add_json_class_loader_context, 30, $(my_dexpreopt_libs_compat_30))
$(call end_json_map)
$(call add_json_list, Archs, $(my_dexpreopt_archs))
$(call add_json_list, DexPreoptImages, $(my_dexpreopt_images))
@@ -280,7 +300,7 @@
my_dexpreopt_deps := $(my_dex_jar)
my_dexpreopt_deps += $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE))
my_dexpreopt_deps += \
- $(foreach lib, $(my_dexpreopt_libs), \
+ $(foreach lib, $(my_dexpreopt_libs) $(my_dexpreopt_libs_compat), \
$(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
my_dexpreopt_deps += $(my_dexpreopt_images_deps)
my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
diff --git a/core/dumpvar.mk b/core/dumpvar.mk
index b2ee8fd..6b5c030 100644
--- a/core/dumpvar.mk
+++ b/core/dumpvar.mk
@@ -24,9 +24,14 @@
# Input variables:
# DUMP_MANY_VARS: the list of variable names.
# DUMP_VAR_PREFIX: an optional prefix of the variable name added to the output.
+# The value is printed in parts because large variables like PRODUCT_PACKAGES
+# can exceed the maximum Linux command-line length.
.PHONY: dump-many-vars
dump-many-vars :
@$(foreach v, $(DUMP_MANY_VARS),\
- printf "%s='%s'\n" '$(DUMP_VAR_PREFIX)$(v)' '$($(v))';)
+ printf "%s='%s" '$(DUMP_VAR_PREFIX)$(v)' '$(firstword $($(v)))'; \
+ $(foreach part, $(wordlist 2, $(words $($(v))), $($(v))),\
+ printf " %s" '$(part)'$(newline))\
+ printf "'\n";)
endif # CALLED_FROM_SETUP
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 167fed9..a5571ae 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -148,15 +148,25 @@
# BUILD_OS is the real host doing the build.
BUILD_OS := $(HOST_OS)
-HOST_CROSS_OS :=
-# We can cross-build Windows binaries on Linux
+# We can do the cross-build only on Linux
ifeq ($(HOST_OS),linux)
-ifeq ($(BUILD_HOST_static),)
-HOST_CROSS_OS := windows
-HOST_CROSS_ARCH := x86
-HOST_CROSS_2ND_ARCH := x86_64
-2ND_HOST_CROSS_IS_64_BIT := true
-endif
+ # Windows has been the default host_cross OS
+ ifeq (,$(filter-out windows,$(HOST_CROSS_OS)))
+ # We can only create static host binaries for Linux, so if static host
+ # binaries are requested, turn off Windows cross-builds.
+ ifeq ($(BUILD_HOST_static),)
+ HOST_CROSS_OS := windows
+ HOST_CROSS_ARCH := x86
+ HOST_CROSS_2ND_ARCH := x86_64
+ 2ND_HOST_CROSS_IS_64_BIT := true
+ endif
+ else ifeq ($(HOST_CROSS_OS),linux_bionic)
+ ifeq (,$(HOST_CROSS_ARCH))
+ $(error HOST_CROSS_ARCH missing.)
+ endif
+ else
+ $(error Unsupported HOST_CROSS_OS $(HOST_CROSS_OS))
+ endif
endif
ifeq ($(HOST_OS),)
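
A sketch of selecting the Bionic host-cross configuration handled above (how these variables are injected, e.g. from the environment, is outside this patch):

  HOST_CROSS_OS := linux_bionic
  HOST_CROSS_ARCH := arm64    # required; the build errors out if it is missing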
@@ -328,7 +338,7 @@
HOST_OUT := $(HOST_OUT_ROOT)/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
SOONG_HOST_OUT := $(SOONG_OUT_DIR)/host/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
-HOST_CROSS_OUT := $(HOST_OUT_ROOT)/windows-$(HOST_PREBUILT_ARCH)
+HOST_CROSS_OUT := $(HOST_OUT_ROOT)/$(HOST_CROSS_OS)-$(HOST_CROSS_ARCH)
.KATI_READONLY := HOST_OUT SOONG_HOST_OUT HOST_CROSS_OUT
diff --git a/core/java_host_unit_test_config_template.xml b/core/java_host_unit_test_config_template.xml
new file mode 100644
index 0000000..ff300da
--- /dev/null
+++ b/core/java_host_unit_test_config_template.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- This test config file is auto-generated. -->
+<configuration description="Runs {MODULE}">
+ <option name="test-suite-tag" value="apct" />
+ <option name="test-suite-tag" value="apct-unit-tests" />
+
+ {EXTRA_CONFIGS}
+
+ <test class="com.android.tradefed.testtype.IsolatedHostTest" >
+ <option name="jar" value="{MODULE}.jar" />
+ </test>
+</configuration>
diff --git a/core/line_coverage.mk b/core/line_coverage.mk
deleted file mode 100644
index 6bfbb8d..0000000
--- a/core/line_coverage.mk
+++ /dev/null
@@ -1,94 +0,0 @@
-# -----------------------------------------------------------------
-# Make target for line coverage. This target generates a zip file
-# called `line_coverage_profiles.zip` that contains a large set of
-# zip files one for each fuzz target/critical component. Each zip
-# file contains a set of profile files (*.gcno) that we will use
-# to generate line coverage reports. Furthermore, target compiles
-# all fuzz targets with line coverage instrumentation enabled and
-# packs them into another zip file called `line_coverage_profiles.zip`.
-#
-# To run the make target set the coverage related envvars first:
-# NATIVE_COVERAGE=true NATIVE_COVERAGE_PATHS=* make haiku-line-coverage
-# -----------------------------------------------------------------
-
-# TODO(b/148306195): Due this issue some fuzz targets cannot be built with
-# line coverage instrumentation. For now we just block them.
-blocked_fuzz_targets := libneuralnetworks_fuzzer
-
-fuzz_targets := $(ALL_FUZZ_TARGETS)
-fuzz_targets := $(filter-out $(blocked_fuzz_targets),$(fuzz_targets))
-
-
-# Android components that considered critical.
-# Please note that adding/Removing critical components is very rare.
-critical_components_static := \
- lib-bt-packets \
- libbt-stack \
- libffi \
- libhevcdec \
- libhevcenc \
- libmpeg2dec \
- libosi \
- libpdx \
- libselinux \
- libvold \
- libyuv
-
-# Format is <module_name> or <module_name>:<apex_name>
-critical_components_shared := \
- libaudioprocessing \
- libbinder \
- libbluetooth_gd \
- libbrillo \
- libcameraservice \
- libcurl \
- libhardware \
- libinputflinger \
- libopus \
- libstagefright \
- libvixl:com.android.art.debug
-
-# Use the intermediates directory to avoid installing libraries to the device.
-intermediates := $(call intermediates-dir-for,PACKAGING,haiku-line-coverage)
-
-
-# We want the profile files for all fuzz targets + critical components.
-line_coverage_profiles := $(intermediates)/line_coverage_profiles.zip
-
-critical_components_static_inputs := $(foreach lib,$(critical_components_static), \
- $(call intermediates-dir-for,STATIC_LIBRARIES,$(lib))/$(lib).a)
-
-critical_components_shared_inputs := $(foreach lib,$(critical_components_shared), \
- $(eval filename := $(call word-colon,1,$(lib))) \
- $(eval modulename := $(subst :,.,$(lib))) \
- $(call intermediates-dir-for,SHARED_LIBRARIES,$(modulename))/$(filename).so)
-
-fuzz_target_inputs := $(foreach fuzz,$(fuzz_targets), \
- $(call intermediates-dir-for,EXECUTABLES,$(fuzz))/$(fuzz))
-
-# When coverage is enabled (NATIVE_COVERAGE is set), make creates
-# a "coverage" directory and stores all profile (*.gcno) files in inside.
-# We need everything that is stored inside this directory.
-$(line_coverage_profiles): $(fuzz_target_inputs)
-$(line_coverage_profiles): $(critical_components_static_inputs)
-$(line_coverage_profiles): $(critical_components_shared_inputs)
-$(line_coverage_profiles): $(SOONG_ZIP)
- $(SOONG_ZIP) -o $@ -D $(PRODUCT_OUT)/coverage
-
-
-# Zip all fuzz targets compiled with line coverage.
-line_coverage_fuzz_targets := $(intermediates)/line_coverage_fuzz_targets.zip
-
-$(line_coverage_fuzz_targets): $(fuzz_target_inputs)
-$(line_coverage_fuzz_targets): $(SOONG_ZIP)
- $(SOONG_ZIP) -o $@ -j $(addprefix -f ,$(fuzz_target_inputs))
-
-
-.PHONY: haiku-line-coverage
-haiku-line-coverage: $(line_coverage_profiles) $(line_coverage_fuzz_targets)
-$(call dist-for-goals, haiku-line-coverage, \
- $(line_coverage_profiles):line_coverage_profiles.zip \
- $(line_coverage_fuzz_targets):line_coverage_fuzz_targets.zip)
-
-line_coverage_profiles :=
-line_coverage_fuzz_targets :=
diff --git a/core/local_current_sdk.mk b/core/local_current_sdk.mk
new file mode 100644
index 0000000..ea7da8a
--- /dev/null
+++ b/core/local_current_sdk.mk
@@ -0,0 +1,26 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+ifdef BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
+ ifneq (current,$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
+ ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+ ifeq (current,$(LOCAL_SDK_VERSION))
+ LOCAL_SDK_VERSION := $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+ else ifeq (system_current,$(LOCAL_SDK_VERSION))
+ LOCAL_SDK_VERSION := system_$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+ endif
+ endif
+ endif
+endif
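
The effect on a module, as a hypothetical Android.mk sketch: with BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES := 30, the declaration below is rewritten before the module is processed.

  LOCAL_MODULE := libexample_vendor     # made-up module name
  LOCAL_VENDOR_MODULE := true
  LOCAL_SDK_VERSION := current          # effectively becomes 30
  # LOCAL_SDK_VERSION := system_current would become system_30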
diff --git a/core/main.mk b/core/main.mk
index a3d594b..5ea95c8 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -81,6 +81,8 @@
-include test/mts/tools/build/config.mk
# VTS-Core-specific config.
-include test/vts/tools/vts-core-tradefed/build/config.mk
+# CSUITE-specific config.
+-include test/app_compat/csuite/tools/build/config.mk
# Clean rules
.PHONY: clean-dex-files
@@ -113,6 +115,11 @@
endif
endif
+ifdef TARGET_ARCH_SUITE
+ # TODO(b/175577370): Enable this error.
+ # $(error TARGET_ARCH_SUITE is not supported in kati/make builds)
+endif
+
# ADDITIONAL_<partition>_PROPERTIES are properties that are determined by the
# build system itself. Don't let it be defined from outside of the core build
# system like Android.mk or <product>.mk files.
@@ -191,11 +198,7 @@
# Sets ro.actionable_compatible_property.enabled to know on runtime whether the
# allowed list of actionable compatible properties is enabled or not.
-ifeq ($(PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE),true)
-ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=false
-else
-ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=${PRODUCT_COMPATIBLE_PROPERTY}
-endif
+ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=true
# Add the system server compiler filter if they are specified for the product.
ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
@@ -282,9 +285,6 @@
ADDITIONAL_VENDOR_PROPERTIES += \
ro.vendor.build.security_patch=$(VENDOR_SECURITY_PATCH) \
- ro.vendor.product.cpu.abilist=$(TARGET_CPU_ABI_LIST) \
- ro.vendor.product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT) \
- ro.vendor.product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT) \
ro.product.board=$(TARGET_BOOTLOADER_BOARD_NAME) \
ro.board.platform=$(TARGET_BOARD_PLATFORM) \
ro.hwui.use_vulkan=$(TARGET_USES_VULKAN)
@@ -299,11 +299,6 @@
ro.build.ab_update=$(AB_OTA_UPDATER)
endif
-ADDITIONAL_ODM_PROPERTIES += \
- ro.odm.product.cpu.abilist=$(TARGET_CPU_ABI_LIST) \
- ro.odm.product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT) \
- ro.odm.product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)
-
# Set ro.product.vndk.version to know the VNDK version required by product
# modules. It uses the version in PRODUCT_PRODUCT_VNDK_VERSION. If the value
# is "current", use PLATFORM_VNDK_VERSION.
@@ -419,7 +414,7 @@
sdk_repo_goal := $(strip $(filter sdk_repo,$(MAKECMDGOALS)))
MAKECMDGOALS := $(strip $(filter-out sdk_repo,$(MAKECMDGOALS)))
-ifneq ($(words $(sort $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests target-files-package,$(MAKECMDGOALS)))),1)
+ifneq ($(words $(sort $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests,$(MAKECMDGOALS)))),1)
$(error The 'sdk' target may not be specified with any other targets)
endif
@@ -606,8 +601,8 @@
$(eval modules_32 := $(patsubst %:32,%,$(filter %:32,$(2)))) \
$(eval modules_64 := $(patsubst %:64,%,$(filter %:64,$(2)))) \
$(eval modules_both := $(filter-out %:32 %:64,$(2))) \
- $(eval ### For host cross modules, the primary arch is windows x86 and secondary is x86_64) \
- $(if $(filter HOST_CROSS,$(1)), \
+ $(eval ### if 2ND_HOST_CROSS_IS_64_BIT, then primary/secondary are reversed for HOST_CROSS modules) \
+ $(if $(filter HOST_CROSS_true,$(1)_$(2ND_HOST_CROSS_IS_64_BIT)), \
$(eval modules_1st_arch := $(modules_32)) \
$(eval modules_2nd_arch := $(modules_64)), \
$(eval modules_1st_arch := $(modules_64)) \
@@ -697,10 +692,23 @@
$(call select-bitness-of-target-host-required-modules,HOST,TARGET)
_nonexistent_required := $(sort $(_nonexistent_required))
+check_missing_required_modules := true
+ifneq (,$(filter true,$(ALLOW_MISSING_DEPENDENCIES) $(BUILD_BROKEN_MISSING_REQUIRED_MODULES)))
+ check_missing_required_modules :=
+endif # ALLOW_MISSING_DEPENDENCIES == true || BUILD_BROKEN_MISSING_REQUIRED_MODULES == true
+
+# Some executables are skipped in ASAN SANITIZE_TARGET build, thus breaking their dependencies.
+ifneq (,$(filter address,$(SANITIZE_TARGET)))
+ check_missing_required_modules :=
+endif # SANITIZE_TARGET has ASAN
+
# HOST OS darwin build is broken, disable this check for darwin for now.
-# TODO(b/162102724): Remove this
-ifeq (,$(filter $(HOST_OS),darwin))
-ifeq (,$(filter true,$(ALLOW_MISSING_DEPENDENCIES) $(BUILD_BROKEN_MISSING_REQUIRED_MODULES)))
+# TODO(b/162102724): Remove this when darwin host has no broken dependency.
+ifneq (,$(filter $(HOST_OS),darwin))
+ check_missing_required_modules :=
+endif # HOST_OS == darwin
+
+ifeq (true,$(check_missing_required_modules))
ifneq (,$(_nonexistent_required))
$(warning Missing required dependencies:)
$(foreach r_i,$(_nonexistent_required), \
@@ -710,8 +718,7 @@
$(warning Set BUILD_BROKEN_MISSING_REQUIRED_MODULES := true to bypass this check if this is intentional)
$(error Build failed)
endif # _nonexistent_required != empty
-endif # ALLOW_MISSING_DEPENDENCIES != true && BUILD_BROKEN_MISSING_REQUIRED_MODULES != true
-endif # HOST_OS != darwin
+endif # check_missing_required_modules == true
define add-required-deps
$(1): | $(2)
@@ -734,7 +741,7 @@
$(eval r := $(call module-installed-files,$(r))) \
$(eval h_m := $(filter $(HOST_OUT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
$(eval h_r := $(filter $(HOST_OUT)/%, $(r))) \
- $(eval h_m := $(filter-out $(h_r), $(h_m))) \
+ $(eval h_r := $(filter-out $(h_m), $(h_r))) \
$(if $(h_m), $(eval $(call add-required-deps, $(h_m),$(h_r)))) \
) \
)
@@ -750,7 +757,7 @@
$(eval r := $(call module-installed-files,$(r))) \
$(eval hc_m := $(filter $(HOST_CROSS_OUT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
$(eval hc_r := $(filter $(HOST_CROSS_OUT)/%, $(r))) \
- $(eval hc_m := $(filter-out $(hc_r), $(hc_m))) \
+ $(eval hc_r := $(filter-out $(hc_m), $(hc_r))) \
$(if $(hc_m), $(eval $(call add-required-deps, $(hc_m),$(hc_r)))) \
) \
)
@@ -766,7 +773,7 @@
$(eval r := $(call module-installed-files,$(r))) \
$(eval t_m := $(filter $(TARGET_OUT_ROOT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
$(eval t_r := $(filter $(TARGET_OUT_ROOT)/%, $(r))) \
- $(eval t_m := $(filter-out $(t_r), $(t_m))) \
+ $(eval t_r := $(filter-out $(t_m), $(t_r))) \
$(if $(t_m), $(eval $(call add-required-deps, $(t_m),$(t_r)))) \
) \
)
@@ -793,7 +800,6 @@
)\
$(eval req_files := $(strip $(req_files)))\
$(eval mod_files := $(filter $(HOST_OUT)/%, $(call module-installed-files,$(m)))) \
- $(eval mod_files := $(filter-out $(req_files),$(mod_files)))\
$(if $(mod_files),\
$(eval $(call add-required-deps, $(mod_files),$(req_files))) \
)\
@@ -822,7 +828,6 @@
)\
$(eval req_files := $(strip $(req_files)))\
$(eval mod_files := $(filter $(TARGET_OUT_ROOT)/%, $(call module-installed-files,$(m))))\
- $(eval mod_files := $(filter-out $(req_files),$(mod_files)))\
$(if $(mod_files),\
$(eval $(call add-required-deps, $(mod_files),$(req_files))) \
)\
@@ -881,7 +886,7 @@
# Scan all modules in general-tests, device-tests and other selected suites and
# flatten the shared library dependencies.
define update-host-shared-libs-deps-for-suites
-$(foreach suite,general-tests device-tests vts,\
+$(foreach suite,general-tests device-tests vts art-host-tests host-unit-tests,\
$(foreach m,$(COMPATIBILITY.$(suite).MODULES),\
$(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
$(foreach dep,$(my_deps),\
@@ -1107,7 +1112,11 @@
# Expand a list of modules to the modules that they override (if any)
# $(1): The list of modules.
define module-overrides
-$(foreach m,$(1),$(PACKAGES.$(m).OVERRIDES) $(EXECUTABLES.$(m).OVERRIDES) $(SHARED_LIBRARIES.$(m).OVERRIDES) $(ETC.$(m).OVERRIDES))
+$(foreach m,$(1),\
+ $(eval _mo_overrides := $(PACKAGES.$(m).OVERRIDES) $(EXECUTABLES.$(m).OVERRIDES) $(SHARED_LIBRARIES.$(m).OVERRIDES) $(ETC.$(m).OVERRIDES))\
+ $(if $(filter $(m),$(_mo_overrides)),\
+ $(error Module $(m) cannot override itself),\
+ $(_mo_overrides)))
endef
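
A hypothetical module definition that trips the new self-override check:

  LOCAL_PACKAGE_NAME := ExampleApp
  LOCAL_OVERRIDES_PACKAGES := ExampleApp OtherApp
  # -> error: "Module ExampleApp cannot override itself"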
###########################################################
@@ -1291,67 +1300,7 @@
# Verify the artifact path requirements made by included products.
is_asan := $(if $(filter address,$(SANITIZE_TARGET)),true)
ifneq (true,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS)))
- # Fakes don't get installed, and NDK stubs aren't installed to device.
- static_allowed_patterns := $(TARGET_OUT_FAKE)/% $(SOONG_OUT_DIR)/ndk/%
- # RROs become REQUIRED by the source module, but are always placed on the vendor partition.
- static_allowed_patterns += %__auto_generated_rro_product.apk
- static_allowed_patterns += %__auto_generated_rro_vendor.apk
- # Auto-included targets are not considered
- static_allowed_patterns += $(call product-installed-files,)
- # $(PRODUCT_OUT)/apex is where shared libraries in APEXes get installed.
- # The path can be considered as a fake path, as the shared libraries
- # are installed there just to have symbols files for them under
- # $(PRODUCT_OUT)/symbols/apex for debugging purpose. The /apex directory
- # is never compiled into a filesystem image.
- static_allowed_patterns += $(PRODUCT_OUT)/apex/%
- ifeq (true,$(BOARD_USES_SYSTEM_OTHER_ODEX))
- # Allow system_other odex space optimization.
- static_allowed_patterns += \
- $(TARGET_OUT_SYSTEM_OTHER)/%.odex \
- $(TARGET_OUT_SYSTEM_OTHER)/%.vdex \
- $(TARGET_OUT_SYSTEM_OTHER)/%.art
- endif
-
-CERTIFICATE_VIOLATION_MODULES_FILENAME := $(PRODUCT_OUT)/certificate_violation_modules.txt
-$(CERTIFICATE_VIOLATION_MODULES_FILENAME):
- rm -f $@
- $(foreach m,$(sort $(CERTIFICATE_VIOLATION_MODULES)), echo $(m) >> $@;)
-$(call dist-for-goals,droidcore,$(CERTIFICATE_VIOLATION_MODULES_FILENAME))
-
- all_offending_files :=
- $(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
- $(eval requirements := $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENTS)) \
- $(eval ### Verify that the product only produces files inside its path requirements.) \
- $(eval allowed := $(PRODUCTS.$(makefile).ARTIFACT_PATH_ALLOWED_LIST)) \
- $(eval path_patterns := $(call resolve-product-relative-paths,$(requirements),%)) \
- $(eval allowed_patterns := $(call resolve-product-relative-paths,$(allowed))) \
- $(eval files := $(call product-installed-files, $(makefile))) \
- $(eval offending_files := $(filter-out $(path_patterns) $(allowed_patterns) $(static_allowed_patterns),$(files))) \
- $(call maybe-print-list-and-error,$(offending_files),\
- $(makefile) produces files outside its artifact path requirement. \
- Allowed paths are $(subst $(space),$(comma)$(space),$(addsuffix *,$(requirements)))) \
- $(eval unused_allowed := $(filter-out $(files),$(allowed_patterns))) \
- $(call maybe-print-list-and-error,$(unused_allowed),$(makefile) includes redundant allowed entries in its artifact path requirement.) \
- $(eval ### Optionally verify that nothing else produces files inside this artifact path requirement.) \
- $(eval extra_files := $(filter-out $(files) $(HOST_OUT)/%,$(product_target_FILES))) \
- $(eval files_in_requirement := $(filter $(path_patterns),$(extra_files))) \
- $(eval all_offending_files += $(files_in_requirement)) \
- $(eval allowed := $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST)) \
- $(eval allowed_patterns := $(call resolve-product-relative-paths,$(allowed))) \
- $(eval offending_files := $(filter-out $(allowed_patterns),$(files_in_requirement))) \
- $(eval enforcement := $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS)) \
- $(if $(enforcement),\
- $(call maybe-print-list-and-error,$(offending_files),\
- $(INTERNAL_PRODUCT) produces files inside $(makefile)s artifact path requirement. \
- $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT)) \
- $(eval unused_allowed := $(if $(filter true strict,$(enforcement)),\
- $(foreach p,$(allowed_patterns),$(if $(filter $(p),$(extra_files)),,$(p))))) \
- $(call maybe-print-list-and-error,$(unused_allowed),$(INTERNAL_PRODUCT) includes redundant artifact path requirement allowed list entries.) \
- ) \
- )
-$(PRODUCT_OUT)/offending_artifacts.txt:
- rm -f $@
- $(foreach f,$(sort $(all_offending_files)),echo $(f) >> $@;)
+ include $(BUILD_SYSTEM)/artifact_path_requirements.mk
endif
else
# We're not doing a full build, and are probably only including
@@ -1394,6 +1343,10 @@
test_files :=
endif
+# Deduplicate compatibility suite dist files across modules and packages before
+# copying them to their requested locations. Assign the eval result to an unused
+# var to prevent Make from trying to make sense of it.
+_unused := $(call copy-many-files, $(sort $(ALL_COMPATIBILITY_DIST_FILES)))
# Don't include any GNU General Public License shared objects or static
# libraries in SDK images. GPL executables (not static/dynamic libraries)
@@ -1447,6 +1400,17 @@
ALL_DEFAULT_INSTALLED_MODULES :=
+# Some notice deps refer to module names without a prefix or arch suffix even
+# though only the adorned variants get built.
+# fix-notice-deps replaces those unadorned module names with every built variant.
+$(call fix-notice-deps)
+
+# Create a license metadata rule per module. This could happen in base_rules.mk
+# or notice_files.mk, except that it has to happen after fix-notice-deps to
+# avoid missing dependency errors.
+$(call build-license-metadata)
+
+
# These are additional goals that we build, in order to make sure that there
# is as little code as possible in the tree that doesn't build.
modules_to_check := $(foreach m,$(ALL_MODULES),$(ALL_MODULES.$(m).CHECKED))
@@ -1499,9 +1463,6 @@
.PHONY: ramdisk_test_harness
ramdisk_test_harness: $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
-.PHONY: vendor_ramdisk_debug
-vendor_ramdisk_debug: $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
-
.PHONY: userdataimage
userdataimage: $(INSTALLED_USERDATAIMAGE_TARGET)
@@ -1581,7 +1542,6 @@
$(INSTALLED_BPTIMAGE_TARGET) \
$(INSTALLED_VENDORIMAGE_TARGET) \
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
$(INSTALLED_ODMIMAGE_TARGET) \
$(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
@@ -1609,6 +1569,8 @@
$(INSTALLED_FILES_JSON_RAMDISK) \
$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
$(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_FILE_VENDOR_RAMDISK) \
+ $(INSTALLED_FILES_JSON_VENDOR_RAMDISK) \
$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
$(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
$(INSTALLED_FILES_FILE_ROOT) \
@@ -1621,6 +1583,11 @@
# dist_files only for putting your library into the dist directory with a full build.
.PHONY: dist_files
+ifeq ($(SOONG_COLLECT_JAVA_DEPS), true)
+ $(call dist-for-goals, dist_files, $(SOONG_OUT_DIR)/module_bp_java_deps.json)
+ $(call dist-for-goals, dist_files, $(PRODUCT_OUT)/module-info.json)
+endif
+
.PHONY: apps_only
ifneq ($(TARGET_BUILD_APPS),)
# If this build is just for apps, only build apps and not the full system by default.
@@ -1666,6 +1633,9 @@
$(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
$(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP))
+ $(PROGUARD_USAGE_ZIP) : $(apps_only_installed_files)
+ $(call dist-for-goals,apps_only, $(PROGUARD_USAGE_ZIP))
+
$(SYMBOLS_ZIP) : $(apps_only_installed_files)
$(call dist-for-goals,apps_only, $(SYMBOLS_ZIP))
@@ -1690,10 +1660,12 @@
$(INTERNAL_UPDATE_PACKAGE_TARGET) \
$(INTERNAL_OTA_PACKAGE_TARGET) \
$(INTERNAL_OTA_METADATA) \
+ $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET) \
$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET) \
$(BUILT_OTATOOLS_PACKAGE) \
$(SYMBOLS_ZIP) \
$(PROGUARD_DICT_ZIP) \
+ $(PROGUARD_USAGE_ZIP) \
$(COVERAGE_ZIP) \
$(APPCOMPAT_ZIP) \
$(INSTALLED_FILES_FILE) \
@@ -1719,6 +1691,7 @@
$(INSTALLED_PRODUCT_BUILD_PROP_TARGET):build.prop-product \
$(INSTALLED_ODM_BUILD_PROP_TARGET):build.prop-odm \
$(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET):build.prop-system_ext \
+ $(INSTALLED_RAMDISK_BUILD_PROP_TARGET):build.prop-ramdisk \
$(BUILT_TARGET_FILES_PACKAGE) \
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
$(INSTALLED_MISC_INFO_TARGET) \
@@ -1747,11 +1720,12 @@
$(INSTALLED_FILES_JSON_RAMDISK) \
$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
$(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+ $(INSTALLED_FILES_FILE_VENDOR_RAMDISK) \
+ $(INSTALLED_FILES_JSON_VENDOR_RAMDISK) \
$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
$(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
$(INSTALLED_DEBUG_RAMDISK_TARGET) \
$(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
)
$(call dist-for-goals, bootimage_test_harness, \
@@ -1767,16 +1741,17 @@
endif
ifeq ($(EMMA_INSTRUMENT),true)
- $(JACOCO_REPORT_CLASSES_ALL) : $(modules_to_install)
$(call dist-for-goals, dist_files, $(JACOCO_REPORT_CLASSES_ALL))
endif
# Put XML formatted API files in the dist dir.
$(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml: $(call java-lib-files,android_stubs_current) $(APICHECK)
$(TARGET_OUT_COMMON_INTERMEDIATES)/system-api.xml: $(call java-lib-files,android_system_stubs_current) $(APICHECK)
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/module-lib-api.xml: $(call java-lib-files,android_module_lib_stubs_current) $(APICHECK)
+ $(TARGET_OUT_COMMON_INTERMEDIATES)/system-server-api.xml: $(call java-lib-files,android_system_server_stubs_current) $(APICHECK)
$(TARGET_OUT_COMMON_INTERMEDIATES)/test-api.xml: $(call java-lib-files,android_test_stubs_current) $(APICHECK)
- api_xmls := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/,api.xml system-api.xml test-api.xml)
+ api_xmls := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/,api.xml system-api.xml module-lib-api.xml system-server-api.xml test-api.xml)
$(api_xmls):
$(hide) echo "Converting API file to XML: $@"
$(hide) mkdir -p $(dir $@)
@@ -1869,6 +1844,11 @@
ndk: $(SOONG_OUT_DIR)/ndk.timestamp
.PHONY: ndk
+# Checks that build/soong/apex/allowed_deps.txt remains up to date
+ifneq ($(UNSAFE_DISABLE_APEX_ALLOWED_DEPS_CHECK),true)
+ droidcore: ${APEX_ALLOWED_DEPS_CHECK}
+endif
+
$(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
diff --git a/core/native_test_config_template.xml b/core/native_test_config_template.xml
index ef1818f..ea982cf 100644
--- a/core/native_test_config_template.xml
+++ b/core/native_test_config_template.xml
@@ -22,11 +22,11 @@
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="true" />
- <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+ <option name="push" value="{MODULE}->{TEST_INSTALL_BASE}/{MODULE}" />
</target_preparer>
<test class="com.android.tradefed.testtype.GTest" >
- <option name="native-test-device-path" value="/data/local/tmp" />
+ <option name="native-test-device-path" value="{TEST_INSTALL_BASE}" />
<option name="module-name" value="{MODULE}" />
</test>
</configuration>
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 4d1009f..2e1bd69 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -35,12 +35,10 @@
sdk_addon \
sdk_repo \
stnod \
- target-files-package \
test-art% \
user \
userdataimage \
userdebug \
- vts10 \
win_sdk \
winsdk-tools
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 721a034..89f822b 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -9,6 +9,32 @@
notice_file:=$(strip $(wildcard $(LOCAL_PATH)/LICENSE $(LOCAL_PATH)/LICENCE $(LOCAL_PATH)/NOTICE))
endif
+ifneq (,$(strip $(LOCAL_LICENSE_PACKAGE_NAME)))
+license_package_name:=$(strip $(LOCAL_LICENSE_PACKAGE_NAME))
+else ifdef my_register_name
+license_package_name:=$(my_register_name)
+else
+license_package_name:=$(strip $(LOCAL_MODULE))
+endif
+
+ifneq (,$(strip $(LOCAL_LICENSE_INSTALL_MAP)))
+install_map:=$(strip $(LOCAL_LICENSE_INSTALL_MAP))
+else
+install_map:=
+endif
+
+ifneq (,$(strip $(LOCAL_LICENSE_KINDS)))
+license_kinds:=$(strip $(LOCAL_LICENSE_KINDS))
+else
+license_kinds:=legacy_by_exception_only
+endif
+
+ifneq (,$(strip $(LOCAL_LICENSE_CONDITIONS)))
+license_conditions:=$(strip $(LOCAL_LICENSE_CONDITIONS))
+else
+license_conditions:=by_exception_only
+endif
+
ifeq ($(LOCAL_MODULE_CLASS),GYP)
# We ignore NOTICE files for modules of type GYP.
notice_file :=
@@ -40,10 +66,64 @@
installed_notice_file :=
+is_container:=$(strip $(LOCAL_MODULE_IS_CONTAINER))
+ifeq (,$(is_container))
+ifneq (,$(strip $(filter %.zip %.tar %.tgz %.tar.gz %.apk %.img %.srcszip %.apex, $(LOCAL_BUILT_MODULE))))
+is_container:=true
+else
+is_container:=false
+endif
+else ifneq (,$(strip $(filter-out true false,$(is_container))))
+$(error Unrecognized value '$(is_container)' for LOCAL_MODULE_IS_CONTAINER)
+endif
+
+ifeq (true,$(is_container))
+# Include shared libraries' notices for "container" types, but not for binaries etc.
+notice_deps := \
+ $(sort \
+ $(LOCAL_REQUIRED_MODULES) \
+ $(LOCAL_STATIC_LIBRARIES) \
+ $(LOCAL_WHOLE_STATIC_LIBRARIES) \
+ $(LOCAL_SHARED_LIBRARIES) \
+ $(LOCAL_DYLIB_LIBRARIES) \
+ $(LOCAL_RLIB_LIBRARIES) \
+ $(LOCAL_PROC_MACRO_LIBRARIES) \
+ $(LOCAL_HEADER_LIBRARIES) \
+ $(LOCAL_STATIC_JAVA_LIBRARIES) \
+ $(LOCAL_JAVA_LIBRARIES) \
+ $(LOCAL_JNI_SHARED_LIBRARIES) \
+ )
+else
+notice_deps := \
+ $(sort \
+ $(LOCAL_REQUIRED_MODULES) \
+ $(LOCAL_STATIC_LIBRARIES) \
+ $(LOCAL_WHOLE_STATIC_LIBRARIES) \
+ $(LOCAL_RLIB_LIBRARIES) \
+ $(LOCAL_PROC_MACRO_LIBRARIES) \
+ $(LOCAL_HEADER_LIBRARIES) \
+ $(LOCAL_STATIC_JAVA_LIBRARIES) \
+ )
+endif
+ifeq ($(LOCAL_IS_HOST_MODULE),true)
+notice_deps := $(sort $(notice_deps) $(LOCAL_HOST_REQUIRED_MODULES))
+else
+notice_deps := $(sort $(notice_deps) $(LOCAL_TARGET_REQUIRED_MODULES))
+endif
+
+ifdef my_register_name
+ALL_MODULES.$(my_register_name).LICENSE_PACKAGE_NAME := $(strip $(license_package_name))
+ALL_MODULES.$(my_register_name).LICENSE_KINDS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_KINDS) $(license_kinds))
+ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS) $(license_conditions))
+ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP) $(install_map))
+ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(sort $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps))
+ALL_MODULES.$(my_register_name).IS_CONTAINER := $(sort $(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container))
+endif
+
ifdef notice_file
ifdef my_register_name
-ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
+ALL_MODULES.$(my_register_name).NOTICES := $(sort $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file))
endif
# This relies on the name of the directory in PRODUCT_OUT matching where
@@ -79,15 +159,14 @@
endif
module_installed_filename := \
$(patsubst $(PRODUCT_OUT)/%,%,$($(my_prefix)OUT_JAVA_LIBRARIES))/$(module_leaf)
- else ifeq ($(LOCAL_MODULE_CLASS),ETC)
- # ETC modules may be uninstallable, yet still have a NOTICE file. e.g. apex components
+ else ifneq ($(filter ETC DATA,$(LOCAL_MODULE_CLASS)),)
+ # ETC and DATA modules may be uninstallable, yet still have a NOTICE file.
+ # e.g. apex components
module_installed_filename :=
else ifneq (,$(and $(filter %.sdk,$(LOCAL_MODULE)),$(filter $(patsubst %.sdk,%,$(LOCAL_MODULE)),$(SOONG_SDK_VARIANT_MODULES))))
# Soong produces uninstallable *.sdk shared libraries for embedding in APKs.
module_installed_filename := \
$(patsubst $(PRODUCT_OUT)/%,%,$($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_SHARED_LIBRARIES))/$(notdir $(LOCAL_BUILT_MODULE))
- else
- $(error Cannot determine where to install NOTICE file for $(LOCAL_MODULE))
endif # JAVA_LIBRARIES
endif # STATIC_LIBRARIES
endif
@@ -100,12 +179,17 @@
installed_notice_file := $($(my_prefix)OUT_NOTICE_FILES)/src/$(module_installed_filename).txt
+ifdef my_register_name
+ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(installed_notice_file)
+endif
+
$(installed_notice_file): PRIVATE_INSTALLED_MODULE := $(module_installed_filename)
+$(installed_notice_file) : PRIVATE_NOTICES := $(notice_file)
$(installed_notice_file): $(notice_file)
@echo Notice file: $< -- $@
$(hide) mkdir -p $(dir $@)
- $(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $^ > $@
+ $(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $(PRIVATE_NOTICES) > $@
ifdef LOCAL_INSTALLED_MODULE
# Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
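The container test added to notice_files.mk above classifies a module by the suffix of its built artifact whenever LOCAL_MODULE_IS_CONTAINER is not set explicitly. A small self-contained sketch of that suffix filter, using a hypothetical artifact path; the suffix list is the one from the hunk above:

artifact := out/target/product/generic/system/app/Foo/Foo.apk
is_container := $(if $(filter %.zip %.tar %.tgz %.tar.gz %.apk %.img %.srcszip %.apex,$(artifact)),true,false)
$(info $(artifact) -> is_container=$(is_container))

all: ;

Containers pull in their shared libraries' notices while plain binaries and libraries do not, which is why the two notice_deps lists above differ only by LOCAL_SHARED_LIBRARIES, LOCAL_DYLIB_LIBRARIES, LOCAL_JNI_SHARED_LIBRARIES and LOCAL_JAVA_LIBRARIES.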
diff --git a/core/package_internal.mk b/core/package_internal.mk
index a97e401..1b40624 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -552,6 +552,10 @@
ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
$(LOCAL_BUILT_MODULE) : $(ZIP2ZIP)
endif
+ifeq ($(full_classes_jar),)
+ # We don't build jar, need to add the Java resources here.
+ $(LOCAL_BUILT_MODULE): $(java_resource_sources)
+endif
$(LOCAL_BUILT_MODULE): PRIVATE_USE_EMBEDDED_NATIVE_LIBS := $(LOCAL_USE_EMBEDDED_NATIVE_LIBS)
$(LOCAL_BUILT_MODULE):
@echo "target Package: $(PRIVATE_MODULE) ($@)"
@@ -603,6 +607,8 @@
else
$(my_bundle_module): PRIVATE_DEX_FILE :=
$(my_bundle_module): PRIVATE_SOURCE_ARCHIVE :=
+ # We don't build jar, need to add the Java resources here.
+ $(my_bundle_module): $(java_resource_sources)
endif # full_classes_jar
$(my_bundle_module): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
diff --git a/core/product.mk b/core/product.mk
index 324010c..2ab4b06 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -120,7 +120,7 @@
# The resoure configuration options to use for this product.
_product_list_vars += PRODUCT_LOCALES
_product_list_vars += PRODUCT_AAPT_CONFIG
-_product_list_vars += PRODUCT_AAPT_PREF_CONFIG
+_product_single_value_vars += PRODUCT_AAPT_PREF_CONFIG
_product_list_vars += PRODUCT_AAPT_PREBUILT_DPI
_product_list_vars += PRODUCT_HOST_PACKAGES
_product_list_vars += PRODUCT_PACKAGES
@@ -209,7 +209,7 @@
_product_list_vars += PRODUCT_SOONG_NAMESPACES
_product_list_vars += PRODUCT_DEFAULT_WIFI_CHANNELS
-_product_list_vars += PRODUCT_DEFAULT_DEV_CERTIFICATE
+_product_single_value_vars += PRODUCT_DEFAULT_DEV_CERTIFICATE
_product_list_vars += PRODUCT_MAINLINE_SEPOLICY_DEV_CERTIFICATES
_product_list_vars += PRODUCT_RESTRICT_VENDOR_FILES
@@ -233,7 +233,7 @@
# List of system_server jars delivered via apex. Format = <apex name>:<jar name>.
_product_list_vars += PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS
# If true, then suboptimal order of system server jars does not cause an error.
-_product_list_vars += PRODUCT_BROKEN_SUBOPTIMAL_ORDER_OF_SYSTEM_SERVER_JARS
+_product_single_value_vars += PRODUCT_BROKEN_SUBOPTIMAL_ORDER_OF_SYSTEM_SERVER_JARS
# Additional system server jars to be appended at the end of the common list.
# This is necessary to avoid jars reordering due to makefile inheritance order.
@@ -258,13 +258,13 @@
# Per-module dex-preopt configs.
_product_list_vars += PRODUCT_DEX_PREOPT_MODULE_CONFIGS
-_product_list_vars += PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER
+_product_single_value_vars += PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER
_product_list_vars += PRODUCT_DEX_PREOPT_DEFAULT_FLAGS
-_product_list_vars += PRODUCT_DEX_PREOPT_BOOT_FLAGS
-_product_list_vars += PRODUCT_DEX_PREOPT_PROFILE_DIR
-_product_list_vars += PRODUCT_DEX_PREOPT_GENERATE_DM_FILES
-_product_list_vars += PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING
-_product_list_vars += PRODUCT_DEX_PREOPT_RESOLVE_STARTUP_STRINGS
+_product_single_value_vars += PRODUCT_DEX_PREOPT_BOOT_FLAGS
+_product_single_value_vars += PRODUCT_DEX_PREOPT_PROFILE_DIR
+_product_single_value_vars += PRODUCT_DEX_PREOPT_GENERATE_DM_FILES
+_product_single_value_vars += PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING
+_product_single_value_vars += PRODUCT_DEX_PREOPT_RESOLVE_STARTUP_STRINGS
# Boot image options.
_product_single_value_vars += \
@@ -272,7 +272,7 @@
PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
PRODUCT_USES_DEFAULT_ART_CONFIG \
-_product_list_vars += PRODUCT_SYSTEM_SERVER_COMPILER_FILTER
+_product_single_value_vars += PRODUCT_SYSTEM_SERVER_COMPILER_FILTER
# Per-module sanitizer configs
_product_list_vars += PRODUCT_SANITIZER_MODULE_CONFIGS
_product_single_value_vars += PRODUCT_SYSTEM_BASE_FS_PATH
@@ -315,18 +315,15 @@
# Whether the Scudo hardened allocator is disabled platform-wide
_product_single_value_vars += PRODUCT_DISABLE_SCUDO
-# A flag to override PRODUCT_COMPATIBLE_PROPERTY
-_product_single_value_vars += PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE
-
# List of extra VNDK versions to be included
_product_list_vars += PRODUCT_EXTRA_VNDK_VERSIONS
+# Whether APEX should be compressed or not
+_product_single_value_vars += PRODUCT_COMPRESSED_APEX
+
# VNDK version of product partition. It can be 'current' if the product
# partitions uses PLATFORM_VNDK_VERSION.
-_product_single_value_var += PRODUCT_PRODUCT_VNDK_VERSION
-
-# Whether the list of allowed of actionable compatible properties should be disabled or not
-_product_single_value_vars += PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE
+_product_single_value_vars += PRODUCT_PRODUCT_VNDK_VERSION
_product_single_value_vars += PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS
_product_single_value_vars += PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT
@@ -380,6 +377,7 @@
_product_single_value_vars += PRODUCT_BUILD_USERDATA_IMAGE
_product_single_value_vars += PRODUCT_BUILD_RECOVERY_IMAGE
_product_single_value_vars += PRODUCT_BUILD_BOOT_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_VENDOR_BOOT_IMAGE
_product_single_value_vars += PRODUCT_BUILD_VBMETA_IMAGE
# List of boot jars delivered via apex
@@ -391,6 +389,9 @@
# If set, device uses virtual A/B.
_product_single_value_vars += PRODUCT_VIRTUAL_AB_OTA
+# If set, device uses virtual A/B Compression.
+_product_single_value_vars += PRODUCT_VIRTUAL_AB_COMPRESSION
+
# If set, device retrofits virtual A/B.
_product_single_value_vars += PRODUCT_VIRTUAL_AB_OTA_RETROFIT
@@ -404,6 +405,20 @@
# If set, Java module in product partition cannot use hidden APIs.
_product_single_value_vars += PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE
+# If set, only java_sdk_library can be used for inter-partition dependencies.
+# Note: Build error if BOARD_VNDK_VERSION is not set while
+# PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true, because
+# PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY has no meaning if
+# BOARD_VNDK_VERSION is not set.
+# Note: When PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE is not set, there are
+# no restrictions on dependencies between the system and product partitions.
+_product_single_value_vars += PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY
+
+# Allowlist for PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY option.
+# Listed modules are allowed in inter-partition dependencies even if they are
+# not java_sdk_library modules.
+_product_list_vars += PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST
+
_product_single_value_vars += PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES
.KATI_READONLY := _product_single_value_vars _product_list_vars
@@ -459,6 +474,13 @@
$(sort $(ARTIFACT_PATH_REQUIREMENT_PRODUCTS) $(current_mk)))
endef
+# Like require-artifacts-in-path, but does not require all allow-list entries to
+# have an effect.
+define require-artifacts-in-path-relaxed
+ $(require-artifacts-in-path) \
+ $(eval PRODUCTS.$(current_mk).ARTIFACT_PATH_REQUIREMENT_IS_RELAXED := true)
+endef
+
# Makes including non-existent modules in PRODUCT_PACKAGES an error.
# $(1): list of non-existent modules to allow.
define enforce-product-packages-exist
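Several PRODUCT_ variables are moved from _product_list_vars to _product_single_value_vars above because they are only meaningful as a single value rather than an accumulated list. A rough standalone illustration of the distinction; the guard below is purely illustrative (not the build system's actual validation) and the certificate path is made up:

# List vars may accumulate words across inherited product makefiles.
PRODUCT_PACKAGES := foo bar baz
# Single-value vars are expected to end up with (at most) one word.
PRODUCT_DEFAULT_DEV_CERTIFICATE := device/acme/security/releasekey

# Illustrative guard: a second word in the certificate variable above would
# trigger this error.
$(foreach v,PRODUCT_DEFAULT_DEV_CERTIFICATE,\
  $(if $(filter-out 0 1,$(words $($(v)))),\
    $(error $(v) must hold a single value, got '$($(v))')))

all: ;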
diff --git a/core/product_config.mk b/core/product_config.mk
index 38926c2..7b72b5e 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -160,15 +160,16 @@
$(call import-products, $(current_product_makefile))
endif # Import all or just the current product makefile
+# Quick check
+$(check-all-products)
+
# Import all the products that have made artifact path requirements, so that we can verify
# the artifacts they produce.
+# These are imported after check-all-products because some of them might not be real products.
$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
$(if $(filter-out $(makefile),$(PRODUCTS)),$(eval $(call import-products,$(makefile))))\
)
-# Quick check
-$(check-all-products)
-
ifneq ($(filter dump-products, $(MAKECMDGOALS)),)
$(dump-products)
endif
@@ -184,18 +185,6 @@
all_product_makefiles :=
all_product_configs :=
-# Jacoco agent JARS to be built and installed, if any.
-ifeq ($(EMMA_INSTRUMENT),true)
- ifneq ($(EMMA_INSTRUMENT_STATIC),true)
- # For instrumented build, if Jacoco is not being included statically
- # in instrumented packages then include Jacoco classes into the
- # bootclasspath.
- $(foreach product,$(PRODUCTS),\
- $(eval PRODUCTS.$(product).PRODUCT_PACKAGES += jacocoagent)\
- $(eval PRODUCTS.$(product).PRODUCT_BOOT_JARS += jacocoagent))
- endif # EMMA_INSTRUMENT_STATIC
-endif # EMMA_INSTRUMENT
-
############################################################################
# Strip and assign the PRODUCT_ variables.
$(call strip-product-vars)
@@ -355,9 +344,25 @@
PRODUCT_EXTRA_VNDK_VERSIONS := $(OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS)
endif
+###########################################
+# APEXes are by default not compressed
+#
+# APEX compression can be forcibly enabled (resp. disabled) by
+# setting OVERRIDE_PRODUCT_COMPRESSED_APEX to true (resp. false), e.g. by
+# setting the OVERRIDE_PRODUCT_COMPRESSED_APEX environment variable.
+ifdef OVERRIDE_PRODUCT_COMPRESSED_APEX
+ PRODUCT_COMPRESSED_APEX := $(OVERRIDE_PRODUCT_COMPRESSED_APEX)
+endif
+
$(KATI_obsolete_var OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS \
,Use PRODUCT_EXTRA_VNDK_VERSIONS instead)
+# If the build command defines OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE,
+# override PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE with it unless it is
+# defined as `false`. If the value is `false`, clear
+# PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE.
+# OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE can be used for
+# testing only.
ifdef OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE
ifeq (false,$(OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))
PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE :=
@@ -367,11 +372,35 @@
else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
# No shipping level defined
else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
+ # Enforce product interface if PRODUCT_SHIPPING_API_LEVEL is greater than 29.
PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := true
endif
$(KATI_obsolete_var OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE,Use PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE instead)
+# If build command defines PRODUCT_USE_PRODUCT_VNDK_OVERRIDE as `false`,
+# PRODUCT_PRODUCT_VNDK_VERSION will not be defined automatically.
+# PRODUCT_USE_PRODUCT_VNDK_OVERRIDE can be used for testing only.
+PRODUCT_USE_PRODUCT_VNDK := false
+ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
+ PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+ # No shipping level defined
+else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
+ # Enforce product interface for VNDK if PRODUCT_SHIPPING_API_LEVEL is greater
+ # than 29.
+ PRODUCT_USE_PRODUCT_VNDK := true
+endif
+
+ifeq ($(PRODUCT_USE_PRODUCT_VNDK),true)
+ ifndef PRODUCT_PRODUCT_VNDK_VERSION
+ PRODUCT_PRODUCT_VNDK_VERSION := current
+ endif
+endif
+
+$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+
define product-overrides-config
$$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
$$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
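The PRODUCT_USE_PRODUCT_VNDK block above derives PRODUCT_PRODUCT_VNDK_VERSION from the shipping API level and then immediately retires both helper variables. A condensed standalone sketch of that gate, with hypothetical values and with math_gt (from core/math.mk) replaced by a shell comparison so the sketch runs on its own:

PRODUCT_SHIPPING_API_LEVEL := 30
PRODUCT_USE_PRODUCT_VNDK := false

ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
  PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
  # No shipping level defined: keep the default.
else ifeq ($(shell [ $(PRODUCT_SHIPPING_API_LEVEL) -gt 29 ] && echo true),true)
  PRODUCT_USE_PRODUCT_VNDK := true
endif

ifeq ($(PRODUCT_USE_PRODUCT_VNDK),true)
  ifndef PRODUCT_PRODUCT_VNDK_VERSION
    PRODUCT_PRODUCT_VNDK_VERSION := current
  endif
endif

$(info PRODUCT_PRODUCT_VNDK_VERSION=$(PRODUCT_PRODUCT_VNDK_VERSION))

all: ;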
diff --git a/core/rbe.mk b/core/rbe.mk
index f4f7f5e..91606d4 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -19,7 +19,7 @@
ifdef RBE_DIR
rbe_dir := $(RBE_DIR)
else
- rbe_dir := $(HOME)/rbe
+ rbe_dir := prebuilts/remoteexecution-client/live/
endif
ifdef RBE_CXX_EXEC_STRATEGY
@@ -37,19 +37,19 @@
ifdef RBE_JAVAC_EXEC_STRATEGY
javac_exec_strategy := $(RBE_JAVAC_EXEC_STRATEGY)
else
- javac_exec_strategy := local
+ javac_exec_strategy := remote_local_fallback
endif
ifdef RBE_R8_EXEC_STRATEGY
r8_exec_strategy := $(RBE_R8_EXEC_STRATEGY)
else
- r8_exec_strategy := local
+ r8_exec_strategy := remote_local_fallback
endif
ifdef RBE_D8_EXEC_STRATEGY
d8_exec_strategy := $(RBE_D8_EXEC_STRATEGY)
else
- d8_exec_strategy := local
+ d8_exec_strategy := remote_local_fallback
endif
platform := container-image=docker://gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62
@@ -57,7 +57,7 @@
java_r8_d8_platform := $(platform),Pool=java16
RBE_WRAPPER := $(rbe_dir)/rewrapper
- RBE_CXX := --labels=type=compile,lang=cpp,compiler=clang --env_var_whitelist=PWD --exec_strategy=$(cxx_rbe_exec_strategy) --platform=$(cxx_platform) --compare=$(cxx_compare)
+ RBE_CXX := --labels=type=compile,lang=cpp,compiler=clang --env_var_allowlist=PWD --exec_strategy=$(cxx_rbe_exec_strategy) --platform=$(cxx_platform) --compare=$(cxx_compare)
# Append rewrapper to existing *_WRAPPER variables so it's possible to
# use both ccache and rewrapper.
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml
index e79abd5..e62175f 100644
--- a/core/robolectric_test_config_template.xml
+++ b/core/robolectric_test_config_template.xml
@@ -15,8 +15,8 @@
-->
<!-- This test config file is auto-generated. -->
<configuration description="Runs {MODULE}">
- <option name="test-suite-tag" value="apct" />
- <option name="test-suite-tag" value="apct-junit" />
+ <option name="test-suite-tag" value="robolectric" />
+ <option name="test-suite-tag" value="robolectric-tests" />
<option name="java-folder" value="prebuilts/jdk/jdk9/linux-x86/" />
<option name="exclude-paths" value="java" />
diff --git a/core/sdk_font.mk b/core/sdk_font.mk
index 0259a9c..1742925 100644
--- a/core/sdk_font.mk
+++ b/core/sdk_font.mk
@@ -19,9 +19,9 @@
# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
sdk_font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
-sdk_font_config := $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_font_config)))
+sdk_font_config := $(addprefix $(SDK_FONT_TEMP)/standard/, $(notdir $(sdk_font_config)))
-$(sdk_font_config): $(SDK_FONT_TEMP)/%.xml: \
+$(sdk_font_config): $(SDK_FONT_TEMP)/standard/%.xml: \
frameworks/base/data/fonts/%.xml
$(hide) mkdir -p $(dir $@)
$(hide) cp -vf $< $@
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index b994b17..50ac93a 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -7,6 +7,7 @@
# LOCAL_SOONG_HEADER_JAR
# LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
# LOCAL_SOONG_PROGUARD_DICT
+# LOCAL_SOONG_PROGUARD_USAGE
# LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
# LOCAL_SOONG_RRO_DIRS
# LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
@@ -85,6 +86,13 @@
$(intermediates.COMMON)/proguard_dictionary)
endif
+ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
+ $(intermediates.COMMON)/proguard_usage.zip))
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(intermediates.COMMON)/proguard_usage.zip)
+endif
+
ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
resource_export_package := $(intermediates.COMMON)/package-export.apk
resource_export_stamp := $(intermediates.COMMON)/src/R.stamp
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index c9b742a..a12ef66 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -91,6 +91,7 @@
ifdef LOCAL_INSTALLED_MODULE
ifneq ($(LOCAL_CHECK_ELF_FILES),)
my_prebuilt_src_file := $(LOCAL_PREBUILT_MODULE_FILE)
+ my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
include $(BUILD_SYSTEM)/check_elf_file.mk
endif
endif
@@ -142,10 +143,21 @@
$(LOCAL_BUILT_MODULE): $(same_vndk_variants_stamp)
endif
+# Use copy-or-link-prebuilt-to-target for host executables and shared libraries,
+# to preserve symlinks to the source trees. They can then run directly from the
+# prebuilt directories where the linker can load their dependencies using
+# relative RUNPATHs.
$(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
+ $(copy-or-link-prebuilt-to-target)
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ [ -x $@ ] || ( $(call echo-error,$@,Target of symlink is not executable); false )
+ endif
+else
$(transform-prebuilt-to-target)
-ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
$(hide) chmod +x $@
+ endif
endif
ifndef LOCAL_IS_HOST_MODULE
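The new install rule above packs two conditions into a single ifeq: the module must be a host module and its class must be one of the native types, and only then is the prebuilt linked rather than copied. A tiny standalone sketch of that "two flags in one comparison" pattern, with made-up values for the two LOCAL_ variables:

LOCAL_IS_HOST_MODULE := true
LOCAL_MODULE_CLASS := SHARED_LIBRARIES

ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
  $(info host native artifact: copy-or-link, relative RUNPATHs keep working)
else
  $(info everything else: plain copy)
endif

all: ;

Both words have to evaluate to true for the left-hand side to equal the literal "true true"; flipping either variable falls through to the plain-copy branch, matching the hunk above (and the identical construct later in soong_rust_prebuilt.mk).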
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 4731250..fde5832 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -1,7 +1,3 @@
-SOONG := $(SOONG_OUT_DIR)/soong
-SOONG_BOOTSTRAP := $(SOONG_OUT_DIR)/.soong.bootstrap
-SOONG_BUILD_NINJA := $(SOONG_OUT_DIR)/build.ninja
-SOONG_IN_MAKE := $(SOONG_OUT_DIR)/.soong.in_make
SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk
SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.variables
SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
@@ -42,6 +38,7 @@
$(call add_json_bool, Unbundled_build, $(TARGET_BUILD_UNBUNDLED))
$(call add_json_bool, Unbundled_build_apps, $(TARGET_BUILD_APPS))
$(call add_json_bool, Always_use_prebuilt_sdks, $(TARGET_BUILD_USE_PREBUILT_SDKS))
+$(call add_json_bool, Skip_boot_jars_check, $(SKIP_BOOT_JARS_CHECK))
$(call add_json_bool, Debuggable, $(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
$(call add_json_bool, Eng, $(filter eng,$(TARGET_BUILD_VARIANT)))
@@ -57,6 +54,9 @@
$(call add_json_str, DeviceSecondaryCpuVariant, $(TARGET_2ND_CPU_VARIANT))
$(call add_json_list, DeviceSecondaryAbi, $(TARGET_2ND_CPU_ABI) $(TARGET_2ND_CPU_ABI2))
+$(call add_json_bool, Aml_abis, $(if $(filter mainline_sdk,$(TARGET_ARCH_SUITE)),true))
+$(call add_json_bool, Ndk_abis, $(if $(filter ndk, $(TARGET_ARCH_SUITE)),true))
+
$(call add_json_str, NativeBridgeArch, $(TARGET_NATIVE_BRIDGE_ARCH))
$(call add_json_str, NativeBridgeArchVariant, $(TARGET_NATIVE_BRIDGE_ARCH_VARIANT))
$(call add_json_str, NativeBridgeCpuVariant, $(TARGET_NATIVE_BRIDGE_CPU_VARIANT))
@@ -103,7 +103,9 @@
$(call add_json_list, CFIIncludePaths, $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
$(call add_json_list, IntegerOverflowExcludePaths, $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
-$(call add_json_bool, Experimental_mte, $(filter true,$(TARGET_EXPERIMENTAL_MTE)))
+$(call add_json_list, MemtagHeapExcludePaths, $(MEMTAG_HEAP_EXCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS))
+$(call add_json_list, MemtagHeapAsyncIncludePaths, $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS))
+$(call add_json_list, MemtagHeapSyncIncludePaths, $(MEMTAG_HEAP_SYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS))
$(call add_json_bool, DisableScudo, $(filter true,$(PRODUCT_DISABLE_SCUDO)))
@@ -129,9 +131,10 @@
$(call add_json_str, ProductVndkVersion, $(PRODUCT_PRODUCT_VNDK_VERSION))
$(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VERSIONS))
$(call add_json_list, DeviceSystemSdkVersions, $(BOARD_SYSTEMSDK_VERSIONS))
+$(call add_json_str, RecoverySnapshotVersion, $(RECOVERY_SNAPSHOT_VERSION))
$(call add_json_list, Platform_systemsdk_versions, $(PLATFORM_SYSTEMSDK_VERSIONS))
$(call add_json_bool, Malloc_not_svelte, $(call invert_bool,$(filter true,$(MALLOC_SVELTE))))
-$(call add_json_bool, Malloc_zero_contents, $(MALLOC_ZERO_CONTENTS))
+$(call add_json_bool, Malloc_zero_contents, $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))
$(call add_json_bool, Malloc_pattern_fill_contents, $(MALLOC_PATTERN_FILL_CONTENTS))
$(call add_json_str, Override_rs_driver, $(OVERRIDE_RS_DRIVER))
@@ -144,6 +147,12 @@
$(call add_json_bool, VndkUseCoreVariant, $(TARGET_VNDK_USE_CORE_VARIANT))
$(call add_json_bool, VndkSnapshotBuildArtifacts, $(VNDK_SNAPSHOT_BUILD_ARTIFACTS))
+$(call add_json_bool, DirectedVendorSnapshot, $(DIRECTED_VENDOR_SNAPSHOT))
+$(call add_json_map, VendorSnapshotModules)
+$(foreach module,$(VENDOR_SNAPSHOT_MODULES),\
+ $(call add_json_bool,$(module),true))
+$(call end_json_map)
+
$(call add_json_bool, Treble_linker_namespaces, $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
$(call add_json_bool, Enforce_vintf_manifest, $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
@@ -170,15 +179,19 @@
$(call add_json_list, PgoAdditionalProfileDirs, $(PGO_ADDITIONAL_PROFILE_DIRS))
+$(call add_json_list, BoardReqdMaskPolicy, $(BOARD_REQD_MASK_POLICY))
$(call add_json_list, BoardVendorSepolicyDirs, $(BOARD_VENDOR_SEPOLICY_DIRS) $(BOARD_SEPOLICY_DIRS))
$(call add_json_list, BoardOdmSepolicyDirs, $(BOARD_ODM_SEPOLICY_DIRS))
$(call add_json_list, BoardVendorDlkmSepolicyDirs, $(BOARD_VENDOR_DLKM_SEPOLICY_DIRS))
$(call add_json_list, BoardOdmDlkmSepolicyDirs, $(BOARD_ODM_DLKM_SEPOLICY_DIRS))
-$(call add_json_list, BoardPlatPublicSepolicyDirs, $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
-$(call add_json_list, BoardPlatPrivateSepolicyDirs, $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
+# TODO: BOARD_PLAT_* dirs only kept for compatibility reasons. Will be a hard error on API level 31
+$(call add_json_list, SystemExtPublicSepolicyDirs, $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS) $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
+$(call add_json_list, SystemExtPrivateSepolicyDirs, $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS) $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
$(call add_json_list, BoardSepolicyM4Defs, $(BOARD_SEPOLICY_M4DEFS))
+$(call add_json_str, BoardSepolicyVers, $(BOARD_SEPOLICY_VERS))
$(call add_json_bool, Flatten_apex, $(filter true,$(TARGET_FLATTEN_APEX)))
+$(call add_json_bool, ForceApexSymlinkOptimization, $(filter true,$(TARGET_FORCE_APEX_SYMLINK_OPTIMIZATION)))
$(call add_json_str, DexpreoptGlobalConfig, $(DEX_PREOPT_CONFIG))
@@ -195,7 +208,6 @@
$(call add_json_list, ProductPublicSepolicyDirs, $(PRODUCT_PUBLIC_SEPOLICY_DIRS))
$(call add_json_list, ProductPrivateSepolicyDirs, $(PRODUCT_PRIVATE_SEPOLICY_DIRS))
-$(call add_json_bool, ProductCompatibleProperty, $(PRODUCT_COMPATIBLE_PROPERTY))
$(call add_json_list, TargetFSConfigGen, $(TARGET_FS_CONFIG_GEN))
@@ -210,14 +222,23 @@
$(call end_json_map)
$(call add_json_bool, EnforceProductPartitionInterface, $(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))
+$(call add_json_str, DeviceCurrentApiLevelForVendorModules, $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
+
+$(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY))
+$(call add_json_list, InterPartitionJavaLibraryAllowList, $(PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST))
$(call add_json_bool, InstallExtraFlattenedApexes, $(PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES))
+$(call add_json_bool, CompressedApex, $(PRODUCT_COMPRESSED_APEX))
+
$(call add_json_bool, BoardUsesRecoveryAsBoot, $(BOARD_USES_RECOVERY_AS_BOOT))
$(call add_json_list, BoardKernelBinaries, $(BOARD_KERNEL_BINARIES))
$(call add_json_list, BoardKernelModuleInterfaceVersions, $(BOARD_KERNEL_MODULE_INTERFACE_VERSIONS))
+$(call add_json_bool, BoardMoveRecoveryResourcesToVendorBoot, $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+$(call add_json_str, PrebuiltHiddenApiDir, $(BOARD_PREBUILT_HIDDENAPI_DIR))
+
$(call json_end)
$(file >$(SOONG_VARIABLES).tmp,$(json_contents))
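The Malloc_zero_contents line above changes the default: the JSON value is now true unless MALLOC_ZERO_CONTENTS is explicitly set to false, instead of defaulting to false when the variable is unset. A small standalone sketch of that filter/invert_bool pattern; invert_bool is reimplemented inline here only to keep the sketch self-contained:

invert_bool = $(if $(strip $(1)),,true)

MALLOC_ZERO_CONTENTS :=
$(info unset -> $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))   # true
MALLOC_ZERO_CONTENTS := true
$(info true  -> $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))   # true
MALLOC_ZERO_CONTENTS := false
$(info false -> $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))   # (empty, i.e. false)

all: ;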
diff --git a/core/soong_droiddoc_prebuilt.mk b/core/soong_droiddoc_prebuilt.mk
index c0467df..4dc5d08 100644
--- a/core/soong_droiddoc_prebuilt.mk
+++ b/core/soong_droiddoc_prebuilt.mk
@@ -29,16 +29,6 @@
$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_API_VERSIONS_XML),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_generated-api-versions.xml))
endif
-ifdef LOCAL_DROIDDOC_JDIFF_DOC_ZIP
-$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_JDIFF_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip))
-$(call dist-for-goals,docs,$(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip)
-
-ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip
-
-.PHONY: $(LOCAL_MODULE) $(LOCAL_MODULE)-jdiff
-$(LOCAL_MODULE) $(LOCAL_MODULE)-jdiff : $(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip
-endif
-
ifdef LOCAL_DROIDDOC_METADATA_ZIP
$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_METADATA_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)-metadata.zip))
endif
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 05f700d..5444d96 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -58,6 +58,14 @@
$(intermediates.COMMON)/proguard_dictionary)
endif
+ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
+ $(intermediates.COMMON)/proguard_usage.zip))
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(intermediates.COMMON)/proguard_usage.zip)
+endif
+
+
ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
my_res_package := $(intermediates.COMMON)/package-res.apk
diff --git a/core/soong_rust_prebuilt.mk b/core/soong_rust_prebuilt.mk
index 804e37e..4cfb01f 100644
--- a/core/soong_rust_prebuilt.mk
+++ b/core/soong_rust_prebuilt.mk
@@ -28,9 +28,9 @@
$(call pretty-error,Unsupported LOCAL_MODULE_$(my_prefix)ARCH=$(LOCAL_MODULE_$(my_prefix)ARCH))
endif
-# Don't install rlib/proc_macro libraries.
+# Don't install static/rlib/proc_macro libraries.
ifndef LOCAL_UNINSTALLABLE_MODULE
- ifneq ($(filter RLIB_LIBRARIES PROC_MACRO_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+ ifneq ($(filter STATIC_LIBRARIES RLIB_LIBRARIES PROC_MACRO_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
LOCAL_UNINSTALLABLE_MODULE := true
endif
endif
@@ -57,9 +57,16 @@
endif
$(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
+ $(copy-or-link-prebuilt-to-target)
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ [ -x $@ ] || ( $(call echo-error,$@,Target of symlink is not executable); false )
+ endif
+else
$(transform-prebuilt-to-target)
-ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
$(hide) chmod +x $@
+ endif
endif
ifndef LOCAL_IS_HOST_MODULE
@@ -75,8 +82,19 @@
endif
endif
+create_coverage_zip :=
ifeq ($(NATIVE_COVERAGE),true)
+ create_coverage_zip := true
+endif
+
+# Until Rust supports LLVM coverage, Soong assumes GCOV coverage in both cases.
+# Therefore we should create the coverage zip with the gcno files in this case as well.
+ifeq ($(CLANG_COVERAGE),true)
+ create_coverage_zip := true
+endif
+
+ifdef create_coverage_zip
ifneq (,$(strip $(LOCAL_PREBUILT_COVERAGE_ARCHIVE)))
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(intermediates)/$(LOCAL_MODULE).zip))
ifneq ($(LOCAL_UNINSTALLABLE_MODULE),true)
diff --git a/core/sysprop.mk b/core/sysprop.mk
index d01255f..df27067 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -47,6 +47,11 @@
echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
)\
+ $(if $(filter system vendor odm,$(1)),\
+ echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST) " >> $(2);\
+ echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ )\
echo "ro.$(1).build.date=`$(DATE_FROM_FILE)`" >> $(2);\
echo "ro.$(1).build.date.utc=`$(DATE_FROM_FILE) +%s`" >> $(2);\
echo "ro.$(1).build.fingerprint=$(BUILD_FINGERPRINT_FROM_FILE)" >> $(2);\
@@ -54,7 +59,8 @@
echo "ro.$(1).build.tags=$(BUILD_VERSION_TAGS)" >> $(2);\
echo "ro.$(1).build.type=$(TARGET_BUILD_VARIANT)" >> $(2);\
echo "ro.$(1).build.version.incremental=$(BUILD_NUMBER_FROM_FILE)" >> $(2);\
- echo "ro.$(1).build.version.release=$(PLATFORM_VERSION)" >> $(2);\
+ echo "ro.$(1).build.version.release=$(PLATFORM_VERSION_LAST_STABLE)" >> $(2);\
+ echo "ro.$(1).build.version.release_or_codename=$(PLATFORM_VERSION)" >> $(2);\
echo "ro.$(1).build.version.sdk=$(PLATFORM_SDK_VERSION)" >> $(2);\
endef
@@ -67,7 +73,10 @@
# emitted to the output
# $(4): list of variable names each of which contains name=value pairs
# $(5): optional list of prop names to force remove from the output. Properties from both
-# $(3) and (4) are affected.
+# $(3) and (4) are affected
+# $(6): optional list of files to append at the end. The content of each file is emitted
+# to the output
+# $(7): optional flag to skip common properties generation
define build-properties
ALL_DEFAULT_INSTALLED_MODULES += $(2)
@@ -89,11 +98,13 @@
$(eval _option := --allow-dup)\
)
-$(2): $(POST_PROCESS_PROPS) $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT) $(3)
+$(2): $(POST_PROCESS_PROPS) $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT) $(3) $(6)
$(hide) echo Building $$@
$(hide) mkdir -p $$(dir $$@)
$(hide) rm -f $$@ && touch $$@
+ifneq ($(strip $(7)), true)
$(hide) $$(call generate-common-build-props,$(call to-lower,$(strip $(1))),$$@)
+endif
$(hide) $(foreach file,$(strip $(3)),\
if [ -f "$(file)" ]; then\
echo "" >> $$@;\
@@ -112,6 +123,10 @@
)\
)
$(hide) $(POST_PROCESS_PROPS) $$(_option) $$@ $(5)
+ $(hide) $(foreach file,$(strip $(6)),\
+ if [ -f "$(file)" ]; then\
+ cat $(file) >> $$@;\
+ fi;)
$(hide) echo "# end of file" >> $$@
endef
@@ -231,7 +246,7 @@
endef
gen_from_buildinfo_sh := $(call intermediates-dir-for,PACKAGING,system_build_prop)/buildinfo.prop
-$(gen_from_buildinfo_sh): $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT)
+$(gen_from_buildinfo_sh): $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT) | $(BUILD_DATETIME_FILE) $(BUILD_NUMBER_FILE)
$(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
TARGET_BUILD_FLAVOR="$(TARGET_BUILD_FLAVOR)" \
TARGET_DEVICE="$(TARGET_DEVICE)" \
@@ -264,19 +279,6 @@
TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
bash $(BUILDINFO_SH) > $@
-ifneq ($(PRODUCT_OEM_PROPERTIES),)
-import_oem_prop := $(call intermediates-dir-for,ETC,system_build_prop)/oem.prop
-
-$(import_oem_prop):
- $(hide) echo "#" >> $@; \
- echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
- echo "#" >> $@;
- $(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \
- echo "import /oem/oem.prop $(prop)" >> $@;)
-else
-import_oem_prop :=
-endif
-
ifdef TARGET_SYSTEM_PROP
system_prop_file := $(TARGET_SYSTEM_PROP)
else
@@ -284,7 +286,6 @@
endif
_prop_files_ := \
- $(import_oem_prop) \
$(gen_from_buildinfo_sh) \
$(system_prop_file)
@@ -300,7 +301,8 @@
ifndef property_overrides_split_enabled
_prop_vars_ += \
- ADDITIONAL_VENDOR_PROPERTIES
+ ADDITIONAL_VENDOR_PROPERTIES \
+ PRODUCT_VENDOR_PROPERTIES
endif
_blacklist_names_ := \
@@ -309,9 +311,14 @@
INSTALLED_BUILD_PROP_TARGET := $(TARGET_OUT)/build.prop
-$(eval $(call build-properties,system,$(INSTALLED_BUILD_PROP_TARGET),\
-$(_prop_files_),$(_prop_vars_),\
-$(_blacklist_names_)))
+$(eval $(call build-properties,\
+ system,\
+ $(INSTALLED_BUILD_PROP_TARGET),\
+ $(_prop_files_),\
+ $(_prop_vars_),\
+ $(_blacklist_names_),\
+ $(empty),\
+ $(empty)))
# -----------------------------------------------------------------
# vendor/build.prop
@@ -347,7 +354,9 @@
$(INSTALLED_VENDOR_BUILD_PROP_TARGET),\
$(_prop_files_),\
$(_prop_vars_),\
- $(PRODUCT_VENDOR_PROPERTY_BLACKLIST)))
+ $(PRODUCT_VENDOR_PROPERTY_BLACKLIST),\
+ $(empty),\
+ $(empty)))
# -----------------------------------------------------------------
# product/etc/build.prop
@@ -364,12 +373,44 @@
PRODUCT_PRODUCT_PROPERTIES
INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/etc/build.prop
+
+ifdef PRODUCT_OEM_PROPERTIES
+import_oem_prop := $(call intermediates-dir-for,ETC,import_oem_prop)/oem.prop
+
+$(import_oem_prop):
+ $(hide) echo "####################################" >> $@; \
+ echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
+ echo "####################################" >> $@;
+ $(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \
+ echo "import /oem/oem.prop $(prop)" >> $@;)
+
+_footers_ := $(import_oem_prop)
+else
+_footers_ :=
+endif
+
+# Skip generation of the common /product properties if the device shipped before
+# R and has no product partition. This is the first part of the check.
+ifeq ($(call math_lt,$(if $(PRODUCT_SHIPPING_API_LEVEL),$(PRODUCT_SHIPPING_API_LEVEL),30),30), true)
+ _skip_common_properties := true
+endif
+
+# The second part of the check - always generate the common properties for
+# devices with a product partition, regardless of shipping level.
+ifneq ($(BOARD_USES_PRODUCTIMAGE),)
+ _skip_common_properties :=
+endif
+
$(eval $(call build-properties,\
product,\
$(INSTALLED_PRODUCT_BUILD_PROP_TARGET),\
$(_prop_files_),\
$(_prop_vars_),\
- $(empty)))
+ $(empty),\
+ $(_footers_),\
+ $(_skip_common_properties)))
+
+_skip_common_properties :=
# ----------------------------------------------------------------
# odm/etc/build.prop
@@ -390,6 +431,8 @@
$(INSTALLED_ODM_BUILD_PROP_TARGET),\
$(_prop_files),\
$(_prop_vars_),\
+ $(empty),\
+ $(empty),\
$(empty)))
# ----------------------------------------------------------------
@@ -399,7 +442,12 @@
INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_VENDOR_DLKM)/etc/build.prop
$(eval $(call build-properties,\
vendor_dlkm,\
- $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET)))
+ $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET),\
+ $(empty),\
+ $(empty),\
+ $(empty),\
+ $(empty),\
+ $(empty)))
# ----------------------------------------------------------------
# odm_dlkm/etc/build.prop
@@ -408,7 +456,12 @@
INSTALLED_ODM_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM_DLKM)/etc/build.prop
$(eval $(call build-properties,\
odm_dlkm,\
- $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET)))
+ $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET),\
+ $(empty),\
+ $(empty),\
+ $(empty),\
+ $(empty),\
+ $(empty)))
# -----------------------------------------------------------------
# system_ext/etc/build.prop
@@ -427,4 +480,21 @@
$(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET),\
$(_prop_files_),\
$(_prop_vars_),\
+ $(empty),\
+ $(empty),\
+ $(empty)))
+
+# ----------------------------------------------------------------
+# ramdisk/boot/etc/build.prop
+#
+
+RAMDISK_BUILD_PROP_REL_PATH := system/etc/ramdisk/build.prop
+INSTALLED_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RAMDISK_OUT)/$(RAMDISK_BUILD_PROP_REL_PATH)
+$(eval $(call build-properties,\
+ bootimage,\
+ $(INSTALLED_RAMDISK_BUILD_PROP_TARGET),\
+ $(empty),\
+ $(empty),\
+ $(empty),\
+ $(empty),\
$(empty)))
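generate-common-build-props above now also emits ro.<partition>.product.cpu.abilist* properties, but only for the system, vendor and odm partitions. A minimal standalone sketch of that partition filter with a made-up ABI list; as with the _unused idiom earlier, the foreach expansion is parked in a throwaway variable:

TARGET_CPU_ABI_LIST := arm64-v8a,armeabi-v7a,armeabi

# Only system, vendor and odm produce the abilist property; product and
# system_ext are skipped by the filter.
_ := $(foreach p,system vendor odm product system_ext,\
       $(if $(filter system vendor odm,$(p)),\
         $(info ro.$(p).product.cpu.abilist=$(TARGET_CPU_ABI_LIST))))

all: ;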
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
new file mode 100644
index 0000000..d771b06
--- /dev/null
+++ b/core/tasks/art-host-tests.mk
@@ -0,0 +1,44 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.PHONY: art-host-tests
+
+intermediates_dir := $(call intermediates-dir-for,PACKAGING,art-host-tests)
+art_host_tests_zip := $(PRODUCT_OUT)/art-host-tests.zip
+# Get the hostside libraries to be packaged in the test zip. Unlike
+# device-tests.mk or general-tests.mk, the files are not copied to the
+# testcases directory.
+my_host_shared_lib_for_art_host_tests := $(foreach f,$(COMPATIBILITY.art-host-tests.HOST_SHARED_LIBRARY.FILES),$(strip \
+ $(eval _cmf_tuple := $(subst :, ,$(f))) \
+ $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
+ $(_cmf_src)))
+
+$(art_host_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_art_host_tests)
+
+$(art_host_tests_zip) : $(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests) $(SOONG_ZIP)
+ echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $@.list
+ grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+ $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+ echo $$shared_lib >> $@-host-libs.list; \
+ done
+ grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+ $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
+ -P target -C $(PRODUCT_OUT) -l $@-target.list \
+ -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
+ rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
+
+art-host-tests: $(art_host_tests_zip)
+$(call dist-for-goals, art-host-tests, $(art_host_tests_zip))
+
+tests: art-host-tests
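The host shared-library handling in the new art-host-tests.mk treats each COMPATIBILITY entry as a colon-separated src:dst pair and keeps only the source half. A short standalone sketch of that tuple split, using hypothetical pair entries (the real ones come from COMPATIBILITY.art-host-tests.HOST_SHARED_LIBRARY.FILES):

host_shared_lib_pairs := out/host/lib64/libfoo.so:testcases/libfoo.so out/host/lib64/libbar.so:testcases/libbar.so

# Split each pair on ':' and keep the first word, i.e. the source path.
host_shared_lib_srcs := $(foreach f,$(host_shared_lib_pairs),$(word 1,$(subst :, ,$(f))))
$(info $(host_shared_lib_srcs))

all: ;

With the values above, $(info) prints the two out/host paths; per the zip recipe, those sources are what end up under host/testcases inside art-host-tests.zip.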
diff --git a/core/tasks/boot_jars_package_check.mk b/core/tasks/boot_jars_package_check.mk
deleted file mode 100644
index c9a8e27..0000000
--- a/core/tasks/boot_jars_package_check.mk
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright (C) 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# Rules to check if classes in the boot jars are from the list of allowed packages.
-#
-
-ifneq ($(SKIP_BOOT_JARS_CHECK),true)
-ifdef PRODUCT_BOOT_JARS
-
-intermediates := $(call intermediates-dir-for, PACKAGING, boot-jars-package-check,,COMMON)
-stamp := $(intermediates)/stamp
-
-# Convert the colon-separated components <apex>:<jar> to <jar>.<apex> names
-# (e.g. com.android.media:updatable-media -> updatable-media.com.android.media).
-# Special cases:
-# - for the "platform" or "system_ext" apex drop the .<apex> suffix
-# - for the ART apex select release variant
-boot_jars := $(foreach pair,$(PRODUCT_BOOT_JARS) $(PRODUCT_UPDATABLE_BOOT_JARS), \
- $(eval apex := $(call word-colon,1,$(pair))) \
- $(eval jar := $(call word-colon,2,$(pair))) \
- $(eval q := :) \
- $(eval sfx := $(q).$(apex)$(q)) \
- $(eval sfx := $(subst $(q).platform$(q),$(q)$(q),$(sfx))) \
- $(eval sfx := $(subst $(q).system_ext$(q),$(q)$(q),$(sfx))) \
- $(eval sfx := $(subst $(q).com.android.art$(q),$(q).com.android.art.release$(q),$(sfx))) \
- $(eval sfx := $(patsubst $(q)%$(q),%,$(sfx))) \
- $(jar)$(sfx))
-
-# Convert boot jar names to build paths.
-built_boot_jars := $(foreach j, $(boot_jars), \
- $(call intermediates-dir-for, JAVA_LIBRARIES, $(j),,COMMON)/classes.jar)
-
-script := build/make/core/tasks/check_boot_jars/check_boot_jars.py
-allowed_file := build/make/core/tasks/check_boot_jars/package_allowed_list.txt
-
-$(stamp): PRIVATE_BOOT_JARS := $(built_boot_jars)
-$(stamp): PRIVATE_SCRIPT := $(script)
-$(stamp): PRIVATE_ALLOWED := $(allowed_file)
-$(stamp) : $(built_boot_jars) $(script) $(allowed_file)
- @echo "Check package name for $(PRIVATE_BOOT_JARS)"
- $(hide) $(PRIVATE_SCRIPT) $(PRIVATE_ALLOWED) $(PRIVATE_BOOT_JARS)
- $(hide) mkdir -p $(dir $@) && touch $@
-
-.PHONY: check-boot-jars
-check-boot-jars : $(stamp)
-
-# Run check-boot-jars by default
-droidcore : check-boot-jars
-
-endif # PRODUCT_BOOT_JARS
-endif # SKIP_BOOT_JARS_CHECK not true
diff --git a/core/tasks/check_boot_jars/check_boot_jars.py b/core/tasks/check_boot_jars/check_boot_jars.py
deleted file mode 100755
index cf4ef27..0000000
--- a/core/tasks/check_boot_jars/check_boot_jars.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Check boot jars.
-
-Usage: check_boot_jars.py <package_allow_list_file> <jar1> <jar2> ...
-"""
-import logging
-import os.path
-import re
-import subprocess
-import sys
-
-
-# The compiled allow list RE.
-allow_list_re = None
-
-
-def LoadAllowList(filename):
- """ Load and compile allow list regular expressions from filename.
- """
- lines = []
- with open(filename, 'r') as f:
- for line in f:
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- lines.append(line)
- combined_re = r'^(%s)$' % '|'.join(lines)
- global allow_list_re
- try:
- allow_list_re = re.compile(combined_re)
- except re.error:
- logging.exception(
- 'Cannot compile package allow list regular expression: %r',
- combined_re)
- allow_list_re = None
- return False
- return True
-
-
-def CheckJar(allow_list_path, jar):
- """Check a jar file.
- """
- # Get the list of files inside the jar file.
- p = subprocess.Popen(args='jar tf %s' % jar,
- stdout=subprocess.PIPE, shell=True)
- stdout, _ = p.communicate()
- if p.returncode != 0:
- return False
- items = stdout.split()
- classes = 0
- for f in items:
- if f.endswith('.class'):
- classes += 1
- package_name = os.path.dirname(f)
- package_name = package_name.replace('/', '.')
- if not package_name or not allow_list_re.match(package_name):
- print >> sys.stderr, ('Error: %s contains class file %s, whose package name %s is empty or'
- ' not in the allow list %s of packages allowed on the bootclasspath.'
- % (jar, f, package_name, allow_list_path))
- return False
- if classes == 0:
- print >> sys.stderr, ('Error: %s does not contain any class files.' % jar)
- return False
- return True
-
-
-def main(argv):
- if len(argv) < 2:
- print __doc__
- return 1
- allow_list_path = argv[0]
-
- if not LoadAllowList(allow_list_path):
- return 1
-
- passed = True
- for jar in argv[1:]:
- if not CheckJar(allow_list_path, jar):
- passed = False
- if not passed:
- return 1
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/core/tasks/check_boot_jars/package_allowed_list.txt b/core/tasks/check_boot_jars/package_allowed_list.txt
deleted file mode 100644
index 6240ffd..0000000
--- a/core/tasks/check_boot_jars/package_allowed_list.txt
+++ /dev/null
@@ -1,248 +0,0 @@
-# Boot jar package name allowed list.
-# Each line is interpreted as a regular expression.
-
-###################################################
-# core-libart.jar & core-oj.jar
-java\.awt\.font
-java\.beans
-java\.io
-java\.lang
-java\.lang\.annotation
-java\.lang\.invoke
-java\.lang\.ref
-java\.lang\.reflect
-java\.math
-java\.net
-java\.nio
-java\.nio\.file
-java\.nio\.file\.spi
-java\.nio\.file\.attribute
-java\.nio\.channels
-java\.nio\.channels\.spi
-java\.nio\.charset
-java\.nio\.charset\.spi
-java\.security
-java\.security\.acl
-java\.security\.cert
-java\.security\.interfaces
-java\.security\.spec
-java\.sql
-java\.text
-java\.text\.spi
-java\.time
-java\.time\.chrono
-java\.time\.format
-java\.time\.temporal
-java\.time\.zone
-java\.util
-java\.util\.concurrent
-java\.util\.concurrent\.atomic
-java\.util\.concurrent\.locks
-java\.util\.function
-java\.util\.jar
-java\.util\.logging
-java\.util\.prefs
-java\.util\.regex
-java\.util\.spi
-java\.util\.stream
-java\.util\.zip
-# TODO: Remove javax.annotation.processing if possible, see http://b/132338110:
-javax\.annotation\.processing
-javax\.crypto
-javax\.crypto\.interfaces
-javax\.crypto\.spec
-javax\.net
-javax\.net\.ssl
-javax\.security\.auth
-javax\.security\.auth\.callback
-javax\.security\.auth\.login
-javax\.security\.auth\.x500
-javax\.security\.cert
-javax\.sql
-javax\.xml
-javax\.xml\.datatype
-javax\.xml\.namespace
-javax\.xml\.parsers
-javax\.xml\.transform
-javax\.xml\.transform\.dom
-javax\.xml\.transform\.sax
-javax\.xml\.transform\.stream
-javax\.xml\.validation
-javax\.xml\.xpath
-jdk\.internal\.util
-jdk\.internal\.vm\.annotation
-jdk\.net
-org\.w3c\.dom
-org\.w3c\.dom\.ls
-org\.w3c\.dom\.traversal
-# OpenJdk internal implementation.
-sun\.invoke\.util
-sun\.invoke\.empty
-sun\.misc
-sun\.util.*
-sun\.text.*
-sun\.security.*
-sun\.reflect.*
-sun\.nio.*
-sun\.net.*
-com\.sun\..*
-
-# TODO: Move these internal org.apache.harmony classes to libcore.*
-org\.apache\.harmony\.crypto\.internal
-org\.apache\.harmony\.dalvik
-org\.apache\.harmony\.dalvik\.ddmc
-org\.apache\.harmony\.luni\.internal\.util
-org\.apache\.harmony\.security
-org\.apache\.harmony\.security\.asn1
-org\.apache\.harmony\.security\.fortress
-org\.apache\.harmony\.security\.pkcs10
-org\.apache\.harmony\.security\.pkcs7
-org\.apache\.harmony\.security\.pkcs8
-org\.apache\.harmony\.security\.provider\.crypto
-org\.apache\.harmony\.security\.utils
-org\.apache\.harmony\.security\.x501
-org\.apache\.harmony\.security\.x509
-org\.apache\.harmony\.security\.x509\.tsp
-org\.apache\.harmony\.xml
-org\.apache\.harmony\.xml\.dom
-org\.apache\.harmony\.xml\.parsers
-
-org\.json
-org\.xmlpull\.v1
-org\.xmlpull\.v1\.sax2
-
-# TODO: jarjar org.kxml2.io to com.android org\.kxml2\.io
-org\.kxml2\.io
-org\.xml
-org\.xml\.sax
-org\.xml\.sax\.ext
-org\.xml\.sax\.helpers
-
-dalvik\..*
-libcore\..*
-android\..*
-com\.android\..*
-
-
-###################################################
-# android.test.base.jar
-junit\.extensions
-junit\.framework
-android\.test
-android\.test\.suitebuilder\.annotation
-
-
-###################################################
-# ext.jar
-# TODO: jarjar javax.sip to com.android
-javax\.sip
-javax\.sip\.address
-javax\.sip\.header
-javax\.sip\.message
-
-# TODO: jarjar org.apache.commons to com.android
-org\.apache\.commons\.codec
-org\.apache\.commons\.codec\.binary
-org\.apache\.commons\.codec\.language
-org\.apache\.commons\.codec\.net
-org\.apache\.commons\.logging
-org\.apache\.commons\.logging\.impl
-org\.apache\.http
-org\.apache\.http\.auth
-org\.apache\.http\.auth\.params
-org\.apache\.http\.client
-org\.apache\.http\.client\.entity
-org\.apache\.http\.client\.methods
-org\.apache\.http\.client\.params
-org\.apache\.http\.client\.protocol
-org\.apache\.http\.client\.utils
-org\.apache\.http\.conn
-org\.apache\.http\.conn\.params
-org\.apache\.http\.conn\.routing
-org\.apache\.http\.conn\.scheme
-org\.apache\.http\.conn\.ssl
-org\.apache\.http\.conn\.util
-org\.apache\.http\.cookie
-org\.apache\.http\.cookie\.params
-org\.apache\.http\.entity
-org\.apache\.http\.impl
-org\.apache\.http\.impl\.auth
-org\.apache\.http\.impl\.client
-org\.apache\.http\.impl\.client
-org\.apache\.http\.impl\.conn
-org\.apache\.http\.impl\.conn\.tsccm
-org\.apache\.http\.impl\.cookie
-org\.apache\.http\.impl\.entity
-org\.apache\.http\.impl\.io
-org\.apache\.http\.impl\.io
-org\.apache\.http\.io
-org\.apache\.http\.message
-org\.apache\.http\.params
-org\.apache\.http\.protocol
-org\.apache\.http\.util
-
-# TODO: jarjar gov.nist to com.android
-gov\.nist\.core
-gov\.nist\.core\.net
-gov\.nist\.javax\.sip
-gov\.nist\.javax\.sip\.address
-gov\.nist\.javax\.sip\.clientauthutils
-gov\.nist\.javax\.sip\.header
-gov\.nist\.javax\.sip\.header\.extensions
-gov\.nist\.javax\.sip\.header\.ims
-gov\.nist\.javax\.sip\.message
-gov\.nist\.javax\.sip\.parser
-gov\.nist\.javax\.sip\.parser\.extensions
-gov\.nist\.javax\.sip\.parser\.ims
-gov\.nist\.javax\.sip\.stack
-
-org\.ccil\.cowan\.tagsoup
-org\.ccil\.cowan\.tagsoup\.jaxp
-
-###################################################
-# framework.jar
-javax\.microedition\.khronos\.opengles
-javax\.microedition\.khronos\.egl
-
-android
-
-###################################################
-# apache-xml.jar
-org\.apache\.xml\.res
-org\.apache\.xml\.utils
-org\.apache\.xml\.utils\.res
-org\.apache\.xml\.dtm
-org\.apache\.xml\.dtm\.ref
-org\.apache\.xml\.dtm\.ref\.dom2dtm
-org\.apache\.xml\.dtm\.ref\.sax2dtm
-org\.apache\.xml\.serializer
-org\.apache\.xml\.serializer\.utils
-org\.apache\.xml\.serializer\.dom3
-org\.apache\.xpath
-org\.apache\.xpath\.operations
-org\.apache\.xpath\.domapi
-org\.apache\.xpath\.functions
-org\.apache\.xpath\.res
-org\.apache\.xpath\.axes
-org\.apache\.xpath\.objects
-org\.apache\.xpath\.patterns
-org\.apache\.xpath\.jaxp
-org\.apache\.xpath\.compiler
-org\.apache\.xalan
-org\.apache\.xalan\.res
-org\.apache\.xalan\.templates
-org\.apache\.xalan\.serialize
-org\.apache\.xalan\.extensions
-org\.apache\.xalan\.processor
-org\.apache\.xalan\.transformer
-org\.apache\.xalan\.xslt
-
-###################################################
-# Packages in the google namespace across all bootclasspath jars.
-com\.google\.android\..*
-com\.google\.vr\.platform.*
-
-###################################################
-# Packages used for Android in Chrome OS
-org\.chromium\.arc
-org\.chromium\.arc\..*
diff --git a/core/tasks/apidiff.mk b/core/tasks/csuite.mk
similarity index 63%
copy from core/tasks/apidiff.mk
copy to core/tasks/csuite.mk
index 76e4749..a8dba1d 100644
--- a/core/tasks/apidiff.mk
+++ b/core/tasks/csuite.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,10 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#
-# Rules for building API diffs.
-#
+test_suite_name := csuite
+test_suite_tradefed := csuite-tradefed
+test_suite_readme := test/app_compat/csuite/README.md
-.PHONY: api-diff
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
-api-diff: api-stubs-docs-jdiff
+.PHONY: csuite
+csuite: $(compatibility_zip)
+$(call dist-for-goals, csuite, $(compatibility_zip))
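The new csuite.mk reuses the shared compatibility.mk packaging rules, so C-Suite is built and dist'ed the same way as the other suites. A minimal sketch of producing the suite zip from a prepared shell; the lunch target here is only an example:

    # Sketch: build and dist the C-Suite package (lunch target is illustrative).
    source build/envsetup.sh
    lunch aosp_arm64-userdebug
    m csuite dist        # packages android-csuite.zip into $DIST_DIR (out/dist by default)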
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index cd5fa8e..fdd9591 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -14,9 +14,8 @@
test_suite_name := cts
test_suite_tradefed := cts-tradefed
-test_suite_dynamic_config := test/suite_harness/tools/cts-tradefed/DynamicConfig.xml
-test_suite_readme := test/suite_harness/tools/cts-tradefed/README
-include_test_suite_notice := true
+test_suite_dynamic_config := cts/tools/cts-tradefed/DynamicConfig.xml
+test_suite_readme := cts/tools/cts-tradefed/README
include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
diff --git a/core/tasks/find-shareduid-violation.mk b/core/tasks/find-shareduid-violation.mk
index 86052f2..d6885eb 100644
--- a/core/tasks/find-shareduid-violation.mk
+++ b/core/tasks/find-shareduid-violation.mk
@@ -16,8 +16,6 @@
shareduid_violation_modules_filename := $(PRODUCT_OUT)/shareduid_violation_modules.json
-find_shareduid_script := $(BUILD_SYSTEM)/tasks/find-shareduid-violation.py
-
$(shareduid_violation_modules_filename): $(INSTALLED_SYSTEMIMAGE_TARGET) \
$(INSTALLED_RAMDISK_TARGET) \
$(INSTALLED_BOOTIMAGE_TARGET) \
@@ -26,7 +24,15 @@
$(INSTALLED_PRODUCTIMAGE_TARGET) \
$(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
-$(shareduid_violation_modules_filename): $(find_shareduid_script)
+$(shareduid_violation_modules_filename): $(HOST_OUT_EXECUTABLES)/find_shareduid_violation
$(shareduid_violation_modules_filename): $(AAPT2)
- $(find_shareduid_script) $(PRODUCT_OUT) $(AAPT2) > $@
+ $(HOST_OUT_EXECUTABLES)/find_shareduid_violation \
+ --product_out $(PRODUCT_OUT) \
+ --aapt $(AAPT2) \
+ --copy_out_system $(TARGET_COPY_OUT_SYSTEM) \
+ --copy_out_vendor $(TARGET_COPY_OUT_VENDOR) \
+ --copy_out_product $(TARGET_COPY_OUT_PRODUCT) \
+ --copy_out_system_ext $(TARGET_COPY_OUT_SYSTEM_EXT) \
+ > $@
+
$(call dist-for-goals,droidcore,$(shareduid_violation_modules_filename))
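The checked-in Python script gives way to a prebuilt find_shareduid_violation host tool, and the makefile now names each partition staging directory explicitly. A hedged sketch of the equivalent manual run after a finished build; the device directory is illustrative and paths assume the default out/ layout:

    # Sketch: invoke the host tool by hand with the same flags the rule passes.
    OUT=out/target/product/generic_arm64     # $(PRODUCT_OUT); device name is illustrative
    out/host/linux-x86/bin/find_shareduid_violation \
        --product_out "$OUT" \
        --aapt out/host/linux-x86/bin/aapt2 \
        --copy_out_system system \
        --copy_out_vendor vendor \
        --copy_out_product product \
        --copy_out_system_ext system_ext \
        > "$OUT/shareduid_violation_modules.json"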
diff --git a/core/tasks/find-shareduid-violation.py b/core/tasks/find-shareduid-violation.py
deleted file mode 100755
index 1f8e4df..0000000
--- a/core/tasks/find-shareduid-violation.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import subprocess
-from glob import glob
-from collections import defaultdict
-import sys
-import json
-
-if len(sys.argv) < 3:
- product_out = os.environ["PRODUCT_OUT"]
- aapt = "aapt2"
-else:
- product_out = sys.argv[1]
- aapt = sys.argv[2]
-
-def execute(cmd):
- p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = map(lambda b: b.decode('utf-8'), p.communicate())
- return p.returncode == 0, out, err
-
-def make_aapt_cmds(file):
- return [aapt + ' dump ' + file + ' --file AndroidManifest.xml',
- aapt + ' dump xmltree ' + file + ' --file AndroidManifest.xml']
-
-def extract_shared_uid(file):
- for cmd in make_aapt_cmds(file):
- success, manifest, error_msg = execute(cmd)
- if success:
- break
- else:
- print(error_msg, file=sys.stderr)
- sys.exit()
- return None
-
- for l in manifest.split('\n'):
- if "sharedUserId" in l:
- return l.split('"')[-2]
- return None
-
-
-partitions = ["system", "vendor", "product"]
-
-shareduid_app_dict = defaultdict(list)
-
-for p in partitions:
- for f in glob(os.path.join(product_out, p, "*", "*", "*.apk")):
- apk_file = os.path.basename(f)
- shared_uid = extract_shared_uid(f)
-
- if shared_uid is None:
- continue
- shareduid_app_dict[shared_uid].append((p, apk_file))
-
-
-output = defaultdict(lambda: defaultdict(list))
-
-for uid, app_infos in shareduid_app_dict.items():
- partitions = {p for p, _ in app_infos}
- if len(partitions) > 1:
- for part in partitions:
- output[uid][part].extend([a for p, a in app_infos if p == part])
-
-print(json.dumps(output, indent=2, sort_keys=True))
diff --git a/core/tasks/host-unit-tests.mk b/core/tasks/host-unit-tests.mk
new file mode 100644
index 0000000..755b589
--- /dev/null
+++ b/core/tasks/host-unit-tests.mk
@@ -0,0 +1,50 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# `host-unit-tests` shall only include host-side unit tests that don't require a device to run. Tests
+# included will be run as part of the presubmit check.
+# To add tests to the suite, do one of the following:
+# * For test modules configured with Android.bp, set attribute `test_options: { unit_test: true }`
+# * For test modules configured with mk, set `LOCAL_IS_UNIT_TEST := true`
+.PHONY: host-unit-tests
+
+intermediates_dir := $(call intermediates-dir-for,PACKAGING,host-unit-tests)
+host_unit_tests_zip := $(PRODUCT_OUT)/host-unit-tests.zip
+# Get the hostside libraries to be packaged in the test zip. Unlike
+# device-tests.mk or general-tests.mk, the files are not copied to the
+# testcases directory.
+my_host_shared_lib_for_host_unit_tests := $(foreach f,$(COMPATIBILITY.host-unit-tests.HOST_SHARED_LIBRARY.FILES),$(strip \
+ $(eval _cmf_tuple := $(subst :, ,$(f))) \
+ $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
+ $(_cmf_src)))
+
+$(host_unit_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_host_unit_tests)
+
+$(host_unit_tests_zip) : $(COMPATIBILITY.host-unit-tests.FILES) $(my_host_shared_lib_for_host_unit_tests) $(SOONG_ZIP)
+ echo $(sort $(COMPATIBILITY.host-unit-tests.FILES)) | tr " " "\n" > $@.list
+ grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+ echo "" >> $@-host-libs.list
+ $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+ echo $$shared_lib >> $@-host-libs.list; \
+ done
+ grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
+ $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
+ -P target -C $(PRODUCT_OUT) -l $@-target.list \
+ -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
+ rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
+
+host-unit-tests: $(host_unit_tests_zip)
+$(call dist-for-goals, host-unit-tests, $(host_unit_tests_zip))
+
+tests: host-unit-tests
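host-unit-tests.mk zips every module tagged as a host unit test, its shared host libraries, and any target-side test files under host/, host/testcases/ and target/ prefixes. A short sketch of building and inspecting the package, assuming the default DIST_DIR of out/dist:

    # Sketch: build the host unit test package and peek at its layout.
    m host-unit-tests dist
    unzip -l out/dist/host-unit-tests.zip | head   # entries under host/, target/, host/testcases/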
diff --git a/core/tasks/host_init_verifier.mk b/core/tasks/host_init_verifier.mk
new file mode 100644
index 0000000..bdf996c
--- /dev/null
+++ b/core/tasks/host_init_verifier.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+host_init_verifier_output := $(PRODUCT_OUT)/host_init_verifier_output.txt
+
+$(host_init_verifier_output): \
+ $(INSTALLED_SYSTEMIMAGE_TARGET) \
+ $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
+ $(INSTALLED_VENDORIMAGE_TARGET) \
+ $(INSTALLED_ODMIMAGE_TARGET) \
+ $(INSTALLED_PRODUCTIMAGE_TARGET) \
+ $(call intermediates-dir-for,ETC,passwd_system)/passwd_system \
+ $(call intermediates-dir-for,ETC,passwd_system_ext)/passwd_system_ext \
+ $(call intermediates-dir-for,ETC,passwd_vendor)/passwd_vendor \
+ $(call intermediates-dir-for,ETC,passwd_odm)/passwd_odm \
+ $(call intermediates-dir-for,ETC,passwd_product)/passwd_product \
+ $(call intermediates-dir-for,ETC,plat_property_contexts)/plat_property_contexts \
+ $(call intermediates-dir-for,ETC,system_ext_property_contexts)/system_ext_property_contexts \
+ $(call intermediates-dir-for,ETC,product_property_contexts)/product_property_contexts \
+ $(call intermediates-dir-for,ETC,vendor_property_contexts)/vendor_property_contexts \
+ $(call intermediates-dir-for,ETC,odm_property_contexts)/odm_property_contexts
+
+# Run host_init_verifier on the partition staging directories.
+$(host_init_verifier_output): $(HOST_INIT_VERIFIER)
+ $(HOST_INIT_VERIFIER) \
+ -p $(call intermediates-dir-for,ETC,passwd_system)/passwd_system \
+ -p $(call intermediates-dir-for,ETC,passwd_system_ext)/passwd_system_ext \
+ -p $(call intermediates-dir-for,ETC,passwd_vendor)/passwd_vendor \
+ -p $(call intermediates-dir-for,ETC,passwd_odm)/passwd_odm \
+ -p $(call intermediates-dir-for,ETC,passwd_product)/passwd_product \
+ --property-contexts=$(call intermediates-dir-for,ETC,plat_property_contexts)/plat_property_contexts \
+ --property-contexts=$(call intermediates-dir-for,ETC,system_ext_property_contexts)/system_ext_property_contexts \
+ --property-contexts=$(call intermediates-dir-for,ETC,product_property_contexts)/product_property_contexts \
+ --property-contexts=$(call intermediates-dir-for,ETC,vendor_property_contexts)/vendor_property_contexts \
+ --property-contexts=$(call intermediates-dir-for,ETC,odm_property_contexts)/odm_property_contexts \
+ --out_system $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM) \
+ --out_system_ext $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM_EXT) \
+ --out_vendor $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR) \
+ --out_odm $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ODM) \
+ --out_product $(PRODUCT_OUT)/$(TARGET_COPY_OUT_PRODUCT) \
+ > $@
+
+$(call dist-for-goals,droidcore,$(host_init_verifier_output))
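The new task runs host_init_verifier against the partition staging directories (not the flashed images), feeding it the generated passwd files and property contexts, and dists the report alongside droidcore. A sketch of producing and reading it:

    # Sketch: the report is generated during a normal dist build.
    m dist
    less "$ANDROID_PRODUCT_OUT/host_init_verifier_output.txt"   # init script / property findings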
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index f6cec15..4bbfd39 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -14,11 +14,13 @@
'"compatibility_suites": [$(foreach w,$(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES)),"$(w)", )], ' \
'"auto_test_config": [$(ALL_MODULES.$(m).auto_test_config)], ' \
'"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)", ' \
- '"test_config": [$(if $(ALL_MODULES.$(m).TEST_CONFIG),"$(ALL_MODULES.$(m).TEST_CONFIG)")], ' \
+ '"test_config": [$(foreach w,$(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS)),"$(w)", )], ' \
'"dependencies": [$(foreach w,$(sort $(ALL_DEPS.$(m).ALL_DEPS)),"$(w)", )], ' \
'"srcs": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCS)),"$(w)", )], ' \
'"srcjars": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCJARS)),"$(w)", )], ' \
'"classes_jar": [$(foreach w,$(sort $(ALL_MODULES.$(m).CLASSES_JAR)),"$(w)", )], ' \
+ '"test_mainline_modules": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES)),"$(w)", )], ' \
+ '"is_unit_test": "$(ALL_MODULES.$(m).IS_UNIT_TEST)", ' \
'},\n' \
) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
$(hide) echo '}' >> $@
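module-info.json now lists every test config (including EXTRA_TEST_CONFIGS), the mainline modules a test targets, and an is_unit_test flag. A sketch of querying the new fields with jq; the module name is purely illustrative:

    # Sketch: regenerate module-info.json and inspect the new fields for one module.
    m $(get_build_var PRODUCT_OUT)/module-info.json
    jq '."hello_world_test" | {test_config, test_mainline_modules, is_unit_test}' \
        "$ANDROID_PRODUCT_OUT/module-info.json"   # module name is an example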
diff --git a/core/tasks/mts.mk b/core/tasks/mts.mk
index e800505..e084856 100644
--- a/core/tasks/mts.mk
+++ b/core/tasks/mts.mk
@@ -13,13 +13,20 @@
# limitations under the License.
ifneq ($(wildcard test/mts/README.md),)
-test_suite_name := mts
-test_suite_tradefed := mts-tradefed
-test_suite_readme := test/mts/README.md
-include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+mts_test_suites :=
+mts_test_suites += mts
-.PHONY: mts
-mts: $(compatibility_zip)
-$(call dist-for-goals, mts, $(compatibility_zip))
+$(foreach module, $(mts_modules), $(eval mts_test_suites += mts-$(module)))
+
+$(foreach suite, $(mts_test_suites), \
+ $(eval test_suite_name := $(suite)) \
+ $(eval test_suite_tradefed := mts-tradefed) \
+ $(eval test_suite_readme := test/mts/README.md) \
+ $(eval include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk) \
+ $(eval .PHONY: $(suite)) \
+ $(eval $(suite): $(compatibility_zip)) \
+ $(eval $(call dist-for-goals, $(suite), $(compatibility_zip))) \
+)
+
endif
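mts.mk now loops over $(mts_modules) and stamps out one compatibility suite per mainline module (mts-<module>) next to the monolithic mts goal, all sharing mts-tradefed and the same README. A sketch of building one per-module zip; mts_modules is defined elsewhere in the tree and "conscrypt" is only an example entry:

    # Sketch: build one per-module suite and the full suite.
    m mts-conscrypt dist    # dists android-mts-conscrypt.zip (module name is illustrative)
    m mts dist              # the monolithic suite is still available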
diff --git a/core/tasks/platform_availability_check.mk b/core/tasks/platform_availability_check.mk
index 043d130..1524758 100644
--- a/core/tasks/platform_availability_check.mk
+++ b/core/tasks/platform_availability_check.mk
@@ -17,6 +17,9 @@
# Check whether there is any module that isn't available for platform
# is installed to the platform.
+# Skip for unbundled builds that don't produce a platform image.
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+
# Filter FAKE and NON_INSTALLABLE modules out and then collect those that are not
# available for platform
_modules_not_available_for_platform := \
@@ -26,11 +29,33 @@
$(if $(filter true,$(ALL_MODULES.$(m).NOT_AVAILABLE_FOR_PLATFORM)),\
$(m))))))
-_violators_with_path := $(foreach m,$(sort $(_modules_not_available_for_platform)),\
+ifndef ALLOW_MISSING_DEPENDENCIES
+ _violators_with_path := $(foreach m,$(sort $(_modules_not_available_for_platform)),\
$(m):$(word 1,$(ALL_MODULES.$(m).PATH))\
-)
+ )
-$(call maybe-print-list-and-error,$(_violators_with_path),\
+ $(call maybe-print-list-and-error,$(_violators_with_path),\
The following modules are requested to be installed but are not available \
for platform because they do not have "//apex_available:platform" or \
they depend on other modules that are not available for platform)
+
+else
+
+# Don't error out immediately when ALLOW_MISSING_DEPENDENCIES is set.
+# Instead, add a dependency on a rule that prints the error message.
+ define not_available_for_platform_rule
+ not_installable_file := $(patsubst $(OUT_DIR)/%,$(OUT_DIR)/NOT_AVAILABLE_FOR_PLATFORM/%,$(1))
+ $(1): $$(not_installable_file)
+ $$(not_installable_file):
+ $(call echo-error,$(2),Module is requested to be installed but is not \
+available for platform because it does not have "//apex_available:platform" or \
+it depends on other modules that are not available for platform.)
+ exit 1
+ endef
+
+ $(foreach m,$(_modules_not_available_for_platform),\
+ $(foreach i,$(filter-out $(HOST_OUT)/%,$(ALL_MODULES.$(m).INSTALLED)),\
+ $(eval $(call not_available_for_platform_rule,$(i),$(m)))))
+endif
+
+endif
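With ALLOW_MISSING_DEPENDENCIES unset the check still aborts at parse time; with it set, the error moves onto a per-file rule, so the build only fails when an offending installed file is actually requested. A rough sketch of the two behaviours (in practice ALLOW_MISSING_DEPENDENCIES is usually set by unbundled build wrappers rather than by hand):

    # Sketch: the same apex_available violation surfaces at different times.
    m droid                                   # fails while parsing, listing module:path pairs
    ALLOW_MISSING_DEPENDENCIES=true m droid   # fails only when the offending file gets built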
diff --git a/core/tasks/recovery_snapshot.mk b/core/tasks/recovery_snapshot.mk
new file mode 100644
index 0000000..525273b
--- /dev/null
+++ b/core/tasks/recovery_snapshot.mk
@@ -0,0 +1,34 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+current_makefile := $(lastword $(MAKEFILE_LIST))
+
+# RECOVERY_SNAPSHOT_VERSION must be set to 'current' in order to generate a recovery snapshot.
+ifeq ($(RECOVERY_SNAPSHOT_VERSION),current)
+
+.PHONY: recovery-snapshot
+recovery-snapshot: $(SOONG_RECOVERY_SNAPSHOT_ZIP)
+
+$(call dist-for-goals, recovery-snapshot, $(SOONG_RECOVERY_SNAPSHOT_ZIP))
+
+else # RECOVERY_SNAPSHOT_VERSION is NOT set to 'current'
+
+.PHONY: recovery-snapshot
+recovery-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
+recovery-snapshot:
+ $(call echo-error,$(PRIVATE_MAKEFILE),\
+ "CANNOT generate Recovery snapshot. RECOVERY_SNAPSHOT_VERSION must be set to 'current'.")
+ exit 1
+
+endif # RECOVERY_SNAPSHOT_VERSION
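recovery-snapshot mirrors the vendor snapshot flow: the goal dists the Soong-generated zip only when RECOVERY_SNAPSHOT_VERSION is 'current', and otherwise degenerates into a phony target that prints the error above. A sketch; the variable normally comes from the board configuration, and setting it on the command line is shown only for illustration:

    # Sketch: generate and dist the recovery snapshot.
    RECOVERY_SNAPSHOT_VERSION=current m recovery-snapshot dist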
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index f394b96..570a39a 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -29,15 +29,12 @@
test_suite_subdir := android-$(test_suite_name)
out_dir := $(HOST_OUT)/$(test_suite_name)/$(test_suite_subdir)
test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
-test_tools := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
+test_tools := $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
$(HOST_OUT_JAVA_LIBRARIES)/tradefed-no-fwk.jar \
$(HOST_OUT_JAVA_LIBRARIES)/tradefed-test-framework.jar \
$(HOST_OUT_JAVA_LIBRARIES)/loganalysis.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util-tests.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/compatibility-common-util-tests.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/compatibility-tradefed-tests.jar \
+ $(HOST_OUT_JAVA_LIBRARIES)/compatibility-tradefed.jar \
$(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed).jar \
$(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed)-tests.jar \
$(HOST_OUT_EXECUTABLES)/$(test_suite_tradefed) \
@@ -80,10 +77,8 @@
$(HOST_OUT_NOTICE_FILES) $(TARGET_OUT_NOTICE_FILES), \
$(compatibility_zip_deps)))
-ifeq ($(include_test_suite_notice),true)
- compatibility_zip_deps += $(test_suite_notice_txt)
- compatibility_zip_resources += $(test_suite_notice_txt)
-endif
+compatibility_zip_deps += $(test_suite_notice_txt)
+compatibility_zip_resources += $(test_suite_notice_txt)
compatibility_zip := $(out_dir).zip
$(compatibility_zip): PRIVATE_OUT_DIR := $(out_dir)
@@ -112,7 +107,6 @@
test_suite_readme :=
test_suite_prebuilt_tools :=
test_suite_tools :=
-include_test_suite_notice :=
test_suite_jdk :=
test_suite_jdk_dir :=
host_shared_libs :=
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 6cafa4a..2b43f0f 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -50,7 +50,7 @@
ifeq ($(ALLOW_MISSING_DEPENDENCIES),true)
# Ignore unknown installed files on partial builds
my_missing_files =
-else ifeq ($(my_modules_strict),true)
+else ifneq ($(my_modules_strict),false)
my_missing_files = $(shell $(call echo-error,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(eval my_missing_error := true)
endif
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
new file mode 100644
index 0000000..47bf29c
--- /dev/null
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -0,0 +1,33 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# $(1): List of target native files to copy.
+# $(2): Copy destination directory.
+# Evaluates to a list of ":"-separated pairs src:dst.
+define target-native-copy-pairs
+$(foreach m,$(1),\
+ $(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
+ $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
+ $(foreach i, $(_built_files),\
+ $(eval bui_ins := $(subst :,$(space),$(i)))\
+ $(eval ins := $(word 2,$(bui_ins)))\
+ $(if $(filter $(TARGET_OUT_ROOT)/%,$(ins)),\
+ $(eval bui := $(word 1,$(bui_ins)))\
+ $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
+ $(patsubst system/%,DATA/%,\
+ $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
+ $(bui):$(2)/$(my_copy_dest))))
+endef
diff --git a/core/tasks/vendor_snapshot.mk b/core/tasks/vendor_snapshot.mk
index 8234e3f..83c1379 100644
--- a/core/tasks/vendor_snapshot.mk
+++ b/core/tasks/vendor_snapshot.mk
@@ -22,6 +22,11 @@
$(call dist-for-goals, vendor-snapshot, $(SOONG_VENDOR_SNAPSHOT_ZIP))
+.PHONY: vendor-fake-snapshot
+vendor-fake-snapshot: $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP)
+
+$(call dist-for-goals, vendor-fake-snapshot, $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP):fake/$(notdir $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP)))
+
else # BOARD_VNDK_VERSION is NOT set to 'current'
.PHONY: vendor-snapshot
@@ -31,4 +36,11 @@
"CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.")
exit 1
+.PHONY: vendor-fake-snapshot
+vendor-fake-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
+vendor-fake-snapshot:
+ $(call echo-error,$(PRIVATE_MAKEFILE),\
+ "CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.")
+ exit 1
+
endif # BOARD_VNDK_VERSION
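A vendor-fake-snapshot goal is added next to vendor-snapshot; it dists the Soong fake snapshot zip under a fake/ prefix in DIST_DIR and carries the same BOARD_VNDK_VERSION requirement. Sketch:

    # Sketch: both flavors need BOARD_VNDK_VERSION := current in the BoardConfig.
    m vendor-snapshot dist
    m vendor-fake-snapshot dist   # zip lands under fake/ in $DIST_DIR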
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index a3247da..95c4d24 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -15,6 +15,8 @@
-include external/linux-kselftest/android/kselftest_test_list.mk
-include external/ltp/android/ltp_package_list.mk
+include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
+
test_suite_name := vts
test_suite_tradefed := vts-tradefed
test_suite_readme := test/vts/tools/vts-core-tradefed/README
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index c877e8b..fe90165 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -84,40 +84,17 @@
# generate the range of allowed SDK versions, so it must have an entry for every
# unreleased API level targetable by this branch, not just those that are valid
# lunch targets for this branch.
-PLATFORM_VERSION.RP1A := R
-PLATFORM_VERSION.SP1A := S
+
+# The last stable version name of the platform that was released. During
+# development, this stays at that previous version, while the codename indicates
+# further work based on the previous version.
+PLATFORM_VERSION_LAST_STABLE := 11
+.KATI_READONLY := PLATFORM_VERSION_LAST_STABLE
# These are the current development codenames, if the build is not a final
# release build. If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.RP1A := R
PLATFORM_VERSION_CODENAME.SP1A := S
-ifndef PLATFORM_VERSION
- PLATFORM_VERSION := $(PLATFORM_VERSION.$(TARGET_PLATFORM_VERSION))
- ifndef PLATFORM_VERSION
- # PLATFORM_VERSION falls back to TARGET_PLATFORM_VERSION
- PLATFORM_VERSION := $(TARGET_PLATFORM_VERSION)
- endif
-endif
-.KATI_READONLY := PLATFORM_VERSION
-
-ifndef PLATFORM_SDK_VERSION
- # This is the canonical definition of the SDK version, which defines
- # the set of APIs and functionality available in the platform. It
- # is a single integer that increases monotonically as updates to
- # the SDK are released. It should only be incremented when the APIs for
- # the new release are frozen (so that developers don't write apps against
- # intermediate builds). During development, this number remains at the
- # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
- # the code-name of the new development work.
-
- # When you increment the PLATFORM_SDK_VERSION please ensure you also
- # clear out the following text file of all older PLATFORM_VERSION's:
- # cts/tests/tests/os/assets/platform_versions.txt
- PLATFORM_SDK_VERSION := 29
-endif
-.KATI_READONLY := PLATFORM_SDK_VERSION
-
ifndef PLATFORM_VERSION_CODENAME
PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
ifndef PLATFORM_VERSION_CODENAME
@@ -152,6 +129,32 @@
PLATFORM_VERSION_CODENAME \
PLATFORM_VERSION_ALL_CODENAMES
+ifndef PLATFORM_VERSION
+ ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ PLATFORM_VERSION := $(PLATFORM_VERSION_LAST_STABLE)
+ else
+ PLATFORM_VERSION := $(PLATFORM_VERSION_CODENAME)
+ endif
+endif
+.KATI_READONLY := PLATFORM_VERSION
+
+ifndef PLATFORM_SDK_VERSION
+ # This is the canonical definition of the SDK version, which defines
+ # the set of APIs and functionality available in the platform. It
+ # is a single integer that increases monotonically as updates to
+ # the SDK are released. It should only be incremented when the APIs for
+ # the new release are frozen (so that developers don't write apps against
+ # intermediate builds). During development, this number remains at the
+ # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
+ # the code-name of the new development work.
+
+ # When you increment the PLATFORM_SDK_VERSION please ensure you also
+ # clear out the following text file of all older PLATFORM_VERSION's:
+ # cts/tests/tests/os/assets/platform_versions.txt
+ PLATFORM_SDK_VERSION := 30
+endif
+.KATI_READONLY := PLATFORM_SDK_VERSION
+
ifeq (REL,$(PLATFORM_VERSION_CODENAME))
PLATFORM_PREVIEW_SDK_VERSION := 0
else
@@ -237,7 +240,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2020-08-05
+ PLATFORM_SECURITY_PATCH := 2021-01-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
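The version logic is inverted: PLATFORM_VERSION_LAST_STABLE (11) becomes the anchor, PLATFORM_VERSION resolves to the codename on development branches and to the last stable number only on REL builds, PLATFORM_SDK_VERSION moves to 30, and the security patch level to 2021-01-05. A quick way to confirm what a lunch target resolves to:

    # Sketch: confirm the resolved values for the current lunch target.
    get_build_var PLATFORM_VERSION          # "S" on a development branch, "11" once REL
    get_build_var PLATFORM_SDK_VERSION      # 30
    get_build_var PLATFORM_SECURITY_PATCH   # 2021-01-05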
diff --git a/envsetup.sh b/envsetup.sh
index e981034..8fa608b 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -34,6 +34,7 @@
- gomod: Go to the directory containing a module.
- pathmod: Get the directory containing a module.
- refreshmod: Refresh list of modules for allmod/gomod/pathmod.
+- syswrite: Remount partitions (e.g. system.img) as writable, rebooting if necessary.
Environment options:
- SANITIZE_HOST: Set to 'address' to use ASAN for all host modules.
@@ -306,6 +307,9 @@
unset ANDROID_HOST_OUT
export ANDROID_HOST_OUT=$(get_abs_build_var HOST_OUT)
+ unset ANDROID_SOONG_HOST_OUT
+ export ANDROID_SOONG_HOST_OUT=$(get_abs_build_var SOONG_HOST_OUT)
+
unset ANDROID_HOST_OUT_TESTCASES
export ANDROID_HOST_OUT_TESTCASES=$(get_abs_build_var HOST_OUT_TESTCASES)
@@ -317,6 +321,22 @@
#export HOST_EXTRACFLAGS="-I "$T/system/kernel_headers/host_include
}
+function bazel()
+{
+ local T="$(gettop)"
+ if [ ! "$T" ]; then
+ echo "Couldn't locate the top of the tree. Try setting TOP."
+ return
+ fi
+
+ if which bazel &>/dev/null; then
+ >&2 echo "NOTE: bazel() function sourced from envsetup.sh is being used instead of $(which bazel)"
+ >&2 echo
+ fi
+
+ "$T/tools/bazel" "$@"
+}
+
function printconfig()
{
local T=$(gettop)
@@ -355,7 +375,7 @@
function addcompletions()
{
- local T dir f
+ local f=
# Keep us from trying to run in something that's neither bash nor zsh.
if [ -z "$BASH_VERSION" -a -z "$ZSH_VERSION" ]; then
@@ -768,7 +788,7 @@
local TOPFILE=build/make/core/envsetup.mk
if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
# The following circumlocution ensures we remove symlinks from TOP.
- (cd $TOP; PWD= /bin/pwd)
+ (cd "$TOP"; PWD= /bin/pwd)
else
if [ -f $TOPFILE ] ; then
# The following circumlocution (repeated below as well) ensures
@@ -778,13 +798,13 @@
else
local HERE=$PWD
local T=
- while [ \( ! \( -f $TOPFILE \) \) -a \( $PWD != "/" \) ]; do
+ while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
\cd ..
T=`PWD= /bin/pwd -P`
done
- \cd $HERE
+ \cd "$HERE"
if [ -f "$T/$TOPFILE" ]; then
- echo $T
+ echo "$T"
fi
fi
fi
@@ -857,6 +877,18 @@
fi
}
+# syswrite - disable verity, reboot if needed, and remount image
+#
+# Easy way to make system.img/etc writable
+function syswrite() {
+ adb wait-for-device && adb root || return 1
+ if [[ $(adb disable-verity | grep "reboot") ]]; then
+ echo "rebooting"
+ adb reboot && adb wait-for-device && adb root || return 1
+ fi
+ adb wait-for-device && adb remount || return 1
+}
+
# coredump_setup - enable core dumps globally for any process
# that has the core-file-size limit set correctly
#
@@ -1342,7 +1374,7 @@
mkdir -p $ANDROID_PRODUCT_OUT || return 1
# Note, can't use absolute path because of the way make works.
- m out/target/product/$(get_build_var TARGET_DEVICE)/module-info.json \
+ m $(get_build_var PRODUCT_OUT)/module-info.json \
> $ANDROID_PRODUCT_OUT/module-info.json.build.log 2>&1
}
@@ -1587,25 +1619,26 @@
# This allows loading only approved vendorsetup.sh files
function source_vendorsetup() {
unset VENDOR_PYTHONPATH
+ local T="$(gettop)"
allowed=
- for f in $(find -L device vendor product -maxdepth 4 -name 'allowed-vendorsetup_sh-files' 2>/dev/null | sort); do
+ for f in $(cd "$T" && find -L device vendor product -maxdepth 4 -name 'allowed-vendorsetup_sh-files' 2>/dev/null | sort); do
if [ -n "$allowed" ]; then
echo "More than one 'allowed_vendorsetup_sh-files' file found, not including any vendorsetup.sh files:"
echo " $allowed"
echo " $f"
return
fi
- allowed="$f"
+ allowed="$T/$f"
done
allowed_files=
[ -n "$allowed" ] && allowed_files=$(cat "$allowed")
for dir in device vendor product; do
- for f in $(test -d $dir && \
+ for f in $(cd "$T" && test -d $dir && \
find -L $dir -maxdepth 4 -name 'vendorsetup.sh' 2>/dev/null | sort); do
if [[ -z "$allowed" || "$allowed_files" =~ $f ]]; then
- echo "including $f"; . "$f"
+ echo "including $f"; . "$T/$f"
else
echo "ignoring $f, not in $allowed"
fi
@@ -1613,6 +1646,41 @@
done
}
+function showcommands() {
+ local T=$(gettop)
+ if [[ -z "$TARGET_PRODUCT" ]]; then
+ >&2 echo "TARGET_PRODUCT not set. Run lunch."
+ return
+ fi
+ case $(uname -s) in
+ Darwin)
+ PREBUILT_NAME=darwin-x86
+ ;;
+ Linux)
+ PREBUILT_NAME=linux-x86
+ ;;
+ *)
+ >&2 echo Unknown host $(uname -s)
+ return
+ ;;
+ esac
+ if [[ -z "$OUT_DIR" ]]; then
+ if [[ -z "$OUT_DIR_COMMON_BASE" ]]; then
+ OUT_DIR=out
+ else
+ OUT_DIR=${OUT_DIR_COMMON_BASE}/${PWD##*/}
+ fi
+ fi
+ if [[ "$1" == "--regenerate" ]]; then
+ shift 1
+ NINJA_ARGS="-t commands $@" m
+ else
+ (cd $T && prebuilts/build-tools/$PREBUILT_NAME/bin/ninja \
+ -f $OUT_DIR/combined-${TARGET_PRODUCT}.ninja \
+ -t commands "$@")
+ fi
+}
+
validate_current_shell
source_vendorsetup
addcompletions
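envsetup.sh picks up three new helpers: syswrite (root, disable verity, reboot if needed, remount), a bazel wrapper that always runs the checked-in tools/bazel, and showcommands for dumping the ninja command lines of the current product. A short usage sketch after lunch:

    # Sketch: typical use of the new helpers.
    syswrite                    # adb root + disable-verity (+ reboot if required) + remount
    showcommands                # print commands from combined-$TARGET_PRODUCT.ninja
    showcommands --regenerate   # rebuild first, then print the commands
    bazel info                  # runs the in-tree tools/bazel, not any bazel on $PATH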
diff --git a/rbesetup.sh b/rbesetup.sh
index f9317a3..ec39e6e 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -1,4 +1,31 @@
-source build/envsetup.sh
+function _source_env_setup_script() {
+ local -r ENV_SETUP_SCRIPT="build/make/envsetup.sh"
+ local -r TOP_DIR=$(
+ while [[ ! -f "${ENV_SETUP_SCRIPT}" ]] && [[ "${PWD}" != "/" ]]; do
+ \cd ..
+ done
+ if [[ -f "${ENV_SETUP_SCRIPT}" ]]; then
+ echo "$(PWD= /bin/pwd -P)"
+ fi
+ )
+
+ local -r FULL_PATH_ENV_SETUP_SCRIPT="${TOP_DIR}/${ENV_SETUP_SCRIPT}"
+ if [[ ! -f "${FULL_PATH_ENV_SETUP_SCRIPT}" ]]; then
+ echo "ERROR: Unable to source ${ENV_SETUP_SCRIPT}"
+ return 1
+ fi
+
+ # Need to change directory to the repo root so vendor scripts can be sourced
+ # as well.
+ local -r CUR_DIR=$PWD
+ \cd "${TOP_DIR}"
+ source "${FULL_PATH_ENV_SETUP_SCRIPT}"
+ \cd "${CUR_DIR}"
+}
+
+# This function needs to run first because the function definitions below may
+# rely on functions defined in envsetup.sh.
+_source_env_setup_script || return
# This function prefixes the given command with appropriate variables needed
# for the build to be executed with RBE.
@@ -28,9 +55,25 @@
# ANDROID_ENABLE_METRICS_UPLOAD.
function _export_metrics_uploader() {
local uploader_path="$(gettop)/vendor/google/misc/metrics_uploader_prebuilt/metrics_uploader.sh"
- if [ -x "${uploader_path}" ]; then
+ if [[ -x "${uploader_path}" ]]; then
export ANDROID_ENABLE_METRICS_UPLOAD="${uploader_path}"
fi
}
+# This function sets the RBE-specific environment variables needed for the build to
+# be executed by RBE. This file should be sourced once per checkout of Android code.
+function _set_rbe_vars() {
+ unset USE_GOMA
+ export USE_RBE="true"
+ export RBE_CXX_EXEC_STRATEGY="racing"
+ export RBE_JAVAC_EXEC_STRATEGY="racing"
+ export RBE_R8_EXEC_STRATEGY="racing"
+ export RBE_D8_EXEC_STRATEGY="racing"
+ export RBE_use_unified_cas_ops="true"
+ export RBE_JAVAC=1
+ export RBE_R8=1
+ export RBE_D8=1
+}
+
_export_metrics_uploader
+_set_rbe_vars
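rbesetup.sh now locates and sources envsetup.sh on its own and exports the RBE configuration up front, so a single source of the file prepares a shell for RBE builds. Sketch:

    # Sketch: one source per checkout; later builds pick up RBE automatically.
    source build/make/rbesetup.sh
    env | grep -E '^(USE_RBE|RBE_)'   # USE_RBE=true plus the *_EXEC_STRATEGY=racing settings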
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
index e9fb096..342abd7 100644
--- a/target/board/BoardConfigEmuCommon.mk
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -33,8 +33,8 @@
# emulator needs super.img
BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
- # 3G + header
- BOARD_SUPER_PARTITION_SIZE := 3229614080
+ # 4G + 8M
+ BOARD_SUPER_PARTITION_SIZE := 4303355904
BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
ifeq ($(QEMU_USE_SYSTEM_EXT_PARTITIONS),true)
@@ -56,8 +56,8 @@
vendor
endif
- # 3G
- BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE := 3221225472
+ # 4G
+ BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE := 4294967296
# in build environment to speed up make -j
ifeq ($(QEMU_DISABLE_AVB),true)
@@ -73,11 +73,11 @@
endif
#vendor boot
-TARGET_NO_VENDOR_BOOT := false
BOARD_INCLUDE_DTB_IN_BOOTIMG := false
BOARD_BOOT_HEADER_VERSION := 3
BOARD_MKBOOTIMG_ARGS += --header_version $(BOARD_BOOT_HEADER_VERSION)
BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE := 0x06000000
+BOARD_RAMDISK_USE_LZ4 := true
# Enable chain partition for system.
BOARD_AVB_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
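The emulator super partition grows from "3G + header" to "4G + 8M" and the dynamic partition group to a flat 4G. The raw byte counts check out, as the quick shell arithmetic below confirms:

    # Sanity check of the new sizes: 4 GiB + 8 MiB, and 4 GiB exactly.
    echo $(( 4 * 1024**3 + 8 * 1024**2 ))   # 4303355904 -> BOARD_SUPER_PARTITION_SIZE
    echo $(( 4 * 1024**3 ))                 # 4294967296 -> BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE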
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 8d80c9b..a2150ad 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -11,7 +11,9 @@
# This flag is set by mainline but isn't desired for GSI.
BOARD_USES_SYSTEM_OTHER_ODEX :=
-# system.img is always ext4 with sparse option
+# system.img is always ext4 and non-sparse.
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+
# GSI also includes make_f2fs to support a userdata partition in f2fs
# for some devices
TARGET_USERIMAGES_USE_F2FS := true
diff --git a/target/board/BoardConfigModuleCommon.mk b/target/board/BoardConfigModuleCommon.mk
new file mode 100644
index 0000000..24c01a5
--- /dev/null
+++ b/target/board/BoardConfigModuleCommon.mk
@@ -0,0 +1,6 @@
+# BoardConfigModuleCommon.mk
+#
+# Common compile-time settings for module builds.
+
+# Required for all module devices.
+TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/BoardConfigPixelCommon.mk b/target/board/BoardConfigPixelCommon.mk
new file mode 100644
index 0000000..a970fec
--- /dev/null
+++ b/target/board/BoardConfigPixelCommon.mk
@@ -0,0 +1,18 @@
+# BoardConfigPixelCommon.mk
+#
+# Common compile-time definitions for Pixel devices.
+
+# Using sha256 for dm-verity partitions. b/156162446
+# system, system_other, system_ext and product.
+BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_SYSTEM_OTHER_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+
+# vendor and odm.
+BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+
+# vendor_dlkm and odm_dlkm.
+BOARD_AVB_VENDOR_DLKM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_ODM_DLKM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
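The new Pixel common config appends --hash_algorithm sha256 to every partition's AVB hashtree footer arguments, so the dm-verity trees on these images are built with sha256 rather than the default. A hedged way to verify the effect on a built image (the image path is illustrative):

    # Sketch: check the hash algorithm recorded in an image's hashtree descriptor.
    avbtool info_image --image "$ANDROID_PRODUCT_OUT/vendor.img" | grep -i 'hash algorithm'   # expect sha256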
diff --git a/target/board/emulator_arm64/BoardConfig.mk b/target/board/emulator_arm64/BoardConfig.mk
index b34ccb4..9293625 100644
--- a/target/board/emulator_arm64/BoardConfig.mk
+++ b/target/board/emulator_arm64/BoardConfig.mk
@@ -19,11 +19,7 @@
TARGET_CPU_VARIANT := generic
TARGET_CPU_ABI := arm64-v8a
-TARGET_2ND_ARCH := arm
-TARGET_2ND_CPU_ABI := armeabi-v7a
-TARGET_2ND_CPU_ABI2 := armeabi
-
-ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk vts10,$(MAKECMDGOALS)),)
+ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
# DO NOT USE
# DO NOT USE
#
@@ -56,7 +52,6 @@
include build/make/target/board/BoardConfigEmuCommon.mk
TARGET_NO_KERNEL := false
-TARGET_NO_VENDOR_BOOT := false
BOARD_USES_RECOVERY_AS_BOOT := true
BOARD_BOOTIMAGE_PARTITION_SIZE := 0x02000000
diff --git a/target/board/emulator_arm64/device.mk b/target/board/emulator_arm64/device.mk
index 73dc2f4..dc84192 100644
--- a/target/board/emulator_arm64/device.mk
+++ b/target/board/emulator_arm64/device.mk
@@ -19,7 +19,7 @@
# Cuttlefish has GKI kernel prebuilts, so use those for the GKI boot.img.
ifeq ($(TARGET_PREBUILT_KERNEL),)
- LOCAL_KERNEL := device/google/cuttlefish_kernel/5.4-arm64/kernel
+ LOCAL_KERNEL := kernel/prebuilts/5.4/arm64/kernel-5.4-lz4
else
LOCAL_KERNEL := $(TARGET_PREBUILT_KERNEL)
endif
diff --git a/target/board/generic_64bitonly_x86_64/BoardConfig.mk b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
new file mode 100644
index 0000000..71c4357
--- /dev/null
+++ b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
@@ -0,0 +1,45 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# x86_64 emulator specific definitions
+TARGET_CPU_ABI := x86_64
+TARGET_ARCH := x86_64
+TARGET_ARCH_VARIANT := x86_64
+
+# Keep the following for 32-bit native code support
+# A few native services, e.g. media & audio, still run in 32-bit mode.
+# Remove them in S.
+TARGET_2ND_CPU_ABI := x86
+TARGET_2ND_ARCH := x86
+TARGET_2ND_ARCH_VARIANT := x86_64
+
+TARGET_PRELINK_MODULE := false
+include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigEmuCommon.mk
+
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
+
+BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
+
+# Wifi.
+BOARD_WLAN_DEVICE := emulator
+BOARD_HOSTAPD_DRIVER := NL80211
+BOARD_WPA_SUPPLICANT_DRIVER := NL80211
+BOARD_HOSTAPD_PRIVATE_LIB := lib_driver_cmd_simulated
+BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
+WPA_SUPPLICANT_VERSION := VER_0_8_X
+WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
+WIFI_DRIVER_FW_PATH_STA := "/dev/null"
+WIFI_DRIVER_FW_PATH_AP := "/dev/null"
diff --git a/target/board/generic_64bitonly_x86_64/README.txt b/target/board/generic_64bitonly_x86_64/README.txt
new file mode 100644
index 0000000..dc7efd3
--- /dev/null
+++ b/target/board/generic_64bitonly_x86_64/README.txt
@@ -0,0 +1,7 @@
+The "generic_x86_64_app" product defines a non-hardware-specific IA target
+without a kernel or bootloader.
+
+It can be used to build the entire user-level system, and
+will work with the IA version of the emulator.
+
+This supports 64-bit apps only.
diff --git a/target/product/virtual_ab_ota_retrofit.mk b/target/board/generic_64bitonly_x86_64/device.mk
similarity index 68%
copy from target/product/virtual_ab_ota_retrofit.mk
copy to target/board/generic_64bitonly_x86_64/device.mk
index 3416a4f..bb49057 100644
--- a/target/product/virtual_ab_ota_retrofit.mk
+++ b/target/board/generic_64bitonly_x86_64/device.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2019 The Android Open-Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +14,11 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
+ifdef NET_ETH0_STARTONBOOT
+ PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
+endif
-PRODUCT_VIRTUAL_AB_OTA_RETROFIT := true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.retrofit=true
+# Ensure we package the BIOS files too.
+PRODUCT_HOST_PACKAGES += \
+ bios.bin \
+ vgabios-cirrus.bin \
diff --git a/target/board/generic_64bitonly_x86_64/system.prop b/target/board/generic_64bitonly_x86_64/system.prop
new file mode 100644
index 0000000..ed9d173
--- /dev/null
+++ b/target/board/generic_64bitonly_x86_64/system.prop
@@ -0,0 +1,5 @@
+#
+# system.prop for generic sdk
+#
+
+rild.libpath=/vendor/lib64/libreference-ril.so
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 22108fa..30c033d 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -23,7 +23,7 @@
TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
-ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk vts10,$(MAKECMDGOALS)),)
+ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
# DO NOT USE
# DO NOT USE
#
@@ -54,24 +54,59 @@
include build/make/target/board/BoardConfigGsiCommon.mk
-TARGET_NO_KERNEL := false
-TARGET_NO_VENDOR_BOOT := true
-BOARD_USES_RECOVERY_AS_BOOT := true
-
+BOARD_KERNEL-4.19-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
BOARD_KERNEL-5.4_BOOTIMAGE_PARTITION_SIZE := 67108864
+BOARD_KERNEL-5.4-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 67108864
BOARD_KERNEL-5.4-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
+BOARD_KERNEL-5.4-GZ-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 47185920
BOARD_KERNEL-5.4-LZ4_BOOTIMAGE_PARTITION_SIZE := 53477376
+BOARD_KERNEL-5.4-LZ4-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 53477376
+BOARD_KERNEL-5.10_BOOTIMAGE_PARTITION_SIZE := 67108864
+BOARD_KERNEL-5.10-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 67108864
+BOARD_KERNEL-5.10-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
+BOARD_KERNEL-5.10-GZ-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 47185920
+BOARD_KERNEL-5.10-LZ4_BOOTIMAGE_PARTITION_SIZE := 53477376
+BOARD_KERNEL-5.10-LZ4-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 53477376
BOARD_KERNEL-MAINLINE_BOOTIMAGE_PARTITION_SIZE := 67108864
BOARD_KERNEL-MAINLINE-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
BOARD_KERNEL-MAINLINE-LZ4_BOOTIMAGE_PARTITION_SIZE := 53477376
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
+BOARD_RAMDISK_USE_LZ4 := true
BOARD_BOOT_HEADER_VERSION := 3
BOARD_MKBOOTIMG_ARGS += --header_version $(BOARD_BOOT_HEADER_VERSION)
-BOARD_KERNEL_BINARIES := kernel-5.4 kernel-5.4-gz kernel-5.4-lz4 \
- kernel-mainline kernel-mainline-gz kernel-mainline-lz4
+BOARD_KERNEL_BINARIES := \
+ kernel-4.19-gz \
+ kernel-5.4 kernel-5.4-gz kernel-5.4-lz4 \
+ kernel-5.10 kernel-5.10-gz kernel-5.10-lz4 \
+ kernel-mainline kernel-mainline-gz kernel-mainline-lz4 \
+
+ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
+BOARD_KERNEL_BINARIES += \
+ kernel-5.4-allsyms kernel-5.4-gz-allsyms kernel-5.4-lz4-allsyms \
+ kernel-5.10-allsyms kernel-5.10-gz-allsyms kernel-5.10-lz4-allsyms \
+
+endif
+
+# Boot image
+BOARD_USES_RECOVERY_AS_BOOT :=
+TARGET_NO_KERNEL := false
+BOARD_USES_GENERIC_KERNEL_IMAGE := true
+BOARD_KERNEL_MODULE_INTERFACE_VERSIONS := \
+ 5.4-android12-0 \
+ 5.10-android12-0 \
+
+# Copy boot image in $OUT to target files. This is defined for targets where
+# the installed GKI APEXes are built from source.
+BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES := true
+
+# No vendor_boot
+BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT :=
+
+# No recovery
+BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE :=
# Some vendors still haven't cleaned up all device specific directories under
# root!
diff --git a/target/board/generic_arm64/README.txt b/target/board/generic_arm64/README.txt
index 21c71d7..8711a14 100644
--- a/target/board/generic_arm64/README.txt
+++ b/target/board/generic_arm64/README.txt
@@ -1,5 +1,7 @@
The "generic_arm64" product defines a non-hardware-specific arm64 target
-without a kernel or bootloader.
+without a bootloader.
+
+It is also the target to build the generic kernel image (GKI).
It is not a product "base class"; no other products inherit
from it or use it in any way.
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index d8d06cd..37c0f25 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -15,9 +15,29 @@
#
PRODUCT_COPY_FILES += \
- device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4:kernel-5.4 \
- device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4-gz:kernel-5.4-gz \
- device/google/cuttlefish_kernel/5.4-arm64/kernel-5.4-lz4:kernel-5.4-lz4 \
- kernel/prebuilts/mainline/arm64/kernel-mainline:kernel-mainline \
- kernel/prebuilts/mainline/arm64/kernel-mainline-gz:kernel-mainline-gz \
- kernel/prebuilts/mainline/arm64/kernel-mainline-lz4:kernel-mainline-lz4
+ kernel/prebuilts/4.19/arm64/kernel-4.19-gz:kernel-4.19-gz \
+ kernel/prebuilts/5.4/arm64/kernel-5.4:kernel-5.4 \
+ kernel/prebuilts/5.4/arm64/kernel-5.4-gz:kernel-5.4-gz \
+ kernel/prebuilts/5.4/arm64/kernel-5.4-lz4:kernel-5.4-lz4 \
+ kernel/prebuilts/5.10/arm64/kernel-5.10:kernel-5.10 \
+ kernel/prebuilts/5.10/arm64/kernel-5.10-gz:kernel-5.10-gz \
+ kernel/prebuilts/5.10/arm64/kernel-5.10-lz4:kernel-5.10-lz4 \
+ kernel/prebuilts/mainline/arm64/kernel-mainline-allsyms:kernel-mainline \
+ kernel/prebuilts/mainline/arm64/kernel-mainline-gz-allsyms:kernel-mainline-gz \
+ kernel/prebuilts/mainline/arm64/kernel-mainline-lz4-allsyms:kernel-mainline-lz4
+
+ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
+PRODUCT_COPY_FILES += \
+ kernel/prebuilts/5.4/arm64/kernel-5.4:kernel-5.4-allsyms \
+ kernel/prebuilts/5.4/arm64/kernel-5.4-gz:kernel-5.4-gz-allsyms \
+ kernel/prebuilts/5.4/arm64/kernel-5.4-lz4:kernel-5.4-lz4-allsyms \
+ kernel/prebuilts/5.10/arm64/kernel-5.10:kernel-5.10-allsyms \
+ kernel/prebuilts/5.10/arm64/kernel-5.10-gz:kernel-5.10-gz-allsyms \
+ kernel/prebuilts/5.10/arm64/kernel-5.10-lz4:kernel-5.10-lz4-allsyms \
+
+endif
+
+PRODUCT_BUILD_VENDOR_BOOT_IMAGE := false
+PRODUCT_BUILD_RECOVERY_IMAGE := false
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_ramdisk.mk)
diff --git a/target/board/generic_arm64/sepolicy/OWNERS b/target/board/generic_arm64/sepolicy/OWNERS
index ff29677..6dc2b86 100644
--- a/target/board/generic_arm64/sepolicy/OWNERS
+++ b/target/board/generic_arm64/sepolicy/OWNERS
@@ -1,8 +1 @@
-alanstokes@google.com
-bowgotsai@google.com
-jbires@google.com
-jeffv@google.com
-jgalenson@google.com
-sspatil@google.com
-tomcherry@google.com
-trong@google.com
+include platform/system/sepolicy:/OWNERS
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
deleted file mode 100644
index 7c91607..0000000
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/make/target/board/BoardConfigGsiCommon.mk
-
-TARGET_ARCH := arm64
-TARGET_ARCH_VARIANT := armv8-a
-TARGET_CPU_ABI := arm64-v8a
-TARGET_CPU_ABI2 :=
-TARGET_CPU_VARIANT := generic
-
-TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv8-a
-TARGET_2ND_CPU_ABI := armeabi-v7a
-TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_CPU_VARIANT := generic
-
-# TODO(jiyong) These might be SoC specific.
-BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
-BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
-BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
-
-# TODO(b/36764215): remove this setting when the generic system image
-# no longer has QCOM-specific directories under /.
-BOARD_SEPOLICY_DIRS += build/make/target/board/generic_arm64/sepolicy
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
deleted file mode 100644
index 21b763c..0000000
--- a/target/board/generic_arm_ab/BoardConfig.mk
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/make/target/board/BoardConfigGsiCommon.mk
-
-TARGET_ARCH := arm
-TARGET_ARCH_VARIANT := armv7-a-neon
-TARGET_CPU_ABI := armeabi-v7a
-TARGET_CPU_ABI2 := armeabi
-TARGET_CPU_VARIANT := generic
-
-# Legacy GSI keeps 32 bits binder for 32 bits CPU Arch
-TARGET_USES_64_BIT_BINDER := false
-
-# TODO(jiyong) These might be SoC specific.
-BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
-BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
-BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/image:/firmware/image
-BOARD_ROOT_EXTRA_SYMLINKS += /vendor/firmware_mnt/verinfo:/firmware/verinfo
-
-# TODO(b/36764215): remove this setting when the generic system image
-# no longer has QCOM-specific directories under /.
-BOARD_SEPOLICY_DIRS += build/make/target/board/generic_arm64/sepolicy
diff --git a/target/board/mainline_arm64/sepolicy/OWNERS b/target/board/mainline_arm64/sepolicy/OWNERS
index ff29677..6dc2b86 100644
--- a/target/board/mainline_arm64/sepolicy/OWNERS
+++ b/target/board/mainline_arm64/sepolicy/OWNERS
@@ -1,8 +1 @@
-alanstokes@google.com
-bowgotsai@google.com
-jbires@google.com
-jeffv@google.com
-jgalenson@google.com
-sspatil@google.com
-tomcherry@google.com
-trong@google.com
+include platform/system/sepolicy:/OWNERS
diff --git a/core/tasks/apidiff.mk b/target/board/mainline_sdk/BoardConfig.mk
similarity index 76%
copy from core/tasks/apidiff.mk
copy to target/board/mainline_sdk/BoardConfig.mk
index 76e4749..84f8b2d 100644
--- a/core/tasks/apidiff.mk
+++ b/target/board/mainline_sdk/BoardConfig.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,11 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-#
-# Rules for building API diffs.
#
-.PHONY: api-diff
+TARGET_ARCH_SUITE := mainline_sdk
-api-diff: api-stubs-docs-jdiff
+HOST_CROSS_OS := linux_bionic
+HOST_CROSS_ARCH := x86_64
+HOST_CROSS_2ND_ARCH :=
diff --git a/target/board/mainline_sdk/README.md b/target/board/mainline_sdk/README.md
new file mode 100644
index 0000000..714f797
--- /dev/null
+++ b/target/board/mainline_sdk/README.md
@@ -0,0 +1,2 @@
+This device is suitable for a Soong-only build that builds for all the architectures
+needed for mainline module SDK prebuilts.
diff --git a/target/product/virtual_ab_ota_retrofit.mk b/target/board/module_arm/BoardConfig.mk
similarity index 67%
copy from target/product/virtual_ab_ota_retrofit.mk
copy to target/board/module_arm/BoardConfig.mk
index 3416a4f..3f35c06 100644
--- a/target/product/virtual_ab_ota_retrofit.mk
+++ b/target/board/module_arm/BoardConfig.mk
@@ -1,5 +1,4 @@
-#
-# Copyright (C) 2019 The Android Open-Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +13,10 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
+include build/make/target/board/BoardConfigModuleCommon.mk
-PRODUCT_VIRTUAL_AB_OTA_RETROFIT := true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.retrofit=true
+TARGET_ARCH := arm
+TARGET_ARCH_VARIANT := armv7-a-neon
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := armeabi-v7a
+TARGET_CPU_ABI2 := armeabi
diff --git a/target/board/module_arm/README.md b/target/board/module_arm/README.md
new file mode 100644
index 0000000..b893573
--- /dev/null
+++ b/target/board/module_arm/README.md
@@ -0,0 +1,2 @@
+This device is suitable for an unbundled module targeted specifically to an arm
+device.
diff --git a/core/tasks/apidiff.mk b/target/board/module_arm/device.mk
similarity index 73%
copy from core/tasks/apidiff.mk
copy to target/board/module_arm/device.mk
index 76e4749..cceb987 100644
--- a/core/tasks/apidiff.mk
+++ b/target/board/module_arm/device.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2017 The Android Open Source Project
+#
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,11 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-#
-# Rules for building API diffs.
#
-.PHONY: api-diff
-
-api-diff: api-stubs-docs-jdiff
+$(call inherit-product, build/make/target/product/default_art_config.mk)
+$(call inherit-product, build/make/target/product/languages_default.mk)
diff --git a/target/board/module_arm64/BoardConfig.mk b/target/board/module_arm64/BoardConfig.mk
new file mode 100644
index 0000000..3700056
--- /dev/null
+++ b/target/board/module_arm64/BoardConfig.mk
@@ -0,0 +1,27 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+include build/make/target/board/BoardConfigModuleCommon.mk
+
+TARGET_ARCH := arm64
+TARGET_ARCH_VARIANT := armv8-a
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := arm64-v8a
+
+TARGET_2ND_ARCH := arm
+TARGET_2ND_ARCH_VARIANT := armv8-a
+TARGET_2ND_CPU_ABI := armeabi-v7a
+TARGET_2ND_CPU_ABI2 := armeabi
+TARGET_2ND_CPU_VARIANT := generic
diff --git a/target/board/module_arm64/README.md b/target/board/module_arm64/README.md
new file mode 100644
index 0000000..cb36fbf
--- /dev/null
+++ b/target/board/module_arm64/README.md
@@ -0,0 +1,3 @@
+This device is suitable for an unbundled module targeted specifically to an
+arm64 device. 32-bit binaries built with this product will not be suitable for a
+32-bit arm device.
diff --git a/target/product/virtual_ab_ota_retrofit.mk b/target/board/module_arm64/device.mk
similarity index 67%
copy from target/product/virtual_ab_ota_retrofit.mk
copy to target/board/module_arm64/device.mk
index 3416a4f..0d4c543 100644
--- a/target/product/virtual_ab_ota_retrofit.mk
+++ b/target/board/module_arm64/device.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2019 The Android Open-Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +14,6 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
-
-PRODUCT_VIRTUAL_AB_OTA_RETROFIT := true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.retrofit=true
+$(call inherit-product, build/make/target/product/default_art_config.mk)
+$(call inherit-product, build/make/target/product/core_64_bit.mk)
+$(call inherit-product, build/make/target/product/languages_default.mk)
diff --git a/target/board/generic_x86_ab/BoardConfig.mk b/target/board/module_x86/BoardConfig.mk
similarity index 75%
rename from target/board/generic_x86_ab/BoardConfig.mk
rename to target/board/module_x86/BoardConfig.mk
index 53acffd..a93ac97 100644
--- a/target/board/generic_x86_ab/BoardConfig.mk
+++ b/target/board/module_x86/BoardConfig.mk
@@ -1,5 +1,4 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,11 +13,8 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigModuleCommon.mk
TARGET_CPU_ABI := x86
TARGET_ARCH := x86
TARGET_ARCH_VARIANT := x86
-
-# Legacy GSI keeps 32 bits binder for 32 bits CPU Arch
-TARGET_USES_64_BIT_BINDER := false
diff --git a/target/board/module_x86/README.md b/target/board/module_x86/README.md
new file mode 100644
index 0000000..10866b7
--- /dev/null
+++ b/target/board/module_x86/README.md
@@ -0,0 +1,2 @@
+This device is suitable for an unbundled module targeted specifically to an
+x86 device.
diff --git a/core/tasks/apidiff.mk b/target/board/module_x86/device.mk
similarity index 73%
copy from core/tasks/apidiff.mk
copy to target/board/module_x86/device.mk
index 76e4749..cceb987 100644
--- a/core/tasks/apidiff.mk
+++ b/target/board/module_x86/device.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2017 The Android Open Source Project
+#
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,11 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-#
-# Rules for building API diffs.
#
-.PHONY: api-diff
-
-api-diff: api-stubs-docs-jdiff
+$(call inherit-product, build/make/target/product/default_art_config.mk)
+$(call inherit-product, build/make/target/product/languages_default.mk)
diff --git a/target/board/generic_x86_64_ab/BoardConfig.mk b/target/board/module_x86_64/BoardConfig.mk
similarity index 86%
rename from target/board/generic_x86_64_ab/BoardConfig.mk
rename to target/board/module_x86_64/BoardConfig.mk
index 1dd5e48..1ed3be0 100644
--- a/target/board/generic_x86_64_ab/BoardConfig.mk
+++ b/target/board/module_x86_64/BoardConfig.mk
@@ -1,5 +1,4 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,7 +13,7 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigGsiCommon.mk
+include build/make/target/board/BoardConfigModuleCommon.mk
TARGET_CPU_ABI := x86_64
TARGET_ARCH := x86_64
diff --git a/target/board/module_x86_64/README.md b/target/board/module_x86_64/README.md
new file mode 100644
index 0000000..3377baa
--- /dev/null
+++ b/target/board/module_x86_64/README.md
@@ -0,0 +1,3 @@
+This device is suitable for an unbundled module targeted specifically to an
+x86_64 device. 32 bit binaries built with this product will not be suitable for
+a 32-bit x86 device.
diff --git a/target/product/virtual_ab_ota_retrofit.mk b/target/board/module_x86_64/device.mk
similarity index 67%
copy from target/product/virtual_ab_ota_retrofit.mk
copy to target/board/module_x86_64/device.mk
index 3416a4f..0d4c543 100644
--- a/target/product/virtual_ab_ota_retrofit.mk
+++ b/target/board/module_x86_64/device.mk
@@ -1,5 +1,5 @@
#
-# Copyright (C) 2019 The Android Open-Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,8 +14,6 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
-
-PRODUCT_VIRTUAL_AB_OTA_RETROFIT := true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.retrofit=true
+$(call inherit-product, build/make/target/product/default_art_config.mk)
+$(call inherit-product, build/make/target/product/core_64_bit.mk)
+$(call inherit-product, build/make/target/product/languages_default.mk)
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 3949737..c27badc 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -43,24 +43,24 @@
else
PRODUCT_MAKEFILES := \
- $(LOCAL_DIR)/aosp_arm64_ab.mk \
+ $(LOCAL_DIR)/aosp_64bitonly_x86_64.mk \
$(LOCAL_DIR)/aosp_arm64.mk \
- $(LOCAL_DIR)/aosp_arm_ab.mk \
$(LOCAL_DIR)/aosp_arm.mk \
- $(LOCAL_DIR)/aosp_x86_64_ab.mk \
$(LOCAL_DIR)/aosp_x86_64.mk \
- $(LOCAL_DIR)/aosp_x86_ab.mk \
$(LOCAL_DIR)/aosp_x86_arm.mk \
$(LOCAL_DIR)/aosp_x86.mk \
$(LOCAL_DIR)/full.mk \
$(LOCAL_DIR)/full_x86.mk \
$(LOCAL_DIR)/generic.mk \
+ $(LOCAL_DIR)/generic_system_arm64.mk \
+ $(LOCAL_DIR)/generic_system_x86.mk \
+ $(LOCAL_DIR)/generic_system_x86_64.mk \
+ $(LOCAL_DIR)/generic_system_x86_arm.mk \
$(LOCAL_DIR)/generic_x86.mk \
- $(LOCAL_DIR)/gsi_arm64.mk \
$(LOCAL_DIR)/mainline_system_arm64.mk \
$(LOCAL_DIR)/mainline_system_x86.mk \
- $(LOCAL_DIR)/mainline_system_x86_arm.mk \
$(LOCAL_DIR)/mainline_system_x86_64.mk \
+ $(LOCAL_DIR)/mainline_system_x86_arm.mk \
$(LOCAL_DIR)/sdk_arm64.mk \
$(LOCAL_DIR)/sdk.mk \
$(LOCAL_DIR)/sdk_phone_arm64.mk \
@@ -72,6 +72,8 @@
endif
+PRODUCT_MAKEFILES += $(LOCAL_DIR)/mainline_sdk.mk
+
COMMON_LUNCH_CHOICES := \
aosp_arm64-eng \
aosp_arm-eng \
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
new file mode 100644
index 0000000..4de4e0c
--- /dev/null
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -0,0 +1,72 @@
+#
+# Copyright 2020 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
+# The system image of aosp_64bitonly_x86_64-userdebug is a GSI for devices with:
+# - x86 64 bits user space
+# - 64 bits binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+# GSI for system/product & support 64-bit apps only
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+
+# Enable mainline checking for exactly this product name
+ifeq (aosp_64bitonly_x86_64,$(TARGET_PRODUCT))
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+endif
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
+
+#
+# Special settings for GSI releasing
+#
+ifeq (aosp_64bitonly_x86_64,$(TARGET_PRODUCT))
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
+endif
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
+ root/init.zygote64.rc
+
+# This build configuration supports 64-bit apps only
+PRODUCT_NAME := aosp_64bitonly_x86_64
+PRODUCT_DEVICE := generic_64bitonly_x86_64
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on x86_64 App
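The ifeq guards above scope the relaxed artifact-path enforcement and the gsi_release settings to this exact TARGET_PRODUCT; a product that merely inherits this makefile does not pick them up. A minimal sketch with a hypothetical derived product name:

    # Hypothetical derived product: inherits the package set, but the
    # TARGET_PRODUCT-guarded blocks above stay inactive because the name differs.
    $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_64bitonly_x86_64.mk)
    PRODUCT_NAME := my_64bitonly_x86_64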
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index 0cec14b..90acc17 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -26,7 +26,7 @@
#
# All components inherited here go to system image
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
# Enable mainline checking for exactly this product name
ifeq (aosp_arm,$(TARGET_PRODUCT))
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index 3254ccf..38f82a2 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -30,7 +30,7 @@
# All components inherited here go to system image
#
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
# Enable mainline checking for exactly this product name
ifeq (aosp_arm64,$(TARGET_PRODUCT))
diff --git a/target/product/aosp_arm64_ab.mk b/target/product/aosp_arm64_ab.mk
deleted file mode 100644
index 0b2ae09..0000000
--- a/target/product/aosp_arm64_ab.mk
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_VENDOR_PROPERTIES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# gsi_system.prop.
-
-# aosp_arm64_ab-userdebug is a Legacy GSI for the devices with:
-# - ARM 64 bits user space
-# - 64 bits binder interface
-# - system-as-root
-
-#
-# All components inherited here go to system image
-# (The system image of Legacy GSI is not CSI)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-
-# Enable mainline checking for excat this product name
-ifeq (aosp_arm64_ab,$(TARGET_PRODUCT))
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-endif
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# Special settings for GSI releasing
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_release.mk)
-
-PRODUCT_NAME := aosp_arm64_ab
-PRODUCT_DEVICE := generic_arm64_ab
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on ARM64
diff --git a/target/product/aosp_arm_ab.mk b/target/product/aosp_arm_ab.mk
deleted file mode 100644
index ec2e5d7..0000000
--- a/target/product/aosp_arm_ab.mk
+++ /dev/null
@@ -1,57 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_VENDOR_PROPERTIES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# gsi_system.prop.
-
-# aosp_arm_ab-userdebug is a Legacy GSI for the devices with:
-# - ARM 32 bits user space
-# - 32 bits binder interface
-# - system-as-root
-
-#
-# All components inherited here go to system image
-# (The system image of Legacy GSI is not CSI)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-
-# Enable mainline checking for excat this product name
-ifeq (aosp_arm_ab,$(TARGET_PRODUCT))
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-endif
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# Special settings for GSI releasing
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_release.mk)
-
-PRODUCT_NAME := aosp_arm_ab
-PRODUCT_DEVICE := generic_arm_ab
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on ARM32
diff --git a/target/product/aosp_product.mk b/target/product/aosp_product.mk
index a3da1c9..e396ad1 100644
--- a/target/product/aosp_product.mk
+++ b/target/product/aosp_product.mk
@@ -31,6 +31,7 @@
PRODUCT_PACKAGES += \
messaging \
PhotoTable \
+ preinstalled-packages-platform-aosp-product.xml \
WallpaperPicker \
# Telephony:
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 51b5daf..7db2c0f 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -26,7 +26,7 @@
#
# All components inherited here go to system image
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
# Enable mainline checking for exactly this product name
ifeq (aosp_x86,$(TARGET_PRODUCT))
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 9b26716..5d78264 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -32,7 +32,7 @@
# All components inherited here go to system image
#
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
# Enable mainline checking for exactly this product name
ifeq (aosp_x86_64,$(TARGET_PRODUCT))
diff --git a/target/product/aosp_x86_64_ab.mk b/target/product/aosp_x86_64_ab.mk
deleted file mode 100644
index 578a254..0000000
--- a/target/product/aosp_x86_64_ab.mk
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_VENDOR_PROPERTIES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# gsi_system.prop.
-
-# aosp_x86_64_ab-userdebug is a Legacy GSI for the devices with:
-# - x86 64 bits user space
-# - 64 bits binder interface
-# - system-as-root
-
-#
-# All components inherited here go to system image
-# (The system image of Legacy GSI is not CSI)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-
-# Enable mainline checking for excat this product name
-ifeq (aosp_x86_64_ab,$(TARGET_PRODUCT))
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-endif
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# Special settings for GSI releasing
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_release.mk)
-
-PRODUCT_NAME := aosp_x86_64_ab
-PRODUCT_DEVICE := generic_x86_64_ab
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on x86_64
diff --git a/target/product/aosp_x86_ab.mk b/target/product/aosp_x86_ab.mk
deleted file mode 100644
index 40c1d83..0000000
--- a/target/product/aosp_x86_ab.mk
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# PRODUCT_VENDOR_PROPERTIES cannot be used here because sysprops will be at
-# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/
-# gsi_system.prop.
-
-# aosp_x86_ab-userdebug is a Legacy GSI for the devices with:
-# - x86 32 bits user space
-# - 32 bits binder interface
-# - system-as-root
-
-#
-# All components inherited here go to system image
-# (The system image of Legacy GSI is not CSI)
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-
-# Enable mainline checking for excat this product name
-ifeq (aosp_x86_ab,$(TARGET_PRODUCT))
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-endif
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# Special settings for GSI releasing
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/legacy_gsi_release.mk)
-
-PRODUCT_NAME := aosp_x86_ab
-PRODUCT_DEVICE := generic_x86_ab
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on x86
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index deba3d9..f96e068 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -19,7 +19,7 @@
#
# All components inherited here go to system image
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
# Enable mainline checking
ifeq (aosp_x86_arm,$(TARGET_PRODUCT))
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 2ed550c..5446064 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -16,8 +16,11 @@
# Base modules and settings for the product partition.
PRODUCT_PACKAGES += \
+ fs_config_dirs_product \
+ fs_config_files_product \
group_product \
ModuleMetadata \
passwd_product \
product_compatibility_matrix.xml \
product_manifest.xml \
+ selinux_policy_product \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index f6770fb..c7ae1f0 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -50,21 +50,27 @@
charger \
cmd \
com.android.adbd \
- com.android.apex.cts.shim.v1 \
com.android.conscrypt \
com.android.cronet \
+ com.android.extservices \
com.android.i18n \
com.android.ipsec \
com.android.location.provider \
com.android.media \
com.android.media.swcodec \
+ com.android.mediaprovider \
+ com.android.os.statsd \
+ com.android.permission \
com.android.resolv \
com.android.neuralnetworks \
com.android.sdkext \
com.android.tethering \
com.android.tzdata \
+ com.android.wifi \
ContactsProvider \
content \
+ CtsShimPrebuilt \
+ CtsShimPrivPrebuilt \
debuggerd\
device_config \
dmctl \
@@ -75,7 +81,6 @@
dumpsys \
DynamicSystemInstallationService \
e2fsck \
- ExtServices \
ExtShared \
flags_health_check \
framework-minus-apex \
@@ -94,7 +99,6 @@
gpuservice \
hid \
hwservicemanager \
- idmap \
idmap2 \
idmap2d \
ime \
@@ -102,6 +106,7 @@
incident \
incidentd \
incident_helper \
+ incident-helper-cmd \
init.environ.rc \
init_system \
input \
@@ -112,6 +117,8 @@
ip-up-vpn \
javax.obex \
keystore \
+ keystore2 \
+ credstore \
ld.mc \
libaaudio \
libamidi \
@@ -161,7 +168,6 @@
libOpenMAXAL \
libOpenSLES \
libpdfium \
- libpixelflinger \
libpower \
libpowermanager \
libradio_metadata \
@@ -184,10 +190,10 @@
libusbhost \
libutils \
libvulkan \
- libwifi-service \
libwilhelm \
linker \
linkerconfig \
+ llkd \
lmkd \
LocalTransport \
locksettings \
@@ -196,13 +202,11 @@
lpdump \
lshal \
mdnsd \
- media \
mediacodec.policy \
- mediadrmserver \
mediaextractor \
mediametrics \
media_profiles_V1_0.dtd \
- MediaProvider \
+ MediaProviderLegacy \
mediaserver \
mke2fs \
monkey \
@@ -215,12 +219,12 @@
PackageInstaller \
passwd_system \
perfetto \
- PermissionController \
ping \
ping6 \
platform.xml \
pm \
pppd \
+ preinstalled-packages-platform.xml \
privapp-permissions-platform.xml \
racoon \
recovery-persist \
@@ -245,7 +249,8 @@
shell_and_utilities_system \
sm \
snapshotctl \
- statsd \
+ snapuserd \
+ SoundPicker \
storaged \
surfaceflinger \
svc \
@@ -268,7 +273,7 @@
WallpaperBackup \
watchdogd \
wificond \
- wifi-service \
+ wifi.rc \
wm \
# VINTF data for system image
@@ -276,6 +281,23 @@
system_manifest.xml \
system_compatibility_matrix.xml \
+# HWASAN runtime for SANITIZE_TARGET=hwaddress builds
+ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
+ PRODUCT_PACKAGES += \
+ libclang_rt.hwasan-aarch64-android.bootstrap
+endif
+
+# Jacoco agent JARS to be built and installed, if any.
+ifeq ($(EMMA_INSTRUMENT),true)
+ ifneq ($(EMMA_INSTRUMENT_STATIC),true)
+ # For instrumented build, if Jacoco is not being included statically
+ # in instrumented packages then include Jacoco classes into the
+ # bootclasspath.
+ PRODUCT_PACKAGES += jacocoagent
+ PRODUCT_BOOT_JARS += jacocoagent
+ endif # EMMA_INSTRUMENT_STATIC
+endif # EMMA_INSTRUMENT
+
# Host tools to install
PRODUCT_HOST_PACKAGES += \
BugReport \
@@ -312,50 +334,25 @@
tz_version_host \
tz_version_host_tzdata_apex \
-ifeq ($(ART_APEX_JARS),)
-$(error ART_APEX_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
-endif
-
-# The order matters for runtime class lookup performance.
-PRODUCT_BOOT_JARS := \
- $(ART_APEX_JARS) \
- framework-minus-apex \
- ext \
- com.android.i18n:core-icu4j \
- telephony-common \
- voip-common \
- ims-common \
-
-PRODUCT_UPDATABLE_BOOT_JARS := \
- com.android.conscrypt:conscrypt \
- com.android.media:updatable-media \
- com.android.sdkext:framework-sdkextensions \
- com.android.tethering:framework-tethering
PRODUCT_COPY_FILES += \
system/core/rootdir/init.usb.rc:system/etc/init/hw/init.usb.rc \
system/core/rootdir/init.usb.configfs.rc:system/etc/init/hw/init.usb.configfs.rc \
system/core/rootdir/etc/hosts:system/etc/hosts
-# Add the compatibility library that is needed when android.test.base
-# is removed from the bootclasspath.
-# Default to excluding android.test.base from the bootclasspath.
-ifneq ($(REMOVE_ATB_FROM_BCP),false)
-PRODUCT_PACKAGES += framework-atb-backward-compatibility
-PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
-else
-PRODUCT_BOOT_JARS += android.test.base
-endif
-
PRODUCT_COPY_FILES += system/core/rootdir/init.zygote32.rc:system/etc/init/hw/init.zygote32.rc
-PRODUCT_SYSTEM_PROPERTIES += ro.zygote?=zygote32
+PRODUCT_VENDOR_PROPERTIES += ro.zygote?=zygote32
PRODUCT_SYSTEM_PROPERTIES += debug.atrace.tags.enableflags=0
+PRODUCT_SYSTEM_PROPERTIES += persist.traced.enable=1
+
+PRODUCT_PROPERTY_OVERRIDES += ro.gfx.angle.supported=true
# Packages included only for eng or userdebug builds, previously debug tagged
PRODUCT_PACKAGES_DEBUG := \
adb_keys \
arping \
+ dmuserd \
gdbserver \
idlcli \
init-debug.rc \
@@ -365,6 +362,8 @@
logpersist.start \
logtagd.rc \
procrank \
+ profcollectd \
+ profcollectctl \
remount \
showmap \
sqlite3 \
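The HWASAN and Jacoco additions in this file are gated on build-wide variables rather than on the product definition itself. A hedged sketch of the values that activate them; these are normally supplied on the build command line or by a sanitizer/coverage build configuration, not hard-coded in a product makefile:

    # Illustrative values only.
    SANITIZE_TARGET := hwaddress     # pulls in libclang_rt.hwasan-aarch64-android.bootstrap
    EMMA_INSTRUMENT := true          # adds jacocoagent to PRODUCT_PACKAGES
    EMMA_INSTRUMENT_STATIC := false  # ...and to PRODUCT_BOOT_JARS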
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index b67549a..852d7ca 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -16,6 +16,9 @@
# Base modules and settings for the system_ext partition.
PRODUCT_PACKAGES += \
+ fs_config_dirs_system_ext \
+ fs_config_files_system_ext \
group_system_ext \
- system_ext_manifest.xml \
passwd_system_ext \
+ selinux_policy_system_ext \
+ system_ext_manifest.xml \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index b3368d6..b955841 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -40,7 +40,7 @@
# Base modules and settings for the vendor partition.
PRODUCT_PACKAGES += \
- android.hardware.cas@1.1-service \
+ android.hardware.cas@1.2-service \
android.hardware.media.omx@1.0-service \
boringssl_self_test_vendor \
dumpsys_vendor \
@@ -66,11 +66,11 @@
passwd_vendor \
selinux_policy_nonsystem \
shell_and_utilities_vendor \
- vndservice \
# Base module when shipping api level is less than or equal to 29
PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \
android.hardware.configstore@1.1-service \
+ vndservice \
vndservicemanager \
# VINTF data for vendor image
diff --git a/target/product/cfi-common.mk b/target/product/cfi-common.mk
index 42edd92..925d70e 100644
--- a/target/product/cfi-common.mk
+++ b/target/product/cfi-common.mk
@@ -17,8 +17,8 @@
# This is a set of common components to enable CFI for (across
# compatible product configs)
PRODUCT_CFI_INCLUDE_PATHS := \
+ device/generic/goldfish/wifi/wpa_supplicant_8_lib \
device/google/cuttlefish/guest/libs/wpa_supplicant_8_lib \
- device/google/wahoo/wifi_offload \
external/tinyxml2 \
external/wpa_supplicant_8 \
frameworks/av/camera \
@@ -28,7 +28,8 @@
hardware/broadcom/wlan/bcmdhd/wpa_supplicant_8_lib \
hardware/interfaces/nfc \
hardware/qcom/wlan/qcwcn/wpa_supplicant_8_lib \
- harware/interfaces/keymaster \
+ hardware/interfaces/keymaster \
+ hardware/interfaces/security \
system/bt \
system/chre \
system/core/libnetutils \
diff --git a/target/product/core_64_bit.mk b/target/product/core_64_bit.mk
index 7fa6ed2..322fa80 100644
--- a/target/product/core_64_bit.mk
+++ b/target/product/core_64_bit.mk
@@ -27,7 +27,7 @@
# Set the zygote property to select the 64-bit primary, 32-bit secondary script
# This line must be parsed before the one in core_minimal.mk
-PRODUCT_SYSTEM_PROPERTIES += ro.zygote=zygote64_32
+PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64_32
TARGET_SUPPORTS_32_BIT_APPS := true
TARGET_SUPPORTS_64_BIT_APPS := true
diff --git a/target/product/core_64_bit_only.mk b/target/product/core_64_bit_only.mk
index 63beea9..53c9c74 100644
--- a/target/product/core_64_bit_only.mk
+++ b/target/product/core_64_bit_only.mk
@@ -24,7 +24,7 @@
# Set the zygote property to select the 64-bit script.
# This line must be parsed before the one in core_minimal.mk
-PRODUCT_SYSTEM_PROPERTIES += ro.zygote=zygote64
+PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64
TARGET_SUPPORTS_32_BIT_APPS := false
TARGET_SUPPORTS_64_BIT_APPS := true
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
new file mode 100644
index 0000000..131ba31
--- /dev/null
+++ b/target/product/default_art_config.mk
@@ -0,0 +1,58 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ifeq ($(ART_APEX_JARS),)
+ $(error ART_APEX_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
+endif
+
+# The order matters for runtime class lookup performance.
+PRODUCT_BOOT_JARS := \
+ $(ART_APEX_JARS) \
+ framework-minus-apex \
+ ext \
+ com.android.i18n:core-icu4j \
+ telephony-common \
+ voip-common \
+ ims-common
+
+PRODUCT_UPDATABLE_BOOT_JARS := \
+ com.android.conscrypt:conscrypt \
+ com.android.media:updatable-media \
+ com.android.mediaprovider:framework-mediaprovider \
+ com.android.os.statsd:framework-statsd \
+ com.android.permission:framework-permission \
+ com.android.sdkext:framework-sdkextensions \
+ com.android.wifi:framework-wifi \
+ com.android.tethering:framework-tethering
+
+# Add the compatibility library that is needed when android.test.base
+# is removed from the bootclasspath.
+# Default to excluding android.test.base from the bootclasspath.
+ifneq ($(REMOVE_ATB_FROM_BCP),false)
+ PRODUCT_PACKAGES += framework-atb-backward-compatibility
+ PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
+else
+ PRODUCT_BOOT_JARS += android.test.base
+endif
+
+# Minimal configuration for running dex2oat (default argument values).
+# PRODUCT_USES_DEFAULT_ART_CONFIG must be true to enable boot image compilation.
+PRODUCT_USES_DEFAULT_ART_CONFIG := true
+PRODUCT_SYSTEM_PROPERTIES += \
+ dalvik.vm.image-dex2oat-Xms=64m \
+ dalvik.vm.image-dex2oat-Xmx=64m \
+ dalvik.vm.dex2oat-Xms=64m \
+ dalvik.vm.dex2oat-Xmx=512m \
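Products consume this file via inherit-product, and PRODUCT_UPDATABLE_BOOT_JARS takes <apex module>:<jar> pairs as in the entries above. A minimal sketch of a product extending it; the module and jar names below are hypothetical:

    $(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
    # Same <apex module>:<jar> syntax as the entries above.
    PRODUCT_UPDATABLE_BOOT_JARS += com.android.example:framework-example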
diff --git a/target/product/emulated_storage.mk b/target/product/emulated_storage.mk
index 4c6c644..7d380d9 100644
--- a/target/product/emulated_storage.mk
+++ b/target/product/emulated_storage.mk
@@ -19,3 +19,5 @@
PRODUCT_FS_CASEFOLD := 1
PRODUCT_VENDOR_PROPERTIES += external_storage.casefold.enabled=1
+
+PRODUCT_VENDOR_PROPERTIES += external_storage.sdcardfs.enabled=0
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index 9dffc1a..36da1f7 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -50,12 +50,6 @@
#PRODUCT_VENDOR_PROPERTIES += \
#config.disable_location=true
-# Enable Perfetto traced
-# There is a stable property API for this prop so we can move it to /product.
-# https://android-review.googlesource.com/c/platform/system/libsysprop/+/952375
-PRODUCT_PRODUCT_PROPERTIES += \
- persist.traced.enable=1
-
# enable Google-specific location features,
# like NetworkLocationProvider and LocationCollector
PRODUCT_SYSTEM_EXT_PROPERTIES += \
diff --git a/target/product/emulator_vendor.mk b/target/product/emulator_vendor.mk
index bb679ec..4d46358 100644
--- a/target/product/emulator_vendor.mk
+++ b/target/product/emulator_vendor.mk
@@ -26,7 +26,7 @@
PRODUCT_PACKAGES += \
vndk-sp
-PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
+DEVICE_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
PRODUCT_CHARACTERISTICS := emulator
@@ -42,12 +42,6 @@
#PRODUCT_VENDOR_PROPERTIES += \
#config.disable_location=true
-# Enable Perfetto traced
-# There is a stable property API for this prop so we can move it to /product.
-# https://android-review.googlesource.com/c/platform/system/libsysprop/+/952375
-PRODUCT_PRODUCT_PROPERTIES += \
- persist.traced.enable=1
-
# enable Google-specific location features,
# like NetworkLocationProvider and LocationCollector
PRODUCT_SYSTEM_EXT_PROPERTIES += \
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index 64f61ff..a8e1e91 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -25,7 +25,8 @@
PRODUCT_PACKAGES += \
LiveWallpapersPicker \
- PhotoTable
+ PhotoTable \
+ preinstalled-packages-platform-full-base.xml
# Bluetooth:
# audio.a2dp.default is a system module. Generic system image includes
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
new file mode 100644
index 0000000..ae81329
--- /dev/null
+++ b/target/product/generic_ramdisk.mk
@@ -0,0 +1,39 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile installs contents of the generic ramdisk.
+# Inherit from this makefile to declare that the product uses the generic ramdisk.
+# This makefile also checks that other makefiles do not install anything else
+# into the ramdisk.
+
+# Ramdisk
+PRODUCT_PACKAGES += \
+ init_first_stage \
+
+# Debug ramdisk
+PRODUCT_PACKAGES += \
+ userdebug_plat_sepolicy.cil \
+
+_my_paths := \
+ $(TARGET_COPY_OUT_RAMDISK)/ \
+ $(TARGET_COPY_OUT_DEBUG_RAMDISK)/ \
+ system/usr/share/zoneinfo/tz_version \
+ system/usr/share/zoneinfo/tzdata \
+
+# We use the "relaxed" version here because tzdata / tz_version is only produced
+# by this makefile on a subset of devices.
+# TODO: remove this
+$(call require-artifacts-in-path-relaxed, $(_my_paths), )
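A device opts in by inheriting this makefile from its own device configuration. A minimal sketch with a hypothetical device path:

    # device/<vendor>/<device>/device.mk (illustrative)
    $(call inherit-product, $(SRC_TARGET_DIR)/product/generic_ramdisk.mk)
    # Anything else the device installs under the ramdisk paths listed above
    # is subject to the (relaxed) artifact path requirement declared here.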
diff --git a/target/product/mainline_system.mk b/target/product/generic_system.mk
similarity index 90%
rename from target/product/mainline_system.mk
rename to target/product/generic_system.mk
index e3b3e7d..9580ade 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/generic_system.mk
@@ -34,14 +34,10 @@
PartnerBookmarksProvider \
PresencePolling \
RcsService \
- SafetyRegulatoryInfo \
Stk \
Tag \
TimeZoneUpdater \
-# Binaries
-PRODUCT_PACKAGES += llkd
-
# OTA support
PRODUCT_PACKAGES += \
recovery-refresh \
@@ -96,11 +92,6 @@
libhidltransport \
libhwbinder \
-# Camera service uses 'libdepthphoto' for adding dynamic depth
-# metadata inside depth jpegs.
-PRODUCT_PACKAGES += \
- libdepthphoto \
-
PRODUCT_PACKAGES_DEBUG += \
avbctl \
bootctl \
@@ -114,6 +105,11 @@
PRODUCT_HOST_PACKAGES += \
tinyplay
+# Enable configurable audio policy
+PRODUCT_PACKAGES += \
+ libaudiopolicyengineconfigurable \
+ libpolicy-subsystem
+
# Include all zygote init scripts. "ro.zygote" will select one of them.
PRODUCT_COPY_FILES += \
system/core/rootdir/init.zygote32.rc:system/etc/init/hw/init.zygote32.rc \
@@ -125,14 +121,11 @@
PRODUCT_ENFORCE_RRO_TARGETS := *
-# TODO(b/150820813) Settings depends on static overlay, remove this after eliminating the dependency.
-PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS := Settings
-
-PRODUCT_NAME := mainline_system
+PRODUCT_NAME := generic_system
PRODUCT_BRAND := generic
# Define /system partition-specific product properties to identify that /system
-# partition is mainline_system.
+# partition is generic_system.
PRODUCT_SYSTEM_NAME := mainline
PRODUCT_SYSTEM_BRAND := Android
PRODUCT_SYSTEM_MANUFACTURER := Android
diff --git a/target/product/generic_system_arm64.mk b/target/product/generic_system_arm64.mk
new file mode 100644
index 0000000..2c64479
--- /dev/null
+++ b/target/product/generic_system_arm64.mk
@@ -0,0 +1,43 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+$(call enforce-product-packages-exist,)
+
+# Enable mainline checking
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
+
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
+PRODUCT_BUILD_ODM_DLKM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+
+PRODUCT_SHIPPING_API_LEVEL := 29
+
+PRODUCT_NAME := generic_system_arm64
+PRODUCT_DEVICE := mainline_arm64
+PRODUCT_BRAND := generic
diff --git a/target/product/generic_system_x86.mk b/target/product/generic_system_x86.mk
new file mode 100644
index 0000000..cf38a98
--- /dev/null
+++ b/target/product/generic_system_x86.mk
@@ -0,0 +1,42 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+$(call enforce-product-packages-exist,)
+
+# Enable mainline checking
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
+
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
+PRODUCT_BUILD_ODM_DLKM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+
+PRODUCT_SHIPPING_API_LEVEL := 29
+
+PRODUCT_NAME := generic_system_x86
+PRODUCT_DEVICE := mainline_x86
+PRODUCT_BRAND := generic
diff --git a/target/product/generic_system_x86_64.mk b/target/product/generic_system_x86_64.mk
new file mode 100644
index 0000000..5f3829b
--- /dev/null
+++ b/target/product/generic_system_x86_64.mk
@@ -0,0 +1,43 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+$(call enforce-product-packages-exist,)
+
+# Enable mainline checking
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
+
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
+PRODUCT_BUILD_ODM_DLKM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+
+PRODUCT_SHIPPING_API_LEVEL := 29
+
+PRODUCT_NAME := generic_system_x86_64
+PRODUCT_DEVICE := mainline_x86_64
+PRODUCT_BRAND := generic
diff --git a/target/product/generic_system_x86_arm.mk b/target/product/generic_system_x86_arm.mk
new file mode 100644
index 0000000..923f32d
--- /dev/null
+++ b/target/product/generic_system_x86_arm.mk
@@ -0,0 +1,42 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# All components inherited here go to system image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
+$(call enforce-product-packages-exist,)
+
+# Enable mainline checking
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
+
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_ODM_IMAGE := false
+PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
+PRODUCT_BUILD_ODM_DLKM_IMAGE := false
+PRODUCT_BUILD_PRODUCT_IMAGE := false
+PRODUCT_BUILD_RAMDISK_IMAGE := false
+PRODUCT_BUILD_SYSTEM_IMAGE := true
+PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
+PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+
+PRODUCT_SHIPPING_API_LEVEL := 29
+
+PRODUCT_NAME := generic_system_x86_arm
+PRODUCT_DEVICE := mainline_x86_arm
+PRODUCT_BRAND := generic
diff --git a/target/product/generic_x86.mk b/target/product/generic_x86.mk
index 0274b5b..eeb8216 100644
--- a/target/product/generic_x86.mk
+++ b/target/product/generic_x86.mk
@@ -17,7 +17,7 @@
# This is a generic phone product that isn't specialized for a specific device.
# It includes the base Android platform.
-include $(SRC_TARGET_DIR)/product/generic.mk
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic.mk)
# Overrides
PRODUCT_BRAND := generic_x86
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index d324aa9..7f19615 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -20,7 +20,6 @@
# Set lowram options and enable traced by default
PRODUCT_VENDOR_PROPERTIES += \
ro.config.low_ram=true \
- persist.traced.enable=1 \
# Speed profile services and wifi-service to reduce RAM and storage.
PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile
@@ -40,6 +39,7 @@
# Do not spin up a separate process for the network stack on go devices, use an in-process APK.
PRODUCT_PACKAGES += InProcessNetworkStack
PRODUCT_PACKAGES += CellBroadcastAppPlatform
+PRODUCT_PACKAGES += CellBroadcastServiceModulePlatform
PRODUCT_PACKAGES += com.android.tethering.inprocess
# Strip the local variable table and the local variable type table to reduce
diff --git a/target/product/gsi/30.txt b/target/product/gsi/30.txt
new file mode 100644
index 0000000..0589517
--- /dev/null
+++ b/target/product/gsi/30.txt
@@ -0,0 +1,309 @@
+LLNDK: libEGL.so
+LLNDK: libGLESv1_CM.so
+LLNDK: libGLESv2.so
+LLNDK: libGLESv3.so
+LLNDK: libRS.so
+LLNDK: libandroid_net.so
+LLNDK: libbinder_ndk.so
+LLNDK: libc.so
+LLNDK: libcgrouprc.so
+LLNDK: libdl.so
+LLNDK: libft2.so
+LLNDK: liblog.so
+LLNDK: libm.so
+LLNDK: libmediandk.so
+LLNDK: libnativewindow.so
+LLNDK: libneuralnetworks.so
+LLNDK: libselinux.so
+LLNDK: libsync.so
+LLNDK: libvndksupport.so
+LLNDK: libvulkan.so
+VNDK-SP: android.hardware.common-V1-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common-V1-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common@1.0.so
+VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.mapper@2.0.so
+VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.renderscript@1.0.so
+VNDK-SP: android.hidl.memory.token@1.0.so
+VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
+VNDK-SP: libRSCpuRef.so
+VNDK-SP: libRSDriver.so
+VNDK-SP: libRS_internal.so
+VNDK-SP: libbacktrace.so
+VNDK-SP: libbase.so
+VNDK-SP: libbcinfo.so
+VNDK-SP: libblas.so
+VNDK-SP: libc++.so
+VNDK-SP: libcompiler_rt.so
+VNDK-SP: libcutils.so
+VNDK-SP: libgralloctypes.so
+VNDK-SP: libhardware.so
+VNDK-SP: libhidlbase.so
+VNDK-SP: libhidlmemory.so
+VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
+VNDK-SP: liblzma.so
+VNDK-SP: libprocessgroup.so
+VNDK-SP: libunwindstack.so
+VNDK-SP: libutils.so
+VNDK-SP: libutilscallstack.so
+VNDK-SP: libz.so
+VNDK-core: android.frameworks.automotive.display@1.0.so
+VNDK-core: android.frameworks.cameraservice.common@2.0.so
+VNDK-core: android.frameworks.cameraservice.device@2.0.so
+VNDK-core: android.frameworks.cameraservice.service@2.0.so
+VNDK-core: android.frameworks.cameraservice.service@2.1.so
+VNDK-core: android.frameworks.displayservice@1.0.so
+VNDK-core: android.frameworks.schedulerservice@1.0.so
+VNDK-core: android.frameworks.sensorservice@1.0.so
+VNDK-core: android.frameworks.stats@1.0.so
+VNDK-core: android.hardware.atrace@1.0.so
+VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.audio.common@4.0.so
+VNDK-core: android.hardware.audio.common@5.0.so
+VNDK-core: android.hardware.audio.common@6.0.so
+VNDK-core: android.hardware.audio.effect@2.0.so
+VNDK-core: android.hardware.audio.effect@4.0.so
+VNDK-core: android.hardware.audio.effect@5.0.so
+VNDK-core: android.hardware.audio.effect@6.0.so
+VNDK-core: android.hardware.audio@2.0.so
+VNDK-core: android.hardware.audio@4.0.so
+VNDK-core: android.hardware.audio@5.0.so
+VNDK-core: android.hardware.audio@6.0.so
+VNDK-core: android.hardware.authsecret@1.0.so
+VNDK-core: android.hardware.automotive.audiocontrol@1.0.so
+VNDK-core: android.hardware.automotive.audiocontrol@2.0.so
+VNDK-core: android.hardware.automotive.can@1.0.so
+VNDK-core: android.hardware.automotive.evs@1.0.so
+VNDK-core: android.hardware.automotive.evs@1.1.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
+VNDK-core: android.hardware.automotive.sv@1.0.so
+VNDK-core: android.hardware.automotive.vehicle@2.0.so
+VNDK-core: android.hardware.biometrics.face@1.0.so
+VNDK-core: android.hardware.biometrics.fingerprint@2.1.so
+VNDK-core: android.hardware.biometrics.fingerprint@2.2.so
+VNDK-core: android.hardware.bluetooth.a2dp@1.0.so
+VNDK-core: android.hardware.bluetooth.audio@2.0.so
+VNDK-core: android.hardware.bluetooth@1.0.so
+VNDK-core: android.hardware.bluetooth@1.1.so
+VNDK-core: android.hardware.boot@1.0.so
+VNDK-core: android.hardware.boot@1.1.so
+VNDK-core: android.hardware.broadcastradio@1.0.so
+VNDK-core: android.hardware.broadcastradio@1.1.so
+VNDK-core: android.hardware.broadcastradio@2.0.so
+VNDK-core: android.hardware.camera.common@1.0.so
+VNDK-core: android.hardware.camera.device@1.0.so
+VNDK-core: android.hardware.camera.device@3.2.so
+VNDK-core: android.hardware.camera.device@3.3.so
+VNDK-core: android.hardware.camera.device@3.4.so
+VNDK-core: android.hardware.camera.device@3.5.so
+VNDK-core: android.hardware.camera.device@3.6.so
+VNDK-core: android.hardware.camera.metadata@3.2.so
+VNDK-core: android.hardware.camera.metadata@3.3.so
+VNDK-core: android.hardware.camera.metadata@3.4.so
+VNDK-core: android.hardware.camera.metadata@3.5.so
+VNDK-core: android.hardware.camera.provider@2.4.so
+VNDK-core: android.hardware.camera.provider@2.5.so
+VNDK-core: android.hardware.camera.provider@2.6.so
+VNDK-core: android.hardware.cas.native@1.0.so
+VNDK-core: android.hardware.cas@1.0.so
+VNDK-core: android.hardware.cas@1.1.so
+VNDK-core: android.hardware.cas@1.2.so
+VNDK-core: android.hardware.configstore-utils.so
+VNDK-core: android.hardware.configstore@1.0.so
+VNDK-core: android.hardware.configstore@1.1.so
+VNDK-core: android.hardware.confirmationui-support-lib.so
+VNDK-core: android.hardware.confirmationui@1.0.so
+VNDK-core: android.hardware.contexthub@1.0.so
+VNDK-core: android.hardware.contexthub@1.1.so
+VNDK-core: android.hardware.drm@1.0.so
+VNDK-core: android.hardware.drm@1.1.so
+VNDK-core: android.hardware.drm@1.2.so
+VNDK-core: android.hardware.drm@1.3.so
+VNDK-core: android.hardware.dumpstate@1.0.so
+VNDK-core: android.hardware.dumpstate@1.1.so
+VNDK-core: android.hardware.fastboot@1.0.so
+VNDK-core: android.hardware.gatekeeper@1.0.so
+VNDK-core: android.hardware.gnss.measurement_corrections@1.0.so
+VNDK-core: android.hardware.gnss.measurement_corrections@1.1.so
+VNDK-core: android.hardware.gnss.visibility_control@1.0.so
+VNDK-core: android.hardware.gnss@1.0.so
+VNDK-core: android.hardware.gnss@1.1.so
+VNDK-core: android.hardware.gnss@2.0.so
+VNDK-core: android.hardware.gnss@2.1.so
+VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.graphics.composer@2.1.so
+VNDK-core: android.hardware.graphics.composer@2.2.so
+VNDK-core: android.hardware.graphics.composer@2.3.so
+VNDK-core: android.hardware.graphics.composer@2.4.so
+VNDK-core: android.hardware.health.storage@1.0.so
+VNDK-core: android.hardware.health@1.0.so
+VNDK-core: android.hardware.health@2.0.so
+VNDK-core: android.hardware.health@2.1.so
+VNDK-core: android.hardware.identity-V2-ndk_platform.so
+VNDK-core: android.hardware.input.classifier@1.0.so
+VNDK-core: android.hardware.input.common@1.0.so
+VNDK-core: android.hardware.ir@1.0.so
+VNDK-core: android.hardware.keymaster-V2-ndk_platform.so
+VNDK-core: android.hardware.keymaster@3.0.so
+VNDK-core: android.hardware.keymaster@4.0.so
+VNDK-core: android.hardware.keymaster@4.1.so
+VNDK-core: android.hardware.light-V1-ndk_platform.so
+VNDK-core: android.hardware.light@2.0.so
+VNDK-core: android.hardware.media.bufferpool@1.0.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.c2@1.0.so
+VNDK-core: android.hardware.media.c2@1.1.so
+VNDK-core: android.hardware.media.omx@1.0.so
+VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.neuralnetworks@1.0.so
+VNDK-core: android.hardware.neuralnetworks@1.1.so
+VNDK-core: android.hardware.neuralnetworks@1.2.so
+VNDK-core: android.hardware.neuralnetworks@1.3.so
+VNDK-core: android.hardware.nfc@1.0.so
+VNDK-core: android.hardware.nfc@1.1.so
+VNDK-core: android.hardware.nfc@1.2.so
+VNDK-core: android.hardware.oemlock@1.0.so
+VNDK-core: android.hardware.power-V1-ndk_platform.so
+VNDK-core: android.hardware.power.stats@1.0.so
+VNDK-core: android.hardware.power@1.0.so
+VNDK-core: android.hardware.power@1.1.so
+VNDK-core: android.hardware.power@1.2.so
+VNDK-core: android.hardware.power@1.3.so
+VNDK-core: android.hardware.radio.config@1.0.so
+VNDK-core: android.hardware.radio.config@1.1.so
+VNDK-core: android.hardware.radio.config@1.2.so
+VNDK-core: android.hardware.radio.deprecated@1.0.so
+VNDK-core: android.hardware.radio@1.0.so
+VNDK-core: android.hardware.radio@1.1.so
+VNDK-core: android.hardware.radio@1.2.so
+VNDK-core: android.hardware.radio@1.3.so
+VNDK-core: android.hardware.radio@1.4.so
+VNDK-core: android.hardware.radio@1.5.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
+VNDK-core: android.hardware.secure_element@1.0.so
+VNDK-core: android.hardware.secure_element@1.1.so
+VNDK-core: android.hardware.secure_element@1.2.so
+VNDK-core: android.hardware.sensors@1.0.so
+VNDK-core: android.hardware.sensors@2.0.so
+VNDK-core: android.hardware.sensors@2.1.so
+VNDK-core: android.hardware.soundtrigger@2.0-core.so
+VNDK-core: android.hardware.soundtrigger@2.0.so
+VNDK-core: android.hardware.soundtrigger@2.1.so
+VNDK-core: android.hardware.soundtrigger@2.2.so
+VNDK-core: android.hardware.soundtrigger@2.3.so
+VNDK-core: android.hardware.tetheroffload.config@1.0.so
+VNDK-core: android.hardware.tetheroffload.control@1.0.so
+VNDK-core: android.hardware.thermal@1.0.so
+VNDK-core: android.hardware.thermal@1.1.so
+VNDK-core: android.hardware.thermal@2.0.so
+VNDK-core: android.hardware.tv.cec@1.0.so
+VNDK-core: android.hardware.tv.cec@2.0.so
+VNDK-core: android.hardware.tv.input@1.0.so
+VNDK-core: android.hardware.tv.tuner@1.0.so
+VNDK-core: android.hardware.usb.gadget@1.0.so
+VNDK-core: android.hardware.usb.gadget@1.1.so
+VNDK-core: android.hardware.usb@1.0.so
+VNDK-core: android.hardware.usb@1.1.so
+VNDK-core: android.hardware.usb@1.2.so
+VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
+VNDK-core: android.hardware.vibrator@1.0.so
+VNDK-core: android.hardware.vibrator@1.1.so
+VNDK-core: android.hardware.vibrator@1.2.so
+VNDK-core: android.hardware.vibrator@1.3.so
+VNDK-core: android.hardware.vr@1.0.so
+VNDK-core: android.hardware.weaver@1.0.so
+VNDK-core: android.hardware.wifi.hostapd@1.0.so
+VNDK-core: android.hardware.wifi.hostapd@1.1.so
+VNDK-core: android.hardware.wifi.hostapd@1.2.so
+VNDK-core: android.hardware.wifi.offload@1.0.so
+VNDK-core: android.hardware.wifi.supplicant@1.0.so
+VNDK-core: android.hardware.wifi.supplicant@1.1.so
+VNDK-core: android.hardware.wifi.supplicant@1.2.so
+VNDK-core: android.hardware.wifi.supplicant@1.3.so
+VNDK-core: android.hardware.wifi@1.0.so
+VNDK-core: android.hardware.wifi@1.1.so
+VNDK-core: android.hardware.wifi@1.2.so
+VNDK-core: android.hardware.wifi@1.3.so
+VNDK-core: android.hardware.wifi@1.4.so
+VNDK-core: android.hidl.allocator@1.0.so
+VNDK-core: android.hidl.memory.block@1.0.so
+VNDK-core: android.hidl.token@1.0-utils.so
+VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.system.net.netd@1.0.so
+VNDK-core: android.system.net.netd@1.1.so
+VNDK-core: android.system.suspend@1.0.so
+VNDK-core: android.system.wifi.keystore@1.0.so
+VNDK-core: libadf.so
+VNDK-core: libaudioroute.so
+VNDK-core: libaudioutils.so
+VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
+VNDK-core: libcamera_metadata.so
+VNDK-core: libcap.so
+VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
+VNDK-core: libcrypto.so
+VNDK-core: libcrypto_utils.so
+VNDK-core: libcurl.so
+VNDK-core: libdiskconfig.so
+VNDK-core: libdumpstateutil.so
+VNDK-core: libevent.so
+VNDK-core: libexif.so
+VNDK-core: libexpat.so
+VNDK-core: libfmq.so
+VNDK-core: libgatekeeper.so
+VNDK-core: libgui.so
+VNDK-core: libhardware_legacy.so
+VNDK-core: libhidlallocatorutils.so
+VNDK-core: libjpeg.so
+VNDK-core: libldacBT_abr.so
+VNDK-core: libldacBT_enc.so
+VNDK-core: liblz4.so
+VNDK-core: libmedia_helper.so
+VNDK-core: libmedia_omx.so
+VNDK-core: libmemtrack.so
+VNDK-core: libminijail.so
+VNDK-core: libmkbootimg_abi_check.so
+VNDK-core: libnetutils.so
+VNDK-core: libnl.so
+VNDK-core: libpcre2.so
+VNDK-core: libpiex.so
+VNDK-core: libpng.so
+VNDK-core: libpower.so
+VNDK-core: libprocinfo.so
+VNDK-core: libradio_metadata.so
+VNDK-core: libspeexresampler.so
+VNDK-core: libsqlite.so
+VNDK-core: libssl.so
+VNDK-core: libstagefright_bufferpool@2.0.so
+VNDK-core: libstagefright_bufferqueue_helper.so
+VNDK-core: libstagefright_foundation.so
+VNDK-core: libstagefright_omx.so
+VNDK-core: libstagefright_omx_utils.so
+VNDK-core: libstagefright_xmlparser.so
+VNDK-core: libsysutils.so
+VNDK-core: libtinyalsa.so
+VNDK-core: libtinyxml2.so
+VNDK-core: libui.so
+VNDK-core: libusbhost.so
+VNDK-core: libwifi-system-iface.so
+VNDK-core: libxml2.so
+VNDK-core: libyuv.so
+VNDK-core: libziparchive.so
+VNDK-private: libbacktrace.so
+VNDK-private: libblas.so
+VNDK-private: libcompiler_rt.so
+VNDK-private: libft2.so
+VNDK-private: libgui.so
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index b4df5fe..f21fe16 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -161,7 +161,10 @@
include $(BUILD_PHONY_PACKAGE)
include $(CLEAR_VARS)
-_vndk_versions := $(PRODUCT_EXTRA_VNDK_VERSIONS)
+_vndk_versions :=
+ifeq ($(filter com.android.vndk.current.on_vendor, $(PRODUCT_PACKAGES)),)
+ _vndk_versions += $(PRODUCT_EXTRA_VNDK_VERSIONS)
+endif
ifneq ($(BOARD_VNDK_VERSION),current)
_vndk_versions += $(BOARD_VNDK_VERSION)
endif
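With the filter above, the extra VNDK snapshot versions are only picked up when the current VNDK APEX is not packaged on the vendor partition. A hedged illustration with example version numbers:

    # Illustrative product fragment.
    PRODUCT_PACKAGES += com.android.vndk.current.on_vendor
    PRODUCT_EXTRA_VNDK_VERSIONS := 28 29  # not added to _vndk_versions above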
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 345faa4..2ca6687 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -18,14 +18,20 @@
LLNDK: libsync.so
LLNDK: libvndksupport.so
LLNDK: libvulkan.so
-VNDK-SP: android.hardware.common-V1-ndk_platform.so
-VNDK-SP: android.hardware.graphics.common-V1-ndk_platform.so
+VNDK-SP: android.hardware.common-V2-ndk_platform.so
+VNDK-SP: android.hardware.common-unstable-ndk_platform.so
+VNDK-SP: android.hardware.common.fmq-V1-ndk_platform.so
+VNDK-SP: android.hardware.common.fmq-ndk_platform.so
+VNDK-SP: android.hardware.common.fmq-unstable-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common-V2-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common-unstable-ndk_platform.so
VNDK-SP: android.hardware.graphics.common@1.0.so
VNDK-SP: android.hardware.graphics.common@1.1.so
VNDK-SP: android.hardware.graphics.common@1.2.so
VNDK-SP: android.hardware.graphics.mapper@2.0.so
VNDK-SP: android.hardware.graphics.mapper@2.1.so
VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
VNDK-SP: android.hardware.renderscript@1.0.so
VNDK-SP: android.hidl.memory.token@1.0.so
VNDK-SP: android.hidl.memory@1.0-impl.so
@@ -41,6 +47,8 @@
VNDK-SP: libc++.so
VNDK-SP: libcompiler_rt.so
VNDK-SP: libcutils.so
+VNDK-SP: libdmabufheap.so
+VNDK-SP: libgralloctypes.so
VNDK-SP: libhardware.so
VNDK-SP: libhidlbase.so
VNDK-SP: libhidlmemory.so
@@ -53,32 +61,69 @@
VNDK-SP: libutilscallstack.so
VNDK-SP: libz.so
VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.authsecret-V1-ndk_platform.so
+VNDK-core: android.hardware.authsecret-ndk_platform.so
+VNDK-core: android.hardware.authsecret-unstable-ndk_platform.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
+VNDK-core: android.hardware.automotive.occupant_awareness-ndk_platform.so
VNDK-core: android.hardware.configstore-utils.so
VNDK-core: android.hardware.configstore@1.0.so
VNDK-core: android.hardware.configstore@1.1.so
VNDK-core: android.hardware.confirmationui-support-lib.so
VNDK-core: android.hardware.graphics.allocator@2.0.so
VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.health.storage-V1-ndk_platform.so
+VNDK-core: android.hardware.health.storage-ndk_platform.so
+VNDK-core: android.hardware.health.storage-unstable-ndk_platform.so
VNDK-core: android.hardware.identity-V2-ndk_platform.so
+VNDK-core: android.hardware.identity-ndk_platform.so
VNDK-core: android.hardware.keymaster-V2-ndk_platform.so
+VNDK-core: android.hardware.keymaster-ndk_platform.so
VNDK-core: android.hardware.light-V1-ndk_platform.so
+VNDK-core: android.hardware.light-ndk_platform.so
VNDK-core: android.hardware.media.bufferpool@2.0.so
VNDK-core: android.hardware.media.omx@1.0.so
VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack-V1-ndk_platform.so
+VNDK-core: android.hardware.memtrack-ndk_platform.so
+VNDK-core: android.hardware.memtrack-unstable-ndk_platform.so
VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.oemlock-V1-ndk_platform.so
+VNDK-core: android.hardware.oemlock-ndk_platform.so
+VNDK-core: android.hardware.oemlock-unstable-ndk_platform.so
VNDK-core: android.hardware.power-V1-ndk_platform.so
+VNDK-core: android.hardware.power-ndk_platform.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
+VNDK-core: android.hardware.rebootescrow-ndk_platform.so
+VNDK-core: android.hardware.security.keymint-V1-ndk_platform.so
+VNDK-core: android.hardware.security.keymint-ndk_platform.so
+VNDK-core: android.hardware.security.keymint-unstable-ndk_platform.so
+VNDK-core: android.hardware.security.secureclock-V1-ndk_platform.so
+VNDK-core: android.hardware.security.secureclock-ndk_platform.so
+VNDK-core: android.hardware.security.secureclock-unstable-ndk_platform.so
+VNDK-core: android.hardware.security.sharedsecret-V1-ndk_platform.so
+VNDK-core: android.hardware.security.sharedsecret-ndk_platform.so
+VNDK-core: android.hardware.security.sharedsecret-unstable-ndk_platform.so
VNDK-core: android.hardware.soundtrigger@2.0-core.so
VNDK-core: android.hardware.soundtrigger@2.0.so
VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
+VNDK-core: android.hardware.vibrator-ndk_platform.so
+VNDK-core: android.hardware.weaver-V1-ndk_platform.so
+VNDK-core: android.hardware.weaver-ndk_platform.so
+VNDK-core: android.hardware.weaver-unstable-ndk_platform.so
VNDK-core: android.hidl.token@1.0-utils.so
VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.system.keystore2-V1-ndk_platform.so
+VNDK-core: android.system.keystore2-ndk_platform.so
+VNDK-core: android.system.keystore2-unstable-ndk_platform.so
VNDK-core: android.system.suspend@1.0.so
-VNDK-core: libadf.so
VNDK-core: libaudioroute.so
VNDK-core: libaudioutils.so
VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
VNDK-core: libcamera_metadata.so
VNDK-core: libcap.so
VNDK-core: libcn-cbor.so
@@ -136,3 +181,70 @@
VNDK-private: libcompiler_rt.so
VNDK-private: libft2.so
VNDK-private: libgui.so
+VNDK-product: android.hardware.audio.common@2.0.so
+VNDK-product: android.hardware.configstore@1.0.so
+VNDK-product: android.hardware.configstore@1.1.so
+VNDK-product: android.hardware.graphics.allocator@2.0.so
+VNDK-product: android.hardware.graphics.allocator@3.0.so
+VNDK-product: android.hardware.graphics.allocator@4.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-product: android.hardware.graphics.common@1.0.so
+VNDK-product: android.hardware.graphics.common@1.1.so
+VNDK-product: android.hardware.graphics.common@1.2.so
+VNDK-product: android.hardware.graphics.mapper@2.0.so
+VNDK-product: android.hardware.graphics.mapper@2.1.so
+VNDK-product: android.hardware.graphics.mapper@3.0.so
+VNDK-product: android.hardware.graphics.mapper@4.0.so
+VNDK-product: android.hardware.media.bufferpool@2.0.so
+VNDK-product: android.hardware.media.omx@1.0.so
+VNDK-product: android.hardware.media@1.0.so
+VNDK-product: android.hardware.memtrack@1.0.so
+VNDK-product: android.hardware.renderscript@1.0.so
+VNDK-product: android.hardware.soundtrigger@2.0.so
+VNDK-product: android.hidl.memory.token@1.0.so
+VNDK-product: android.hidl.memory@1.0.so
+VNDK-product: android.hidl.safe_union@1.0.so
+VNDK-product: android.hidl.token@1.0.so
+VNDK-product: android.system.suspend@1.0.so
+VNDK-product: libaudioutils.so
+VNDK-product: libbacktrace.so
+VNDK-product: libbase.so
+VNDK-product: libc++.so
+VNDK-product: libcamera_metadata.so
+VNDK-product: libcap.so
+VNDK-product: libcompiler_rt.so
+VNDK-product: libcrypto.so
+VNDK-product: libcurl.so
+VNDK-product: libcutils.so
+VNDK-product: libevent.so
+VNDK-product: libexpat.so
+VNDK-product: libfmq.so
+VNDK-product: libhidlbase.so
+VNDK-product: libhidlmemory.so
+VNDK-product: libion.so
+VNDK-product: libjpeg.so
+VNDK-product: libjsoncpp.so
+VNDK-product: libldacBT_abr.so
+VNDK-product: libldacBT_enc.so
+VNDK-product: liblz4.so
+VNDK-product: liblzma.so
+VNDK-product: libminijail.so
+VNDK-product: libnl.so
+VNDK-product: libpcre2.so
+VNDK-product: libpiex.so
+VNDK-product: libpng.so
+VNDK-product: libprocessgroup.so
+VNDK-product: libprocinfo.so
+VNDK-product: libspeexresampler.so
+VNDK-product: libssl.so
+VNDK-product: libtinyalsa.so
+VNDK-product: libtinyxml2.so
+VNDK-product: libunwindstack.so
+VNDK-product: libutils.so
+VNDK-product: libutilscallstack.so
+VNDK-product: libwifi-system-iface.so
+VNDK-product: libxml2.so
+VNDK-product: libyuv.so
+VNDK-product: libz.so
+VNDK-product: libziparchive.so
diff --git a/target/product/gsi_arm64.mk b/target/product/gsi_arm64.mk
deleted file mode 100644
index adf7ca5..0000000
--- a/target/product/gsi_arm64.mk
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-
-# Enable mainline checking
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# Special settings for GSI releasing
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
-
-
-PRODUCT_NAME := gsi_arm64
-PRODUCT_DEVICE := gsi_arm64
-PRODUCT_BRAND := generic
-PRODUCT_MODEL := GSI on ARM64
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 46c956d..2c74ce0 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -37,20 +37,22 @@
# Enable dynamic partition size
PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
-# Needed by Pi newly launched device to pass VtsTrebleSysProp on GSI
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
-
# GSI targets should install "unflattened" APEXes in /system
TARGET_FLATTEN_APEX := false
# GSI targets should install "flattened" APEXes in /system_ext as well
PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES := true
+# The flattened version of com.android.apex.cts.shim.v1 should be explicitly installed
+# because the shim apex is a prebuilt one and PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES is
+# not supported for prebuilt_apex modules yet.
+PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_with_prebuilts.flattened
+
# GSI specific tasks on boot
PRODUCT_PACKAGES += \
gsi_skip_mount.cfg \
init.gsi.rc \
init.vndk-nodef.rc \
-# Support additional P and Q VNDK packages
-PRODUCT_EXTRA_VNDK_VERSIONS := 28 29
+# Support additional P, Q and R VNDK packages
+PRODUCT_EXTRA_VNDK_VERSIONS := 28 29 30
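For comparison, a non-GSI device that still needs an older VNDK snapshot would typically set the same variable in its own product makefile. A minimal, hypothetical sketch (the version chosen is illustrative):

```
# Hypothetical device makefile snippet: also install the VNDK 29 snapshot
# alongside the device's current VNDK version.
PRODUCT_EXTRA_VNDK_VERSIONS := 29
```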
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
index e03c212..2199c57 100644
--- a/target/product/handheld_product.mk
+++ b/target/product/handheld_product.mk
@@ -31,6 +31,7 @@
LatinIME \
Music \
OneTimeInitializer \
+ preinstalled-packages-platform-handheld-product.xml \
QuickSearchBox \
SettingsIntelligence \
frameworks-base-overlays
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index e2c91b6..c2608c4 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -53,10 +53,9 @@
librs_jni \
ManagedProvisioning \
MmsService \
- MtpDocumentsProvider \
+ MtpService \
MusicFX \
NfcNci \
- OsuLogin \
PacProcessor \
PrintRecommendationService \
PrintSpooler \
diff --git a/core/tasks/apidiff.mk b/target/product/iorap_large_memory_config.mk
similarity index 78%
copy from core/tasks/apidiff.mk
copy to target/product/iorap_large_memory_config.mk
index 76e4749..9aa6642 100644
--- a/core/tasks/apidiff.mk
+++ b/target/product/iorap_large_memory_config.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,11 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-#
-# Rules for building API diffs.
#
-.PHONY: api-diff
-
-api-diff: api-stubs-docs-jdiff
+# Disable Camera pinner by default
+PRODUCT_PRODUCT_PROPERTIES += \
+ pinner.pin_camera=false
diff --git a/core/tasks/apidiff.mk b/target/product/mainline_sdk.mk
similarity index 79%
rename from core/tasks/apidiff.mk
rename to target/product/mainline_sdk.mk
index 76e4749..343aed6 100644
--- a/core/tasks/apidiff.mk
+++ b/target/product/mainline_sdk.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2017 The Android Open Source Project
+# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,11 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-#
-# Rules for building API diffs.
#
-.PHONY: api-diff
-
-api-diff: api-stubs-docs-jdiff
+PRODUCT_NAME := mainline_sdk
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_sdk
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
new file mode 120000
index 0000000..0b6eaf0
--- /dev/null
+++ b/target/product/mainline_system.mk
@@ -0,0 +1 @@
+generic_system.mk
\ No newline at end of file
diff --git a/target/product/mainline_system_arm64.mk b/target/product/mainline_system_arm64.mk
index a0c3e6d..5fa13ce 100644
--- a/target/product/mainline_system_arm64.mk
+++ b/target/product/mainline_system_arm64.mk
@@ -14,33 +14,10 @@
# limitations under the License.
#
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-$(call enforce-product-packages-exist,)
+# Do not modify this file. It is just an alias of generic_system_arm64.mk and
+# will be removed once the rename from mainline_system to generic_system is
+# complete.
-# Enable mainline checking
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
-
-PRODUCT_BUILD_CACHE_IMAGE := false
-PRODUCT_BUILD_ODM_IMAGE := false
-PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
-PRODUCT_BUILD_ODM_DLKM_IMAGE := false
-PRODUCT_BUILD_PRODUCT_IMAGE := false
-PRODUCT_BUILD_RAMDISK_IMAGE := false
-PRODUCT_BUILD_SYSTEM_IMAGE := true
-PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
-PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
-PRODUCT_BUILD_USERDATA_IMAGE := false
-PRODUCT_BUILD_VENDOR_IMAGE := false
-
-PRODUCT_SHIPPING_API_LEVEL := 29
-
-# TODO(b/137033385): change this back to "all"
-PRODUCT_RESTRICT_VENDOR_FILES := owner
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system_arm64.mk)
PRODUCT_NAME := mainline_system_arm64
-PRODUCT_DEVICE := mainline_arm64
-PRODUCT_BRAND := generic
diff --git a/target/product/mainline_system_x86.mk b/target/product/mainline_system_x86.mk
index 3c4a867..3fb1963 100644
--- a/target/product/mainline_system_x86.mk
+++ b/target/product/mainline_system_x86.mk
@@ -14,32 +14,10 @@
# limitations under the License.
#
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-$(call enforce-product-packages-exist,)
+# Do not modify this file. It is just an alias of generic_system_x86.mk and
+# will be removed once the rename from mainline_system to generic_system is
+# complete.
-# Enable mainline checking
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
-
-PRODUCT_BUILD_CACHE_IMAGE := false
-PRODUCT_BUILD_ODM_IMAGE := false
-PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
-PRODUCT_BUILD_ODM_DLKM_IMAGE := false
-PRODUCT_BUILD_PRODUCT_IMAGE := false
-PRODUCT_BUILD_RAMDISK_IMAGE := false
-PRODUCT_BUILD_SYSTEM_IMAGE := true
-PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
-PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
-PRODUCT_BUILD_USERDATA_IMAGE := false
-PRODUCT_BUILD_VENDOR_IMAGE := false
-
-PRODUCT_SHIPPING_API_LEVEL := 29
-
-# TODO(b/137033385): change this back to "all"
-PRODUCT_RESTRICT_VENDOR_FILES := owner
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system_x86.mk)
PRODUCT_NAME := mainline_system_x86
-PRODUCT_DEVICE := mainline_x86
-PRODUCT_BRAND := generic
diff --git a/target/product/mainline_system_x86_64.mk b/target/product/mainline_system_x86_64.mk
index 410c998..eab99c5 100644
--- a/target/product/mainline_system_x86_64.mk
+++ b/target/product/mainline_system_x86_64.mk
@@ -14,32 +14,10 @@
# limitations under the License.
#
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-$(call enforce-product-packages-exist,)
+# Do not modify this file. It is just an alias of generic_system_x86_64.mk and
+# will be removed once the rename from mainline_system to generic_system is
+# complete.
-# Enable mainline checking
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
-
-PRODUCT_BUILD_CACHE_IMAGE := false
-PRODUCT_BUILD_ODM_IMAGE := false
-PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
-PRODUCT_BUILD_ODM_DLKM_IMAGE := false
-PRODUCT_BUILD_PRODUCT_IMAGE := false
-PRODUCT_BUILD_RAMDISK_IMAGE := false
-PRODUCT_BUILD_SYSTEM_IMAGE := true
-PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
-PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
-PRODUCT_BUILD_USERDATA_IMAGE := false
-PRODUCT_BUILD_VENDOR_IMAGE := false
-
-PRODUCT_SHIPPING_API_LEVEL := 29
-
-PRODUCT_RESTRICT_VENDOR_FILES := all
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system_x86_64.mk)
PRODUCT_NAME := mainline_system_x86_64
-PRODUCT_DEVICE := mainline_x86_64
-PRODUCT_BRAND := generic
diff --git a/target/product/mainline_system_x86_arm.mk b/target/product/mainline_system_x86_arm.mk
index 8c6611e..483fb58 100644
--- a/target/product/mainline_system_x86_arm.mk
+++ b/target/product/mainline_system_x86_arm.mk
@@ -14,32 +14,10 @@
# limitations under the License.
#
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-$(call enforce-product-packages-exist,)
+# Do not modify this file. It is just an alias of generic_system_x86_arm.mk and
+# will be removed once the rename from mainline_system to generic_system is
+# complete.
-# Enable mainline checking
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := true
-
-PRODUCT_BUILD_CACHE_IMAGE := false
-PRODUCT_BUILD_ODM_IMAGE := false
-PRODUCT_BUILD_VENDOR_DLKM_IMAGE := false
-PRODUCT_BUILD_ODM_DLKM_IMAGE := false
-PRODUCT_BUILD_PRODUCT_IMAGE := false
-PRODUCT_BUILD_RAMDISK_IMAGE := false
-PRODUCT_BUILD_SYSTEM_IMAGE := true
-PRODUCT_BUILD_SYSTEM_EXT_IMAGE := false
-PRODUCT_BUILD_SYSTEM_OTHER_IMAGE := false
-PRODUCT_BUILD_USERDATA_IMAGE := false
-PRODUCT_BUILD_VENDOR_IMAGE := false
-
-PRODUCT_SHIPPING_API_LEVEL := 29
-
-# TODO(b/137033385): change this back to "all"
-PRODUCT_RESTRICT_VENDOR_FILES := owner
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system_x86_arm.mk)
PRODUCT_NAME := mainline_system_x86_arm
-PRODUCT_DEVICE := mainline_x86_arm
-PRODUCT_BRAND := generic
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 7a2dd73..4ebec51 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -52,12 +52,12 @@
PRODUCT_SYSTEM_SERVER_JARS := \
com.android.location.provider \
services \
- ethernet-service \
- wifi-service \
+ ethernet-service
# system server jars which are updated via apex modules.
# The values should be of the format <apex name>:<jar name>
PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS := \
+ com.android.permission:service-permission \
com.android.ipsec:android.net.ipsec.ike \
PRODUCT_COPY_FILES += \
diff --git a/target/product/media_vendor.mk b/target/product/media_vendor.mk
index 7d4af64..ef009ad 100644
--- a/target/product/media_vendor.mk
+++ b/target/product/media_vendor.mk
@@ -23,4 +23,3 @@
# /vendor packages
PRODUCT_PACKAGES += \
libaudiopreprocessing \
- libwebrtc_audio_preprocessing \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index b96601d..e655d51 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -16,6 +16,8 @@
# Provides a functioning ART environment without Android frameworks
+$(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
+
# Additional mixins to the boot classpath.
PRODUCT_PACKAGES += \
android.test.base \
@@ -30,8 +32,8 @@
# ART APEX module.
# Note that this package includes the minimal boot classpath JARs (listed in
# ART_APEX_JARS), which should no longer be added directly to PRODUCT_PACKAGES.
-PRODUCT_PACKAGES += com.android.art
-PRODUCT_HOST_PACKAGES += com.android.art
+PRODUCT_PACKAGES += com.android.art-autoselect
+PRODUCT_HOST_PACKAGES += com.android.art-autoselect
# Certificates.
PRODUCT_PACKAGES += \
@@ -41,10 +43,6 @@
hiddenapi-package-whitelist.xml \
PRODUCT_SYSTEM_PROPERTIES += \
- dalvik.vm.image-dex2oat-Xms=64m \
- dalvik.vm.image-dex2oat-Xmx=64m \
- dalvik.vm.dex2oat-Xms=64m \
- dalvik.vm.dex2oat-Xmx=512m \
dalvik.vm.usejit=true \
dalvik.vm.usejitprofiles=true \
dalvik.vm.dexopt.secondary=true \
@@ -70,6 +68,11 @@
# or if it is empty speed-profile is equivalent to (quicken + empty app image).
PRODUCT_SYSTEM_PROPERTIES += \
pm.dexopt.install?=speed-profile \
+ pm.dexopt.install-fast?=skip \
+ pm.dexopt.install-bulk?=speed-profile \
+ pm.dexopt.install-bulk-secondary?=verify \
+ pm.dexopt.install-bulk-downgraded?=verify \
+ pm.dexopt.install-bulk-secondary-downgraded?=extract \
pm.dexopt.bg-dexopt?=speed-profile \
pm.dexopt.ab-ota?=speed-profile \
pm.dexopt.inactive?=verify \
@@ -92,8 +95,14 @@
dalvik.vm.minidebuginfo=true \
dalvik.vm.dex2oat-minidebuginfo=true
-# Disable iorapd by default
+# Besides "ro.iorapd.enable", two other device configs gate IORap
+# (https://source.corp.google.com/android/system/iorap/iorapd.rc?q=iorapd.rc).
+# IORap is off by default and only starts when
+#
+# * "ro.iorapd.enable" is true (not merely unset), and
+# * one of the device configs is true.
+#
+# "ro.iorapd.enable" therefore has to be set to true so that iorapd can start.
PRODUCT_SYSTEM_PROPERTIES += \
- ro.iorapd.enable=false
+ ro.iorapd.enable?=true
-PRODUCT_USES_DEFAULT_ART_CONFIG := true
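Since the property is now written with the optional "?=" form, the platform only provides a default. A quick way to see the effective value on a running build (the property name comes from the makefile above; output depends on the device configuration):

```
# Check the effective iorapd toggle on a device (illustrative).
adb shell getprop ro.iorapd.enable
```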
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
index cefa288..761de05 100644
--- a/target/product/sdk_phone_arm64.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -30,7 +30,7 @@
# All components inherited here go to system image
#
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
#
# All components inherited here go to system_ext image
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index c4c5a38..5081a87 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -29,7 +29,7 @@
#
# All components inherited here go to system image
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
#
# All components inherited here go to system_ext image
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index bcee066..9096ff3 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -24,7 +24,7 @@
#
# All components inherited here go to system image
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
# Enable mainline checking for exact this product name
ifeq (sdk_phone_x86,$(TARGET_PRODUCT))
@@ -50,8 +50,8 @@
$(call inherit-product, $(SRC_TARGET_DIR)/board/emulator_x86/device.mk)
# Define the host tools and libs that are parts of the SDK.
--include sdk/build/product_sdk.mk
--include development/build/product_sdk.mk
+$(call inherit-product-if-exists, sdk/build/product_sdk.mk)
+$(call inherit-product-if-exists, development/build/product_sdk.mk)
# Overrides
PRODUCT_BRAND := Android
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index 82bbee5..161043b 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -25,7 +25,7 @@
# All components inherited here go to system image
#
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
# Enable mainline checking for exact this product name
ifeq (sdk_phone_x86_64,$(TARGET_PRODUCT))
@@ -51,8 +51,8 @@
$(call inherit-product, $(SRC_TARGET_DIR)/board/emulator_x86_64/device.mk)
# Define the host tools and libs that are parts of the SDK.
--include sdk/build/product_sdk.mk
--include development/build/product_sdk.mk
+$(call inherit-product-if-exists, sdk/build/product_sdk.mk)
+$(call inherit-product-if-exists, development/build/product_sdk.mk)
# Overrides
PRODUCT_BRAND := Android
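The switch from "-include" to "inherit-product-if-exists" matters because product variables are accumulated by the inherit machinery rather than parsed as plain Make. As a rough sketch of the behaviour, not the build system's verbatim definition, the helper can be thought of as a wildcard-guarded inherit:

```
# Rough sketch under assumed semantics; my-inherit-product-if-exists is a
# hypothetical name, not the real macro.
define my-inherit-product-if-exists
  $(if $(wildcard $(1)),$(call inherit-product,$(1)),)
endef
```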
diff --git a/target/product/sysconfig/Android.bp b/target/product/sysconfig/Android.bp
new file mode 100644
index 0000000..5632d17
--- /dev/null
+++ b/target/product/sysconfig/Android.bp
@@ -0,0 +1,33 @@
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+ name: "preinstalled-packages-platform-aosp-product.xml",
+ product_specific: true,
+ sub_dir: "sysconfig",
+ src: "preinstalled-packages-platform-aosp-product.xml",
+}
+
+prebuilt_etc {
+ name: "preinstalled-packages-platform-full-base.xml",
+ sub_dir: "sysconfig",
+ src: "preinstalled-packages-platform-full-base.xml",
+}
+
+prebuilt_etc {
+ name: "preinstalled-packages-platform-handheld-product.xml",
+ product_specific: true,
+ sub_dir: "sysconfig",
+ src: "preinstalled-packages-platform-handheld-product.xml",
+}
\ No newline at end of file
diff --git a/target/product/sysconfig/preinstalled-packages-platform-aosp-product.xml b/target/product/sysconfig/preinstalled-packages-platform-aosp-product.xml
new file mode 100644
index 0000000..eec1326
--- /dev/null
+++ b/target/product/sysconfig/preinstalled-packages-platform-aosp-product.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- System packages to preinstall on all devices with aosp_product, per user type.
+ Documentation at frameworks/base/data/etc/preinstalled-packages-platform.xml
+-->
+<config>
+ <install-in-user-type package="com.android.wallpaperpicker">
+ <install-in user-type="FULL" />
+ </install-in-user-type>
+</config>
diff --git a/target/product/sysconfig/preinstalled-packages-platform-full-base.xml b/target/product/sysconfig/preinstalled-packages-platform-full-base.xml
new file mode 100644
index 0000000..f601355
--- /dev/null
+++ b/target/product/sysconfig/preinstalled-packages-platform-full-base.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- System packages to preinstall on all devices with full_base, per user type.
+ Documentation at frameworks/base/data/etc/preinstalled-packages-platform.xml
+-->
+<config>
+ <install-in-user-type package="com.android.wallpaper.livepicker">
+ <install-in user-type="FULL" />
+ </install-in-user-type>
+</config>
diff --git a/target/product/sysconfig/preinstalled-packages-platform-handheld-product.xml b/target/product/sysconfig/preinstalled-packages-platform-handheld-product.xml
new file mode 100644
index 0000000..a5d9ba2
--- /dev/null
+++ b/target/product/sysconfig/preinstalled-packages-platform-handheld-product.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- System packages to preinstall on all devices with handheld_product, per user type.
+ Documentation at frameworks/base/data/etc/preinstalled-packages-platform.xml
+-->
+<config>
+ <install-in-user-type package="com.android.wallpapercropper">
+ <install-in user-type="FULL" />
+ </install-in-user-type>
+</config>
diff --git a/target/product/telephony_system.mk b/target/product/telephony_system.mk
index c306a04..ef48719 100644
--- a/target/product/telephony_system.mk
+++ b/target/product/telephony_system.mk
@@ -21,7 +21,7 @@
ONS \
CarrierDefaultApp \
CallLogBackup \
- CellBroadcastApp \
- CellBroadcastServiceModule \
+ com.android.cellbroadcast \
+ CellBroadcastLegacyApp \
PRODUCT_COPY_FILES := \
diff --git a/target/product/updatable_apex.mk b/target/product/updatable_apex.mk
index 2730f0e..c8dc8b0 100644
--- a/target/product/updatable_apex.mk
+++ b/target/product/updatable_apex.mk
@@ -17,6 +17,8 @@
# Inherit this when the target needs to support updating APEXes
ifneq ($(OVERRIDE_TARGET_FLATTEN_APEX),true)
+ # com.android.apex.cts.shim.v1_prebuilt overrides CtsShimPrebuilt
+ # and CtsShimPrivPrebuilt since they are packaged inside the APEX.
PRODUCT_PACKAGES += com.android.apex.cts.shim.v1_prebuilt
PRODUCT_VENDOR_PROPERTIES := ro.apex.updatable=true
TARGET_FLATTEN_APEX := false
diff --git a/target/product/virtual_ab_ota.mk b/target/product/virtual_ab_ota.mk
new file mode 120000
index 0000000..16f7329
--- /dev/null
+++ b/target/product/virtual_ab_ota.mk
@@ -0,0 +1 @@
+virtual_ab_ota/launch.mk
\ No newline at end of file
diff --git a/target/product/virtual_ab_ota/README.md b/target/product/virtual_ab_ota/README.md
new file mode 100644
index 0000000..2d40c03
--- /dev/null
+++ b/target/product/virtual_ab_ota/README.md
@@ -0,0 +1,16 @@
+# Virtual A/B makefiles
+
+Devices that use Virtual A/B must inherit from one of the makefiles in this directory.
+
+## Structure
+
+```
+launch.mk
+ |- retrofit.mk
+ |- plus_non_ab.mk
+
+launch_with_vendor_ramdisk.mk
+ |- compression.mk
+
+compression_retrofit.mk
+```
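Based on the inherit-product calls that appear later in this change, a launch device opting into plain Virtual A/B would pull in the top of that tree. A minimal, hypothetical device-makefile sketch:

```
# Hypothetical device makefile: enable Virtual A/B on a launch device.
$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/launch.mk)
```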
diff --git a/target/product/virtual_ab_ota_plus_non_ab.mk b/target/product/virtual_ab_ota/compression.mk
similarity index 73%
copy from target/product/virtual_ab_ota_plus_non_ab.mk
copy to target/product/virtual_ab_ota/compression.mk
index 99a10ed..8301047 100644
--- a/target/product/virtual_ab_ota_plus_non_ab.mk
+++ b/target/product/virtual_ab_ota/compression.mk
@@ -14,8 +14,11 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/launch_with_vendor_ramdisk.mk)
-PRODUCT_OTA_FORCE_NON_AB_PACKAGE := true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.allow_non_ab=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
+PRODUCT_VIRTUAL_AB_COMPRESSION := true
+PRODUCT_PACKAGES += \
+ snapuserd.vendor_ramdisk \
+ snapuserd \
+ snapuserd.recovery
diff --git a/target/product/virtual_ab_ota/compression_retrofit.mk b/target/product/virtual_ab_ota/compression_retrofit.mk
new file mode 100644
index 0000000..6c29cba
--- /dev/null
+++ b/target/product/virtual_ab_ota/compression_retrofit.mk
@@ -0,0 +1,28 @@
+#
+# Copyright (C) 2020 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
+PRODUCT_VIRTUAL_AB_COMPRESSION := true
+
+# For devices that are not GKI-capable (e.g. do not have vendor_boot),
+# snapuserd.ramdisk is included rather than snapuserd.vendor_ramdisk.
+# When using virtual_ab_ota_compression_retrofit.mk, either
+# virtual_ab_ota.mk or virtual_ab_ota_retrofit.mk must be inherited
+# as well.
+PRODUCT_PACKAGES += \
+ snapuserd.ramdisk \
+ snapuserd \
+ snapuserd.recovery
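The comment in this file spells out an inheritance requirement. A hypothetical retrofit device that also wants compression would therefore inherit both makefiles, for example:

```
# Hypothetical device makefile: Virtual A/B retrofit plus compression.
$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/retrofit.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/compression_retrofit.mk)
```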
diff --git a/target/product/virtual_ab_ota.mk b/target/product/virtual_ab_ota/launch.mk
similarity index 100%
rename from target/product/virtual_ab_ota.mk
rename to target/product/virtual_ab_ota/launch.mk
diff --git a/target/product/legacy_gsi_release.mk b/target/product/virtual_ab_ota/launch_with_vendor_ramdisk.mk
similarity index 66%
rename from target/product/legacy_gsi_release.mk
rename to target/product/virtual_ab_ota/launch_with_vendor_ramdisk.mk
index 09b96fb..bc81b33 100644
--- a/target/product/legacy_gsi_release.mk
+++ b/target/product/virtual_ab_ota/launch_with_vendor_ramdisk.mk
@@ -14,10 +14,13 @@
# limitations under the License.
#
-include $(SRC_TARGET_DIR)/product/gsi_release.mk
+# Devices that launch with Virtual A/B and have a vendor_boot partition should
+# inherit from this makefile instead of launch.mk.
-# Legacy GSI support additional O-MR1 interface
-PRODUCT_EXTRA_VNDK_VERSIONS += 27
+PRODUCT_VIRTUAL_AB_OTA := true
-# Legacy GSI relax the compatible property checking
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := false
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.enabled=true
+
+PRODUCT_PACKAGES += \
+ linker.vendor_ramdisk \
+ e2fsck.vendor_ramdisk \
diff --git a/target/product/virtual_ab_ota_plus_non_ab.mk b/target/product/virtual_ab_ota/plus_non_ab.mk
similarity index 97%
rename from target/product/virtual_ab_ota_plus_non_ab.mk
rename to target/product/virtual_ab_ota/plus_non_ab.mk
index 99a10ed..820fa1e 100644
--- a/target/product/virtual_ab_ota_plus_non_ab.mk
+++ b/target/product/virtual_ab_ota/plus_non_ab.mk
@@ -14,7 +14,7 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/launch.mk)
PRODUCT_OTA_FORCE_NON_AB_PACKAGE := true
diff --git a/target/product/virtual_ab_ota_retrofit.mk b/target/product/virtual_ab_ota/retrofit.mk
similarity index 97%
rename from target/product/virtual_ab_ota_retrofit.mk
rename to target/product/virtual_ab_ota/retrofit.mk
index 3416a4f..93b42b7 100644
--- a/target/product/virtual_ab_ota_retrofit.mk
+++ b/target/product/virtual_ab_ota/retrofit.mk
@@ -14,7 +14,7 @@
# limitations under the License.
#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/launch.mk)
PRODUCT_VIRTUAL_AB_OTA_RETROFIT := true
diff --git a/target/product/virtual_ab_ota_plus_non_ab.mk b/target/product/virtual_ab_ota_plus_non_ab.mk
new file mode 120000
index 0000000..4979957
--- /dev/null
+++ b/target/product/virtual_ab_ota_plus_non_ab.mk
@@ -0,0 +1 @@
+virtual_ab_ota/plus_non_ab.mk
\ No newline at end of file
diff --git a/target/product/virtual_ab_ota_retrofit.mk b/target/product/virtual_ab_ota_retrofit.mk
new file mode 120000
index 0000000..1e16ca8
--- /dev/null
+++ b/target/product/virtual_ab_ota_retrofit.mk
@@ -0,0 +1 @@
+virtual_ab_ota/retrofit.mk
\ No newline at end of file
diff --git a/tools/build-license-metadata.sh b/tools/build-license-metadata.sh
new file mode 100755
index 0000000..3bad358
--- /dev/null
+++ b/tools/build-license-metadata.sh
@@ -0,0 +1,312 @@
+#!/bin/sh
+
+set -u
+
+ME=$(basename $0)
+
+USAGE="Usage: ${ME} {options}
+
+Builds a license metadata specification and outputs it to stdout or {outfile}.
+
+The available options are:
+
+-k kind... license kinds
+-c condition... license conditions
+-p package... license package name
+-n notice... license notice file
+-d dependency... license metadata file dependency
+-t target... targets
+-m target:installed... map dependent targets to their installed names
+-is_container preserves dependent target names when given
+-o outfile output file
+"
+
+# Global flag variables
+license_kinds=
+license_conditions=
+license_package_name=
+license_notice=
+license_deps=
+targets=
+installmap=
+is_container=false
+ofile=
+
+# Global variables
+depfiles=" "
+effective_conditions=
+
+
+# Exits with a message.
+#
+# When the exit status is 2, assumes a usage error and outputs the usage message
+# to stderr before outputting the specific error message to stderr.
+#
+# Parameters:
+# Optional numeric exit status (defaults to 2, i.e. a usage error.)
+# Remaining args treated as an error message sent to stderr.
+die() {
+ lstatus=2
+ case "${1:-}" in *[^0-9]*) ;; *) lstatus="$1"; shift ;; esac
+ case "${lstatus}" in 2) echo "${USAGE}" >&2; echo >&2 ;; esac
+ if [ -n "$*" ]; then
+ echo -e "$*\n" >&2
+ fi
+ exit $lstatus
+}
+
+
+# Sets the flag variables based on the command-line.
+#
+# invoke with: process_args "$@"
+process_args() {
+ lcurr_flag=
+ while [ "$#" -gt '0' ]; do
+ case "${1}" in
+ -h)
+ echo "${USAGE}"
+ exit 0
+ ;;
+ -k)
+ lcurr_flag=kind
+ ;;
+ -c)
+ lcurr_flag=condition
+ ;;
+ -p)
+ lcurr_flag=package
+ ;;
+ -n)
+ lcurr_flag=notice
+ ;;
+ -d)
+ lcurr_flag=dependency
+ ;;
+ -t)
+ lcurr_flag=target
+ ;;
+ -m)
+ lcurr_flag=installmap
+ ;;
+ -o)
+ lcurr_flag=ofile
+ ;;
+ -is_container)
+ lcurr_flag=
+ is_container=true
+ ;;
+ -*)
+ die "Unknown flag: \"${1}\""
+ ;;
+ *)
+ case "${lcurr_flag}" in
+ kind)
+ license_kinds="${license_kinds}${license_kinds:+ }${1}"
+ ;;
+ condition)
+ license_conditions="${license_conditions}${license_conditions:+ }${1}"
+ ;;
+ package)
+ license_package_name="${license_package_name}${license_package_name:+ }${1}"
+ ;;
+ notice)
+ license_notice="${license_notice}${license_notice:+ }${1}"
+ ;;
+ dependency)
+ license_deps="${license_deps}${license_deps:+ }${1}"
+ ;;
+ target)
+ targets="${targets}${targets:+ }${1}"
+ ;;
+ installmap)
+ installmap="${installmap}${installmap:+ }${1}"
+ ;;
+ ofile)
+ if [ -n "${ofile}" ]; then
+ die "Output file -o appears twice as \"${ofile}\" and \"${1}\""
+ fi
+ ofile="${1}"
+ ;;
+ *)
+ die "Must precede argument \"${1}\" with type flag."
+ ;;
+ esac
+ ;;
+ esac
+ shift
+ done
+}
+
+# Reads a license metadata file from stdin, and outputs the named dependencies.
+#
+# No parameters.
+extract_deps() {
+ awk '$1 == "dep_name:" { sub(/^"/, "", $2); sub(/"$/, "", $2); print $2; }'
+}
+
+# Populates the depfiles variable identifying dependency files.
+#
+# Starting with the dependencies enumerated in license_deps, calculates the
+# transitive closure of all dependencies.
+#
+# Dependency names ending in .meta_module indirectly reference license
+# metadata with 1 license metadata filename per line.
+#
+# No parameters; no output.
+read_deps() {
+ lnewdeps=
+ for d in ${license_deps}; do
+ case "${d}" in
+ *.meta_module)
+ lnewdeps="${lnewdeps}${lnewdeps:+ }"$(cat "${d}") ;;
+ *)
+ lnewdeps="${lnewdeps}${lnewdeps:+ }${d}" ;;
+ esac
+ done
+ lnewdeps=$(echo "${lnewdeps}" | tr ' ' '\n' | sort -u)
+ lalldeps=
+ ldeps=
+ lmod=
+ ldep=
+ while [ "${#lnewdeps}" -gt '0' ]; do
+ ldeps="${lnewdeps}"
+ lnewdeps=
+ for ldep in ${ldeps}; do
+ depfiles="${depfiles}${ldep} "
+ lalldeps="${lalldeps}${lalldeps:+ }"$(cat "${ldep}" | extract_deps)
+ done
+ lalldeps=$(for d in ${lalldeps}; do echo "${d}"; done | sort -u)
+ for d in ${lalldeps}; do
+ ldeps="${d}"
+ case "${d}" in *.meta_module) ldeps=$(cat "${d}") ;; esac
+ for lmod in ${ldeps}; do
+ if ! expr "${depfiles}" : ".* ${lmod} .*" >/dev/null 2>&1; then
+ lnewdeps="${lnewdeps}${lnewdeps:+ }${lmod}"
+ fi
+ done
+ done
+ lalldeps=
+ done
+}
+
+# Returns the effective license conditions for the current license metadata.
+#
+# If a module is restricted or links in a restricted module, the effective
+# license has a restricted condition.
+calculate_effective_conditions() {
+ lconditions="${license_conditions}"
+ case "${license_conditions}" in
+ *restricted*) : do nothing ;;
+ *)
+ for d in ${depfiles}; do
+ if cat "${d}" | egrep -q 'effective_condition\s*:.*restricted' ; then
+ lconditions="${lconditions}${lconditions:+ }restricted"
+ fi
+ done
+ ;;
+ esac
+ echo "${lconditions}"
+}
+
+
+process_args "$@"
+
+if [ -n "${ofile}" ]; then
+ # truncate the output file before appending results
+ : >"${ofile}"
+else
+ ofile=/dev/stdout
+fi
+
+# spit out the license metadata file content
+(
+ echo 'license_package_name: "'${license_package_name}'"'
+ for kind in ${license_kinds}; do
+ echo 'license_kind: "'${kind}'"'
+ done
+ for condition in ${license_conditions}; do
+ echo 'license_condition: "'${condition}'"'
+ done
+ for f in ${license_notice}; do
+ echo 'license_text: "'${f}'"'
+ done
+ echo "is_container: ${is_container}"
+ for t in ${targets}; do
+ echo 'target: "'${t}'"'
+ done
+ for m in ${installmap}; do
+ echo 'install_map: "'${m}'"'
+ done
+) >>"${ofile}"
+read_deps
+effective_conditions=$(calculate_effective_conditions)
+for condition in ${effective_conditions}; do
+ echo 'effective_condition: "'${condition}'"'
+done >>"${ofile}"
+for dep in ${depfiles}; do
+ echo 'dep {'
+ cat "${dep}" | \
+ awk -v name="${dep}" '
+ function strip_type() {
+ $1 = ""
+ sub(/^\s*/, "")
+ }
+ BEGIN {
+ print " dep_name: " name
+ }
+ $1 == "license_package_name:" {
+ strip_type()
+ print " dep_package_name: "$0
+ }
+ $1 == "dep_name:" {
+ print " dep_sub_dep: "$2
+ }
+ $1 == "license_kind:" {
+ print " dep_license_kind: "$2
+ }
+ $1 == "license_condition:" {
+ print " dep_license_condition: "$2
+ }
+ $1 == "is_container:" {
+ print " dep_is_container: "$2
+ }
+ $1 == "license_text:" {
+ strip_type()
+ print " dep_license_text: "$0
+ }
+ $1 == "target:" {
+ print " dep_target: "$2
+ }
+ $1 == "install_map:" {
+ print " dep_install_map: "$2
+ }
+ '
+ # The restricted license kind is contagious to all linked dependencies.
+ dep_conditions=$(echo $(
+ cat "${dep}" | awk '
+ $1 == "effective_condition:" {
+ $1 = ""
+ sub(/^\s*/, "")
+ gsub(/"/, "")
+ print
+ }
+ '
+ ))
+ for condition in ${dep_conditions}; do
+ echo ' dep_effective_condition: "'${condition}'"'
+ done
+ if ! ${is_container}; then
+ case "${dep_conditions}" in
+ *restricted*) : already restricted -- nothing to inherit ;;
+ *)
+ case "${effective_conditions}" in
+ *restricted*)
+ # "contagious" restricted infects everything linked to restricted
+ echo ' dep_effective_condition: "restricted"'
+ ;;
+ esac
+ ;;
+ esac
+ fi
+ echo '}'
+done >>"${ofile}"
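A hypothetical invocation of the new script, using only flags documented in its usage text; the script path, license kind, notice file and target paths below are illustrative assumptions, not values taken from this change:

```
# Hypothetical invocation; all paths and values are illustrative.
build/make/tools/build-license-metadata.sh \
  -k SPDX-license-identifier-Apache-2.0 \
  -c notice \
  -p "Android" \
  -n build/soong/licenses/LICENSE \
  -t out/target/product/generic/obj/SHARED_LIBRARIES/libfoo_intermediates/libfoo.so \
  -o out/target/product/generic/obj/SHARED_LIBRARIES/libfoo_intermediates/libfoo.so.meta_lic
```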
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 09d8f70..f27ed8c 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -11,7 +11,8 @@
echo "ro.build.version.preview_sdk_fingerprint=$PLATFORM_PREVIEW_SDK_FINGERPRINT"
echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
echo "ro.build.version.all_codenames=$PLATFORM_VERSION_ALL_CODENAMES"
-echo "ro.build.version.release=$PLATFORM_VERSION"
+echo "ro.build.version.release=$PLATFORM_VERSION_LAST_STABLE"
+echo "ro.build.version.release_or_codename=$PLATFORM_VERSION"
echo "ro.build.version.security_patch=$PLATFORM_SECURITY_PATCH"
echo "ro.build.version.base_os=$PLATFORM_BASE_OS"
echo "ro.build.version.min_supported_target_sdk=$PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION"
@@ -34,9 +35,6 @@
if [ -n "$TARGET_CPU_ABI2" ] ; then
echo "ro.product.cpu.abi2=$TARGET_CPU_ABI2"
fi
-echo "ro.product.cpu.abilist=$TARGET_CPU_ABI_LIST"
-echo "ro.product.cpu.abilist32=$TARGET_CPU_ABI_LIST_32_BIT"
-echo "ro.product.cpu.abilist64=$TARGET_CPU_ABI_LIST_64_BIT"
if [ -n "$PRODUCT_DEFAULT_LOCALE" ] ; then
echo "ro.product.locale=$PRODUCT_DEFAULT_LOCALE"
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 372404b..1ff8e65 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -207,8 +207,8 @@
def _parse_llvm_readobj(cls, elf_file_path, header, lines):
"""Parse the output of llvm-readobj."""
lines_it = iter(lines)
- imported, exported = cls._parse_dynamic_symbols(lines_it)
dt_soname, dt_needed = cls._parse_dynamic_table(elf_file_path, lines_it)
+ imported, exported = cls._parse_dynamic_symbols(lines_it)
return ELF(dt_soname, dt_needed, imported, exported, header)
@@ -397,7 +397,7 @@
sys.exit(2)
- def check_dt_needed(self):
+ def check_dt_needed(self, system_shared_lib_names):
"""Check whether all DT_NEEDED entries are specified in the build
system."""
@@ -417,6 +417,11 @@
dt_needed = sorted(set(self._file_under_test.dt_needed))
modules = [re.sub('\\.so$', '', lib) for lib in dt_needed]
+ # Remove system shared libraries from the suggestion since they are added
+ # by default.
+ modules = [name for name in modules
+ if name not in system_shared_lib_names]
+
self._note()
self._note('Fix suggestions:')
self._note(
@@ -502,6 +507,11 @@
parser.add_argument('--shared-lib', action='append', default=[],
help='Path to shared library dependencies')
+ # System Shared library names
+ parser.add_argument('--system-shared-lib', action='append', default=[],
+ help='System shared libraries to be hidden from fix '
+ 'suggestions')
+
# Check options
parser.add_argument('--skip-bad-elf-magic', action='store_true',
help='Ignore the input file without the ELF magic word')
@@ -535,7 +545,7 @@
if args.soname:
checker.check_dt_soname(args.soname)
- checker.check_dt_needed()
+ checker.check_dt_needed(args.system_shared_lib)
if not args.allow_undefined_symbols:
checker.check_symbols()
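With the new --system-shared-lib flag, a caller can list each default system library so it is dropped from the "fix suggestions". A hypothetical command line, assuming the script's usual positional input file and using only options visible in this diff (paths are illustrative, and other options such as the llvm-readobj location may also be needed depending on the environment):

```
# Hypothetical invocation; file paths are illustrative.
build/make/tools/check_elf_file.py \
  --soname libfoo.so \
  --shared-lib out/soong/libbar.so \
  --system-shared-lib libc.so \
  --system-shared-lib libm.so \
  --system-shared-lib libdl.so \
  out/soong/libfoo.so
```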
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 41e8ca5..c338462 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -24,7 +24,7 @@
$(error Using $(TARGET_DEVICE_DIR)/android_filesystem_config.h is deprecated, please use TARGET_FS_CONFIG_GEN instead)
endif
-system_android_filesystem_config := system/core/include/private/android_filesystem_config.h
+system_android_filesystem_config := system/core/libcutils/include/private/android_filesystem_config.h
system_capability_header := bionic/libc/kernel/uapi/linux/capability.h
# List of supported vendor, oem, odm, vendor_dlkm, odm_dlkm, product and system_ext Partitions
@@ -34,8 +34,6 @@
$(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm) \
$(if $(BOARD_USES_VENDOR_DLKMIMAGE)$(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE),vendor_dlkm) \
$(if $(BOARD_USES_ODM_DLKMIMAGE)$(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE),odm_dlkm) \
- $(if $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),product) \
- $(if $(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),system_ext) \
)
##################################
@@ -45,11 +43,12 @@
LOCAL_MODULE := fs_config_dirs
LOCAL_REQUIRED_MODULES := \
- fs_config_dirs_system \
- $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
+ fs_config_dirs_system \
+ fs_config_dirs_system_ext \
+ fs_config_dirs_product \
+ fs_config_dirs_nonsystem
include $(BUILD_PHONY_PACKAGE)
-
##################################
# Generate the <p>/etc/fs_config_files binary files for each partition.
# Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable.
@@ -58,27 +57,69 @@
LOCAL_MODULE := fs_config_files
LOCAL_REQUIRED_MODULES := \
fs_config_files_system \
- $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
+ fs_config_files_system_ext \
+ fs_config_files_product \
+ fs_config_files_nonsystem
+include $(BUILD_PHONY_PACKAGE)
+
+##################################
+# Generate the system_ext/etc/fs_config_dirs binary file for the target if the
+# system_ext partition is generated. Add fs_config_dirs or fs_config_dirs_system_ext
+# to PRODUCT_PACKAGES in the device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_system_ext
+LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_system_ext)
+include $(BUILD_PHONY_PACKAGE)
+
+##################################
+# Generate the system_ext/etc/fs_config_files binary file for the target if the
+# system_ext partition is generated. Add fs_config_files or fs_config_files_system_ext
+# to PRODUCT_PACKAGES in the device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_system_ext
+LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_system_ext)
+include $(BUILD_PHONY_PACKAGE)
+
+##################################
+# Generate the product/etc/fs_config_dirs binary file for the target if the
+# product partition is generated. Add fs_config_dirs or fs_config_dirs_product
+# to PRODUCT_PACKAGES in the device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_product
+LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_product)
+include $(BUILD_PHONY_PACKAGE)
+
+##################################
+# Generate the product/etc/fs_config_files binary file for the target if the
+# product partition is generated. Add fs_config_files or fs_config_files_product
+# to PRODUCT_PACKAGES in the device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_product
+LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_product)
include $(BUILD_PHONY_PACKAGE)
##################################
# Generate the <p>/etc/fs_config_dirs binary files for all enabled partitions
-# excluding /system. Add fs_config_dirs_nonsystem to PRODUCT_PACKAGES in the
-# device make file to enable.
+# excluding /system, /system_ext and /product. Add fs_config_dirs_nonsystem to
+# PRODUCT_PACKAGES in the device make file to enable.
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_dirs_nonsystem
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_dirs_$(t))
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_dirs_$(t))
include $(BUILD_PHONY_PACKAGE)
##################################
# Generate the <p>/etc/fs_config_files binary files for all enabled partitions
-# excluding /system. Add fs_config_files_nonsystem to PRODUCT_PACKAGES in the
-# device make file to enable.
+# excluding /system, /system_ext and /product. Add fs_config_files_nonsystem to
+# PRODUCT_PACKAGES in the device make file to enable.
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_files_nonsystem
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_files_$(t))
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_files_$(t))
include $(BUILD_PHONY_PACKAGE)
##################################
@@ -134,11 +175,11 @@
ifneq ($(filter vendor,$(fs_config_generate_extra_partition_list)),)
##################################
# Generate the vendor/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_vendor to PRODUCT_PACKAGES in
-# the device make file to enable.
+# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs_vendor
+LOCAL_MODULE := _fs_config_dirs_vendor
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -158,11 +199,11 @@
##################################
# Generate the vendor/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_vendor to PRODUCT_PACKAGES in
-# the device make file to enable
+# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files_vendor
+LOCAL_MODULE := _fs_config_files_vendor
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -185,11 +226,11 @@
ifneq ($(filter oem,$(fs_config_generate_extra_partition_list)),)
##################################
# Generate the oem/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_oem to PRODUCT_PACKAGES in
-# the device make file to enable
+# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs_oem
+LOCAL_MODULE := _fs_config_dirs_oem
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -209,11 +250,11 @@
##################################
# Generate the oem/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_oem to PRODUCT_PACKAGES in
-# the device make file to enable
+# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files_oem
+LOCAL_MODULE := _fs_config_files_oem
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -236,11 +277,11 @@
ifneq ($(filter odm,$(fs_config_generate_extra_partition_list)),)
##################################
# Generate the odm/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_odm to PRODUCT_PACKAGES in
-# the device make file to enable
+# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs_odm
+LOCAL_MODULE := _fs_config_dirs_odm
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -260,11 +301,11 @@
##################################
# Generate the odm/etc/fs_config_files binary file for the target
-# Add fs_config_files of fs_config_files_odm to PRODUCT_PACKAGES in
-# the device make file to enable
+# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files_odm
+LOCAL_MODULE := _fs_config_files_odm
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -287,11 +328,11 @@
ifneq ($(filter vendor_dlkm,$(fs_config_generate_extra_partition_list)),)
##################################
# Generate the vendor_dlkm/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_vendor_dlkm to PRODUCT_PACKAGES in
+# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES in
# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs_vendor_dlkm
+LOCAL_MODULE := _fs_config_dirs_vendor_dlkm
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -311,11 +352,11 @@
##################################
# Generate the vendor_dlkm/etc/fs_config_files binary file for the target
-# Add fs_config_files of fs_config_files_vendor_dlkm to PRODUCT_PACKAGES in
+# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES in
# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files_vendor_dlkm
+LOCAL_MODULE := _fs_config_files_vendor_dlkm
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -338,11 +379,11 @@
ifneq ($(filter odm_dlkm,$(fs_config_generate_extra_partition_list)),)
##################################
# Generate the odm_dlkm/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_odm_dlkm to PRODUCT_PACKAGES in
-# the device make file to enable
+# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs_odm_dlkm
+LOCAL_MODULE := _fs_config_dirs_odm_dlkm
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -362,11 +403,11 @@
##################################
# Generate the odm_dlkm/etc/fs_config_files binary file for the target
-# Add fs_config_files of fs_config_files_odm_dlkm to PRODUCT_PACKAGES in
-# the device make file to enable
+# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files_odm_dlkm
+LOCAL_MODULE := _fs_config_files_odm_dlkm
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -386,14 +427,14 @@
endif
-ifneq ($(filter product,$(fs_config_generate_extra_partition_list)),)
+ifneq ($(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),)
##################################
# Generate the product/etc/fs_config_dirs binary file for the target
# Add fs_config_dirs or fs_config_dirs_product to PRODUCT_PACKAGES in
# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs_product
+LOCAL_MODULE := _fs_config_dirs_product
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -413,11 +454,11 @@
##################################
# Generate the product/etc/fs_config_files binary file for the target
-# Add fs_config_files of fs_config_files_product to PRODUCT_PACKAGES in
+# Add fs_config_files or fs_config_files_product to PRODUCT_PACKAGES in
# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files_product
+LOCAL_MODULE := _fs_config_files_product
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -436,14 +477,14 @@
$(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
endif
-ifneq ($(filter system_ext,$(fs_config_generate_extra_partition_list)),)
+ifneq ($(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),)
##################################
# Generate the system_ext/etc/fs_config_dirs binary file for the target
# Add fs_config_dirs or fs_config_dirs_system_ext to PRODUCT_PACKAGES in
# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_dirs_system_ext
+LOCAL_MODULE := _fs_config_dirs_system_ext
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
@@ -463,11 +504,11 @@
##################################
# Generate the system_ext/etc/fs_config_files binary file for the target
-# Add fs_config_files of fs_config_files_system_ext to PRODUCT_PACKAGES in
+# Add fs_config_files or fs_config_files_system_ext to PRODUCT_PACKAGES in
# the device make file to enable
include $(CLEAR_VARS)
-LOCAL_MODULE := fs_config_files_system_ext
+LOCAL_MODULE := _fs_config_files_system_ext
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
diff --git a/tools/fs_config/OWNERS b/tools/fs_config/OWNERS
index 5599644..7529cb9 100644
--- a/tools/fs_config/OWNERS
+++ b/tools/fs_config/OWNERS
@@ -1,2 +1 @@
-tomcherry@google.com
-salyzyn@google.com
+include platform/system/core:/janitors/OWNERS
diff --git a/tools/fs_config/README b/tools/fs_config/README
deleted file mode 100644
index 21bdeb8..0000000
--- a/tools/fs_config/README
+++ /dev/null
@@ -1,137 +0,0 @@
- _____ _____ _____ _____ __ __ _____
-/ _ \/ __\/ _ \| _ \/ \/ \/ __\
-| _ <| __|| _ || | || \/ || __|
-\__|\_/\_____/\__|__/|_____/\__ \__/\_____/
-
-The fs_config_generator.py tool uses the platform android_filesystem_config.h and the
-TARGET_FS_CONFIG_GEN files to generate the fs_config_dirs and fs_config_files files for each
-partition, as well as passwd and group files, and the generated_oem_aid.h header.
-
-The fs_config_dirs and fs_config_files binary files are interpreted by the libcutils fs_config()
-function, along with the built-in defaults, to serve as overrides to complete the results. The
-Target files are used by filesystem and adb tools to ensure that the file and directory properties
-are preserved during runtime operations. The host files in the ${OUT} directory are used in the
-final stages when building the filesystem images to set the file and directory properties.
-
-See ./fs_config_generator.py fsconfig --help for how these files are generated.
-
-The passwd and group files are formatted as documented in man pages passwd(5) and group(5) and used
-by bionic for implementing getpwnam() and related functions.
-
-See ./fs_config_generator.py passwd --help and ./fs_config_generator.py group --help for how these
-files are generated.
-
-The generated_oem_aid.h creates identifiers for non-platform AIDs for developers wishing to use them
-in their native code. To do so, include the oemaids_headers header library in the corresponding
-makefile and #include "generated_oem_aid.h" in the code wishing to use these identifiers.
-
-See ./fs_config_generator.py oemaid --help for how this file is generated.
-
-The parsing of the TARGET_FS_CONFIG_GEN files follows the Python ConfigParser specification, with
-the sections and fields as defined below. There are two types of sections, both sections require all
-options to be specified. The first section type is the "caps" section.
-
-The "caps" section follows the following syntax:
-
-[path]
-mode: Octal file mode
-user: AID_<user>
-group: AID_<group>
-caps: cap*
-
-Where:
-
-[path]
- The filesystem path to configure. A path ending in / is considered a dir,
- else its a file.
-
-mode:
- A valid octal file mode of at least 3 digits. If 3 is specified, it is
- prefixed with a 0, else mode is used as is.
-
-user:
- Either the C define for a valid AID or the friendly name. For instance both
- AID_RADIO and radio are acceptable. Note custom AIDs can be defined in the
- AID section documented below.
-
-group:
- Same as user.
-
-caps:
- The name as declared in
- system/core/include/private/android_filesystem_capability.h without the
- leading CAP_. Mixed case is allowed. Caps can also be the raw:
- * binary (0b0101)
- * octal (0455)
- * int (42)
- * hex (0xFF)
- For multiple caps, just separate by whitespace.
-
-It is an error to specify multiple sections with the same [path] in different
-files. Note that the same file may contain sections that override the previous
-section in Python versions <= 3.2. In Python 3.2 it's set to strict mode.
-
-
-The next section type is the "AID" section, for specifying OEM specific AIDS.
-
-The AID section follows the following syntax:
-
-[AID_<name>]
-value: <number>
-
-Where:
-
-[AID_<name>]
- The <name> can contain characters in the set uppercase, numbers
- and underscores.
-
-value:
- A valid C style number string. Hex, octal, binary and decimal are supported.
- See "caps" above for more details on number formatting.
-
-It is an error to specify multiple sections with the same [AID_<name>]. With
-the same constraints as [path] described above. It is also an error to specify
-multiple sections with the same value option. It is also an error to specify a
-value that is outside of the inclusive OEM ranges:
- * AID_OEM_RESERVED_START(2900) - AID_OEM_RESERVED_END(2999)
- * AID_OEM_RESERVED_2_START(5000) - AID_OEM_RESERVED_2_END(5999)
-
-as defined by system/core/include/private/android_filesystem_config.h.
-
-Ordering within the TARGET_FS_CONFIG_GEN files is not relevant. The paths for files are sorted
-like so within their respective array definition:
- * specified path before prefix match
- ** ie foo before f*
- * lexicographical less than before other
- ** ie boo before foo
-
-Given these paths:
-
-paths=['ac', 'a', 'acd', 'an', 'a*', 'aa', 'ac*']
-
-The sort order would be:
-paths=['a', 'aa', 'ac', 'acd', 'an', 'ac*', 'a*']
-
-Thus the fs_config tools will match on specified paths before attempting prefix, and match on the
-longest matching prefix.
-
-The declared AIDS are sorted in ascending numerical order based on the option "value". The string
-representation of value is preserved. Both choices were made for maximum readability of the generated
-file and to line up files. Sync lines are placed with the source file as comments in the generated
-header file.
-
-Unit Tests:
-
-From within the fs_config directory, unit tests can be executed like so:
-$ python -m unittest test_fs_config_generator.Tests
-.............
-----------------------------------------------------------------------
-Ran 13 tests in 0.004s
-
-OK
-
-One could also use nose if they would like:
-$ nose2
-
-To add new tests, simply add a test_<xxx> method to the test class. It will automatically
-get picked up and added to the test suite.
diff --git a/tools/fs_config/README.md b/tools/fs_config/README.md
new file mode 100644
index 0000000..bad5e10
--- /dev/null
+++ b/tools/fs_config/README.md
@@ -0,0 +1,84 @@
+# FS Config Generator
+
+The `fs_config_generator.py` tool uses the platform `android_filesystem_config.h` and the
+`TARGET_FS_CONFIG_GEN` files to generate the following:
+* `fs_config_dirs` and `fs_config_files` files for each partition
+* `passwd` and `group` files for each partition
+* The `generated_oem_aid.h` header
+
+## Outputs
+
+### `fs_config_dirs` and `fs_config_files`
+
+The `fs_config_dirs` and `fs_config_files` binary files are interpreted by the libcutils
+`fs_config()` function, along with the built-in defaults, to serve as overrides to complete the
+results. The Target files are used by filesystem and adb tools to ensure that the file and directory
+properties are preserved during runtime operations. The host files in the `$OUT` directory are used
+in the final stages when building the filesystem images to set the file and directory properties.
+
+See `./fs_config_generator.py fsconfig --help` for how these files are generated.
+
+### `passwd` and `group` files
+
+The `passwd` and `group` files are formatted as documented in man pages passwd(5) and group(5) and
+used by bionic for implementing `getpwnam()` and related functions.
+
+See `./fs_config_generator.py passwd --help` and `./fs_config_generator.py group --help` for how
+these files are generated.
+
+### The `generated_oem_aid.h` header
+
+The `generated_oem_aid.h` header creates identifiers for non-platform AIDs for developers wishing
+to use them in their native code. To do so, include the `oemaids_headers` header library in the
+corresponding makefile and `#include "generated_oem_aid.h"` in the code wishing to use these
+identifiers.
+
+See `./fs_config_generator.py oemaid --help` for how this file is generated.
+
+## Parsing
+
+See the documentation on [source.android.com](https://source.android.com/devices/tech/config/filesystem#configuring-aids) for details and examples.
+
+
+## Ordering
+
+Ordering within the `TARGET_FS_CONFIG_GEN` files is not relevant. The paths for files are sorted
+like so within their respective array definition:
+ * specified path before prefix match
+ * for example: foo before f*
+ * lexicographical less than before other
+ * for example: boo before foo
+
+Given these paths:
+
+ paths=['ac', 'a', 'acd', 'an', 'a*', 'aa', 'ac*']
+
+The sort order would be:
+
+ paths=['a', 'aa', 'ac', 'acd', 'an', 'ac*', 'a*']
+
+Thus the `fs_config` tools will match on specified paths before attempting prefix, and match on the
+longest matching prefix.
+
+The declared AIDs are sorted in ascending numerical order based on the option "value". The string
+representation of value is preserved. Both choices were made for maximum readability of the
+generated file and to line up files. Sync lines are placed with the source file as comments in the
+generated header file.
+
+## Unit Tests
+
+From within the `fs_config` directory, unit tests can be executed like so:
+
+ $ python -m unittest test_fs_config_generator.Tests
+ .............
+ ----------------------------------------------------------------------
+ Ran 13 tests in 0.004s
+
+ OK
+
+One could also use `nose2` if preferred:
+
+ $ nose2
+
+To add new tests, simply add a `test_<xxx>` method to the test class. It will automatically
+get picked up and added to the test suite.
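+
+As a purely illustrative sketch (the method name and assertion shown here are hypothetical; the
+real test class and its helpers live in `test_fs_config_generator.py`), a new case simply follows
+the standard unittest naming convention:
+
+    import unittest
+
+    class Tests(unittest.TestCase):
+        def test_aid_value_in_oem_range(self):
+            # Methods whose names start with test_ are discovered automatically.
+            self.assertTrue(2900 <= 2950 <= 2999)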
diff --git a/tools/fs_config/end_to_end_test/run_test.sh b/tools/fs_config/end_to_end_test/run_test.sh
index 7402276..b5a7e83 100755
--- a/tools/fs_config/end_to_end_test/run_test.sh
+++ b/tools/fs_config/end_to_end_test/run_test.sh
@@ -1,7 +1,7 @@
cd $ANDROID_BUILD_TOP/build/make/tools/fs_config/end_to_end_test
$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
- --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
--capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
--partition system \
--all-partitions vendor,product \
@@ -13,7 +13,7 @@
echo 'Fail: Mismatch between system_fs_config_files and result_system_fs_config_files'
$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
- --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
--capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
--partition system \
--all-partitions vendor,product \
@@ -25,7 +25,7 @@
echo 'Fail: Mismatch between system_fs_config_dirs and result_system_fs_config_dirs'
$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
- --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
--capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
--partition vendor \
--files \
@@ -36,7 +36,7 @@
echo 'Fail: Mismatch between vendor_fs_config_files and result_vendor_fs_config_files'
$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
- --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
--capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
--partition vendor \
--dirs \
@@ -47,7 +47,7 @@
echo 'Fail: Mismatch between vendor_fs_config_dirs and result_vendor_fs_config_dirs'
$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
- --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
--capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
--partition product \
--files \
@@ -58,7 +58,7 @@
echo 'Fail: Mismatch between product_fs_config_files and result_product_fs_config_files'
$ANDROID_BUILD_TOP/build/make/tools/fs_config/fs_config_generator.py fsconfig \
- --aid-header $ANDROID_BUILD_TOP/system/core/include/private/android_filesystem_config.h \
+ --aid-header $ANDROID_BUILD_TOP/system/core/libcutils/include/private/android_filesystem_config.h \
--capability-header $ANDROID_BUILD_TOP/bionic/libc/kernel/uapi/linux/capability.h \
--partition product \
--dirs \
diff --git a/tools/fs_config/fs_config.go b/tools/fs_config/fs_config.go
index 16bcefa..4c324fb 100644
--- a/tools/fs_config/fs_config.go
+++ b/tools/fs_config/fs_config.go
@@ -48,10 +48,10 @@
path := android.PathForModuleGen(ctx, "empty")
t.paths = android.Paths{path}
- rule := android.NewRuleBuilder()
+ rule := android.NewRuleBuilder(pctx, ctx)
rule.Command().Text("rm -rf").Output(path)
rule.Command().Text("touch").Output(path)
- rule.Build(pctx, ctx, "fs_config_empty", "create empty file")
+ rule.Build("fs_config_empty", "create empty file")
}
}
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index 940a398..098fde6 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2
"""Generates config files for Android file system properties.
This script is used for generating configuration files for configuring
diff --git a/tools/mk2bp_catalog.py b/tools/mk2bp_catalog.py
index 83abd62..c2afb9b 100755
--- a/tools/mk2bp_catalog.py
+++ b/tools/mk2bp_catalog.py
@@ -168,22 +168,24 @@
return True
return False
-def make_annotation_link(annotations, analysis, modules):
- if analysis:
- return "<a href='javascript:update_details(%d)'>%s</a>" % (
- annotations.Add(analysis, modules),
- len(analysis)
- )
- else:
- return "";
-
-
def is_clean(makefile):
for analysis in makefile.analyses.values():
if analysis:
return False
return True
+def clean_and_only_blocked_by_clean(soong, all_makefiles, makefile):
+ if not is_clean(makefile):
+ return False
+ modules = soong.reverse_makefiles[makefile.filename]
+ for module in modules:
+ for dep in soong.transitive_deps(module):
+ for filename in soong.makefiles.get(dep, []):
+ m = all_makefiles.get(filename)
+ if m and not is_clean(m):
+ return False
+ return True
+
class Annotations(object):
def __init__(self):
self.entries = []
@@ -205,6 +207,7 @@
self.makefiles = dict()
self.reverse_makefiles = dict()
self.installed = dict()
+ self.reverse_installed = dict()
self.modules = set()
for (module, module_type, problem, dependencies, makefiles, installed) in reader:
@@ -222,6 +225,29 @@
self.reverse_makefiles.setdefault(f, []).append(module)
for f in installed.strip().split(' '):
self.installed[f] = module
+ self.reverse_installed.setdefault(module, []).append(f)
+
+ def transitive_deps(self, module):
+ results = set()
+ def traverse(module):
+ for dep in self.deps.get(module, []):
+ if not dep in results:
+ results.add(dep)
+ traverse(module)
+ traverse(module)
+ return results
+
+ def contains_unblocked_modules(self, filename):
+ for m in self.reverse_makefiles[filename]:
+ if len(self.deps[m]) == 0:
+ return True
+ return False
+
+ def contains_blocked_modules(self, filename):
+ for m in self.reverse_makefiles[filename]:
+ if len(self.deps[m]) > 0:
+ return True
+ return False
def count_deps(depsdb, module, seen):
"""Based on the depsdb, count the number of transitive dependencies.
@@ -237,18 +263,6 @@
count += 1 + count_deps(depsdb, dep, seen)
return count
-def contains_unblocked_modules(soong, modules):
- for m in modules:
- if len(soong.deps[m]) == 0:
- return True
- return False
-
-def contains_blocked_modules(soong, modules):
- for m in modules:
- if len(soong.deps[m]) > 0:
- return True
- return False
-
OTHER_PARTITON = "_other"
HOST_PARTITON = "_host"
@@ -273,6 +287,27 @@
def format_module_list(modules):
return "".join(["<div>%s</div>" % format_module_link(m) for m in modules])
+def print_analysis_header(link, title):
+ print("""
+ <a name="%(link)s"></a>
+ <h2>%(title)s</h2>
+ <table>
+ <tr>
+ <th class="RowTitle">Directory</th>
+ <th class="Count">Total</th>
+ <th class="Count Clean">Easy</th>
+ <th class="Count Clean">Unblocked Clean</th>
+ <th class="Count Unblocked">Unblocked</th>
+ <th class="Count Blocked">Blocked</th>
+ <th class="Count Clean">Clean</th>
+ """ % {
+ "link": link,
+ "title": title
+ })
+ for analyzer in ANALYZERS:
+ print("""<th class="Count Warning">%s</th>""" % analyzer.title)
+ print(" </tr>")
+
def main():
parser = argparse.ArgumentParser(description="Info about remaining Android.mk files.")
parser.add_argument("--device", type=str, required=True,
@@ -287,6 +322,9 @@
help="Equivalent of $OUT_DIR, which will also be checked if"
+ " --out_dir is unset. If neither is set, default is"
+ " 'out'.")
+ parser.add_argument("--mode", type=str,
+ default="html",
+ help="output format: csv or html")
args = parser.parse_args()
@@ -297,14 +335,11 @@
args.out_dir = args.out_dir[:-1]
TARGET_DEVICE = args.device
- HOST_OUT_ROOT = args.out_dir + "host"
+ global HOST_OUT_ROOT
+ HOST_OUT_ROOT = args.out_dir + "/host"
+ global PRODUCT_OUT
PRODUCT_OUT = args.out_dir + "/target/product/%s" % TARGET_DEVICE
- if args.title:
- page_title = args.title
- else:
- page_title = "Remaining Android.mk files"
-
# Read target information
# TODO: Pull from configurable location. This is also slightly different because it's
# only a single build, where as the tree scanning we do below is all Android.mk files.
@@ -312,580 +347,688 @@
% PRODUCT_OUT, "r", errors="ignore") as csvfile:
soong = SoongData(csv.reader(csvfile))
- # Which modules are installed where
- modules_by_partition = dict()
- partitions = set()
- for installed, module in soong.installed.items():
- partition = get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT, installed)
- modules_by_partition.setdefault(partition, []).append(module)
- partitions.add(partition)
+ # Read the makefiles
+ all_makefiles = dict()
+ for filename, modules in soong.reverse_makefiles.items():
+ if filename.startswith(args.out_dir + "/"):
+ continue
+ all_makefiles[filename] = Makefile(filename)
- print("""
- <html>
- <head>
- <title>%(page_title)s</title>
- <style type="text/css">
- body, table {
- font-family: Roboto, sans-serif;
- font-size: 9pt;
- }
- body {
- margin: 0;
- padding: 0;
- display: flex;
- flex-direction: column;
- height: 100vh;
- }
- #container {
- flex: 1;
- display: flex;
- flex-direction: row;
- overflow: hidden;
- }
- #tables {
- padding: 0 20px 0 20px;
- overflow: scroll;
- flex: 2 2 600px;
- }
- #details {
- display: none;
- overflow: scroll;
- flex: 1 1 650px;
- padding: 0 20px 0 20px;
- }
- h1 {
- margin: 16px 0 16px 20px;
- }
- h2 {
- margin: 12px 0 4px 0;
- }
- .DirName {
- text-align: left;
- width: 200px;
- min-width: 200px;
- }
- .Count {
- text-align: center;
- width: 60px;
- min-width: 60px;
- max-width: 60px;
- }
- th.Clean,
- th.Unblocked {
- background-color: #1e8e3e;
- }
- th.Blocked {
- background-color: #d93025;
- }
- th.Warning {
- background-color: #e8710a;
- }
- th {
- background-color: #1a73e8;
- color: white;
- font-weight: bold;
- }
- td.Unblocked {
- background-color: #81c995;
- }
- td.Blocked {
- background-color: #f28b82;
- }
- td, th {
- padding: 2px 4px;
- border-right: 2px solid white;
- }
- tr.AospDir td {
- background-color: #e6f4ea;
- border-right-color: #e6f4ea;
- }
- tr.GoogleDir td {
- background-color: #e8f0fe;
- border-right-color: #e8f0fe;
- }
- tr.PartnerDir td {
- background-color: #fce8e6;
- border-right-color: #fce8e6;
- }
- table {
- border-spacing: 0;
- border-collapse: collapse;
- }
- div.Makefile {
- margin: 12px 0 0 0;
- }
- div.Makefile:first {
- margin-top: 0;
- }
- div.FileModules {
- padding: 4px 0 0 20px;
- }
- td.LineNo {
- vertical-align: baseline;
- padding: 6px 0 0 20px;
- width: 50px;
- vertical-align: baseline;
- }
- td.LineText {
- vertical-align: baseline;
- font-family: monospace;
- padding: 6px 0 0 0;
- }
- a.CsLink {
- font-family: monospace;
- }
- div.Help {
- width: 550px;
- }
- table.HelpColumns tr {
- border-bottom: 2px solid white;
- }
- .ModuleName {
- vertical-align: baseline;
- padding: 6px 0 0 20px;
- width: 275px;
- }
- .ModuleDeps {
- vertical-align: baseline;
- padding: 6px 0 0 0;
- }
- table#Modules td {
- vertical-align: baseline;
- }
- tr.Alt {
- background-color: #ececec;
- }
- tr.Alt td {
- border-right-color: #ececec;
- }
- .AnalysisCol {
- width: 300px;
- padding: 2px;
- line-height: 21px;
- }
- .Analysis {
- color: white;
- font-weight: bold;
- background-color: #e8710a;
- border-radius: 6px;
- margin: 4px;
- padding: 2px 6px;
- white-space: nowrap;
- }
- .Nav {
- margin: 4px 0 16px 20px;
- }
- .NavSpacer {
- display: inline-block;
- width: 6px;
- }
- .ModuleDetails {
- margin-top: 20px;
- }
- .ModuleDetails td {
- vertical-align: baseline;
- }
- </style>
- </head>
- <body>
- <h1>%(page_title)s</h1>
- <div class="Nav">
- <a href='#help'>Help</a>
- <span class='NavSpacer'></span><span class='NavSpacer'> </span>
- Partitions:
- """ % {
- "page_title": page_title,
- })
- for partition in sorted(partitions):
- print("<a href='#partition_%s'>%s</a><span class='NavSpacer'></span>" % (partition, partition))
+ if args.mode == "html":
+ HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+ elif args.mode == "csv":
+ CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
- print("""
- <span class='NavSpacer'></span><span class='NavSpacer'> </span>
- </div>
- <div id="container">
- <div id="tables">
- <a name="help"></a>
- <div class="Help">
- <p>
- This page analyzes the remaining Android.mk files in the Android Source tree.
- <p>
- The modules are first broken down by which of the device filesystem partitions
- they are installed to. This also includes host tools and testcases which don't
- actually reside in their own partition but convenitely group together.
- <p>
- The makefiles for each partition are further are grouped into a set of directories
- aritrarily picked to break down the problem size by owners.
- <ul style="width: 300px">
- <li style="background-color: #e6f4ea">AOSP directories are colored green.</li>
- <li style="background-color: #e8f0fe">Google directories are colored blue.</li>
- <li style="background-color: #fce8e6">Other partner directories are colored red.</li>
- </ul>
- Each of the makefiles are scanned for issues that are likely to come up during
- conversion to soong. Clicking the number in each cell shows additional information,
- including the line that triggered the warning.
- <p>
- <table class="HelpColumns">
- <tr>
- <th>Total</th>
- <td>The total number of makefiles in this each directory.</td>
- </tr>
- <tr>
- <th class="Unblocked">Unblocked</th>
- <td>Makefiles containing one or more modules that don't have any
- additional dependencies pending before conversion.</td>
- </tr>
- <tr>
- <th class="Blocked">Blocked</th>
- <td>Makefiles containiong one or more modules which <i>do</i> have
- additional prerequesite depenedencies that are not yet converted.</td>
- </tr>
- <tr>
- <th class="Clean">Clean</th>
- <td>The number of makefiles that have none of the following warnings.</td>
- </tr>
- <tr>
- <th class="Warning">ifeq / ifneq</th>
- <td>Makefiles that use <code>ifeq</code> or <code>ifneq</code>. i.e.
- conditionals.</td>
- </tr>
- <tr>
- <th class="Warning">Wacky Includes</th>
- <td>Makefiles that <code>include</code> files other than the standard build-system
- defined template and macros.</td>
- </tr>
- <tr>
- <th class="Warning">Calls base_rules</th>
- <td>Makefiles that include base_rules.mk directly.</td>
- </tr>
- <tr>
- <th class="Warning">Calls define</th>
- <td>Makefiles that define their own macros. Some of these are easy to convert
- to soong <code>defaults</code>, but others are complex.</td>
- </tr>
- <tr>
- <th class="Warning">Has ../</th>
- <td>Makefiles containing the string "../" outside of a comment. These likely
- access files outside their directories.</td>
- </tr>
- <tr>
- <th class="Warning">dist-for-goals</th>
- <td>Makefiles that call <code>dist-for-goals</code> directly.</td>
- </tr>
- <tr>
- <th class="Warning">.PHONY</th>
- <td>Makefiles that declare .PHONY targets.</td>
- </tr>
- <tr>
- <th class="Warning">renderscript</th>
- <td>Makefiles defining targets that depend on <code>.rscript</code> source files.</td>
- </tr>
- <tr>
- <th class="Warning">vts src</th>
- <td>Makefiles defining targets that depend on <code>.vts</code> source files.</td>
- </tr>
- <tr>
- <th class="Warning">COPY_HEADERS</th>
- <td>Makefiles using LOCAL_COPY_HEADERS.</td>
- </tr>
- </table>
- <p>
- Following the list of directories is a list of the modules that are installed on
- each partition. Potential issues from their makefiles are listed, as well as the
- total number of dependencies (both blocking that module and blocked by that module)
- and the list of direct dependencies. Note: The number is the number of all transitive
- dependencies and the list of modules is only the direct dependencies.
- </div>
- """)
+class HtmlProcessor(object):
+ def __init__(self, args, soong, all_makefiles):
+ self.args = args
+ self.soong = soong
+ self.all_makefiles = all_makefiles
+ self.annotations = Annotations()
- annotations = Annotations()
+ def execute(self):
+ if self.args.title:
+ page_title = self.args.title
+ else:
+ page_title = "Remaining Android.mk files"
- # For each partition
- makefiles_for_partitions = dict()
- for partition in sorted(partitions):
- modules = modules_by_partition[partition]
-
- makefiles = set(itertools.chain.from_iterable(
- [soong.makefiles[module] for module in modules]))
-
- # Read makefiles
- summary = Summary()
- for filename in makefiles:
- if not filename.startswith(args.out_dir + "/"):
- summary.Add(Makefile(filename))
-
- # Categorize directories by who is responsible
- aosp_dirs = []
- google_dirs = []
- partner_dirs = []
- for dirname in sorted(summary.directories.keys()):
- if is_aosp(dirname):
- aosp_dirs.append(dirname)
- elif is_google(dirname):
- google_dirs.append(dirname)
- else:
- partner_dirs.append(dirname)
+ # Which modules are installed where
+ modules_by_partition = dict()
+ partitions = set()
+ for installed, module in self.soong.installed.items():
+ partition = get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT, installed)
+ modules_by_partition.setdefault(partition, []).append(module)
+ partitions.add(partition)
print("""
- <a name="partition_%(partition)s"></a>
- <h2>%(partition)s</h2>
- <table>
- <tr>
- <th class="DirName">Directory</th>
- <th class="Count">Total</th>
- <th class="Count Unblocked">Unblocked</th>
- <th class="Count Blocked">Blocked</th>
- <th class="Count Clean">Clean</th>
+ <html>
+ <head>
+ <title>%(page_title)s</title>
+ <style type="text/css">
+ body, table {
+ font-family: Roboto, sans-serif;
+ font-size: 9pt;
+ }
+ body {
+ margin: 0;
+ padding: 0;
+ display: flex;
+ flex-direction: column;
+ height: 100vh;
+ }
+ #container {
+ flex: 1;
+ display: flex;
+ flex-direction: row;
+ overflow: hidden;
+ }
+ #tables {
+ padding: 0 20px 40px 20px;
+ overflow: scroll;
+ flex: 2 2 600px;
+ }
+ #details {
+ display: none;
+ overflow: scroll;
+ flex: 1 1 650px;
+ padding: 0 20px 0 20px;
+ }
+ h1 {
+ margin: 16px 0 16px 20px;
+ }
+ h2 {
+ margin: 12px 0 4px 0;
+ }
+ .RowTitle {
+ text-align: left;
+ width: 200px;
+ min-width: 200px;
+ }
+ .Count {
+ text-align: center;
+ width: 60px;
+ min-width: 60px;
+ max-width: 60px;
+ }
+ th.Clean,
+ th.Unblocked {
+ background-color: #1e8e3e;
+ }
+ th.Blocked {
+ background-color: #d93025;
+ }
+ th.Warning {
+ background-color: #e8710a;
+ }
+ th {
+ background-color: #1a73e8;
+ color: white;
+ font-weight: bold;
+ }
+ td.Unblocked {
+ background-color: #81c995;
+ }
+ td.Blocked {
+ background-color: #f28b82;
+ }
+ td, th {
+ padding: 2px 4px;
+ border-right: 2px solid white;
+ }
+ tr.TotalRow td {
+ background-color: white;
+ border-right-color: white;
+ }
+ tr.AospDir td {
+ background-color: #e6f4ea;
+ border-right-color: #e6f4ea;
+ }
+ tr.GoogleDir td {
+ background-color: #e8f0fe;
+ border-right-color: #e8f0fe;
+ }
+ tr.PartnerDir td {
+ background-color: #fce8e6;
+ border-right-color: #fce8e6;
+ }
+ table {
+ border-spacing: 0;
+ border-collapse: collapse;
+ }
+ div.Makefile {
+ margin: 12px 0 0 0;
+ }
+ div.Makefile:first {
+ margin-top: 0;
+ }
+ div.FileModules {
+ padding: 4px 0 0 20px;
+ }
+ td.LineNo {
+ vertical-align: baseline;
+ padding: 6px 0 0 20px;
+ width: 50px;
+ vertical-align: baseline;
+ }
+ td.LineText {
+ vertical-align: baseline;
+ font-family: monospace;
+ padding: 6px 0 0 0;
+ }
+ a.CsLink {
+ font-family: monospace;
+ }
+ div.Help {
+ width: 550px;
+ }
+ table.HelpColumns tr {
+ border-bottom: 2px solid white;
+ }
+ .ModuleName {
+ vertical-align: baseline;
+ padding: 6px 0 0 20px;
+ width: 275px;
+ }
+ .ModuleDeps {
+ vertical-align: baseline;
+ padding: 6px 0 0 0;
+ }
+ table#Modules td {
+ vertical-align: baseline;
+ }
+ tr.Alt {
+ background-color: #ececec;
+ }
+ tr.Alt td {
+ border-right-color: #ececec;
+ }
+ .AnalysisCol {
+ width: 300px;
+ padding: 2px;
+ line-height: 21px;
+ }
+ .Analysis {
+ color: white;
+ font-weight: bold;
+ background-color: #e8710a;
+ border-radius: 6px;
+ margin: 4px;
+ padding: 2px 6px;
+ white-space: nowrap;
+ }
+ .Nav {
+ margin: 4px 0 16px 20px;
+ }
+ .NavSpacer {
+ display: inline-block;
+ width: 6px;
+ }
+ .ModuleDetails {
+ margin-top: 20px;
+ }
+ .ModuleDetails td {
+ vertical-align: baseline;
+ }
+ </style>
+ </head>
+ <body>
+ <h1>%(page_title)s</h1>
+ <div class="Nav">
+ <a href='#help'>Help</a>
+ <span class='NavSpacer'></span><span class='NavSpacer'> </span>
+ Partitions:
""" % {
- "partition": partition
+ "page_title": page_title,
})
+ for partition in sorted(partitions):
+ print("<a href='#partition_%s'>%s</a><span class='NavSpacer'></span>" % (partition, partition))
- for analyzer in ANALYZERS:
- print("""<th class="Count Warning">%s</th>""" % analyzer.title)
-
- print(" </tr>")
- for dirgroup, rowclass in [(aosp_dirs, "AospDir"),
- (google_dirs, "GoogleDir"),
- (partner_dirs, "PartnerDir"),]:
- for dirname in dirgroup:
- makefiles = summary.directories[dirname]
-
- all_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles]
- clean_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
- if is_clean(makefile)]
- unblocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
- if contains_unblocked_modules(soong,
- soong.reverse_makefiles[makefile.filename])]
- blocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
- if contains_blocked_modules(soong,
- soong.reverse_makefiles[makefile.filename])]
-
- print("""
- <tr class="%(rowclass)s">
- <td class="DirName">%(dirname)s</td>
- <td class="Count">%(makefiles)s</td>
- <td class="Count">%(unblocked)s</td>
- <td class="Count">%(blocked)s</td>
- <td class="Count">%(clean)s</td>
- """ % {
- "rowclass": rowclass,
- "dirname": dirname,
- "makefiles": make_annotation_link(annotations, all_makefiles, modules),
- "unblocked": make_annotation_link(annotations, unblocked_makefiles, modules),
- "blocked": make_annotation_link(annotations, blocked_makefiles, modules),
- "clean": make_annotation_link(annotations, clean_makefiles, modules),
- })
- for analyzer in ANALYZERS:
- analyses = [m.analyses.get(analyzer) for m in makefiles if m.analyses.get(analyzer)]
- print("""<td class="Count">%s</td>"""
- % make_annotation_link(annotations, analyses, modules))
-
- print(" </tr>")
print("""
- </table>
+ <span class='NavSpacer'></span><span class='NavSpacer'> </span>
+ <a href='#summary'>Overall Summary</a>
+ </div>
+ <div id="container">
+ <div id="tables">
+ <a name="help"></a>
+ <div class="Help">
+ <p>
+ This page analyzes the remaining Android.mk files in the Android Source tree.
+ <p>
+ The modules are first broken down by which of the device filesystem partitions
+ they are installed to. This also includes host tools and testcases which don't
+ actually reside in their own partition but conveniently group together.
+ <p>
+ The makefiles for each partition are further grouped into a set of directories
+ arbitrarily picked to break down the problem size by owners.
+ <ul style="width: 300px">
+ <li style="background-color: #e6f4ea">AOSP directories are colored green.</li>
+ <li style="background-color: #e8f0fe">Google directories are colored blue.</li>
+ <li style="background-color: #fce8e6">Other partner directories are colored red.</li>
+ </ul>
+ Each of the makefiles is scanned for issues that are likely to come up during
+ conversion to soong. Clicking the number in each cell shows additional information,
+ including the line that triggered the warning.
+ <p>
+ <table class="HelpColumns">
+ <tr>
+ <th>Total</th>
+ <td>The total number of makefiles in each directory.</td>
+ </tr>
+ <tr>
+ <th class="Clean">Easy</th>
+ <td>The number of makefiles that have no warnings themselves, and also
+ none of their dependencies have warnings either.</td>
+ </tr>
+ <tr>
+ <th class="Clean">Unblocked Clean</th>
+ <td>The number of makefiles that are both Unblocked and Clean.</td>
+ </tr>
+
+ <tr>
+ <th class="Unblocked">Unblocked</th>
+ <td>Makefiles containing one or more modules that don't have any
+ additional dependencies pending before conversion.</td>
+ </tr>
+ <tr>
+ <th class="Blocked">Blocked</th>
+ <td>Makefiles containing one or more modules which <i>do</i> have
+ additional prerequisite dependencies that are not yet converted.</td>
+ </tr>
+ <tr>
+ <th class="Clean">Clean</th>
+ <td>The number of makefiles that have none of the following warnings.</td>
+ </tr>
+ <tr>
+ <th class="Warning">ifeq / ifneq</th>
+ <td>Makefiles that use <code>ifeq</code> or <code>ifneq</code>. i.e.
+ conditionals.</td>
+ </tr>
+ <tr>
+ <th class="Warning">Wacky Includes</th>
+ <td>Makefiles that <code>include</code> files other than the standard build-system
+ defined template and macros.</td>
+ </tr>
+ <tr>
+ <th class="Warning">Calls base_rules</th>
+ <td>Makefiles that include base_rules.mk directly.</td>
+ </tr>
+ <tr>
+ <th class="Warning">Calls define</th>
+ <td>Makefiles that define their own macros. Some of these are easy to convert
+ to soong <code>defaults</code>, but others are complex.</td>
+ </tr>
+ <tr>
+ <th class="Warning">Has ../</th>
+ <td>Makefiles containing the string "../" outside of a comment. These likely
+ access files outside their directories.</td>
+ </tr>
+ <tr>
+ <th class="Warning">dist-for-goals</th>
+ <td>Makefiles that call <code>dist-for-goals</code> directly.</td>
+ </tr>
+ <tr>
+ <th class="Warning">.PHONY</th>
+ <td>Makefiles that declare .PHONY targets.</td>
+ </tr>
+ <tr>
+ <th class="Warning">renderscript</th>
+ <td>Makefiles defining targets that depend on <code>.rscript</code> source files.</td>
+ </tr>
+ <tr>
+ <th class="Warning">vts src</th>
+ <td>Makefiles defining targets that depend on <code>.vts</code> source files.</td>
+ </tr>
+ <tr>
+ <th class="Warning">COPY_HEADERS</th>
+ <td>Makefiles using LOCAL_COPY_HEADERS.</td>
+ </tr>
+ </table>
+ <p>
+ Following the list of directories is a list of the modules that are installed on
+ each partition. Potential issues from their makefiles are listed, as well as the
+ total number of dependencies (both blocking that module and blocked by that module)
+ and the list of direct dependencies. Note: The number is the number of all transitive
+ dependencies and the list of modules is only the direct dependencies.
+ </div>
""")
- module_details = [(count_deps(soong.deps, m, []), -count_deps(soong.reverse_deps, m, []), m)
- for m in modules]
- module_details.sort()
- module_details = [m[2] for m in module_details]
- print("""
- <table class="ModuleDetails">""")
- print("<tr>")
- print(" <th>Module Name</th>")
- print(" <th>Issues</th>")
- print(" <th colspan='2'>Blocked By</th>")
- print(" <th colspan='2'>Blocking</th>")
- print("</tr>")
- altRow = True
- for module in module_details:
- analyses = set()
- for filename in soong.makefiles[module]:
- makefile = summary.makefiles.get(filename)
+ overall_summary = Summary()
+
+ # For each partition
+ for partition in sorted(partitions):
+ modules = modules_by_partition[partition]
+
+ makefiles = set(itertools.chain.from_iterable(
+ [self.soong.makefiles[module] for module in modules]))
+
+ # Read makefiles
+ summary = Summary()
+ for filename in makefiles:
+ makefile = self.all_makefiles.get(filename)
if makefile:
- for analyzer, analysis in makefile.analyses.items():
- if analysis:
- analyses.add(analyzer.title)
+ summary.Add(makefile)
+ overall_summary.Add(makefile)
- altRow = not altRow
- print("<tr class='%s'>" % ("Alt" if altRow else "",))
- print(" <td><a name='module_%s'></a>%s</td>" % (module, module))
- print(" <td class='AnalysisCol'>%s</td>" % " ".join(["<span class='Analysis'>%s</span>" % title
- for title in analyses]))
- print(" <td>%s</td>" % count_deps(soong.deps, module, []))
- print(" <td>%s</td>" % format_module_list(soong.deps.get(module, [])))
- print(" <td>%s</td>" % count_deps(soong.reverse_deps, module, []))
- print(" <td>%s</td>" % format_module_list(soong.reverse_deps.get(module, [])))
+ # Categorize directories by who is responsible
+ aosp_dirs = []
+ google_dirs = []
+ partner_dirs = []
+ for dirname in sorted(summary.directories.keys()):
+ if is_aosp(dirname):
+ aosp_dirs.append(dirname)
+ elif is_google(dirname):
+ google_dirs.append(dirname)
+ else:
+ partner_dirs.append(dirname)
+
+ print_analysis_header("partition_" + partition, partition)
+
+ for dirgroup, rowclass in [(aosp_dirs, "AospDir"),
+ (google_dirs, "GoogleDir"),
+ (partner_dirs, "PartnerDir"),]:
+ for dirname in dirgroup:
+ self.print_analysis_row(summary, modules,
+ dirname, rowclass, summary.directories[dirname])
+
+ self.print_analysis_row(summary, modules,
+ "Total", "TotalRow",
+ set(itertools.chain.from_iterable(summary.directories.values())))
+ print("""
+ </table>
+ """)
+
+ module_details = [(count_deps(self.soong.deps, m, []),
+ -count_deps(self.soong.reverse_deps, m, []), m)
+ for m in modules]
+ module_details.sort()
+ module_details = [m[2] for m in module_details]
+ print("""
+ <table class="ModuleDetails">""")
+ print("<tr>")
+ print(" <th>Module Name</th>")
+ print(" <th>Issues</th>")
+ print(" <th colspan='2'>Blocked By</th>")
+ print(" <th colspan='2'>Blocking</th>")
print("</tr>")
- print("""</table>""")
+ altRow = True
+ for module in module_details:
+ analyses = set()
+ for filename in self.soong.makefiles[module]:
+ makefile = summary.makefiles.get(filename)
+ if makefile:
+ for analyzer, analysis in makefile.analyses.items():
+ if analysis:
+ analyses.add(analyzer.title)
- print("""
- <script type="text/javascript">
- function close_details() {
- document.getElementById('details').style.display = 'none';
- }
+ altRow = not altRow
+ print("<tr class='%s'>" % ("Alt" if altRow else "",))
+ print(" <td><a name='module_%s'></a>%s</td>" % (module, module))
+ print(" <td class='AnalysisCol'>%s</td>" % " ".join(["<span class='Analysis'>%s</span>" % title
+ for title in analyses]))
+ print(" <td>%s</td>" % count_deps(self.soong.deps, module, []))
+ print(" <td>%s</td>" % format_module_list(self.soong.deps.get(module, [])))
+ print(" <td>%s</td>" % count_deps(self.soong.reverse_deps, module, []))
+ print(" <td>%s</td>" % format_module_list(self.soong.reverse_deps.get(module, [])))
+ print("</tr>")
+ print("""</table>""")
- class LineMatch {
- constructor(lineno, text) {
- this.lineno = lineno;
- this.text = text;
- }
- }
+ print_analysis_header("summary", "Overall Summary")
- class Analysis {
- constructor(filename, modules, line_matches) {
- this.filename = filename;
- this.modules = modules;
- this.line_matches = line_matches;
- }
- }
+ modules = [module for installed, module in self.soong.installed.items()]
+ self.print_analysis_row(overall_summary, modules,
+ "All Makefiles", "TotalRow",
+ set(itertools.chain.from_iterable(overall_summary.directories.values())))
+ print("""
+ </table>
+ """)
- class Module {
- constructor(deps) {
- this.deps = deps;
- }
- }
-
- function make_module_link(module) {
- var a = document.createElement('a');
- a.className = 'ModuleLink';
- a.innerText = module;
- a.href = '#module_' + module;
- return a;
- }
-
- function update_details(id) {
- document.getElementById('details').style.display = 'block';
-
- var analyses = ANALYSIS[id];
-
- var details = document.getElementById("details_data");
- while (details.firstChild) {
- details.removeChild(details.firstChild);
+ print("""
+ <script type="text/javascript">
+ function close_details() {
+ document.getElementById('details').style.display = 'none';
}
- for (var i=0; i<analyses.length; i++) {
- var analysis = analyses[i];
+ class LineMatch {
+ constructor(lineno, text) {
+ this.lineno = lineno;
+ this.text = text;
+ }
+ }
- var makefileDiv = document.createElement('div');
- makefileDiv.className = 'Makefile';
- details.appendChild(makefileDiv);
+ class Analysis {
+ constructor(filename, modules, line_matches) {
+ this.filename = filename;
+ this.modules = modules;
+ this.line_matches = line_matches;
+ }
+ }
- var fileA = document.createElement('a');
- makefileDiv.appendChild(fileA);
- fileA.className = 'CsLink';
- fileA.href = '%(codesearch)s' + analysis.filename;
- fileA.innerText = analysis.filename;
- fileA.target = "_blank";
+ class Module {
+ constructor(deps) {
+ this.deps = deps;
+ }
+ }
- if (analysis.modules.length > 0) {
- var moduleTable = document.createElement('table');
- details.appendChild(moduleTable);
+ function make_module_link(module) {
+ var a = document.createElement('a');
+ a.className = 'ModuleLink';
+ a.innerText = module;
+ a.href = '#module_' + module;
+ return a;
+ }
- for (var j=0; j<analysis.modules.length; j++) {
- var moduleRow = document.createElement('tr');
- moduleTable.appendChild(moduleRow);
+ function update_details(id) {
+ document.getElementById('details').style.display = 'block';
- var moduleNameCell = document.createElement('td');
- moduleRow.appendChild(moduleNameCell);
- moduleNameCell.className = 'ModuleName';
- moduleNameCell.appendChild(make_module_link(analysis.modules[j]));
+ var analyses = ANALYSIS[id];
- var moduleData = MODULE_DATA[analysis.modules[j]];
- console.log(moduleData);
+ var details = document.getElementById("details_data");
+ while (details.firstChild) {
+ details.removeChild(details.firstChild);
+ }
- var depCell = document.createElement('td');
- moduleRow.appendChild(depCell);
+ for (var i=0; i<analyses.length; i++) {
+ var analysis = analyses[i];
- if (moduleData.deps.length == 0) {
- depCell.className = 'ModuleDeps Unblocked';
- depCell.innerText = 'UNBLOCKED';
- } else {
- depCell.className = 'ModuleDeps Blocked';
+ var makefileDiv = document.createElement('div');
+ makefileDiv.className = 'Makefile';
+ details.appendChild(makefileDiv);
- for (var k=0; k<moduleData.deps.length; k++) {
- depCell.appendChild(make_module_link(moduleData.deps[k]));
- depCell.appendChild(document.createElement('br'));
+ var fileA = document.createElement('a');
+ makefileDiv.appendChild(fileA);
+ fileA.className = 'CsLink';
+ fileA.href = '%(codesearch)s' + analysis.filename;
+ fileA.innerText = analysis.filename;
+ fileA.target = "_blank";
+
+ if (analysis.modules.length > 0) {
+ var moduleTable = document.createElement('table');
+ details.appendChild(moduleTable);
+
+ for (var j=0; j<analysis.modules.length; j++) {
+ var moduleRow = document.createElement('tr');
+ moduleTable.appendChild(moduleRow);
+
+ var moduleNameCell = document.createElement('td');
+ moduleRow.appendChild(moduleNameCell);
+ moduleNameCell.className = 'ModuleName';
+ moduleNameCell.appendChild(make_module_link(analysis.modules[j]));
+
+ var moduleData = MODULE_DATA[analysis.modules[j]];
+ console.log(moduleData);
+
+ var depCell = document.createElement('td');
+ moduleRow.appendChild(depCell);
+
+ if (moduleData.deps.length == 0) {
+ depCell.className = 'ModuleDeps Unblocked';
+ depCell.innerText = 'UNBLOCKED';
+ } else {
+ depCell.className = 'ModuleDeps Blocked';
+
+ for (var k=0; k<moduleData.deps.length; k++) {
+ depCell.appendChild(make_module_link(moduleData.deps[k]));
+ depCell.appendChild(document.createElement('br'));
+ }
}
}
}
- }
- if (analysis.line_matches.length > 0) {
- var lineTable = document.createElement('table');
- details.appendChild(lineTable);
+ if (analysis.line_matches.length > 0) {
+ var lineTable = document.createElement('table');
+ details.appendChild(lineTable);
- for (var j=0; j<analysis.line_matches.length; j++) {
- var line_match = analysis.line_matches[j];
+ for (var j=0; j<analysis.line_matches.length; j++) {
+ var line_match = analysis.line_matches[j];
- var lineRow = document.createElement('tr');
- lineTable.appendChild(lineRow);
+ var lineRow = document.createElement('tr');
+ lineTable.appendChild(lineRow);
- var linenoCell = document.createElement('td');
- lineRow.appendChild(linenoCell);
- linenoCell.className = 'LineNo';
+ var linenoCell = document.createElement('td');
+ lineRow.appendChild(linenoCell);
+ linenoCell.className = 'LineNo';
- var linenoA = document.createElement('a');
- linenoCell.appendChild(linenoA);
- linenoA.className = 'CsLink';
- linenoA.href = '%(codesearch)s' + analysis.filename
- + ';l=' + line_match.lineno;
- linenoA.innerText = line_match.lineno;
- linenoA.target = "_blank";
+ var linenoA = document.createElement('a');
+ linenoCell.appendChild(linenoA);
+ linenoA.className = 'CsLink';
+ linenoA.href = '%(codesearch)s' + analysis.filename
+ + ';l=' + line_match.lineno;
+ linenoA.innerText = line_match.lineno;
+ linenoA.target = "_blank";
- var textCell = document.createElement('td');
- lineRow.appendChild(textCell);
- textCell.className = 'LineText';
- textCell.innerText = line_match.text;
+ var textCell = document.createElement('td');
+ lineRow.appendChild(textCell);
+ textCell.className = 'LineText';
+ textCell.innerText = line_match.text;
+ }
}
}
}
- }
- var ANALYSIS = [
- """ % {
- "codesearch": args.codesearch,
- })
- for entry, mods in annotations.entries:
- print(" [")
- for analysis in entry:
- print(" new Analysis('%(filename)s', %(modules)s, [%(line_matches)s])," % {
- "filename": analysis.filename,
- #"modules": json.dumps([m for m in mods if m in filename in soong.makefiles[m]]),
- "modules": json.dumps(
- [m for m in soong.reverse_makefiles[analysis.filename] if m in mods]),
- "line_matches": ", ".join([
- "new LineMatch(%d, %s)" % (lineno, json.dumps(text))
- for lineno, text in analysis.line_matches]),
+ var ANALYSIS = [
+ """ % {
+ "codesearch": self.args.codesearch,
})
- print(" ],")
- print("""
- ];
- var MODULE_DATA = {
- """)
- for module in soong.modules:
- print(" '%(name)s': new Module(%(deps)s)," % {
- "name": module,
- "deps": json.dumps(soong.deps[module]),
- })
- print("""
- };
- </script>
+ for entry, mods in self.annotations.entries:
+ print(" [")
+ for analysis in entry:
+ print(" new Analysis('%(filename)s', %(modules)s, [%(line_matches)s])," % {
+ "filename": analysis.filename,
+ #"modules": json.dumps([m for m in mods if m in filename in self.soong.makefiles[m]]),
+ "modules": json.dumps(
+ [m for m in self.soong.reverse_makefiles[analysis.filename] if m in mods]),
+ "line_matches": ", ".join([
+ "new LineMatch(%d, %s)" % (lineno, json.dumps(text))
+ for lineno, text in analysis.line_matches]),
+ })
+ print(" ],")
+ print("""
+ ];
+ var MODULE_DATA = {
+ """)
+ for module in self.soong.modules:
+ print(" '%(name)s': new Module(%(deps)s)," % {
+ "name": module,
+ "deps": json.dumps(self.soong.deps[module]),
+ })
+ print("""
+ };
+ </script>
- """)
+ """)
- print("""
- </div> <!-- id=tables -->
- <div id="details">
- <div style="text-align: right;">
- <a href="javascript:close_details();">
- <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"/></svg>
- </a>
+ print("""
+ </div> <!-- id=tables -->
+ <div id="details">
+ <div style="text-align: right;">
+ <a href="javascript:close_details();">
+ <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12z"/></svg>
+ </a>
+ </div>
+ <div id="details_data"></div>
</div>
- <div id="details_data"></div>
- </div>
- </body>
- </html>
- """)
+ </body>
+ </html>
+ """)
+
+ def traverse_ready_makefiles(self, summary, makefiles):
+ return [Analysis(makefile.filename, []) for makefile in makefiles
+ if clean_and_only_blocked_by_clean(self.soong, self.all_makefiles, makefile)]
+
+ def print_analysis_row(self, summary, modules, rowtitle, rowclass, makefiles):
+ all_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles]
+ clean_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+ if is_clean(makefile)]
+ easy_makefiles = self.traverse_ready_makefiles(summary, makefiles)
+ unblocked_clean_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+ if (self.soong.contains_unblocked_modules(makefile.filename)
+ and is_clean(makefile))]
+ unblocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+ if self.soong.contains_unblocked_modules(makefile.filename)]
+ blocked_makefiles = [Analysis(makefile.filename, []) for makefile in makefiles
+ if self.soong.contains_blocked_modules(makefile.filename)]
+
+ print("""
+ <tr class="%(rowclass)s">
+ <td class="RowTitle">%(rowtitle)s</td>
+ <td class="Count">%(makefiles)s</td>
+ <td class="Count">%(easy)s</td>
+ <td class="Count">%(unblocked_clean)s</td>
+ <td class="Count">%(unblocked)s</td>
+ <td class="Count">%(blocked)s</td>
+ <td class="Count">%(clean)s</td>
+ """ % {
+ "rowclass": rowclass,
+ "rowtitle": rowtitle,
+ "makefiles": self.make_annotation_link(all_makefiles, modules),
+ "unblocked": self.make_annotation_link(unblocked_makefiles, modules),
+ "blocked": self.make_annotation_link(blocked_makefiles, modules),
+ "clean": self.make_annotation_link(clean_makefiles, modules),
+ "unblocked_clean": self.make_annotation_link(unblocked_clean_makefiles, modules),
+ "easy": self.make_annotation_link(easy_makefiles, modules),
+ })
+
+ for analyzer in ANALYZERS:
+ analyses = [m.analyses.get(analyzer) for m in makefiles if m.analyses.get(analyzer)]
+ print("""<td class="Count">%s</td>"""
+ % self.make_annotation_link(analyses, modules))
+
+ print(" </tr>")
+
+ def make_annotation_link(self, analysis, modules):
+ if analysis:
+ return "<a href='javascript:update_details(%d)'>%s</a>" % (
+ self.annotations.Add(analysis, modules),
+ len(analysis)
+ )
+ else:
+ return "";
+
+class CsvProcessor(object):
+ def __init__(self, args, soong, all_makefiles):
+ self.args = args
+ self.soong = soong
+ self.all_makefiles = all_makefiles
+
+ def execute(self):
+ csvout = csv.writer(sys.stdout)
+
+ # Title row
+ row = ["Filename", "Module", "Partitions", "Easy", "Unblocked Clean", "Unblocked",
+ "Blocked", "Clean"]
+ for analyzer in ANALYZERS:
+ row.append(analyzer.title)
+ csvout.writerow(row)
+
+ # Makefile & module data
+ for filename in sorted(self.all_makefiles.keys()):
+ makefile = self.all_makefiles[filename]
+ for module in self.soong.reverse_makefiles[filename]:
+ row = [filename, module]
+ # Partitions
+ row.append(";".join(sorted(set([get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT,
+ installed)
+ for installed
+ in self.soong.reverse_installed.get(module, [])]))))
+ # Easy
+ row.append(1
+ if clean_and_only_blocked_by_clean(self.soong, self.all_makefiles, makefile)
+ else "")
+ # Unblocked Clean
+ row.append(1
+ if (self.soong.contains_unblocked_modules(makefile.filename) and is_clean(makefile))
+ else "")
+ # Unblocked
+ row.append(1 if self.soong.contains_unblocked_modules(makefile.filename) else "")
+ # Blocked
+ row.append(1 if self.soong.contains_blocked_modules(makefile.filename) else "")
+ # Clean
+ row.append(1 if is_clean(makefile) else "")
+ # Analysis
+ for analyzer in ANALYZERS:
+ row.append(1 if makefile.analyses.get(analyzer) else "")
+ # Write results
+ csvout.writerow(row)
if __name__ == "__main__":
main()
diff --git a/tools/product_config/Android.bp b/tools/product_config/Android.bp
new file mode 100644
index 0000000..287ed5a
--- /dev/null
+++ b/tools/product_config/Android.bp
@@ -0,0 +1,23 @@
+java_defaults {
+ name: "product-config-defaults",
+ srcs: ["src/**/*.java"],
+}
+
+java_binary_host {
+ name: "product-config",
+ defaults: ["product-config-defaults"],
+ manifest: "MANIFEST.MF"
+}
+
+java_test_host {
+ name: "product-config-test",
+ defaults: ["product-config-defaults"],
+ srcs: [
+ "test/**/*.java",
+ ],
+ static_libs: [
+ "junit"
+ ],
+ test_suites: ["general-tests"]
+}
+
diff --git a/tools/product_config/MANIFEST.MF b/tools/product_config/MANIFEST.MF
new file mode 100644
index 0000000..db88df3
--- /dev/null
+++ b/tools/product_config/MANIFEST.MF
@@ -0,0 +1,2 @@
+Manifest-Version: 1.0
+Main-Class: com.android.build.config.Main
diff --git a/tools/product_config/TEST_MAPPING b/tools/product_config/TEST_MAPPING
new file mode 100644
index 0000000..d3568f1
--- /dev/null
+++ b/tools/product_config/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "product_config_test"
+ }
+ ]
+}
diff --git a/tools/product_config/src/com/android/build/config/ErrorReporter.java b/tools/product_config/src/com/android/build/config/ErrorReporter.java
new file mode 100644
index 0000000..f382b4e
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ErrorReporter.java
@@ -0,0 +1,263 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.lang.reflect.Field;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Base class for reporting errors.
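+ * <p>
+ * Illustrative usage, relying on the {@link Category} constants that a subclass
+ * such as {@link Errors} defines:
+ * <pre>
+ *   Errors errors = new Errors();
+ *   errors.add(errors.ERROR_COMMAND_LINE, new Position("some/file.mk", 3), "message");
+ *   if (errors.hadError()) {
+ *       errors.printErrors(System.err);
+ *   }
+ * </pre>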
+ */
+public class ErrorReporter {
+ /**
+ * List of Entries that have occurred.
+ */
+ // Also used as the lock for this object.
+    private final ArrayList<Entry> mEntries = new ArrayList<>();
+
+ /**
+     * The categories defined for this Errors object.
+ */
+ private Map<Integer, Category> mCategories;
+
+ /**
+ * Whether there has been a warning or an error yet.
+ */
+ private boolean mHadWarningOrError;
+
+ /**
+ * Whether there has been an error yet.
+ */
+ private boolean mHadError;
+
+ /**
+     * The severity of a category: reported as an error, as a warning, or hidden.
+ */
+ public static enum Level {
+ HIDDEN("hidden"),
+ WARNING("warning"),
+ ERROR("error");
+
+ private final String mLabel;
+
+ Level(String label) {
+ mLabel = label;
+ }
+
+ String getLabel() {
+ return mLabel;
+ }
+ }
+
+ /**
+     * A category of error, identified by a numeric code, with a severity level and help text.
+ */
+ public class Category {
+ private final int mCode;
+ private boolean mIsLevelSettable;
+ private Level mLevel;
+ private String mHelp;
+
+ /**
+ * Construct a Category object.
+ */
+ public Category(int code, boolean isLevelSettable, Level level, String help) {
+ if (!isLevelSettable && level != Level.ERROR) {
+ throw new RuntimeException("Don't have WARNING or HIDDEN without isLevelSettable");
+ }
+ mCode = code;
+ mIsLevelSettable = isLevelSettable;
+ mLevel = level;
+ mHelp = help;
+ }
+
+ /**
+ * Get the numeric code for the Category, which can be used to set the level.
+ */
+ public int getCode() {
+ return mCode;
+ }
+
+ /**
+ * Get whether the level of this Category can be changed.
+ */
+ public boolean isLevelSettable() {
+ return mIsLevelSettable;
+ }
+
+ /**
+ * Set the level of this category.
+ */
+ public void setLevel(Level level) {
+ if (!mIsLevelSettable) {
+ throw new RuntimeException("Can't set level for error " + mCode);
+ }
+ mLevel = level;
+ }
+
+ /**
+ * Return the level, including any overrides.
+ */
+ public Level getLevel() {
+ return mLevel;
+ }
+
+ /**
+ * Return the category's help text.
+ */
+ public String getHelp() {
+ return mHelp;
+ }
+ }
+
+ /**
+ * An instance of an error happening.
+ */
+ public class Entry {
+ private final Category mCategory;
+ private final Position mPosition;
+ private final String mMessage;
+
+ Entry(Category category, Position position, String message) {
+ mCategory = category;
+ mPosition = position;
+ mMessage = message;
+ }
+
+ public Category getCategory() {
+ return mCategory;
+ }
+
+ public Position getPosition() {
+ return mPosition;
+ }
+
+ public String getMessage() {
+ return mMessage;
+ }
+ }
+
+ private void initLocked() {
+ if (mCategories == null) {
+            HashMap<Integer, Category> categories = new HashMap<>();
+ for (Field field: getClass().getFields()) {
+ if (Category.class.isAssignableFrom(field.getType())) {
+ Category category = null;
+ try {
+ category = (Category)field.get(this);
+ } catch (IllegalAccessException ex) {
+ // Wrap and rethrow, this is always on this class, so it's
+ // our programming error if this happens.
+ throw new RuntimeException("Categories on Errors should be public.", ex);
+ }
+ Category prev = categories.put(category.getCode(), category);
+ if (prev != null) {
+ throw new RuntimeException("Duplicate categories with code "
+ + category.getCode());
+ }
+ }
+ }
+ mCategories = Collections.unmodifiableMap(categories);
+ }
+ }
+
+ /**
+ * Returns a map of the category codes to the categories.
+ */
+ public Map<Integer, Category> getCategories() {
+ synchronized (mEntries) {
+ initLocked();
+ return mCategories;
+ }
+ }
+
+ /**
+ * Add an error with no source position.
+ */
+ public void add(Category category, String message) {
+ add(category, new Position(), message);
+ }
+
+ /**
+ * Add an error.
+ */
+ public void add(Category category, Position pos, String message) {
+ synchronized (mEntries) {
+ initLocked();
+ if (mCategories.get(category.getCode()) != category) {
+ throw new RuntimeException("Errors.Category used from the wrong Errors object.");
+ }
+ mEntries.add(new Entry(category, pos, message));
+ final Level level = category.getLevel();
+ if (level == Level.WARNING || level == Level.ERROR) {
+ mHadWarningOrError = true;
+ }
+ if (level == Level.ERROR) {
+ mHadError = true;
+ }
+ }
+ }
+
+ /**
+ * Returns whether there has been a warning or an error yet.
+ */
+ public boolean hadWarningOrError() {
+ synchronized (mEntries) {
+ return mHadWarningOrError;
+ }
+ }
+
+ /**
+ * Returns whether there has been an error yet.
+ */
+ public boolean hadError() {
+ synchronized (mEntries) {
+ return mHadError;
+ }
+ }
+
+ /**
+ * Returns a list of all entries that were added.
+ */
+ public List<Entry> getEntries() {
+ synchronized (mEntries) {
+ return new ArrayList<Entry>(mEntries);
+ }
+ }
+
+ /**
+ * Prints the errors.
+ */
+ public void printErrors(PrintStream out) {
+ synchronized (mEntries) {
+ for (Entry entry: mEntries) {
+ final Category category = entry.getCategory();
+ final Level level = category.getLevel();
+ if (level == Level.HIDDEN) {
+ continue;
+ }
+ out.println(entry.getPosition() + "[" + level.getLabel() + " "
+ + category.getCode() + "] " + entry.getMessage());
+ }
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Errors.java b/tools/product_config/src/com/android/build/config/Errors.java
new file mode 100644
index 0000000..63792c8
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Errors.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.lang.reflect.Field;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Error constants and error reporting.
+ * <p>
+ * <b>Naming Convention:</b>
+ * <ul>
+ * <li>ERROR_ for Categories with isLevelSettable false and Level.ERROR
+ * <li>WARNING_ for Categories with isLevelSettable true and default WARNING or HIDDEN
+ * <li>Don't have isLevelSettable false and a non-ERROR level. (The constructor asserts this.)
+ * </ul>
+ */
+public class Errors extends ErrorReporter {
+
+ public final Category ERROR_COMMAND_LINE = new Category(1, false, Level.ERROR,
+ "Error on the command line.");
+
+ public final Category WARNING_UNKNOWN_COMMAND_LINE_ERROR = new Category(2, true, Level.HIDDEN,
+ "Passing unknown errors on the command line. Hidden by default for\n"
+ + "forward compatibility.");
+}
diff --git a/tools/product_config/src/com/android/build/config/Main.java b/tools/product_config/src/com/android/build/config/Main.java
new file mode 100644
index 0000000..7669742
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Main.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+public class Main {
+ private final Errors mErrors;
+ private final Options mOptions;
+
+ public Main(Errors errors, Options options) {
+ mErrors = errors;
+ mOptions = options;
+ }
+
+ void run() {
+ System.out.println("Hello World");
+
+ // TODO: Check the build environment to make sure we're running in a real
+ // build environment, e.g. actually inside a source tree, with TARGET_PRODUCT
+ // and TARGET_BUILD_VARIANT defined, etc.
+
+ // TODO: Run kati and extract the variables and convert all that into starlark files.
+
+ // TODO: Run starlark with all the generated ones and the hand written ones.
+
+ // TODO: Get the variables that were defined in starlark and use that to write
+ // out the make, soong and bazel input files.
+ }
+
+ public static void main(String[] args) {
+ Errors errors = new Errors();
+
+ Options options = Options.parse(errors, args);
+ if (errors.hadError()) {
+ Options.printHelp(System.err);
+ System.err.println();
+ errors.printErrors(System.err);
+ System.exit(1);
+ }
+
+ switch (options.getAction()) {
+ case DEFAULT:
+ (new Main(errors, options)).run();
+ errors.printErrors(System.err);
+ return;
+ case HELP:
+ Options.printHelp(System.out);
+ return;
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Options.java b/tools/product_config/src/com/android/build/config/Options.java
new file mode 100644
index 0000000..494b947
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Options.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.TreeMap;
+
+public class Options {
+ public enum Action {
+ DEFAULT,
+ HELP
+ }
+
+ private Action mAction = Action.DEFAULT;
+
+ public Action getAction() {
+ return mAction;
+ }
+
+ public static void printHelp(PrintStream out) {
+ out.println("usage: product_config");
+ out.println();
+ out.println("OPTIONS");
+ out.println(" --hide ERROR_ID Suppress this error.");
+ out.println(" --error ERROR_ID Make this ERROR_ID a fatal error.");
+ out.println(" --help -h This message.");
+ out.println(" --warning ERROR_ID Make this ERROR_ID a warning.");
+ out.println();
+ out.println("ERRORS");
+ out.println(" The following are the errors that can be controlled on the");
+ out.println(" commandline with the --hide --warning --error flags.");
+
+    TreeMap<Integer, Errors.Category> sorted = new TreeMap<>((new Errors()).getCategories());
+
+ for (final Errors.Category category: sorted.values()) {
+ if (category.isLevelSettable()) {
+ out.println(String.format(" %-3d %s", category.getCode(),
+ category.getHelp().replace("\n", "\n ")));
+ }
+ }
+ }
+
+ static class Parser {
+ private class ParseException extends Exception {
+ public ParseException(String message) {
+ super(message);
+ }
+ }
+
+ private Errors mErrors;
+ private String[] mArgs;
+ private Options mResult = new Options();
+ private int mIndex;
+
+ public Parser(Errors errors, String[] args) {
+ mErrors = errors;
+ mArgs = args;
+ }
+
+ public Options parse() {
+ try {
+ while (mIndex < mArgs.length) {
+ final String arg = mArgs[mIndex];
+
+ if ("--hide".equals(arg)) {
+ handleErrorCode(arg, Errors.Level.HIDDEN);
+ } else if ("--error".equals(arg)) {
+ handleErrorCode(arg, Errors.Level.ERROR);
+ } else if ("--help".equals(arg) || "-h".equals(arg)) {
+          // Help overrides any other action unless an error has already been
+          // reported; either way, stop parsing here.
+ if (!mErrors.hadError()) {
+ mResult.mAction = Action.HELP;
+ }
+ return mResult;
+ } else if ("--warning".equals(arg)) {
+ handleErrorCode(arg, Errors.Level.WARNING);
+ } else {
+ throw new ParseException("Unknown command line argument: " + arg);
+ }
+
+ mIndex++;
+ }
+ } catch (ParseException ex) {
+ mErrors.add(mErrors.ERROR_COMMAND_LINE, ex.getMessage());
+ }
+
+ return mResult;
+ }
+
+ private void addWarning(Errors.Category category, String message) {
+ mErrors.add(category, message);
+ }
+
+ private String getNextNonFlagArg() {
+ if (mIndex == mArgs.length - 1) {
+ return null;
+ }
+ if (mArgs[mIndex + 1].startsWith("-")) {
+ return null;
+ }
+ mIndex++;
+ return mArgs[mIndex];
+ }
+
+ private int requireNextNumberArg(String arg) throws ParseException {
+ final String val = getNextNonFlagArg();
+ if (val == null) {
+ throw new ParseException(arg + " requires a numeric argument.");
+ }
+ try {
+ return Integer.parseInt(val);
+ } catch (NumberFormatException ex) {
+ throw new ParseException(arg + " requires a numeric argument. found: " + val);
+ }
+ }
+
+ private void handleErrorCode(String arg, Errors.Level level) throws ParseException {
+ final int code = requireNextNumberArg(arg);
+ final Errors.Category category = mErrors.getCategories().get(code);
+ if (category == null) {
+ mErrors.add(mErrors.WARNING_UNKNOWN_COMMAND_LINE_ERROR,
+ "Unknown error code: " + code);
+ return;
+ }
+ if (!category.isLevelSettable()) {
+ mErrors.add(mErrors.ERROR_COMMAND_LINE, "Can't set level for error " + code);
+ return;
+ }
+ category.setLevel(level);
+ }
+ }
+
+ /**
+   * Parses the command line arguments and returns an Options object.
+   * <p>
+   * Updates the levels of the settable error categories on {@code errors}
+   * according to the --hide / --warning / --error flags.
+   * <p>
+   * Any problems encountered while parsing are reported to {@code errors}.
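+   * <p>
+   * Illustrative usage (mirroring {@code Main.main}):
+   * <pre>
+   *   Errors errors = new Errors();
+   *   Options options = Options.parse(errors, new String[] { "--warning", "2" });
+   *   if (errors.hadError()) {
+   *       Options.printHelp(System.err);
+   *   }
+   * </pre>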
+ */
+ public static Options parse(Errors errors, String[] args) {
+ return (new Parser(errors, args)).parse();
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Position.java b/tools/product_config/src/com/android/build/config/Position.java
new file mode 100644
index 0000000..7953942
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Position.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+/**
+ * Position in a source file.
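+ * <p>
+ * When rendered with {@link #toString}, a position becomes a message prefix such as
+ * (illustratively) {@code "some/file.mk:12: "}, {@code "some/file.mk: "} when no line
+ * is known, or {@code "<unknown>:12: "} when only the line is known.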
+ */
+public class Position implements Comparable<Position> {
+ /**
+ * Sentinel line number for when there is no known line number.
+ */
+ public static final int NO_LINE = -1;
+
+ private final String mFile;
+ private final int mLine;
+
+ public Position() {
+ mFile = null;
+ mLine = NO_LINE;
+ }
+
+ public Position(String file) {
+ mFile = file;
+ mLine = NO_LINE;
+ }
+
+ public Position(String file, int line) {
+ if (line < NO_LINE) {
+ throw new IllegalArgumentException("Negative line number. file=" + file
+ + " line=" + line);
+ }
+ mFile = file;
+ mLine = line;
+ }
+
+ public int compareTo(Position that) {
+ int result = mFile.compareTo(that.mFile);
+ if (result != 0) {
+ return result;
+ }
+ return mLine - that.mLine;
+ }
+
+ public String getFile() {
+ return mFile;
+ }
+
+ public int getLine() {
+ return mLine;
+ }
+
+ @Override
+ public String toString() {
+ if (mFile == null && mLine == NO_LINE) {
+ return "";
+ } else if (mFile == null && mLine != NO_LINE) {
+ return "<unknown>:" + mLine + ": ";
+ } else if (mFile != null && mLine == NO_LINE) {
+ return mFile + ": ";
+ } else { // if (mFile != null && mLine != NO_LINE)
+ return mFile + ':' + mLine + ": ";
+ }
+ }
+}
diff --git a/tools/product_config/test/com/android/build/config/ErrorReporterTest.java b/tools/product_config/test/com/android/build/config/ErrorReporterTest.java
new file mode 100644
index 0000000..2cde476
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/ErrorReporterTest.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashSet;
+import java.util.List;
+
+public class ErrorReporterTest {
+ /**
+ * Test that errors can be recorded and retrieved.
+ */
+ @Test
+ public void testAdding() {
+ TestErrors errors = new TestErrors();
+
+ errors.add(errors.ERROR, new Position("a", 12), "Errrororrrr");
+
+ Assert.assertTrue(errors.hadWarningOrError());
+ Assert.assertTrue(errors.hadError());
+
+ List<TestErrors.Entry> entries = errors.getEntries();
+ Assert.assertEquals(1, entries.size());
+
+ TestErrors.Entry entry = entries.get(0);
+ Assert.assertEquals(errors.ERROR, entry.getCategory());
+ Assert.assertEquals("a", entry.getPosition().getFile());
+ Assert.assertEquals(12, entry.getPosition().getLine());
+ Assert.assertEquals("Errrororrrr", entry.getMessage());
+
+ Assert.assertNotEquals("", errors.getErrorMessages());
+ }
+
+ /**
+ * Test that not adding an error doesn't record errors.
+ */
+ @Test
+ public void testNoError() {
+ TestErrors errors = new TestErrors();
+
+ Assert.assertFalse(errors.hadWarningOrError());
+ Assert.assertFalse(errors.hadError());
+ Assert.assertEquals("", errors.getErrorMessages());
+ }
+
+ /**
+     * Test that adding a warning records a warning but not an error.
+ */
+ @Test
+ public void testWarning() {
+ TestErrors errors = new TestErrors();
+
+ errors.add(errors.WARNING, "Waaaaarninggggg");
+
+ Assert.assertTrue(errors.hadWarningOrError());
+ Assert.assertFalse(errors.hadError());
+ Assert.assertNotEquals("", errors.getErrorMessages());
+ }
+
+ /**
+ * Test that hidden warnings don't report.
+ */
+ @Test
+ public void testHidden() {
+ TestErrors errors = new TestErrors();
+
+ errors.add(errors.HIDDEN, "Hidddeennn");
+
+ Assert.assertFalse(errors.hadWarningOrError());
+ Assert.assertFalse(errors.hadError());
+ Assert.assertEquals("", errors.getErrorMessages());
+ }
+
+ /**
+ * Test changing an error level.
+ */
+ @Test
+ public void testSetLevel() {
+ TestErrors errors = new TestErrors();
+ Assert.assertEquals(TestErrors.Level.ERROR, errors.ERROR.getLevel());
+
+ errors.ERROR.setLevel(TestErrors.Level.WARNING);
+
+ Assert.assertEquals(TestErrors.Level.WARNING, errors.ERROR.getLevel());
+ }
+
+ /**
+ * Test that changing a fixed error fails.
+ */
+ @Test
+ public void testSetLevelFails() {
+ TestErrors errors = new TestErrors();
+ Assert.assertEquals(TestErrors.Level.ERROR, errors.ERROR_FIXED.getLevel());
+
+ boolean exceptionThrown = false;
+ try {
+ errors.ERROR_FIXED.setLevel(TestErrors.Level.WARNING);
+ } catch (RuntimeException ex) {
+ exceptionThrown = true;
+ }
+
+ Assert.assertTrue(exceptionThrown);
+ Assert.assertEquals(TestErrors.Level.ERROR, errors.ERROR_FIXED.getLevel());
+ }
+}
diff --git a/tools/product_config/test/com/android/build/config/OptionsTest.java b/tools/product_config/test/com/android/build/config/OptionsTest.java
new file mode 100644
index 0000000..2c36322
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/OptionsTest.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class OptionsTest {
+ @Test
+ public void testErrorMissingLast() {
+ final Errors errors = new Errors();
+
+ final Options options = Options.parse(errors, new String[] {
+ "--error"
+ });
+
+ Assert.assertNotEquals("", TestErrors.getErrorMessages(errors));
+ Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+ TestErrors.assertHasEntry(errors.ERROR_COMMAND_LINE, errors);
+ }
+
+ @Test
+ public void testErrorMissingNotLast() {
+ final Errors errors = new Errors();
+
+ final Options options = Options.parse(errors, new String[] {
+ "--error", "--warning", "2"
+ });
+
+ Assert.assertNotEquals("", TestErrors.getErrorMessages(errors));
+ Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+ TestErrors.assertHasEntry(errors.ERROR_COMMAND_LINE, errors);
+ }
+
+ @Test
+ public void testErrorNotNumeric() {
+ final Errors errors = new Errors();
+
+ final Options options = Options.parse(errors, new String[] {
+ "--error", "notgood"
+ });
+
+ Assert.assertNotEquals("", TestErrors.getErrorMessages(errors));
+ Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+ TestErrors.assertHasEntry(errors.ERROR_COMMAND_LINE, errors);
+ }
+
+ @Test
+ public void testErrorInvalidError() {
+ final Errors errors = new Errors();
+
+ final Options options = Options.parse(errors, new String[] {
+ "--error", "50000"
+ });
+
+ Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+ Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+ TestErrors.assertHasEntry(errors.WARNING_UNKNOWN_COMMAND_LINE_ERROR, errors);
+ }
+
+ @Test
+ public void testErrorOne() {
+ final Errors errors = new Errors();
+
+ final Options options = Options.parse(errors, new String[] {
+ "--error", "2"
+ });
+
+ Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+ Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+ Assert.assertFalse(errors.hadWarningOrError());
+ }
+
+ @Test
+ public void testWarningOne() {
+ final Errors errors = new Errors();
+
+ final Options options = Options.parse(errors, new String[] {
+ "--warning", "2"
+ });
+
+ Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+ Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+ Assert.assertFalse(errors.hadWarningOrError());
+ }
+
+ @Test
+ public void testHideOne() {
+ final Errors errors = new Errors();
+
+ final Options options = Options.parse(errors, new String[] {
+ "--hide", "2"
+ });
+
+ Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+ Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+ Assert.assertFalse(errors.hadWarningOrError());
+ }
+}
+
diff --git a/tools/product_config/test/com/android/build/config/TestErrors.java b/tools/product_config/test/com/android/build/config/TestErrors.java
new file mode 100644
index 0000000..dde88b0
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/TestErrors.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Errors for testing.
+ */
+public class TestErrors extends ErrorReporter {
+
+ public static final int ERROR_CODE = 1;
+
+ public final Category ERROR = new Category(ERROR_CODE, true, Level.ERROR,
+ "An error.");
+
+ public static final int WARNING_CODE = 2;
+
+ public final Category WARNING = new Category(WARNING_CODE, true, Level.WARNING,
+ "A warning.");
+
+ public static final int HIDDEN_CODE = 3;
+
+ public final Category HIDDEN = new Category(HIDDEN_CODE, true, Level.HIDDEN,
+ "A hidden warning.");
+
+ public static final int ERROR_FIXED_CODE = 4;
+
+ public final Category ERROR_FIXED = new Category(ERROR_FIXED_CODE, false, Level.ERROR,
+ "An error that can't have its level changed.");
+
+ public void assertHasEntry(Errors.Category category) {
+ assertHasEntry(category, this);
+ }
+
+ public String getErrorMessages() {
+ return getErrorMessages(this);
+ }
+
+ public static void assertHasEntry(Errors.Category category, ErrorReporter errors) {
+ StringBuilder found = new StringBuilder();
+ for (Errors.Entry entry: errors.getEntries()) {
+ if (entry.getCategory() == category) {
+ return;
+ }
+ found.append(' ');
+ found.append(entry.getCategory().getCode());
+ }
+ throw new AssertionError("No error category " + category.getCode() + " found."
+ + " Found category codes were:" + found);
+ }
+
+ public static String getErrorMessages(ErrorReporter errors) {
+ final ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ try {
+ errors.printErrors(new PrintStream(stream, true, StandardCharsets.UTF_8.name()));
+ } catch (UnsupportedEncodingException ex) {
+ // utf-8 is always supported
+ }
+ return new String(stream.toByteArray(), StandardCharsets.UTF_8);
+ }
+}
+
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index ace00ac..c28c4dc 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -49,6 +49,8 @@
required: [
"blk_alloc_to_base_fs",
"e2fsck",
+ "mkerofsimage.sh",
+ "mkuserimg_mke2fs",
"simg2img",
"tune2fs",
],
@@ -88,23 +90,47 @@
],
}
+python_library_host {
+ name: "ota_metadata_proto",
+ version: {
+ py2: {
+ enabled: true,
+ },
+ py3: {
+ enabled: true,
+ },
+ },
+ srcs: [
+ "ota_metadata.proto",
+ ],
+ proto: {
+ canonical_path_from_root: false,
+ },
+}
+
python_defaults {
name: "releasetools_ota_from_target_files_defaults",
srcs: [
"edify_generator.py",
- "ota_from_target_files.py",
"non_ab_ota.py",
- "target_files_diff.py",
+ "ota_from_target_files.py",
"ota_utils.py",
+ "target_files_diff.py",
],
libs: [
+ "ota_metadata_proto",
"releasetools_check_target_files_vintf",
"releasetools_common",
"releasetools_verity_utils",
+ "apex_manifest",
],
required: [
"brillo_update_payload",
"checkvintf",
+ "lz4",
+ "toybox",
+ "unpack_bootimg",
+ "deapexer",
],
target: {
darwin: {
@@ -145,6 +171,8 @@
"apex_utils.py",
],
libs: [
+ "apex_manifest",
+ "ota_metadata_proto",
"releasetools_common",
],
}
@@ -195,6 +223,8 @@
"lz4",
"mkbootfs",
"signapk",
+ "toybox",
+ "unpack_bootimg",
],
}
@@ -252,7 +282,11 @@
"bsdiff",
"imgdiff",
"minigzip",
+ "lz4",
"mkbootfs",
+ "signapk",
+ "toybox",
+ "unpack_bootimg",
],
}
@@ -343,6 +377,32 @@
],
}
+python_defaults {
+ name: "releasetools_find_shareduid_violation_defaults",
+ srcs: [
+ "find_shareduid_violation.py",
+ ],
+ libs: [
+ "releasetools_common",
+ ],
+}
+
+python_binary_host {
+ name: "find_shareduid_violation",
+ defaults: [
+ "releasetools_binary_defaults",
+ "releasetools_find_shareduid_violation_defaults",
+ ],
+}
+
+python_library_host {
+ name: "releasetools_find_shareduid_violation",
+ defaults: [
+ "releasetools_find_shareduid_violation_defaults",
+ "releasetools_library_defaults",
+ ],
+}
+
python_binary_host {
name: "make_recovery_patch",
defaults: ["releasetools_binary_defaults"],
@@ -377,11 +437,13 @@
"releasetools_build_super_image",
"releasetools_check_target_files_vintf",
"releasetools_common",
+ "releasetools_find_shareduid_violation",
"releasetools_img_from_target_files",
"releasetools_ota_from_target_files",
],
required: [
"checkvintf",
+ "host_init_verifier",
],
target: {
darwin: {
@@ -479,12 +541,15 @@
"releasetools_build_super_image",
"releasetools_check_target_files_vintf",
"releasetools_common",
+ "releasetools_find_shareduid_violation",
"releasetools_img_from_target_files",
"releasetools_ota_from_target_files",
"releasetools_verity_utils",
],
data: [
"testdata/**/*",
+ ":com.android.apex.compressed.v1",
+ ":com.android.apex.compressed.v1_original",
],
target: {
darwin: {
@@ -492,6 +557,9 @@
enabled: false,
},
},
+ required: [
+ "deapexer",
+ ],
}
python_test_host {
@@ -511,7 +579,9 @@
embedded_launcher: false,
},
},
- test_suites: ["general-tests"],
+ test_options: {
+ unit_test: true,
+ },
}
python_test_host {
@@ -528,5 +598,7 @@
embedded_launcher: false,
},
},
- test_suites: ["general-tests"],
+ test_options: {
+ unit_test: true,
+ },
}
diff --git a/tools/releasetools/TEST_MAPPING b/tools/releasetools/TEST_MAPPING
deleted file mode 100644
index 0af0f04..0000000
--- a/tools/releasetools/TEST_MAPPING
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "presubmit": [
- {
- "name": "releasetools_test",
- "host": true
- },
- {
- "name": "releasetools_py3_test",
- "host": true
- }
- ]
-}
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index a1f8e31..5f35d78 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -51,6 +51,7 @@
import os
import shlex
import shutil
+import stat
import sys
import uuid
import zipfile
@@ -330,6 +331,9 @@
# AVB-sign the image as needed.
if OPTIONS.info_dict.get("avb_enable") == "true":
+ # Signing requires +w
+ os.chmod(img.name, os.stat(img.name).st_mode | stat.S_IWUSR)
+
avbtool = OPTIONS.info_dict["avb_avbtool"]
part_size = OPTIONS.info_dict["dtbo_size"]
# The AVB hash footer will be replaced if already present.
@@ -739,6 +743,18 @@
common.ZipClose(output_zip)
+def HasPartition(partition_name):
+ """Determines if the target files archive should build a given partition."""
+
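+  # Illustrative: HasPartition("vendor") is True when a VENDOR/ directory was
+  # dumped into the target files with building_vendor_image=true, or when a
+  # prebuilt IMAGES/vendor.img is present.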
+ return ((os.path.isdir(
+ os.path.join(OPTIONS.input_tmp, partition_name.upper())) and
+ OPTIONS.info_dict.get(
+ "building_{}_image".format(partition_name)) == "true") or
+ os.path.exists(
+ os.path.join(OPTIONS.input_tmp, "IMAGES",
+ "{}.img".format(partition_name))))
+
+
def AddImagesToTargetFiles(filename):
"""Creates and adds images (boot/recovery/system/...) to a target_files.zip.
@@ -767,49 +783,16 @@
has_boot = OPTIONS.info_dict.get("no_boot") != "true"
has_vendor_boot = OPTIONS.info_dict.get("vendor_boot") == "true"
- # {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm}.img
- # are unlike system.img or
- # system_other.img, because it could be built from source, or dropped into
- # target_files.zip as a prebuilt blob. We consider either of them as
- # {vendor,product,system_ext}.img being available, which could be
- # used when generating vbmeta.img for AVB.
- has_vendor = ((os.path.isdir(os.path.join(OPTIONS.input_tmp, "VENDOR")) and
- OPTIONS.info_dict.get("building_vendor_image") == "true") or
- os.path.exists(
- os.path.join(OPTIONS.input_tmp, "IMAGES", "vendor.img")))
- has_odm = ((os.path.isdir(os.path.join(OPTIONS.input_tmp, "ODM")) and
- OPTIONS.info_dict.get("building_odm_image") == "true") or
- os.path.exists(
- os.path.join(OPTIONS.input_tmp, "IMAGES", "odm.img")))
- has_vendor_dlkm = ((os.path.isdir(os.path.join(OPTIONS.input_tmp,
- "VENDOR_DLKM")) and
- OPTIONS.info_dict.get("building_vendor_dlkm_image")
- == "true") or
- os.path.exists(
- os.path.join(OPTIONS.input_tmp, "IMAGES",
- "vendor_dlkm.img")))
- has_odm_dlkm = ((os.path.isdir(os.path.join(OPTIONS.input_tmp,
- "ODM_DLKM")) and
- OPTIONS.info_dict.get("building_odm_dlkm_image")
- == "true") or
- os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
- "odm_dlkm.img")))
- has_product = ((os.path.isdir(os.path.join(OPTIONS.input_tmp, "PRODUCT")) and
- OPTIONS.info_dict.get("building_product_image") == "true") or
- os.path.exists(
- os.path.join(OPTIONS.input_tmp, "IMAGES", "product.img")))
- has_system_ext = (
- (os.path.isdir(os.path.join(OPTIONS.input_tmp, "SYSTEM_EXT")) and
- OPTIONS.info_dict.get("building_system_ext_image") == "true") or
- os.path.exists(
- os.path.join(OPTIONS.input_tmp, "IMAGES", "system_ext.img")))
- has_system = (
- os.path.isdir(os.path.join(OPTIONS.input_tmp, "SYSTEM")) and
- OPTIONS.info_dict.get("building_system_image") == "true")
-
- has_system_other = (
- os.path.isdir(os.path.join(OPTIONS.input_tmp, "SYSTEM_OTHER")) and
- OPTIONS.info_dict.get("building_system_other_image") == "true")
+  # {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm,system,system_other}.img
+ # can be built from source, or dropped into target_files.zip as a prebuilt blob.
+ has_vendor = HasPartition("vendor")
+ has_odm = HasPartition("odm")
+ has_vendor_dlkm = HasPartition("vendor_dlkm")
+ has_odm_dlkm = HasPartition("odm_dlkm")
+ has_product = HasPartition("product")
+ has_system_ext = HasPartition("system_ext")
+ has_system = HasPartition("system")
+ has_system_other = HasPartition("system_other")
has_userdata = OPTIONS.info_dict.get("building_userdata_image") == "true"
has_cache = OPTIONS.info_dict.get("building_cache_image") == "true"
@@ -980,8 +963,9 @@
if item not in vbmeta_vendor.split()]
vbmeta_partitions.append("vbmeta_vendor")
- banner("vbmeta")
- AddVBMeta(output_zip, partitions, "vbmeta", vbmeta_partitions)
+ if OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true":
+ banner("vbmeta")
+ AddVBMeta(output_zip, partitions, "vbmeta", vbmeta_partitions)
if OPTIONS.info_dict.get("use_dynamic_partitions") == "true":
banner("super_empty")
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index afebc40..7ccc95c 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -21,7 +21,12 @@
import shutil
import zipfile
+import apex_manifest
import common
+from common import UnzipTemp, RunAndCheckOutput, MakeTempFile, OPTIONS
+
+import ota_metadata_pb2
+
logger = logging.getLogger(__name__)
@@ -49,8 +54,13 @@
def __init__(self, apex_path, key_passwords, codename_to_api_level_map):
self.apex_path = apex_path
- self.key_passwords = key_passwords
+ if not key_passwords:
+ self.key_passwords = dict()
+ else:
+ self.key_passwords = key_passwords
self.codename_to_api_level_map = codename_to_api_level_map
+ self.debugfs_path = os.path.join(
+ OPTIONS.search_path, "bin", "debugfs_static")
def ProcessApexFile(self, apk_keys, payload_key, signing_args=None):
"""Scans and signs the apk files and repack the apex
@@ -61,7 +71,13 @@
Returns:
The repacked apex file containing the signed apk files.
"""
- list_cmd = ['deapexer', 'list', self.apex_path]
+ if not os.path.exists(self.debugfs_path):
+ raise ApexSigningError(
+ "Couldn't find location of debugfs_static: " +
+ "Path {} does not exist. ".format(self.debugfs_path) +
+ "Make sure bin/debugfs_static can be found in -p <path>")
+ list_cmd = ['deapexer', '--debugfs_path',
+ self.debugfs_path, 'list', self.apex_path]
entries_names = common.RunAndCheckOutput(list_cmd).split()
apk_entries = [name for name in entries_names if name.endswith('.apk')]
@@ -91,8 +107,14 @@
def ExtractApexPayloadAndSignApks(self, apk_entries, apk_keys):
"""Extracts the payload image and signs the containing apk files."""
+ if not os.path.exists(self.debugfs_path):
+ raise ApexSigningError(
+ "Couldn't find location of debugfs_static: " +
+ "Path {} does not exist. ".format(self.debugfs_path) +
+ "Make sure bin/debugfs_static can be found in -p <path>")
payload_dir = common.MakeTempDir()
- extract_cmd = ['deapexer', 'extract', self.apex_path, payload_dir]
+ extract_cmd = ['deapexer', '--debugfs_path',
+ self.debugfs_path, 'extract', self.apex_path, payload_dir]
common.RunAndCheckOutput(extract_cmd)
has_signed_apk = False
@@ -110,8 +132,9 @@
# signed apk file.
unsigned_apk = common.MakeTempFile()
os.rename(apk_path, unsigned_apk)
- common.SignFile(unsigned_apk, apk_path, key_name, self.key_passwords,
- codename_to_api_level_map=self.codename_to_api_level_map)
+ common.SignFile(
+ unsigned_apk, apk_path, key_name, self.key_passwords.get(key_name),
+ codename_to_api_level_map=self.codename_to_api_level_map)
has_signed_apk = True
return payload_dir, has_signed_apk
@@ -149,7 +172,8 @@
# Add quote to the signing_args as we will pass
# --signing_args "--signing_helper_with_files=%path" to apexer
if signing_args:
- generate_image_cmd.extend(['--signing_args', '"{}"'.format(signing_args)])
+ generate_image_cmd.extend(
+ ['--signing_args', '"{}"'.format(signing_args)])
# optional arguments for apex repacking
manifest_json = os.path.join(apex_dir, 'apex_manifest.json')
@@ -162,7 +186,7 @@
# Add the payload image back to the apex file.
common.ZipDelete(self.apex_path, APEX_PAYLOAD_IMAGE)
- with zipfile.ZipFile(self.apex_path, 'a') as output_apex:
+ with zipfile.ZipFile(self.apex_path, 'a', allowZip64=True) as output_apex:
common.ZipWrite(output_apex, payload_img, APEX_PAYLOAD_IMAGE,
compress_type=zipfile.ZIP_STORED)
return self.apex_path
@@ -282,13 +306,13 @@
return payload_info
-def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
- apk_keys, codename_to_api_level_map,
- no_hashtree, signing_args=None):
- """Signs the current APEX with the given payload/container keys.
+def SignUncompressedApex(avbtool, apex_data, payload_key, container_key,
+ container_pw, apk_keys, codename_to_api_level_map,
+ no_hashtree, signing_args=None):
+ """Signs the current uncompressed APEX with the given payload/container keys.
Args:
- apex_data: Raw APEX data.
+ apex_data: Raw uncompressed APEX data.
payload_key: The path to payload signing key (w/ extension).
container_key: The path to container signing key (w/o extension).
container_pw: The matching password of the container_key, or None.
@@ -336,7 +360,7 @@
common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
if APEX_PUBKEY in zip_items:
common.ZipDelete(apex_file, APEX_PUBKEY)
- apex_zip = zipfile.ZipFile(apex_file, 'a')
+ apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
common.ZipClose(apex_zip)
@@ -352,12 +376,128 @@
extra_signapk_args = OPTIONS.extra_signapk_args[:]
extra_signapk_args.extend(['-a', '4096'])
+ password = container_pw.get(container_key) if container_pw else None
common.SignFile(
aligned_apex,
signed_apex,
container_key,
- container_pw,
+ password,
codename_to_api_level_map=codename_to_api_level_map,
extra_signapk_args=extra_signapk_args)
return signed_apex
+
+
+def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
+ apk_keys, codename_to_api_level_map,
+ no_hashtree, signing_args=None):
+ """Signs the current APEX with the given payload/container keys.
+
+ Args:
+    apex_data: Raw APEX data.
+ payload_key: The path to payload signing key (w/ extension).
+ container_key: The path to container signing key (w/o extension).
+ container_pw: The matching password of the container_key, or None.
+ apk_keys: A dict that holds the signing keys for apk files.
+ codename_to_api_level_map: A dict that maps from codename to API level.
+ no_hashtree: Don't include hashtree in the signed APEX.
+ signing_args: Additional args to be passed to the payload signer.
+
+ Returns:
+ The path to the signed APEX file.
+ """
+ apex_file = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
+ with open(apex_file, 'wb') as output_fp:
+ output_fp.write(apex_data)
+
+ debugfs_path = os.path.join(OPTIONS.search_path, "bin", "debugfs_static")
+ cmd = ['deapexer', '--debugfs_path', debugfs_path,
+ 'info', '--print-type', apex_file]
+
+ try:
+ apex_type = common.RunAndCheckOutput(cmd).strip()
+ if apex_type == 'UNCOMPRESSED':
+ return SignUncompressedApex(
+ avbtool,
+ apex_data,
+ payload_key=payload_key,
+ container_key=container_key,
+ container_pw=None,
+ codename_to_api_level_map=codename_to_api_level_map,
+ no_hashtree=no_hashtree,
+ apk_keys=apk_keys,
+ signing_args=signing_args)
+ else:
+ # TODO(b/172912232): support signing compressed apex
+ raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
+
+ except common.ExternalError as e:
+ raise ApexInfoError(
+ 'Failed to get type for {}:\n{}'.format(apex_file, e))
+
+def GetApexInfoFromTargetFiles(input_file):
+ """
+  Get information about the system APEXes stored in the input_file target files.
+
+  Args:
+    input_file: Path to the target-files zip of the target build, or to an
+      extracted target-files directory.
+
+  Returns:
+    A list of ota_metadata_pb2.ApexInfo() messages populated from the APEX files
+    stored in the /system partition of input_file.
+ """
+
+ # Extract the apex files so that we can run checks on them
+ if not isinstance(input_file, str):
+ raise RuntimeError("must pass filepath to target-files zip or directory")
+
+ if os.path.isdir(input_file):
+ tmp_dir = input_file
+ else:
+ tmp_dir = UnzipTemp(input_file, ["SYSTEM/apex/*"])
+ target_dir = os.path.join(tmp_dir, "SYSTEM/apex/")
+
+ apex_infos = []
+ for apex_filename in os.listdir(target_dir):
+ apex_filepath = os.path.join(target_dir, apex_filename)
+ if not os.path.isfile(apex_filepath) or \
+ not zipfile.is_zipfile(apex_filepath):
+ logger.info("Skipping %s because it's not a zipfile", apex_filepath)
+ continue
+ apex_info = ota_metadata_pb2.ApexInfo()
+ # Open the apex file to retrieve information
+ manifest = apex_manifest.fromApex(apex_filepath)
+ apex_info.package_name = manifest.name
+ apex_info.version = manifest.version
+ # Check if the file is compressed or not
+ debugfs_path = "debugfs"
+ if OPTIONS.search_path:
+ debugfs_path = os.path.join(OPTIONS.search_path, "bin", "debugfs_static")
+ deapexer = 'deapexer'
+ if OPTIONS.search_path:
+ deapexer_path = os.path.join(OPTIONS.search_path, "deapexer")
+ if os.path.isfile(deapexer_path):
+ deapexer = deapexer_path
+ apex_type = RunAndCheckOutput([
+ deapexer, "--debugfs_path", debugfs_path,
+ 'info', '--print-type', apex_filepath]).rstrip()
+ if apex_type == 'COMPRESSED':
+ apex_info.is_compressed = True
+ elif apex_type == 'UNCOMPRESSED':
+ apex_info.is_compressed = False
+ else:
+ raise RuntimeError('Not an APEX file: ' + apex_type)
+
+ # Decompress compressed APEX to determine its size
+ if apex_info.is_compressed:
+ decompressed_file_path = MakeTempFile(prefix="decompressed-",
+ suffix=".apex")
+ # Decompression target path should not exist
+ os.remove(decompressed_file_path)
+ RunAndCheckOutput([deapexer, 'decompress', '--input', apex_filepath,
+ '--output', decompressed_file_path])
+ apex_info.decompressed_size = os.path.getsize(decompressed_file_path)
+
+ apex_infos.append(apex_info)
+
+ return apex_infos
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 9cc072f..820c128 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -250,6 +250,7 @@
run_e2fsck = False
needs_projid = prop_dict.get("needs_projid", 0)
needs_casefold = prop_dict.get("needs_casefold", 0)
+ needs_compress = prop_dict.get("needs_compress", 0)
if fs_type.startswith("ext"):
build_command = [prop_dict["ext_mkuserimg"]]
@@ -295,6 +296,18 @@
build_command.extend(["--inode_size", "256"])
if "selinux_fc" in prop_dict:
build_command.append(prop_dict["selinux_fc"])
+ elif fs_type.startswith("erofs"):
+ build_command = ["mkerofsimage.sh"]
+ build_command.extend([in_dir, out_file])
+ if "erofs_sparse_flag" in prop_dict:
+ build_command.extend([prop_dict["erofs_sparse_flag"]])
+ build_command.extend(["-m", prop_dict["mount_point"]])
+ if target_out:
+ build_command.extend(["-d", target_out])
+ if fs_config:
+ build_command.extend(["-C", fs_config])
+ if "selinux_fc" in prop_dict:
+ build_command.extend(["-c", prop_dict["selinux_fc"]])
elif fs_type.startswith("squash"):
build_command = ["mksquashfsimage.sh"]
build_command.extend([in_dir, out_file])
@@ -337,6 +350,17 @@
build_command.append("--prjquota")
if (needs_casefold):
build_command.append("--casefold")
+ if (needs_compress or prop_dict.get("system_fs_compress") == "true"):
+ build_command.append("--compression")
+ if (prop_dict.get("system_fs_compress") == "true"):
+ build_command.append("--sldc")
+ if (prop_dict.get("system_f2fs_sldc_flags") == None):
+ build_command.append(str(0))
+ else:
+ sldc_flags_str = prop_dict.get("system_f2fs_sldc_flags")
+ sldc_flags = sldc_flags_str.split()
+ build_command.append(str(len(sldc_flags)))
+ build_command.extend(sldc_flags)
else:
raise BuildImageError(
"Error: unknown filesystem type: {}".format(fs_type))
@@ -402,7 +426,7 @@
fs_type = prop_dict.get("fs_type", "")
fs_spans_partition = True
- if fs_type.startswith("squash"):
+ if fs_type.startswith("squash") or fs_type.startswith("erofs"):
fs_spans_partition = False
# Get a builder for creating an image that's to be verified by Verified Boot,
@@ -412,7 +436,16 @@
if (prop_dict.get("use_dynamic_partition_size") == "true" and
"partition_size" not in prop_dict):
# If partition_size is not defined, use output of `du' + reserved_size.
- size = GetDiskUsage(in_dir)
+ # For compressed file system, it's better to use the compressed size to avoid wasting space.
+ if fs_type.startswith("erofs"):
+ tmp_dict = prop_dict.copy()
+ if "erofs_sparse_flag" in tmp_dict:
+ tmp_dict.pop("erofs_sparse_flag")
+ BuildImageMkfs(in_dir, tmp_dict, out_file, target_out, fs_config)
+ size = GetDiskUsage(out_file)
+ os.remove(out_file)
+ else:
+ size = GetDiskUsage(in_dir)
logger.info(
"The tree size of %s is %d MB.", in_dir, size // BYTES_IN_MB)
# If not specified, give us 16MB margin for GetDiskUsage error ...
@@ -529,7 +562,10 @@
common_props = (
"extfs_sparse_flag",
+ "erofs_sparse_flag",
"squashfs_sparse_flag",
+ "system_fs_compress",
+ "system_f2fs_sldc_flags",
"f2fs_sparse_flag",
"skip_fsck",
"ext_mkuserimg",
@@ -610,6 +646,7 @@
copy_prop("userdata_selinux_fc", "selinux_fc")
copy_prop("needs_casefold", "needs_casefold")
copy_prop("needs_projid", "needs_projid")
+ copy_prop("needs_compress", "needs_compress")
elif mount_point == "cache":
copy_prop("cache_fs_type", "fs_type")
copy_prop("cache_size", "partition_size")
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 0d990f1..58510a5 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -140,7 +140,7 @@
def VerifyAbOtaPayload(cert, package):
"""Verifies the payload and metadata signatures in an A/B OTA payload."""
- package_zip = zipfile.ZipFile(package, 'r')
+ package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
if 'payload.bin' not in package_zip.namelist():
common.ZipClose(package_zip)
return
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 0edefac..a2ddfe7 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -100,10 +100,7 @@
'PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS is not set')
return []
- with open(version_path) as f:
- version = f.read().strip()
-
- return ['--kernel', '{}:{}'.format(version, config_path)]
+ return ['--kernel', '{}:{}'.format(version_path, config_path)]
def CheckVintfFromExtractedTargetFiles(input_tmp, info_dict=None):
@@ -252,7 +249,7 @@
if os.path.isdir(target_files):
return os.path.isdir(os.path.join(target_files, "VENDOR"))
if zipfile.is_zipfile(target_files):
- return HasPartition(zipfile.ZipFile(target_files), "vendor")
+ return HasPartition(zipfile.ZipFile(target_files, allowZip64=True), "vendor")
raise ValueError("Unknown target_files argument")
return (HasVendorPartition(target_files) and
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 89900d3..26c4ae8 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -118,7 +118,7 @@
AVB_VBMETA_PARTITIONS = ('vbmeta_system', 'vbmeta_vendor')
# Partitions that should have their care_map added to META/care_map.pb
-PARTITIONS_WITH_CARE_MAP = (
+PARTITIONS_WITH_CARE_MAP = [
'system',
'vendor',
'product',
@@ -126,8 +126,14 @@
'odm',
'vendor_dlkm',
'odm_dlkm',
-)
+]
+# Partitions with a build.prop file
+PARTITIONS_WITH_BUILD_PROP = PARTITIONS_WITH_CARE_MAP + ['boot']
+
+# See sysprop.mk. If file is moved, add new search paths here; don't remove
+# existing search paths.
+RAMDISK_BUILD_PROP_REL_PATHS = ['system/etc/ramdisk/build.prop']
class ErrorCode(object):
"""Define error_codes for failures that happen during the actual
@@ -217,6 +223,25 @@
def SetHostToolLocation(tool_name, location):
OPTIONS.host_tools[tool_name] = location
+def FindHostToolPath(tool_name):
+ """Finds the path to the host tool.
+
+ Args:
+ tool_name: name of the tool to find
+ Returns:
+    Path to the tool if it is found in the host_tools map or in the same
+    directory as this binary; otherwise, tool_name itself is returned.
+ """
+ if tool_name in OPTIONS.host_tools:
+ return OPTIONS.host_tools[tool_name]
+
+ my_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
+ tool_path = os.path.join(my_dir, tool_name)
+ if os.path.exists(tool_path):
+ return tool_path
+
+ return tool_name
def Run(args, verbose=None, **kwargs):
"""Creates and returns a subprocess.Popen object.
@@ -240,12 +265,10 @@
if 'universal_newlines' not in kwargs:
kwargs['universal_newlines'] = True
- # If explicitly set host tool location before, use that location to avoid
- # PATH violation. Make a copy of args in case client relies on the content
- # of args later.
- if args and args[0] in OPTIONS.host_tools:
+ if args:
+ # Make a copy of args in case client relies on the content of args later.
args = args[:]
- args[0] = OPTIONS.host_tools[args[0]]
+ args[0] = FindHostToolPath(args[0])
# Don't log any if caller explicitly says so.
if verbose:
@@ -400,7 +423,7 @@
"3.2.2. Build Parameters.".format(fingerprint))
self._partition_fingerprints = {}
- for partition in PARTITIONS_WITH_CARE_MAP:
+ for partition in PARTITIONS_WITH_BUILD_PROP:
try:
fingerprint = self.CalculatePartitionFingerprint(partition)
check_fingerprint(fingerprint)
@@ -408,7 +431,7 @@
except ExternalError:
continue
if "system" in self._partition_fingerprints:
- # system_other is not included in PARTITIONS_WITH_CARE_MAP, but does
+ # system_other is not included in PARTITIONS_WITH_BUILD_PROP, but does
# need a fingerprint when creating the image.
self._partition_fingerprints[
"system_other"] = self._partition_fingerprints["system"]
@@ -456,12 +479,16 @@
def GetPartitionBuildProp(self, prop, partition):
"""Returns the inquired build property for the provided partition."""
+
+ # Boot image uses ro.[product.]bootimage instead of boot.
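+    # For example, ro.product.device becomes ro.product.bootimage.device and
+    # ro.build.id becomes ro.bootimage.build.id when partition is "boot".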
+ prop_partition = "bootimage" if partition == "boot" else partition
+
# If provided a partition for this property, only look within that
# partition's build.prop.
if prop in BuildInfo._RO_PRODUCT_RESOLVE_PROPS:
- prop = prop.replace("ro.product", "ro.product.{}".format(partition))
+ prop = prop.replace("ro.product", "ro.product.{}".format(prop_partition))
else:
- prop = prop.replace("ro.", "ro.{}.".format(partition))
+ prop = prop.replace("ro.", "ro.{}.".format(prop_partition))
prop_val = self._GetRawBuildProp(prop, partition)
if prop_val is not None:
@@ -527,6 +554,27 @@
return BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER_LEGACY
return BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER_CURRENT
+ def _GetPlatformVersion(self):
+ version_sdk = self.GetBuildProp("ro.build.version.sdk")
+ # init code switches to version_release_or_codename (see b/158483506). After
+ # API finalization, release_or_codename will be the same as release. This
+ # is the best effort to support pre-S dev stage builds.
+ if int(version_sdk) >= 30:
+ try:
+ return self.GetBuildProp("ro.build.version.release_or_codename")
+ except ExternalError:
+ logger.warning('Failed to find ro.build.version.release_or_codename')
+
+ return self.GetBuildProp("ro.build.version.release")
+
+ def _GetPartitionPlatformVersion(self, partition):
+ try:
+ return self.GetPartitionBuildProp("ro.build.version.release_or_codename",
+ partition)
+ except ExternalError:
+ return self.GetPartitionBuildProp("ro.build.version.release",
+ partition)
+
def GetOemProperty(self, key):
if self.oem_props is not None and key in self.oem_props:
return self.oem_dicts[0][key]
@@ -543,7 +591,7 @@
self.GetPartitionBuildProp("ro.product.brand", partition),
self.GetPartitionBuildProp("ro.product.name", partition),
self.GetPartitionBuildProp("ro.product.device", partition),
- self.GetPartitionBuildProp("ro.build.version.release", partition),
+ self._GetPartitionPlatformVersion(partition),
self.GetPartitionBuildProp("ro.build.id", partition),
self.GetPartitionBuildProp(
"ro.build.version.incremental", partition),
@@ -559,7 +607,7 @@
self.GetBuildProp("ro.product.brand"),
self.GetBuildProp("ro.product.name"),
self.GetBuildProp("ro.product.device"),
- self.GetBuildProp("ro.build.version.release"),
+ self._GetPlatformVersion(),
self.GetBuildProp("ro.build.id"),
self.GetBuildProp("ro.build.version.incremental"),
self.GetBuildProp("ro.build.type"),
@@ -611,6 +659,20 @@
raise KeyError(fn)
+def ExtractFromInputFile(input_file, fn):
+ """Extracts the contents of fn from input zipfile or directory into a file."""
+ if isinstance(input_file, zipfile.ZipFile):
+ tmp_file = MakeTempFile(os.path.basename(fn))
+ with open(tmp_file, 'w') as f:
+ f.write(input_file.read(fn))
+ return tmp_file
+ else:
+ file = os.path.join(input_file, *fn.split("/"))
+ if not os.path.exists(file):
+ raise KeyError(fn)
+ return file
+
+
def LoadInfoDict(input_file, repacking=False):
"""Loads the key/value pairs from the given input target_files.
@@ -715,7 +777,7 @@
# Tries to load the build props for all partitions with care_map, including
# system and vendor.
- for partition in PARTITIONS_WITH_CARE_MAP:
+ for partition in PARTITIONS_WITH_BUILD_PROP:
partition_prop = "{}.build.prop".format(partition)
d[partition_prop] = PartitionBuildProps.FromInputFile(
input_file, partition)
@@ -725,14 +787,18 @@
# hash / hashtree footers.
if d.get("avb_enable") == "true":
build_info = BuildInfo(d)
- for partition in PARTITIONS_WITH_CARE_MAP:
+ for partition in PARTITIONS_WITH_BUILD_PROP:
fingerprint = build_info.GetPartitionFingerprint(partition)
if fingerprint:
d["avb_{}_salt".format(partition)] = sha256(fingerprint.encode()).hexdigest()
-
+ try:
+ d["ab_partitions"] = read_helper("META/ab_partitions.txt").split("\n")
+ except KeyError:
+ logger.warning("Can't find META/ab_partitions.txt")
return d
+
def LoadListFromFile(file_path):
with open(file_path) as f:
return f.read().splitlines()
@@ -797,6 +863,39 @@
@staticmethod
def FromInputFile(input_file, name, placeholder_values=None):
"""Loads the build.prop file and builds the attributes."""
+
+ if name == "boot":
+ data = PartitionBuildProps._ReadBootPropFile(input_file)
+ else:
+ data = PartitionBuildProps._ReadPartitionPropFile(input_file, name)
+
+ props = PartitionBuildProps(input_file, name, placeholder_values)
+ props._LoadBuildProp(data)
+ return props
+
+ @staticmethod
+ def _ReadBootPropFile(input_file):
+ """
+ Read build.prop for boot image from input_file.
+ Return empty string if not found.
+ """
+ try:
+ boot_img = ExtractFromInputFile(input_file, 'IMAGES/boot.img')
+ except KeyError:
+ logger.warning('Failed to read IMAGES/boot.img')
+ return ''
+ prop_file = GetBootImageBuildProp(boot_img)
+ if prop_file is None:
+ return ''
+    with open(prop_file) as f:
+      return f.read()
+
+ @staticmethod
+ def _ReadPartitionPropFile(input_file, name):
+ """
+ Read build.prop for name from input_file.
+ Return empty string if not found.
+ """
data = ''
for prop_file in ['{}/etc/build.prop'.format(name.upper()),
'{}/build.prop'.format(name.upper())]:
@@ -805,9 +904,15 @@
break
except KeyError:
logger.warning('Failed to read %s', prop_file)
+ return data
- props = PartitionBuildProps(input_file, name, placeholder_values)
- props._LoadBuildProp(data)
+ @staticmethod
+ def FromBuildPropFile(name, build_prop_file):
+ """Constructs an instance from a build prop file."""
+
+ props = PartitionBuildProps("unknown", name)
+ with open(build_prop_file) as f:
+ props._LoadBuildProp(f.read())
return props
def _LoadBuildProp(self, data):
@@ -999,15 +1104,35 @@
Returns:
The merged dynamic partition info dictionary.
"""
- merged_dict = {}
+
+ def uniq_concat(a, b):
+ combined = set(a.split(" "))
+ combined.update(set(b.split(" ")))
+ combined = [item.strip() for item in combined if item.strip()]
+ return " ".join(sorted(combined))
+
+ if (framework_dict.get("use_dynamic_partitions") !=
+ "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
+ raise ValueError("Both dictionaries must have use_dynamic_partitions=true")
+
+ merged_dict = {"use_dynamic_partitions": "true"}
+
+ merged_dict["dynamic_partition_list"] = uniq_concat(
+ framework_dict.get("dynamic_partition_list", ""),
+ vendor_dict.get("dynamic_partition_list", ""))
+
+ # Super block devices are defined by the vendor dict.
+ if "super_block_devices" in vendor_dict:
+ merged_dict["super_block_devices"] = vendor_dict["super_block_devices"]
+ for block_device in merged_dict["super_block_devices"].split(" "):
+ key = "super_%s_device_size" % block_device
+ if key not in vendor_dict:
+ raise ValueError("Vendor dict does not contain required key %s." % key)
+ merged_dict[key] = vendor_dict[key]
+
# Partition groups and group sizes are defined by the vendor dict because
# these values may vary for each board that uses a shared system image.
merged_dict["super_partition_groups"] = vendor_dict["super_partition_groups"]
- framework_dynamic_partition_list = framework_dict.get(
- "dynamic_partition_list", "")
- vendor_dynamic_partition_list = vendor_dict.get("dynamic_partition_list", "")
- merged_dict["dynamic_partition_list"] = ("%s %s" % (
- framework_dynamic_partition_list, vendor_dynamic_partition_list)).strip()
for partition_group in merged_dict["super_partition_groups"].split(" "):
# Set the partition group's size using the value from the vendor dict.
key = "super_%s_group_size" % partition_group
@@ -1018,18 +1143,104 @@
# Set the partition group's partition list using a concatenation of the
# framework and vendor partition lists.
key = "super_%s_partition_list" % partition_group
- merged_dict[key] = (
- "%s %s" %
- (framework_dict.get(key, ""), vendor_dict.get(key, ""))).strip()
+ merged_dict[key] = uniq_concat(
+ framework_dict.get(key, ""), vendor_dict.get(key, ""))
- # Pick virtual ab related flags from vendor dict, if defined.
- if "virtual_ab" in vendor_dict.keys():
- merged_dict["virtual_ab"] = vendor_dict["virtual_ab"]
- if "virtual_ab_retrofit" in vendor_dict.keys():
- merged_dict["virtual_ab_retrofit"] = vendor_dict["virtual_ab_retrofit"]
+ # Various other flags should be copied from the vendor dict, if defined.
+ for key in ("virtual_ab", "virtual_ab_retrofit", "lpmake",
+ "super_metadata_device", "super_partition_error_limit",
+ "super_partition_size"):
+ if key in vendor_dict.keys():
+ merged_dict[key] = vendor_dict[key]
+
return merged_dict
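A small worked example (made-up dict values) of the merge semantics above: partition lists are de-duplicated and sorted by uniq_concat, while group sizes, block devices, and virtual A/B flags come from the vendor dict.

framework = {
    'use_dynamic_partitions': 'true',
    'dynamic_partition_list': 'system system_ext product',
    'super_partition_groups': 'group_a',
    'super_group_a_partition_list': 'system system_ext product',
}
vendor = {
    'use_dynamic_partitions': 'true',
    'dynamic_partition_list': 'vendor product',
    'super_partition_groups': 'group_a',
    'super_group_a_group_size': '4294967296',
    'super_group_a_partition_list': 'vendor product',
    'virtual_ab': 'true',
}
merged = MergeDynamicPartitionInfoDicts(framework, vendor)
# merged['dynamic_partition_list']       == 'product system system_ext vendor'
# merged['super_group_a_partition_list'] == 'product system system_ext vendor'
# merged['super_group_a_group_size']     == '4294967296'
# merged['virtual_ab']                   == 'true'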
+def PartitionMapFromTargetFiles(target_files_dir):
+ """Builds a map from partition -> path within an extracted target files directory."""
+ # Keep possible_subdirs in sync with build/make/core/board_config.mk.
+ possible_subdirs = {
+ "system": ["SYSTEM"],
+ "vendor": ["VENDOR", "SYSTEM/vendor"],
+ "product": ["PRODUCT", "SYSTEM/product"],
+ "system_ext": ["SYSTEM_EXT", "SYSTEM/system_ext"],
+ "odm": ["ODM", "VENDOR/odm", "SYSTEM/vendor/odm"],
+ "vendor_dlkm": [
+ "VENDOR_DLKM", "VENDOR/vendor_dlkm", "SYSTEM/vendor/vendor_dlkm"
+ ],
+ "odm_dlkm": ["ODM_DLKM", "VENDOR/odm_dlkm", "SYSTEM/vendor/odm_dlkm"],
+ }
+ partition_map = {}
+ for partition, subdirs in possible_subdirs.items():
+ for subdir in subdirs:
+ if os.path.exists(os.path.join(target_files_dir, subdir)):
+ partition_map[partition] = subdir
+ break
+ return partition_map
+
+
+def SharedUidPartitionViolations(uid_dict, partition_groups):
+ """Checks for APK sharedUserIds that cross partition group boundaries.
+
+ This uses a single or merged build's shareduid_violation_modules.json
+ output file, as generated by find_shareduid_violation.py or
+ core/tasks/find-shareduid-violation.mk.
+
+ An error is defined as a sharedUserId that is found in a set of partitions
+  that spans more than one partition group.
+
+ Args:
+ uid_dict: A dictionary created by using the standard json module to read a
+ complete shareduid_violation_modules.json file.
+ partition_groups: A list of groups, where each group is a list of
+ partitions.
+
+ Returns:
+ A list of error messages.
+ """
+ errors = []
+ for uid, partitions in uid_dict.items():
+ found_in_groups = [
+ group for group in partition_groups
+ if set(partitions.keys()) & set(group)
+ ]
+ if len(found_in_groups) > 1:
+ errors.append(
+ "APK sharedUserId \"%s\" found across partition groups in partitions \"%s\""
+ % (uid, ",".join(sorted(partitions.keys()))))
+ return errors
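A minimal sketch with fabricated data, in the uid -> partition -> APK-list shape of shareduid_violation_modules.json, showing how a cross-group violation is reported:

uid_dict = {
    'android.uid.example': {  # hypothetical sharedUserId
        'system': ['Foo.apk'],
        'vendor': ['Bar.apk'],
    },
}
errors = SharedUidPartitionViolations(
    uid_dict,
    [['system', 'system_ext', 'product'], ['vendor', 'odm']])
# errors == ['APK sharedUserId "android.uid.example" found across partition '
#            'groups in partitions "system,vendor"']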
+
+
+def RunHostInitVerifier(product_out, partition_map):
+ """Runs host_init_verifier on the init rc files within partitions.
+
+ host_init_verifier searches the etc/init path within each partition.
+
+ Args:
+ product_out: PRODUCT_OUT directory, containing partition directories.
+ partition_map: A map of partition name -> relative path within product_out.
+ """
+ allowed_partitions = ("system", "system_ext", "product", "vendor", "odm")
+ cmd = ["host_init_verifier"]
+ for partition, path in partition_map.items():
+ if partition not in allowed_partitions:
+ raise ExternalError("Unable to call host_init_verifier for partition %s" %
+ partition)
+ cmd.extend(["--out_%s" % partition, os.path.join(product_out, path)])
+ # Add --property-contexts if the file exists on the partition.
+ property_contexts = "%s_property_contexts" % (
+ "plat" if partition == "system" else partition)
+ property_contexts_path = os.path.join(product_out, path, "etc", "selinux",
+ property_contexts)
+ if os.path.exists(property_contexts_path):
+ cmd.append("--property-contexts=%s" % property_contexts_path)
+ # Add the passwd file if the file exists on the partition.
+ passwd_path = os.path.join(product_out, path, "etc", "passwd")
+ if os.path.exists(passwd_path):
+ cmd.extend(["-p", passwd_path])
+ return RunAndCheckOutput(cmd)
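For illustration, with a hypothetical PRODUCT_OUT of /tmp/out and a two-entry partition map, the call assembles roughly the command shown in the comments (the --property-contexts and -p flags are only added when those files exist on the partition):

RunHostInitVerifier(
    product_out='/tmp/out',
    partition_map={'system': 'SYSTEM', 'vendor': 'VENDOR'})
# Builds and runs a command along the lines of:
#   host_init_verifier --out_system /tmp/out/SYSTEM --out_vendor /tmp/out/VENDOR
# plus --property-contexts=.../plat_property_contexts for system (or
# .../<partition>_property_contexts for the others) and -p .../etc/passwd,
# for whichever of those files exist under each partition directory.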
+
+
def AppendAVBSigningArgs(cmd, partition):
"""Append signing arguments for avbtool."""
# e.g., "--key path/to/signing_key --algorithm SHA256_RSA4096"
@@ -1253,23 +1464,27 @@
for building the requested image.
"""
+ if info_dict is None:
+ info_dict = OPTIONS.info_dict
+
# "boot" or "recovery", without extension.
partition_name = os.path.basename(sourcedir).lower()
+ kernel = None
if partition_name == "recovery":
- kernel = "kernel"
+ if info_dict.get("exclude_kernel_from_recovery_image") == "true":
+ logger.info("Excluded kernel binary from recovery image.")
+ else:
+ kernel = "kernel"
else:
kernel = image_name.replace("boot", "kernel")
kernel = kernel.replace(".img", "")
- if not os.access(os.path.join(sourcedir, kernel), os.F_OK):
+ if kernel and not os.access(os.path.join(sourcedir, kernel), os.F_OK):
return None
if has_ramdisk and not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK):
return None
- if info_dict is None:
- info_dict = OPTIONS.info_dict
-
img = tempfile.NamedTemporaryFile()
if has_ramdisk:
@@ -1279,7 +1494,9 @@
# use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
- cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, kernel)]
+ cmd = [mkbootimg]
+ if kernel:
+ cmd += ["--kernel", os.path.join(sourcedir, kernel)]
fn = os.path.join(sourcedir, "second")
if os.access(fn, os.F_OK):
@@ -1497,6 +1714,24 @@
cmd.extend(["--vendor_ramdisk", ramdisk_img.name])
cmd.extend(["--vendor_boot", img.name])
+ ramdisk_fragment_imgs = []
+ fn = os.path.join(sourcedir, "vendor_ramdisk_fragments")
+ if os.access(fn, os.F_OK):
+ ramdisk_fragments = shlex.split(open(fn).read().rstrip("\n"))
+ for ramdisk_fragment in ramdisk_fragments:
+ fn = os.path.join(sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment, "mkbootimg_args")
+ cmd.extend(shlex.split(open(fn).read().rstrip("\n")))
+ fn = os.path.join(sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment, "prebuilt_ramdisk")
+ # Use prebuilt image if found, else create ramdisk from supplied files.
+ if os.access(fn, os.F_OK):
+ ramdisk_fragment_pathname = fn
+ else:
+ ramdisk_fragment_root = os.path.join(sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment)
+ ramdisk_fragment_img = _MakeRamdisk(ramdisk_fragment_root, lz4_ramdisks=use_lz4)
+ ramdisk_fragment_imgs.append(ramdisk_fragment_img)
+ ramdisk_fragment_pathname = ramdisk_fragment_img.name
+ cmd.extend(["--vendor_ramdisk_fragment", ramdisk_fragment_pathname])
+
RunAndCheckOutput(cmd)
# AVB: if enabled, calculate and add hash.
@@ -1514,6 +1749,8 @@
img.seek(os.SEEK_SET, 0)
data = img.read()
+ for f in ramdisk_fragment_imgs:
+ f.close()
ramdisk_img.close()
img.close()
@@ -1564,7 +1801,7 @@
cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
if patterns is not None:
# Filter out non-matching patterns. unzip will complain otherwise.
- with zipfile.ZipFile(filename) as input_zip:
+ with zipfile.ZipFile(filename, allowZip64=True) as input_zip:
names = input_zip.namelist()
filtered = [
pattern for pattern in patterns if fnmatch.filter(names, pattern)]
@@ -3176,8 +3413,8 @@
'recovery_sha1': recovery_img.sha1,
'boot_type': boot_type,
'boot_device': boot_device + '$(getprop ro.boot.slot_suffix)',
- 'recovery_type': recovery_type + '$(getprop ro.boot.slot_suffix)',
- 'recovery_device': recovery_device,
+ 'recovery_type': recovery_type,
+ 'recovery_device': recovery_device + '$(getprop ro.boot.slot_suffix)',
'bonus_args': bonus_args}
# The install script location moved from /system/etc to /system/bin in the L
@@ -3406,3 +3643,75 @@
comment('Move partition %s from default to %s' %
(p, u.tgt_group))
append('move %s %s' % (p, u.tgt_group))
+
+
+def GetBootImageBuildProp(boot_img):
+ """
+ Get build.prop from ramdisk within the boot image
+
+ Args:
+ boot_img: the boot image file. Ramdisk must be compressed with lz4 format.
+
+ Return:
+ An extracted file that stores properties in the boot image.
+ """
+ tmp_dir = MakeTempDir('boot_', suffix='.img')
+ try:
+ RunAndCheckOutput(['unpack_bootimg', '--boot_img', boot_img, '--out', tmp_dir])
+ ramdisk = os.path.join(tmp_dir, 'ramdisk')
+ if not os.path.isfile(ramdisk):
+      logger.warning('Unable to get boot image build props: no ramdisk in boot')
+ return None
+ uncompressed_ramdisk = os.path.join(tmp_dir, 'uncompressed_ramdisk')
+ RunAndCheckOutput(['lz4', '-d', ramdisk, uncompressed_ramdisk])
+
+ abs_uncompressed_ramdisk = os.path.abspath(uncompressed_ramdisk)
+ extracted_ramdisk = MakeTempDir('extracted_ramdisk')
+ # Use "toybox cpio" instead of "cpio" because the latter invokes cpio from
+ # the host environment.
+ RunAndCheckOutput(['toybox', 'cpio', '-F', abs_uncompressed_ramdisk, '-i'],
+ cwd=extracted_ramdisk)
+
+ for search_path in RAMDISK_BUILD_PROP_REL_PATHS:
+ prop_file = os.path.join(extracted_ramdisk, search_path)
+ if os.path.isfile(prop_file):
+ return prop_file
+      logger.warning('Unable to get boot image build props: no %s in ramdisk', search_path)
+
+ return None
+
+ except ExternalError as e:
+ logger.warning('Unable to get boot image build props: %s', e)
+ return None
+
+
+def GetBootImageTimestamp(boot_img):
+ """
+ Get timestamp from ramdisk within the boot image
+
+ Args:
+ boot_img: the boot image file. Ramdisk must be compressed with lz4 format.
+
+ Return:
+    An integer that corresponds to the timestamp of the boot image, or None
+    if the file has an unknown format. Raises an exception if an unexpected
+    error occurs.
+ """
+ prop_file = GetBootImageBuildProp(boot_img)
+ if not prop_file:
+ return None
+
+ props = PartitionBuildProps.FromBuildPropFile('boot', prop_file)
+ if props is None:
+ return None
+
+ try:
+ timestamp = props.GetProp('ro.bootimage.build.date.utc')
+ if timestamp:
+ return int(timestamp)
+ logger.warning('Unable to get boot image timestamp: ro.bootimage.build.date.utc is undefined')
+ return None
+
+ except ExternalError as e:
+ logger.warning('Unable to get boot image timestamp: %s', e)
+ return None
diff --git a/tools/releasetools/find_shareduid_violation.py b/tools/releasetools/find_shareduid_violation.py
new file mode 100755
index 0000000..35acde3
--- /dev/null
+++ b/tools/releasetools/find_shareduid_violation.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Find APK sharedUserId violators.
+
+Usage: find_shareduid_violation [args]
+
+ --product_out
+ PRODUCT_OUT directory
+
+ --aapt
+ Path to aapt or aapt2
+
+ --copy_out_system
+ TARGET_COPY_OUT_SYSTEM
+
+  --copy_out_vendor
+ TARGET_COPY_OUT_VENDOR
+
+ --copy_out_product
+ TARGET_COPY_OUT_PRODUCT
+
+ --copy_out_system_ext
+ TARGET_COPY_OUT_SYSTEM_EXT
+"""
+
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+
+from collections import defaultdict
+from glob import glob
+
+import common
+
+logger = logging.getLogger(__name__)
+
+OPTIONS = common.OPTIONS
+OPTIONS.product_out = os.environ.get("PRODUCT_OUT")
+OPTIONS.aapt = "aapt2"
+OPTIONS.copy_out_system = "system"
+OPTIONS.copy_out_vendor = "vendor"
+OPTIONS.copy_out_product = "product"
+OPTIONS.copy_out_system_ext = "system_ext"
+
+
+def execute(cmd):
+ p = subprocess.Popen(
+ cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = map(lambda b: b.decode("utf-8"), p.communicate())
+ return p.returncode == 0, out, err
+
+
+def make_aapt_cmds(aapt, apk):
+ return [
+ aapt + " dump " + apk + " --file AndroidManifest.xml",
+ aapt + " dump xmltree " + apk + " --file AndroidManifest.xml"
+ ]
+
+
+def extract_shared_uid(aapt, apk):
+ for cmd in make_aapt_cmds(aapt, apk):
+ success, manifest, error_msg = execute(cmd)
+ if success:
+ break
+ else:
+ logger.error(error_msg)
+ sys.exit()
+
+ pattern = re.compile(r"sharedUserId.*=\"([^\"]*)")
+
+ for line in manifest.split("\n"):
+ match = pattern.search(line)
+ if match:
+ return match.group(1)
+ return None
+
+
+def FindShareduidViolation(product_out, partition_map, aapt="aapt2"):
+ """Find sharedUserId violators in the given partitions.
+
+ Args:
+ product_out: The base directory containing the partition directories.
+ partition_map: A map of partition name -> directory name.
+ aapt: The name of the aapt binary. Defaults to aapt2.
+
+ Returns:
+ A string containing a JSON object describing the shared UIDs.
+ """
+ shareduid_app_dict = defaultdict(lambda: defaultdict(list))
+
+ for part, location in partition_map.items():
+ for f in glob(os.path.join(product_out, location, "*", "*", "*.apk")):
+ apk_file = os.path.basename(f)
+ shared_uid = extract_shared_uid(aapt, f)
+
+ if shared_uid is None:
+ continue
+ shareduid_app_dict[shared_uid][part].append(apk_file)
+
+ # Only output sharedUserId values that appear in >1 partition.
+ output = {}
+ for uid, partitions in shareduid_app_dict.items():
+ if len(partitions) > 1:
+ output[uid] = shareduid_app_dict[uid]
+
+ return json.dumps(output, indent=2, sort_keys=True)
+
+
+def main():
+ common.InitLogging()
+
+ def option_handler(o, a):
+ if o == "--product_out":
+ OPTIONS.product_out = a
+ elif o == "--aapt":
+ OPTIONS.aapt = a
+ elif o == "--copy_out_system":
+ OPTIONS.copy_out_system = a
+ elif o == "--copy_out_vendor":
+ OPTIONS.copy_out_vendor = a
+ elif o == "--copy_out_product":
+ OPTIONS.copy_out_product = a
+ elif o == "--copy_out_system_ext":
+ OPTIONS.copy_out_system_ext = a
+ else:
+ return False
+ return True
+
+ args = common.ParseOptions(
+ sys.argv[1:],
+ __doc__,
+ extra_long_opts=[
+ "product_out=",
+ "aapt=",
+ "copy_out_system=",
+ "copy_out_vendor=",
+ "copy_out_product=",
+ "copy_out_system_ext=",
+ ],
+ extra_option_handler=option_handler)
+
+ if args:
+ common.Usage(__doc__)
+ sys.exit(1)
+
+ partition_map = {
+ "system": OPTIONS.copy_out_system,
+ "vendor": OPTIONS.copy_out_vendor,
+ "product": OPTIONS.copy_out_product,
+ "system_ext": OPTIONS.copy_out_system_ext,
+ }
+
+ print(
+ FindShareduidViolation(OPTIONS.product_out, partition_map, OPTIONS.aapt))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index ab38d0d..5409194 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -58,6 +58,7 @@
OPTIONS.additional_entries = []
OPTIONS.bootable_only = False
OPTIONS.put_super = None
+OPTIONS.put_bootloader = None
OPTIONS.dynamic_partition_list = None
OPTIONS.super_device_list = None
OPTIONS.retrofit_dap = None
@@ -75,6 +76,7 @@
info = OPTIONS.info_dict = common.LoadInfoDict(input_zip)
OPTIONS.put_super = info.get('super_image_in_update_package') == 'true'
+ OPTIONS.put_bootloader = info.get('bootloader_in_update_package') == 'true'
OPTIONS.dynamic_partition_list = info.get('dynamic_partition_list',
'').strip().split()
OPTIONS.super_device_list = info.get('super_block_devices',
@@ -122,9 +124,11 @@
for image_path in [name for name in namelist if name.startswith('IMAGES/')]:
image = os.path.basename(image_path)
- if OPTIONS.bootable_only and image not in ('boot.img', 'recovery.img'):
+    if OPTIONS.bootable_only and image not in ('boot.img', 'recovery.img', 'bootloader'):
continue
- if not image.endswith('.img'):
+ if not image.endswith('.img') and image != 'bootloader':
+ continue
+ if image == 'bootloader' and not OPTIONS.put_bootloader:
continue
# Filter out super_empty and the images that are already in super partition.
if OPTIONS.put_super:
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 45532f5..9360d7b 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -16,11 +16,15 @@
#
"""This script merges two partial target files packages.
-One package contains framework files, and the other contains vendor files.
-It produces a complete target files package that can be used to generate an
-OTA package.
+One input package contains framework files, and the other contains vendor files.
-Usage: merge_target_files.py [args]
+This script produces a complete, merged target files package:
+ - This package can be used to generate a flashable IMG package.
+ See --output-img.
+ - This package can be used to generate an OTA package. See --output-ota.
+ - The merged package is checked for compatibility between the two inputs.
+
+Usage: merge_target_files [args]
--framework-target-files framework-target-files-zip-archive
The input target files package containing framework bits. This is a zip
@@ -70,6 +74,10 @@
--rebuild_recovery
Deprecated; does nothing.
+ --allow-duplicate-apkapex-keys
+ If provided, duplicate APK/APEX keys are ignored and the value from the
+ framework is used.
+
--keep-tmp
      Keep temporary files for debugging purposes.
"""
@@ -77,6 +85,7 @@
from __future__ import print_function
import fnmatch
+import json
import logging
import os
import re
@@ -90,6 +99,7 @@
import check_target_files_vintf
import common
import img_from_target_files
+import find_shareduid_violation
import ota_from_target_files
logger = logging.getLogger(__name__)
@@ -110,6 +120,8 @@
OPTIONS.output_super_empty = None
# TODO(b/132730255): Remove this option.
OPTIONS.rebuild_recovery = False
+# TODO(b/150582573): Remove this option.
+OPTIONS.allow_duplicate_apkapex_keys = False
OPTIONS.keep_tmp = False
# In an item list (framework or vendor), we may see entries that select whole
@@ -149,16 +161,9 @@
'SYSTEM/*',
)
-# FRAMEWORK_EXTRACT_SPECIAL_ITEM_LIST is a list of items to extract from the
-# partial framework target files package that need some special processing, such
-# as some sort of combination with items from the partial vendor target files
-# package.
-
-FRAMEWORK_EXTRACT_SPECIAL_ITEM_LIST = ('META/*',)
-
# DEFAULT_FRAMEWORK_MISC_INFO_KEYS is a list of keys to obtain from the
-# framework instance of META/misc_info.txt. The remaining keys from the
-# vendor instance.
+# framework instance of META/misc_info.txt. The remaining keys should come
+# from the vendor instance.
DEFAULT_FRAMEWORK_MISC_INFO_KEYS = (
'avb_system_hashtree_enable',
@@ -197,17 +202,8 @@
'PREBUILT_IMAGES/*',
'RADIO/*',
'VENDOR/*',
- 'VENDOR_DLKM/*',
- 'ODM_DLKM/*',
)
-# VENDOR_EXTRACT_SPECIAL_ITEM_LIST is a list of items to extract from the
-# partial vendor target files package that need some special processing, such as
-# some sort of combination with items from the partial framework target files
-# package.
-
-VENDOR_EXTRACT_SPECIAL_ITEM_LIST = ('META/*',)
-
# The merge config lists should not attempt to extract items from both
# builds for any of the following partitions. The partitions in
# SINGLE_BUILD_PARTITIONS should come entirely from a single build (either
@@ -316,8 +312,8 @@
framework_item_list: The list of items to extract from the partial framework
target files package as is.
framework_misc_info_keys: A list of keys to obtain from the framework
- instance of META/misc_info.txt. The remaining keys from the vendor
- instance.
+ instance of META/misc_info.txt. The remaining keys should come from the
+ vendor instance.
vendor_item_list: The list of items to extract from the partial vendor
target files package as is.
@@ -342,10 +338,15 @@
'this script.')
has_error = True
+ # Check that partitions only come from one input.
for partition in SINGLE_BUILD_PARTITIONS:
- in_framework = any(
- item.startswith(partition) for item in framework_item_list)
- in_vendor = any(item.startswith(partition) for item in vendor_item_list)
+ image_path = 'IMAGES/{}.img'.format(partition.lower().replace('/', ''))
+ in_framework = (
+ any(item.startswith(partition) for item in framework_item_list) or
+ image_path in framework_item_list)
+ in_vendor = (
+ any(item.startswith(partition) for item in vendor_item_list) or
+ image_path in vendor_item_list)
if in_framework and in_vendor:
logger.error(
'Cannot extract items from %s for both the framework and vendor'
@@ -371,8 +372,8 @@
framework directory and the vendor directory, placing the merged result in the
  output directory. The precondition is that the files are already extracted.
The post condition is that the output META/ab_partitions.txt contains the
- merged content. The format for each ab_partitions.txt a one partition name per
- line. The output file contains the union of the parition names.
+ merged content. The format for each ab_partitions.txt is one partition name
+ per line. The output file contains the union of the partition names.
Args:
framework_target_files_temp_dir: The name of a directory containing the
@@ -425,8 +426,8 @@
create the output target files package after all the special cases are
processed.
framework_misc_info_keys: A list of keys to obtain from the framework
- instance of META/misc_info.txt. The remaining keys from the vendor
- instance.
+ instance of META/misc_info.txt. The remaining keys should come from the
+ vendor instance.
"""
misc_info_path = ['META', 'misc_info.txt']
@@ -455,6 +456,12 @@
# false in the partial builds to prevent duplicate building of super.img.
merged_dict['build_super_partition'] = 'true'
+ # If AVB is enabled then ensure that we build vbmeta.img.
+ # Partial builds with AVB enabled may set PRODUCT_BUILD_VBMETA_IMAGE=false to
+ # skip building an incomplete vbmeta.img.
+ if merged_dict.get('avb_enable') == 'true':
+ merged_dict['avb_building_vbmeta_image'] = 'true'
+
# Replace <image>_selinux_fc values with framework or vendor file_contexts.bin
# depending on which dictionary the key came from.
# Only the file basename is required because all selinux_fc properties are
@@ -528,6 +535,7 @@
Args:
item_list: A list of items in a target files package.
+
Returns:
A set of partitions extracted from the list of items.
"""
@@ -549,7 +557,6 @@
output_target_files_dir,
framework_partition_set,
vendor_partition_set, file_name):
-
"""Performs special processing for META/apexkeys.txt or META/apkcerts.txt.
This function merges the contents of the META/apexkeys.txt or
@@ -599,7 +606,12 @@
if partition_tag in partition_set:
if key in merged_dict:
- raise ValueError('Duplicate key %s' % key)
+ if OPTIONS.allow_duplicate_apkapex_keys:
+ # TODO(b/150582573) Always raise on duplicates.
+ logger.warning('Duplicate key %s' % key)
+ continue
+ else:
+ raise ValueError('Duplicate key %s' % key)
merged_dict[key] = value
@@ -649,8 +661,7 @@
def process_special_cases(framework_target_files_temp_dir,
vendor_target_files_temp_dir,
output_target_files_temp_dir,
- framework_misc_info_keys,
- framework_partition_set,
+ framework_misc_info_keys, framework_partition_set,
vendor_partition_set):
"""Performs special-case processing for certain target files items.
@@ -666,8 +677,8 @@
create the output target files package after all the special cases are
processed.
framework_misc_info_keys: A list of keys to obtain from the framework
- instance of META/misc_info.txt. The remaining keys from the vendor
- instance.
+ instance of META/misc_info.txt. The remaining keys should come from the
+ vendor instance.
framework_partition_set: Partitions that are considered framework
partitions. Used to filter apexkeys.txt and apkcerts.txt.
vendor_partition_set: Partitions that are considered vendor partitions. Used
@@ -713,26 +724,6 @@
file_name='apexkeys.txt')
-def files_from_path(target_path, extra_args=None):
- """Gets files under given path.
-
- Get (sub)files from given target path and return sorted list.
-
- Args:
- target_path: Target path to get subfiles.
- extra_args: List of extra argument for find command. Optional.
-
- Returns:
- Sorted files and directories list.
- """
-
- find_command = ['find', target_path] + (extra_args or [])
- find_process = common.Run(find_command, stdout=subprocess.PIPE, verbose=False)
- return common.RunAndCheckOutput(['sort'],
- stdin=find_process.stdout,
- verbose=False)
-
-
def create_merged_package(temp_dir, framework_target_files, framework_item_list,
vendor_target_files, vendor_item_list,
framework_misc_info_keys, rebuild_recovery):
@@ -754,64 +745,42 @@
target files package as is, meaning these items will land in the output
target files package exactly as they appear in the input partial vendor
target files package.
- framework_misc_info_keys: The list of keys to obtain from the framework
- instance of META/misc_info.txt. The remaining keys from the vendor
- instance.
+ framework_misc_info_keys: A list of keys to obtain from the framework
+ instance of META/misc_info.txt. The remaining keys should come from the
+ vendor instance.
rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
devices and write it to the system image.
Returns:
Path to merged package under temp directory.
"""
+ # Extract "as is" items from the input framework and vendor partial target
+ # files packages directly into the output temporary directory, since these items
+ # do not need special case processing.
- # Create directory names that we'll use when we extract files from framework,
- # and vendor, and for zipping the final output.
-
- framework_target_files_temp_dir = os.path.join(temp_dir, 'framework')
- vendor_target_files_temp_dir = os.path.join(temp_dir, 'vendor')
output_target_files_temp_dir = os.path.join(temp_dir, 'output')
-
- # Extract "as is" items from the input framework partial target files package.
- # We extract them directly into the output temporary directory since the
- # items do not need special case processing.
-
extract_items(
target_files=framework_target_files,
target_files_temp_dir=output_target_files_temp_dir,
extract_item_list=framework_item_list)
-
- # Extract "as is" items from the input vendor partial target files package. We
- # extract them directly into the output temporary directory since the items
- # do not need special case processing.
-
extract_items(
target_files=vendor_target_files,
target_files_temp_dir=output_target_files_temp_dir,
extract_item_list=vendor_item_list)
- # Extract "special" items from the input framework partial target files
- # package. We extract these items to different directory since they require
- # special processing before they will end up in the output directory.
-
+ # Perform special case processing on META/* items.
+ # After this function completes successfully, all the files we need to create
+ # the output target files package are in place.
+ framework_target_files_temp_dir = os.path.join(temp_dir, 'framework')
+ vendor_target_files_temp_dir = os.path.join(temp_dir, 'vendor')
extract_items(
target_files=framework_target_files,
target_files_temp_dir=framework_target_files_temp_dir,
- extract_item_list=FRAMEWORK_EXTRACT_SPECIAL_ITEM_LIST)
-
- # Extract "special" items from the input vendor partial target files package.
- # We extract these items to different directory since they require special
- # processing before they will end up in the output directory.
-
+ extract_item_list=('META/*',))
extract_items(
target_files=vendor_target_files,
target_files_temp_dir=vendor_target_files_temp_dir,
- extract_item_list=VENDOR_EXTRACT_SPECIAL_ITEM_LIST)
-
- # Now that the temporary directories contain all the extracted files, perform
- # special case processing on any items that need it. After this function
- # completes successfully, all the files we need to create the output target
- # files package are in place.
-
+ extract_item_list=('META/*',))
process_special_cases(
framework_target_files_temp_dir=framework_target_files_temp_dir,
vendor_target_files_temp_dir=vendor_target_files_temp_dir,
@@ -837,8 +806,10 @@
# Regenerate IMAGES in the target directory.
- add_img_args = ['--verbose']
- add_img_args.append('--add_missing')
+ add_img_args = [
+ '--verbose',
+ '--add_missing',
+ ]
# TODO(b/132730255): Remove this if statement.
if rebuild_recovery:
add_img_args.append('--rebuild_recovery')
@@ -891,6 +862,15 @@
output_zip = os.path.abspath(output_file)
output_target_files_meta_dir = os.path.join(source_dir, 'META')
+ def files_from_path(target_path, extra_args=None):
+ """Gets files under the given path and return a sorted list."""
+ find_command = ['find', target_path] + (extra_args or [])
+ find_process = common.Run(
+ find_command, stdout=subprocess.PIPE, verbose=False)
+ return common.RunAndCheckOutput(['sort'],
+ stdin=find_process.stdout,
+ verbose=False)
+
meta_content = files_from_path(output_target_files_meta_dir)
other_content = files_from_path(
source_dir,
@@ -939,9 +919,9 @@
target files package as is, meaning these items will land in the output
target files package exactly as they appear in the input partial framework
target files package.
- framework_misc_info_keys: The list of keys to obtain from the framework
- instance of META/misc_info.txt. The remaining keys from the vendor
- instance.
+ framework_misc_info_keys: A list of keys to obtain from the framework
+ instance of META/misc_info.txt. The remaining keys should come from the
+ vendor instance.
vendor_target_files: The name of the zip archive containing the vendor
partial target files package.
vendor_item_list: The list of items to extract from the partial vendor
@@ -969,7 +949,44 @@
rebuild_recovery)
if not check_target_files_vintf.CheckVintf(output_target_files_temp_dir):
- raise RuntimeError("Incompatible VINTF metadata")
+ raise RuntimeError('Incompatible VINTF metadata')
+
+ partition_map = common.PartitionMapFromTargetFiles(
+ output_target_files_temp_dir)
+
+ # Generate and check for cross-partition violations of sharedUserId
+ # values in APKs. This requires the input target-files packages to contain
+ # *.apk files.
+ shareduid_violation_modules = os.path.join(
+ output_target_files_temp_dir, 'META', 'shareduid_violation_modules.json')
+ with open(shareduid_violation_modules, 'w') as f:
+ violation = find_shareduid_violation.FindShareduidViolation(
+ output_target_files_temp_dir, partition_map)
+
+ # Write the output to a file to enable debugging.
+ f.write(violation)
+
+ # Check for violations across the input builds' partition groups.
+ framework_partitions = item_list_to_partition_set(framework_item_list)
+ vendor_partitions = item_list_to_partition_set(vendor_item_list)
+ shareduid_errors = common.SharedUidPartitionViolations(
+ json.loads(violation), [framework_partitions, vendor_partitions])
+ if shareduid_errors:
+ for error in shareduid_errors:
+ logger.error(error)
+ raise ValueError('sharedUserId APK error. See %s' %
+ shareduid_violation_modules)
+
+ # Run host_init_verifier on the combined init rc files.
+ filtered_partitions = {
+ partition: path
+ for partition, path in partition_map.items()
+ # host_init_verifier checks only the following partitions:
+ if partition in ['system', 'system_ext', 'product', 'vendor', 'odm']
+ }
+ common.RunHostInitVerifier(
+ product_out=output_target_files_temp_dir,
+ partition_map=filtered_partitions)
generate_images(output_target_files_temp_dir, rebuild_recovery)
@@ -1077,8 +1094,10 @@
OPTIONS.output_img = a
elif o == '--output-super-empty':
OPTIONS.output_super_empty = a
- elif o == '--rebuild_recovery': # TODO(b/132730255): Warn
+ elif o == '--rebuild_recovery': # TODO(b/132730255): Warn
OPTIONS.rebuild_recovery = True
+ elif o == '--allow-duplicate-apkapex-keys':
+ OPTIONS.allow_duplicate_apkapex_keys = True
elif o == '--keep-tmp':
OPTIONS.keep_tmp = True
else:
@@ -1106,6 +1125,7 @@
'output-img=',
'output-super-empty=',
'rebuild_recovery',
+ 'allow-duplicate-apkapex-keys',
'keep-tmp',
],
extra_option_handler=option_handler)
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 3a87957..471ef25 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -276,7 +276,7 @@
script.SetProgress(1)
script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
- metadata["ota-required-cache"] = str(script.required_cache)
+ metadata.required_cache = script.required_cache
# We haven't written the metadata entry, which will be done in
# FinalizeMetadata.
@@ -530,7 +530,7 @@
script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
else:
script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
- metadata["ota-required-cache"] = str(script.required_cache)
+ metadata.required_cache = script.required_cache
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index f42974f..6b82d32 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -85,6 +85,13 @@
If not set, generates A/B package for A/B device and non-A/B package for
non-A/B device.
+ -o (--oem_settings) <main_file[,additional_files...]>
+    Comma-separated list of files used to specify the expected OEM-specific
+    properties on the OEM partition of the intended device. Multiple expected
+    values can be used by providing multiple files. Only the first dict will
+    be used to compute the fingerprint, while the rest will be used to assert
+    OEM-specific properties.
+
Non-A/B OTA specific options
-b (--binary) <file>
@@ -114,13 +121,6 @@
builds for an incremental package. This option is only meaningful when -i
is specified.
- -o (--oem_settings) <main_file[,additional_files...]>
- Comma seperated list of files used to specify the expected OEM-specific
- properties on the OEM partition of the intended device. Multiple expected
- values can be used by providing multiple files. Only the first dict will
- be used to compute fingerprint, while the rest will be used to assert
- OEM-specific properties.
-
--oem_no_mount
For devices with OEM-specific properties but without an OEM partition, do
not mount the OEM partition in the updater-script. This should be very
@@ -202,20 +202,33 @@
ones. Should only be used if caller knows it's safe to do so (e.g. all the
postinstall work is to dexopt apps and a data wipe will happen immediately
after). Only meaningful when generating A/B OTAs.
+
+ --partial "<PARTITION> [<PARTITION>[...]]"
+    Generate partial updates, overriding the ab_partitions list with the
+    given list.
+
+ --custom_image <custom_partition=custom_image>
+ Use the specified custom_image to update custom_partition when generating
+ an A/B OTA package. e.g. "--custom_image oem=oem.img --custom_image
+ cus=cus_test.img"
"""
from __future__ import print_function
import logging
import multiprocessing
+import os
import os.path
+import re
import shlex
import shutil
import struct
+import subprocess
import sys
import zipfile
import common
+import ota_utils
import target_files_diff
from check_target_files_vintf import CheckVintfIfTrebleEnabled
from non_ab_ota import GenerateNonAbOtaPackage
@@ -228,20 +241,16 @@
logger = logging.getLogger(__name__)
-OPTIONS = common.OPTIONS
-OPTIONS.package_key = None
-OPTIONS.incremental_source = None
+OPTIONS = ota_utils.OPTIONS
OPTIONS.verify = False
OPTIONS.patch_threshold = 0.95
OPTIONS.wipe_user_data = False
-OPTIONS.downgrade = False
OPTIONS.extra_script = None
OPTIONS.worker_threads = multiprocessing.cpu_count() // 2
if OPTIONS.worker_threads == 0:
OPTIONS.worker_threads = 1
OPTIONS.two_step = False
OPTIONS.include_secondary = False
-OPTIONS.no_signing = False
OPTIONS.block_based = True
OPTIONS.updater_binary = None
OPTIONS.oem_dicts = None
@@ -257,15 +266,12 @@
OPTIONS.payload_signer_args = []
OPTIONS.payload_signer_maximum_signature_size = None
OPTIONS.extracted_input = None
-OPTIONS.key_passwords = []
OPTIONS.skip_postinstall = False
-OPTIONS.retrofit_dynamic_partitions = False
OPTIONS.skip_compatibility_check = False
-OPTIONS.output_metadata_path = None
OPTIONS.disable_fec_computation = False
-OPTIONS.force_non_ab = False
-OPTIONS.boot_variable_file = None
-
+OPTIONS.disable_verity_computation = False
+OPTIONS.partial = None
+OPTIONS.custom_images = {}
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -397,6 +403,8 @@
cmd.extend(["--source_image", source_file])
if OPTIONS.disable_fec_computation:
cmd.extend(["--disable_fec_computation", "true"])
+ if OPTIONS.disable_verity_computation:
+ cmd.extend(["--disable_verity_computation", "true"])
cmd.extend(additional_args)
self._Run(cmd)
@@ -601,6 +609,48 @@
return (payload_offset, metadata_total)
+def UpdatesInfoForSpecialUpdates(content, partitions_filter,
+ delete_keys=None):
+ """ Updates info file for secondary payload generation, partial update, etc.
+
+ Scan each line in the info file, and remove the unwanted partitions from
+ the dynamic partition list in the related properties. e.g.
+ "super_google_dynamic_partitions_partition_list=system vendor product"
+ will become "super_google_dynamic_partitions_partition_list=system".
+
+ Args:
+ content: The content of the input info file. e.g. misc_info.txt.
+ partitions_filter: A function to filter the desired partitions from a given
+ list
+ delete_keys: A list of keys to delete in the info file
+
+ Returns:
+ A string of the updated info content.
+ """
+
+ output_list = []
+ # The suffix in partition_list variables that follows the name of the
+ # partition group.
+ list_suffix = 'partition_list'
+ for line in content.splitlines():
+ if line.startswith('#') or '=' not in line:
+ output_list.append(line)
+ continue
+ key, value = line.strip().split('=', 1)
+
+ if delete_keys and key in delete_keys:
+ pass
+ elif key.endswith(list_suffix):
+ partitions = value.split()
+      # TODO: for partial updates, partitions in the same group must either
+      # all be updated or all be omitted.
+ partitions = filter(partitions_filter, partitions)
+ output_list.append('{}={}'.format(key, ' '.join(partitions)))
+ else:
+ output_list.append(line)
+ return '\n'.join(output_list)
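A quick sketch of the trimming described in the docstring above, with made-up misc_info.txt content:

content = '\n'.join([
    'virtual_ab=true',
    'dynamic_partition_list=system vendor product',
    'super_google_dynamic_partitions_partition_list=system vendor product',
])
trimmed = UpdatesInfoForSpecialUpdates(
    content, lambda p: p == 'system', delete_keys=['virtual_ab'])
# trimmed == ('dynamic_partition_list=system\n'
#             'super_google_dynamic_partitions_partition_list=system')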
+
+
def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
"""Returns a target-files.zip file for generating secondary payload.
@@ -622,49 +672,20 @@
"""
def GetInfoForSecondaryImages(info_file):
- """Updates info file for secondary payload generation.
-
- Scan each line in the info file, and remove the unwanted partitions from
- the dynamic partition list in the related properties. e.g.
- "super_google_dynamic_partitions_partition_list=system vendor product"
- will become "super_google_dynamic_partitions_partition_list=system".
-
- Args:
- info_file: The input info file. e.g. misc_info.txt.
-
- Returns:
- A string of the updated info content.
- """
-
- output_list = []
+ """Updates info file for secondary payload generation."""
with open(info_file) as f:
- lines = f.read().splitlines()
-
- # The suffix in partition_list variables that follows the name of the
- # partition group.
- LIST_SUFFIX = 'partition_list'
- for line in lines:
- if line.startswith('#') or '=' not in line:
- output_list.append(line)
- continue
- key, value = line.strip().split('=', 1)
- if key == 'dynamic_partition_list' or key.endswith(LIST_SUFFIX):
- partitions = value.split()
- partitions = [partition for partition in partitions if partition
- not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
- output_list.append('{}={}'.format(key, ' '.join(partitions)))
- elif key in ['virtual_ab', "virtual_ab_retrofit"]:
- # Remove virtual_ab flag from secondary payload so that OTA client
- # don't use snapshots for secondary update
- pass
- else:
- output_list.append(line)
- return '\n'.join(output_list)
+ content = f.read()
+    # Remove the virtual_ab flags from the secondary payload so that the OTA
+    # client doesn't use snapshots for the secondary update.
+ delete_keys = ['virtual_ab', "virtual_ab_retrofit"]
+ return UpdatesInfoForSpecialUpdates(
+ content, lambda p: p not in SECONDARY_PAYLOAD_SKIPPED_IMAGES,
+ delete_keys)
target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
infolist = input_zip.infolist()
input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
@@ -727,7 +748,7 @@
The filename of target-files.zip that doesn't contain postinstall config.
"""
# We should only make a copy if postinstall_config entry exists.
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
if POSTINSTALL_CONFIG not in input_zip.namelist():
return input_file
@@ -736,6 +757,80 @@
common.ZipDelete(target_file, POSTINSTALL_CONFIG)
return target_file
+def ParseInfoDict(target_file_path):
+ with zipfile.ZipFile(target_file_path, 'r', allowZip64=True) as zfp:
+ return common.LoadInfoDict(zfp)
+
+def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions):
+ """Returns a target-files.zip for partial ota update package generation.
+
+ This function modifies ab_partitions list with the desired partitions before
+ calling the brillo_update_payload script. It also cleans up the reference to
+ the excluded partitions in the info file, e.g misc_info.txt.
+
+ Args:
+ input_file: The input target-files.zip filename.
+ ab_partitions: A list of partitions to include in the partial update
+
+ Returns:
+ The filename of target-files.zip used for partial ota update.
+ """
+
+ def AddImageForPartition(partition_name):
+ """Add the archive name for a given partition to the copy list."""
+ for prefix in ['IMAGES', 'RADIO']:
+ image_path = '{}/{}.img'.format(prefix, partition_name)
+ if image_path in namelist:
+ copy_entries.append(image_path)
+ map_path = '{}/{}.map'.format(prefix, partition_name)
+ if map_path in namelist:
+ copy_entries.append(map_path)
+ return
+
+ raise ValueError("Cannot find {} in input zipfile".format(partition_name))
+
+ with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+ original_ab_partitions = input_zip.read(
+ AB_PARTITIONS).decode().splitlines()
+ namelist = input_zip.namelist()
+
+ unrecognized_partitions = [partition for partition in ab_partitions if
+ partition not in original_ab_partitions]
+ if unrecognized_partitions:
+ raise ValueError("Unrecognized partitions when generating partial updates",
+ unrecognized_partitions)
+
+ logger.info("Generating partial updates for %s", ab_partitions)
+
+ copy_entries = ['META/update_engine_config.txt']
+ for partition_name in ab_partitions:
+ AddImageForPartition(partition_name)
+
+ # Use zip2zip to avoid extracting the zipfile.
+ partial_target_file = common.MakeTempFile(suffix='.zip')
+ cmd = ['zip2zip', '-i', input_file, '-o', partial_target_file]
+ cmd.extend(['{}:{}'.format(name, name) for name in copy_entries])
+ common.RunAndCheckOutput(cmd)
+
+ partial_target_zip = zipfile.ZipFile(partial_target_file, 'a',
+ allowZip64=True)
+ with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+ common.ZipWriteStr(partial_target_zip, 'META/ab_partitions.txt',
+ '\n'.join(ab_partitions))
+ for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
+ if info_file not in input_zip.namelist():
+ logger.warning('Cannot find %s in input zipfile', info_file)
+ continue
+ content = input_zip.read(info_file).decode()
+ modified_info = UpdatesInfoForSpecialUpdates(
+ content, lambda p: p in ab_partitions)
+ common.ZipWriteStr(partial_target_zip, info_file, modified_info)
+
+ # TODO(xunchang) handle 'META/care_map.pb', 'META/postinstall_config.txt'
+ common.ZipClose(partial_target_zip)
+
+ return partial_target_file
+
def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
super_block_devices,
@@ -762,7 +857,7 @@
target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
shutil.copyfile(input_file, target_file)
- with zipfile.ZipFile(input_file) as input_zip:
+ with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
namelist = input_zip.namelist()
input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
@@ -822,6 +917,107 @@
return target_file
+def GetTargetFilesZipForCustomImagesUpdates(input_file, custom_images):
+ """Returns a target-files.zip for custom partitions update.
+
+ This function modifies ab_partitions list with the desired custom partitions
+ and puts the custom images into the target target-files.zip.
+
+ Args:
+ input_file: The input target-files.zip filename.
+ custom_images: A map of custom partitions and custom images.
+
+ Returns:
+ The filename of a target-files.zip which has renamed the custom images in
+    the IMAGES/ directory to their partition names.
+ """
+ # Use zip2zip to avoid extracting the zipfile.
+ target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+ cmd = ['zip2zip', '-i', input_file, '-o', target_file]
+
+ with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+ namelist = input_zip.namelist()
+
+ # Write {custom_image}.img as {custom_partition}.img.
+ for custom_partition, custom_image in custom_images.items():
+ default_custom_image = '{}.img'.format(custom_partition)
+ if default_custom_image != custom_image:
+ logger.info("Update custom partition '%s' with '%s'",
+ custom_partition, custom_image)
+        # The default custom image needs to be deleted first.
+ namelist.remove('IMAGES/{}'.format(default_custom_image))
+ # IMAGES/{custom_image}.img:IMAGES/{custom_partition}.img.
+ cmd.extend(['IMAGES/{}:IMAGES/{}'.format(custom_image,
+ default_custom_image)])
+
+ cmd.extend(['{}:{}'.format(name, name) for name in namelist])
+ common.RunAndCheckOutput(cmd)
+
+ return target_file
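For example, with the hypothetical flag --custom_image oem=oem_test.img, the resulting zip2zip copy spec renames the replacement image and drops the stock one:

target_file = GetTargetFilesZipForCustomImagesUpdates(
    'target-files.zip', {'oem': 'oem_test.img'})  # hypothetical names
# zip2zip is invoked with 'IMAGES/oem_test.img:IMAGES/oem.img' plus an identity
# mapping for every other entry; the original IMAGES/oem.img is omitted from
# the copy list.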
+
+def GeneratePartitionTimestampFlags(partition_state):
+ partition_timestamps = [
+ part.partition_name + ":" + part.version
+ for part in partition_state]
+ return ["--partition_timestamps", ",".join(partition_timestamps)]
+
+def GeneratePartitionTimestampFlagsDowngrade(pre_partition_state, post_partition_state):
+ assert pre_partition_state is not None
+ partition_timestamps = {}
+ for part in pre_partition_state:
+ partition_timestamps[part.partition_name] = part.version
+ for part in post_partition_state:
+ partition_timestamps[part.partition_name] = \
+ max(part.version, partition_timestamps[part.partition_name])
+ return [
+ "--partition_timestamps",
+ ",".join([key + ":" + val for (key, val) in partition_timestamps.items()])
+ ]
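A small worked example of the downgrade flag (made-up versions; the namedtuple stands in for the ota_metadata_pb2 partition-state entries used above), which pins each partition to the newer of its source and target timestamps:

from collections import namedtuple

PartitionState = namedtuple('PartitionState', ['partition_name', 'version'])

pre = [PartitionState('system', '200'), PartitionState('product', '300')]
post = [PartitionState('system', '100'), PartitionState('product', '300')]
flags = GeneratePartitionTimestampFlagsDowngrade(pre, post)
# flags == ['--partition_timestamps', 'system:200,product:300']

Note that max() compares the version strings lexically; the made-up values above order the same way numerically.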
+
+def IsSparseImage(filepath):
+ with open(filepath, 'rb') as fp:
+ # Magic for android sparse image format
+ # https://source.android.com/devices/bootloader/images
+ return fp.read(4) == b'\x3A\xFF\x26\xED'
+
+def SupportsMainlineGkiUpdates(target_file):
+ """Return True if the build supports MainlineGKIUpdates.
+
+  This function scans the product.img file in the IMAGES/ directory for the
+  pattern |*/apex/com.android.gki.*.apex|. If any files match this pattern,
+  it concludes that the build supports mainline GKI and returns True.
+
+ Args:
+ target_file: Path to a target_file.zip, or an extracted directory
+ Return:
+    True if this build supports Mainline GKI Updates.
+ """
+ if target_file is None:
+ return False
+ if os.path.isfile(target_file):
+ target_file = common.UnzipTemp(target_file, ["IMAGES/product.img"])
+  assert os.path.isdir(target_file), \
+      "{} must be a path to a zip archive or a dir containing extracted"\
+      " target_files".format(target_file)
+ image_file = os.path.join(target_file, "IMAGES", "product.img")
+
+ if not os.path.isfile(image_file):
+ return False
+
+ if IsSparseImage(image_file):
+ # Unsparse the image
+ tmp_img = common.MakeTempFile(suffix=".img")
+ subprocess.check_output(["simg2img", image_file, tmp_img])
+ image_file = tmp_img
+
+ cmd = ["debugfs_static", "-R", "ls -p /apex", image_file]
+ output = subprocess.check_output(cmd).decode()
+
+ pattern = re.compile(r"com\.android\.gki\..*\.apex")
+ return pattern.search(output) is not None
+
def GenerateAbOtaPackage(target_file, output_file, source_file=None):
"""Generates an Android OTA package that has A/B update payload."""
# Stage the output zip package for package signing.
@@ -830,36 +1026,79 @@
else:
staging_file = output_file
output_zip = zipfile.ZipFile(staging_file, "w",
- compression=zipfile.ZIP_DEFLATED)
+ compression=zipfile.ZIP_DEFLATED, allowZip64=True)
if source_file is not None:
+ assert "ab_partitions" in OPTIONS.source_info_dict, \
+ "META/ab_partitions.txt is required for ab_update."
+ assert "ab_partitions" in OPTIONS.target_info_dict, \
+ "META/ab_partitions.txt is required for ab_update."
target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+ vendor_prop = source_info.info_dict.get("vendor.build.prop")
+ if vendor_prop and \
+ vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true":
+ # TODO(zhangkelvin) Remove this once FEC on VABC is supported
+ logger.info("Virtual AB Compression enabled, disabling FEC")
+ OPTIONS.disable_fec_computation = True
+ OPTIONS.disable_verity_computation = True
else:
+ assert "ab_partitions" in OPTIONS.info_dict, \
+ "META/ab_partitions.txt is required for ab_update."
target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
source_info = None
- # Metadata to comply with Android OTA package format.
- metadata = GetPackageMetadata(target_info, source_info)
+ additional_args = []
+
+ # Prepare custom images.
+ if OPTIONS.custom_images:
+ target_file = GetTargetFilesZipForCustomImagesUpdates(
+ target_file, OPTIONS.custom_images)
if OPTIONS.retrofit_dynamic_partitions:
target_file = GetTargetFilesZipForRetrofitDynamicPartitions(
target_file, target_info.get("super_block_devices").strip().split(),
target_info.get("dynamic_partition_list").strip().split())
+ elif OPTIONS.partial:
+ target_file = GetTargetFilesZipForPartialUpdates(target_file,
+ OPTIONS.partial)
+ additional_args += ["--is_partial_update", "true"]
elif OPTIONS.skip_postinstall:
target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
+  # target_file may have been modified; re-parse ab_partitions.
+ with zipfile.ZipFile(target_file, allowZip64=True) as zfp:
+ target_info.info_dict['ab_partitions'] = zfp.read(
+ AB_PARTITIONS).decode().strip().split("\n")
+ # Metadata to comply with Android OTA package format.
+ metadata = GetPackageMetadata(target_info, source_info)
# Generate payload.
payload = Payload()
+ partition_timestamps_flags = []
# Enforce a max timestamp this payload can be applied on top of.
if OPTIONS.downgrade:
max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
+ partition_timestamps_flags = GeneratePartitionTimestampFlagsDowngrade(
+ metadata.precondition.partition_state,
+ metadata.postcondition.partition_state
+ )
else:
- max_timestamp = metadata["post-timestamp"]
- additional_args = ["--max_timestamp", max_timestamp]
+ max_timestamp = str(metadata.postcondition.timestamp)
+ partition_timestamps_flags = GeneratePartitionTimestampFlags(
+ metadata.postcondition.partition_state)
- payload.Generate(target_file, source_file, additional_args)
+ additional_args += ["--max_timestamp", max_timestamp]
+
+ if SupportsMainlineGkiUpdates(source_file):
+    logger.warning("Detected build with mainline GKI, including full boot image.")
+ additional_args.extend(["--full_boot", "true"])
+
+ payload.Generate(
+ target_file,
+ source_file,
+ additional_args + partition_timestamps_flags
+ )
# Sign the payload.
payload_signer = PayloadSigner()
@@ -877,15 +1116,16 @@
target_file, OPTIONS.skip_postinstall)
secondary_payload = Payload(secondary=True)
secondary_payload.Generate(secondary_target_file,
- additional_args=additional_args)
+ additional_args=["--max_timestamp",
+ max_timestamp])
secondary_payload.Sign(payload_signer)
secondary_payload.WriteToZip(output_zip)
# If dm-verity is supported for the device, copy contents of care_map
# into A/B OTA package.
- target_zip = zipfile.ZipFile(target_file, "r")
+ target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
if (target_info.get("verity") == "true" or
- target_info.get("avb_enable") == "true"):
+ target_info.get("avb_enable") == "true"):
care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
"META/" + x in target_zip.namelist()]
@@ -992,10 +1232,20 @@
OPTIONS.output_metadata_path = a
elif o == "--disable_fec_computation":
OPTIONS.disable_fec_computation = True
+ elif o == "--disable_verity_computation":
+ OPTIONS.disable_verity_computation = True
elif o == "--force_non_ab":
OPTIONS.force_non_ab = True
elif o == "--boot_variable_file":
OPTIONS.boot_variable_file = a
+ elif o == "--partial":
+ partitions = a.split()
+ if not partitions:
+ raise ValueError("Cannot parse partitions in {}".format(a))
+ OPTIONS.partial = partitions
+ elif o == "--custom_image":
+ custom_partition, custom_image = a.split("=")
+ OPTIONS.custom_images[custom_partition] = custom_image
else:
return False
return True
@@ -1032,8 +1282,11 @@
"skip_compatibility_check",
"output_metadata_path=",
"disable_fec_computation",
+ "disable_verity_computation",
"force_non_ab",
"boot_variable_file=",
+ "partial=",
+ "custom_image=",
], extra_option_handler=option_handler)
if len(args) != 2:
@@ -1042,13 +1295,6 @@
common.InitLogging()
- if OPTIONS.downgrade:
- # We should only allow downgrading incrementals (as opposed to full).
- # Otherwise the device may go back from arbitrary build with this full
- # OTA package.
- if OPTIONS.incremental_source is None:
- raise ValueError("Cannot generate downgradable full OTAs")
-
# Load the build info dicts from the zip directly or the extracted input
# directory. We don't need to unzip the entire target-files zips, because they
# won't be needed for A/B OTAs (brillo_update_payload does that on its own).
@@ -1059,21 +1305,43 @@
if OPTIONS.extracted_input is not None:
OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
else:
- with zipfile.ZipFile(args[0], 'r') as input_zip:
- OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+ OPTIONS.info_dict = ParseInfoDict(args[0])
+
+ if OPTIONS.downgrade:
+ # We should only allow downgrading incrementals (as opposed to full).
+ # Otherwise the device may go back from arbitrary build with this full
+ # OTA package.
+ if OPTIONS.incremental_source is None:
+ raise ValueError("Cannot generate downgradable full OTAs")
+
+
+ # TODO(xunchang) for retrofit and partial updates, maybe we should rebuild the
+ # target-file and reload the info_dict. So the info will be consistent with
+ # the modified target-file.
logger.info("--- target info ---")
common.DumpInfoDict(OPTIONS.info_dict)
+
# Load the source build dict if applicable.
if OPTIONS.incremental_source is not None:
OPTIONS.target_info_dict = OPTIONS.info_dict
- with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
- OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+ OPTIONS.source_info_dict = ParseInfoDict(OPTIONS.incremental_source)
logger.info("--- source info ---")
common.DumpInfoDict(OPTIONS.source_info_dict)
+ if OPTIONS.partial:
+ OPTIONS.info_dict['ab_partitions'] = \
+ list(
+ set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
+ )
+ if OPTIONS.source_info_dict:
+ OPTIONS.source_info_dict['ab_partitions'] = \
+ list(
+ set(OPTIONS.source_info_dict['ab_partitions']) & set(OPTIONS.partial)
+ )
+
# Load OEM dicts if provided.
OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
@@ -1081,7 +1349,7 @@
# use_dynamic_partitions but target build does.
if (OPTIONS.source_info_dict and
OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
- OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
+ OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
raise common.ExternalError(
"Expect to generate incremental OTA for retrofitting dynamic "
diff --git a/tools/releasetools/ota_metadata.proto b/tools/releasetools/ota_metadata.proto
new file mode 100644
index 0000000..1277685
--- /dev/null
+++ b/tools/releasetools/ota_metadata.proto
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// If you change this file, please update ota_metadata_pb2.py by executing
+// protoc ota_metadata.proto --python_out $ANDROID_BUILD_TOP/build/tools/releasetools
+
+
+syntax = "proto3";
+
+package build.tools.releasetools;
+option optimize_for = LITE_RUNTIME;
+
+// The build information of a particular partition on the device.
+message PartitionState {
+ string partition_name = 1;
+ repeated string device = 2;
+ repeated string build = 3;
+  // The version string of the partition. It's usually the build timestamp if
+  // present. One known exception is the boot image, which uses the KMI
+  // version, e.g. 5.4.42-android12-0.
+ string version = 4;
+
+ // TODO(xunchang), revisit other necessary fields, e.g. security_patch_level.
+}
+
+// The build information on the device; the contents of the running images
+// can thus be inferred from the device state. For more information on the
+// meaning of each subfield, check
+// https://source.android.com/compatibility/android-cdd#3_2_2_build_parameters
+message DeviceState {
+  // Device name, i.e. ro.product.device. If the field has multiple values,
+  // the OTA package supports multiple devices; this usually happens when the
+  // same image is used to support multiple SKUs.
+ repeated string device = 1;
+  // Device fingerprint. Up to the R build, the value is read from
+  // ro.build.fingerprint.
+ repeated string build = 2;
+  // A value that specifies the version of the Android build.
+ string build_incremental = 3;
+ // The timestamp when the build is generated.
+ int64 timestamp = 4;
+ // The version of the currently-executing Android system.
+ string sdk_level = 5;
+ // A value indicating the security patch level of a build.
+ string security_patch_level = 6;
+
+  // The detailed state of each partition. For partial updates or devices with
+  // mixed builds across partitions, some of the above fields may be left
+  // empty, and the client will rely on the per-partition information to
+  // target the update.
+ repeated PartitionState partition_state = 7;
+}
+
+message ApexInfo {
+ string package_name = 1;
+ int64 version = 2;
+ bool is_compressed = 3;
+ int64 decompressed_size = 4;
+}
+
+// The metadata of an OTA package. It contains the package information and the
+// prerequisites to install the update correctly.
+message OtaMetadata {
+ enum OtaType {
+ UNKNOWN = 0;
+ AB = 1;
+ BLOCK = 2;
+ BRICK = 3;
+ };
+ OtaType type = 1;
+ // True if we need to wipe after the update.
+ bool wipe = 2;
+ // True if the timestamp of the post build is older than the pre build.
+ bool downgrade = 3;
+ // A map of name:content of property files, e.g. ota-property-files.
+ map<string, string> property_files = 4;
+
+ // The required device state in order to install the package.
+ DeviceState precondition = 5;
+ // The expected device state after the update.
+ DeviceState postcondition = 6;
+
+  // True if the OTA updates the device to support dynamic partitions while
+  // the source build doesn't support them.
+ bool retrofit_dynamic_partitions = 7;
+ // The required size of the cache partition, only valid for non-A/B update.
+ int64 required_cache = 8;
+}
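
As a rough sketch of how this message is used by the tooling in this change (illustrative only; it assumes the protobuf runtime and the generated ota_metadata_pb2 bindings below are importable, and the device name, fingerprint, and timestamp are made-up sample values):

import ota_metadata_pb2

metadata = ota_metadata_pb2.OtaMetadata()
metadata.type = ota_metadata_pb2.OtaMetadata.AB

post = metadata.postcondition
post.device.append('sample_device')  # hypothetical ro.product.device value
post.build.append('google/sample/sample:11/RP1A/0001:user/release-keys')
post.build_incremental = '0001'
post.timestamp = 1600000000
post.sdk_level = '30'
post.security_patch_level = '2020-09-05'

# Per-partition state, mirroring the PartitionState message above.
system_state = post.partition_state.add()
system_state.partition_name = 'system'
system_state.device.append('sample_device')
system_state.version = '1600000000'

# The serialized bytes are what WriteMetadata() stores as
# META-INF/com/android/metadata.pb.
blob = metadata.SerializeToString()
parsed = ota_metadata_pb2.OtaMetadata.FromString(blob)
assert parsed.postcondition.timestamp == 1600000000
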
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
new file mode 100644
index 0000000..ff2b2c5
--- /dev/null
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -0,0 +1,343 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: ota_metadata.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='ota_metadata.proto',
+ package='build.tools.releasetools',
+ syntax='proto3',
+ serialized_options=b'H\003',
+ serialized_pb=b'\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"\xe1\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3'
+)
+
+
+
+_OTAMETADATA_OTATYPE = _descriptor.EnumDescriptor(
+ name='OtaType',
+ full_name='build.tools.releasetools.OtaMetadata.OtaType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='AB', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BLOCK', index=2, number=2,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BRICK', index=3, number=3,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=777,
+ serialized_end=829,
+)
+_sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
+
+
+_PARTITIONSTATE = _descriptor.Descriptor(
+ name='PartitionState',
+ full_name='build.tools.releasetools.PartitionState',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='partition_name', full_name='build.tools.releasetools.PartitionState.partition_name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='device', full_name='build.tools.releasetools.PartitionState.device', index=1,
+ number=2, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='build', full_name='build.tools.releasetools.PartitionState.build', index=2,
+ number=3, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='version', full_name='build.tools.releasetools.PartitionState.version', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=48,
+ serialized_end=136,
+)
+
+
+_DEVICESTATE = _descriptor.Descriptor(
+ name='DeviceState',
+ full_name='build.tools.releasetools.DeviceState',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='device', full_name='build.tools.releasetools.DeviceState.device', index=0,
+ number=1, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='build', full_name='build.tools.releasetools.DeviceState.build', index=1,
+ number=2, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='build_incremental', full_name='build.tools.releasetools.DeviceState.build_incremental', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='timestamp', full_name='build.tools.releasetools.DeviceState.timestamp', index=3,
+ number=4, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='sdk_level', full_name='build.tools.releasetools.DeviceState.sdk_level', index=4,
+ number=5, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='security_patch_level', full_name='build.tools.releasetools.DeviceState.security_patch_level', index=5,
+ number=6, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='partition_state', full_name='build.tools.releasetools.DeviceState.partition_state', index=6,
+ number=7, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=139,
+ serialized_end=345,
+)
+
+
+_OTAMETADATA_PROPERTYFILESENTRY = _descriptor.Descriptor(
+ name='PropertyFilesEntry',
+ full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=b'8\001',
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=723,
+ serialized_end=775,
+)
+
+_OTAMETADATA = _descriptor.Descriptor(
+ name='OtaMetadata',
+ full_name='build.tools.releasetools.OtaMetadata',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='build.tools.releasetools.OtaMetadata.type', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='wipe', full_name='build.tools.releasetools.OtaMetadata.wipe', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='downgrade', full_name='build.tools.releasetools.OtaMetadata.downgrade', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='property_files', full_name='build.tools.releasetools.OtaMetadata.property_files', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='precondition', full_name='build.tools.releasetools.OtaMetadata.precondition', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='postcondition', full_name='build.tools.releasetools.OtaMetadata.postcondition', index=5,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='retrofit_dynamic_partitions', full_name='build.tools.releasetools.OtaMetadata.retrofit_dynamic_partitions', index=6,
+ number=7, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='required_cache', full_name='build.tools.releasetools.OtaMetadata.required_cache', index=7,
+ number=8, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_OTAMETADATA_PROPERTYFILESENTRY, ],
+ enum_types=[
+ _OTAMETADATA_OTATYPE,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=348,
+ serialized_end=829,
+)
+
+_DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
+_OTAMETADATA_PROPERTYFILESENTRY.containing_type = _OTAMETADATA
+_OTAMETADATA.fields_by_name['type'].enum_type = _OTAMETADATA_OTATYPE
+_OTAMETADATA.fields_by_name['property_files'].message_type = _OTAMETADATA_PROPERTYFILESENTRY
+_OTAMETADATA.fields_by_name['precondition'].message_type = _DEVICESTATE
+_OTAMETADATA.fields_by_name['postcondition'].message_type = _DEVICESTATE
+_OTAMETADATA_OTATYPE.containing_type = _OTAMETADATA
+DESCRIPTOR.message_types_by_name['PartitionState'] = _PARTITIONSTATE
+DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE
+DESCRIPTOR.message_types_by_name['OtaMetadata'] = _OTAMETADATA
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+PartitionState = _reflection.GeneratedProtocolMessageType('PartitionState', (_message.Message,), {
+ 'DESCRIPTOR' : _PARTITIONSTATE,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.PartitionState)
+ })
+_sym_db.RegisterMessage(PartitionState)
+
+DeviceState = _reflection.GeneratedProtocolMessageType('DeviceState', (_message.Message,), {
+ 'DESCRIPTOR' : _DEVICESTATE,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.DeviceState)
+ })
+_sym_db.RegisterMessage(DeviceState)
+
+OtaMetadata = _reflection.GeneratedProtocolMessageType('OtaMetadata', (_message.Message,), {
+
+ 'PropertyFilesEntry' : _reflection.GeneratedProtocolMessageType('PropertyFilesEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _OTAMETADATA_PROPERTYFILESENTRY,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.OtaMetadata.PropertyFilesEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _OTAMETADATA,
+ '__module__' : 'ota_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:build.tools.releasetools.OtaMetadata)
+ })
+_sym_db.RegisterMessage(OtaMetadata)
+_sym_db.RegisterMessage(OtaMetadata.PropertyFilesEntry)
+
+
+DESCRIPTOR._options = None
+_OTAMETADATA_PROPERTYFILESENTRY._options = None
+# @@protoc_insertion_point(module_scope)
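
Reading the metadata back out of a finished package with these generated bindings is symmetric; a minimal sketch (the package path is a placeholder, and it assumes the package carries the metadata.pb entry written by ota_utils.py below):

import zipfile

import ota_metadata_pb2

METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'

# 'ota.zip' is a placeholder path to a generated A/B OTA package.
with zipfile.ZipFile('ota.zip', 'r', allowZip64=True) as package:
  metadata = ota_metadata_pb2.OtaMetadata.FromString(
      package.read(METADATA_PROTO_NAME))

print(metadata.type == ota_metadata_pb2.OtaMetadata.AB)
print(list(metadata.postcondition.build))  # post-build fingerprint(s)
print(dict(metadata.property_files))       # property-files entries keyed by name
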
diff --git a/tools/releasetools/ota_package_parser.py b/tools/releasetools/ota_package_parser.py
index 331122b..1e733b9 100755
--- a/tools/releasetools/ota_package_parser.py
+++ b/tools/releasetools/ota_package_parser.py
@@ -215,7 +215,7 @@
logging.basicConfig(level=logging.INFO, format=logging_format)
try:
- with zipfile.ZipFile(args.ota_package, 'r') as package:
+ with zipfile.ZipFile(args.ota_package, 'r', allowZip64=True) as package:
package_parser = OtaPackageParser(package)
package_parser.Analyze()
except:
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 874ab95..6bbcc92 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -14,17 +14,32 @@
import copy
import itertools
+import logging
import os
import zipfile
+import ota_metadata_pb2
from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
- SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps)
+ SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps)
+
+logger = logging.getLogger(__name__)
+
+OPTIONS.no_signing = False
+OPTIONS.force_non_ab = False
+OPTIONS.wipe_user_data = False
+OPTIONS.downgrade = False
+OPTIONS.key_passwords = {}
+OPTIONS.package_key = None
+OPTIONS.incremental_source = None
+OPTIONS.retrofit_dynamic_partitions = False
+OPTIONS.output_metadata_path = None
+OPTIONS.boot_variable_file = None
METADATA_NAME = 'META-INF/com/android/metadata'
+METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
-
def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
"""Finalizes the metadata and signs an A/B OTA package.
@@ -48,14 +63,15 @@
def ComputeAllPropertyFiles(input_file, needed_property_files):
# Write the current metadata entry with placeholders.
- with zipfile.ZipFile(input_file) as input_zip:
+ with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
for property_files in needed_property_files:
- metadata[property_files.name] = property_files.Compute(input_zip)
+ metadata.property_files[property_files.name] = property_files.Compute(
+ input_zip)
namelist = input_zip.namelist()
- if METADATA_NAME in namelist:
- ZipDelete(input_file, METADATA_NAME)
- output_zip = zipfile.ZipFile(input_file, 'a')
+ if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
+ ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
+ output_zip = zipfile.ZipFile(input_file, 'a', allowZip64=True)
WriteMetadata(metadata, output_zip)
ZipClose(output_zip)
@@ -67,10 +83,11 @@
return prelim_signing
def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
- with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
+ with zipfile.ZipFile(prelim_signing, allowZip64=True) as prelim_signing_zip:
for property_files in needed_property_files:
- metadata[property_files.name] = property_files.Finalize(
- prelim_signing_zip, len(metadata[property_files.name]))
+ metadata.property_files[property_files.name] = property_files.Finalize(
+ prelim_signing_zip,
+ len(metadata.property_files[property_files.name]))
# SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP
# entries, as well as padding the entry headers. We do a preliminary signing
@@ -91,8 +108,8 @@
FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
# Replace the METADATA entry.
- ZipDelete(prelim_signing, METADATA_NAME)
- output_zip = zipfile.ZipFile(prelim_signing, 'a')
+ ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
+ output_zip = zipfile.ZipFile(prelim_signing, 'a', allowZip64=True)
WriteMetadata(metadata, output_zip)
ZipClose(output_zip)
@@ -103,9 +120,10 @@
SignOutput(prelim_signing, output_file)
# Reopen the final signed zip to double check the streaming metadata.
- with zipfile.ZipFile(output_file) as output_zip:
+ with zipfile.ZipFile(output_file, allowZip64=True) as output_zip:
for property_files in needed_property_files:
- property_files.Verify(output_zip, metadata[property_files.name].strip())
+ property_files.Verify(
+ output_zip, metadata.property_files[property_files.name].strip())
# If requested, dump the metadata to a separate file.
output_metadata_path = OPTIONS.output_metadata_path
@@ -113,30 +131,101 @@
WriteMetadata(metadata, output_metadata_path)
-def WriteMetadata(metadata, output):
+def WriteMetadata(metadata_proto, output):
"""Writes the metadata to the zip archive or a file.
Args:
- metadata: The metadata dict for the package.
- output: A ZipFile object or a string of the output file path.
+ metadata_proto: The metadata protobuf for the package.
+ output: A ZipFile object or a string of the output file path. If a string
+ path is given, the metadata in the protobuf format will be written to
+ {output}.pb, e.g. ota_metadata.pb
"""
- value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
+ metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
+ legacy_metadata = "".join(["%s=%s\n" % kv for kv in
+ sorted(metadata_dict.items())])
if isinstance(output, zipfile.ZipFile):
- ZipWriteStr(output, METADATA_NAME, value,
+ ZipWriteStr(output, METADATA_PROTO_NAME, metadata_proto.SerializeToString(),
+ compress_type=zipfile.ZIP_STORED)
+ ZipWriteStr(output, METADATA_NAME, legacy_metadata,
compress_type=zipfile.ZIP_STORED)
return
+  # SerializeToString() returns bytes, so open the .pb output in binary mode.
+  with open('{}.pb'.format(output), 'wb') as f:
+ f.write(metadata_proto.SerializeToString())
with open(output, 'w') as f:
- f.write(value)
+ f.write(legacy_metadata)
+
+
+def UpdateDeviceState(device_state, build_info, boot_variable_values,
+ is_post_build):
+ """Update the fields of the DeviceState proto with build info."""
+
+ def UpdatePartitionStates(partition_states):
+ """Update the per-partition state according to its build.prop"""
+ if not build_info.is_ab:
+ return
+ build_info_set = ComputeRuntimeBuildInfos(build_info,
+ boot_variable_values)
+ assert "ab_partitions" in build_info.info_dict,\
+ "ab_partitions property required for ab update."
+ ab_partitions = set(build_info.info_dict.get("ab_partitions"))
+
+ # delta_generator will error out on unused timestamps,
+ # so only generate timestamps for dynamic partitions
+ # used in OTA update.
+ for partition in sorted(set(PARTITIONS_WITH_BUILD_PROP) & ab_partitions):
+ partition_prop = build_info.info_dict.get(
+ '{}.build.prop'.format(partition))
+ # Skip if the partition is missing, or it doesn't have a build.prop
+ if not partition_prop or not partition_prop.build_props:
+ continue
+
+ partition_state = partition_states.add()
+ partition_state.partition_name = partition
+ # Update the partition's runtime device names and fingerprints
+ partition_devices = set()
+ partition_fingerprints = set()
+ for runtime_build_info in build_info_set:
+ partition_devices.add(
+ runtime_build_info.GetPartitionBuildProp('ro.product.device',
+ partition))
+ partition_fingerprints.add(
+ runtime_build_info.GetPartitionFingerprint(partition))
+
+ partition_state.device.extend(sorted(partition_devices))
+ partition_state.build.extend(sorted(partition_fingerprints))
+
+ # TODO(xunchang) set the boot image's version with kmi. Note the boot
+ # image doesn't have a file map.
+ partition_state.version = build_info.GetPartitionBuildProp(
+ 'ro.build.date.utc', partition)
+
+ # TODO(xunchang), we can save a call to ComputeRuntimeBuildInfos.
+ build_devices, build_fingerprints = \
+ CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values)
+ device_state.device.extend(sorted(build_devices))
+ device_state.build.extend(sorted(build_fingerprints))
+ device_state.build_incremental = build_info.GetBuildProp(
+ 'ro.build.version.incremental')
+
+ UpdatePartitionStates(device_state.partition_state)
+
+ if is_post_build:
+ device_state.sdk_level = build_info.GetBuildProp(
+ 'ro.build.version.sdk')
+ device_state.security_patch_level = build_info.GetBuildProp(
+ 'ro.build.version.security_patch')
+ # Use the actual post-timestamp, even for a downgrade case.
+ device_state.timestamp = int(build_info.GetBuildProp('ro.build.date.utc'))
def GetPackageMetadata(target_info, source_info=None):
- """Generates and returns the metadata dict.
+ """Generates and returns the metadata proto.
- It generates a dict() that contains the info to be written into an OTA
- package (META-INF/com/android/metadata). It also handles the detection of
- downgrade / data wipe based on the global options.
+ It generates a ota_metadata protobuf that contains the info to be written
+ into an OTA package (META-INF/com/android/metadata.pb). It also handles the
+ detection of downgrade / data wipe based on the global options.
Args:
target_info: The BuildInfo instance that holds the target build info.
@@ -144,66 +233,96 @@
None if generating full OTA.
Returns:
- A dict to be written into package metadata entry.
+ A protobuf to be written into package metadata entry.
"""
assert isinstance(target_info, BuildInfo)
assert source_info is None or isinstance(source_info, BuildInfo)
- separator = '|'
-
boot_variable_values = {}
if OPTIONS.boot_variable_file:
d = LoadDictionaryFromFile(OPTIONS.boot_variable_file)
for key, values in d.items():
boot_variable_values[key] = [val.strip() for val in values.split(',')]
- post_build_devices, post_build_fingerprints = \
- CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values)
- metadata = {
- 'post-build': separator.join(sorted(post_build_fingerprints)),
- 'post-build-incremental': target_info.GetBuildProp(
- 'ro.build.version.incremental'),
- 'post-sdk-level': target_info.GetBuildProp(
- 'ro.build.version.sdk'),
- 'post-security-patch-level': target_info.GetBuildProp(
- 'ro.build.version.security_patch'),
- }
+ metadata_proto = ota_metadata_pb2.OtaMetadata()
+  # TODO(xunchang) some fields, e.g. post-device, aren't necessary. We can
+  # consider skipping them if they aren't used by clients.
+ UpdateDeviceState(metadata_proto.postcondition, target_info,
+ boot_variable_values, True)
if target_info.is_ab and not OPTIONS.force_non_ab:
- metadata['ota-type'] = 'AB'
- metadata['ota-required-cache'] = '0'
+ metadata_proto.type = ota_metadata_pb2.OtaMetadata.AB
+ metadata_proto.required_cache = 0
else:
- metadata['ota-type'] = 'BLOCK'
+ metadata_proto.type = ota_metadata_pb2.OtaMetadata.BLOCK
+ # cache requirement will be updated by the non-A/B codes.
if OPTIONS.wipe_user_data:
- metadata['ota-wipe'] = 'yes'
+ metadata_proto.wipe = True
if OPTIONS.retrofit_dynamic_partitions:
- metadata['ota-retrofit-dynamic-partitions'] = 'yes'
+ metadata_proto.retrofit_dynamic_partitions = True
is_incremental = source_info is not None
if is_incremental:
- pre_build_devices, pre_build_fingerprints = \
- CalculateRuntimeDevicesAndFingerprints(source_info,
- boot_variable_values)
- metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints))
- metadata['pre-build-incremental'] = source_info.GetBuildProp(
- 'ro.build.version.incremental')
- metadata['pre-device'] = separator.join(sorted(pre_build_devices))
+ UpdateDeviceState(metadata_proto.precondition, source_info,
+ boot_variable_values, False)
else:
- metadata['pre-device'] = separator.join(sorted(post_build_devices))
-
- # Use the actual post-timestamp, even for a downgrade case.
- metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+ metadata_proto.precondition.device.extend(
+ metadata_proto.postcondition.device)
# Detect downgrades and set up downgrade flags accordingly.
if is_incremental:
- HandleDowngradeMetadata(metadata, target_info, source_info)
+ HandleDowngradeMetadata(metadata_proto, target_info, source_info)
- return metadata
+ return metadata_proto
-def HandleDowngradeMetadata(metadata, target_info, source_info):
+def BuildLegacyOtaMetadata(metadata_proto):
+ """Converts the metadata proto to a legacy metadata dict.
+
+ This metadata dict is used to build the legacy metadata text file for
+ backward compatibility. We won't add new keys to the legacy metadata format.
+  If new information is needed, we should add it as a new field in the
+  OtaMetadata proto definition.
+ """
+
+ separator = '|'
+
+ metadata_dict = {}
+ if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+ metadata_dict['ota-type'] = 'AB'
+ elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+ metadata_dict['ota-type'] = 'BLOCK'
+ if metadata_proto.wipe:
+ metadata_dict['ota-wipe'] = 'yes'
+ if metadata_proto.retrofit_dynamic_partitions:
+ metadata_dict['ota-retrofit-dynamic-partitions'] = 'yes'
+ if metadata_proto.downgrade:
+ metadata_dict['ota-downgrade'] = 'yes'
+
+ metadata_dict['ota-required-cache'] = str(metadata_proto.required_cache)
+
+ post_build = metadata_proto.postcondition
+ metadata_dict['post-build'] = separator.join(post_build.build)
+ metadata_dict['post-build-incremental'] = post_build.build_incremental
+ metadata_dict['post-sdk-level'] = post_build.sdk_level
+ metadata_dict['post-security-patch-level'] = post_build.security_patch_level
+ metadata_dict['post-timestamp'] = str(post_build.timestamp)
+
+ pre_build = metadata_proto.precondition
+ metadata_dict['pre-device'] = separator.join(pre_build.device)
+ # incremental updates
+ if len(pre_build.build) != 0:
+ metadata_dict['pre-build'] = separator.join(pre_build.build)
+ metadata_dict['pre-build-incremental'] = pre_build.build_incremental
+
+ metadata_dict.update(metadata_proto.property_files)
+
+ return metadata_dict
+
+
+def HandleDowngradeMetadata(metadata_proto, target_info, source_info):
# Only incremental OTAs are allowed to reach here.
assert OPTIONS.incremental_source is not None
@@ -216,7 +335,7 @@
raise RuntimeError(
"--downgrade or --override_timestamp specified but no downgrade "
"detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
- metadata["ota-downgrade"] = "yes"
+ metadata_proto.downgrade = True
else:
if is_downgrade:
raise RuntimeError(
@@ -225,14 +344,12 @@
"building the incremental." % (pre_timestamp, post_timestamp))
-def CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values):
- """Returns a tuple of sets for runtime devices and fingerprints"""
+def ComputeRuntimeBuildInfos(default_build_info, boot_variable_values):
+ """Returns a set of build info objects that may exist during runtime."""
- device_names = {build_info.device}
- fingerprints = {build_info.fingerprint}
-
+ build_info_set = {default_build_info}
if not boot_variable_values:
- return device_names, fingerprints
+ return build_info_set
# Calculate all possible combinations of the values for the boot variables.
keys = boot_variable_values.keys()
@@ -242,12 +359,12 @@
for placeholder_values in combinations:
# Reload the info_dict as some build properties may change their values
# based on the value of ro.boot* properties.
- info_dict = copy.deepcopy(build_info.info_dict)
- for partition in PARTITIONS_WITH_CARE_MAP:
+ info_dict = copy.deepcopy(default_build_info.info_dict)
+ for partition in PARTITIONS_WITH_BUILD_PROP:
partition_prop_key = "{}.build.prop".format(partition)
input_file = info_dict[partition_prop_key].input_file
if isinstance(input_file, zipfile.ZipFile):
- with zipfile.ZipFile(input_file.filename) as input_zip:
+ with zipfile.ZipFile(input_file.filename, allowZip64=True) as input_zip:
info_dict[partition_prop_key] = \
PartitionBuildProps.FromInputFile(input_zip, partition,
placeholder_values)
@@ -256,10 +373,22 @@
PartitionBuildProps.FromInputFile(input_file, partition,
placeholder_values)
info_dict["build.prop"] = info_dict["system.build.prop"]
+ build_info_set.add(BuildInfo(info_dict, default_build_info.oem_dicts))
- new_build_info = BuildInfo(info_dict, build_info.oem_dicts)
- device_names.add(new_build_info.device)
- fingerprints.add(new_build_info.fingerprint)
+ return build_info_set
+
+
+def CalculateRuntimeDevicesAndFingerprints(default_build_info,
+ boot_variable_values):
+ """Returns a tuple of sets for runtime devices and fingerprints"""
+
+ device_names = set()
+ fingerprints = set()
+ build_info_set = ComputeRuntimeBuildInfos(default_build_info,
+ boot_variable_values)
+ for runtime_build_info in build_info_set:
+ device_names.add(runtime_build_info.device)
+ fingerprints.add(runtime_build_info.fingerprint)
return device_names, fingerprints
@@ -403,8 +532,10 @@
# reserved space serves the metadata entry only.
if reserve_space:
tokens.append('metadata:' + ' ' * 15)
+ tokens.append('metadata.pb:' + ' ' * 15)
else:
tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
+ tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
return ','.join(tokens)
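
For reference, BuildLegacyOtaMetadata() above keeps the plain-text META-INF/com/android/metadata entry in sync with the proto. A minimal usage sketch, assuming it runs inside the releasetools tree so ota_utils and its dependencies import cleanly; the fingerprint, timestamp, and device name are made-up values:

import ota_metadata_pb2
from ota_utils import BuildLegacyOtaMetadata

proto = ota_metadata_pb2.OtaMetadata()
proto.type = ota_metadata_pb2.OtaMetadata.AB
proto.postcondition.build.append(
    'google/sample/sample:11/RP1A/0001:user/release-keys')
proto.postcondition.timestamp = 1600000000
proto.precondition.device.append('sample_device')

legacy = BuildLegacyOtaMetadata(proto)
# Among others, the resulting dict contains:
#   ota-type=AB
#   ota-required-cache=0
#   post-build=google/sample/sample:11/RP1A/0001:user/release-keys
#   post-timestamp=1600000000
#   pre-device=sample_device
print(''.join('%s=%s\n' % kv for kv in sorted(legacy.items())))
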
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index b4646b7..e8674b6 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -515,7 +515,7 @@
data,
payload_key,
container_key,
- key_passwords[container_key],
+ key_passwords,
apk_keys,
codename_to_api_level_map,
no_hashtree=True,
@@ -813,7 +813,7 @@
keys: A list of public keys to use during OTA package verification.
"""
temp_file = io.BytesIO()
- certs_zip = zipfile.ZipFile(temp_file, "w")
+ certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
for k in keys:
common.ZipWrite(certs_zip, k)
common.ZipClose(certs_zip)
@@ -1294,7 +1294,7 @@
common.InitLogging()
- input_zip = zipfile.ZipFile(args[0], "r")
+ input_zip = zipfile.ZipFile(args[0], "r", allowZip64=True)
output_zip = zipfile.ZipFile(args[1], "w",
compression=zipfile.ZIP_DEFLATED,
allowZip64=True)
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index efa60b6..6b7a7db 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -93,10 +93,10 @@
# Set up the output zip.
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
AddPackRadioImages(output_zip, images)
- with zipfile.ZipFile(output_file, 'r') as verify_zip:
+ with zipfile.ZipFile(output_file, 'r', allowZip64=True) as verify_zip:
for image in images:
self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
@@ -344,12 +344,12 @@
image_paths = self._test_AddCareMapForAbOta()
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
AddCareMapForAbOta(output_zip, ['system', 'vendor'], image_paths)
care_map_name = "META/care_map.pb"
temp_dir = common.MakeTempDir()
- with zipfile.ZipFile(output_file, 'r') as verify_zip:
+ with zipfile.ZipFile(output_file, 'r', allowZip64=True) as verify_zip:
self.assertTrue(care_map_name in verify_zip.namelist())
verify_zip.extract(care_map_name, path=temp_dir)
@@ -367,7 +367,7 @@
image_paths = self._test_AddCareMapForAbOta()
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
# Create an existing META/care_map.pb entry.
common.ZipWriteStr(output_zip, 'META/care_map.pb',
'fake care_map.pb')
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
index 7b4a4b0..71f6433 100644
--- a/tools/releasetools/test_apex_utils.py
+++ b/tools/releasetools/test_apex_utils.py
@@ -160,7 +160,7 @@
self.payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
apex_file = signer.ProcessApexFile(apk_keys, self.payload_key)
- package_name_extract_cmd = ['aapt', 'dump', 'badging', apex_file]
+ package_name_extract_cmd = ['aapt2', 'dump', 'badging', apex_file]
output = common.RunAndCheckOutput(package_name_extract_cmd)
for line in output.splitlines():
# Sample output from aapt: "package: name='com.google.android.wifi'
@@ -174,8 +174,8 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_ApexApkSigner_noAssetDir(self):
no_asset = common.MakeTempFile(suffix='.apex')
- with zipfile.ZipFile(no_asset, 'w') as output_zip:
- with zipfile.ZipFile(self.apex_with_apk, 'r') as input_zip:
+ with zipfile.ZipFile(no_asset, 'w', allowZip64=True) as output_zip:
+ with zipfile.ZipFile(self.apex_with_apk, 'r', allowZip64=True) as input_zip:
name_list = input_zip.namelist()
for name in name_list:
if not name.startswith('assets'):
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 81ee53d..ecd759c 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -15,6 +15,7 @@
#
import copy
+import json
import os
import subprocess
import tempfile
@@ -363,7 +364,7 @@
self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))
# Reopen the zip file to verify.
- zip_file = zipfile.ZipFile(zip_file_name, "r")
+ zip_file = zipfile.ZipFile(zip_file_name, "r", allowZip64=True)
# Verify the timestamp.
info = zip_file.getinfo(arcname)
@@ -399,7 +400,7 @@
arcname = arcname[1:]
zip_file.close()
- zip_file = zipfile.ZipFile(zip_file_name, "w")
+ zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
try:
sha1_hash = sha1()
@@ -431,7 +432,7 @@
zip_file_name = zip_file.name
zip_file.close()
- zip_file = zipfile.ZipFile(zip_file_name, "w")
+ zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
try:
expected_compress_type = extra_args.get("compress_type",
@@ -475,7 +476,7 @@
arcname_large = arcname_large[1:]
zip_file.close()
- zip_file = zipfile.ZipFile(zip_file_name, "w")
+ zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
try:
sha1_hash = sha1()
@@ -599,7 +600,7 @@
try:
random_string = os.urandom(1024)
- zip_file = zipfile.ZipFile(zip_file_name, "w")
+ zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
# Default perms should be 0o644 when passing the filename.
common.ZipWriteStr(zip_file, "foo", random_string)
# Honor the specified perms.
@@ -644,7 +645,7 @@
try:
common.ZipDelete(zip_file.name, 'Test2')
- with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+ with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
entries = check_zip.namelist()
self.assertTrue('Test1' in entries)
self.assertFalse('Test2' in entries)
@@ -652,21 +653,21 @@
self.assertRaises(
common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
- with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+ with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
entries = check_zip.namelist()
self.assertTrue('Test1' in entries)
self.assertFalse('Test2' in entries)
self.assertTrue('Test3' in entries)
common.ZipDelete(zip_file.name, ['Test3'])
- with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+ with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
entries = check_zip.namelist()
self.assertTrue('Test1' in entries)
self.assertFalse('Test2' in entries)
self.assertFalse('Test3' in entries)
common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
- with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
+ with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
entries = check_zip.namelist()
self.assertFalse('Test1' in entries)
self.assertFalse('Test2' in entries)
@@ -834,7 +835,7 @@
if additional is None:
additional = []
target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
for entry in additional:
target_files_zip.writestr(entry, '')
@@ -842,7 +843,7 @@
def test_ReadApkCerts_NoncompressedApks(self):
target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
certmap, ext = common.ReadApkCerts(input_zip)
self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
@@ -855,7 +856,7 @@
self.APKCERTS_TXT2,
['Compressed1.apk.gz', 'Compressed3.apk'])
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
certmap, ext = common.ReadApkCerts(input_zip)
self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
@@ -865,7 +866,7 @@
target_files = self._write_apkcerts_txt(
self.APKCERTS_TXT3, ['Compressed4.apk.xz'])
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
certmap, ext = common.ReadApkCerts(input_zip)
self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
@@ -876,7 +877,7 @@
self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
['Compressed1.apk.gz', 'Compressed3.apk'])
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
certmap, ext = common.ReadApkCerts(input_zip)
certmap_merged = self.APKCERTS_CERTMAP1.copy()
@@ -889,7 +890,7 @@
self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
['Compressed1.apk.gz', 'Compressed4.apk.xz'])
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
def test_ReadApkCerts_MismatchingKeys(self):
@@ -899,12 +900,12 @@
)
target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
def test_ReadApkCerts_WithWithoutOptionalFields(self):
target_files = self._write_apkcerts_txt(self.APKCERTS_TXT4)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
certmap, ext = common.ReadApkCerts(input_zip)
self.assertDictEqual(self.APKCERTS_CERTMAP4, certmap)
@@ -973,7 +974,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetSparseImage_emptyBlockMapFile(self):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.write(
test_utils.construct_sparse_image([
(0xCAC1, 6),
@@ -985,7 +986,7 @@
target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
self.assertDictEqual(
@@ -995,6 +996,55 @@
},
sparse_image.file_map)
+ def test_PartitionMapFromTargetFiles(self):
+ target_files_dir = common.MakeTempDir()
+ os.makedirs(os.path.join(target_files_dir, 'SYSTEM'))
+ os.makedirs(os.path.join(target_files_dir, 'SYSTEM', 'vendor'))
+ os.makedirs(os.path.join(target_files_dir, 'PRODUCT'))
+ os.makedirs(os.path.join(target_files_dir, 'SYSTEM', 'product'))
+ os.makedirs(os.path.join(target_files_dir, 'SYSTEM', 'vendor', 'odm'))
+ os.makedirs(os.path.join(target_files_dir, 'VENDOR_DLKM'))
+ partition_map = common.PartitionMapFromTargetFiles(target_files_dir)
+ self.assertDictEqual(
+ partition_map,
+ {
+ 'system': 'SYSTEM',
+ 'vendor': 'SYSTEM/vendor',
+ # Prefer PRODUCT over SYSTEM/product
+ 'product': 'PRODUCT',
+ 'odm': 'SYSTEM/vendor/odm',
+ 'vendor_dlkm': 'VENDOR_DLKM',
+ # No system_ext or odm_dlkm
+ })
+
+ def test_SharedUidPartitionViolations(self):
+ uid_dict = {
+ 'android.uid.phone': {
+ 'system': ['system_phone.apk'],
+ 'system_ext': ['system_ext_phone.apk'],
+ },
+ 'android.uid.wifi': {
+ 'vendor': ['vendor_wifi.apk'],
+ 'odm': ['odm_wifi.apk'],
+ },
+ }
+ errors = common.SharedUidPartitionViolations(
+ uid_dict, [('system', 'system_ext'), ('vendor', 'odm')])
+ self.assertEqual(errors, [])
+
+ def test_SharedUidPartitionViolations_Violation(self):
+ uid_dict = {
+ 'android.uid.phone': {
+ 'system': ['system_phone.apk'],
+ 'vendor': ['vendor_phone.apk'],
+ },
+ }
+ errors = common.SharedUidPartitionViolations(
+ uid_dict, [('system', 'system_ext'), ('vendor', 'odm')])
+ self.assertIn(
+ ('APK sharedUserId "android.uid.phone" found across partition groups '
+ 'in partitions "system,vendor"'), errors)
+
def test_GetSparseImage_missingImageFile(self):
self.assertRaises(
AssertionError, common.GetSparseImage, 'system2', self.testdata_dir,
@@ -1006,7 +1056,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetSparseImage_missingBlockMapFile(self):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.write(
test_utils.construct_sparse_image([
(0xCAC1, 6),
@@ -1017,7 +1067,7 @@
target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
self.assertRaises(
AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
False)
@@ -1026,7 +1076,7 @@
def test_GetSparseImage_sharedBlocks_notAllowed(self):
"""Tests the case of having overlapping blocks but disallowed."""
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.write(
test_utils.construct_sparse_image([(0xCAC2, 16)]),
arcname='IMAGES/system.img')
@@ -1040,7 +1090,7 @@
target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
self.assertRaises(
AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
False)
@@ -1049,7 +1099,7 @@
def test_GetSparseImage_sharedBlocks_allowed(self):
"""Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
# Construct an image with a care_map of "0-5 9-12".
target_files_zip.write(
test_utils.construct_sparse_image([(0xCAC2, 16)]),
@@ -1064,7 +1114,7 @@
target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)
self.assertDictEqual(
@@ -1094,7 +1144,7 @@
def test_GetSparseImage_incompleteRanges(self):
"""Tests the case of ext4 images with holes."""
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.write(
test_utils.construct_sparse_image([(0xCAC2, 16)]),
arcname='IMAGES/system.img')
@@ -1108,7 +1158,7 @@
target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
self.assertEqual(
@@ -1119,7 +1169,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.write(
test_utils.construct_sparse_image([(0xCAC2, 16)]),
arcname='IMAGES/system.img')
@@ -1136,7 +1186,7 @@
target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
self.assertEqual(
@@ -1149,7 +1199,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.write(
test_utils.construct_sparse_image([(0xCAC2, 16)]),
arcname='IMAGES/system.img')
@@ -1163,7 +1213,7 @@
target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
self.assertEqual(
@@ -1174,7 +1224,7 @@
@test_utils.SkipIfExternalToolsUnavailable()
def test_GetSparseImage_fileNotFound(self):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.write(
test_utils.construct_sparse_image([(0xCAC2, 16)]),
arcname='IMAGES/system.img')
@@ -1186,7 +1236,7 @@
target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
tempdir = common.UnzipTemp(target_files)
- with zipfile.ZipFile(target_files, 'r') as input_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
self.assertRaises(
AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
False)
@@ -1274,7 +1324,7 @@
@staticmethod
def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
info_values = ''.join(
['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
@@ -1294,7 +1344,7 @@
target_files = self._test_LoadInfoDict_createTargetFiles(
self.INFO_DICT_DEFAULT,
'BOOT/RAMDISK/system/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1305,7 +1355,7 @@
target_files = self._test_LoadInfoDict_createTargetFiles(
self.INFO_DICT_DEFAULT,
'BOOT/RAMDISK/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1346,7 +1396,7 @@
target_files = self._test_LoadInfoDict_createTargetFiles(
info_dict,
'RECOVERY/RAMDISK/system/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1362,7 +1412,7 @@
target_files = self._test_LoadInfoDict_createTargetFiles(
info_dict,
'RECOVERY/RAMDISK/system/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1376,7 +1426,7 @@
target_files = self._test_LoadInfoDict_createTargetFiles(
info_dict,
'RECOVERY/RAMDISK/system/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
loaded_dict = common.LoadInfoDict(target_files_zip)
self.assertEqual(3, loaded_dict['recovery_api_version'])
self.assertEqual(2, loaded_dict['fstab_version'])
@@ -1388,7 +1438,7 @@
self.INFO_DICT_DEFAULT,
'BOOT/RAMDISK/system/etc/recovery.fstab')
common.ZipDelete(target_files, 'META/misc_info.txt')
- with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1412,19 +1462,23 @@
target_files = self._test_LoadInfoDict_createTargetFiles(
self.INFO_DICT_DEFAULT,
'BOOT/RAMDISK/system/etc/recovery.fstab')
- with zipfile.ZipFile(target_files, 'r') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
self.assertRaises(
AssertionError, common.LoadInfoDict, target_files_zip, True)
def test_MergeDynamicPartitionInfoDicts_ReturnsMergedDict(self):
framework_dict = {
+ 'use_dynamic_partitions': 'true',
'super_partition_groups': 'group_a',
'dynamic_partition_list': 'system',
'super_group_a_partition_list': 'system',
}
vendor_dict = {
+ 'use_dynamic_partitions': 'true',
'super_partition_groups': 'group_a group_b',
'dynamic_partition_list': 'vendor product',
+ 'super_block_devices': 'super',
+ 'super_super_device_size': '3000',
'super_group_a_partition_list': 'vendor',
'super_group_a_group_size': '1000',
'super_group_b_partition_list': 'product',
@@ -1434,8 +1488,11 @@
framework_dict=framework_dict,
vendor_dict=vendor_dict)
expected_merged_dict = {
+ 'use_dynamic_partitions': 'true',
'super_partition_groups': 'group_a group_b',
- 'dynamic_partition_list': 'system vendor product',
+ 'dynamic_partition_list': 'product system vendor',
+ 'super_block_devices': 'super',
+ 'super_super_device_size': '3000',
'super_group_a_partition_list': 'system vendor',
'super_group_a_group_size': '1000',
'super_group_b_partition_list': 'product',
@@ -1445,12 +1502,14 @@
def test_MergeDynamicPartitionInfoDicts_IgnoringFrameworkGroupSize(self):
framework_dict = {
+ 'use_dynamic_partitions': 'true',
'super_partition_groups': 'group_a',
'dynamic_partition_list': 'system',
'super_group_a_partition_list': 'system',
'super_group_a_group_size': '5000',
}
vendor_dict = {
+ 'use_dynamic_partitions': 'true',
'super_partition_groups': 'group_a group_b',
'dynamic_partition_list': 'vendor product',
'super_group_a_partition_list': 'vendor',
@@ -1462,8 +1521,9 @@
framework_dict=framework_dict,
vendor_dict=vendor_dict)
expected_merged_dict = {
+ 'use_dynamic_partitions': 'true',
'super_partition_groups': 'group_a group_b',
- 'dynamic_partition_list': 'system vendor product',
+ 'dynamic_partition_list': 'product system vendor',
'super_group_a_partition_list': 'system vendor',
'super_group_a_group_size': '1000',
'super_group_b_partition_list': 'product',
@@ -1704,7 +1764,7 @@
@staticmethod
def get_op_list(output_path):
- with zipfile.ZipFile(output_path) as output_zip:
+ with zipfile.ZipFile(output_path, allowZip64=True) as output_zip:
with output_zip.open('dynamic_partitions_op_list') as op_list:
return [line.decode().strip() for line in op_list.readlines()
if not line.startswith(b'#')]
@@ -1724,7 +1784,7 @@
MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]
dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
- with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
self.assertEqual(str(self.script).strip(), """
@@ -1772,7 +1832,7 @@
dp_diff = common.DynamicPartitionsDifference(target_info,
block_diffs=[],
source_info_dict=source_info)
- with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
lines = self.get_op_list(self.output_path)
@@ -1816,7 +1876,7 @@
dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
source_info_dict=source_info)
- with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
metadata_idx = self.script.lines.index(
@@ -1887,7 +1947,7 @@
dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
source_info_dict=source_info)
- with zipfile.ZipFile(self.output_path, 'w') as output_zip:
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
self.assertNotIn("block_image_update", str(self.script),
@@ -1910,7 +1970,7 @@
@staticmethod
def _BuildZipFile(entries):
input_file = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(input_file, 'w') as input_zip:
+ with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
for name, content in entries.items():
input_zip.writestr(name, content)
@@ -1927,7 +1987,7 @@
'ODM/etc/build.prop': '\n'.join(build_prop),
})
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
placeholder_values = {
'ro.boot.product.device_name': ['std', 'pro']
}
@@ -1959,7 +2019,7 @@
'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
})
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
placeholder_values = {
'ro.boot.product.device_name': 'std'
}
@@ -1974,7 +2034,7 @@
'ro.product.odm.name': 'product1',
}, partition_props.build_props)
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
placeholder_values = {
'ro.boot.product.device_name': 'pro'
}
@@ -1995,7 +2055,7 @@
'ODM/etc/build.prop': '\n'.join(build_prop),
})
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
partition_props = common.PartitionBuildProps.FromInputFile(
input_zip, 'odm')
@@ -2038,7 +2098,7 @@
'ODM/etc/build_product2.prop': '\n'.join(product2_prop),
})
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
placeholder_values = {
'ro.boot.product.device_name': 'std',
'ro.boot.product.product_name': 'product1',
@@ -2055,7 +2115,7 @@
'ro.product.odm.name': 'product1'
}, partition_props.build_props)
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
placeholder_values = {
'ro.boot.product.device_name': 'pro',
'ro.boot.product.product_name': 'product2',
@@ -2089,7 +2149,7 @@
'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
})
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
placeholder_values = {
'ro.boot.product.device_name': 'std',
}
@@ -2126,7 +2186,7 @@
'ODM/etc/build_product2.prop': '\n'.join(product2_prop),
})
- with zipfile.ZipFile(input_file, 'r') as input_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
placeholder_values = {
'ro.boot.product.device_name': 'std',
'ro.boot.product.product_name': 'product1',
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index ff8593b..7ea7f96 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -117,6 +117,15 @@
DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
vendor_item_list))
+ def test_validate_config_lists_ReturnsFalseIfSharedExtractedPartitionImage(
+ self):
+ vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
+ vendor_item_list.append('IMAGES/system.img')
+ self.assertFalse(
+ validate_config_lists(DEFAULT_FRAMEWORK_ITEM_LIST,
+ DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
+ vendor_item_list))
+
def test_validate_config_lists_ReturnsFalseIfBadSystemMiscInfoKeys(self):
for bad_key in ['dynamic_partition_list', 'super_partition_groups']:
framework_misc_info_keys = list(DEFAULT_FRAMEWORK_MISC_INFO_KEYS)
@@ -144,8 +153,7 @@
process_apex_keys_apk_certs_common(framework_dir, vendor_dir, output_dir,
set(['product', 'system', 'system_ext']),
- set(['odm', 'vendor']),
- 'apexkeys.txt')
+ set(['odm', 'vendor']), 'apexkeys.txt')
merged_entries = []
merged_path = os.path.join(self.testdata_dir, 'apexkeys_merge.txt')
@@ -180,8 +188,7 @@
self.assertRaises(ValueError, process_apex_keys_apk_certs_common,
framework_dir, conflict_dir, output_dir,
set(['product', 'system', 'system_ext']),
- set(['odm', 'vendor']),
- 'apexkeys.txt')
+ set(['odm', 'vendor']), 'apexkeys.txt')
def test_process_apex_keys_apk_certs_HandlesApkCertsSyntax(self):
output_dir = common.MakeTempDir()
@@ -201,8 +208,7 @@
process_apex_keys_apk_certs_common(framework_dir, vendor_dir, output_dir,
set(['product', 'system', 'system_ext']),
- set(['odm', 'vendor']),
- 'apkcerts.txt')
+ set(['odm', 'vendor']), 'apkcerts.txt')
merged_entries = []
merged_path = os.path.join(self.testdata_dir, 'apkcerts_merge.txt')
diff --git a/tools/releasetools/test_non_ab_ota.py b/tools/releasetools/test_non_ab_ota.py
index ee1b411..5207e2f 100644
--- a/tools/releasetools/test_non_ab_ota.py
+++ b/tools/releasetools/test_non_ab_ota.py
@@ -42,12 +42,13 @@
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(1, len(tokens))
+ self.assertEqual(2, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Finalize(self):
entries = [
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()
@@ -57,14 +58,16 @@
property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(1, len(tokens))
+ self.assertEqual(2, len(tokens))
# 'META-INF/com/android/metadata' will be key'd as 'metadata'.
entries[0] = 'metadata'
+ entries[1] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
def test_Verify(self):
entries = (
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = NonAbOtaPropertyFiles()
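
In these non-A/B property-files tests the expected token count rises from 1
to 2 because the package now lists META-INF/com/android/metadata.pb alongside
the legacy text metadata. A rough, hedged sketch of what such a property-files
string looks like; the name:offset:size token layout is inferred from the
helper names in these tests, not quoted from the implementation:

# Hypothetical helper mirroring what _parse_property_files_string appears to
# do: split a comma-separated list of "name:offset:size" tokens.
def parse_property_files_string(property_files_string):
  tokens = {}
  for token in property_files_string.strip().split(','):
    name, offset, size = token.split(':')
    tokens[name] = (int(offset), int(size))
  return tokens

example = 'metadata:128:512,metadata.pb:640:300'  # illustrative offsets/sizes
print(parse_property_files_string(example))
# {'metadata': (128, 512), 'metadata.pb': (640, 300)}
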
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 52aa487..b556b3a 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -20,21 +20,27 @@
import zipfile
import common
+import ota_metadata_pb2
import test_utils
-from ota_utils import CalculateRuntimeDevicesAndFingerprints
+from ota_utils import (
+ BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
+ FinalizeMetadata, GetPackageMetadata, PropertyFiles)
from ota_from_target_files import (
- _LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata,
- GetPackageMetadata, GetTargetFilesZipForSecondaryImages,
+ _LoadOemDicts, AbOtaPropertyFiles,
+ GetTargetFilesZipForCustomImagesUpdates,
+ GetTargetFilesZipForPartialUpdates,
+ GetTargetFilesZipForSecondaryImages,
GetTargetFilesZipWithoutPostinstallConfig,
- Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
- StreamingPropertyFiles)
-from non_ab_ota import NonAbOtaPropertyFiles
+ Payload, PayloadSigner, POSTINSTALL_CONFIG,
+ StreamingPropertyFiles, AB_PARTITIONS)
+from apex_utils import GetApexInfoFromTargetFiles
from test_utils import PropertyFilesTestCase
-def construct_target_files(secondary=False):
+
+def construct_target_files(secondary=False, compressedApex=False):
"""Returns a target-files.zip file for generating OTA packages."""
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
# META/update_engine_config.txt
target_files_zip.writestr(
'META/update_engine_config.txt',
@@ -73,6 +79,11 @@
target_files_zip.writestr('IMAGES/system_other.img',
os.urandom(len("system_other")))
+ if compressedApex:
+ apex_file_name = 'com.android.apex.compressed.v1.capex'
+ apex_file = os.path.join(test_utils.get_current_dir(), apex_file_name)
+ target_files_zip.write(apex_file, 'SYSTEM/apex/' + apex_file_name)
+
return target_files
@@ -143,14 +154,13 @@
),
'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
'vendor', {
- 'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
+ 'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
),
'property1': 'value1',
'property2': 4096,
'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
}
-
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
self.assertTrue(os.path.exists(self.testdata_dir))
@@ -164,63 +174,71 @@
common.OPTIONS.no_signing = False
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
common.OPTIONS.search_path = test_utils.get_search_path()
+ @staticmethod
+ def GetLegacyOtaMetadata(target_info, source_info=None):
+ metadata_proto = GetPackageMetadata(target_info, source_info)
+ return BuildLegacyOtaMetadata(metadata_proto)
+
def test_GetPackageMetadata_abOta_full(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
+ target_info_dict['ab_partitions'] = []
target_info = common.BuildInfo(target_info_dict, None)
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-type' : 'AB',
- 'ota-required-cache' : '0',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-type': 'AB',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
def test_GetPackageMetadata_abOta_incremental(self):
target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
target_info_dict['ab_update'] = 'true'
+ target_info_dict['ab_partitions'] = []
target_info = common.BuildInfo(target_info_dict, None)
source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata = self.GetLegacyOtaMetadata(target_info, source_info)
self.assertDictEqual(
{
- 'ota-type' : 'AB',
- 'ota-required-cache' : '0',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
- 'pre-build' : 'build-fingerprint-source',
- 'pre-build-incremental' : 'build-version-incremental-source',
+ 'ota-type': 'AB',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
+ 'pre-build': 'build-fingerprint-source',
+ 'pre-build-incremental': 'build-version-incremental-source',
},
metadata)
def test_GetPackageMetadata_nonAbOta_full(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-type' : 'BLOCK',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
@@ -228,52 +246,70 @@
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
common.OPTIONS.incremental_source = ''
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata = self.GetLegacyOtaMetadata(target_info, source_info)
self.assertDictEqual(
{
- 'ota-type' : 'BLOCK',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
- 'pre-build' : 'build-fingerprint-source',
- 'pre-build-incremental' : 'build-version-incremental-source',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
+ 'pre-build': 'build-fingerprint-source',
+ 'pre-build-incremental': 'build-version-incremental-source',
},
metadata)
def test_GetPackageMetadata_wipe(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.wipe_user_data = True
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-type' : 'BLOCK',
- 'ota-wipe' : 'yes',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'ota-wipe': 'yes',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetApexInfoFromTargetFiles(self):
+ target_files = construct_target_files(compressedApex=True)
+ apex_infos = GetApexInfoFromTargetFiles(target_files)
+ self.assertEqual(len(apex_infos), 1)
+ self.assertEqual(apex_infos[0].package_name, "com.android.apex.compressed")
+ self.assertEqual(apex_infos[0].version, 1)
+ self.assertEqual(apex_infos[0].is_compressed, True)
+ # Compare the decompressed APEX size with the original uncompressed APEX
+ original_apex_name = 'com.android.apex.compressed.v1_original.apex'
+ original_apex_filepath = os.path.join(test_utils.get_current_dir(), original_apex_name)
+ uncompressed_apex_size = os.path.getsize(original_apex_filepath)
+ self.assertEqual(apex_infos[0].decompressed_size, uncompressed_apex_size)
+
+
def test_GetPackageMetadata_retrofitDynamicPartitions(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.retrofit_dynamic_partitions = True
- metadata = GetPackageMetadata(target_info)
+ metadata = self.GetLegacyOtaMetadata(target_info)
self.assertDictEqual(
{
- 'ota-retrofit-dynamic-partitions' : 'yes',
- 'ota-type' : 'BLOCK',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1500000000',
- 'pre-device' : 'product-device',
+ 'ota-retrofit-dynamic-partitions': 'yes',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1500000000',
+ 'pre-device': 'product-device',
},
metadata)
@@ -293,7 +329,7 @@
target_info = common.BuildInfo(target_info_dict, None)
source_info = common.BuildInfo(source_info_dict, None)
common.OPTIONS.incremental_source = ''
- self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+ self.assertRaises(RuntimeError, self.GetLegacyOtaMetadata, target_info,
source_info)
def test_GetPackageMetadata_downgrade(self):
@@ -307,20 +343,22 @@
common.OPTIONS.incremental_source = ''
common.OPTIONS.downgrade = True
common.OPTIONS.wipe_user_data = True
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+
self.assertDictEqual(
{
- 'ota-downgrade' : 'yes',
- 'ota-type' : 'BLOCK',
- 'ota-wipe' : 'yes',
- 'post-build' : 'build-fingerprint-target',
- 'post-build-incremental' : 'build-version-incremental-target',
- 'post-sdk-level' : '27',
- 'post-security-patch-level' : '2017-12-01',
- 'post-timestamp' : '1400000000',
- 'pre-device' : 'product-device',
- 'pre-build' : 'build-fingerprint-source',
- 'pre-build-incremental' : 'build-version-incremental-source',
+ 'ota-downgrade': 'yes',
+ 'ota-type': 'BLOCK',
+ 'ota-required-cache': '0',
+ 'ota-wipe': 'yes',
+ 'post-build': 'build-fingerprint-target',
+ 'post-build-incremental': 'build-version-incremental-target',
+ 'post-sdk-level': '27',
+ 'post-security-patch-level': '2017-12-01',
+ 'post-timestamp': '1400000000',
+ 'pre-device': 'product-device',
+ 'pre-build': 'build-fingerprint-source',
+ 'pre-build-incremental': 'build-version-incremental-source',
},
metadata)
@@ -402,7 +440,7 @@
'super_google_dynamic_partitions_partition_list=system vendor product',
])
- with zipfile.ZipFile(input_file, 'a') as append_zip:
+ with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
common.ZipWriteStr(append_zip, 'META/misc_info.txt', misc_info)
common.ZipWriteStr(append_zip, 'META/dynamic_partitions_info.txt',
dynamic_partitions_info)
@@ -435,6 +473,86 @@
updated_dynamic_partitions_info)
@test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetTargetFilesZipForPartialUpdates_singlePartition(self):
+ input_file = construct_target_files()
+ with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
+ common.ZipWriteStr(append_zip, 'IMAGES/system.map', 'fake map')
+
+ target_file = GetTargetFilesZipForPartialUpdates(input_file, ['system'])
+ with zipfile.ZipFile(target_file) as verify_zip:
+ namelist = verify_zip.namelist()
+ ab_partitions = verify_zip.read('META/ab_partitions.txt').decode()
+
+ self.assertIn('META/ab_partitions.txt', namelist)
+ self.assertIn('META/update_engine_config.txt', namelist)
+ self.assertIn('IMAGES/system.img', namelist)
+ self.assertIn('IMAGES/system.map', namelist)
+
+ self.assertNotIn('IMAGES/boot.img', namelist)
+ self.assertNotIn('IMAGES/system_other.img', namelist)
+ self.assertNotIn('RADIO/bootloader.img', namelist)
+ self.assertNotIn('RADIO/modem.img', namelist)
+
+ self.assertEqual('system', ab_partitions)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetTargetFilesZipForPartialUpdates_unrecognizedPartition(self):
+ input_file = construct_target_files()
+ self.assertRaises(ValueError, GetTargetFilesZipForPartialUpdates,
+ input_file, ['product'])
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetTargetFilesZipForPartialUpdates_dynamicPartitions(self):
+ input_file = construct_target_files(secondary=True)
+ misc_info = '\n'.join([
+ 'use_dynamic_partition_size=true',
+ 'use_dynamic_partitions=true',
+ 'dynamic_partition_list=system vendor product',
+ 'super_partition_groups=google_dynamic_partitions',
+ 'super_google_dynamic_partitions_group_size=4873781248',
+ 'super_google_dynamic_partitions_partition_list=system vendor product',
+ ])
+ dynamic_partitions_info = '\n'.join([
+ 'super_partition_groups=google_dynamic_partitions',
+ 'super_google_dynamic_partitions_group_size=4873781248',
+ 'super_google_dynamic_partitions_partition_list=system vendor product',
+ ])
+
+ with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
+ common.ZipWriteStr(append_zip, 'META/misc_info.txt', misc_info)
+ common.ZipWriteStr(append_zip, 'META/dynamic_partitions_info.txt',
+ dynamic_partitions_info)
+
+ target_file = GetTargetFilesZipForPartialUpdates(input_file,
+ ['boot', 'system'])
+ with zipfile.ZipFile(target_file) as verify_zip:
+ namelist = verify_zip.namelist()
+ ab_partitions = verify_zip.read('META/ab_partitions.txt').decode()
+ updated_misc_info = verify_zip.read('META/misc_info.txt').decode()
+ updated_dynamic_partitions_info = verify_zip.read(
+ 'META/dynamic_partitions_info.txt').decode()
+
+ self.assertIn('META/ab_partitions.txt', namelist)
+ self.assertIn('IMAGES/boot.img', namelist)
+ self.assertIn('IMAGES/system.img', namelist)
+ self.assertIn('META/misc_info.txt', namelist)
+ self.assertIn('META/dynamic_partitions_info.txt', namelist)
+
+ self.assertNotIn('IMAGES/system_other.img', namelist)
+ self.assertNotIn('RADIO/bootloader.img', namelist)
+ self.assertNotIn('RADIO/modem.img', namelist)
+
+ # Check the vendor & product are removed from the partitions list.
+ expected_misc_info = misc_info.replace('system vendor product',
+ 'system')
+ expected_dynamic_partitions_info = dynamic_partitions_info.replace(
+ 'system vendor product', 'system')
+ self.assertEqual(expected_misc_info, updated_misc_info)
+ self.assertEqual(expected_dynamic_partitions_info,
+ updated_dynamic_partitions_info)
+ self.assertEqual('boot\nsystem', ab_partitions)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_GetTargetFilesZipWithoutPostinstallConfig(self):
input_file = construct_target_files()
target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
@@ -449,6 +567,46 @@
with zipfile.ZipFile(target_file) as verify_zip:
self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetTargetFilesZipForCustomImagesUpdates_oemDefaultImage(self):
+ input_file = construct_target_files()
+ with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
+ common.ZipWriteStr(append_zip, 'IMAGES/oem.img', 'oem')
+ common.ZipWriteStr(append_zip, 'IMAGES/oem_test.img', 'oem_test')
+
+ target_file = GetTargetFilesZipForCustomImagesUpdates(
+ input_file, {'oem': 'oem.img'})
+
+ with zipfile.ZipFile(target_file) as verify_zip:
+ namelist = verify_zip.namelist()
+ ab_partitions = verify_zip.read('META/ab_partitions.txt').decode()
+ oem_image = verify_zip.read('IMAGES/oem.img').decode()
+
+ self.assertIn('META/ab_partitions.txt', namelist)
+ self.assertEqual('boot\nsystem\nvendor\nbootloader\nmodem', ab_partitions)
+ self.assertIn('IMAGES/oem.img', namelist)
+ self.assertEqual('oem', oem_image)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetTargetFilesZipForCustomImagesUpdates_oemTestImage(self):
+ input_file = construct_target_files()
+ with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
+ common.ZipWriteStr(append_zip, 'IMAGES/oem.img', 'oem')
+ common.ZipWriteStr(append_zip, 'IMAGES/oem_test.img', 'oem_test')
+
+ target_file = GetTargetFilesZipForCustomImagesUpdates(
+ input_file, {'oem': 'oem_test.img'})
+
+ with zipfile.ZipFile(target_file) as verify_zip:
+ namelist = verify_zip.namelist()
+ ab_partitions = verify_zip.read('META/ab_partitions.txt').decode()
+ oem_image = verify_zip.read('IMAGES/oem.img').decode()
+
+ self.assertIn('META/ab_partitions.txt', namelist)
+ self.assertEqual('boot\nsystem\nvendor\nbootloader\nmodem', ab_partitions)
+ self.assertIn('IMAGES/oem.img', namelist)
+ self.assertEqual('oem_test', oem_image)
+
def _test_FinalizeMetadata(self, large_entry=False):
entries = [
'required-entry1',
@@ -457,20 +615,20 @@
zip_file = PropertyFilesTest.construct_zip_package(entries)
# Add a large entry of 1 GiB if requested.
if large_entry:
- with zipfile.ZipFile(zip_file, 'a') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'a', allowZip64=True) as zip_fp:
zip_fp.writestr(
# Using 'zoo' so that the entry stays behind others after signing.
'zoo',
'A' * 1024 * 1024 * 1024,
zipfile.ZIP_STORED)
- metadata = {}
+ metadata = ota_metadata_pb2.OtaMetadata()
output_file = common.MakeTempFile(suffix='.zip')
needed_property_files = (
TestPropertyFiles(),
)
FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
- self.assertIn('ota-test-property-files', metadata)
+ self.assertIn('ota-test-property-files', metadata.property_files)
@test_utils.SkipIfExternalToolsUnavailable()
def test_FinalizeMetadata(self):
@@ -499,7 +657,7 @@
'optional-entry2',
]
zip_file = PropertyFilesTest.construct_zip_package(entries)
- with zipfile.ZipFile(zip_file, 'a') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'a', allowZip64=True) as zip_fp:
zip_fp.writestr(
# 'foo-entry1' will appear ahead of all other entries (in alphabetical
# order) after the signing, which will in turn trigger the
@@ -508,13 +666,13 @@
'A' * 1024 * 1024,
zipfile.ZIP_STORED)
- metadata = {}
+ metadata = ota_metadata_pb2.OtaMetadata()
needed_property_files = (
TestPropertyFiles(),
)
output_file = common.MakeTempFile(suffix='.zip')
FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
- self.assertIn('ota-test-property-files', metadata)
+ self.assertIn('ota-test-property-files', metadata.property_files)
class TestPropertyFiles(PropertyFiles):
@@ -532,8 +690,8 @@
'optional-entry2',
)
-class PropertyFilesTest(PropertyFilesTestCase):
+class PropertyFilesTest(PropertyFilesTestCase):
@test_utils.SkipIfExternalToolsUnavailable()
def test_Compute(self):
@@ -543,11 +701,11 @@
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(3, len(tokens))
+ self.assertEqual(4, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Compute_withOptionalEntries(self):
@@ -559,11 +717,11 @@
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(5, len(tokens))
+ self.assertEqual(6, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Compute_missingRequiredEntry(self):
@@ -572,7 +730,7 @@
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
self.assertRaises(KeyError, property_files.Compute, zip_fp)
@test_utils.SkipIfExternalToolsUnavailable()
@@ -581,19 +739,21 @@
'required-entry1',
'required-entry2',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
- self.assertEqual(3, len(tokens))
+ self.assertEqual(4, len(tokens))
# 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
# streaming metadata.
entries[2] = 'metadata'
+ entries[3] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
@test_utils.SkipIfExternalToolsUnavailable()
@@ -604,10 +764,11 @@
'optional-entry1',
'optional-entry2',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
# First get the raw metadata string (i.e. without padding space).
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
@@ -638,10 +799,11 @@
'optional-entry1',
'optional-entry2',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = TestPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
# First get the raw metadata string (i.e. without padding space).
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
@@ -683,11 +845,11 @@
)
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- self.assertEqual(5, len(tokens))
+ self.assertEqual(6, len(tokens))
self._verify_entries(zip_file, tokens, entries)
def test_Finalize(self):
@@ -697,19 +859,21 @@
'care_map.txt',
'compatibility.zip',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
]
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(streaming_metadata)
- self.assertEqual(5, len(tokens))
+ self.assertEqual(6, len(tokens))
# 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
# streaming metadata.
entries[4] = 'metadata'
+ entries[5] = 'metadata.pb'
self._verify_entries(zip_file, tokens, entries)
def test_Verify(self):
@@ -719,10 +883,11 @@
'care_map.txt',
'compatibility.zip',
'META-INF/com/android/metadata',
+ 'META-INF/com/android/metadata.pb',
)
zip_file = self.construct_zip_package(entries)
property_files = StreamingPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
# First get the raw metadata string (i.e. without padding space).
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
@@ -750,7 +915,7 @@
common.OPTIONS.payload_signer_args = None
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
def test_init(self):
@@ -780,7 +945,7 @@
payload.Sign(payload_signer)
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
payload.WriteToZip(output_zip)
# Find out the payload metadata offset and size.
@@ -845,7 +1010,7 @@
payload.Sign(payload_signer)
zip_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(zip_file, 'w') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'w', allowZip64=True) as zip_fp:
# 'payload.bin',
payload.WriteToZip(zip_fp)
@@ -855,6 +1020,7 @@
# Put META-INF/com/android/metadata if needed.
if with_metadata:
entries.append('META-INF/com/android/metadata')
+ entries.append('META-INF/com/android/metadata.pb')
for entry in entries:
zip_fp.writestr(
@@ -866,13 +1032,13 @@
def test_Compute(self):
zip_file = self.construct_zip_package_withValidPayload()
property_files = AbOtaPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
property_files_string = property_files.Compute(zip_fp)
tokens = self._parse_property_files_string(property_files_string)
- # "6" indcludes the four entries above, one metadata entry, and one entry
+    # "7" includes the four entries above, two metadata entries, and one entry
# for payload-metadata.bin.
- self.assertEqual(6, len(tokens))
+ self.assertEqual(7, len(tokens))
self._verify_entries(
zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
@@ -880,15 +1046,16 @@
def test_Finalize(self):
zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
property_files = AbOtaPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
- property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
+ property_files_string = property_files.Finalize(
+ zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
- # "6" indcludes the four entries above, one metadata entry, and one entry
+ # "7" includes the four entries above, two metadata entries, and one entry
# for payload-metadata.bin.
- self.assertEqual(6, len(tokens))
+ self.assertEqual(7, len(tokens))
self._verify_entries(
zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
@@ -896,7 +1063,7 @@
def test_Verify(self):
zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
property_files = AbOtaPropertyFiles()
- with zipfile.ZipFile(zip_file, 'r') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'r', allowZip64=True) as zip_fp:
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
@@ -916,7 +1083,7 @@
common.OPTIONS.payload_signer_args = []
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
def _assertFilesEqual(self, file1, file2):
@@ -934,7 +1101,7 @@
common.OPTIONS.package_key = os.path.join(
self.testdata_dir, 'testkey_with_passwd')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : 'foo',
+ common.OPTIONS.package_key: 'foo',
}
payload_signer = PayloadSigner()
self.assertEqual('openssl', payload_signer.signer)
@@ -1011,7 +1178,7 @@
common.OPTIONS.payload_signer_args = None
common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
common.OPTIONS.key_passwords = {
- common.OPTIONS.package_key : None,
+ common.OPTIONS.package_key: None,
}
@staticmethod
@@ -1063,7 +1230,7 @@
payload.Sign(PayloadSigner())
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
payload.WriteToZip(output_zip)
import check_ota_package_signature
@@ -1077,7 +1244,7 @@
payload.Sign(PayloadSigner())
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
payload.WriteToZip(output_zip)
import check_ota_package_signature
@@ -1116,7 +1283,7 @@
payload.Sign(PayloadSigner())
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
payload.WriteToZip(output_zip)
with zipfile.ZipFile(output_file) as verify_zip:
@@ -1138,14 +1305,14 @@
payload = self._create_payload_full()
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
# Also test with incremental payload.
payload = self._create_payload_incremental()
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
@test_utils.SkipIfExternalToolsUnavailable()
@@ -1154,7 +1321,7 @@
payload.Sign(PayloadSigner())
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
payload.WriteToZip(output_zip)
with zipfile.ZipFile(output_file) as verify_zip:
@@ -1166,8 +1333,8 @@
# Then assert these entries are stored.
for entry_info in verify_zip.infolist():
if entry_info.filename not in (
- Payload.SECONDARY_PAYLOAD_BIN,
- Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
+ Payload.SECONDARY_PAYLOAD_BIN,
+ Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
continue
self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
@@ -1177,20 +1344,41 @@
'recovery_api_version=3',
'fstab_version=2',
'recovery_as_boot=true',
+ 'ab_update=true',
]
BUILD_PROP = [
- 'ro.build.version.release=version-release',
'ro.build.id=build-id',
'ro.build.version.incremental=version-incremental',
'ro.build.type=build-type',
'ro.build.tags=build-tags',
+ 'ro.build.version.release=version-release',
+ 'ro.build.version.release_or_codename=version-release',
'ro.build.version.sdk=30',
'ro.build.version.security_patch=2020',
- 'ro.build.date.utc=12345678'
+ 'ro.build.date.utc=12345678',
+ 'ro.system.build.version.release=version-release',
+ 'ro.system.build.id=build-id',
+ 'ro.system.build.version.incremental=version-incremental',
+ 'ro.system.build.type=build-type',
+ 'ro.system.build.tags=build-tags',
+ 'ro.system.build.version.sdk=30',
+ 'ro.system.build.version.security_patch=2020',
+ 'ro.system.build.date.utc=12345678',
+ 'ro.product.system.brand=generic',
+ 'ro.product.system.name=generic',
+ 'ro.product.system.device=generic',
]
VENDOR_BUILD_PROP = [
+ 'ro.vendor.build.version.release=version-release',
+ 'ro.vendor.build.id=build-id',
+ 'ro.vendor.build.version.incremental=version-incremental',
+ 'ro.vendor.build.type=build-type',
+ 'ro.vendor.build.tags=build-tags',
+ 'ro.vendor.build.version.sdk=30',
+ 'ro.vendor.build.version.security_patch=2020',
+ 'ro.vendor.build.date.utc=12345678',
'ro.product.vendor.brand=vendor-product-brand',
'ro.product.vendor.name=vendor-product-name',
'ro.product.vendor.device=vendor-product-device'
@@ -1319,6 +1507,7 @@
'ro.product.vendor.name=vendor-product-std',
'VENDOR/etc/build_pro.prop':
'ro.product.vendor.name=vendor-product-pro',
+ AB_PARTITIONS: '\n'.join(['system', 'vendor']),
}, self.test_dir)
common.OPTIONS.boot_variable_file = common.MakeTempFile()
@@ -1326,8 +1515,8 @@
f.write('ro.boot.sku_name=std,pro')
build_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
- metadata = GetPackageMetadata(build_info)
- self.assertEqual('vendor-product-device', metadata['pre-device'])
+ metadata_dict = BuildLegacyOtaMetadata(GetPackageMetadata(build_info))
+ self.assertEqual('vendor-product-device', metadata_dict['pre-device'])
fingerprints = [
self.constructFingerprint(
'vendor-product-brand/vendor-product-name/vendor-product-device'),
@@ -1336,7 +1525,33 @@
self.constructFingerprint(
'vendor-product-brand/vendor-product-std/vendor-product-device'),
]
- self.assertEqual('|'.join(fingerprints), metadata['post-build'])
+ self.assertEqual('|'.join(fingerprints), metadata_dict['post-build'])
+
+ def CheckMetadataEqual(self, metadata_dict, metadata_proto):
+ post_build = metadata_proto.postcondition
+ self.assertEqual('|'.join(post_build.build),
+ metadata_dict['post-build'])
+ self.assertEqual(post_build.build_incremental,
+ metadata_dict['post-build-incremental'])
+ self.assertEqual(post_build.sdk_level,
+ metadata_dict['post-sdk-level'])
+ self.assertEqual(post_build.security_patch_level,
+ metadata_dict['post-security-patch-level'])
+
+ if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+ ota_type = 'AB'
+ elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+ ota_type = 'BLOCK'
+ else:
+ ota_type = ''
+ self.assertEqual(ota_type, metadata_dict['ota-type'])
+ self.assertEqual(metadata_proto.wipe,
+ metadata_dict.get('ota-wipe') == 'yes')
+ self.assertEqual(metadata_proto.required_cache,
+ int(metadata_dict.get('ota-required-cache', 0)))
+ self.assertEqual(metadata_proto.retrofit_dynamic_partitions,
+ metadata_dict.get(
+ 'ota-retrofit-dynamic-partitions') == 'yes')
def test_GetPackageMetadata_incremental_package(self):
vendor_build_prop = copy.deepcopy(self.VENDOR_BUILD_PROP)
@@ -1344,6 +1559,8 @@
'import /vendor/etc/build_${ro.boot.sku_name}.prop',
])
self.writeFiles({
+ 'META/misc_info.txt': '\n'.join(self.MISC_INFO),
+ 'META/ab_partitions.txt': '\n'.join(['system', 'vendor', 'product']),
'SYSTEM/build.prop': '\n'.join(self.BUILD_PROP),
'VENDOR/build.prop': '\n'.join(vendor_build_prop),
'VENDOR/etc/build_std.prop':
@@ -1365,10 +1582,22 @@
'ro.build.tags=build-tags',
'ro.build.version.sdk=29',
'ro.build.version.security_patch=2020',
- 'ro.build.date.utc=12340000'
+ 'ro.build.date.utc=12340000',
+ 'ro.system.build.version.release=source-version-release',
+ 'ro.system.build.id=source-build-id',
+ 'ro.system.build.version.incremental=source-version-incremental',
+ 'ro.system.build.type=build-type',
+ 'ro.system.build.tags=build-tags',
+ 'ro.system.build.version.sdk=29',
+ 'ro.system.build.version.security_patch=2020',
+ 'ro.system.build.date.utc=12340000',
+ 'ro.product.system.brand=generic',
+ 'ro.product.system.name=generic',
+ 'ro.product.system.device=generic',
]
self.writeFiles({
'META/misc_info.txt': '\n'.join(self.MISC_INFO),
+ 'META/ab_partitions.txt': '\n'.join(['system', 'vendor', 'product']),
'SYSTEM/build.prop': '\n'.join(source_build_prop),
'VENDOR/build.prop': '\n'.join(vendor_build_prop),
'VENDOR/etc/build_std.prop':
@@ -1381,21 +1610,22 @@
target_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
source_info = common.BuildInfo(common.LoadInfoDict(source_dir))
- metadata = GetPackageMetadata(target_info, source_info)
+ metadata_proto = GetPackageMetadata(target_info, source_info)
+ metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
self.assertEqual(
'vendor-device-pro|vendor-device-std|vendor-product-device',
- metadata['pre-device'])
- suffix = ':source-version-release/source-build-id/' \
- 'source-version-incremental:build-type/build-tags'
+ metadata_dict['pre-device'])
+ source_suffix = ':source-version-release/source-build-id/' \
+ 'source-version-incremental:build-type/build-tags'
pre_fingerprints = [
'vendor-product-brand/vendor-product-name/vendor-device-pro'
- '{}'.format(suffix),
+ '{}'.format(source_suffix),
'vendor-product-brand/vendor-product-name/vendor-device-std'
- '{}'.format(suffix),
+ '{}'.format(source_suffix),
'vendor-product-brand/vendor-product-name/vendor-product-device'
- '{}'.format(suffix),
+ '{}'.format(source_suffix),
]
- self.assertEqual('|'.join(pre_fingerprints), metadata['pre-build'])
+ self.assertEqual('|'.join(pre_fingerprints), metadata_dict['pre-build'])
post_fingerprints = [
self.constructFingerprint(
@@ -1405,4 +1635,31 @@
self.constructFingerprint(
'vendor-product-brand/vendor-product-name/vendor-product-device'),
]
- self.assertEqual('|'.join(post_fingerprints), metadata['post-build'])
+ self.assertEqual('|'.join(post_fingerprints), metadata_dict['post-build'])
+
+ self.CheckMetadataEqual(metadata_dict, metadata_proto)
+
+ pre_partition_states = metadata_proto.precondition.partition_state
+ self.assertEqual(2, len(pre_partition_states))
+ self.assertEqual('system', pre_partition_states[0].partition_name)
+ self.assertEqual(['generic'], pre_partition_states[0].device)
+ self.assertEqual(['generic/generic/generic{}'.format(source_suffix)],
+ pre_partition_states[0].build)
+
+ self.assertEqual('vendor', pre_partition_states[1].partition_name)
+ self.assertEqual(['vendor-device-pro', 'vendor-device-std',
+ 'vendor-product-device'], pre_partition_states[1].device)
+ vendor_fingerprints = post_fingerprints
+ self.assertEqual(vendor_fingerprints, pre_partition_states[1].build)
+
+ post_partition_states = metadata_proto.postcondition.partition_state
+ self.assertEqual(2, len(post_partition_states))
+ self.assertEqual('system', post_partition_states[0].partition_name)
+ self.assertEqual(['generic'], post_partition_states[0].device)
+ self.assertEqual([self.constructFingerprint('generic/generic/generic')],
+ post_partition_states[0].build)
+
+ self.assertEqual('vendor', post_partition_states[1].partition_name)
+ self.assertEqual(['vendor-device-pro', 'vendor-device-std',
+ 'vendor-product-device'], post_partition_states[1].device)
+ self.assertEqual(vendor_fingerprints, post_partition_states[1].build)
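
The larger rework in this file follows the move of OTA metadata from a plain
dict to an ota_metadata_pb2.OtaMetadata protobuf, with BuildLegacyOtaMetadata()
producing the old key/value view for backward compatibility. A hedged sketch of
that round trip, using only fields that appear in the tests above; the values
are illustrative:

import ota_metadata_pb2
from ota_utils import BuildLegacyOtaMetadata

metadata_proto = ota_metadata_pb2.OtaMetadata()
metadata_proto.type = ota_metadata_pb2.OtaMetadata.AB
metadata_proto.required_cache = 0
metadata_proto.postcondition.build.append('build-fingerprint-target')
metadata_proto.postcondition.build_incremental = 'build-version-incremental-target'
metadata_proto.postcondition.sdk_level = '27'
metadata_proto.postcondition.security_patch_level = '2017-12-01'

# The legacy view is what still lands in META-INF/com/android/metadata.
legacy = BuildLegacyOtaMetadata(metadata_proto)
print(legacy.get('ota-type'))    # 'AB', per CheckMetadataEqual above
print(legacy.get('post-build'))  # 'build-fingerprint-target'
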
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 308172f..18e4858 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -164,15 +164,15 @@
"veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
input_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'w') as input_zip:
+ with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
# Test with the first certificate.
cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'r') as input_zip, \
- zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
+ zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
ReplaceVerityKeyId(input_zip, output_zip, cert_file)
with zipfile.ZipFile(output_file) as output_zip:
@@ -181,8 +181,8 @@
# Test with the second certificate.
cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
- with zipfile.ZipFile(input_file, 'r') as input_zip, \
- zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
+ zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
ReplaceVerityKeyId(input_zip, output_zip, cert_file)
with zipfile.ZipFile(output_file) as output_zip:
@@ -195,12 +195,12 @@
"loop.max_part=7\n")
input_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'w') as input_zip:
+ with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'r') as input_zip, \
- zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
+ zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
ReplaceVerityKeyId(input_zip, output_zip, None)
with zipfile.ZipFile(output_file) as output_zip:
@@ -284,7 +284,7 @@
]
entry_name = 'SYSTEM/etc/security/otacerts.zip'
output_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(output_file, 'w') as output_zip:
+ with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
WriteOtacerts(output_zip, entry_name, certs)
with zipfile.ZipFile(output_file) as input_zip:
self.assertIn(entry_name, input_zip.namelist())
@@ -294,7 +294,7 @@
def test_CheckApkAndApexKeysAvailable(self):
input_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'w') as input_zip:
+ with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
input_zip.writestr('SYSTEM/app/App1.apk', "App1-content")
input_zip.writestr('SYSTEM/app/App2.apk.gz', "App2-content")
@@ -318,7 +318,7 @@
def test_CheckApkAndApexKeysAvailable_invalidApexKeys(self):
input_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(input_file, 'w') as input_zip:
+ with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
input_zip.writestr('SYSTEM/apex/Apex1.apex', "Apex1-content")
input_zip.writestr('SYSTEM/apex/Apex2.apex', "Apex2-content")
@@ -466,10 +466,10 @@
def test_ReadApexKeysInfo(self):
target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
- with zipfile.ZipFile(target_files) as target_files_zip:
+ with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
@@ -491,10 +491,10 @@
'container_private_key="build/make/target/product/security/testkey2.pk8" '
'partition="system"')
target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
- with zipfile.ZipFile(target_files) as target_files_zip:
+ with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
self.assertRaises(ValueError, ReadApexKeysInfo, target_files_zip)
def test_ReadApexKeysInfo_missingPayloadPrivateKey(self):
@@ -505,10 +505,10 @@
'container_certificate="build/make/target/product/security/testkey.x509.pem" '
'container_private_key="build/make/target/product/security/testkey.pk8"')
target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
- with zipfile.ZipFile(target_files) as target_files_zip:
+ with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
@@ -528,10 +528,10 @@
'container_certificate="build/make/target/product/security/testkey.x509.pem" '
'container_private_key="build/make/target/product/security/testkey.pk8"')
target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
- with zipfile.ZipFile(target_files) as target_files_zip:
+ with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
@@ -551,10 +551,10 @@
'container_certificate="PRESIGNED" '
'container_private_key="PRESIGNED"')
target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
- with zipfile.ZipFile(target_files) as target_files_zip:
+ with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
@@ -574,10 +574,10 @@
'container_certificate="PRESIGNED" '
'container_private_key="PRESIGNED"')
target_files = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
target_files_zip.writestr('META/apexkeys.txt', apex_keys)
- with zipfile.ZipFile(target_files) as target_files_zip:
+ with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
keys_info = ReadApexKeysInfo(target_files_zip)
self.assertEqual({
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index 65092d8..808b392 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -22,6 +22,7 @@
import logging
import os
import os.path
+import re
import struct
import sys
import unittest
@@ -54,6 +55,11 @@
current_dir = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_dir, 'testdata')
+def get_current_dir():
+  """Returns the directory containing this script."""
+ # The script dir is the one we want, which could be different from pwd.
+ current_dir = os.path.dirname(os.path.realpath(__file__))
+ return current_dir
def get_search_path():
"""Returns the search path that has 'framework/signapk.jar' under."""
@@ -198,7 +204,7 @@
@staticmethod
def construct_zip_package(entries):
zip_file = common.MakeTempFile(suffix='.zip')
- with zipfile.ZipFile(zip_file, 'w') as zip_fp:
+ with zipfile.ZipFile(zip_file, 'w', allowZip64=True) as zip_fp:
for entry in entries:
zip_fp.writestr(
entry,
@@ -224,13 +230,26 @@
input_fp.seek(offset)
if entry == 'metadata':
expected = b'META-INF/COM/ANDROID/METADATA'
+ elif entry == 'metadata.pb':
+ expected = b'META-INF/COM/ANDROID/METADATA-PB'
else:
expected = entry.replace('.', '-').upper().encode()
self.assertEqual(expected, input_fp.read(size))
if __name__ == '__main__':
- testsuite = unittest.TestLoader().discover(
- os.path.dirname(os.path.realpath(__file__)))
+ # We only want to run tests from the top level directory. Unfortunately the
+ # pattern option of unittest.discover, internally using fnmatch, doesn't
+ # provide a good API to filter the test files based on directory. So we do an
+ # os walk and load them manually.
+ test_modules = []
+ base_path = os.path.dirname(os.path.realpath(__file__))
+ for dirpath, _, files in os.walk(base_path):
+ for fn in files:
+ if dirpath == base_path and re.match('test_.*\\.py$', fn):
+ test_modules.append(fn[:-3])
+
+ test_suite = unittest.TestLoader().loadTestsFromNames(test_modules)
+
# atest needs a verbosity level of >= 2 to correctly parse the result.
- unittest.TextTestRunner(verbosity=2).run(testsuite)
+ unittest.TextTestRunner(verbosity=2).run(test_suite)
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index ca70ca8..48b563d 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -272,7 +272,7 @@
input_file = common.MakeTempFile()
all_entries = ['SYSTEM/', 'SYSTEM/b', 'SYSTEM/a', 'IMAGES/',
'IMAGES/system.map', 'IMAGES/system.img']
- with zipfile.ZipFile(input_file, 'w') as input_zip:
+ with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
for name in all_entries:
input_zip.write(os.path.join(input_tmp, name), arcname=name)
@@ -321,7 +321,7 @@
input_file = common.MakeTempFile()
all_entries = ['SYSTEM/', 'SYSTEM/abc', 'IMAGES/',
'IMAGES/system.map', 'IMAGES/system.img']
- with zipfile.ZipFile(input_file, 'w') as input_zip:
+ with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
for name in all_entries:
input_zip.write(os.path.join(input_tmp, name), arcname=name)
@@ -357,9 +357,6 @@
'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
'ro.product.odm.device=coral',
]
- input_tmp = ValidateTargetFilesTest.make_build_prop({
- 'ODM/etc/build.prop': '\n'.join(build_prop),
- })
+ input_tmp = ValidateTargetFilesTest.make_build_prop(build_prop)
- self.assertRaises(ValueError, CheckBuildPropDuplicity,
- input_tmp)
+ self.assertRaises(ValueError, CheckBuildPropDuplicity, input_tmp)
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 2e3aa74..401857f 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -236,6 +236,7 @@
logging.info('Done checking %s', script_path)
+
# Symlink files in `src` to `dst`, if the files do not
# already exists in `dst` directory.
def symlinkIfNotExists(src, dst):
@@ -246,6 +247,7 @@
continue
os.symlink(os.path.join(src, filename), os.path.join(dst, filename))
+
def ValidateVerifiedBootImages(input_tmp, info_dict, options):
"""Validates the Verified Boot related images.
@@ -423,16 +425,25 @@
'Verified %s with avbtool (key: %s):\n%s', image, key,
stdoutdata.rstrip())
-def CheckDataDuplicity(lines):
+
+def CheckDataInconsistency(lines):
build_prop = {}
for line in lines:
if line.startswith("import") or line.startswith("#"):
continue
- key, value = line.split("=", 1)
+ if "=" not in line:
+ continue
+
+ key, value = line.rstrip().split("=", 1)
if key in build_prop:
- return key
+ logging.info("Duplicated key found for {}".format(key))
+ if value != build_prop[key]:
+ logging.error("Key {} is defined twice with different values {} vs {}"
+ .format(key, value, build_prop[key]))
+ return key
build_prop[key] = value
+
def CheckBuildPropDuplicity(input_tmp):
"""Check all buld.prop files inside directory input_tmp, raise error
if they contain duplicates"""
@@ -448,9 +459,11 @@
continue
logging.info("Checking {}".format(path))
with open(path, 'r') as fp:
- dupKey = CheckDataDuplicity(fp.readlines())
+ dupKey = CheckDataInconsistency(fp.readlines())
if dupKey:
- raise ValueError("{} contains duplicate keys for {}", path, dupKey)
+ raise ValueError("{} contains duplicate keys for {}".format(
+ path, dupKey))
+
def main():
parser = argparse.ArgumentParser(
@@ -487,7 +500,7 @@
input_tmp = common.UnzipTemp(args.target_files)
info_dict = common.LoadInfoDict(input_tmp)
- with zipfile.ZipFile(args.target_files, 'r') as input_zip:
+ with zipfile.ZipFile(args.target_files, 'r', allowZip64=True) as input_zip:
ValidateFileConsistency(input_zip, input_tmp, info_dict)
CheckBuildPropDuplicity(input_tmp)
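For readers following the CheckDataInconsistency change, here is a standalone sketch (not the real helper, which lives in validate_target_files.py) of the rule it now applies: import lines, comments, and lines without '=' are skipped, and a duplicated key is only reported when its values disagree. Property names and values are illustrative.

lines = [
    'import /oem/oem.prop ro.oem.*\n',   # import lines are skipped
    '# comments are skipped too\n',
    'ro.product.device=coral\n',
    'ro.product.device=coral\n',         # duplicate with the same value: benign
    'ro.build.type=user\n',
    'ro.build.type=userdebug\n',         # duplicate with a different value: reported
]

seen = {}
conflicting = None
for line in lines:
    if line.startswith(('import', '#')) or '=' not in line:
        continue
    key, value = line.rstrip().split('=', 1)
    if key in seen and seen[key] != value:
        conflicting = key
    seen[key] = value

print(conflicting)  # ro.build.type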
diff --git a/tools/signapk/Android.bp b/tools/signapk/Android.bp
index c799dbf..b90f010 100644
--- a/tools/signapk/Android.bp
+++ b/tools/signapk/Android.bp
@@ -16,7 +16,7 @@
// the signapk tool (a .jar application used to sign packages)
// ============================================================
-java_library_host {
+java_binary_host {
name: "signapk",
srcs: ["src/**/*.java"],
manifest: "SignApk.mf",
@@ -27,11 +27,12 @@
"conscrypt-unbundled",
],
- required: ["libconscrypt_openjdk_jni"],
+ jni_libs: ["libconscrypt_openjdk_jni"],
// The post-build signing tools need signapk.jar (and its shared libraries,
// handled in their own Android.bp files)
dist: {
+ tag: ".jar",
targets: ["droidcore"],
},
}
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index 95ef05f..7e5c8fc 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -41,6 +41,7 @@
import com.android.apksig.apk.ApkUtils;
import com.android.apksig.apk.MinSdkVersionException;
import com.android.apksig.util.DataSink;
+import com.android.apksig.util.DataSource;
import com.android.apksig.util.DataSources;
import com.android.apksig.zip.ZipFormatException;
@@ -57,6 +58,7 @@
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
+import java.io.RandomAccessFile;
import java.lang.reflect.Constructor;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
@@ -1021,9 +1023,10 @@
"[-providerClass <className>] " +
"[--min-sdk-version <n>] " +
"[--disable-v2] " +
+ "[--enable-v4] " +
"publickey.x509[.pem] privatekey.pk8 " +
"[publickey2.x509[.pem] privatekey2.pk8 ...] " +
- "input.jar output.jar");
+ "input.jar output.jar [output-v4-file]");
System.exit(2);
}
@@ -1043,6 +1046,7 @@
int alignment = 4;
Integer minSdkVersionOverride = null;
boolean signUsingApkSignatureSchemeV2 = true;
+ boolean signUsingApkSignatureSchemeV4 = false;
SigningCertificateLineage certLineage = null;
int argstart = 0;
@@ -1071,6 +1075,9 @@
} else if ("--disable-v2".equals(args[argstart])) {
signUsingApkSignatureSchemeV2 = false;
++argstart;
+ } else if ("--enable-v4".equals(args[argstart])) {
+ signUsingApkSignatureSchemeV4 = true;
+ ++argstart;
} else if ("--lineage".equals(args[argstart])) {
File lineageFile = new File(args[++argstart]);
try {
@@ -1085,8 +1092,14 @@
}
}
- if ((args.length - argstart) % 2 == 1) usage();
- int numKeys = ((args.length - argstart) / 2) - 1;
+ int numArgsExcludeV4FilePath;
+ if (signUsingApkSignatureSchemeV4) {
+ numArgsExcludeV4FilePath = args.length - 1;
+ } else {
+ numArgsExcludeV4FilePath = args.length;
+ }
+ if ((numArgsExcludeV4FilePath - argstart) % 2 == 1) usage();
+ int numKeys = ((numArgsExcludeV4FilePath - argstart) / 2) - 1;
if (signWholeFile && numKeys > 1) {
System.err.println("Only one key may be used with -w.");
System.exit(2);
@@ -1094,8 +1107,12 @@
loadProviderIfNecessary(providerClass);
- String inputFilename = args[args.length-2];
- String outputFilename = args[args.length-1];
+ String inputFilename = args[numArgsExcludeV4FilePath - 2];
+ String outputFilename = args[numArgsExcludeV4FilePath - 1];
+ String outputV4Filename = "";
+ if (signUsingApkSignatureSchemeV4) {
+ outputV4Filename = args[args.length - 1];
+ }
JarFile inputJar = null;
FileOutputStream outputFile = null;
@@ -1233,6 +1250,13 @@
outputFile.close();
outputFile = null;
apkSigner.outputDone();
+
+ if (signUsingApkSignatureSchemeV4) {
+ final DataSource outputApkIn = DataSources.asDataSource(
+ new RandomAccessFile(new File(outputFilename), "r"));
+ final File outputV4File = new File(outputV4Filename);
+ apkSigner.signV4(outputApkIn, outputV4File, false /* ignore failures */);
+ }
}
return;
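The SignApk change above adds an --enable-v4 mode in which one extra trailing argument names the v4 signature output file. The sketch below only illustrates the new argument layout, following the usage string in the hunk; the java invocation and the file names are placeholders, not how any particular build step calls the tool.

import subprocess

cmd = [
    'java', '-jar', 'signapk.jar',
    '--enable-v4',                 # new flag added in the hunk above
    'build/make/target/product/security/testkey.x509.pem',
    'build/make/target/product/security/testkey.pk8',
    'input.jar',
    'output.jar',
    'output.v4-signature',         # the optional [output-v4-file] positional
]
subprocess.run(cmd, check=True)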
diff --git a/tools/warn/android_project_list.py b/tools/warn/android_project_list.py
index 4726fa2..82c0fbd 100644
--- a/tools/warn/android_project_list.py
+++ b/tools/warn/android_project_list.py
@@ -102,13 +102,14 @@
create_pattern('ndk'),
# match vendor/unbungled_google/packages before other packages
create_pattern('unbundled_google'),
+ create_pattern('packages/providers/MediaProvider'),
create_pattern('packages'),
create_pattern('pdk'),
create_pattern('prebuilts'),
create_pattern('system/bt'),
create_pattern('system/connectivity'),
create_pattern('system/core/adb'),
- create_pattern('system/core/base'),
+ create_pattern('system/libbase'),
create_pattern('system/core/debuggerd'),
create_pattern('system/core/fastboot'),
create_pattern('system/core/fingerprintd'),
@@ -117,8 +118,8 @@
create_pattern('system/core/healthd'),
create_pattern('system/core/include'),
create_pattern('system/core/init'),
- create_pattern('system/core/libbacktrace'),
- create_pattern('system/core/liblog'),
+ create_pattern('system/unwinding/libbacktrace'),
+ create_pattern('system/logging/liblog'),
create_pattern('system/core/libpixelflinger'),
create_pattern('system/core/libprocessgroup'),
create_pattern('system/core/libsysutils'),
diff --git a/tools/warn/cpp_warn_patterns.py b/tools/warn/cpp_warn_patterns.py
index 65ce73a..e8783bc 100644
--- a/tools/warn/cpp_warn_patterns.py
+++ b/tools/warn/cpp_warn_patterns.py
@@ -155,6 +155,7 @@
[r".*: warning: unknown attribute '.+'"]),
medium('Attribute ignored',
[r".*: warning: '_*packed_*' attribute ignored",
+ r".*: warning: .* not supported .*Wignored-attributes",
r".*: warning: attribute declaration must precede definition .+ignored-attributes"]),
medium('Visibility problem',
[r".*: warning: declaration of '.+' will not be visible outside of this function"]),
@@ -251,6 +252,8 @@
[r".*: warning: taking address of temporary"]),
medium('Taking address of packed member',
[r".*: warning: taking address of packed member"]),
+ medium('Pack alignment value is modified',
+ [r".*: warning: .*#pragma pack alignment value is modified.*Wpragma-pack.*"]),
medium('Possible broken line continuation',
[r".*: warning: backslash and newline separated by space"]),
medium('Undefined variable template',
@@ -332,7 +335,7 @@
[r".*: warning: extra tokens at end of #endif directive"]),
medium('Comparison between different enums',
[r".*: warning: comparison between '.+' and '.+'.+Wenum-compare",
- r".*: warning: comparison of .* enumeration types .*-Wenum-compare-switch"]),
+ r".*: warning: comparison of .* enumeration types .*-Wenum-compare.*"]),
medium('Conversion may change value',
[r".*: warning: converting negative value '.+' to '.+'",
r".*: warning: conversion to '.+' .+ may (alter|change)"]),
@@ -396,6 +399,8 @@
r".*: warning: absolute value function '.+' given .+ which may cause truncation .+Wabsolute-value"]),
low('Using C++11 extensions',
[r".*: warning: 'auto' type specifier is a C\+\+11 extension"]),
+ low('Using C++17 extensions',
+ [r".*: warning: .* a C\+\+17 extension .+Wc\+\+17-extensions"]),
low('Refers to implicitly defined namespace',
[r".*: warning: using directive refers to implicitly-defined namespace .+"]),
low('Invalid pp token',
@@ -437,8 +442,10 @@
[r".*: warning: unannotated fall-through between switch labels.+Wimplicit-fallthrough"]),
medium('Invalid partial specialization',
[r".*: warning: class template partial specialization.+Winvalid-partial-specialization"]),
- medium('Overlapping compatisons',
+ medium('Overlapping comparisons',
[r".*: warning: overlapping comparisons.+Wtautological-overlap-compare"]),
+ medium('bitwise comparison',
+ [r".*: warning: bitwise comparison.+Wtautological-bitwise-compare"]),
medium('int in bool context',
[r".*: warning: converting.+to a boolean.+Wint-in-bool-context"]),
medium('bitwise conditional parentheses',
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index b8d3fe6..026a6d0 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -359,6 +359,25 @@
csvwriter.writerow([total, '', 'All warnings'])
+def dump_csv_with_description(csvwriter, warning_records, warning_messages,
+ warn_patterns, project_names):
+ """Outputs all the warning messages by project."""
+ csv_output = []
+ for record in warning_records:
+ project_name = project_names[record[1]]
+ pattern = warn_patterns[record[0]]
+ severity = pattern['severity'].header
+ category = pattern['category']
+ description = pattern['description']
+ warning = warning_messages[record[2]]
+ csv_output.append([project_name, severity,
+ category, description,
+ warning])
+ csv_output = sorted(csv_output)
+ for output in csv_output:
+ csvwriter.writerow(output)
+
+
# Return s with escaped backslash and quotation characters.
def escape_string(s):
return s.replace('\\', '\\\\').replace('"', '\\"')
@@ -666,6 +685,12 @@
with open(flags.csvpath, 'w') as f:
dump_csv(csv.writer(f, lineterminator='\n'), warn_patterns)
+ if flags.csvwithdescription:
+ with open(flags.csvwithdescription, 'w') as f:
+ dump_csv_with_description(csv.writer(f, lineterminator='\n'),
+ warning_records, warning_messages,
+ warn_patterns, project_names)
+
if flags.gencsv:
dump_csv(csv.writer(sys.stdout, lineterminator='\n'), warn_patterns)
else:
diff --git a/tools/warn/java_warn_patterns.py b/tools/warn/java_warn_patterns.py
index 17e3864..ac1ed5d 100644
--- a/tools/warn/java_warn_patterns.py
+++ b/tools/warn/java_warn_patterns.py
@@ -486,6 +486,7 @@
[r'.*\.java:.*: warning: \[static\] static method should be qualified']),
medium('AbstractInner'),
medium('BothPackageInfoAndHtml'),
+ medium('BuilderSetStyle'),
medium('CallbackName'),
medium('ExecutorRegistration'),
medium('HiddenTypeParameter'),
@@ -493,9 +494,11 @@
medium('ListenerLast'),
medium('MinMaxConstant'),
medium('MissingBuildMethod'),
+ medium('MissingGetterMatchingBuilder'),
medium('NoByteOrShort'),
medium('OverlappingConstants'),
medium('SetterReturnsThis'),
+ medium('StaticFinalBuilder'),
medium('StreamFiles'),
medium('Typo'),
medium('UseIcu'),
diff --git a/tools/warn/other_warn_patterns.py b/tools/warn/other_warn_patterns.py
index 318c3d4..8df5b87 100644
--- a/tools/warn/other_warn_patterns.py
+++ b/tools/warn/other_warn_patterns.py
@@ -143,6 +143,8 @@
# Yacc warnings
yacc('deprecate directive',
[r".*\.yy?:.*: warning: deprecated directive: "]),
+ yacc('reduce/reduce conflicts',
+ [r".*\.yy?: warning: .+ reduce/reduce conflicts "]),
yacc('shift/reduce conflicts',
[r".*\.yy?: warning: .+ shift/reduce conflicts "]),
{'category': 'yacc', 'severity': Severity.SKIP,
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 68ed995..b2dd8ab 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -77,6 +77,9 @@
help='Save CSV warning file to the passed path')
parser.add_argument('--gencsv', action='store_true',
help='Generate CSV file with number of various warnings')
+ parser.add_argument('--csvwithdescription', default='',
+ help="""Save CSV warning file to the passed path this csv
+ will contain all the warning descriptions""")
parser.add_argument('--byproject', action='store_true',
help='Separate warnings in HTML output by project names')
parser.add_argument('--url', default='',
diff --git a/tools/zipalign/Android.bp b/tools/zipalign/Android.bp
index 8e6196d..1ebf4eb 100644
--- a/tools/zipalign/Android.bp
+++ b/tools/zipalign/Android.bp
@@ -4,20 +4,31 @@
// Zip alignment tool
//
-cc_binary_host {
- name: "zipalign",
+cc_defaults {
+ name: "zipalign_defaults",
+ target: {
+ windows: {
+ host_ldlibs: ["-lpthread"],
+ enabled: true,
+ },
+ },
+}
+cc_library_host_static {
+ name: "libzipalign",
srcs: [
"ZipAlign.cpp",
"ZipEntry.cpp",
"ZipFile.cpp",
],
-
+ export_include_dirs: [
+ "include",
+ ],
cflags: ["-Wall", "-Werror"],
// NOTE: Do not add any shared_libs dependencies because they will break the
// static_sdk_tools target.
- static_libs: [
+ whole_static_libs: [
"libutils",
"libcutils",
"liblog",
@@ -26,11 +37,39 @@
"libbase",
"libzopfli",
],
+ defaults: ["zipalign_defaults"],
+}
- target: {
- windows: {
- host_ldlibs: ["-lpthread"],
- enabled: true,
- },
+cc_binary_host {
+ name: "zipalign",
+ srcs: [
+ "ZipAlignMain.cpp",
+ ],
+ cflags: ["-Wall", "-Werror"],
+ static_libs: [
+ "libzipalign",
+ ],
+ defaults: ["zipalign_defaults"],
+}
+
+cc_test_host {
+ name: "zipalign_tests",
+ srcs: [
+ "tests/src/*_test.cpp",
+ ],
+ test_options: {
+ unit_test: true,
},
+ static_libs: [
+ "libbase",
+ "libzipalign",
+ "libgmock",
+ ],
+ data: [
+ "tests/data/diffOrders.zip",
+ "tests/data/holes.zip",
+ "tests/data/unaligned.zip",
+ ],
+ defaults: ["zipalign_defaults"],
+ test_suites: ["general-tests"],
}
diff --git a/tools/zipalign/OWNERS b/tools/zipalign/OWNERS
new file mode 100644
index 0000000..d701e4a
--- /dev/null
+++ b/tools/zipalign/OWNERS
@@ -0,0 +1,2 @@
+include platform/system/core:/janitors/OWNERS
+sanglardf@google.com
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index eea1749..08f67ff 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -14,35 +14,13 @@
* limitations under the License.
*/
-/*
- * Zip alignment tool
- */
#include "ZipFile.h"
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
-using namespace android;
-
-/*
- * Show program usage.
- */
-void usage(void)
-{
- fprintf(stderr, "Zip alignment utility\n");
- fprintf(stderr, "Copyright (C) 2009 The Android Open Source Project\n\n");
- fprintf(stderr,
- "Usage: zipalign [-f] [-p] [-v] [-z] <align> infile.zip outfile.zip\n"
- " zipalign -c [-p] [-v] <align> infile.zip\n\n" );
- fprintf(stderr,
- " <align>: alignment in bytes, e.g. '4' provides 32-bit alignment\n");
- fprintf(stderr, " -c: check alignment only (does not modify file)\n");
- fprintf(stderr, " -f: overwrite existing outfile.zip\n");
- fprintf(stderr, " -p: memory page alignment for stored shared object files\n");
- fprintf(stderr, " -v: verbose output\n");
- fprintf(stderr, " -z: recompress using Zopfli\n");
-}
+namespace android {
static int getAlignment(bool pageAlignSharedLibs, int defaultAlignment,
ZipEntry* pEntry) {
@@ -69,7 +47,6 @@
{
int numEntries = pZin->getNumEntries();
ZipEntry* pEntry;
- int bias = 0;
status_t status;
for (int i = 0; i < numEntries; i++) {
@@ -90,30 +67,20 @@
if (zopfli) {
status = pZout->addRecompress(pZin, pEntry, &pNewEntry);
- bias += pNewEntry->getCompressedLen() - pEntry->getCompressedLen();
} else {
status = pZout->add(pZin, pEntry, padding, &pNewEntry);
}
} else {
const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry);
- /*
- * Copy the entry, adjusting as required. We assume that the
- * file position in the new file will be equal to the file
- * position in the original.
- */
- off_t newOffset = pEntry->getFileOffset() + bias;
- padding = (alignTo - (newOffset % alignTo)) % alignTo;
-
//printf("--- %s: orig at %ld(+%d) len=%ld, adding pad=%d\n",
// pEntry->getFileName(), (long) pEntry->getFileOffset(),
// bias, (long) pEntry->getUncompressedLen(), padding);
- status = pZout->add(pZin, pEntry, padding, &pNewEntry);
+ status = pZout->add(pZin, pEntry, alignTo, &pNewEntry);
}
if (status != OK)
return 1;
- bias += padding;
//printf(" added '%s' at %ld (pad=%d)\n",
// pNewEntry->getFileName(), (long) pNewEntry->getFileOffset(),
// padding);
@@ -126,7 +93,7 @@
* Process a file. We open the input and output files, failing if the
* output file exists and "force" wasn't specified.
*/
-static int process(const char* inFileName, const char* outFileName,
+int process(const char* inFileName, const char* outFileName,
int alignment, bool force, bool zopfli, bool pageAlignSharedLibs)
{
ZipFile zin, zout;
@@ -169,7 +136,7 @@
/*
* Verify the alignment of a zip archive.
*/
-static int verify(const char* fileName, int alignment, bool verbose,
+int verify(const char* fileName, int alignment, bool verbose,
bool pageAlignSharedLibs)
{
ZipFile zipFile;
@@ -218,92 +185,4 @@
return foundBad ? 1 : 0;
}
-/*
- * Parse args.
- */
-int main(int argc, char* const argv[])
-{
- bool wantUsage = false;
- bool check = false;
- bool force = false;
- bool verbose = false;
- bool zopfli = false;
- bool pageAlignSharedLibs = false;
- int result = 1;
- int alignment;
- char* endp;
-
- if (argc < 4) {
- wantUsage = true;
- goto bail;
- }
-
- argc--;
- argv++;
-
- while (argc && argv[0][0] == '-') {
- const char* cp = argv[0] +1;
-
- while (*cp != '\0') {
- switch (*cp) {
- case 'c':
- check = true;
- break;
- case 'f':
- force = true;
- break;
- case 'v':
- verbose = true;
- break;
- case 'z':
- zopfli = true;
- break;
- case 'p':
- pageAlignSharedLibs = true;
- break;
- default:
- fprintf(stderr, "ERROR: unknown flag -%c\n", *cp);
- wantUsage = true;
- goto bail;
- }
-
- cp++;
- }
-
- argc--;
- argv++;
- }
-
- if (!((check && argc == 2) || (!check && argc == 3))) {
- wantUsage = true;
- goto bail;
- }
-
- alignment = strtol(argv[0], &endp, 10);
- if (*endp != '\0' || alignment <= 0) {
- fprintf(stderr, "Invalid value for alignment: %s\n", argv[0]);
- wantUsage = true;
- goto bail;
- }
-
- if (check) {
- /* check existing archive for correct alignment */
- result = verify(argv[1], alignment, verbose, pageAlignSharedLibs);
- } else {
- /* create the new archive */
- result = process(argv[1], argv[2], alignment, force, zopfli, pageAlignSharedLibs);
-
- /* trust, but verify */
- if (result == 0) {
- result = verify(argv[2], alignment, verbose, pageAlignSharedLibs);
- }
- }
-
-bail:
- if (wantUsage) {
- usage();
- result = 2;
- }
-
- return result;
-}
+} // namespace android
diff --git a/tools/zipalign/ZipAlignMain.cpp b/tools/zipalign/ZipAlignMain.cpp
new file mode 100644
index 0000000..49be916
--- /dev/null
+++ b/tools/zipalign/ZipAlignMain.cpp
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Zip alignment tool
+ */
+
+#include "ZipAlign.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+using namespace android;
+
+/*
+ * Show program usage.
+ */
+void usage(void)
+{
+ fprintf(stderr, "Zip alignment utility\n");
+ fprintf(stderr, "Copyright (C) 2009 The Android Open Source Project\n\n");
+ fprintf(stderr,
+ "Usage: zipalign [-f] [-p] [-v] [-z] <align> infile.zip outfile.zip\n"
+ " zipalign -c [-p] [-v] <align> infile.zip\n\n" );
+ fprintf(stderr,
+ " <align>: alignment in bytes, e.g. '4' provides 32-bit alignment\n");
+ fprintf(stderr, " -c: check alignment only (does not modify file)\n");
+ fprintf(stderr, " -f: overwrite existing outfile.zip\n");
+ fprintf(stderr, " -p: memory page alignment for stored shared object files\n");
+ fprintf(stderr, " -v: verbose output\n");
+ fprintf(stderr, " -z: recompress using Zopfli\n");
+}
+
+
+/*
+ * Parse args.
+ */
+int main(int argc, char* const argv[])
+{
+ bool wantUsage = false;
+ bool check = false;
+ bool force = false;
+ bool verbose = false;
+ bool zopfli = false;
+ bool pageAlignSharedLibs = false;
+ int result = 1;
+ int alignment;
+ char* endp;
+
+ if (argc < 4) {
+ wantUsage = true;
+ goto bail;
+ }
+
+ argc--;
+ argv++;
+
+ while (argc && argv[0][0] == '-') {
+ const char* cp = argv[0] +1;
+
+ while (*cp != '\0') {
+ switch (*cp) {
+ case 'c':
+ check = true;
+ break;
+ case 'f':
+ force = true;
+ break;
+ case 'v':
+ verbose = true;
+ break;
+ case 'z':
+ zopfli = true;
+ break;
+ case 'p':
+ pageAlignSharedLibs = true;
+ break;
+ default:
+ fprintf(stderr, "ERROR: unknown flag -%c\n", *cp);
+ wantUsage = true;
+ goto bail;
+ }
+
+ cp++;
+ }
+
+ argc--;
+ argv++;
+ }
+
+ if (!((check && argc == 2) || (!check && argc == 3))) {
+ wantUsage = true;
+ goto bail;
+ }
+
+ alignment = strtol(argv[0], &endp, 10);
+ if (*endp != '\0' || alignment <= 0) {
+ fprintf(stderr, "Invalid value for alignment: %s\n", argv[0]);
+ wantUsage = true;
+ goto bail;
+ }
+
+ if (check) {
+ /* check existing archive for correct alignment */
+ result = verify(argv[1], alignment, verbose, pageAlignSharedLibs);
+ } else {
+ /* create the new archive */
+ result = process(argv[1], argv[2], alignment, force, zopfli, pageAlignSharedLibs);
+
+ /* trust, but verify */
+ if (result == 0) {
+ result = verify(argv[2], alignment, verbose, pageAlignSharedLibs);
+ }
+ }
+
+bail:
+ if (wantUsage) {
+ usage();
+ result = 2;
+ }
+
+ return result;
+}
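Since the command-line front end now lives in ZipAlignMain.cpp, the usage() text above is the contract. A short sketch of driving it from Python, with placeholder file names and assuming a zipalign binary is on PATH:

import subprocess

# Align to 4-byte boundaries, overwriting any existing output (-f).
subprocess.run(['zipalign', '-f', '4', 'infile.zip', 'outfile.zip'], check=True)

# Check-only mode, per "zipalign -c [-p] [-v] <align> infile.zip".
result = subprocess.run(['zipalign', '-c', '-v', '4', 'outfile.zip'])
print('aligned' if result.returncode == 0 else 'not aligned (or usage error)')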
diff --git a/tools/zipalign/ZipEntry.cpp b/tools/zipalign/ZipEntry.cpp
index 810d74a..5233f0a 100644
--- a/tools/zipalign/ZipEntry.cpp
+++ b/tools/zipalign/ZipEntry.cpp
@@ -29,7 +29,7 @@
#include <string.h>
#include <time.h>
-using namespace android;
+namespace android {
/*
* Initialize a new ZipEntry structure from a FILE* positioned at a
@@ -696,3 +696,5 @@
ALOGD(" comment: '%s'\n", mFileComment);
}
+} // namespace android
+
diff --git a/tools/zipalign/ZipFile.cpp b/tools/zipalign/ZipFile.cpp
index 88505b7..1e3c413 100644
--- a/tools/zipalign/ZipFile.cpp
+++ b/tools/zipalign/ZipFile.cpp
@@ -35,7 +35,7 @@
#include <assert.h>
#include <inttypes.h>
-using namespace android;
+namespace android {
/*
* Some environments require the "b", some choke on it.
@@ -134,7 +134,7 @@
/*
* Return the Nth entry in the archive.
*/
-android::ZipEntry* ZipFile::getEntryByIndex(int idx) const
+ZipEntry* ZipFile::getEntryByIndex(int idx) const
{
if (idx < 0 || idx >= (int) mEntries.size())
return NULL;
@@ -145,7 +145,7 @@
/*
* Find an entry by name.
*/
-android::ZipEntry* ZipFile::getEntryByName(const char* fileName) const
+ZipEntry* ZipFile::getEntryByName(const char* fileName) const
{
/*
* Do a stupid linear string-compare search.
@@ -245,7 +245,11 @@
/* read the last part of the file into the buffer */
if (fread(buf, 1, readAmount, mZipFp) != (size_t) readAmount) {
- ALOGD("short file? wanted %ld\n", readAmount);
+ if (feof(mZipFp)) {
+ ALOGW("fread %ld bytes failed, unexpected EOF", readAmount);
+ } else {
+ ALOGW("fread %ld bytes failed, %s", readAmount, strerror(errno));
+ }
result = UNKNOWN_ERROR;
goto bail;
}
@@ -327,7 +331,11 @@
{
uint8_t checkBuf[4];
if (fread(checkBuf, 1, 4, mZipFp) != 4) {
- ALOGD("EOCD check read failed\n");
+ if (feof(mZipFp)) {
+ ALOGW("fread EOCD failed, unexpected EOF");
+ } else {
+ ALOGW("fread EOCD failed, %s", strerror(errno));
+ }
result = INVALID_OPERATION;
goto bail;
}
@@ -503,6 +511,32 @@
}
/*
+ * Based on the current position in the output zip, assess where the entry
+ * payload will end up if written as-is. If alignment is not satisfactory,
+ * add some padding in the extra field.
+ *
+ */
+status_t ZipFile::alignEntry(android::ZipEntry* pEntry, uint32_t alignTo){
+ if (alignTo == 0 || alignTo == 1)
+ return OK;
+
+ // Calculate where the entry payload offset will end up if we were to write
+ // it as-is.
+ uint64_t expectedPayloadOffset = ftell(mZipFp) +
+ android::ZipEntry::LocalFileHeader::kLFHLen +
+ pEntry->mLFH.mFileNameLength +
+ pEntry->mLFH.mExtraFieldLength;
+
+ // If the alignment is not what was requested, add some padding in the extra
+ // so the payload ends up where is requested.
+ uint64_t alignDiff = alignTo - (expectedPayloadOffset % alignTo);
+ if (alignDiff == 0)
+ return OK;
+
+ return pEntry->addPadding(alignDiff);
+}
+
+/*
* Add an entry by copying it from another zip file. If "padding" is
* nonzero, the specified number of bytes will be added to the "extra"
* field in the header.
@@ -510,7 +544,7 @@
* If "ppEntry" is non-NULL, a pointer to the new entry will be returned.
*/
status_t ZipFile::add(const ZipFile* pSourceZip, const ZipEntry* pSourceEntry,
- int padding, ZipEntry** ppEntry)
+ int alignTo, ZipEntry** ppEntry)
{
ZipEntry* pEntry = NULL;
status_t result;
@@ -537,11 +571,10 @@
result = pEntry->initFromExternal(pSourceEntry);
if (result != OK)
goto bail;
- if (padding != 0) {
- result = pEntry->addPadding(padding);
- if (result != OK)
- goto bail;
- }
+
+ result = alignEntry(pEntry, alignTo);
+ if (result != OK)
+ goto bail;
/*
* From here on out, failures are more interesting.
@@ -760,15 +793,18 @@
while (1) {
count = fread(tmpBuf, 1, sizeof(tmpBuf), srcFp);
- if (ferror(srcFp) || ferror(dstFp))
- return errnoToStatus(errno);
+ if (ferror(srcFp) || ferror(dstFp)) {
+ status_t status = errnoToStatus(errno);
+ ALOGW("fread %zu bytes failed, %s", count, strerror(errno));
+ return status;
+ }
if (count == 0)
break;
*pCRC32 = crc32(*pCRC32, tmpBuf, count);
if (fwrite(tmpBuf, 1, count, dstFp) != count) {
- ALOGD("fwrite %d bytes failed\n", (int) count);
+ ALOGW("fwrite %zu bytes failed, %s", count, strerror(errno));
return UNKNOWN_ERROR;
}
}
@@ -788,7 +824,7 @@
if (size > 0) {
*pCRC32 = crc32(*pCRC32, (const unsigned char*)data, size);
if (fwrite(data, 1, size, dstFp) != size) {
- ALOGD("fwrite %d bytes failed\n", (int) size);
+ ALOGW("fwrite %zu bytes failed, %s", size, strerror(errno));
return UNKNOWN_ERROR;
}
}
@@ -822,7 +858,11 @@
count = fread(tmpBuf, 1, readSize, srcFp);
if (count != readSize) { // error or unexpected EOF
- ALOGD("fread %d bytes failed\n", (int) readSize);
+ if (feof(srcFp)) {
+ ALOGW("fread %zu bytes failed, unexpected EOF", readSize);
+ } else {
+ ALOGW("fread %zu bytes failed, %s", readSize, strerror(errno));
+ }
return UNKNOWN_ERROR;
}
@@ -830,7 +870,7 @@
*pCRC32 = crc32(*pCRC32, tmpBuf, count);
if (fwrite(tmpBuf, 1, count, dstFp) != count) {
- ALOGD("fwrite %d bytes failed\n", (int) count);
+ ALOGW("fwrite %zu bytes failed, %s", count, strerror(errno));
return UNKNOWN_ERROR;
}
@@ -890,8 +930,7 @@
goto bail;
}
if (getSize < kBufSize) {
- ALOGV("+++ got %d bytes, EOF reached\n",
- (int)getSize);
+ ALOGV("+++ got %zu bytes, EOF reached\n", getSize);
atEof = true;
}
@@ -901,9 +940,9 @@
delete[] inBuf;
}
- ALOGV("+++ writing %d bytes\n", (int)outSize);
+ ALOGV("+++ writing %zu bytes\n", outSize);
if (fwrite(outBuf, 1, outSize, dstFp) != outSize) {
- ALOGD("write %d failed in deflate\n", (int)outSize);
+ ALOGW("fwrite %zu bytes failed, %s", outSize, strerror(errno));
result = UNKNOWN_ERROR;
goto bail;
}
@@ -1109,24 +1148,31 @@
getSize = n;
if (fseek(fp, (long) src, SEEK_SET) != 0) {
- ALOGD("filemove src seek %ld failed\n", (long) src);
+ ALOGW("filemove src seek %ld failed, %s",
+ (long) src, strerror(errno));
return UNKNOWN_ERROR;
}
if (fread(readBuf, 1, getSize, fp) != getSize) {
- ALOGD("filemove read %ld off=%ld failed\n",
- (long) getSize, (long) src);
+ if (feof(fp)) {
+ ALOGW("fread %zu bytes off=%ld failed, unexpected EOF",
+ getSize, (long) src);
+ } else {
+ ALOGW("fread %zu bytes off=%ld failed, %s",
+ getSize, (long) src, strerror(errno));
+ }
return UNKNOWN_ERROR;
}
if (fseek(fp, (long) dst, SEEK_SET) != 0) {
- ALOGD("filemove dst seek %ld failed\n", (long) dst);
+ ALOGW("filemove dst seek %ld failed, %s",
+ (long) dst, strerror(errno));
return UNKNOWN_ERROR;
}
if (fwrite(readBuf, 1, getSize, fp) != getSize) {
- ALOGD("filemove write %ld off=%ld failed\n",
- (long) getSize, (long) dst);
+ ALOGW("filemove write %zu off=%ld failed, %s",
+ getSize, (long) dst, strerror(errno));
return UNKNOWN_ERROR;
}
@@ -1374,12 +1420,17 @@
ZipEntry::putLongLE(&buf[0x10], mCentralDirOffset);
ZipEntry::putShortLE(&buf[0x14], mCommentLen);
- if (fwrite(buf, 1, kEOCDLen, fp) != kEOCDLen)
+ if (fwrite(buf, 1, kEOCDLen, fp) != kEOCDLen) {
+ ALOGW("fwrite EOCD failed, %s", strerror(errno));
return UNKNOWN_ERROR;
+ }
if (mCommentLen > 0) {
assert(mComment != NULL);
- if (fwrite(mComment, mCommentLen, 1, fp) != mCommentLen)
+ if (fwrite(mComment, mCommentLen, 1, fp) != mCommentLen) {
+ ALOGW("fwrite %d bytes failed, %s",
+ (int) mCommentLen, strerror(errno));
return UNKNOWN_ERROR;
+ }
}
return OK;
@@ -1397,3 +1448,4 @@
mCentralDirSize, mCentralDirOffset, mCommentLen);
}
+} // namespace android
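The arithmetic in the new alignEntry() is small but easy to misread, so here is a standalone sketch of the same computation with made-up numbers; the 30-byte constant stands in for kLFHLen, the fixed size of a ZIP local file header.

align_to = 4
current_pos = 100            # where ftell(mZipFp) says the header will start
lfh_len = 30                 # fixed local-file-header size (kLFHLen)
name_len, extra_len = 9, 0   # example mFileNameLength / mExtraFieldLength values

expected_payload_offset = current_pos + lfh_len + name_len + extra_len   # 139
align_diff = align_to - (expected_payload_offset % align_to)             # 4 - 3 = 1

# One byte of padding in the extra field moves the payload from 139 to 140,
# a multiple of 4, which is what addPadding(alignDiff) achieves above.
print(align_diff)  # 1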
diff --git a/tools/zipalign/ZipFile.h b/tools/zipalign/ZipFile.h
index 11d20c5..854f981 100644
--- a/tools/zipalign/ZipFile.h
+++ b/tools/zipalign/ZipFile.h
@@ -102,14 +102,14 @@
}
/*
- * Add an entry by copying it from another zip file. If "padding" is
- * nonzero, the specified number of bytes will be added to the "extra"
- * field in the header.
+ * Add an entry by copying it from another zip file. If "alignment" is
+ * nonzero, an appropriate number of bytes will be added to the "extra"
+ * field in the header so the entry payload is aligned.
*
* If "ppEntry" is non-NULL, a pointer to the new entry will be returned.
*/
status_t add(const ZipFile* pSourceZip, const ZipEntry* pSourceEntry,
- int padding, ZipEntry** ppEntry);
+ int alignment, ZipEntry** ppEntry);
/*
* Add an entry by copying it from another zip file, recompressing with
@@ -163,6 +163,8 @@
ZipFile(const ZipFile& src);
ZipFile& operator=(const ZipFile& src);
+ status_t alignEntry(android::ZipEntry* pEntry, uint32_t alignTo);
+
class EndOfCentralDir {
public:
EndOfCentralDir(void) :
diff --git a/tools/zipalign/include/ZipAlign.h b/tools/zipalign/include/ZipAlign.h
new file mode 100644
index 0000000..ab36086
--- /dev/null
+++ b/tools/zipalign/include/ZipAlign.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ZIPALIGN_H
+#define ZIPALIGN_H
+
+namespace android {
+
+/*
+ * Generate a new, aligned, zip "output" from an "input" zip.
+ * - alignTo: Alignment (in bytes) for uncompressed entries.
+ * - force : Overwrite the output file if it already exists; otherwise fail
+ *           when the output exists.
+ * - zopfli : Recompress compressed entries with the more efficient Zopfli
+ *            algorithm; otherwise compressed entries are copied as-is, and
+ *            left unaligned.
+ * - pageAlignSharedLibs: Align .so files to 4096 and other files to
+ * alignTo, or all files to alignTo if false.
+ *
+ * Returns 0 on success.
+ */
+int process(const char* input, const char* output, int alignTo, bool force,
+ bool zopfli, bool pageAlignSharedLibs);
+
+/*
+ * Verify the alignment of a zip archive.
+ * - alignTo: Alignment (in bytes) for uncompressed entries.
+ * - pageAlignSharedLibs: Align .so files to 4096 and other files to
+ * alignTo, or all files to alignTo if false.
+ *
+ * Returns 0 on success.
+ */
+int verify(const char* fileName, int alignTo, bool verbose,
+ bool pageAlignSharedLibs);
+
+} // namespace android
+
+#endif // ZIPALIGN_H
diff --git a/tools/zipalign/tests/data/diffOrders.zip b/tools/zipalign/tests/data/diffOrders.zip
new file mode 100644
index 0000000..8f512ed
--- /dev/null
+++ b/tools/zipalign/tests/data/diffOrders.zip
Binary files differ
diff --git a/tools/zipalign/tests/data/holes.zip b/tools/zipalign/tests/data/holes.zip
new file mode 100644
index 0000000..c88f891
--- /dev/null
+++ b/tools/zipalign/tests/data/holes.zip
Binary files differ
diff --git a/tools/zipalign/tests/data/unaligned.zip b/tools/zipalign/tests/data/unaligned.zip
new file mode 100644
index 0000000..d572b1a
--- /dev/null
+++ b/tools/zipalign/tests/data/unaligned.zip
Binary files differ
diff --git a/tools/zipalign/tests/src/align_test.cpp b/tools/zipalign/tests/src/align_test.cpp
new file mode 100644
index 0000000..c79e791
--- /dev/null
+++ b/tools/zipalign/tests/src/align_test.cpp
@@ -0,0 +1,53 @@
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+#include "ZipAlign.h"
+
+#include <stdio.h>
+#include <string>
+
+#include <android-base/file.h>
+
+using namespace android;
+
+static std::string GetTestPath(const std::string& filename) {
+ static std::string test_data_dir = android::base::GetExecutableDirectory() + "/tests/data/";
+ return test_data_dir + filename;
+}
+
+TEST(Align, Unaligned) {
+ const std::string src = GetTestPath("unaligned.zip");
+ const std::string dst = GetTestPath("unaligned_out.zip");
+
+ int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+ ASSERT_EQ(0, processed);
+
+ int verified = verify(dst.c_str(), 4, true, false);
+ ASSERT_EQ(0, verified);
+}
+
+// Align a zip featuring a hole at the beginning. The
+// hole in the archive is a deleted entry in the Central
+// Directory.
+TEST(Align, Holes) {
+ const std::string src = GetTestPath("holes.zip");
+ const std::string dst = GetTestPath("holes_out.zip");
+
+ int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+ ASSERT_EQ(0, processed);
+
+ int verified = verify(dst.c_str(), 4, false, true);
+ ASSERT_EQ(0, verified);
+}
+
+// Align a zip where LFH order and CD entries differ.
+TEST(Align, DifferentOrders) {
+ const std::string src = GetTestPath("diffOrders.zip");
+ const std::string dst = GetTestPath("diffOrders_out.zip");
+
+ int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+ ASSERT_EQ(0, processed);
+
+ int verified = verify(dst.c_str(), 4, false, true);
+ ASSERT_EQ(0, verified);
+}