Merge "Fix check_elf_file with LOCAL_SDK_VERSION and NDK libs"
diff --git a/Changes.md b/Changes.md
index 35b8944..1fadcef 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,35 @@
# Build System Changes for Android.mk Writers
+## `PRODUCT_HOST_PACKAGES` split from `PRODUCT_PACKAGES` {#PRODUCT_HOST_PACKAGES}
+
+Previously, adding a module that supported both the host and the target
+(`host_supported` in Android.bp; two modules with the same name in Android.mk)
+to `PRODUCT_PACKAGES` caused both variants to be built and installed. In many
+cases only the host or the target variant is wanted by default, so building
+both was over-building. `PRODUCT_PACKAGES` is therefore changing to affect
+only target modules, while `PRODUCT_HOST_PACKAGES` is being added for host
+modules.
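+
+For example, a product that wants both the target and the host copy of a
+`host_supported` module will need to list it in both variables (`my_tool` is
+an illustrative module name):
+
+```
+# Install the target variant of my_tool.
+PRODUCT_PACKAGES += my_tool
+
+# Also install the host variant.
+PRODUCT_HOST_PACKAGES += my_tool
+```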
+
+Functional differences between `PRODUCT_PACKAGES` and `PRODUCT_HOST_PACKAGES`:
+
+* `PRODUCT_HOST_PACKAGES` does not have `_ENG`/`_DEBUG` variants, as that's a
+ property of the target, not the host.
+* `PRODUCT_HOST_PACKAGES` does not support `LOCAL_MODULE_OVERRIDES`.
+* `PRODUCT_HOST_PACKAGES` requires the listed modules to exist and to be host
+  modules (unless `ALLOW_MISSING_DEPENDENCIES` is set).
+
+This is still an active migration: installation decisions are currently still
+made from `PRODUCT_PACKAGES`, but the build verifies that
+`PRODUCT_HOST_PACKAGES` would trigger installation of all of the same host
+packages. This check ignores shared libraries, as those are not normally
+necessary in `PRODUCT_*PACKAGES` and tended to be over-built (especially the
+32-bit variants).
+
+Future changes will switch installation decisions to `PRODUCT_HOST_PACKAGES`
+for host modules, error when there's a host-only module in `PRODUCT_PACKAGES`,
+and do some further cleanup where `LOCAL_REQUIRED_MODULES` are still merged
+between host and target modules with the same name.
+
## `*.c.arm` / `*.cpp.arm` deprecation {#file_arm}
In Android.mk files, you used to be able to change LOCAL_ARM_MODE for each
diff --git a/CleanSpec.mk b/CleanSpec.mk
index f95f0a7..51139ed 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -599,6 +599,17 @@
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/dex_bootjars)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/dex_bootjars_input)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libnpt.so)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*)
+
+# Clean up old testcase files
+$(call add-clean-step, rm -rf $(TARGET_OUT_TESTCASES)/*)
+$(call add-clean-step, rm -rf $(HOST_OUT_TESTCASES)/*)
+$(call add-clean-step, rm -rf $(HOST_CROSS_OUT_TESTCASES)/*)
+$(call add-clean-step, rm -rf $(TARGET_OUT_DATA)/*)
+$(call add-clean-step, rm -rf $(HOST_OUT)/vts/*)
+$(call add-clean-step, rm -rf $(HOST_OUT)/framework/vts-tradefed.jar)
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/OWNERS b/OWNERS
index 9f621d8..77496f1 100644
--- a/OWNERS
+++ b/OWNERS
@@ -5,7 +5,5 @@
# To expedite LON reviews
hansson@google.com
-per-file * = ccross@android.com,dwillemsen@google.com,hansson@google.com
-
# For version updates
per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com
diff --git a/core/Makefile b/core/Makefile
index 44f01ba..811282d 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1556,13 +1556,11 @@
$(TARGET_RECOVERY_ROOT_OUT)/plat_file_contexts \
$(TARGET_RECOVERY_ROOT_OUT)/vendor_file_contexts \
$(TARGET_RECOVERY_ROOT_OUT)/plat_property_contexts \
- $(TARGET_RECOVERY_ROOT_OUT)/vendor_property_contexts
-
-ifdef BOARD_ODM_SEPOLICY_DIRS
-recovery_sepolicy += \
+ $(TARGET_RECOVERY_ROOT_OUT)/vendor_property_contexts \
$(TARGET_RECOVERY_ROOT_OUT)/odm_file_contexts \
- $(TARGET_RECOVERY_ROOT_OUT)/odm_property_contexts
-endif
+ $(TARGET_RECOVERY_ROOT_OUT)/odm_property_contexts \
+ $(TARGET_RECOVERY_ROOT_OUT)/product_file_contexts \
+ $(TARGET_RECOVERY_ROOT_OUT)/product_property_contexts
# Passed into rsync from non-recovery root to recovery root, to avoid overwriting recovery-specific
# SELinux files
@@ -4319,6 +4317,13 @@
# BOARD_SUPER_PARTITION_SIZE must be defined to build super image.
ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
+# Dump variables used by build_super_image.py.
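+# $(1): the misc_info.txt file to write the variables to.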
+define dump-super-image-info
+ $(call dump-dynamic-partitions-info,$(1))
+ $(if $(filter true,$(AB_OTA_UPDATER)), \
+ echo "ab_update=true" >> $(1))
+endef
+
ifneq (true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS))
INSTALLED_SUPERIMAGE_TARGET := $(PRODUCT_OUT)/super.img
$(INSTALLED_SUPERIMAGE_TARGET): extracted_input_target_files := $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE))
@@ -4326,7 +4331,22 @@
$(call pretty,"Target super fs image: $@")
PATH=$(dir $(LPMAKE)):$$PATH \
$(BUILD_SUPER_IMAGE) -v $(extracted_input_target_files) $@
-endif
+
+# supernod uses the images in the $(PRODUCT_OUT) directory instead of the images from the target files package.
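+# Running `make superimage-nodeps` (or the `supernod` shorthand) rebuilds
+# $(INSTALLED_SUPERIMAGE_TARGET) from whatever partition images are already
+# present in $(PRODUCT_OUT).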
+.PHONY: superimage-nodeps supernod
+superimage-nodeps supernod: intermediates := $(call intermediates-dir-for,PACKAGING,superimage-nodeps)
+superimage-nodeps supernod: | $(LPMAKE) $(BUILD_SUPER_IMAGE) \
+ $(foreach p, $(BOARD_SUPER_PARTITION_PARTITION_LIST), $(INSTALLED_$(call to-upper,$(p))IMAGE_TARGET))
+ $(call pretty,"make $(INSTALLED_SUPERIMAGE_TARGET): ignoring dependencies")
+ mkdir -p $(intermediates)
+ rm -rf $(intermediates)/misc_info.txt
+ $(call dump-super-image-info,$(intermediates)/misc_info.txt)
+ $(foreach p,$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+ echo "$(p)_image=$(INSTALLED_$(call to-upper,$(p))IMAGE_TARGET)" >> $(intermediates)/misc_info.txt;)
+ PATH=$(dir $(LPMAKE)):$$PATH \
+ $(BUILD_SUPER_IMAGE) -v $(intermediates)/misc_info.txt $(INSTALLED_SUPERIMAGE_TARGET)
+
+endif # PRODUCT_RETROFIT_DYNAMIC_PARTITIONS != "true"
$(call dist-for-goals,dist_files,$(INSTALLED_SUPERIMAGE_TARGET))
@@ -4336,10 +4356,7 @@
$(call pretty,"Target empty super fs image: $@")
mkdir -p $(intermediates)
rm -rf $(intermediates)/misc_info.txt
- $(call dump-dynamic-partitions-info,$(intermediates)/misc_info.txt)
-ifeq ($(AB_OTA_UPDATER),true)
- echo "ab_update=true" >> $(intermediates)/misc_info.txt
-endif
+ $(call dump-super-image-info,$(intermediates)/misc_info.txt)
PATH=$(dir $(LPMAKE)):$$PATH \
$(BUILD_SUPER_IMAGE) -v $(intermediates)/misc_info.txt $@
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index efa6ae6..931b1b1 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -36,7 +36,7 @@
fixed_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml.fixed
$(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(my_full_libs_manifest_files)
- $(full_android_manifest): $(ANDROID_MANIFEST_MERGER_DEPS)
+ $(full_android_manifest): $(ANDROID_MANIFEST_MERGER)
$(full_android_manifest) : $(fixed_android_manifest) $(my_full_libs_manifest_files)
@echo "Merge android manifest files: $@ <-- $< $(PRIVATE_LIBS_MANIFESTS)"
@mkdir -p $(dir $@)
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index a01d80f..6d75132 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -36,7 +36,7 @@
$(autogen_test_config_file): PRIVATE_MODULE_NAME := $(LOCAL_MODULE)
$(autogen_test_config_file) : $(autogen_test_config_template)
@echo "Auto generating test config $(notdir $@)"
- $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE_NAME)&g' $< > $@
+ $(hide) sed 's&{MODULE}&$(PRIVATE_MODULE_NAME)&g;s&{EXTRA_OPTIONS}&&g' $< > $@
my_auto_generate_config := true
else
# Auto generating test config file for instrumentation test
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 2fbf524..b2bbe46 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -242,6 +242,33 @@
partition_tag := $(if $(call should-install-to-system,$(my_module_tags)),,_DATA)
endif
endif
+# For test modules that lack a suite tag, set null-suite as the default.
+# We only support adding a default suite to native tests, native benchmarks, and instrumentation tests.
+# This is because they are the only tests we currently auto-generate test configs for.
+ifndef LOCAL_COMPATIBILITY_SUITE
+ ifneq ($(filter NATIVE_TESTS NATIVE_BENCHMARK, $(LOCAL_MODULE_CLASS)),)
+ LOCAL_COMPATIBILITY_SUITE := null-suite
+ endif
+ ifneq ($(filter APPS, $(LOCAL_MODULE_CLASS)),)
+ ifneq ($(filter $(my_module_tags),tests),)
+ LOCAL_COMPATIBILITY_SUITE := null-suite
+ endif
+ endif
+endif
+
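+# Decide whether to redirect this module's install location to the per-module
+# directory under $(my_prefix)OUT_TESTCASES (see ENABLE_DEFAULT_TEST_LOCATION
+# in core/config.mk).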
+use_testcase_folder :=
+ifdef ENABLE_DEFAULT_TEST_LOCATION
+ ifeq ($(my_module_path),)
+ ifneq ($(LOCAL_MODULE),$(filter $(LOCAL_MODULE),$(DEFAULT_DATA_OUT_MODULES)))
+ ifdef LOCAL_COMPATIBILITY_SUITE
+ ifneq (true, $(LOCAL_IS_HOST_MODULE))
+ use_testcase_folder := true
+ endif
+ endif
+ endif
+ endif
+endif
+
ifeq ($(my_module_path),)
install_path_var := $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT$(partition_tag)_$(LOCAL_MODULE_CLASS)
ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
@@ -249,6 +276,16 @@
endif
my_module_path := $($(install_path_var))
+
+  # If use_testcase_folder is set and LOCAL_MODULE_PATH is not set,
+  # override the default install path with one under the testcases directory.
+ ifeq ($(use_testcase_folder),true)
+ arch_dir := $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+ testcase_folder := $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)/$(arch_dir)
+ my_module_path := $(testcase_folder)
+ arch_dir :=
+ endif
+
ifeq ($(strip $(my_module_path)),)
$(error $(LOCAL_PATH): unhandled install path "$(install_path_var) for $(LOCAL_MODULE)")
endif
@@ -324,7 +361,9 @@
# Neither do Runtime Resource Overlay apks, which contain just the overlaid resources.
else ifeq ($(LOCAL_IS_RUNTIME_RESOURCE_OVERLAY),true)
else
- my_module_path := $(my_module_path)/$(LOCAL_MODULE)
+ ifneq ($(use_testcase_folder),true)
+ my_module_path := $(my_module_path)/$(LOCAL_MODULE)
+ endif
endif
endif
LOCAL_INSTALLED_MODULE := $(my_module_path)/$(my_installed_module_stem)
@@ -429,12 +468,23 @@
my_init_rc_installed :=
my_init_rc_pairs :=
my_installed_symlinks :=
+my_default_test_module :=
+ifeq ($(use_testcase_folder),true)
+arch_dir := $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+my_default_test_module := $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)/$(arch_dir)/$(my_installed_module_stem)
+arch_dir :=
+endif
+
ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+ifneq ($(LOCAL_INSTALLED_MODULE),$(my_default_test_module))
+# Install into the testcase folder
+$(LOCAL_INSTALLED_MODULE) : $(my_default_test_module)
$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
$(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
@echo "Install: $@"
$(copy-file-to-new-target)
$(PRIVATE_POST_INSTALL_CMD)
+endif
ifndef LOCAL_IS_HOST_MODULE
# Rule to install the module's companion init.rc.
@@ -544,20 +594,6 @@
endif
endif
-# For test modules that lack a suite tag, set null-suite as the default.
-# We only support adding a default suite to native tests, native benchmarks, and instrumentation tests.
-# This is because they are the only tests we currently auto-generate test configs for.
-ifndef LOCAL_COMPATIBILITY_SUITE
-ifneq ($(filter NATIVE_TESTS NATIVE_BENCHMARK, $(LOCAL_MODULE_CLASS)),)
-LOCAL_COMPATIBILITY_SUITE := null-suite
-endif
-ifneq ($(filter APPS, $(LOCAL_MODULE_CLASS)),)
-ifneq ($(filter $(my_module_tags),tests),)
-LOCAL_COMPATIBILITY_SUITE := null-suite
-endif
-endif
-endif
-
###########################################################
## Compatibility suite files.
###########################################################
@@ -575,9 +611,15 @@
ifdef LOCAL_MULTILIB
multi_arch := true
endif
+
ifdef multi_arch
+arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+else
+ifeq ($(use_testcase_folder),true)
arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
endif
+endif
+
multi_arch :=
# The module itself.
@@ -671,6 +713,17 @@
endif
+ifeq ($(use_testcase_folder),true)
+ifneq ($(my_test_data_file_pairs),)
+$(foreach pair, $(my_test_data_file_pairs), \
+ $(eval parts := $(subst :,$(space),$(pair))) \
+ $(eval src_path := $(word 1,$(parts))) \
+ $(eval file := $(word 2,$(parts))) \
+ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
+ $(call filter-copy-pair,$(src_path),$(call append-path,$(dir),$(file)),$(my_installed_test_data))))))
+endif
+else
ifneq ($(my_test_data_file_pairs),)
$(foreach pair, $(my_test_data_file_pairs), \
$(eval parts := $(subst :,$(space),$(pair))) \
@@ -680,6 +733,9 @@
$(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
$(src_path):$(call append-path,$(dir),$(file))))))
endif
+endif
+
+
arch_dir :=
is_native :=
diff --git a/core/board_config.mk b/core/board_config.mk
index 528c133..ed741c3 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -19,6 +19,80 @@
# and sanity-checks the variable defined therein.
# ###############################################################
+_board_strip_readonly_list := \
+ BOARD_EGL_CFG \
+ BOARD_HAVE_BLUETOOTH \
+ BOARD_INSTALLER_CMDLINE \
+ BOARD_KERNEL_CMDLINE \
+ BOARD_KERNEL_BASE \
+ BOARD_USES_GENERIC_AUDIO \
+ BOARD_VENDOR_USE_AKMD \
+ BOARD_WPA_SUPPLICANT_DRIVER \
+ BOARD_WLAN_DEVICE \
+ TARGET_ARCH \
+ TARGET_ARCH_VARIANT \
+ TARGET_CPU_ABI \
+ TARGET_CPU_ABI2 \
+ TARGET_CPU_VARIANT \
+ TARGET_CPU_VARIANT_RUNTIME \
+ TARGET_2ND_ARCH \
+ TARGET_2ND_ARCH_VARIANT \
+ TARGET_2ND_CPU_ABI \
+ TARGET_2ND_CPU_ABI2 \
+ TARGET_2ND_CPU_VARIANT \
+ TARGET_2ND_CPU_VARIANT_RUNTIME \
+ TARGET_BOARD_PLATFORM \
+ TARGET_BOARD_PLATFORM_GPU \
+ TARGET_BOOTLOADER_BOARD_NAME \
+ TARGET_NO_BOOTLOADER \
+ TARGET_NO_KERNEL \
+ TARGET_NO_RECOVERY \
+ TARGET_NO_RADIOIMAGE \
+ TARGET_HARDWARE_3D \
+ WITH_DEXPREOPT \
+
+# File system variables
+_board_strip_readonly_list += \
+ BOARD_FLASH_BLOCK_SIZE \
+ BOARD_BOOTIMAGE_PARTITION_SIZE \
+ BOARD_RECOVERYIMAGE_PARTITION_SIZE \
+ BOARD_SYSTEMIMAGE_PARTITION_SIZE \
+ BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_USERDATAIMAGE_PARTITION_SIZE \
+ BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_CACHEIMAGE_PARTITION_SIZE \
+ BOARD_VENDORIMAGE_PARTITION_SIZE \
+ BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_PRODUCTIMAGE_PARTITION_SIZE \
+ BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE \
+ BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE \
+ BOARD_ODMIMAGE_PARTITION_SIZE \
+ BOARD_ODMIMAGE_FILE_SYSTEM_TYPE \
+
+# Logical partitions related variables.
+_dynamic_partitions_var_list += \
+ BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE \
+ BOARD_SUPER_PARTITION_SIZE \
+ BOARD_SUPER_PARTITION_GROUPS \
+
+_board_strip_readonly_list += $(_dynamic_partitions_var_list)
+
+_build_broken_var_list := \
+ BUILD_BROKEN_ANDROIDMK_EXPORTS \
+ BUILD_BROKEN_DUP_COPY_HEADERS \
+ BUILD_BROKEN_DUP_RULES \
+ BUILD_BROKEN_PHONY_TARGETS \
+ BUILD_BROKEN_ENG_DEBUG_TAGS \
+
+_board_true_false_vars := $(_build_broken_var_list)
+_board_strip_readonly_list += $(_build_broken_var_list)
+
# Conditional to building on linux, as dex2oat currently does not work on darwin.
ifeq ($(HOST_OS),linux)
WITH_DEXPREOPT := true
@@ -68,27 +142,29 @@
endif
board_config_mk :=
-# Clean up/verify variables defined by the board config file.
-TARGET_BOOTLOADER_BOARD_NAME := $(strip $(TARGET_BOOTLOADER_BOARD_NAME))
-TARGET_CPU_ABI := $(strip $(TARGET_CPU_ABI))
-TARGET_CPU_ABI2 := $(strip $(TARGET_CPU_ABI2))
-TARGET_CPU_VARIANT := $(strip $(TARGET_CPU_VARIANT))
-TARGET_CPU_VARIANT_RUNTIME := $(strip $(TARGET_CPU_VARIANT_RUNTIME))
-
-TARGET_2ND_CPU_ABI := $(strip $(TARGET_2ND_CPU_ABI))
-TARGET_2ND_CPU_ABI2 := $(strip $(TARGET_2ND_CPU_ABI2))
-TARGET_2ND_CPU_VARIANT := $(strip $(TARGET_2ND_CPU_VARIANT))
-TARGET_2ND_CPU_VARIANT_RUNTIME := $(strip $(TARGET_2ND_CPU_VARIANT_RUNTIME))
+# Clean up and verify BoardConfig variables
+$(foreach var,$(_board_strip_readonly_list),$(eval $(var) := $$(strip $$($(var)))))
+$(foreach var,$(_board_true_false_vars), \
+ $(if $(filter-out true false,$($(var))), \
+ $(error Valid values of $(var) are "true", "false", and "". Not "$($(var))")))
# Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified.
TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
-BOARD_KERNEL_BASE := $(strip $(BOARD_KERNEL_BASE))
-BOARD_KERNEL_PAGESIZE := $(strip $(BOARD_KERNEL_PAGESIZE))
+# The combo makefiles sanity-check and set defaults for various CPU configuration variables.
+combo_target := TARGET_
+combo_2nd_arch_prefix :=
+include $(BUILD_SYSTEM)/combo/select.mk
-INTERNAL_KERNEL_CMDLINE := $(strip $(BOARD_KERNEL_CMDLINE))
+ifdef TARGET_2ND_ARCH
+ combo_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+ include $(BUILD_SYSTEM)/combo/select.mk
+endif
+.KATI_READONLY := $(_board_strip_readonly_list)
+
+INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
ifeq ($(TARGET_CPU_ABI),)
$(error No TARGET_CPU_ABI defined by board config: $(board_config_mk))
endif
@@ -152,23 +228,6 @@
TARGET_CPU_ABI_LIST_32_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_32_BIT)))
TARGET_CPU_ABI_LIST_64_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_64_BIT)))
-###########################################
-# Handle BUILD_BROKEN_* settings
-vars := \
- BUILD_BROKEN_ANDROIDMK_EXPORTS \
- BUILD_BROKEN_DUP_COPY_HEADERS \
- BUILD_BROKEN_DUP_RULES \
- BUILD_BROKEN_PHONY_TARGETS \
- BUILD_BROKEN_ENG_DEBUG_TAGS
-
-$(foreach var,$(vars),$(eval $(var) := $$(strip $$($(var)))))
-
-$(foreach var,$(vars), \
- $(if $(filter-out true false,$($(var))), \
- $(error Valid values of $(var) are "true", "false", and "". Not "$($(var))")))
-
-.KATI_READONLY := $(vars)
-
ifneq ($(BUILD_BROKEN_ANDROIDMK_EXPORTS),true)
$(KATI_obsolete_export It is a global setting. See $(CHANGES_URL)#export_keyword)
endif
diff --git a/core/combo/arch/x86/stoneyridge.mk b/core/combo/arch/x86/stoneyridge.mk
new file mode 100644
index 0000000..30405a1
--- /dev/null
+++ b/core/combo/arch/x86/stoneyridge.mk
@@ -0,0 +1,12 @@
+# Configuration for Linux on x86.
+# Generating binaries for Stoney Ridge processors.
+#
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AES_NI := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/combo/arch/x86_64/stoneyridge.mk b/core/combo/arch/x86_64/stoneyridge.mk
new file mode 100644
index 0000000..f7d9583
--- /dev/null
+++ b/core/combo/arch/x86_64/stoneyridge.mk
@@ -0,0 +1,12 @@
+# Configuration for Linux on x86_64.
+# Generating binaries for Stoney Ridge processors.
+#
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+ARCH_X86_HAVE_AES_NI := true
+ARCH_X86_HAVE_AVX := true
+ARCH_X86_HAVE_AVX2 := true
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_MOVBE := true
diff --git a/core/config.mk b/core/config.mk
index d4069d3..0bc460e 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -287,6 +287,7 @@
TARGET_PRODUCT_KERNEL_HEADERS := $(strip $(wildcard $(PRODUCT_VENDOR_KERNEL_HEADERS)))
TARGET_PRODUCT_KERNEL_HEADERS := $(patsubst %/,%,$(TARGET_PRODUCT_KERNEL_HEADERS))
$(call validate-kernel-headers,$(TARGET_PRODUCT_KERNEL_HEADERS))
+.KATI_READONLY := TARGET_DEVICE_KERNEL_HEADERS TARGET_BOARD_KERNEL_HEADERS TARGET_PRODUCT_KERNEL_HEADERS
# Commands to generate .toc file common to ELF .so files.
define _gen_toc_command_for_elf
@@ -300,44 +301,6 @@
$(hide) $(HOST_NM) -gP $(1) | cut -f1-2 -d" " | (grep -v U$$ >> $(2) || true)
endef
-combo_target := HOST_
-combo_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/combo/select.mk
-
-# Load the 2nd host arch if it's needed.
-ifdef HOST_2ND_ARCH
-combo_target := HOST_
-combo_2nd_arch_prefix := $(HOST_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/combo/select.mk
-endif
-
-# Load the windows cross compiler under Linux
-ifdef HOST_CROSS_OS
-combo_target := HOST_CROSS_
-combo_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/combo/select.mk
-
-ifdef HOST_CROSS_2ND_ARCH
-combo_target := HOST_CROSS_
-combo_2nd_arch_prefix := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/combo/select.mk
-endif
-endif
-
-# on windows, the tools have .exe at the end, and we depend on the
-# host config stuff being done first
-
-combo_target := TARGET_
-combo_2nd_arch_prefix :=
-include $(BUILD_SYSTEM)/combo/select.mk
-
-# Load the 2nd target arch if it's needed.
-ifdef TARGET_2ND_ARCH
-combo_target := TARGET_
-combo_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-include $(BUILD_SYSTEM)/combo/select.mk
-endif
-
ifeq ($(CALLED_FROM_SETUP),true)
include $(BUILD_SYSTEM)/ccache.mk
include $(BUILD_SYSTEM)/goma.mk
@@ -803,6 +766,7 @@
else
DEFAULT_SYSTEM_DEV_CERTIFICATE := build/target/product/security/testkey
endif
+.KATI_READONLY := DEFAULT_SYSTEM_DEV_CERTIFICATE
BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
@@ -1210,4 +1174,12 @@
include $(BUILD_SYSTEM)/soong_config.mk
endif
+# If ENABLE_DEFAULT_TEST_LOCATION is true, move default install path from
+# $(my_prefix)OUT_DATA to $(my_prefix)OUT_TESTCASES
+ENABLE_DEFAULT_TEST_LOCATION := true
+-include external/linux-kselftest/android/kselftest_test_list.mk
+-include external/ltp/android/ltp_package_list.mk
+DEFAULT_DATA_OUT_MODULES := ltp $(ltp_packages) $(kselftest_modules)
+.KATI_READONLY := ENABLE_DEFAULT_TEST_LOCATION DEFAULT_DATA_OUT_MODULES
+
include $(BUILD_SYSTEM)/dumpvar.mk
diff --git a/core/definitions.mk b/core/definitions.mk
index c97f647..e880fa5 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2225,7 +2225,7 @@
@mkdir -p $(dir $@)
$(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
$(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
-$(hide) $(DX_COMMAND) \
+$(hide) $(DX_COMMAND) $(DEX_FLAGS) \
--output $(dir $@) \
$(addprefix --lib ,$(PRIVATE_D8_LIBS)) \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
@@ -2388,7 +2388,7 @@
define run-appcompat
$(hide) \
echo "appcompat.sh output:" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
- PACKAGING=$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING ANDROID_LOG_TAGS="*:e" art/tools/veridex/appcompat.sh --dex-file=$@ 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
+ PACKAGING=$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING ANDROID_LOG_TAGS="*:e" art/tools/veridex/appcompat.sh --dex-file=$@ --api-flags=$(INTERNAL_PLATFORM_HIDDENAPI_FLAGS) 2>&1 >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
endef
appcompat-files = \
art/tools/veridex/appcompat.sh \
@@ -2478,6 +2478,25 @@
$$(align-package)
endef
+# Create a copy pair (src:dst) for a compatibility suite.
+# Emits nothing when $(2) is $(LOCAL_INSTALLED_MODULE), so the install target
+# is not overridden.
+# $(1): source path
+# $(2): destination path
+define compat-copy-pair
+$(if $(filter-out $(2), $(LOCAL_INSTALLED_MODULE)), $(1):$(2))
+endef
+
+# Create a copy pair (src:dst) from $(1) and $(2).
+# If $(2) is a substring of $(3), emit nothing.
+# $(1): source path
+# $(2): destination path
+# $(3): filter-out target(s)
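+# For example (illustrative paths):
+#   $(call filter-copy-pair,out/foo,dst/foo,dst/foo dst/bar) expands to nothing,
+#   $(call filter-copy-pair,out/foo,dst/baz,dst/foo dst/bar) expands to out/foo:dst/baz.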
+define filter-copy-pair
+$(if $(findstring $(2), $(3)),,$(1):$(2))
+endef
+
# Copies many files.
# $(1): The files to copy. Each entry is a ':' separated src:dst pair
# $(2): An optional directory to prepend to the destination
@@ -2697,7 +2716,8 @@
define transform-jar-to-dex-r8
@echo R8: $@
$(hide) rm -f $(PRIVATE_PROGUARD_DICTIONARY)
-$(hide) $(R8_COMPAT_PROGUARD) -injars '$<' \
+$(hide) $(R8_COMPAT_PROGUARD) $(DEX_FLAGS) \
+ -injars '$<' \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
--no-data-resources \
--force-proguard-compatibility --output $(subst classes.dex,,$@) \
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 3d02cdc..85ddbfa 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -117,6 +117,10 @@
my_dexpreopt_archs :=
my_dexpreopt_images :=
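+# Select which dexpreopt boot image is used below: DEXPREOPT_IMAGE_boot_<arch>
+# by default, or DEXPREOPT_IMAGE_apex_<arch> when DEXPREOPT_USE_APEX_IMAGE is
+# true.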
+my_dexpreopt_infix := boot
+ifeq (true, $(DEXPREOPT_USE_APEX_IMAGE))
+ my_dexpreopt_infix := apex
+endif
ifdef LOCAL_DEX_PREOPT
ifeq (,$(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
@@ -150,13 +154,13 @@
# #################################################
# Odex for the 1st arch
my_dexpreopt_archs += $(TARGET_ARCH)
- my_dexpreopt_images += $(DEXPREOPT_IMAGE_boot_$(TARGET_ARCH))
+ my_dexpreopt_images += $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_ARCH))
# Odex for the 2nd arch
ifdef TARGET_2ND_ARCH
ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
ifneq (first,$(my_module_multilib))
my_dexpreopt_archs += $(TARGET_2ND_ARCH)
- my_dexpreopt_images += $(DEXPREOPT_IMAGE_boot_$(TARGET_2ND_ARCH))
+ my_dexpreopt_images += $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_2ND_ARCH))
endif # my_module_multilib is not first.
endif # TARGET_TRANSLATE_2ND_ARCH not true
endif # TARGET_2ND_ARCH
@@ -166,13 +170,15 @@
# Save the module multilib since setup_one_odex modifies it.
my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
my_dexpreopt_archs += $(TARGET_$(my_2nd_arch_prefix)ARCH)
- my_dexpreopt_images += $(DEXPREOPT_IMAGE_boot_$(TARGET_$(my_2nd_arch_prefix)ARCH))
+ my_dexpreopt_images += \
+ $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_$(my_2nd_arch_prefix)ARCH))
ifdef TARGET_2ND_ARCH
ifeq ($(my_module_multilib),both)
# The non-preferred arch
my_2nd_arch_prefix := $(if $(LOCAL_2ND_ARCH_VAR_PREFIX),,$(TARGET_2ND_ARCH_VAR_PREFIX))
my_dexpreopt_archs += $(TARGET_$(my_2nd_arch_prefix)ARCH)
- my_dexpreopt_images += $(DEXPREOPT_IMAGE_boot_$(TARGET_$(my_2nd_arch_prefix)ARCH))
+ my_dexpreopt_images += \
+ $(DEXPREOPT_IMAGE_$(my_dexpreopt_infix)_$(TARGET_$(my_2nd_arch_prefix)ARCH))
endif # LOCAL_MULTILIB is both
endif # TARGET_2ND_ARCH
endif # LOCAL_MODULE_CLASS
diff --git a/core/envsetup.mk b/core/envsetup.mk
index d072c6e..1704daf 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -152,6 +152,45 @@
HOST_2ND_ARCH :=
endif
+HOST_2ND_ARCH_VAR_PREFIX := 2ND_
+HOST_2ND_ARCH_MODULE_SUFFIX := _32
+HOST_CROSS_2ND_ARCH_VAR_PREFIX := 2ND_
+HOST_CROSS_2ND_ARCH_MODULE_SUFFIX := _64
+TARGET_2ND_ARCH_VAR_PREFIX := 2ND_
+.KATI_READONLY := \
+ HOST_ARCH \
+ HOST_2ND_ARCH \
+ HOST_IS_64_BIT \
+ HOST_2ND_ARCH_VAR_PREFIX \
+ HOST_2ND_ARCH_MODULE_SUFFIX \
+ HOST_CROSS_2ND_ARCH_VAR_PREFIX \
+ HOST_CROSS_2ND_ARCH_MODULE_SUFFIX \
+ TARGET_2ND_ARCH_VAR_PREFIX \
+
+combo_target := HOST_
+combo_2nd_arch_prefix :=
+include $(BUILD_COMBOS)/select.mk
+
+ifdef HOST_2ND_ARCH
+ combo_2nd_arch_prefix := $(HOST_2ND_ARCH_VAR_PREFIX)
+ include $(BUILD_SYSTEM)/combo/select.mk
+endif
+
+# Load the windows cross compiler under Linux
+ifdef HOST_CROSS_OS
+ combo_target := HOST_CROSS_
+ combo_2nd_arch_prefix :=
+ include $(BUILD_SYSTEM)/combo/select.mk
+
+ ifdef HOST_CROSS_2ND_ARCH
+ combo_2nd_arch_prefix := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
+ include $(BUILD_SYSTEM)/combo/select.mk
+ endif
+endif
+
+# on windows, the tools have .exe at the end, and we depend on the
+# host config stuff being done first
+
BUILD_ARCH := $(HOST_ARCH)
BUILD_2ND_ARCH := $(HOST_2ND_ARCH)
@@ -232,11 +271,10 @@
endif
SDK_HOST_ARCH := x86
+TARGET_OS := linux
include $(BUILD_SYSTEM)/board_config.mk
-TARGET_OS := linux
-
# the target build type defaults to release
ifneq ($(TARGET_BUILD_TYPE),debug)
TARGET_BUILD_TYPE := release
@@ -342,8 +380,6 @@
.KATI_READONLY := HOST_OUT_TEST_CONFIG
# Out for HOST_2ND_ARCH
-HOST_2ND_ARCH_VAR_PREFIX := 2ND_
-HOST_2ND_ARCH_MODULE_SUFFIX := _32
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATES := $(HOST_OUT)/obj32
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES := $(HOST_OUT)/lib
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_EXECUTABLES := $(HOST_OUT_EXECUTABLES)
@@ -351,8 +387,6 @@
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_TESTCASES := $(HOST_OUT_TESTCASES)
.KATI_READONLY := \
- HOST_2ND_ARCH_VAR_PREFIX \
- HOST_2ND_ARCH_MODULE_SUFFIX \
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATES \
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES \
$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_EXECUTABLES \
@@ -366,15 +400,11 @@
.KATI_READONLY := HOST_LIBRARY_PATH
# Out for HOST_CROSS_2ND_ARCH
-HOST_CROSS_2ND_ARCH_VAR_PREFIX := 2ND_
-HOST_CROSS_2ND_ARCH_MODULE_SUFFIX := _64
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATES := $(HOST_CROSS_OUT)/obj64
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib64
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT_EXECUTABLES)
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_NATIVE_TESTS := $(HOST_CROSS_OUT)/nativetest64
.KATI_READONLY := \
- HOST_CROSS_2ND_ARCH_VAR_PREFIX \
- HOST_CROSS_2ND_ARCH_MODULE_SUFFIX \
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATES \
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_SHARED_LIBRARIES \
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_EXECUTABLES \
@@ -461,14 +491,13 @@
.KATI_READONLY := TARGET_OUT_SYSTEM_OTHER
# Out for TARGET_2ND_ARCH
-TARGET_2ND_ARCH_VAR_PREFIX := $(HOST_2ND_ARCH_VAR_PREFIX)
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
# With this you can reference the arm binary translation library with libfoo_arm in PRODUCT_PACKAGES.
TARGET_2ND_ARCH_MODULE_SUFFIX := _$(TARGET_2ND_ARCH)
else
TARGET_2ND_ARCH_MODULE_SUFFIX := $(HOST_2ND_ARCH_MODULE_SUFFIX)
endif
-.KATI_READONLY := TARGET_2ND_ARCH_VAR_PREFIX TARGET_2ND_ARCH_MODULE_SUFFIX
+.KATI_READONLY := TARGET_2ND_ARCH_MODULE_SUFFIX
ifneq ($(filter address,$(SANITIZE_TARGET)),)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)_asan
@@ -494,6 +523,11 @@
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS_PRIVILEGED \
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_TESTCASES
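+# Install directory names (relative to the partition root) for each module
+# class; used by core/tasks/tools/package-modules.mk when mapping testcase
+# install paths.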
+MODULE_CLASS_APPS := app
+MODULE_CLASS_EXECUTABLES := bin
+MODULE_CLASS_JAVA_LIBRARIES := framework
+MODULE_CLASS_NATIVE_TESTS := nativetest
+MODULE_CLASS_METRIC_TESTS := benchmarktest
TARGET_OUT_DATA := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DATA)
TARGET_OUT_DATA_EXECUTABLES := $(TARGET_OUT_EXECUTABLES)
TARGET_OUT_DATA_SHARED_LIBRARIES := $(TARGET_OUT_SHARED_LIBRARIES)
@@ -513,6 +547,7 @@
TARGET_OUT_VENDOR_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest$(TARGET_VENDOR_TEST_SUFFIX)
TARGET_OUT_VENDOR_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest$(TARGET_VENDOR_TEST_SUFFIX)
endif
+MODULE_CLASS_FAKE := fake_packages
TARGET_OUT_DATA_FAKE := $(TARGET_OUT_DATA)/fake_packages
.KATI_READONLY := \
TARGET_OUT_DATA \
@@ -527,7 +562,13 @@
TARGET_OUT_DATA_METRIC_TESTS \
TARGET_OUT_VENDOR_NATIVE_TESTS \
TARGET_OUT_VENDOR_METRIC_TESTS \
- TARGET_OUT_DATA_FAKE
+ TARGET_OUT_DATA_FAKE \
+ MODULE_CLASS_APPS \
+ MODULE_CLASS_EXECUTABLES \
+ MODULE_CLASS_JAVA_LIBRARIES \
+ MODULE_CLASS_NATIVE_TESTS \
+ MODULE_CLASS_METRIC_TESTS \
+ MODULE_CLASS_FAKE
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_EXECUTABLES := $(TARGET_OUT_DATA_EXECUTABLES)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_SHARED_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES)
diff --git a/core/main.mk b/core/main.mk
index b63fd61..22cba4e 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -257,6 +257,15 @@
ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
endif
+# Sets the default value of ro.postinstall.fstab.prefix to /system.
+# Device board config should override the value to /product when needed by:
+#
+# PRODUCT_PRODUCT_PROPERTIES += ro.postinstall.fstab.prefix=/product
+#
+# It then uses ${ro.postinstall.fstab.prefix}/etc/fstab.postinstall to
+# mount system_other partition.
+ADDITIONAL_DEFAULT_PROPERTIES += ro.postinstall.fstab.prefix=/system
+
# -----------------------------------------------------------------
###
### In this section we set up the things that are different
@@ -568,6 +577,22 @@
endef
endif # TARGET_TRANSLATE_2ND_ARCH
+# TODO: we can probably check to see if these modules are actually host
+# modules
+define get-host-32-bit-modules
+$(sort $(foreach m,$(1),\
+ $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
+ $(m)$(HOST_2ND_ARCH_MODULE_SUFFIX))))
+endef
+# Get a list of corresponding 32-bit module names, if one exists;
+# otherwise return the original module name
+define get-host-32-bit-modules-if-we-can
+$(sort $(foreach m,$(1),\
+ $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
+ $(m)$(HOST_2ND_ARCH_MODULE_SUFFIX),\
+ $(m))))
+endef
+
# If a module is for a cross host os, the required modules must be for
# that OS too.
# If a module is built for 32-bit, the required modules must be 32-bit too;
@@ -1015,6 +1040,16 @@
$(call expand-required-modules,$(1),$(_erm_new_modules),$(_erm_all_overrides)))
endef
+# Same as expand-required-modules above, but does not handle module overrides, as
+# we don't intend to support them on the host.
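+# $(1): the name of the variable that accumulates the expanded module list.
+# $(2): the modules whose requirements are expanded in this pass.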
+define expand-required-host-modules
+$(eval _erm_req := $(foreach m,$(2),$(ALL_MODULES.$(m).REQUIRED))) \
+$(eval _erm_new_modules := $(sort $(filter-out $($(1)),$(_erm_req)))) \
+$(eval $(1) += $(_erm_new_modules)) \
+$(if $(_erm_new_modules),\
+ $(call expand-required-host-modules,$(1),$(_erm_new_modules)))
+endef
+
# Transforms paths relative to PRODUCT_OUT to absolute paths.
# $(1): list of relative paths
# $(2): optional suffix to append to paths
@@ -1031,6 +1066,8 @@
define auto-included-modules
$(if $(BOARD_VNDK_VERSION),vndk_package) \
$(if $(DEVICE_MANIFEST_FILE),device_manifest.xml) \
+ $(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \
+ $(if $(ODM_MANIFEST_SKUS),$(foreach sku, $(ODM_MANIFEST_SKUS),odm_manifest_$(sku).xml)) \
endef
@@ -1078,6 +1115,23 @@
$(foreach cf,$(PRODUCTS.$(_mk).PRODUCT_COPY_FILES),$(call word-colon,2,$(cf))))
endef
+# Similar to product-installed-files above, but handles PRODUCT_HOST_PACKAGES instead
+# This does support the :32 / :64 syntax, but does not support module overrides.
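+# $(1): product makefile (e.g. $(INTERNAL_PRODUCT)).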
+define host-installed-files
+ $(eval _hif_modules := $(PRODUCTS.$(strip $(1)).PRODUCT_HOST_PACKAGES)) \
+ $(eval ### Resolve the :32 :64 module name) \
+ $(eval _hif_modules_32 := $(patsubst %:32,%,$(filter %:32, $(_hif_modules)))) \
+ $(eval _hif_modules_64 := $(patsubst %:64,%,$(filter %:64, $(_hif_modules)))) \
+ $(eval _hif_modules_rest := $(filter-out %:32 %:64,$(_hif_modules))) \
+ $(eval _hif_modules := $(call get-host-32-bit-modules-if-we-can, $(_hif_modules_32))) \
+ $(eval _hif_modules += $(_hif_modules_64)) \
+ $(eval ### For the rest we add both) \
+ $(eval _hif_modules += $(call get-host-32-bit-modules, $(_hif_modules_rest))) \
+ $(eval _hif_modules += $(_hif_modules_rest)) \
+ $(call expand-required-host-modules,_hif_modules,$(_hif_modules)) \
+ $(filter $(HOST_OUT_ROOT)/%,$(call module-installed-files, $(_hif_modules)))
+endef
+
# Fails the build if the given list is non-empty, and prints it entries (stripping PRODUCT_OUT).
# $(1): list of files to print
# $(2): heading to print on failure
@@ -1105,10 +1159,42 @@
endif
ifdef FULL_BUILD
- product_FILES := $(call product-installed-files, $(INTERNAL_PRODUCT))
+ # Check to ensure that all modules in PRODUCT_HOST_PACKAGES exist
+ #
+ # Many host modules are Linux-only, so skip this check on Mac. If we ever have Mac-only modules,
+ # maybe it would make sense to have PRODUCT_HOST_PACKAGES_LINUX/_DARWIN?
+ ifneq ($(HOST_OS),darwin)
+ ifneq (true,$(ALLOW_MISSING_DEPENDENCIES))
+ _modules := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_HOST_PACKAGES)
+    _nonexistent_modules := $(foreach m,$(_modules),\
+      $(if $(filter FAKE,$(ALL_MODULES.$(m).CLASS))$(filter $(HOST_OUT_ROOT)/%,$(ALL_MODULES.$(m).INSTALLED)),,$(m)))
+    $(call maybe-print-list-and-error,$(_nonexistent_modules),\
+      $(INTERNAL_PRODUCT) includes non-existent modules in PRODUCT_HOST_PACKAGES)
+ endif
+ endif
+
+ product_host_FILES := $(call host-installed-files,$(INTERNAL_PRODUCT))
+ product_target_FILES := $(call product-installed-files, $(INTERNAL_PRODUCT))
# WARNING: The product_MODULES variable is depended on by external files.
product_MODULES := $(_pif_modules)
+ # Verify that PRODUCT_HOST_PACKAGES is complete
+ # This is a temporary requirement during migration
+ # Ignore libraries, since they shouldn't need to be in PRODUCT_PACKAGES for the most part anyway.
+ host_files_in_target_FILES := $(filter-out \
+ $(HOST_OUT_SHARED_LIBRARIES)/% \
+ $($(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)/%,\
+ $(filter $(HOST_OUT_ROOT)/%,$(product_target_FILES)))
+ ifneq (,$(filter-out $(product_host_FILES),$(host_files_in_target_FILES)))
+ packages := $(foreach f,$(filter-out $(product_host_FILES),$(host_files_in_target_FILES)), \
+ $(or $(INSTALLABLE_FILES.$(f).MODULE),$(f)))
+ $(warning Missing modules from PRODUCT_HOST_PACKAGES)
+ $(warning See $(CHANGES_URL)#PRODUCT_HOST_PACKAGES for more information)
+ $(foreach f,$(sort $(packages)),$(warning _ $(f)))
+ $(error stop)
+ endif
+ host_files_in_target_FILES :=
+
# Verify the artifact path requirements made by included products.
is_asan := $(if $(filter address,$(SANITIZE_TARGET)),true)
ifneq (true,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS)))
@@ -1151,7 +1237,7 @@
$(eval unused_whitelist := $(filter-out $(files),$(whitelist_patterns))) \
$(call maybe-print-list-and-error,$(unused_whitelist),$(makefile) includes redundant whitelist entries in its artifact path requirement.) \
$(eval ### Optionally verify that nothing else produces files inside this artifact path requirement.) \
- $(eval extra_files := $(filter-out $(files) $(HOST_OUT)/%,$(product_FILES))) \
+ $(eval extra_files := $(filter-out $(files) $(HOST_OUT)/%,$(product_target_FILES))) \
$(eval files_in_requirement := $(filter $(path_patterns),$(extra_files))) \
$(eval all_offending_files += $(files_in_requirement)) \
$(eval whitelist := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST)) \
@@ -1176,14 +1262,16 @@
# a subset of the module makefiles. Don't try to build any modules
# requested by the product, because we probably won't have rules
# to build them.
- product_FILES :=
+ product_target_FILES :=
+ product_host_FILES :=
endif
# TODO: Remove the 3 places in the tree that use ALL_DEFAULT_INSTALLED_MODULES
# and get rid of it from this list.
modules_to_install := $(sort \
$(ALL_DEFAULT_INSTALLED_MODULES) \
- $(product_FILES) \
+ $(product_target_FILES) \
+ $(product_host_FILES) \
$(call get-tagged-modules,$(tags_to_install)) \
$(CUSTOM_MODULES) \
)
@@ -1589,8 +1677,8 @@
.PHONY: dump-files
dump-files:
- $(info product_FILES for $(TARGET_DEVICE) ($(INTERNAL_PRODUCT)):)
- $(foreach p,$(sort $(product_FILES)),$(info : $(p)))
+ $(info product_target_FILES for $(TARGET_DEVICE) ($(INTERNAL_PRODUCT)):)
+ $(foreach p,$(sort $(product_target_FILES)),$(info : $(p)))
@echo Successfully dumped product file list
.PHONY: nothing
diff --git a/core/native_benchmark.mk b/core/native_benchmark.mk
index e73bcad..4750649 100644
--- a/core/native_benchmark.mk
+++ b/core/native_benchmark.mk
@@ -6,8 +6,10 @@
LOCAL_STATIC_LIBRARIES += libgoogle-benchmark
+ifndef ENABLE_DEFAULT_TEST_LOCATION
LOCAL_MODULE_PATH_64 := $(TARGET_OUT_DATA_METRIC_TESTS)/$(LOCAL_MODULE)
LOCAL_MODULE_PATH_32 := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS)/$(LOCAL_MODULE)
+endif
ifndef LOCAL_MULTILIB
ifndef LOCAL_32_BIT_ONLY
diff --git a/core/native_test_config_template.xml b/core/native_test_config_template.xml
index a88d57c..f3d3231 100644
--- a/core/native_test_config_template.xml
+++ b/core/native_test_config_template.xml
@@ -17,6 +17,9 @@
<configuration description="Runs {MODULE}.">
<option name="test-suite-tag" value="apct" />
<option name="test-suite-tag" value="apct-native" />
+
+ {EXTRA_OPTIONS}
+
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="true" />
<option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 684ab9f..e9e89c3 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -50,7 +50,6 @@
user \
userdataimage \
userdebug \
- valgrind-test-art% \
vts \
win_sdk \
winsdk-tools
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 9043710..3be4635 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -768,10 +768,17 @@
ifdef LOCAL_COMPATIBILITY_SUITE
+ifndef ENABLE_DEFAULT_TEST_LOCATION
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
$(foreach s,$(my_split_suffixes),\
$(intermediates)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
+else
+$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(foreach s,$(my_split_suffixes),\
+ $(call compat-copy-pair,$(intermediates)/package_$(s).apk,$(dir)/$(LOCAL_MODULE)_$(s).apk)))))
+endif
$(call create-suite-dependencies)
diff --git a/core/product.mk b/core/product.mk
index efe029b..a367a6b 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -117,6 +117,7 @@
PRODUCT_AAPT_CONFIG \
PRODUCT_AAPT_PREF_CONFIG \
PRODUCT_AAPT_PREBUILT_DPI \
+ PRODUCT_HOST_PACKAGES \
PRODUCT_PACKAGES \
PRODUCT_PACKAGES_DEBUG \
PRODUCT_PACKAGES_DEBUG_ASAN \
@@ -366,80 +367,10 @@
$(strip $(call _resolve-short-product-name,$(1)))
endef
-
-_product_stash_var_list := $(_product_var_list) \
- PRODUCT_BOOTCLASSPATH \
- PRODUCT_SYSTEM_SERVER_CLASSPATH \
- TARGET_ARCH \
- TARGET_ARCH_VARIANT \
- TARGET_CPU_ABI \
- TARGET_CPU_ABI2 \
- TARGET_CPU_VARIANT \
- TARGET_CPU_VARIANT_RUNTIME \
- TARGET_2ND_ARCH \
- TARGET_2ND_ARCH_VARIANT \
- TARGET_2ND_CPU_ABI \
- TARGET_2ND_CPU_ABI2 \
- TARGET_2ND_CPU_VARIANT \
- TARGET_2ND_CPU_VARIANT_RUNTIME \
- TARGET_BOARD_PLATFORM \
- TARGET_BOARD_PLATFORM_GPU \
- TARGET_BOARD_KERNEL_HEADERS \
- TARGET_DEVICE_KERNEL_HEADERS \
- TARGET_PRODUCT_KERNEL_HEADERS \
- TARGET_BOOTLOADER_BOARD_NAME \
- TARGET_NO_BOOTLOADER \
- TARGET_NO_KERNEL \
- TARGET_NO_RECOVERY \
- TARGET_NO_RADIOIMAGE \
- TARGET_HARDWARE_3D \
-
-_product_stash_var_list += \
- BOARD_WPA_SUPPLICANT_DRIVER \
- BOARD_WLAN_DEVICE \
- BOARD_USES_GENERIC_AUDIO \
- BOARD_KERNEL_CMDLINE \
- BOARD_KERNEL_BASE \
- BOARD_HAVE_BLUETOOTH \
- BOARD_VENDOR_USE_AKMD \
- BOARD_EGL_CFG \
- BOARD_BOOTIMAGE_PARTITION_SIZE \
- BOARD_RECOVERYIMAGE_PARTITION_SIZE \
- BOARD_SYSTEMIMAGE_PARTITION_SIZE \
- BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE \
- BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE \
- BOARD_USERDATAIMAGE_PARTITION_SIZE \
- BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE \
- BOARD_CACHEIMAGE_PARTITION_SIZE \
- BOARD_FLASH_BLOCK_SIZE \
- BOARD_VENDORIMAGE_PARTITION_SIZE \
- BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
- BOARD_PRODUCTIMAGE_PARTITION_SIZE \
- BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
- BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE \
- BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE \
- BOARD_ODMIMAGE_PARTITION_SIZE \
- BOARD_ODMIMAGE_FILE_SYSTEM_TYPE \
- BOARD_INSTALLER_CMDLINE \
-
-
-_product_stash_var_list += \
- DEFAULT_SYSTEM_DEV_CERTIFICATE \
- WITH_DEXPREOPT \
- WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY
-
-# Logical partitions related variables.
-_dynamic_partitions_var_list += \
- BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE \
- BOARD_SUPER_PARTITION_SIZE \
- BOARD_SUPER_PARTITION_GROUPS \
-
-_product_stash_var_list += $(_dynamic_partitions_var_list)
-_product_strip_var_list := $(_dynamic_partitions_var_list)
+_product_stash_var_list := $(_product_var_list)
+# TODO: Move this to board_config.mk when no longer set in product makefiles
+_product_stash_var_list += WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY
+_product_strip_var_list :=
#
# Mark the variables in _product_stash_var_list as readonly
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 6214ac6..9692a99 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -94,6 +94,8 @@
ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
# Make sure to install the .odex and .vdex when you run "make <module_name>"
$(my_all_targets): $(installed_odex)
+ # Copy $(LOCAL_BUILT_MODULE) and its dependencies when installing boot.art
+ $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE): $(LOCAL_BUILT_MODULE)
endif
endif # is_boot_jar
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index b5c3a7c..1ed1195 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -29,6 +29,15 @@
$(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH_64 when building test $(LOCAL_MODULE))
endif
+use_testcase_folder := false
+ifdef ENABLE_DEFAULT_TEST_LOCATION
+ ifneq ($(LOCAL_MODULE),$(filter $(LOCAL_MODULE),$(DEFAULT_DATA_OUT_MODULES)))
+ use_testcase_folder := true
+ endif
+endif
+
+ifneq ($(use_testcase_folder),true)
ifndef LOCAL_MODULE_RELATIVE_PATH
LOCAL_MODULE_RELATIVE_PATH := $(LOCAL_MODULE)
endif
+endif
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 629a9b2..82b4c6a 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -40,6 +40,8 @@
$(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).PICKUP_FILES)))\
$(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
$(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
+ $(eval _module_class_folder := $($(strip MODULE_CLASS_$(word 1, $(strip $(ALL_MODULES.$(m).CLASS)\
+ $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS))))))\
$(if $(_pickup_files)$(_built_files),,\
$(call my_missing_files,$(m)))\
$(eval my_pickup_files += $(_pickup_files))\
@@ -49,9 +51,15 @@
$(if $(filter $(TARGET_OUT_ROOT)/%,$(ins)),\
$(eval bui := $(word 1,$(bui_ins)))\
$(eval my_built_modules += $(bui))\
+ $(if $(filter $(_module_class_folder), nativetest benchmarktest),\
+ $(eval module_class_folder_stem := $(_module_class_folder)$(findstring 64, $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))),\
+ $(eval module_class_folder_stem := $(_module_class_folder)))\
$(eval my_copy_dest := $(patsubst data/%,DATA/%,\
- $(patsubst system/%,DATA/%,\
- $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
+ $(patsubst testcases/%,DATA/$(module_class_folder_stem)/%,\
+ $(patsubst testcases/$(m)/$(TARGET_ARCH)/%,DATA/$(module_class_folder_stem)/$(m)/%,\
+ $(patsubst testcases/$(m)/$(TARGET_2ND_ARCH)/%,DATA/$(module_class_folder_stem)/$(m)/%,\
+ $(patsubst system/%,DATA/%,\
+ $(patsubst $(PRODUCT_OUT)/%,%,$(ins))))))))\
$(eval my_copy_pairs += $(bui):$(my_staging_dir)/$(my_copy_dest)))\
))
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 835d9fe..6923698 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -252,7 +252,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2019-02-05
+ PLATFORM_SECURITY_PATCH := 2019-03-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 7fe45eb..971a7b2 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -52,3 +52,59 @@
include $(BUILD_PREBUILT)
BUILT_VENDOR_MANIFEST := $(LOCAL_BUILT_MODULE)
endif
+
+# ODM manifest
+ifdef ODM_MANIFEST_FILES
+# ODM_MANIFEST_FILES is a list of files that is combined and installed as the default ODM manifest.
+include $(CLEAR_VARS)
+LOCAL_MODULE := odm_manifest.xml
+LOCAL_MODULE_STEM := manifest.xml
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_RELATIVE_PATH := vintf
+LOCAL_ODM_MODULE := true
+
+GEN := $(local-generated-sources-dir)/manifest.xml
+$(GEN): PRIVATE_SRC_FILES := $(ODM_MANIFEST_FILES)
+$(GEN): $(ODM_MANIFEST_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
+ # Set VINTF_IGNORE_TARGET_FCM_VERSION to true because it should only be in device manifest.
+ VINTF_IGNORE_TARGET_FCM_VERSION=true \
+ $(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \
+ -i $(call normalize-path-list,$(PRIVATE_SRC_FILES))
+
+LOCAL_PREBUILT_MODULE_FILE := $(GEN)
+include $(BUILD_PREBUILT)
+endif # ODM_MANIFEST_FILES
+
+# ODM_MANIFEST_SKUS: a list of SKUs for which ODM_MANIFEST_<sku>_FILES is defined.
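+# For example (illustrative paths), a BoardConfig.mk may define:
+#   ODM_MANIFEST_SKUS := sku1 sku2
+#   ODM_MANIFEST_SKU1_FILES := device/acme/vintf/odm_manifest_sku1.xml
+#   ODM_MANIFEST_SKU2_FILES := device/acme/vintf/odm_manifest_sku2.xml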
+ifdef ODM_MANIFEST_SKUS
+
+# Install /odm/etc/vintf/manifest_$(sku).xml
+# $(1): sku
+define _add_odm_sku_manifest
+my_fragment_files_var := ODM_MANIFEST_$$(call to-upper,$(1))_FILES
+ifndef $$(my_fragment_files_var)
+$$(error $(1) is in ODM_MANIFEST_SKUS but $$(my_fragment_files_var) is not defined)
+endif
+my_fragment_files := $$($$(my_fragment_files_var))
+include $$(CLEAR_VARS)
+LOCAL_MODULE := odm_manifest_$(1).xml
+LOCAL_MODULE_STEM := manifest_$(1).xml
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_RELATIVE_PATH := vintf
+LOCAL_ODM_MODULE := true
+GEN := $$(local-generated-sources-dir)/manifest_$(1).xml
+$$(GEN): PRIVATE_SRC_FILES := $$(my_fragment_files)
+$$(GEN): $$(my_fragment_files) $$(HOST_OUT_EXECUTABLES)/assemble_vintf
+ VINTF_IGNORE_TARGET_FCM_VERSION=true \
+ $$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $$@ \
+ -i $$(call normalize-path-list,$$(PRIVATE_SRC_FILES))
+LOCAL_PREBUILT_MODULE_FILE := $$(GEN)
+include $$(BUILD_PREBUILT)
+my_fragment_files_var :=
+my_fragment_files :=
+endef
+
+$(foreach sku, $(ODM_MANIFEST_SKUS), $(eval $(call _add_odm_sku_manifest,$(sku))))
+_add_odm_sku_manifest :=
+
+endif # ODM_MANIFEST_SKUS
diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk
index a31058d..83cbd54 100644
--- a/target/board/generic_x86/device.mk
+++ b/target/board/generic_x86/device.mk
@@ -22,3 +22,7 @@
PRODUCT_PACKAGES += \
bios.bin \
vgabios-cirrus.bin \
+
+PRODUCT_HOST_PACKAGES += \
+ bios.bin \
+ vgabios-cirrus.bin \
diff --git a/target/board/generic_x86_64/device.mk b/target/board/generic_x86_64/device.mk
index a31058d..83cbd54 100755
--- a/target/board/generic_x86_64/device.mk
+++ b/target/board/generic_x86_64/device.mk
@@ -22,3 +22,7 @@
PRODUCT_PACKAGES += \
bios.bin \
vgabios-cirrus.bin \
+
+PRODUCT_HOST_PACKAGES += \
+ bios.bin \
+ vgabios-cirrus.bin \
diff --git a/target/board/gsi_system.prop b/target/board/gsi_system.prop
index 0c04a95..d768c83 100644
--- a/target/board/gsi_system.prop
+++ b/target/board/gsi_system.prop
@@ -4,5 +4,8 @@
# GSI always disables adb authentication
ro.adb.secure=0
+# TODO(b/120679683): disable RescueParty until all problem apps are solved
+persist.sys.disable_rescue=true
+
# TODO(b/78105955): disable privapp_permissions checking before the bug solved
ro.control_privapp_permissions=disable
diff --git a/target/board/gsi_system_user.prop b/target/board/gsi_system_user.prop
index 1aa553b..becb783 100644
--- a/target/board/gsi_system_user.prop
+++ b/target/board/gsi_system_user.prop
@@ -1,5 +1,8 @@
# GSI always generate dex pre-opt in system image
ro.cp_system_other_odex=0
+# TODO(b/120679683): disable RescueParty until all problem apps are solved
+persist.sys.disable_rescue=true
+
# TODO(b/78105955): disable privapp_permissions checking before the bug solved
ro.control_privapp_permissions=disable
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index f8ff22f..83a84f7 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -277,6 +277,42 @@
framework_manifest.xml \
system_compatibility_matrix.xml \
+# Host tools to install
+PRODUCT_HOST_PACKAGES += \
+ BugReport \
+ adb \
+ adbd \
+ atest \
+ bcc \
+ bit \
+ e2fsck \
+ fastboot \
+ flags_health_check \
+ icu-data_host_runtime_apex \
+ incident_report \
+ ld.mc \
+ lpdump \
+ mdnsd \
+ minigzip \
+ mke2fs \
+ resize2fs \
+ selinux_policy_system \
+ sgdisk \
+ shell_and_utilities_system \
+ sqlite3 \
+ tinyplay \
+ tune2fs \
+ tzdatacheck \
+ unwind_info \
+ unwind_reg_info \
+ unwind_symbols \
+ viewcompiler \
+ tzdata_host \
+ tzdata_host_runtime_apex \
+ tzlookup.xml_host_runtime_apex \
+ tz_version_host \
+ tz_version_host_runtime_apex \
+
ifeq ($(TARGET_CORE_JARS),)
$(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
endif
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 8574aff..bf01957 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -19,6 +19,7 @@
adbd.recovery \
android.hardware.health@2.0-impl-default.recovery \
cgroups.recovery.json \
+ charger.recovery \
init_second_stage.recovery \
ld.config.recovery.txt \
linker.recovery \
@@ -26,6 +27,16 @@
shell_and_utilities_recovery \
watchdogd.recovery \
+# These had been pulled in via init_second_stage.recovery, but may not be needed.
+PRODUCT_HOST_PACKAGES += \
+ e2fsdroid \
+ mke2fs \
+ sload_f2fs \
+ make_f2fs \
+
+PRODUCT_HOST_PACKAGES += \
+ icu-data_host_runtime_apex
+
# Base modules and settings for the vendor partition.
PRODUCT_PACKAGES += \
android.hardware.cas@1.0-service \
diff --git a/target/product/full_x86.mk b/target/product/full_x86.mk
index a76b07c..17ca398 100644
--- a/target/product/full_x86.mk
+++ b/target/product/full_x86.mk
@@ -36,6 +36,10 @@
bios.bin \
vgabios-cirrus.bin \
+PRODUCT_HOST_PACKAGES += \
+ bios.bin \
+ vgabios-cirrus.bin \
+
# Enable dynamic partition size
PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index e35bf30..7042f6d 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -61,9 +61,8 @@
# Do not generate libartd.
PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false
-# Do not spin up a separate process for the network stack on go devices, use an in-process lib.
-PRODUCT_PACKAGES += NetworkStackLib
-PRODUCT_SYSTEM_SERVER_JARS += NetworkStackLib
+# Do not spin up a separate process for the network stack on go devices, use an in-process APK.
+PRODUCT_PACKAGES += InProcessNetworkStack
# Strip the local variable table and the local variable type table to reduce
# the size of the system image. This has no bearing on stack traces, but will
diff --git a/target/product/gsi_common.mk b/target/product/gsi_common.mk
index e87309b..fb0478d 100644
--- a/target/product/gsi_common.mk
+++ b/target/product/gsi_common.mk
@@ -24,12 +24,12 @@
$(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk)
# Additional settings used in all AOSP builds
-PRODUCT_PROPERTY_OVERRIDES := \
+PRODUCT_PROPERTY_OVERRIDES += \
ro.config.ringtone=Ring_Synth_04.ogg \
ro.config.notification_sound=pixiedust.ogg
# The mainline checking whitelist, should be clean up
-PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST := \
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST += \
system/app/messaging/messaging.apk \
system/app/PhotoTable/PhotoTable.apk \
system/app/WAPPushManager/WAPPushManager.apk \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index 22d1626..0ddc3db 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -92,6 +92,9 @@
tinypcminfo \
update_engine_client \
+PRODUCT_HOST_PACKAGES += \
+ tinyplay
+
# Enable stats logging in LMKD
TARGET_LMKD_STATS_LOG := true
PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 65ee073..2ba7005 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -60,6 +60,8 @@
StatementService \
vndk_snapshot_package \
+PRODUCT_HOST_PACKAGES += \
+ fsck.f2fs \
PRODUCT_COPY_FILES += \
frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index c337e91..412d8cf 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -38,13 +38,9 @@
libicui18n \
libicuuc \
-# TODO(b/124498388): This currently needs to be in /system/lib for the debugger
-# transport to work.
-PRODUCT_PACKAGES += \
- libnpt
-
# Android Runtime APEX module.
PRODUCT_PACKAGES += com.android.runtime
+PRODUCT_HOST_PACKAGES += com.android.runtime
# Certificates.
PRODUCT_PACKAGES += \
diff --git a/tools/checkowners.py b/tools/checkowners.py
index 7f03968..d6853d8 100755
--- a/tools/checkowners.py
+++ b/tools/checkowners.py
@@ -52,12 +52,13 @@
noparent = 'set +noparent'
email = '([^@ ]+@[^ @]+|\\*)'
emails = '(%s( *, *%s)*)' % (email, email)
- directive = '(%s|%s)' % (emails, noparent)
+ file_directive = 'file: *([^ :]+ *: *)?[^ ]+'
+ directive = '(%s|%s|%s)' % (emails, noparent, file_directive)
glob = '[a-zA-Z0-9_\\.\\-\\*\\?]+'
globs = '(%s( *, *%s)*)' % (glob, glob)
perfile = 'per-file +' + globs + ' *= *' + directive
include = 'include +([^ :]+ *: *)?[^ ]+'
- pats = '(|%s|%s|%s|%s)$' % (noparent, email, perfile, include)
+ pats = '(|%s|%s|%s|%s|%s)$' % (noparent, email, perfile, include, file_directive)
patterns = re.compile(pats)
address_pattern = re.compile('([^@ ]+@[^ @]+)')
perfile_pattern = re.compile('per-file +.*=(.*)')
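
Illustrative sketch only, not part of the change: a standalone rebuild of the updated checkowners.py patterns showing the kind of OWNERS lines the new `file:` directive is intended to accept. The OWNERS lines being matched are hypothetical examples.

    import re

    # Rebuilt from the hunk above as a standalone snippet.
    noparent = 'set +noparent'
    email = '([^@ ]+@[^ @]+|\\*)'
    emails = '(%s( *, *%s)*)' % (email, email)
    file_directive = 'file: *([^ :]+ *: *)?[^ ]+'
    directive = '(%s|%s|%s)' % (emails, noparent, file_directive)
    glob = '[a-zA-Z0-9_\\.\\-\\*\\?]+'
    globs = '(%s( *, *%s)*)' % (glob, glob)
    perfile = 'per-file +' + globs + ' *= *' + directive
    include = 'include +([^ :]+ *: *)?[^ ]+'
    pats = '(|%s|%s|%s|%s|%s)$' % (noparent, email, perfile, include, file_directive)
    patterns = re.compile(pats)

    # Hypothetical OWNERS lines; both forms should now be accepted.
    assert patterns.match('file:platform/build:/OWNERS')
    assert patterns.match('per-file version_defaults.mk = file:/OWNERS')
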
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
new file mode 100644
index 0000000..d14c94f
--- /dev/null
+++ b/tools/releasetools/apex_utils.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os.path
+import re
+import shlex
+import sys
+
+import common
+
+logger = logging.getLogger(__name__)
+
+
+class ApexInfoError(Exception):
+ """An Exception raised during Apex Information command."""
+
+ def __init__(self, message):
+ Exception.__init__(self, message)
+
+
+class ApexSigningError(Exception):
+ """An Exception raised during Apex Payload signing."""
+
+ def __init__(self, message):
+ Exception.__init__(self, message)
+
+
+def SignApexPayload(payload_file, payload_key_path, payload_key_name, algorithm,
+ salt, signing_args=None):
+ """Signs a given payload_file with the payload key."""
+ # Add the new footer. Old footer, if any, will be replaced by avbtool.
+ cmd = ['avbtool', 'add_hashtree_footer',
+ '--do_not_generate_fec',
+ '--algorithm', algorithm,
+ '--key', payload_key_path,
+ '--prop', 'apex.key:{}'.format(payload_key_name),
+ '--image', payload_file,
+ '--salt', salt]
+ if signing_args:
+ cmd.extend(shlex.split(signing_args))
+
+ try:
+ common.RunAndCheckOutput(cmd)
+ except common.ExternalError as e:
+ raise ApexSigningError, \
+ 'Failed to sign APEX payload {} with {}:\n{}'.format(
+ payload_file, payload_key_path, e), sys.exc_info()[2]
+
+ # Verify the signed payload image with specified public key.
+ logger.info('Verifying %s', payload_file)
+ VerifyApexPayload(payload_file, payload_key_path)
+
+
+def VerifyApexPayload(payload_file, payload_key):
+ """Verifies the APEX payload signature with the given key."""
+ cmd = ['avbtool', 'verify_image', '--image', payload_file,
+ '--key', payload_key]
+ try:
+ common.RunAndCheckOutput(cmd)
+ except common.ExternalError as e:
+ raise ApexSigningError, \
+ 'Failed to validate payload signing for {} with {}:\n{}'.format(
+ payload_file, payload_key, e), sys.exc_info()[2]
+
+
+def ParseApexPayloadInfo(payload_path):
+ """Parses the APEX payload info.
+
+ Args:
+ payload_path: The path to the payload image.
+
+ Raises:
+ ApexInfoError on parsing errors.
+
+ Returns:
+ A dict that contains payload property-value pairs. The dict should at least
+ contain Algorithm, Salt and apex.key.
+ """
+ if not os.path.exists(payload_path):
+ raise ApexInfoError('Failed to find image: {}'.format(payload_path))
+
+ cmd = ['avbtool', 'info_image', '--image', payload_path]
+ try:
+ output = common.RunAndCheckOutput(cmd)
+ except common.ExternalError as e:
+ raise ApexInfoError, \
+ 'Failed to get APEX payload info for {}:\n{}'.format(
+ payload_path, e), sys.exc_info()[2]
+
+ # Extract the Algorithm / Salt / Prop info from payload (i.e. an image signed
+ # with avbtool). For example,
+ # Algorithm: SHA256_RSA4096
+ PAYLOAD_INFO_PATTERN = (
+ r'^\s*(?P<key>Algorithm|Salt|Prop)\:\s*(?P<value>.*?)$')
+ payload_info_matcher = re.compile(PAYLOAD_INFO_PATTERN)
+
+ payload_info = {}
+ for line in output.split('\n'):
+ line_info = payload_info_matcher.match(line)
+ if not line_info:
+ continue
+
+ key, value = line_info.group('key'), line_info.group('value')
+
+ if key == 'Prop':
+ # Further extract the property key-value pair, from a 'Prop:' line. For
+ # example,
+ # Prop: apex.key -> 'com.android.runtime'
+ # Note that avbtool writes single or double quotes around values.
+ PROPERTY_DESCRIPTOR_PATTERN = r'^\s*(?P<key>.*?)\s->\s*(?P<value>.*?)$'
+
+ prop_matcher = re.compile(PROPERTY_DESCRIPTOR_PATTERN)
+ prop = prop_matcher.match(value)
+ if not prop:
+ raise ApexInfoError(
+ 'Failed to parse prop string {}'.format(value))
+
+ prop_key, prop_value = prop.group('key'), prop.group('value')
+ if prop_key == 'apex.key':
+ # avbtool dumps the prop value with repr(), which contains single /
+ # double quotes that we don't want.
+ payload_info[prop_key] = prop_value.strip('\"\'')
+
+ else:
+ payload_info[key] = value
+
+ # Sanity check.
+ for key in ('Algorithm', 'Salt', 'apex.key'):
+ if key not in payload_info:
+ raise ApexInfoError(
+ 'Failed to find {} prop in {}'.format(key, payload_path))
+
+ return payload_info
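
An illustrative usage sketch, not part of the commit, chaining the apex_utils helpers added above. It assumes `avbtool` is on PATH; the payload path, key path, and key name are hypothetical.

    import apex_utils

    payload = 'apex_payload.img'       # hypothetical unsigned payload image
    key = 'com.android.example.pem'    # hypothetical PEM signing key

    # Sign, then read back the signing parameters, then verify.
    apex_utils.SignApexPayload(
        payload, key, 'com.android.example', 'SHA256_RSA2048', 'aabb1122' * 8)
    info = apex_utils.ParseApexPayloadInfo(payload)
    # info holds at least 'Algorithm', 'Salt' and 'apex.key'.
    apex_utils.VerifyApexPayload(payload, key)
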
diff --git a/tools/releasetools/build_super_image.py b/tools/releasetools/build_super_image.py
index bb0e641..38ea3d6 100755
--- a/tools/releasetools/build_super_image.py
+++ b/tools/releasetools/build_super_image.py
@@ -24,9 +24,8 @@
- target files package. Same as above, but extracts the archive before
building super image.
- a dictionary file containing input arguments to build. Check
- `dump_dynamic_partitions_info' for details.
+ `dump-super-image-info' for details.
In addition:
- - "ab_update" needs to be true for A/B devices.
- If source images should be included in the output image (for super.img
and super split images), a list of "*_image" should be paths of each
source images.
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index b9f39a6..4b0d4c7 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -39,6 +39,7 @@
"""
+import logging
import os
import re
import subprocess
@@ -52,6 +53,8 @@
sys.exit(1)
+logger = logging.getLogger(__name__)
+
# Work around a bug in Python's zipfile module that prevents opening of zipfiles
# if any entry has an extra field of between 1 and 3 bytes (which is common with
# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
@@ -165,6 +168,7 @@
class APK(object):
+
def __init__(self, full_filename, filename):
self.filename = filename
self.certs = None
@@ -241,12 +245,12 @@
# must decompress them individually before we perform any analysis.
# This is the list of wildcards of files we extract from |filename|.
- apk_extensions = ['*.apk']
+ apk_extensions = ['*.apk', '*.apex']
self.certmap, compressed_extension = common.ReadApkCerts(
- zipfile.ZipFile(filename, "r"))
+ zipfile.ZipFile(filename))
if compressed_extension:
- apk_extensions.append("*.apk" + compressed_extension)
+ apk_extensions.append('*.apk' + compressed_extension)
d = common.UnzipTemp(filename, apk_extensions)
self.apks = {}
@@ -269,7 +273,7 @@
os.remove(os.path.join(dirpath, fn))
fn = uncompressed_fn
- if fn.endswith(".apk"):
+ if fn.endswith(('.apk', '.apex')):
fullname = os.path.join(dirpath, fn)
displayname = fullname[len(d)+1:]
apk = APK(fullname, displayname)
@@ -415,6 +419,8 @@
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
ALL_CERTS.FindLocalCerts()
Push("input target_files:")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index cfa81e1..e71667b 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -46,9 +46,16 @@
class Options(object):
def __init__(self):
+ base_out_path = os.getenv('OUT_DIR_COMMON_BASE')
+ if base_out_path is None:
+ base_search_path = "out"
+ else:
+ base_search_path = os.path.join(base_out_path,
+ os.path.basename(os.getcwd()))
+
platform_search_path = {
- "linux2": "out/host/linux-x86",
- "darwin": "out/host/darwin-x86",
+ "linux2": os.path.join(base_search_path, "host/linux-x86"),
+ "darwin": os.path.join(base_search_path, "host/darwin-x86"),
}
self.search_path = platform_search_path.get(sys.platform)
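
For illustration only, a minimal sketch of the OUT_DIR_COMMON_BASE handling above; the paths are hypothetical (the real code uses os.getcwd() rather than a literal tree path).

    import os

    # Hypothetical values: source tree at /src/aosp, OUT_DIR_COMMON_BASE at
    # /buildbot/out_dirs.
    base_out_path = '/buildbot/out_dirs'
    base_search_path = os.path.join(base_out_path, os.path.basename('/src/aosp'))
    print(os.path.join(base_search_path, 'host/linux-x86'))
    # -> /buildbot/out_dirs/aosp/host/linux-x86
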
@@ -546,11 +553,7 @@
"""
if key is None:
key = info_dict["avb_" + partition + "_key_path"]
- avbtool = os.getenv('AVBTOOL') or info_dict["avb_avbtool"]
- pubkey_path = MakeTempFile(prefix="avb-", suffix=".pubkey")
- RunAndCheckOutput(
- [avbtool, "extract_public_key", "--key", key, "--output", pubkey_path])
-
+ pubkey_path = ExtractAvbPublicKey(key)
rollback_index_location = info_dict[
"avb_" + partition + "_rollback_index_location"]
return "{}:{}:{}".format(partition, rollback_index_location, pubkey_path)
@@ -2117,6 +2120,21 @@
return pubkey
+def ExtractAvbPublicKey(key):
+ """Extracts the AVB public key from the given public or private key.
+
+ Args:
+ key: The input key file, which should be PEM-encoded public or private key.
+
+ Returns:
+ The path to the extracted AVB public key file.
+ """
+ output = MakeTempFile(prefix='avb-', suffix='.avbpubkey')
+ RunAndCheckOutput(
+ ['avbtool', 'extract_public_key', "--key", key, "--output", output])
+ return output
+
+
def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
info_dict=None):
"""Generates the recovery-from-boot patch and writes the script to output.
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 2645829..5014516 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -25,10 +25,22 @@
The input target files package containing system bits. This is a zip
archive.
+ --system-item-list system-item-list-file
+ The optional path to a newline-separated config file that replaces the
+ contents of default_system_item_list if provided.
+
+ --system-misc-info-keys system-misc-info-keys-file
+ The optional path to a newline-separated config file that replaces the
+ contents of default_system_misc_info_keys if provided.
+
--other-target-files other-target-files-zip-archive
The input target files package containing other bits. This is a zip
archive.
+ --other-item-list other-item-list-file
+ The optional path to a newline-separated config file that replaces the
+ contents of default_other_item_list if provided.
+
--output-target-files output-target-files-package
The output merged target files package. Also a zip archive.
"""
@@ -48,16 +60,19 @@
OPTIONS = common.OPTIONS
OPTIONS.verbose = True
OPTIONS.system_target_files = None
+OPTIONS.system_item_list = None
+OPTIONS.system_misc_info_keys = None
OPTIONS.other_target_files = None
+OPTIONS.other_item_list = None
OPTIONS.output_target_files = None
OPTIONS.keep_tmp = False
-# system_extract_as_is_item_list is a list of items to extract from the partial
+# default_system_item_list is a list of items to extract from the partial
# system target files package as is, meaning these items will land in the
# output target files package exactly as they appear in the input partial
# system target files package.
-system_extract_as_is_item_list = [
+default_system_item_list = [
'META/apkcerts.txt',
'META/filesystem_config.txt',
'META/root_filesystem_config.txt',
@@ -78,10 +93,10 @@
'META/*',
]
-# system_misc_info_keys is a list of keys to obtain from the system instance of
+# default_system_misc_info_keys is a list of keys to obtain from the system instance of
# META/misc_info.txt. The remaining keys from the other instance.
-system_misc_info_keys = [
+default_system_misc_info_keys = [
'avb_system_hashtree_enable',
'avb_system_add_hashtree_footer_args',
'avb_system_key_path',
@@ -98,12 +113,12 @@
'system_size',
]
-# other_extract_as_is_item_list is a list of items to extract from the partial
+# default_other_item_list is a list of items to extract from the partial
# other target files package as is, meaning these items will land in the output
# target files package exactly as they appear in the input partial other target
# files package.
-other_extract_as_is_item_list = [
+default_other_item_list = [
'META/boot_filesystem_config.txt',
'META/otakeys.txt',
'META/releasetools.py',
@@ -119,7 +134,7 @@
'VENDOR/*',
]
-# other_extract_for_merge_item_list is a list of items to extract from the
+# other_extract_special_item_list is a list of items to extract from the
# partial other target files package that need some special processing, such as
# some sort of combination with items from the partial system target files
# package.
@@ -172,6 +187,18 @@
filtered_extract_item_list)
+def read_config_list(config_file_path):
+ """Reads a config file into a list of strings.
+
+ Expects the file to be newline-separated.
+
+ Args:
+ config_file_path: The path to the config file to open and read.
+ """
+ with open(config_file_path) as config_file:
+ return config_file.read().splitlines()
+
+
def process_ab_partitions_txt(
system_target_files_temp_dir,
other_target_files_temp_dir,
@@ -223,7 +250,8 @@
def process_misc_info_txt(
system_target_files_temp_dir,
other_target_files_temp_dir,
- output_target_files_temp_dir):
+ output_target_files_temp_dir,
+ system_misc_info_keys):
"""Perform special processing for META/misc_info.txt
This function merges the contents of the META/misc_info.txt files from the
@@ -242,6 +270,9 @@
output_target_files_temp_dir: The name of a directory that will be used
to create the output target files package after all the special cases
are processed.
+
+ system_misc_info_keys: A list of keys to obtain from the system instance
+ of META/misc_info.txt. The remaining keys are taken from the other instance.
"""
def read_helper(d):
@@ -258,8 +289,7 @@
read_helper(other_target_files_temp_dir))
# Replace certain values in merged_info_dict with values from
- # system_info_dict. TODO(b/124467065): This should be more flexible than
- # using the hard-coded system_misc_info_keys.
+ # system_info_dict.
for key in system_misc_info_keys:
merged_info_dict[key] = system_info_dict[key]
@@ -355,7 +385,8 @@
temp_dir,
system_target_files_temp_dir,
other_target_files_temp_dir,
- output_target_files_temp_dir):
+ output_target_files_temp_dir,
+ system_misc_info_keys):
"""Perform special-case processing for certain target files items.
Certain files in the output target files package require special-case
@@ -374,6 +405,9 @@
output_target_files_temp_dir: The name of a directory that will be used
to create the output target files package after all the special cases
are processed.
+
+ system_misc_info_keys: A list of keys to obtain from the system instance
+ of META/misc_info.txt. The remaining keys are taken from the other instance.
"""
process_ab_partitions_txt(
@@ -384,7 +418,8 @@
process_misc_info_txt(
system_target_files_temp_dir=system_target_files_temp_dir,
other_target_files_temp_dir=other_target_files_temp_dir,
- output_target_files_temp_dir=output_target_files_temp_dir)
+ output_target_files_temp_dir=output_target_files_temp_dir,
+ system_misc_info_keys=system_misc_info_keys)
process_file_contexts_bin(
temp_dir=temp_dir,
@@ -394,7 +429,10 @@
def merge_target_files(
temp_dir,
system_target_files,
+ system_item_list,
+ system_misc_info_keys,
other_target_files,
+ other_item_list,
output_target_files):
"""Merge two target files packages together.
@@ -410,13 +448,32 @@
system_target_files: The name of the zip archive containing the system
partial target files package.
+ system_item_list: The list of items to extract from the partial system
+ target files package as is, meaning these items will land in the output
+ target files package exactly as they appear in the input partial system
+ target files package.
+
+ system_misc_info_keys: The list of keys to obtain from the system instance
+ of META/misc_info.txt. The remaining keys are taken from the other instance.
+
other_target_files: The name of the zip archive containing the other
partial target files package.
+ other_item_list: The list of items to extract from the partial other
+ target files package as is, meaning these items will land in the output
+ target files package exactly as they appear in the input partial other
+ target files package.
+
output_target_files: The name of the output zip archive target files
package created by merging system and other.
"""
+ logger.info(
+ 'starting: merge system %s and other %s into output %s',
+ system_target_files,
+ other_target_files,
+ output_target_files)
+
# Create directory names that we'll use when we extract files from system,
# and other, and for zipping the final output.
@@ -431,7 +488,7 @@
extract_items(
target_files=system_target_files,
target_files_temp_dir=output_target_files_temp_dir,
- extract_item_list=system_extract_as_is_item_list)
+ extract_item_list=system_item_list)
# Extract "as is" items from the input other partial target files package. We
# extract them directly into the output temporary directory since the items
@@ -440,7 +497,7 @@
extract_items(
target_files=other_target_files,
target_files_temp_dir=output_target_files_temp_dir,
- extract_item_list=other_extract_as_is_item_list)
+ extract_item_list=other_item_list)
# Extract "special" items from the input system partial target files package.
# We extract these items to different directory since they require special
@@ -469,7 +526,8 @@
temp_dir=temp_dir,
system_target_files_temp_dir=system_target_files_temp_dir,
other_target_files_temp_dir=other_target_files_temp_dir,
- output_target_files_temp_dir=output_target_files_temp_dir)
+ output_target_files_temp_dir=output_target_files_temp_dir,
+ system_misc_info_keys=system_misc_info_keys)
# Regenerate IMAGES in the temporary directory.
@@ -520,25 +578,15 @@
common.RunAndWait(command, verbose=True)
-def merge_target_files_with_temp_dir(
- system_target_files,
- other_target_files,
- output_target_files,
- keep_tmp):
+def call_func_with_temp_dir(func, keep_tmp):
"""Manage the creation and cleanup of the temporary directory.
- This function wraps merge_target_files after first creating a temporary
+ This function calls the given function after first creating a temporary
directory. It also cleans up the temporary directory.
Args:
- system_target_files: The name of the zip archive containing the system
- partial target files package.
-
- other_target_files: The name of the zip archive containing the other
- partial target files package.
-
- output_target_files: The name of the output zip archive target files
- package created by merging system and other.
+ func: The function to call. Should accept one parameter, the path to
+ the temporary directory.
keep_tmp: Keep the temporary directory after processing is complete.
"""
@@ -547,20 +595,10 @@
# we use when we extract items from the input target files packages, and also
# a scratch directory that we use for temporary files.
- logger.info(
- 'starting: merge system %s and other %s into output %s',
- system_target_files,
- other_target_files,
- output_target_files)
-
temp_dir = common.MakeTempDir(prefix='merge_target_files_')
try:
- merge_target_files(
- temp_dir=temp_dir,
- system_target_files=system_target_files,
- other_target_files=other_target_files,
- output_target_files=output_target_files)
+ func(temp_dir)
except:
raise
finally:
@@ -573,7 +611,7 @@
def main():
"""The main function.
- Process command line arguments, then call merge_target_files_with_temp_dir to
+ Process command line arguments, then call merge_target_files to
perform the heavy lifting.
"""
@@ -582,8 +620,14 @@
def option_handler(o, a):
if o == '--system-target-files':
OPTIONS.system_target_files = a
+ elif o == '--system-item-list':
+ OPTIONS.system_item_list = a
+ elif o == '--system-misc-info-keys':
+ OPTIONS.system_misc_info_keys = a
elif o == '--other-target-files':
OPTIONS.other_target_files = a
+ elif o == '--other-item-list':
+ OPTIONS.other_item_list = a
elif o == '--output-target-files':
OPTIONS.output_target_files = a
elif o == '--keep_tmp':
@@ -596,7 +640,10 @@
sys.argv[1:], __doc__,
extra_long_opts=[
'system-target-files=',
+ 'system-item-list=',
+ 'system-misc-info-keys=',
'other-target-files=',
+ 'other-item-list=',
'output-target-files=',
"keep_tmp",
],
@@ -609,11 +656,31 @@
common.Usage(__doc__)
sys.exit(1)
- merge_target_files_with_temp_dir(
- system_target_files=OPTIONS.system_target_files,
- other_target_files=OPTIONS.other_target_files,
- output_target_files=OPTIONS.output_target_files,
- keep_tmp=OPTIONS.keep_tmp)
+ if OPTIONS.system_item_list:
+ system_item_list = read_config_list(OPTIONS.system_item_list)
+ else:
+ system_item_list = default_system_item_list
+
+ if OPTIONS.system_misc_info_keys:
+ system_misc_info_keys = read_config_list(OPTIONS.system_misc_info_keys)
+ else:
+ system_misc_info_keys = default_system_misc_info_keys
+
+ if OPTIONS.other_item_list:
+ other_item_list = read_config_list(OPTIONS.other_item_list)
+ else:
+ other_item_list = default_other_item_list
+
+ call_func_with_temp_dir(
+ lambda temp_dir: merge_target_files(
+ temp_dir=temp_dir,
+ system_target_files=OPTIONS.system_target_files,
+ system_item_list=system_item_list,
+ system_misc_info_keys=system_misc_info_keys,
+ other_target_files=OPTIONS.other_target_files,
+ other_item_list=other_item_list,
+ output_target_files=OPTIONS.output_target_files),
+ OPTIONS.keep_tmp)
if __name__ == '__main__':
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index fa9e2e9..fe40936 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -235,8 +235,9 @@
METADATA_NAME = 'META-INF/com/android/metadata'
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
+AB_PARTITIONS = 'META/ab_partitions.txt'
UNZIP_PATTERN = ['IMAGES/*', 'META/*']
-SUPER_SPLIT_PATTERN = ['OTA/super_*.img']
+RETROFIT_DAP_UNZIP_PATTERN = ['OTA/super_*.img', AB_PARTITIONS]
class BuildInfo(object):
@@ -1060,6 +1061,9 @@
if OPTIONS.wipe_user_data:
metadata['ota-wipe'] = 'yes'
+ if OPTIONS.retrofit_dynamic_partitions:
+ metadata['ota-retrofit-dynamic-partitions'] = 'yes'
+
is_incremental = source_info is not None
if is_incremental:
metadata['pre-build'] = source_info.fingerprint
@@ -1852,7 +1856,8 @@
def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
- super_block_devices):
+ super_block_devices,
+ dynamic_partition_list):
"""Returns a target-files.zip for retrofitting dynamic partitions.
This allows brillo_update_payload to generate an OTA based on the exact
@@ -1861,6 +1866,7 @@
Args:
input_file: The input target-files.zip filename.
super_block_devices: The list of super block devices
+ dynamic_partition_list: The list of dynamic partitions
Returns:
The filename of target-files.zip with *.img replaced with super_*.img for
@@ -1877,8 +1883,34 @@
with zipfile.ZipFile(input_file, 'r') as input_zip:
namelist = input_zip.namelist()
+ input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
+
+ # Remove partitions from META/ab_partitions.txt that are in
+ # dynamic_partition_list but not in super_block_devices, so that
+ # brillo_update_payload won't generate updates for those logical partitions.
+ ab_partitions_file = os.path.join(input_tmp, *AB_PARTITIONS.split('/'))
+ with open(ab_partitions_file) as f:
+ ab_partitions_lines = f.readlines()
+ ab_partitions = [line.strip() for line in ab_partitions_lines]
+ # Assert that all super_block_devices are in ab_partitions
+ super_device_not_updated = [partition for partition in super_block_devices
+ if partition not in ab_partitions]
+ assert not super_device_not_updated, \
+ "{} is in super_block_devices but not in {}".format(
+ super_device_not_updated, AB_PARTITIONS)
+ # ab_partitions -= (dynamic_partition_list - super_block_devices)
+ new_ab_partitions = common.MakeTempFile(prefix="ab_partitions", suffix=".txt")
+ with open(new_ab_partitions, 'w') as f:
+ for partition in ab_partitions:
+ if (partition in dynamic_partition_list and
+ partition not in super_block_devices):
+ logger.info("Dropping %s from ab_partitions.txt", partition)
+ continue
+ f.write(partition + "\n")
+ to_delete = [AB_PARTITIONS]
+
# Always skip postinstall for a retrofit update.
- to_delete = [POSTINSTALL_CONFIG]
+ to_delete += [POSTINSTALL_CONFIG]
# Delete dynamic_partitions_info.txt so that brillo_update_payload thinks this
# is a regular update on devices without dynamic partitions support.
@@ -1890,7 +1922,6 @@
common.ZipDelete(target_file, to_delete)
- input_tmp = common.UnzipTemp(input_file, SUPER_SPLIT_PATTERN)
target_zip = zipfile.ZipFile(target_file, 'a', allowZip64=True)
# Write super_{foo}.img as {foo}.img.
@@ -1900,6 +1931,9 @@
unzipped_file = os.path.join(input_tmp, *src.split('/'))
common.ZipWrite(target_zip, unzipped_file, arcname=dst)
+ # Write new ab_partitions.txt file
+ common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
+
common.ZipClose(target_zip)
return target_file
@@ -1928,7 +1962,8 @@
if OPTIONS.retrofit_dynamic_partitions:
target_file = GetTargetFilesZipForRetrofitDynamicPartitions(
- target_file, target_info.get("super_block_devices").strip().split())
+ target_file, target_info.get("super_block_devices").strip().split(),
+ target_info.get("dynamic_partition_list").strip().split())
elif OPTIONS.skip_postinstall:
target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
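
An illustrative sketch, not part of the commit, of the ab_partitions filtering rule above (ab_partitions -= (dynamic_partition_list - super_block_devices)); all partition names are hypothetical.

    # Hypothetical retrofit setup: system and vendor double as super block
    # devices, while product is a purely logical partition.
    super_block_devices = ['system', 'vendor']
    dynamic_partition_list = ['system', 'vendor', 'product']
    ab_partitions = ['boot', 'system', 'vendor', 'product']

    kept = [p for p in ab_partitions
            if not (p in dynamic_partition_list and
                    p not in super_block_devices)]
    # kept == ['boot', 'system', 'vendor']; 'product' is dropped so that
    # brillo_update_payload does not try to update it as a block device.
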
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index a07f67f..f1f032d 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -21,11 +21,17 @@
Usage: sign_target_files_apks [flags] input_target_files output_target_files
-e (--extra_apks) <name,name,...=key>
- Add extra APK name/key pairs as though they appeared in
- apkcerts.txt (so mappings specified by -k and -d are applied).
- Keys specified in -e override any value for that app contained
- in the apkcerts.txt file. Option may be repeated to give
- multiple extra packages.
+ Add extra APK/APEX name/key pairs as though they appeared in apkcerts.txt
+ or apexkeys.txt (so mappings specified by -k and -d are applied). Keys
+ specified in -e override any value for that app contained in the
+ apkcerts.txt file, or the container key for an APEX. Option may be
+ repeated to give multiple extra packages.
+
+ --extra_apex_payload_key <name=key>
+ Add a mapping for APEX package name to payload signing key, which will
+ override the default payload signing key in apexkeys.txt. Note that the
+ container key should be overridden via the `--extra_apks` flag above.
+ Option may be repeated for multiple APEXes.
--skip_apks_with_path_prefix <prefix>
Skip signing an APK if it has the matching prefix in its path. The prefix
@@ -90,7 +96,7 @@
Use the specified algorithm (e.g. SHA256_RSA4096) and the key to AVB-sign
the specified image. Otherwise it uses the existing values in info dict.
- --avb_{boot,system,vendor,dtbo,vbmeta}_extra_args <args>
+ --avb_{apex,boot,system,vendor,dtbo,vbmeta}_extra_args <args>
Specify any additional args that are needed to AVB-sign the image
(e.g. "--signing_helper /path/to/helper"). The args will be appended to
the existing ones in info dict.
@@ -102,6 +108,8 @@
import copy
import errno
import gzip
+import itertools
+import logging
import os
import re
import shutil
@@ -113,6 +121,7 @@
from xml.etree import ElementTree
import add_img_to_target_files
+import apex_utils
import common
@@ -121,9 +130,12 @@
sys.exit(1)
+logger = logging.getLogger(__name__)
+
OPTIONS = common.OPTIONS
OPTIONS.extra_apks = {}
+OPTIONS.extra_apex_payload_keys = {}
OPTIONS.skip_apks_with_path_prefix = set()
OPTIONS.key_map = {}
OPTIONS.rebuild_recovery = False
@@ -151,6 +163,41 @@
return certmap
+def GetApexKeys(keys_info, key_map):
+ """Gets APEX payload and container signing keys by applying the mapping rules.
+
+ We currently don't allow PRESIGNED payload / container keys.
+
+ Args:
+ keys_info: A dict that maps from APEX filenames to a tuple of (payload_key,
+ container_key).
+ key_map: A dict that overrides the keys, specified via command-line input.
+
+ Returns:
+ A dict that contains the updated APEX key mapping, which should be used for
+ the current signing.
+ """
+ # Apply all the --extra_apex_payload_key options to override the payload
+ # signing keys in the given keys_info.
+ for apex, key in OPTIONS.extra_apex_payload_keys.items():
+ assert key, 'Presigned APEX payload for {} is not allowed'.format(apex)
+ keys_info[apex] = (key, keys_info[apex][1])
+
+ # Apply the key remapping to container keys.
+ for apex, (payload_key, container_key) in keys_info.items():
+ keys_info[apex] = (payload_key, key_map.get(container_key, container_key))
+
+ # Apply all the --extra_apks options to override the container keys.
+ for apex, key in OPTIONS.extra_apks.items():
+ # Skip non-APEX containers.
+ if apex not in keys_info:
+ continue
+ assert key, 'Presigned APEX container for {} is not allowed'.format(apex)
+ keys_info[apex] = (keys_info[apex][0], key_map.get(key, key))
+
+ return keys_info
+
+
def GetApkFileInfo(filename, compressed_extension, skipped_prefixes):
"""Returns the APK info based on the given filename.
@@ -180,11 +227,8 @@
# skipped_prefixes should be one of set/list/tuple types. Other types such as
# str shouldn't be accepted.
- assert (isinstance(skipped_prefixes, tuple) or
- isinstance(skipped_prefixes, set) or
- isinstance(skipped_prefixes, list)), \
- "Invalid skipped_prefixes input type: {}".format(
- type(skipped_prefixes))
+ assert isinstance(skipped_prefixes, (set, list, tuple)), \
+ "Invalid skipped_prefixes input type: {}".format(type(skipped_prefixes))
compressed_apk_extension = (
".apk" + compressed_extension if compressed_extension else None)
@@ -200,34 +244,45 @@
return (True, is_compressed, should_be_skipped)
-def CheckAllApksSigned(input_tf_zip, apk_key_map, compressed_extension):
- """Checks that all the APKs have keys specified, otherwise errors out.
+def CheckApkAndApexKeysAvailable(input_tf_zip, known_keys,
+ compressed_extension):
+ """Checks that all the APKs and APEXes have keys specified.
Args:
input_tf_zip: An open target_files zip file.
- apk_key_map: A dict of known signing keys key'd by APK names.
+ known_keys: A set of APKs and APEXes that have known signing keys.
compressed_extension: The extension string of compressed APKs, such as
- ".gz", or None if there's no compressed APKs.
+ '.gz', or None if there's no compressed APKs.
Raises:
- AssertionError: On finding unknown APKs.
+ AssertionError: On finding unknown APKs and APEXes.
"""
- unknown_apks = []
+ unknown_files = []
for info in input_tf_zip.infolist():
+ # Handle APEXes first, e.g. SYSTEM/apex/com.android.tzdata.apex.
+ if (info.filename.startswith('SYSTEM/apex') and
+ info.filename.endswith('.apex')):
+ name = os.path.basename(info.filename)
+ if name not in known_keys:
+ unknown_files.append(name)
+ continue
+
+ # And APKs.
(is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
info.filename, compressed_extension, OPTIONS.skip_apks_with_path_prefix)
if not is_apk or should_be_skipped:
continue
+
name = os.path.basename(info.filename)
if is_compressed:
name = name[:-len(compressed_extension)]
- if name not in apk_key_map:
- unknown_apks.append(name)
+ if name not in known_keys:
+ unknown_files.append(name)
- assert not unknown_apks, \
+ assert not unknown_files, \
("No key specified for:\n {}\n"
"Use '-e <apkname>=' to specify a key (which may be an empty string to "
- "not sign this apk).".format("\n ".join(unknown_apks)))
+ "not sign this apk).".format("\n ".join(unknown_files)))
def SignApk(data, keyname, pw, platform_api_level, codename_to_api_level_map,
@@ -293,9 +348,69 @@
return data
+def SignApex(apex_data, payload_key, container_key, container_pw,
+ codename_to_api_level_map, signing_args=None):
+ """Signs the current APEX with the given payload/container keys.
+
+ Args:
+ apex_data: Raw APEX data.
+ payload_key: The path to payload signing key (w/o extension).
+ container_key: The path to container signing key (w/o extension).
+ container_pw: The matching password of the container_key, or None.
+ codename_to_api_level_map: A dict that maps from codename to API level.
+ signing_args: Additional args to be passed to the payload signer.
+
+ Returns:
+ (signed_apex, payload_key_name): signed_apex is the path to the signed APEX
+ file; payload_key_name is a str of the payload signing key name (e.g.
+ com.android.tzdata).
+ """
+ apex_file = common.MakeTempFile(prefix='apex-', suffix='.apex')
+ with open(apex_file, 'wb') as apex_fp:
+ apex_fp.write(apex_data)
+
+ APEX_PAYLOAD_IMAGE = 'apex_payload.img'
+
+ # Signing an APEX is a two step process.
+ # 1. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given payload_key.
+ payload_dir = common.MakeTempDir(prefix='apex-payload-')
+ with zipfile.ZipFile(apex_file) as apex_fd:
+ payload_file = apex_fd.extract(APEX_PAYLOAD_IMAGE, payload_dir)
+
+ payload_info = apex_utils.ParseApexPayloadInfo(payload_file)
+ apex_utils.SignApexPayload(
+ payload_file,
+ payload_key,
+ payload_info['apex.key'],
+ payload_info['Algorithm'],
+ payload_info['Salt'],
+ signing_args)
+
+ common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
+ apex_zip = zipfile.ZipFile(apex_file, 'a')
+ common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
+ common.ZipClose(apex_zip)
+
+ # 2. Sign the overall APEX container with container_key.
+ signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
+ common.SignFile(
+ apex_file,
+ signed_apex,
+ container_key,
+ container_pw,
+ codename_to_api_level_map=codename_to_api_level_map)
+
+ signed_and_aligned_apex = common.MakeTempFile(
+ prefix='apex-container-', suffix='.apex')
+ common.RunAndCheckOutput(
+ ['zipalign', '-f', '4096', signed_apex, signed_and_aligned_apex])
+
+ return (signed_and_aligned_apex, payload_info['apex.key'])
+
+
def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
- apk_key_map, key_passwords, platform_api_level,
- codename_to_api_level_map,
+ apk_keys, apex_keys, key_passwords,
+ platform_api_level, codename_to_api_level_map,
compressed_extension):
# maxsize measures the maximum filename length, including the ones to be
# skipped.
@@ -304,6 +419,10 @@
if GetApkFileInfo(i.filename, compressed_extension, [])[0]])
system_root_image = misc_info.get("system_root_image") == "true"
+ # A dict of APEX payload public keys that should be updated, i.e. the files
+ # under '/system/etc/security/apex/'.
+ updated_apex_payload_keys = {}
+
for info in input_tf_zip.infolist():
filename = info.filename
if filename.startswith("IMAGES/"):
@@ -331,7 +450,7 @@
if is_compressed:
name = name[:-len(compressed_extension)]
- key = apk_key_map[name]
+ key = apk_keys[name]
if key not in common.SPECIAL_CERT_STRINGS:
print(" signing: %-*s (%s)" % (maxsize, name, key))
signed_data = SignApk(data, key, key_passwords[key], platform_api_level,
@@ -344,6 +463,30 @@
" (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
+ # Sign bundled APEX files.
+ elif filename.startswith("SYSTEM/apex") and filename.endswith(".apex"):
+ name = os.path.basename(filename)
+ payload_key, container_key = apex_keys[name]
+
+ print(" signing: %-*s container (%s)" % (maxsize, name, container_key))
+ print(" : %-*s payload (%s)" % (maxsize, name, payload_key))
+
+ (signed_apex, payload_key_name) = SignApex(
+ data,
+ payload_key,
+ container_key,
+ key_passwords[container_key],
+ codename_to_api_level_map,
+ OPTIONS.avb_extra_args.get('apex'))
+ common.ZipWrite(output_tf_zip, signed_apex, filename)
+
+ updated_apex_payload_keys[payload_key_name] = payload_key
+
+ # AVB public keys for the installed APEXes, which will be updated later.
+ elif (os.path.dirname(filename) == 'SYSTEM/etc/security/apex' and
+ filename != 'SYSTEM/etc/security/apex/'):
+ continue
+
# System properties.
elif filename in ("SYSTEM/build.prop",
"VENDOR/build.prop",
@@ -406,6 +549,30 @@
else:
common.ZipWriteStr(output_tf_zip, out_info, data)
+ # Update APEX payload public keys.
+ for info in input_tf_zip.infolist():
+ filename = info.filename
+ if (os.path.dirname(filename) != 'SYSTEM/etc/security/apex' or
+ filename == 'SYSTEM/etc/security/apex/'):
+ continue
+
+ name = os.path.basename(filename)
+ assert name in updated_apex_payload_keys, \
+ 'Unsigned APEX payload key: {}'.format(filename)
+
+ key_path = updated_apex_payload_keys[name]
+ if not os.path.exists(key_path) and not key_path.endswith('.pem'):
+ key_path = '{}.pem'.format(key_path)
+ assert os.path.exists(key_path), \
+ 'Failed to find public key file {} for APEX {}'.format(
+ updated_apex_payload_keys[name], name)
+
+ print('Replacing APEX payload public key for {} with {}'.format(
+ name, key_path))
+
+ public_key = common.ExtractAvbPublicKey(key_path)
+ common.ZipWrite(output_tf_zip, public_key, arcname=filename)
+
if OPTIONS.replace_ota_keys:
ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
@@ -816,11 +983,72 @@
result = dict()
for codename in codenames:
codename = codename.strip()
- if len(codename) > 0:
+ if codename:
result[codename] = api_level
return result
+def ReadApexKeysInfo(tf_zip):
+ """Parses the APEX keys info from a given target-files zip.
+
+ Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns a
+ dict that contains the mapping from APEX names (e.g. com.android.tzdata) to a
+ tuple of (payload_key, container_key).
+
+ Args:
+ tf_zip: The input target_files ZipFile (already open).
+
+ Returns:
+ A dict that maps from APEX name to a (payload_key, container_key) tuple,
+ where payload_key is the path to the payload signing key and container_key
+ is the path to the container signing key.
+ """
+ keys = {}
+ for line in tf_zip.read("META/apexkeys.txt").split("\n"):
+ line = line.strip()
+ if not line:
+ continue
+ matches = re.match(
+ r'^name="(?P<NAME>.*)"\s+'
+ r'public_key="(?P<PAYLOAD_PUBLIC_KEY>.*)"\s+'
+ r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
+ r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
+ r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*)"$',
+ line)
+ if not matches:
+ continue
+
+ name = matches.group('NAME')
+ payload_public_key = matches.group("PAYLOAD_PUBLIC_KEY")
+ payload_private_key = matches.group("PAYLOAD_PRIVATE_KEY")
+
+ def CompareKeys(pubkey, pubkey_suffix, privkey, privkey_suffix):
+ pubkey_suffix_len = len(pubkey_suffix)
+ privkey_suffix_len = len(privkey_suffix)
+ return (pubkey.endswith(pubkey_suffix) and
+ privkey.endswith(privkey_suffix) and
+ pubkey[:-pubkey_suffix_len] == privkey[:-privkey_suffix_len])
+
+ PAYLOAD_PUBLIC_KEY_SUFFIX = '.avbpubkey'
+ PAYLOAD_PRIVATE_KEY_SUFFIX = '.pem'
+ if not CompareKeys(
+ payload_public_key, PAYLOAD_PUBLIC_KEY_SUFFIX,
+ payload_private_key, PAYLOAD_PRIVATE_KEY_SUFFIX):
+ raise ValueError("Failed to parse payload keys: \n{}".format(line))
+
+ container_cert = matches.group("CONTAINER_CERT")
+ container_private_key = matches.group("CONTAINER_PRIVATE_KEY")
+ if not CompareKeys(
+ container_cert, OPTIONS.public_key_suffix,
+ container_private_key, OPTIONS.private_key_suffix):
+ raise ValueError("Failed to parse container keys: \n{}".format(line))
+
+ keys[name] = (payload_private_key,
+ container_cert[:-len(OPTIONS.public_key_suffix)])
+
+ return keys
+
+
def main(argv):
key_mapping_options = []
@@ -831,6 +1059,9 @@
names = names.split(",")
for n in names:
OPTIONS.extra_apks[n] = key
+ elif o == "--extra_apex_payload_key":
+ apex_name, key = a.split("=")
+ OPTIONS.extra_apex_payload_keys[apex_name] = key
elif o == "--skip_apks_with_path_prefix":
# Sanity check the prefix, which must be in all upper case.
prefix = a.split('/')[0]
@@ -887,6 +1118,8 @@
OPTIONS.avb_algorithms['vendor'] = a
elif o == "--avb_vendor_extra_args":
OPTIONS.avb_extra_args['vendor'] = a
+ elif o == "--avb_apex_extra_args":
+ OPTIONS.avb_extra_args['apex'] = a
else:
return False
return True
@@ -896,6 +1129,7 @@
extra_opts="e:d:k:ot:",
extra_long_opts=[
"extra_apks=",
+ "extra_apex_payload_key=",
"skip_apks_with_path_prefix=",
"default_key_mappings=",
"key_mapping=",
@@ -904,6 +1138,7 @@
"replace_verity_public_key=",
"replace_verity_private_key=",
"replace_verity_keyid=",
+ "avb_apex_extra_args=",
"avb_vbmeta_algorithm=",
"avb_vbmeta_key=",
"avb_vbmeta_extra_args=",
@@ -926,6 +1161,8 @@
common.Usage(__doc__)
sys.exit(1)
+ common.InitLogging()
+
input_zip = zipfile.ZipFile(args[0], "r")
output_zip = zipfile.ZipFile(args[1], "w",
compression=zipfile.ZIP_DEFLATED,
@@ -935,18 +1172,25 @@
BuildKeyMap(misc_info, key_mapping_options)
- certmap, compressed_extension = common.ReadApkCerts(input_zip)
- apk_key_map = GetApkCerts(certmap)
- CheckAllApksSigned(input_zip, apk_key_map, compressed_extension)
+ apk_keys_info, compressed_extension = common.ReadApkCerts(input_zip)
+ apk_keys = GetApkCerts(apk_keys_info)
- key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
+ apex_keys_info = ReadApexKeysInfo(input_zip)
+ apex_keys = GetApexKeys(apex_keys_info, apk_keys)
+
+ CheckApkAndApexKeysAvailable(
+ input_zip,
+ set(apk_keys.keys()) | set(apex_keys.keys()),
+ compressed_extension)
+
+ key_passwords = common.GetKeyPasswords(
+ set(apk_keys.values()) | set(itertools.chain(*apex_keys.values())))
platform_api_level, _ = GetApiLevelAndCodename(input_zip)
codename_to_api_level_map = GetCodenameToApiLevelMap(input_zip)
ProcessTargetFiles(input_zip, output_zip, misc_info,
- apk_key_map, key_passwords,
- platform_api_level,
- codename_to_api_level_map,
+ apk_keys, apex_keys, key_passwords,
+ platform_api_level, codename_to_api_level_map,
compressed_extension)
common.ZipClose(input_zip)
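
A standalone illustrative sketch, not part of the commit, of the key-override precedence implemented by GetApexKeys() above; the APEX name and key paths are hypothetical.

    # Hypothetical inputs.
    keys_info = {'com.android.foo.apex': ('payload_default.pem', 'container_default')}
    key_map = {'container_default': 'container_release'}   # from -k / -d
    extra_apex_payload_keys = {'com.android.foo.apex': 'payload_release.pem'}
    extra_apks = {'com.android.foo.apex': 'container_override'}

    # 1. --extra_apex_payload_key overrides the payload signing key.
    for apex, key in extra_apex_payload_keys.items():
        keys_info[apex] = (key, keys_info[apex][1])
    # 2. The -k/-d key_map remaps container keys.
    for apex, (payload_key, container_key) in keys_info.items():
        keys_info[apex] = (payload_key, key_map.get(container_key, container_key))
    # 3. --extra_apks overrides container keys (also run through key_map).
    for apex, key in extra_apks.items():
        if apex in keys_info:
            keys_info[apex] = (keys_info[apex][0], key_map.get(key, key))

    # keys_info == {'com.android.foo.apex':
    #               ('payload_release.pem', 'container_override')}
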
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
new file mode 100644
index 0000000..2f8ee49
--- /dev/null
+++ b/tools/releasetools/test_apex_utils.py
@@ -0,0 +1,87 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import os.path
+
+import apex_utils
+import common
+import test_utils
+
+
+class ApexUtilsTest(test_utils.ReleaseToolsTestCase):
+
+ # echo "foo" | sha256sum
+ SALT = 'b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c'
+
+ def setUp(self):
+ self.testdata_dir = test_utils.get_testdata_dir()
+ # The default payload signing key.
+ self.payload_key = os.path.join(self.testdata_dir, 'testkey.key')
+
+ @staticmethod
+ def _GetTestPayload():
+ payload_file = common.MakeTempFile(prefix='apex-', suffix='.img')
+ with open(payload_file, 'wb') as payload_fp:
+ payload_fp.write(os.urandom(8192))
+ return payload_file
+
+ def test_ParseApexPayloadInfo(self):
+ payload_file = self._GetTestPayload()
+ apex_utils.SignApexPayload(
+ payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT)
+ payload_info = apex_utils.ParseApexPayloadInfo(payload_file)
+ self.assertEqual('SHA256_RSA2048', payload_info['Algorithm'])
+ self.assertEqual(self.SALT, payload_info['Salt'])
+ self.assertEqual('testkey', payload_info['apex.key'])
+
+ def test_SignApexPayload(self):
+ payload_file = self._GetTestPayload()
+ apex_utils.SignApexPayload(
+ payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT)
+ apex_utils.VerifyApexPayload(payload_file, self.payload_key)
+
+ def test_SignApexPayload_withSignerHelper(self):
+ payload_file = self._GetTestPayload()
+ payload_signer_args = '--signing_helper_with_files {}'.format(
+ os.path.join(self.testdata_dir, 'signing_helper.sh'))
+ apex_utils.SignApexPayload(
+ payload_file,
+ self.payload_key,
+ 'testkey', 'SHA256_RSA2048', self.SALT,
+ payload_signer_args)
+ apex_utils.VerifyApexPayload(payload_file, self.payload_key)
+
+ def test_SignApexPayload_invalidKey(self):
+ self.assertRaises(
+ apex_utils.ApexSigningError,
+ apex_utils.SignApexPayload,
+ self._GetTestPayload(),
+ os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+ 'testkey',
+ 'SHA256_RSA2048',
+ self.SALT)
+
+ def test_VerifyApexPayload_wrongKey(self):
+ payload_file = self._GetTestPayload()
+ apex_utils.SignApexPayload(
+ payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT)
+ apex_utils.VerifyApexPayload(payload_file, self.payload_key)
+ self.assertRaises(
+ apex_utils.ApexSigningError,
+ apex_utils.VerifyApexPayload,
+ payload_file,
+ os.path.join(self.testdata_dir, 'testkey_with_passwd.key'))
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index cfd070d..8709124 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -491,6 +491,13 @@
wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)
+ def test_ExtractAvbPublicKey(self):
+ privkey = os.path.join(self.testdata_dir, 'testkey.key')
+ pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
+ with open(common.ExtractAvbPublicKey(privkey)) as privkey_fp, \
+ open(common.ExtractAvbPublicKey(pubkey)) as pubkey_fp:
+ self.assertEqual(privkey_fp.read(), pubkey_fp.read())
+
def test_ParseCertificate(self):
cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
@@ -1218,7 +1225,7 @@
dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
self.assertNotIn("block_image_update", str(self.script),
- "Removed partition should not be patched.")
+ "Removed partition should not be patched.")
lines = self.get_op_list(self.output_path)
self.assertEqual(lines, ["remove foo"])
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 44703db..c2da907 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -415,6 +415,7 @@
# Reset the global options as in ota_from_target_files.py.
common.OPTIONS.incremental_source = None
common.OPTIONS.downgrade = False
+ common.OPTIONS.retrofit_dynamic_partitions = False
common.OPTIONS.timestamp = False
common.OPTIONS.wipe_user_data = False
common.OPTIONS.no_signing = False
@@ -517,6 +518,23 @@
},
metadata)
+ def test_GetPackageMetadata_retrofitDynamicPartitions(self):
+ target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ common.OPTIONS.retrofit_dynamic_partitions = True
+ metadata = GetPackageMetadata(target_info)
+ self.assertDictEqual(
+ {
+ 'ota-retrofit-dynamic-partitions' : 'yes',
+ 'ota-type' : 'BLOCK',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-sdk-level' : '27',
+ 'post-security-patch-level' : '2017-12-01',
+ 'post-timestamp' : '1500000000',
+ 'pre-device' : 'product-device',
+ },
+ metadata)
+
@staticmethod
def _test_GetPackageMetadata_swapBuildTimestamps(target_info, source_info):
(target_info['build.prop']['ro.build.date.utc'],
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 18762ee..9d21429 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -21,8 +21,8 @@
import common
import test_utils
from sign_target_files_apks import (
- CheckAllApksSigned, EditTags, GetApkFileInfo, ReplaceCerts,
- ReplaceVerityKeyId, RewriteProps)
+ CheckApkAndApexKeysAvailable, EditTags, GetApkFileInfo, ReadApexKeysInfo,
+ ReplaceCerts, ReplaceVerityKeyId, RewriteProps)
class SignTargetFilesApksTest(test_utils.ReleaseToolsTestCase):
@@ -33,6 +33,10 @@
<signer signature="{}"><seinfo value="media"/></signer>
</policy>"""
+ APEX_KEYS_TXT = """name="apex.apexd_test.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
+name="apex.apexd_test_different_app.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
+"""
+
def setUp(self):
self.testdata_dir = test_utils.get_testdata_dir()
@@ -207,7 +211,7 @@
}
self.assertEqual(output_xml, ReplaceCerts(input_xml))
- def test_CheckAllApksSigned(self):
+ def test_CheckApkAndApexKeysAvailable(self):
input_file = common.MakeTempFile(suffix='.zip')
with zipfile.ZipFile(input_file, 'w') as input_zip:
input_zip.writestr('SYSTEM/app/App1.apk', "App1-content")
@@ -219,16 +223,17 @@
'App3.apk' : 'key3',
}
with zipfile.ZipFile(input_file) as input_zip:
- CheckAllApksSigned(input_zip, apk_key_map, None)
- CheckAllApksSigned(input_zip, apk_key_map, '.gz')
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, None)
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, '.gz')
# 'App2.apk.gz' won't be considered as an APK.
- CheckAllApksSigned(input_zip, apk_key_map, None)
- CheckAllApksSigned(input_zip, apk_key_map, '.xz')
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, None)
+ CheckApkAndApexKeysAvailable(input_zip, apk_key_map, '.xz')
del apk_key_map['App2.apk']
self.assertRaises(
- AssertionError, CheckAllApksSigned, input_zip, apk_key_map, '.gz')
+ AssertionError, CheckApkAndApexKeysAvailable, input_zip, apk_key_map,
+ '.gz')
def test_GetApkFileInfo(self):
(is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
@@ -344,3 +349,62 @@
self.assertRaises(
AssertionError, GetApkFileInfo, "SYSTEM_OTHER/preloads/apps/Chats.apk",
None, None)
+
+ def test_ReadApexKeysInfo(self):
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ keys_info = ReadApexKeysInfo(target_files_zip)
+
+ self.assertEqual(
+ {
+ 'apex.apexd_test.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
+ 'build/target/product/security/testkey'),
+ 'apex.apexd_test_different_app.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
+ 'build/target/product/security/testkey'),
+ },
+ keys_info)
+
+ def test_ReadApexKeysInfo_mismatchingKeys(self):
+ # Mismatching payload public / private keys.
+ apex_keys = self.APEX_KEYS_TXT + (
+ 'name="apex.apexd_test_different_app2.apex" '
+ 'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
+ 'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_3.pem" '
+ 'container_certificate="build/target/product/security/testkey.x509.pem" '
+ 'container_private_key="build/target/product/security/testkey.pk8"')
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', apex_keys)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ self.assertRaises(ValueError, ReadApexKeysInfo, target_files_zip)
+
+ def test_ReadApexKeysInfo_missingPrivateKey(self):
+ # Invalid lines will be skipped.
+ apex_keys = self.APEX_KEYS_TXT + (
+ 'name="apex.apexd_test_different_app2.apex" '
+ 'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
+ 'container_certificate="build/target/product/security/testkey.x509.pem" '
+ 'container_private_key="build/target/product/security/testkey.pk8"')
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apexkeys.txt', apex_keys)
+
+ with zipfile.ZipFile(target_files) as target_files_zip:
+ keys_info = ReadApexKeysInfo(target_files_zip)
+
+ self.assertEqual(
+ {
+ 'apex.apexd_test.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
+ 'build/target/product/security/testkey'),
+ 'apex.apexd_test_different_app.apex': (
+ 'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
+ 'build/target/product/security/testkey'),
+ },
+ keys_info)
diff --git a/tools/releasetools/testdata/signing_helper.sh b/tools/releasetools/testdata/signing_helper.sh
new file mode 100755
index 0000000..364e023
--- /dev/null
+++ b/tools/releasetools/testdata/signing_helper.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+tmpfile=$(mktemp)
+cat $3 | openssl rsautl -sign -inkey $2 -raw > $tmpfile
+cat $tmpfile > $3
+rm $tmpfile
diff --git a/tools/releasetools/testdata/testkey_with_passwd.key b/tools/releasetools/testdata/testkey_with_passwd.key
new file mode 100644
index 0000000..2f0a199
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCwaAOHPqgkCmqU
+AFRnJW6LrAuSfy9EzWSRHSkltp811ByMIE0N6/Nttu8ZCL456lzArHNKt/zdoBik
+eLB6gN9CTvQ8n4LMdSEmkRl3uXBtOPJuVObJ6ZUILz6L7WofWcr8DT81j2At7nHi
+Wg8SkCsFXbFfpjljOlpqUG3Szt+48X8rcgG82s97BuRwNxUgfK1/8QzOiH9fDbMU
+h6XI2jo2VwuBYOsJadJJWOf6oRRHZonrts0FXpV46CXykpLvLT2u5GXg1Pxd7i1K
+v1P8bxZOzVbEVfkL2DnUCtUBAnP98r9UyjQDd4blk4Mwl+mzB5otPTacNzEGhmNK
+Et+HB/cdAgMBAAECggEATsn2IXa7tHUuivHmwLb4O8vY01KY8xrleubSVPTPAUS+
+h1t57ujerbcR7VV5WPay/J9JUyr/9qClwPfioqRikwQek+EOk3ERIF+YR1/8tdvE
+c8DZ337DQIeRYP/l8SCyx4bHH43tADbKiLV+m+TmQhxJt5XPdeE/NtK7andZdwkv
+xEoG9l2aONE4z9pY1x+c1SdDSsq92/iLHLgSkQJmWo+lrfeh6gshXgQgDY8n6rgY
+GsCgSawLphvd8Tvo86CL04l0pWtY1gEW3s6sdYo1YDkpWQzSRCtGm0GlhEt2fyq5
+coTK2sLHguE7NL5VZo4zlGtM3QBdvRksTO1mJOt6JQKBgQDaT4oGjZp1rtKdObvn
+ElaUo5EOyJjmXkRBBndrbiG3078eOqTJHXx45DJUv8hj9+g6vSULiIeFk1FiiMQD
+vcnsBEaGaSc886wXY6TQgIIzvVfzDHGYTuQydiYQbLClH6S28HLqdlZjUIlHwxb9
+wBm8JwmTiVeAEvO8LTzeEqfkLwKBgQDO3He8Ei8XDeqtIK0lzcZ83yw9OGP23/gK
+8GDaf8J+cOtOyYkDlcV0rBNFvE8+TzIpIUlo47b2RSaART3iPSfRJTaySZjKWCVo
+s2A0/zQcrj7GgD2gaHRrgI9bmnWW1j95a9n/6AUEyEIJ6K8tYK819Vl4GAyhNHEQ
+sRbxa69qcwKBgQC5F8jxx2tXLdM6JLIQtzabLZcWTrN8Vh5Od3oWpriF0EzxB02h
+ipN3OBsISdZQE+dcrfNTtP0aHo5ZGZX/ihFCP1nAKjVvczXMWtppQRujXHzOABXr
+ya+mrQ+Wy2B1j7+qr3DvI0gZSjYqltjOaeon4X04DrEWUHtAZ6Z8rpqUVwKBgQCB
+o8mmI/8/A4m/Vmss9fke6P5gn6aGYXah5GPOi6Loevv9NHCZvpMwu2aYnZtMAXX+
+MM5A3fUcAdpPKRXPY2RAvoG42kbXCMbpBwGUNRwDnW/aFySIEu5jMP6m+fYXwc2l
+2uGUb2Q1ywsYCqs+VQl5V3nquaewn5z8SP+H7WTR4QKBgQCO5CRpyNOjEwMxTPR1
+GYUKAEiVtmzknHAxUE6drTgGEZSquAXiau0B5+7+/G5gwqxCLGpnstMByI+dhkR6
++ybAc/bzb2aoGK4pZf/PuwxQQsHBnG0oaSFU6RZlbVV20j7FZ04+cYnKHwCYkKjN
+DwA1Ae+H+u95raB4vYhk7IzD4A==
+-----END PRIVATE KEY-----