Merge "Remove org.apache.http.legacy from bootclasspath"
diff --git a/core/Makefile b/core/Makefile
index 5e4a489..9df92e9 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -4,15 +4,6 @@
# intermedites-dir-for
LOCAL_PATH := $(BUILD_SYSTEM)
-# Pick a reasonable string to use to identify files.
-ifneq (,$(filter eng.%,$(BUILD_NUMBER)))
- # BUILD_NUMBER has a timestamp in it, which means that
- # it will change every time. Pick a stable value.
- FILE_NAME_TAG := eng.$(USER)
-else
- FILE_NAME_TAG := $(BUILD_NUMBER)
-endif
-
# -----------------------------------------------------------------
# Define rules to copy PRODUCT_COPY_FILES defined by the product.
# PRODUCT_COPY_FILES contains words like <source file>:<dest file>[:<owner>].
@@ -238,28 +229,37 @@
# The string used to uniquely identify the combined build and product; used by the OTA server.
ifeq (,$(strip $(BUILD_FINGERPRINT)))
- ifneq ($(filter eng.%,$(BUILD_NUMBER)),)
- BF_BUILD_NUMBER := $(USER)$(shell $(DATE) +%m%d%H%M)
+ ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+ BF_BUILD_NUMBER := $(USER)$$($(DATE_FROM_FILE) +%m%d%H%M)
else
- BF_BUILD_NUMBER := $(BUILD_NUMBER)
+ BF_BUILD_NUMBER := $(file <$(BUILD_NUMBER_FILE))
endif
BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
endif
-ifneq ($(words $(BUILD_FINGERPRINT)),1)
- $(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)")
-endif
+# unset it for safety.
+BF_BUILD_NUMBER :=
-$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) > $(PRODUCT_OUT)/build_fingerprint.txt)
-BUILD_FINGERPRINT_FROM_FILE := $$(cat $(PRODUCT_OUT)/build_fingerprint.txt)
+BUILD_FINGERPRINT_FILE := $(PRODUCT_OUT)/build_fingerprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE) && grep " " $(BUILD_FINGERPRINT_FILE)))
+ $(error BUILD_FINGERPRINT cannot contain spaces: "$(file <$(BUILD_FINGERPRINT_FILE))")
+endif
+BUILD_FINGERPRINT_FROM_FILE := $$(cat $(BUILD_FINGERPRINT_FILE))
+# unset it for safety.
+BUILD_FINGERPRINT :=
# The string used to uniquely identify the system build; used by the OTA server.
# This purposefully excludes any product-specific variables.
ifeq (,$(strip $(BUILD_THUMBPRINT)))
- BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
+ BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
endif
-ifneq ($(words $(BUILD_THUMBPRINT)),1)
- $(error BUILD_THUMBPRINT cannot contain spaces: "$(BUILD_THUMBPRINT)")
+
+BUILD_THUMBPRINT_FILE := $(PRODUCT_OUT)/build_thumbprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_THUMBPRINT) >$(BUILD_THUMBPRINT_FILE) && grep " " $(BUILD_THUMBPRINT_FILE)))
+ $(error BUILD_THUMBPRINT cannot contain spaces: "$(file <$(BUILD_THUMBPRINT_FILE))")
endif
+BUILD_THUMBPRINT_FROM_FILE := $$(cat $(BUILD_THUMBPRINT_FILE))
+# unset it for safety.
+BUILD_THUMBPRINT :=
KNOWN_OEM_THUMBPRINT_PROPERTIES := \
ro.product.brand \
@@ -348,7 +348,7 @@
PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION="$(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION)" \
BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
BUILD_FINGERPRINT="$(BUILD_FINGERPRINT_FROM_FILE)" \
- $(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT)") \
+ $(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT_FROM_FILE)") \
TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 36f9dec..460a090 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -135,6 +135,7 @@
LOCAL_JAVA_LIBRARIES:=
LOCAL_JAVA_RESOURCE_DIRS:=
LOCAL_JAVA_RESOURCE_FILES:=
+LOCAL_JETIFIER_ENABLED:=
LOCAL_JNI_SHARED_LIBRARIES:=
LOCAL_JNI_SHARED_LIBRARIES_ABI:=
LOCAL_LDFLAGS:=
@@ -200,6 +201,7 @@
LOCAL_PREBUILT_OBJ_FILES:=
LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
LOCAL_PREBUILT_STRIP_COMMENTS:=
+LOCAL_PRIVATE_PLATFORM_APIS:=
LOCAL_PRIVILEGED_MODULE:=
# '',full,custom,disabled,obfuscation,optimization
LOCAL_PRODUCT_MODULE:=
@@ -243,6 +245,8 @@
LOCAL_SOONG_PROGUARD_DICT :=
LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
LOCAL_SOONG_RRO_DIRS :=
+LOCAL_DROIDDOC_STUBS_JAR :=
+LOCAL_DROIDDOC_DOC_ZIP :=
# '',true
LOCAL_SOURCE_FILES_ALL_GENERATED:=
LOCAL_SRC_FILES:=
diff --git a/core/config.mk b/core/config.mk
index f04fefb..27f187d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -681,6 +681,8 @@
FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
FINDBUGS := $(FINDBUGS_DIR)/findbugs
+JETIFIER := prebuilts/sdk/tools/jetifier/jetifier-standalone/bin/jetifier-standalone
+
# Tool to merge AndroidManifest.xmls
ANDROID_MANIFEST_MERGER_CLASSPATH := \
prebuilts/gradle-plugin/com/android/tools/build/manifest-merger/26.0.0-beta2/manifest-merger-26.0.0-beta2.jar \
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 8db9428..8d0539a 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -106,7 +106,7 @@
$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) && \
ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
--input-image-location=$(PRIVATE_IMAGE_LOCATION) \
- --output-image-relocation-file=$@.rel \
+ --output-image-relocation-directory=$(dir $@) \
--instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
--base-offset-delta=0x10000000
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 6525b69..9f66451 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -139,8 +139,10 @@
--dex-location=$(PRIVATE_DEX_LOCATION) \
--reference-profile-file=$@
dex_preopt_profile_src_file:=
-# Remove compressed APK extension.
+
+# Remove compressed APK extension.
my_installed_profile := $(patsubst %.gz,%,$(LOCAL_INSTALLED_MODULE)).prof
+
# my_installed_profile := $(LOCAL_INSTALLED_MODULE).prof
$(eval $(call copy-one-file,$(my_built_profile),$(my_installed_profile)))
build_installed_profile:=$(my_built_profile):$(my_installed_profile)
@@ -250,12 +252,39 @@
# For non system server jars, use speed-profile when we have a profile.
LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed-profile
else
- LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_default_compiler_filter)
+ LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_default_compiler_filter)
endif
endif
endif
endif
+my_generate_dm := $(PRODUCT_DEX_PREOPT_GENERATE_DM_FILES)
+ifeq (,$(filter $(LOCAL_DEX_PREOPT_FLAGS),--compiler-filter=verify))
+# Generating DM files only makes sense with the verify compiler filter; skip it for APKs using any other filter.
+my_generate_dm := false
+endif
+
+# No reason to use a dm file if the dex is already uncompressed.
+ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
+my_generate_dm := false
+endif
+
+ifeq (true,$(my_generate_dm))
+LOCAL_DEX_PREOPT_FLAGS += --copy-dex-files=false
+LOCAL_DEX_PREOPT := nostripping
+my_built_dm := $(dir $(LOCAL_BUILT_MODULE))generated.dm
+my_installed_dm := $(patsubst %.apk,%,$(LOCAL_INSTALLED_MODULE)).dm
+my_copied_vdex := $(dir $(LOCAL_BUILT_MODULE))primary.vdex
+$(eval $(call copy-one-file,$(built_vdex),$(my_copied_vdex)))
+$(my_built_dm): PRIVATE_INPUT_VDEX := $(my_copied_vdex)
+$(my_built_dm): $(my_copied_vdex) $(ZIPTIME)
+ $(hide) mkdir -p $(dir $@)
+ $(hide) rm -f $@
+ $(hide) zip -qD -j -X -9 $@ $(PRIVATE_INPUT_VDEX)
+ $(ZIPTIME) $@
+$(eval $(call copy-one-file,$(my_built_dm),$(my_installed_dm)))
+endif
+
# PRODUCT_SYSTEM_SERVER_DEBUG_INFO overrides WITH_DEXPREOPT_DEBUG_INFO.
my_system_server_debug_info := $(PRODUCT_SYSTEM_SERVER_DEBUG_INFO)
ifeq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
@@ -280,14 +309,26 @@
$(built_art): $(built_odex)
endif
-# Add the installed_odex to the list of installed files for this module.
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
+ifneq (true,$(my_generate_dm))
+ # Add the installed_odex to the list of installed files for this module if we aren't generating a
+ # dm file.
+ ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
+ ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
+ ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
+ ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
+ ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
+ ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
+
+ # Make sure to install the .odex and .vdex when you run "make <module_name>"
+ $(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
+else
+ ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_dm)
+ ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(my_built_dm) $(my_installed_dm)
+
+ # Make sure to install the .dm when you run "make <module_name>"
+ $(my_all_targets): $(my_installed_dm)
+endif
# Record dex-preopt config.
DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
@@ -301,10 +342,6 @@
DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS) := $(sort \
$(DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS)) $(LOCAL_MODULE))
-
-# Make sure to install the .odex and .vdex when you run "make <module_name>"
-$(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
-
endif # LOCAL_DEX_PREOPT
# Profile doesn't depend on LOCAL_DEX_PREOPT.
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 43fc780..20663d1 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -67,6 +67,8 @@
include $(BUILD_SYSTEM)/java_common.mk
+include $(BUILD_SYSTEM)/sdk_check.mk
+
$(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
# List of dependencies for anything that needs all java sources in place
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 5176f37..47404c8 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -97,10 +97,16 @@
$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
-stripDir META-INF -zipToNotStrip $< $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
+#######################################
+LOCAL_JETIFIER_INPUT_FILE := $(full_classes_combined_jar)
+
+include $(BUILD_SYSTEM)/jetifier.mk
+#######################################
+
# Run jarjar if necessary, otherwise just copy the file.
ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
+$(full_classes_jarjar_jar): $(LOCAL_JETIFIER_OUTPUT_FILE) $(LOCAL_JARJAR_RULES) | $(JARJAR)
@echo JarJar: $@
$(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
else
@@ -108,9 +114,9 @@
endif
+#######################################
LOCAL_FULL_CLASSES_PRE_JACOCO_JAR := $(full_classes_jarjar_jar)
-#######################################
include $(BUILD_SYSTEM)/jacoco.mk
#######################################
diff --git a/core/java.mk b/core/java.mk
index 6774b75..3b7ecd0 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -357,6 +357,8 @@
include $(BUILD_SYSTEM)/java_common.mk
+include $(BUILD_SYSTEM)/sdk_check.mk
+
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_RS_SOURCES := $(if $(renderscript_sources),true)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RS_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/renderscript
@@ -557,11 +559,17 @@
full_classes_jarjar_jar := $(full_classes_processed_jar)
endif
-$(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
+#######################################
+LOCAL_JETIFIER_INPUT_FILE := $(full_classes_jarjar_jar)
-LOCAL_FULL_CLASSES_PRE_JACOCO_JAR := $(full_classes_jar)
+include $(BUILD_SYSTEM)/jetifier.mk
+#######################################
+
+$(eval $(call copy-one-file,$(LOCAL_JETIFIER_OUTPUT_FILE),$(full_classes_jar)))
#######################################
+LOCAL_FULL_CLASSES_PRE_JACOCO_JAR := $(full_classes_jar)
+
include $(BUILD_SYSTEM)/jacoco.mk
#######################################
diff --git a/core/jetifier.mk b/core/jetifier.mk
new file mode 100644
index 0000000..305c9dd
--- /dev/null
+++ b/core/jetifier.mk
@@ -0,0 +1,34 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file sets up the rules for running Jetifier.
+
+# Now add the rule to run Jetifier.
+ifeq ($(strip $(LOCAL_JETIFIER_ENABLED)),true)
+ my_jetifier_input_path := $(LOCAL_JETIFIER_INPUT_FILE)
+ my_files := $(intermediates.COMMON)/jetifier
+ my_jetifier_output_path := $(my_files)/classes-jetifier.jar
+
+$(my_jetifier_output_path) : $(my_jetifier_input_path) $(JETIFIER)
+ rm -rf $@
+ $(JETIFIER) -outputfile $@ -i $<
+
+ LOCAL_JETIFIER_OUTPUT_FILE := $(my_jetifier_output_path)
+ LOCAL_INTERMEDIATE_TARGETS += $(LOCAL_JETIFIER_OUTPUT_FILE)
+else
+ LOCAL_JETIFIER_OUTPUT_FILE := $(LOCAL_JETIFIER_INPUT_FILE)
+endif
+
diff --git a/core/main.mk b/core/main.mk
index bcce986..24ffa74 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -61,12 +61,23 @@
# when using ninja.
$(shell mkdir -p $(OUT_DIR) && \
echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FILE := $(OUT_DIR)/build_number.txt
+
ifeq ($(HOST_OS),darwin)
DATE_FROM_FILE := date -r $(BUILD_DATETIME_FROM_FILE)
else
DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
endif
+# Pick a reasonable string to use to identify files.
+ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+ # BUILD_NUMBER has a timestamp in it, which means that
+ # it will change every time. Pick a stable value.
+ FILE_NAME_TAG := eng.$(USER)
+else
+ FILE_NAME_TAG := $(file <$(BUILD_NUMBER_FILE))
+endif
+
# Make an empty directory, which can be used to make empty jars
EMPTY_DIRECTORY := $(OUT_DIR)/empty
$(shell mkdir -p $(EMPTY_DIRECTORY) && rm -rf $(EMPTY_DIRECTORY)/*)
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index ca2dcee..2256f98 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -19,9 +19,7 @@
boottarball-nodeps \
brillo_tests \
btnod \
- build-art% \
build_kernel-nodeps \
- clean-oat% \
continuous_instrumentation_tests \
continuous_native_tests \
cts \
@@ -47,11 +45,9 @@
systemimage-nodeps \
systemtarball-nodeps \
target-files-package \
- test-art% \
user \
userdataimage \
userdebug \
- valgrind-test-art% \
vts \
win_sdk \
winsdk-tools
diff --git a/core/product.mk b/core/product.mk
index ce14853..6cccebf 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -134,6 +134,7 @@
PRODUCT_DEX_PREOPT_BOOT_FLAGS \
PRODUCT_DEX_PREOPT_PROFILE_DIR \
PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
+ PRODUCT_DEX_PREOPT_GENERATE_DM_FILES \
PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE \
PRODUCT_SYSTEM_SERVER_COMPILER_FILTER \
PRODUCT_SANITIZER_MODULE_CONFIGS \
diff --git a/core/product_config.mk b/core/product_config.mk
index 0c46541..2620adb 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -396,6 +396,8 @@
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER))
PRODUCT_DEX_PREOPT_DEFAULT_FLAGS := \
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_FLAGS))
+PRODUCT_DEX_PREOPT_GENERATE_DM_FILES := \
+ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_GENERATE_DM_FILES))
PRODUCT_DEX_PREOPT_BOOT_FLAGS := \
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_BOOT_FLAGS))
PRODUCT_DEX_PREOPT_PROFILE_DIR := \
diff --git a/core/sdk_check.mk b/core/sdk_check.mk
new file mode 100644
index 0000000..c5c2bc8
--- /dev/null
+++ b/core/sdk_check.mk
@@ -0,0 +1,14 @@
+
+# Enforces that LOCAL_SDK_VERSION and LOCAL_PRIVATE_PLATFORM_APIS are
+# set correctly.
+# Should be included by Java targets that allow specifying LOCAL_SDK_VERSION.
+
+ifeq ($(LOCAL_SDK_VERSION)$(LOCAL_PRIVATE_PLATFORM_APIS),)
+ifneq ($(JAVA_SDK_ENFORCEMENT_WARNING),)
+$(warning Java modules must specify LOCAL_SDK_VERSION or LOCAL_PRIVATE_PLATFORM_APIS, but $(LOCAL_MODULE) specifies neither.)
+endif
+else ifneq ($(LOCAL_SDK_VERSION),)
+ifneq ($(LOCAL_PRIVATE_PLATFORM_APIS),)
+$(error $(LOCAL_MODULE) specifies both LOCAL_SDK_VERSION ($(LOCAL_SDK_VERSION)) and LOCAL_PRIVATE_PLATFORM_APIS ($(LOCAL_PRIVATE_PLATFORM_APIS)), but should specify only one.)
+endif
+endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 153b741..bf5034b 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -42,6 +42,9 @@
$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT))
+$(call add_json_str, BuildId, $(BUILD_ID))
+$(call add_json_str, BuildNumberFromFile, $$$(BUILD_NUMBER_FROM_FILE))
+
$(call add_json_val, Platform_sdk_version, $(PLATFORM_SDK_VERSION))
$(call add_json_csv, Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
$(call add_json_csv, Platform_version_future_codenames, $(PLATFORM_VERSION_FUTURE_CODENAMES))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index f3ed376..f10da32 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -23,6 +23,15 @@
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_jar)))
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_pre_proguard_jar)))
+ifdef LOCAL_DROIDDOC_STUBS_JAR
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_JAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.jar))
+ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.jar
+endif
+
+ifdef LOCAL_DROIDDOC_DOC_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+endif
+
ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
$(intermediates.COMMON)/jacoco-report-classes.jar))
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index 4d05237..a1151e9 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -151,7 +151,7 @@
cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
# Generate the image.
$(if $(filter oem,$(PRIVATE_MOUNT_POINT)), \
- $(hide) echo "oem.buildnumber=$(BUILD_NUMBER)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
+ $(hide) echo "oem.buildnumber=$(BUILD_NUMBER_FROM_FILE)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
build/make/tools/releasetools/build_image.py \
$(PRIVATE_STAGING_DIR) $(PRIVATE_INTERMEDIATES)/image_info.txt $@ $(TARGET_OUT)
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index e02820d..d6bad21 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -271,6 +271,7 @@
# to soong_ui.
BUILD_DATETIME :=
+HAS_BUILD_NUMBER := true
ifndef BUILD_NUMBER
# BUILD_NUMBER should be set to the source control value that
# represents the current state of the source code. E.g., a
@@ -282,6 +283,7 @@
# from this date/time" value. Make it start with a non-digit so that
# anyone trying to parse it as an integer will probably get "0".
BUILD_NUMBER := eng.$(shell echo $${USER:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
+ HAS_BUILD_NUMBER := false
endif
ifndef PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index c1596d1..6d7ac4a 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -56,6 +56,9 @@
PRODUCT_PACKAGES += \
cacerts \
+PRODUCT_PACKAGES += \
+ hiddenapi-package-whitelist.xml \
+
PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
dalvik.vm.image-dex2oat-Xms=64m \
dalvik.vm.image-dex2oat-Xmx=64m \
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 370710e..0c87857 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1800,17 +1800,23 @@
def ParseCertificate(data):
- """Parse a PEM-format certificate."""
- cert = []
+ """Parses a PEM-encoded certificate and converts it to DER encoding.
+
+ This gives the same result as `openssl x509 -in <filename> -outform DER`.
+
+ Returns:
+ The decoded certificate string.
+ """
+ cert_buffer = []
save = False
for line in data.split("\n"):
if "--END CERTIFICATE--" in line:
break
if save:
- cert.append(line)
+ cert_buffer.append(line)
if "--BEGIN CERTIFICATE--" in line:
save = True
- cert = "".join(cert).decode('base64')
+ cert = "".join(cert_buffer).decode('base64')
return cert
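
For reference, ParseCertificate boils down to stripping the BEGIN/END markers and base64-decoding what is left. A minimal standalone sketch of the same transformation, assuming a local file named cert.x509.pem (a placeholder, not part of this change):

    import base64

    with open('cert.x509.pem') as pem_fp:  # placeholder path
        pem = pem_fp.read()
    # Keep only the text between the markers, then base64-decode it.
    body = pem.split('-----BEGIN CERTIFICATE-----')[1]
    body = body.split('-----END CERTIFICATE-----')[0]
    der = base64.b64decode(''.join(body.split()))
    # 'der' matches `openssl x509 -in cert.x509.pem -outform DER`.
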
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index e2335c0..3225156 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -144,6 +144,13 @@
--payload_signer_args <args>
Specify the arguments needed for payload signer.
+
+ --skip_postinstall
+ Skip the postinstall hooks when generating an A/B OTA package (default:
+ False). Note that this discards ALL the hooks, including non-optional
+ ones. Should only be used if the caller knows it's safe to do so (e.g. all the
+ postinstall work is to dexopt apps and a data wipe will happen immediately
+ after). Only meaningful when generating A/B OTAs.
"""
from __future__ import print_function
@@ -151,6 +158,7 @@
import multiprocessing
import os.path
import shlex
+import shutil
import subprocess
import sys
import tempfile
@@ -193,8 +201,11 @@
OPTIONS.payload_signer_args = []
OPTIONS.extracted_input = None
OPTIONS.key_passwords = []
+OPTIONS.skip_postinstall = False
+
METADATA_NAME = 'META-INF/com/android/metadata'
+POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
UNZIP_PATTERN = ['IMAGES/*', 'META/*']
@@ -1215,7 +1226,7 @@
WriteMetadata(metadata, output_zip)
-def GetTargetFilesZipForSecondaryImages(input_file):
+def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
"""Returns a target-files.zip file for generating secondary payload.
Although the original target-files.zip already contains secondary slot
@@ -1229,6 +1240,7 @@
Args:
input_file: The input target-files.zip file.
+ skip_postinstall: Whether to skip copying the postinstall config file.
Returns:
The filename of the target-files.zip for generating secondary payload.
@@ -1247,6 +1259,10 @@
'IMAGES/system.map'):
pass
+ # Skip copying the postinstall config if requested.
+ elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
+ pass
+
elif info.filename.startswith(('META/', 'IMAGES/')):
common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
@@ -1256,6 +1272,31 @@
return target_file
+def GetTargetFilesZipWithoutPostinstallConfig(input_file):
+ """Returns a target-files.zip that does not contain postinstall_config.txt.
+
+ This allows the brillo_update_payload script to skip writing all the postinstall
+ hooks in the generated payload. The input target-files.zip file will be
+ duplicated, with 'META/postinstall_config.txt' skipped. If input_file doesn't
+ contain the postinstall_config.txt entry, the input file will be returned.
+
+ Args:
+ input_file: The input target-files.zip filename.
+
+ Returns:
+ The filename of target-files.zip that doesn't contain postinstall config.
+ """
+ # We should only make a copy if the postinstall_config entry exists.
+ with zipfile.ZipFile(input_file, 'r') as input_zip:
+ if POSTINSTALL_CONFIG not in input_zip.namelist():
+ return input_file
+
+ target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+ shutil.copyfile(input_file, target_file)
+ common.ZipDelete(target_file, POSTINSTALL_CONFIG)
+ return target_file
+
+
def WriteABOTAPackageWithBrilloScript(target_file, output_file,
source_file=None):
"""Generate an Android OTA package that has A/B update payload."""
@@ -1325,6 +1366,9 @@
# Metadata to comply with Android OTA package format.
metadata = GetPackageMetadata(target_info, source_info)
+ if OPTIONS.skip_postinstall:
+ target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
+
# Generate payload.
payload = Payload()
@@ -1349,7 +1393,8 @@
if OPTIONS.include_secondary:
# We always include a full payload for the secondary slot, even when
# building an incremental OTA. See the comments for "--include_secondary".
- secondary_target_file = GetTargetFilesZipForSecondaryImages(target_file)
+ secondary_target_file = GetTargetFilesZipForSecondaryImages(
+ target_file, OPTIONS.skip_postinstall)
secondary_payload = Payload(secondary=True)
secondary_payload.Generate(secondary_target_file,
additional_args=additional_args)
@@ -1478,6 +1523,8 @@
OPTIONS.payload_signer_args = shlex.split(a)
elif o == "--extracted_input_target_files":
OPTIONS.extracted_input = a
+ elif o == "--skip_postinstall":
+ OPTIONS.skip_postinstall = True
else:
return False
return True
@@ -1507,6 +1554,7 @@
"payload_signer=",
"payload_signer_args=",
"extracted_input_target_files=",
+ "skip_postinstall",
], extra_option_handler=option_handler)
if len(args) != 2:
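
The --skip_postinstall path ultimately just duplicates the target-files zip without META/postinstall_config.txt. A rough stdlib-only sketch of that behavior (illustrative only; the patch itself relies on common.MakeTempFile, shutil.copyfile and common.ZipDelete):

    import os
    import tempfile
    import zipfile

    def strip_postinstall(input_file, entry='META/postinstall_config.txt'):
        with zipfile.ZipFile(input_file) as zin:
            if entry not in zin.namelist():
                return input_file  # nothing to strip; reuse the input
            fd, out = tempfile.mkstemp(suffix='.zip')
            os.close(fd)
            with zipfile.ZipFile(out, 'w') as zout:
                for info in zin.infolist():
                    if info.filename != entry:  # drop only the postinstall config
                        zout.writestr(info, zin.read(info.filename))
        return out
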
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 1f9a3ca..fa62c8f 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -104,6 +104,7 @@
import sys
import tempfile
import zipfile
+from xml.etree import ElementTree
import add_img_to_target_files
import common
@@ -290,6 +291,8 @@
new_data = RewriteProps(data)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
+ # Replace the certs in *mac_permissions.xml (there could be multiple, such
+ # as {system,vendor}/etc/selinux/{plat,nonplat}_mac_permissions.xml).
elif info.filename.endswith("mac_permissions.xml"):
print("Rewriting %s with new keys." % (info.filename,))
new_data = ReplaceCerts(data)
@@ -361,31 +364,54 @@
def ReplaceCerts(data):
- """Given a string of data, replace all occurences of a set
- of X509 certs with a newer set of X509 certs and return
- the updated data string."""
- for old, new in OPTIONS.key_map.iteritems():
- try:
- if OPTIONS.verbose:
- print(" Replacing %s.x509.pem with %s.x509.pem" % (old, new))
- f = open(old + ".x509.pem")
- old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
- f.close()
- f = open(new + ".x509.pem")
- new_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
- f.close()
- # Only match entire certs.
- pattern = "\\b" + old_cert16 + "\\b"
- (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
- if OPTIONS.verbose:
- print(" Replaced %d occurence(s) of %s.x509.pem with "
- "%s.x509.pem" % (num, old, new))
- except IOError as e:
- if e.errno == errno.ENOENT and not OPTIONS.verbose:
- continue
+ """Replaces all the occurrences of X.509 certs with the new ones.
- print(" Error accessing %s. %s. Skip replacing %s.x509.pem with "
- "%s.x509.pem." % (e.filename, e.strerror, old, new))
+ The mapping info is read from OPTIONS.key_map. Non-existent certificates will
+ be skipped. After the replacement, it additionally checks for duplicate
+ entries, which would otherwise fail the policy loading code in
+ frameworks/base/services/core/java/com/android/server/pm/SELinuxMMAC.java.
+
+ Args:
+ data: Input string that contains a set of X.509 certs.
+
+ Returns:
+ A string after the replacement.
+
+ Raises:
+ AssertionError: On finding duplicate entries.
+ """
+ for old, new in OPTIONS.key_map.iteritems():
+ if OPTIONS.verbose:
+ print(" Replacing %s.x509.pem with %s.x509.pem" % (old, new))
+
+ try:
+ with open(old + ".x509.pem") as old_fp:
+ old_cert16 = base64.b16encode(
+ common.ParseCertificate(old_fp.read())).lower()
+ with open(new + ".x509.pem") as new_fp:
+ new_cert16 = base64.b16encode(
+ common.ParseCertificate(new_fp.read())).lower()
+ except IOError as e:
+ if OPTIONS.verbose or e.errno != errno.ENOENT:
+ print(" Error accessing %s: %s.\nSkip replacing %s.x509.pem with "
+ "%s.x509.pem." % (e.filename, e.strerror, old, new))
+ continue
+
+ # Only match entire certs.
+ pattern = "\\b" + old_cert16 + "\\b"
+ (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
+
+ if OPTIONS.verbose:
+ print(" Replaced %d occurrence(s) of %s.x509.pem with %s.x509.pem" % (
+ num, old, new))
+
+ # Verify that there are no duplicate entries after the replacement. Note that
+ # it's only checking entries with global seinfo at the moment (i.e. ignoring
+ # the ones with inner packages). (Bug: 69479366)
+ root = ElementTree.fromstring(data)
+ signatures = [signer.attrib['signature'] for signer in root.findall('signer')]
+ assert len(signatures) == len(set(signatures)), \
+ "Found duplicate entries after cert replacement: {}".format(data)
return data
@@ -597,7 +623,7 @@
# Extract keyid using openssl command.
p = common.Run(["openssl", "x509", "-in", key_path, "-text"],
- stdout=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
keyid, stderr = p.communicate()
assert p.returncode == 0, "Failed to dump certificate: {}".format(stderr)
keyid = re.search(
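
The duplicate-entry check that ReplaceCerts now performs can be exercised in isolation. A small sketch with a made-up two-signer policy (the signature values are placeholders):

    from xml.etree import ElementTree

    sample = ('<policy>'
              '<signer signature="aa11"><seinfo value="platform"/></signer>'
              '<signer signature="bb22"><seinfo value="media"/></signer>'
              '</policy>')
    root = ElementTree.fromstring(sample)
    signatures = [s.attrib['signature'] for s in root.findall('signer')]
    # Passes here; if the key_map mapped two different certs onto the same one,
    # the signatures would collide and the assertion would fire, mirroring the
    # new check in ReplaceCerts.
    assert len(signatures) == len(set(signatures))
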
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 6da286c..c073eba 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -13,7 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+
import os
+import subprocess
import tempfile
import time
import unittest
@@ -23,6 +25,7 @@
import common
import test_utils
import validate_target_files
+from rangelib import RangeSet
KiB = 1024
@@ -400,6 +403,9 @@
'Compressed4.apk' : 'certs/compressed4',
}
+ def setUp(self):
+ self.testdata_dir = test_utils.get_testdata_dir()
+
def tearDown(self):
common.Cleanup()
@@ -477,17 +483,168 @@
self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
def test_ExtractPublicKey(self):
- testdata_dir = test_utils.get_testdata_dir()
- cert = os.path.join(testdata_dir, 'testkey.x509.pem')
- pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
+ cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+ pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
with open(pubkey, 'rb') as pubkey_fp:
self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))
def test_ExtractPublicKey_invalidInput(self):
- testdata_dir = test_utils.get_testdata_dir()
- wrong_input = os.path.join(testdata_dir, 'testkey.pk8')
+ wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)
+ def test_ParseCertificate(self):
+ cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+
+ cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
+ proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ expected, _ = proc.communicate()
+ self.assertEqual(0, proc.returncode)
+
+ with open(cert) as cert_fp:
+ actual = common.ParseCertificate(cert_fp.read())
+ self.assertEqual(expected, actual)
+
+
+class CommonUtilsTest(unittest.TestCase):
+
+ def tearDown(self):
+ common.Cleanup()
+
+ def test_GetSparseImage_emptyBlockMapFile(self):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([
+ (0xCAC1, 6),
+ (0xCAC3, 3),
+ (0xCAC1, 4)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr('IMAGES/system.map', '')
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir, input_zip = common.UnzipTemp(target_files)
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+ input_zip.close()
+
+ self.assertDictEqual(
+ {
+ '__COPY': RangeSet("0"),
+ '__NONZERO-0': RangeSet("1-5 9-12"),
+ },
+ sparse_image.file_map)
+
+ def test_GetSparseImage_invalidImageName(self):
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'system2', None, None, False)
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'unknown', None, None, False)
+
+ def test_GetSparseImage_missingBlockMapFile(self):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([
+ (0xCAC1, 6),
+ (0xCAC3, 3),
+ (0xCAC1, 4)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir, input_zip = common.UnzipTemp(target_files)
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+ False)
+ input_zip.close()
+
+ def test_GetSparseImage_sharedBlocks_notAllowed(self):
+ """Tests the case where blocks overlap but sharing them is disallowed."""
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ # Block 10 is shared between two files.
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '/system/file1 1-5 9-10',
+ '/system/file2 10-12']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir, input_zip = common.UnzipTemp(target_files)
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+ False)
+ input_zip.close()
+
+ def test_GetSparseImage_sharedBlocks_allowed(self):
+ """Tests the case for a target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ # Construct an image with a care_map of "0-5 9-12".
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ # Block 10 is shared between two files.
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '/system/file1 1-5 9-10',
+ '/system/file2 10-12']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir, input_zip = common.UnzipTemp(target_files)
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)
+ input_zip.close()
+
+ self.assertDictEqual(
+ {
+ '__COPY': RangeSet("0"),
+ '__NONZERO-0': RangeSet("6-8 13-15"),
+ '/system/file1': RangeSet("1-5 9-10"),
+ '/system/file2': RangeSet("11-12"),
+ },
+ sparse_image.file_map)
+
+ # '/system/file2' should be marked with 'uses_shared_blocks', but not with
+ # 'incomplete'.
+ self.assertTrue(
+ sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
+ self.assertNotIn(
+ 'incomplete', sparse_image.file_map['/system/file2'].extra)
+
+ # All other entries should look normal without any tags.
+ self.assertFalse(sparse_image.file_map['__COPY'].extra)
+ self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
+ self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+
+ def test_GetSparseImage_incompleteRanges(self):
+ """Tests the case of ext4 images with holes."""
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '/system/file1 1-5 9-10',
+ '/system/file2 11-12']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+ # '/system/file2' has fewer blocks listed (2) than actual (3).
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir, input_zip = common.UnzipTemp(target_files)
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+ input_zip.close()
+
+ self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+ self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])
+
class InstallRecoveryScriptFormatTest(unittest.TestCase):
"""Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index a4fa4f9..ee5bc53 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -24,7 +24,9 @@
import test_utils
from ota_from_target_files import (
_LoadOemDicts, BuildInfo, GetPackageMetadata,
- GetTargetFilesZipForSecondaryImages, Payload, PayloadSigner,
+ GetTargetFilesZipForSecondaryImages,
+ GetTargetFilesZipWithoutPostinstallConfig,
+ Payload, PayloadSigner, POSTINSTALL_CONFIG,
WriteFingerprintAssertion)
@@ -37,6 +39,16 @@
'META/update_engine_config.txt',
"PAYLOAD_MAJOR_VERSION=2\nPAYLOAD_MINOR_VERSION=4\n")
+ # META/postinstall_config.txt
+ target_files_zip.writestr(
+ POSTINSTALL_CONFIG,
+ '\n'.join([
+ "RUN_POSTINSTALL_system=true",
+ "POSTINSTALL_PATH_system=system/bin/otapreopt_script",
+ "FILESYSTEM_TYPE_system=ext4",
+ "POSTINSTALL_OPTIONAL_system=true",
+ ]))
+
# META/ab_partitions.txt
ab_partitions = ['boot', 'system', 'vendor']
target_files_zip.writestr(
@@ -539,10 +551,41 @@
self.assertIn('IMAGES/boot.img', namelist)
self.assertIn('IMAGES/system.img', namelist)
self.assertIn('IMAGES/vendor.img', namelist)
+ self.assertIn(POSTINSTALL_CONFIG, namelist)
self.assertNotIn('IMAGES/system_other.img', namelist)
self.assertNotIn('IMAGES/system.map', namelist)
+ def test_GetTargetFilesZipForSecondaryImages_skipPostinstall(self):
+ input_file = construct_target_files(secondary=True)
+ target_file = GetTargetFilesZipForSecondaryImages(
+ input_file, skip_postinstall=True)
+
+ with zipfile.ZipFile(target_file) as verify_zip:
+ namelist = verify_zip.namelist()
+
+ self.assertIn('META/ab_partitions.txt', namelist)
+ self.assertIn('IMAGES/boot.img', namelist)
+ self.assertIn('IMAGES/system.img', namelist)
+ self.assertIn('IMAGES/vendor.img', namelist)
+
+ self.assertNotIn('IMAGES/system_other.img', namelist)
+ self.assertNotIn('IMAGES/system.map', namelist)
+ self.assertNotIn(POSTINSTALL_CONFIG, namelist)
+
+ def test_GetTargetFilesZipWithoutPostinstallConfig(self):
+ input_file = construct_target_files()
+ target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+ with zipfile.ZipFile(target_file) as verify_zip:
+ self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
+ def test_GetTargetFilesZipWithoutPostinstallConfig_missingEntry(self):
+ input_file = construct_target_files()
+ common.ZipDelete(input_file, POSTINSTALL_CONFIG)
+ target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+ with zipfile.ZipFile(target_file) as verify_zip:
+ self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
class PayloadSignerTest(unittest.TestCase):
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 726d6b9..26f9e10 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -16,18 +16,27 @@
from __future__ import print_function
-import tempfile
+import base64
+import os.path
import unittest
import zipfile
import common
-from sign_target_files_apks import EditTags, ReplaceVerityKeyId, RewriteProps
+import test_utils
+from sign_target_files_apks import (
+ EditTags, ReplaceCerts, ReplaceVerityKeyId, RewriteProps)
class SignTargetFilesApksTest(unittest.TestCase):
+ MAC_PERMISSIONS_XML = """<?xml version="1.0" encoding="iso-8859-1"?>
+<policy>
+ <signer signature="{}"><seinfo value="platform"/></signer>
+ <signer signature="{}"><seinfo value="media"/></signer>
+</policy>"""
+
def setUp(self):
- self.tempdir = common.MakeTempDir()
+ self.testdata_dir = test_utils.get_testdata_dir()
def tearDown(self):
common.Cleanup()
@@ -88,94 +97,31 @@
"androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
"lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
"buildvariant=userdebug "
- "veritykeyid=id:485900563d272c46ae118605a47419ac09ca8c11\n")
+ "veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
- # From build/target/product/security/verity.x509.pem.
- VERITY_CERTIFICATE1 = """-----BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
-VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
-AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
-A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
-6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
-fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
-T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
-AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
-jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
-HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
-oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
-NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
-JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
-dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
-UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
-yttuAJAEAymk1mipd9+zp38=
------END CERTIFICATE-----
-"""
-
- # From build/target/product/security/testkey.x509.pem.
- VERITY_CERTIFICATE2 = """-----BEGIN CERTIFICATE-----
-MIIEqDCCA5CgAwIBAgIJAJNurL4H8gHfMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
-VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
-AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0wODAyMjkwMTMzNDZaFw0zNTA3MTcwMTMzNDZaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
-A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
-hvcNAQEBBQADggENADCCAQgCggEBANaTGQTexgskse3HYuDZ2CU+Ps1s6x3i/waM
-qOi8qM1r03hupwqnbOYOuw+ZNVn/2T53qUPn6D1LZLjk/qLT5lbx4meoG7+yMLV4
-wgRDvkxyGLhG9SEVhvA4oU6Jwr44f46+z4/Kw9oe4zDJ6pPQp8PcSvNQIg1QCAcy
-4ICXF+5qBTNZ5qaU7Cyz8oSgpGbIepTYOzEJOmc3Li9kEsBubULxWBjf/gOBzAzU
-RNps3cO4JFgZSAGzJWQTT7/emMkod0jb9WdqVA2BVMi7yge54kdVMxHEa5r3b97s
-zI5p58ii0I54JiCUP5lyfTwE/nKZHZnfm644oLIXf6MdW2r+6R8CAQOjgfwwgfkw
-HQYDVR0OBBYEFEhZAFY9JyxGrhGGBaR0GawJyowRMIHJBgNVHSMEgcEwgb6AFEhZ
-AFY9JyxGrhGGBaR0GawJyowRoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
-CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
-QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
-CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAJNurL4H8gHfMAwGA1Ud
-EwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHqvlozrUMRBBVEY0NqrrwFbinZa
-J6cVosK0TyIUFf/azgMJWr+kLfcHCHJsIGnlw27drgQAvilFLAhLwn62oX6snb4Y
-LCBOsVMR9FXYJLZW2+TcIkCRLXWG/oiVHQGo/rWuWkJgU134NDEFJCJGjDbiLCpe
-+ZTWHdcwauTJ9pUbo8EvHRkU3cYfGmLaLfgn9gP+pWA7LFQNvXwBnDa6sppCccEX
-31I828XzgXpJ4O+mDL1/dBd+ek8ZPUP0IgdyZm5MTYPhvVqGCHzzTy3sIeJFymwr
-sBbmg2OAUNLEMO6nwmocSdN2ClirfxqCzJOLSDE4QyS9BAH6EhY6UFcOaE0=
------END CERTIFICATE-----
-"""
-
- input_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+ input_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
# Test with the first certificate.
- cert_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.x509.pem', dir=self.tempdir)
- cert_file.write(VERITY_CERTIFICATE1)
- cert_file.close()
+ cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
- output_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
- zipfile.ZipFile(output_file.name, 'w') as output_zip:
- ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'r') as input_zip, \
+ zipfile.ZipFile(output_file, 'w') as output_zip:
+ ReplaceVerityKeyId(input_zip, output_zip, cert_file)
- with zipfile.ZipFile(output_file.name) as output_zip:
+ with zipfile.ZipFile(output_file) as output_zip:
self.assertEqual(BOOT_CMDLINE1, output_zip.read('BOOT/cmdline'))
# Test with the second certificate.
- with open(cert_file.name, 'w') as cert_file_fp:
- cert_file_fp.write(VERITY_CERTIFICATE2)
+ cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
- with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
- zipfile.ZipFile(output_file.name, 'w') as output_zip:
- ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+ with zipfile.ZipFile(input_file, 'r') as input_zip, \
+ zipfile.ZipFile(output_file, 'w') as output_zip:
+ ReplaceVerityKeyId(input_zip, output_zip, cert_file)
- with zipfile.ZipFile(output_file.name) as output_zip:
+ with zipfile.ZipFile(output_file) as output_zip:
self.assertEqual(BOOT_CMDLINE2, output_zip.read('BOOT/cmdline'))
def test_ReplaceVerityKeyId_no_veritykeyid(self):
@@ -184,16 +130,84 @@
"lpm_levels.sleep_disabled=1 msm_poweroff.download_mode=0 "
"loop.max_part=7\n")
- input_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+ input_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
- output_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
- zipfile.ZipFile(output_file.name, 'w') as output_zip:
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'r') as input_zip, \
+ zipfile.ZipFile(output_file, 'w') as output_zip:
ReplaceVerityKeyId(input_zip, output_zip, None)
- with zipfile.ZipFile(output_file.name) as output_zip:
+ with zipfile.ZipFile(output_file) as output_zip:
self.assertEqual(BOOT_CMDLINE, output_zip.read('BOOT/cmdline'))
+
+ def test_ReplaceCerts(self):
+ cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+ with open(cert1_path) as cert1_fp:
+ cert1 = cert1_fp.read()
+ cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+ with open(cert2_path) as cert2_fp:
+ cert2 = cert2_fp.read()
+ cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+ with open(cert3_path) as cert3_fp:
+ cert3 = cert3_fp.read()
+
+ # Replace cert1 with cert3.
+ input_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert1)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ output_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert3)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ common.OPTIONS.key_map = {
+ cert1_path[:-9] : cert3_path[:-9],
+ }
+
+ self.assertEqual(output_xml, ReplaceCerts(input_xml))
+
+ def test_ReplaceCerts_duplicateEntries(self):
+ cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+ with open(cert1_path) as cert1_fp:
+ cert1 = cert1_fp.read()
+ cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+ with open(cert2_path) as cert2_fp:
+ cert2 = cert2_fp.read()
+
+ # Replace cert1 with cert2, which leads to duplicate entries.
+ input_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert1)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ common.OPTIONS.key_map = {
+ cert1_path[:-9] : cert2_path[:-9],
+ }
+ self.assertRaises(AssertionError, ReplaceCerts, input_xml)
+
+ def test_ReplaceCerts_skipNonExistentCerts(self):
+ cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+ with open(cert1_path) as cert1_fp:
+ cert1 = cert1_fp.read()
+ cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+ with open(cert2_path) as cert2_fp:
+ cert2 = cert2_fp.read()
+ cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+ with open(cert3_path) as cert3_fp:
+ cert3 = cert3_fp.read()
+
+ input_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert1)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ output_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert3)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ common.OPTIONS.key_map = {
+ cert1_path[:-9] : cert3_path[:-9],
+ 'non-existent' : cert3_path[:-9],
+ cert2_path[:-9] : 'non-existent',
+ }
+ self.assertEqual(output_xml, ReplaceCerts(input_xml))
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index ec53731..e64355b 100644
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -18,7 +18,11 @@
Utils for running unittests.
"""
+import os
import os.path
+import struct
+
+import common
def get_testdata_dir():
@@ -26,3 +30,67 @@
# The script dir is the one we want, which could be different from pwd.
current_dir = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_dir, 'testdata')
+
+
+def construct_sparse_image(chunks):
+ """Returns a sparse image file constructed from the given chunks.
+
+ From system/core/libsparse/sparse_format.h.
+ typedef struct sparse_header {
+ __le32 magic; // 0xed26ff3a
+ __le16 major_version; // (0x1) - reject images with higher major versions
+ __le16 minor_version; // (0x0) - allow images with higher minor versions
+ __le16 file_hdr_sz; // 28 bytes for first revision of the file format
+ __le16 chunk_hdr_sz; // 12 bytes for first revision of the file format
+ __le32 blk_sz; // block size in bytes, must be a multiple of 4 (4096)
+ __le32 total_blks; // total blocks in the non-sparse output image
+ __le32 total_chunks; // total chunks in the sparse input image
+ __le32 image_checksum; // CRC32 checksum of the original data, counting
+ // "don't care" as 0. Standard 802.3 polynomial,
+ // use a Public Domain table implementation
+ } sparse_header_t;
+
+ typedef struct chunk_header {
+ __le16 chunk_type; // 0xCAC1 -> raw; 0xCAC2 -> fill;
+ // 0xCAC3 -> don't care
+ __le16 reserved1;
+ __le32 chunk_sz; // in blocks in output image
+ __le32 total_sz; // in bytes of chunk input file including chunk header
+ // and data
+ } chunk_header_t;
+
+ Args:
+ chunks: A list of chunks to be written. Each entry should be a tuple of
+ (chunk_type, block_number).
+
+ Returns:
+ Filename of the created sparse image.
+ """
+ SPARSE_HEADER_MAGIC = 0xED26FF3A
+ SPARSE_HEADER_FORMAT = "<I4H4I"
+ CHUNK_HEADER_FORMAT = "<2H2I"
+
+ sparse_image = common.MakeTempFile(prefix='sparse-', suffix='.img')
+ with open(sparse_image, 'wb') as fp:
+ fp.write(struct.pack(
+ SPARSE_HEADER_FORMAT, SPARSE_HEADER_MAGIC, 1, 0, 28, 12, 4096,
+ sum(chunk[1] for chunk in chunks),
+ len(chunks), 0))
+
+ for chunk in chunks:
+ data_size = 0
+ if chunk[0] == 0xCAC1:
+ data_size = 4096 * chunk[1]
+ elif chunk[0] == 0xCAC2:
+ data_size = 4
+ elif chunk[0] == 0xCAC3:
+ pass
+ else:
+ assert False, "Unsupported chunk type: {}".format(chunk[0])
+
+ fp.write(struct.pack(
+ CHUNK_HEADER_FORMAT, chunk[0], 0, chunk[1], data_size + 12))
+ if data_size != 0:
+ fp.write(os.urandom(data_size))
+
+ return sparse_image
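
The 28- and 12-byte header sizes hard-coded above follow directly from the struct formats; a quick check:

    import struct

    # sparse_header_t: 4 + 4*2 + 4*4 = 28 bytes, matching file_hdr_sz.
    assert struct.calcsize('<I4H4I') == 28
    # chunk_header_t: 2*2 + 2*4 = 12 bytes, matching chunk_hdr_sz.
    assert struct.calcsize('<2H2I') == 12
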
diff --git a/tools/releasetools/testdata/media.x509.pem b/tools/releasetools/testdata/media.x509.pem
new file mode 100644
index 0000000..98cd443
--- /dev/null
+++ b/tools/releasetools/testdata/media.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJAPK5jmEjVyxOMA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMzQwNTdaFw0zNTA5MDEyMzQwNTdaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAK4lDFoW75f8KGmsZRsyF8w2ug6GlkFo1YoE
+n0DOhYZxI6P/tPbZScM88to6BcI+rKpX2AOImxdZvPWefG8hiQriUIW37VaqYmwJ
+ie+czTY2LKDo0blgP9TYModnkmzMCQxot3Wuf/MJNMw2nvKFWiZn3wxmf9DHz12O
+umVYBnNzA7tiRybquu37cvB+16dqs8uaOBxLfc2AmxQNiR8AITvkAfWNagamHq3D
+qcLxxlZyhbCa4JNCpm+kIer5Ot91c6AowzHXBgGrOvfMhAM+znx3KjpbhrDb6dd3
+w6SKqYAe3O4ngVifRNnkETl5YAV2qZQQuoEJElna2YxsaP94S48CAQOjgfwwgfkw
+HQYDVR0OBBYEFMopPKqLwO0+VC7vQgWiv/K1fk11MIHJBgNVHSMEgcEwgb6AFMop
+PKqLwO0+VC7vQgWiv/K1fk11oYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAPK5jmEjVyxOMAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAITelRbV5KhyF6c9qEhwSPUzc6X3
+M/OQ1hvfPMnlJRYlv8qnwxWcriddFyqa4eh21UWBJ6xUL2gpDdUQwAKdj1Hg7hVr
+e3tazbOUJBuOx4t05cQsXK+uFWyvW9GZojonUk2gct6743hGSlM2MLDk0P+34I7L
+cB+ttjecdEZ/bgDG7YiFlTgHkgOHVgB4csjjAHr0I6V6LKs6KChptkxLe9X8GH0K
+fiQVll1ark4Hpt91G0p16Xk8kYphK4HNC2KK7gFo3ETkexDTWTJghJ1q321yfcJE
+RMIh0/nsw2jK0HmZ8rgQW8HyDTjUEGbMFBHCV6lupDSfV0ZWVQfk6AIKGoE=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/platform.x509.pem b/tools/releasetools/testdata/platform.x509.pem
new file mode 100644
index 0000000..087f02e
--- /dev/null
+++ b/tools/releasetools/testdata/platform.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJALOZgIbQVs/6MA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMjQwNTBaFw0zNTA5MDEyMjQwNTBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAJx4BZKsDV04HN6qZezIpgBuNkgMbXIHsSAR
+vlCGOqvitV0Amt9xRtbyICKAx81Ne9smJDuKgGwms0sTdSOkkmgiSQTcAUk+fArP
+GgXIdPabA3tgMJ2QdNJCgOFrrSqHNDYZUer3KkgtCbIEsYdeEqyYwap3PWgAuer9
+5W1Yvtjo2hb5o2AJnDeoNKbf7be2tEoEngeiafzPLFSW8s821k35CjuNjzSjuqtM
+9TNxqydxmzulh1StDFP8FOHbRdUeI0+76TybpO35zlQmE1DsU1YHv2mi/0qgfbX3
+6iANCabBtJ4hQC+J7RGQiTqrWpGA8VLoL4WkV1PPX8GQccXuyCcCAQOjgfwwgfkw
+HQYDVR0OBBYEFE/koLPdnLop9x1yh8Tnw48ghsKZMIHJBgNVHSMEgcEwgb6AFE/k
+oLPdnLop9x1yh8Tnw48ghsKZoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJALOZgIbQVs/6MAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAFclUbjZOh9z3g9tRp+G2tZwFAAp
+PIigzXzXeLc9r8wZf6t25iEuVsHHYc/EL9cz3lLFCuCIFM78CjtaGkNGBU2Cnx2C
+tCsgSL+ItdFJKe+F9g7dEtctVWV+IuPoXQTIMdYT0Zk4u4mCJH+jISVroS0dao+S
+6h2xw3Mxe6DAN/DRr/ZFrvIkl5+6bnoUvAJccbmBOM7z3fwFlhfPJIRc97QNY4L3
+J17XOElatuWTG5QhdlxJG3L7aOCA29tYwgKdNHyLMozkPvaosVUz7fvpib1qSN1L
+IC7alMarjdW4OZID2q4u1EYjLk/pvZYTlMYwDlE448/Shebk5INTjLixs1c=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/verity.x509.pem b/tools/releasetools/testdata/verity.x509.pem
new file mode 100644
index 0000000..86399c3
--- /dev/null
+++ b/tools/releasetools/testdata/verity.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
+6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
+fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
+T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
+AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
+jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
+HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
+oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
+NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
+JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
+dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
+UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
+yttuAJAEAymk1mipd9+zp38=
+-----END CERTIFICATE-----