Merge "Add new API file names"
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 7e23ee0..5ab64b3 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -456,6 +456,13 @@
$(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/with-local/)
$(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/no-local/)
+# Remove legacy VINTF metadata files
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/compatibility_matrix.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/compatibility_matrix.xml)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/core/config.mk b/core/config.mk
index e1d1b3d..b11e9fd 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -742,6 +742,19 @@
APICHECK_COMMAND := $(APICHECK) -JXmx1024m -J"classpath $(APICHECK_CLASSPATH)"
+# Boolean variable determining if the whitelist for compatible properties is enabled
+PRODUCT_COMPATIBLE_PROPERTY := false
+ifneq ($(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE),)
+ PRODUCT_COMPATIBLE_PROPERTY := $(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+ #$(warning no product shipping level defined)
+else ifneq ($(call math_lt,27,$(PRODUCT_SHIPPING_API_LEVEL)),)
+ PRODUCT_COMPATIBLE_PROPERTY := true
+endif
+
+.KATI_READONLY := \
+ PRODUCT_COMPATIBLE_PROPERTY
+
# Boolean variable determining if Treble is fully enabled
PRODUCT_FULL_TREBLE := false
ifneq ($(PRODUCT_FULL_TREBLE_OVERRIDE),)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 79e72c1..af2355a 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -7,11 +7,14 @@
# Set USE_DEX2OAT_DEBUG to false for only building non-debug versions.
ifeq ($(USE_DEX2OAT_DEBUG),false)
DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oat$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoat$(HOST_EXECUTABLE_SUFFIX)
else
DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoatd$(HOST_EXECUTABLE_SUFFIX)
endif
DEX2OAT_DEPENDENCY += $(DEX2OAT)
+PATCHOAT_DEPENDENCY += $(PATCHOAT)
# Use the first preloaded-classes file in PRODUCT_COPY_FILES.
PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
@@ -87,14 +90,17 @@
# is converted into to boot.art (to match the legacy assumption that boot.art
# exists), and the rest are converted to boot-<name>.art.
# In addition, each .art file has an associated .oat file.
-LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).oat boot-$(jar).vdex)
-LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.oat boot.vdex
+LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).art.rel boot-$(jar).oat boot-$(jar).vdex)
+LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.art.rel boot.oat boot.vdex
# If we use a boot image profile.
my_use_profile_for_boot_image := $(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE)
ifeq (,$(my_use_profile_for_boot_image))
-# If not set, use the default.
-my_use_profile_for_boot_image := false
+# If not set, set the default to true if we are not a PDK build. PDK builds
+# can't build the profile since they don't have frameworks/base.
+ifneq (true,$(TARGET_BUILD_PDK))
+my_use_profile_for_boot_image := true
+endif
endif
ifeq (true,$(my_use_profile_for_boot_image))
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 8b71198..ad8f18d 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -73,14 +73,16 @@
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_BOOT_IMAGE_FLAGS := $(my_boot_image_flags)
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_IMAGE_LOCATION := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
# Use dex2oat debug version for better error reporting
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(my_out_boot_image_profile_location)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(PATCHOAT_DEPENDENCY) $(my_out_boot_image_profile_location)
@echo "target dex2oat: $@"
@mkdir -p $(dir $@)
@mkdir -p $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))
- @rm -f $(dir $@)/*.art $(dir $@)/*.oat
+ @rm -f $(dir $@)/*.art $(dir $@)/*.oat $(dir $@)/*.art.rel
@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
+ @rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art.rel
$(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
--runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
$(PRIVATE_BOOT_IMAGE_FLAGS) \
@@ -99,6 +101,11 @@
--multi-image --no-inline-from=core-oj.jar \
--abort-on-hard-verifier-error \
--abort-on-soft-verifier-error \
- $(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS)
+ $(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) && \
+ ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
+ --input-image-location=$(PRIVATE_IMAGE_LOCATION) \
+ --output-image-relocation-file=$@.rel \
+ --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
+ --base-offset-delta=0x10000000
endif
diff --git a/core/main.mk b/core/main.mk
index fe178da..93c8d3b 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -205,6 +205,14 @@
variables like PRODUCT_SEPOLICY_SPLIT should be used until that is \
possible.)
+# Sets ro.actionable_compatible_property.enabled so that it is known at runtime whether
+# the whitelist of actionable compatible properties is enabled or not.
+ifeq ($(PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE),true)
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=false
+else
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=${PRODUCT_COMPATIBLE_PROPERTY}
+endif
+
# -----------------------------------------------------------------
###
### In this section we set up the things that are different
diff --git a/core/product.mk b/core/product.mk
index f15f6b3..77f78a6 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -149,6 +149,8 @@
PRODUCT_ADB_KEYS \
PRODUCT_CFI_INCLUDE_PATHS \
PRODUCT_CFI_EXCLUDE_PATHS \
+ PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE \
+ PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE \
define dump-product
$(info ==== $(1) ====)\
diff --git a/core/product_config.mk b/core/product_config.mk
index 5b0e257..2cd8016 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -484,3 +484,11 @@
# which Soong namespaces to export to Make
PRODUCT_SOONG_NAMESPACES :=
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SOONG_NAMESPACES))
+
+# A flag to override PRODUCT_COMPATIBLE_PROPERTY
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := \
+ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE))
+
+# Whether the whitelist of actionable compatible properties should be disabled or not
+PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE := \
+ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE))
diff --git a/target/board/Android.mk b/target/board/Android.mk
index ae6be92..1c9edb8 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -37,7 +37,7 @@
LOCAL_MODULE := device_manifest.xml
LOCAL_MODULE_STEM := manifest.xml
LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc/vintf
GEN := $(local-generated-sources-dir)/manifest.xml
$(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE)
@@ -53,18 +53,27 @@
BUILT_VENDOR_MANIFEST := $(LOCAL_BUILT_MODULE)
endif
+# VNDK Version in device compatibility matrix and framework manifest
+ifeq ($(BOARD_VNDK_VERSION),current)
+VINTF_VNDK_VERSION := $(PLATFORM_VNDK_VERSION)
+else
+VINTF_VNDK_VERSION := $(BOARD_VNDK_VERSION)
+endif
+
# Device Compatibility Matrix
ifdef DEVICE_MATRIX_FILE
include $(CLEAR_VARS)
LOCAL_MODULE := device_compatibility_matrix.xml
LOCAL_MODULE_STEM := compatibility_matrix.xml
LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc/vintf
GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
+
+$(GEN): PRIVATE_VINTF_VNDK_VERSION := $(VINTF_VNDK_VERSION)
$(GEN): $(DEVICE_MATRIX_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
- # TODO(b/37342627): put BOARD_VNDK_VERSION & BOARD_VNDK_LIBRARIES into device matrix.
- $(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
+ REQUIRED_VNDK_VERSION=$(PRIVATE_VINTF_VNDK_VERSION) \
+ $(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
LOCAL_PREBUILT_MODULE_FILE := $(GEN)
include $(BUILD_PREBUILT)
@@ -76,7 +85,7 @@
LOCAL_MODULE := framework_manifest.xml
LOCAL_MODULE_STEM := manifest.xml
LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_PATH := $(TARGET_OUT)
+LOCAL_MODULE_PATH := $(TARGET_OUT)/etc/vintf
GEN := $(local-generated-sources-dir)/manifest.xml
@@ -89,9 +98,12 @@
endif
endif
+$(GEN): PRIVATE_VINTF_VNDK_VERSION := $(VINTF_VNDK_VERSION)
$(GEN): PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES := $(FRAMEWORK_MANIFEST_INPUT_FILES)
$(GEN): $(FRAMEWORK_MANIFEST_INPUT_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
- BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) $(HOST_OUT_EXECUTABLES)/assemble_vintf \
+ BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
+ PROVIDED_VNDK_VERSIONS="$(PRIVATE_VINTF_VNDK_VERSION) $(PRODUCT_EXTRA_VNDK_VERSIONS)" \
+ $(HOST_OUT_EXECUTABLES)/assemble_vintf \
-i $(call normalize-path-list,$(PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES)) \
-o $@ $(PRIVATE_FLAGS)
@@ -99,3 +111,4 @@
include $(BUILD_PREBUILT)
BUILT_SYSTEM_MANIFEST := $(LOCAL_BUILT_MODULE)
+VINTF_VNDK_VERSION :=
diff --git a/target/board/generic_arm64_a/BoardConfig.mk b/target/board/generic_arm64_a/BoardConfig.mk
index 8f4043f..34a8ac0 100644
--- a/target/board/generic_arm64_a/BoardConfig.mk
+++ b/target/board/generic_arm64_a/BoardConfig.mk
@@ -23,7 +23,7 @@
TARGET_CPU_VARIANT := generic
TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_CPU_VARIANT := cortex-a15
+TARGET_2ND_CPU_VARIANT := generic
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index e0d7372..00afee6 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -23,10 +23,10 @@
TARGET_CPU_VARIANT := generic
TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_CPU_VARIANT := cortex-a15
+TARGET_2ND_CPU_VARIANT := generic
# Enable A/B update
TARGET_NO_RECOVERY := true
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index f8fb88f..a73a31b 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -11,6 +11,10 @@
TARGET_ARCH_VARIANT := x86
TARGET_PRELINK_MODULE := false
+# The emulator now uses a 64-bit kernel to run the 32-bit x86 image.
+#
+TARGET_USES_64_BIT_BINDER := true
+
# The IA emulator (qemu) uses the Goldfish devices
HAVE_HTC_AUDIO_DRIVER := true
BOARD_USES_GENERIC_AUDIO := true
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 03203ce..811c330 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -24,7 +24,7 @@
PRODUCT_COPY_FILES += \
development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
- prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+ prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
include $(SRC_TARGET_DIR)/product/full_x86.mk
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index b252349..16599cb 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -85,6 +85,7 @@
telephony-common \
uiautomator \
uncrypt \
+ vndk_snapshot_package \
voip-common \
webview \
webview_zygote \
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 1e82773..b9820d3 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -24,7 +24,7 @@
PRODUCT_COPY_FILES += \
development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
- prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+ prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
diff --git a/target/product/vndk/Android.mk b/target/product/vndk/Android.mk
index a134d02..93aaf37 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/vndk/Android.mk
@@ -77,26 +77,19 @@
@chmod a+x $@
include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_current
+LOCAL_MODULE := vndk_package
LOCAL_REQUIRED_MODULES := \
$(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
$(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
$(LLNDK_LIBRARIES) \
llndk.libraries.txt \
vndksp.libraries.txt
-
include $(BUILD_PHONY_PACKAGE)
include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_package
-ifeq (current,$(BOARD_VNDK_VERSION))
+LOCAL_MODULE := vndk_snapshot_package
LOCAL_REQUIRED_MODULES := \
- vndk_current
-else
-LOCAL_REQUIRED_MODULES := \
- vndk_v$(BOARD_VNDK_VERSION)_$(TARGET_ARCH)
-endif
-LOCAL_REQUIRED_MODULES += \
$(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH))
include $(BUILD_PHONY_PACKAGE)
+
endif # BOARD_VNDK_VERSION is set
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 9601d88..a9863bc 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -52,7 +52,6 @@
import shutil
import subprocess
import sys
-import tempfile
import uuid
import zipfile
@@ -75,6 +74,10 @@
OPTIONS.is_signing = False
+# Partitions that should have their care_map added to META/care_map.txt.
+PARTITIONS_WITH_CARE_MAP = ('system', 'vendor')
+
+
class OutputFile(object):
def __init__(self, output_zip, input_dir, prefix, name):
self._output_zip = output_zip
@@ -94,13 +97,10 @@
def GetCareMap(which, imgname):
- """Generate care_map of system (or vendor) partition"""
-
- assert which in ("system", "vendor")
+ """Generates the care_map for the given partition."""
+ assert which in PARTITIONS_WITH_CARE_MAP
simg = sparse_img.SparseImage(imgname)
- care_map_list = [which]
-
care_map_ranges = simg.care_map
key = which + "_adjusted_partition_size"
adjusted_blocks = OPTIONS.info_dict.get(key)
@@ -109,8 +109,7 @@
care_map_ranges = care_map_ranges.intersect(rangelib.RangeSet(
"0-%d" % (adjusted_blocks,)))
- care_map_list.append(care_map_ranges.to_string_raw())
- return care_map_list
+ return [which, care_map_ranges.to_string_raw()]
def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
@@ -462,6 +461,126 @@
img.Write()
+def AddRadioImagesForAbOta(output_zip, ab_partitions):
+ """Adds the radio images needed for A/B OTA to the output file.
+
+ It parses the list of A/B partitions, looks for the missing ones from RADIO/
+ or VENDOR_IMAGES/ dirs, and copies them to IMAGES/ of the output file (or
+ dir).
+
+ It also ensures that, on return, all the listed A/B partitions have their
+ images available under IMAGES/.
+
+ Args:
+ output_zip: The output zip file (needs to be already open), or None to
+ write images to OPTIONS.input_tmp/.
+ ab_partitions: The list of A/B partitions.
+
+ Raises:
+ AssertionError: If it can't find an image.
+ """
+ for partition in ab_partitions:
+ img_name = partition.strip() + ".img"
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+ if os.path.exists(prebuilt_path):
+ print("%s already exists, no need to overwrite..." % (img_name,))
+ continue
+
+ img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+ if os.path.exists(img_radio_path):
+ if output_zip:
+ common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+ else:
+ shutil.copy(img_radio_path, prebuilt_path)
+ continue
+
+ # Walk through VENDOR_IMAGES/ since files could be under subdirs.
+ img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
+ for root, _, files in os.walk(img_vendor_dir):
+ if img_name in files:
+ if output_zip:
+ common.ZipWrite(output_zip, os.path.join(root, img_name),
+ "IMAGES/" + img_name)
+ else:
+ shutil.copy(os.path.join(root, img_name), prebuilt_path)
+ break
+
+ # Assert that the image is present under IMAGES/ now.
+ if output_zip:
+ # Zip spec says: All slashes MUST be forward slashes.
+ img_path = 'IMAGES/' + img_name
+ assert img_path in output_zip.namelist(), "cannot find " + img_name
+ else:
+ img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+ assert os.path.exists(img_path), "cannot find " + img_name
+
+
+def AddCareMapTxtForAbOta(output_zip, ab_partitions, image_paths):
+ """Generates and adds care_map.txt for system and vendor partitions.
+
+ Args:
+ output_zip: The output zip file (needs to be already open), or None to
+ write images to OPTIONS.input_tmp/.
+ ab_partitions: The list of A/B partitions.
+ image_paths: A map from the partition name to the image path.
+ """
+ care_map_list = []
+ for partition in ab_partitions:
+ partition = partition.strip()
+ if partition not in PARTITIONS_WITH_CARE_MAP:
+ continue
+
+ verity_block_device = "{}_verity_block_device".format(partition)
+ avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
+ if (verity_block_device in OPTIONS.info_dict or
+ OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
+ image_path = image_paths[partition]
+ assert os.path.exists(image_path)
+ care_map_list += GetCareMap(partition, image_path)
+
+ if care_map_list:
+ care_map_path = "META/care_map.txt"
+ if output_zip and care_map_path not in output_zip.namelist():
+ common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
+ else:
+ with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
+ fp.write('\n'.join(care_map_list))
+ if output_zip:
+ OPTIONS.replace_updated_files_list.append(care_map_path)
+
+
+def AddPackRadioImages(output_zip, images):
+ """Copies images listed in META/pack_radioimages.txt from RADIO/ to IMAGES/.
+
+ Args:
+ output_zip: The output zip file (needs to be already open), or None to
+ write images to OPTIONS.input_tmp/.
+ images: A list of image names.
+
+ Raises:
+ AssertionError: If a listed image can't be found.
+ """
+ for image in images:
+ img_name = image.strip()
+ _, ext = os.path.splitext(img_name)
+ if not ext:
+ img_name += ".img"
+
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+ if os.path.exists(prebuilt_path):
+ print("%s already exists, no need to overwrite..." % (img_name,))
+ continue
+
+ img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+ assert os.path.exists(img_radio_path), \
+ "Failed to find %s at %s" % (img_name, img_radio_path)
+
+ if output_zip:
+ common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+ else:
+ shutil.copy(img_radio_path, prebuilt_path)
+
+
def ReplaceUpdatedFiles(zip_filename, files_list):
"""Updates all the ZIP entries listed in files_list.
@@ -589,12 +708,12 @@
recovery_two_step_image.AddToZip(output_zip)
banner("system")
- partitions['system'] = system_img_path = AddSystem(
+ partitions['system'] = AddSystem(
output_zip, recovery_img=recovery_image, boot_img=boot_image)
if has_vendor:
banner("vendor")
- partitions['vendor'] = vendor_img_path = AddVendor(output_zip)
+ partitions['vendor'] = AddVendor(output_zip)
if has_system_other:
banner("system_other")
@@ -618,95 +737,28 @@
banner("vbmeta")
AddVBMeta(output_zip, partitions)
- # For devices using A/B update, copy over images from RADIO/ and/or
- # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
- # images ready under IMAGES/. All images should have '.img' as extension.
banner("radio")
- ab_partitions = os.path.join(OPTIONS.input_tmp, "META", "ab_partitions.txt")
- if os.path.exists(ab_partitions):
- with open(ab_partitions, 'r') as f:
- lines = f.readlines()
- # For devices using A/B update, generate care_map for system and vendor
- # partitions (if present), then write this file to target_files package.
- care_map_list = []
- for line in lines:
- if line.strip() == "system" and (
- "system_verity_block_device" in OPTIONS.info_dict or
- OPTIONS.info_dict.get("avb_system_hashtree_enable") == "true"):
- assert os.path.exists(system_img_path)
- care_map_list += GetCareMap("system", system_img_path)
- if line.strip() == "vendor" and (
- "vendor_verity_block_device" in OPTIONS.info_dict or
- OPTIONS.info_dict.get("avb_vendor_hashtree_enable") == "true"):
- assert os.path.exists(vendor_img_path)
- care_map_list += GetCareMap("vendor", vendor_img_path)
+ ab_partitions_txt = os.path.join(OPTIONS.input_tmp, "META",
+ "ab_partitions.txt")
+ if os.path.exists(ab_partitions_txt):
+ with open(ab_partitions_txt, 'r') as f:
+ ab_partitions = f.readlines()
- img_name = line.strip() + ".img"
- prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
- if os.path.exists(prebuilt_path):
- print("%s already exists, no need to overwrite..." % (img_name,))
- continue
+ # For devices using A/B update, copy over images from RADIO/ and/or
+ # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
+ # images ready under IMAGES/. All images should have '.img' as extension.
+ AddRadioImagesForAbOta(output_zip, ab_partitions)
- img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
- if os.path.exists(img_radio_path):
- if output_zip:
- common.ZipWrite(output_zip, img_radio_path,
- os.path.join("IMAGES", img_name))
- else:
- shutil.copy(img_radio_path, prebuilt_path)
- else:
- img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
- for root, _, files in os.walk(img_vendor_dir):
- if img_name in files:
- if output_zip:
- common.ZipWrite(output_zip, os.path.join(root, img_name),
- os.path.join("IMAGES", img_name))
- else:
- shutil.copy(os.path.join(root, img_name), prebuilt_path)
- break
-
- if output_zip:
- # Zip spec says: All slashes MUST be forward slashes.
- img_path = 'IMAGES/' + img_name
- assert img_path in output_zip.namelist(), "cannot find " + img_name
- else:
- img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
- assert os.path.exists(img_path), "cannot find " + img_name
-
- if care_map_list:
- care_map_path = "META/care_map.txt"
- if output_zip and care_map_path not in output_zip.namelist():
- common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
- else:
- with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
- fp.write('\n'.join(care_map_list))
- if output_zip:
- OPTIONS.replace_updated_files_list.append(care_map_path)
+ # Generate care_map.txt for system and vendor partitions (if present), then
+ # write this file to target_files package.
+ AddCareMapTxtForAbOta(output_zip, ab_partitions, partitions)
# Radio images that need to be packed into IMAGES/, and product-img.zip.
- pack_radioimages = os.path.join(
+ pack_radioimages_txt = os.path.join(
OPTIONS.input_tmp, "META", "pack_radioimages.txt")
- if os.path.exists(pack_radioimages):
- with open(pack_radioimages, 'r') as f:
- lines = f.readlines()
- for line in lines:
- img_name = line.strip()
- _, ext = os.path.splitext(img_name)
- if not ext:
- img_name += ".img"
- prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
- if os.path.exists(prebuilt_path):
- print("%s already exists, no need to overwrite..." % (img_name,))
- continue
-
- img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
- assert os.path.exists(img_radio_path), \
- "Failed to find %s at %s" % (img_name, img_radio_path)
- if output_zip:
- common.ZipWrite(output_zip, img_radio_path,
- os.path.join("IMAGES", img_name))
- else:
- shutil.copy(img_radio_path, prebuilt_path)
+ if os.path.exists(pack_radioimages_txt):
+ with open(pack_radioimages_txt, 'r') as f:
+ AddPackRadioImages(output_zip, f.readlines())
if output_zip:
common.ZipClose(output_zip)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index ed60188..1f5caf3 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -325,8 +325,10 @@
else:
return True, unsparse_image_path
inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
- (_, exit_code) = RunCommand(inflate_command)
+ (inflate_output, exit_code) = RunCommand(inflate_command)
if exit_code != 0:
+ print("Error: '%s' failed with exit code %d:\n%s" % (
+ inflate_command, exit_code, inflate_output))
os.remove(unsparse_image_path)
return False, None
return True, unsparse_image_path
@@ -569,12 +571,12 @@
build_command.extend(["-c", prop_dict["selinux_fc"]])
if "block_list" in prop_dict:
build_command.extend(["-B", prop_dict["block_list"]])
+ if "squashfs_block_size" in prop_dict:
+ build_command.extend(["-b", prop_dict["squashfs_block_size"]])
if "squashfs_compressor" in prop_dict:
build_command.extend(["-z", prop_dict["squashfs_compressor"]])
if "squashfs_compressor_opt" in prop_dict:
build_command.extend(["-zo", prop_dict["squashfs_compressor_opt"]])
- if "squashfs_block_size" in prop_dict:
- build_command.extend(["-b", prop_dict["squashfs_block_size"]])
if prop_dict.get("squashfs_disable_4k_align") == "true":
build_command.extend(["-a"])
elif fs_type.startswith("f2fs"):
@@ -607,7 +609,8 @@
(mkfs_output, exit_code) = RunCommand(build_command)
if exit_code != 0:
- print("Error: '%s' failed with exit code %d" % (build_command, exit_code))
+ print("Error: '%s' failed with exit code %d:\n%s" % (
+ build_command, exit_code, mkfs_output))
return False
# Check if there's enough headroom space available for ext4 image.
@@ -654,13 +657,13 @@
# Run e2fsck on the inflated image file
e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
- (_, exit_code) = RunCommand(e2fsck_command)
+ (e2fsck_output, exit_code) = RunCommand(e2fsck_command)
os.remove(unsparse_image)
if exit_code != 0:
- print("Error: '%s' failed with exit code %d" % (e2fsck_command,
- exit_code))
+ print("Error: '%s' failed with exit code %d:\n%s" % (
+ e2fsck_command, exit_code, e2fsck_output))
return False
return True
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index c4877e0..db63fd3 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -53,11 +53,13 @@
import common
-# Work around a bug in python's zipfile module that prevents opening
-# of zipfiles if any entry has an extra field of between 1 and 3 bytes
-# (which is common with zipaligned APKs). This overrides the
-# ZipInfo._decodeExtra() method (which contains the bug) with an empty
-# version (since we don't need to decode the extra field anyway).
+# Work around a bug in Python's zipfile module that prevents opening of zipfiles
+# if any entry has an extra field of between 1 and 3 bytes (which is common with
+# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
+# contains the bug) with an empty version (since we don't need to decode the
+# extra field anyway).
+# Issue #14315: https://bugs.python.org/issue14315, fixed in Python 2.7.8 and
+# Python 3.5.0 alpha 1.
class MyZipInfo(zipfile.ZipInfo):
def _decodeExtra(self):
pass
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index f474a6c..88cb741 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -527,12 +527,7 @@
if target_info.oem_props and not OPTIONS.oem_no_mount:
target_info.WriteMountOemScript(script)
- metadata = {
- "post-build": target_info.fingerprint,
- "pre-device": target_info.device,
- "post-timestamp": target_info.GetBuildProp("ro.build.date.utc"),
- "ota-type" : "BLOCK",
- }
+ metadata = GetPackageMetadata(target_info)
device_specific = common.DeviceSpecificParams(
input_zip=input_zip,
@@ -711,6 +706,57 @@
metadata["post-timestamp"] = post_timestamp
+def GetPackageMetadata(target_info, source_info=None):
+ """Generates and returns the metadata dict.
+
+ It generates a dict() that contains the info to be written into an OTA
+ package (META-INF/com/android/metadata). It also handles the detection of
+ downgrade / timestamp override / data wipe based on the global options.
+
+ Args:
+ target_info: The BuildInfo instance that holds the target build info.
+ source_info: The BuildInfo instance that holds the source build info, or
+ None if generating full OTA.
+
+ Returns:
+ A dict to be written into package metadata entry.
+ """
+ assert isinstance(target_info, BuildInfo)
+ assert source_info is None or isinstance(source_info, BuildInfo)
+
+ metadata = {
+ 'post-build' : target_info.fingerprint,
+ 'post-build-incremental' : target_info.GetBuildProp(
+ 'ro.build.version.incremental'),
+ }
+
+ if target_info.is_ab:
+ metadata['ota-type'] = 'AB'
+ metadata['ota-required-cache'] = '0'
+ else:
+ metadata['ota-type'] = 'BLOCK'
+
+ if OPTIONS.wipe_user_data:
+ metadata['ota-wipe'] = 'yes'
+
+ is_incremental = source_info is not None
+ if is_incremental:
+ metadata['pre-build'] = source_info.fingerprint
+ metadata['pre-build-incremental'] = source_info.GetBuildProp(
+ 'ro.build.version.incremental')
+ metadata['pre-device'] = source_info.device
+ else:
+ metadata['pre-device'] = target_info.device
+
+ # Detect downgrades, or fill in the post-timestamp.
+ if is_incremental:
+ HandleDowngradeMetadata(metadata, target_info, source_info)
+ else:
+ metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+
+ return metadata
+
+
def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
@@ -728,12 +774,7 @@
if not OPTIONS.oem_no_mount:
source_info.WriteMountOemScript(script)
- metadata = {
- "pre-device": source_info.device,
- "ota-type": "BLOCK",
- }
-
- HandleDowngradeMetadata(metadata, target_info, source_info)
+ metadata = GetPackageMetadata(target_info, source_info)
device_specific = common.DeviceSpecificParams(
source_zip=source_zip,
@@ -745,13 +786,6 @@
metadata=metadata,
info_dict=source_info)
- metadata["pre-build"] = source_info.fingerprint
- metadata["post-build"] = target_info.fingerprint
- metadata["pre-build-incremental"] = source_info.GetBuildProp(
- "ro.build.version.incremental")
- metadata["post-build-incremental"] = target_info.GetBuildProp(
- "ro.build.version.incremental")
-
source_boot = common.GetBootableImage(
"/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
target_boot = common.GetBootableImage(
@@ -1070,24 +1104,7 @@
source_info = None
# Metadata to comply with Android OTA package format.
- metadata = {
- "post-build" : target_info.fingerprint,
- "post-build-incremental" : target_info.GetBuildProp(
- "ro.build.version.incremental"),
- "ota-required-cache" : "0",
- "ota-type" : "AB",
- }
-
- if source_file is not None:
- metadata["pre-device"] = source_info.device
- metadata["pre-build"] = source_info.fingerprint
- metadata["pre-build-incremental"] = source_info.GetBuildProp(
- "ro.build.version.incremental")
-
- HandleDowngradeMetadata(metadata, target_info, source_info)
- else:
- metadata["pre-device"] = target_info.device
- metadata["post-timestamp"] = target_info.GetBuildProp("ro.build.date.utc")
+ metadata = GetPackageMetadata(target_info, source_info)
# 1. Generate payload.
payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
@@ -1349,16 +1366,34 @@
assert not (OPTIONS.downgrade and OPTIONS.timestamp), \
"Cannot have --downgrade AND --override_timestamp both"
- # Load the dict file from the zip directly to have a peek at the OTA type.
- # For packages using A/B update, unzipping is not needed.
+ # Load the build info dicts from the zip directly or the extracted input
+ # directory. We don't need to unzip the entire target-files zips, because they
+ # won't be needed for A/B OTAs (brillo_update_payload does that on its own).
+ # When loading the info dicts, we don't need to provide the second parameter
+ # to common.LoadInfoDict(). Specifying the second parameter allows replacing
+ # some properties with their actual paths, such as 'selinux_fc',
+ # 'ramdisk_dir', which won't be used during OTA generation.
if OPTIONS.extracted_input is not None:
- OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input,
- OPTIONS.extracted_input)
+ OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
else:
- input_zip = zipfile.ZipFile(args[0], "r")
- OPTIONS.info_dict = common.LoadInfoDict(input_zip)
- common.ZipClose(input_zip)
+ with zipfile.ZipFile(args[0], 'r') as input_zip:
+ OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+ if OPTIONS.verbose:
+ print("--- target info ---")
+ common.DumpInfoDict(OPTIONS.info_dict)
+
+ # Load the source build dict if applicable.
+ if OPTIONS.incremental_source is not None:
+ OPTIONS.target_info_dict = OPTIONS.info_dict
+ with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+ OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+
+ if OPTIONS.verbose:
+ print("--- source info ---")
+ common.DumpInfoDict(OPTIONS.source_info_dict)
+
+ # Load OEM dicts if provided.
OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
ab_update = OPTIONS.info_dict.get("ab_update") == "true"
@@ -1375,20 +1410,6 @@
OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
if ab_update:
- if OPTIONS.incremental_source is not None:
- OPTIONS.target_info_dict = OPTIONS.info_dict
- source_zip = zipfile.ZipFile(OPTIONS.incremental_source, "r")
- OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
- common.ZipClose(source_zip)
-
- if OPTIONS.verbose:
- print("--- target info ---")
- common.DumpInfoDict(OPTIONS.info_dict)
-
- if OPTIONS.incremental_source is not None:
- print("--- source info ---")
- common.DumpInfoDict(OPTIONS.source_info_dict)
-
WriteABOTAPackageWithBrilloScript(
target_file=args[0],
output_file=args[1],
@@ -1397,49 +1418,45 @@
print("done.")
return
+ # Sanity check the loaded info dicts first.
+ if OPTIONS.info_dict.get("no_recovery") == "true":
+ raise common.ExternalError(
+ "--- target build has specified no recovery ---")
+
+ # Non-A/B OTAs rely on /cache partition to store temporary files.
+ cache_size = OPTIONS.info_dict.get("cache_size")
+ if cache_size is None:
+ print("--- can't determine the cache partition size ---")
+ OPTIONS.cache_size = cache_size
+
if OPTIONS.extra_script is not None:
OPTIONS.extra_script = open(OPTIONS.extra_script).read()
if OPTIONS.extracted_input is not None:
OPTIONS.input_tmp = OPTIONS.extracted_input
- OPTIONS.target_tmp = OPTIONS.input_tmp
- OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp,
- OPTIONS.input_tmp)
input_zip = zipfile.ZipFile(args[0], "r")
else:
print("unzipping target target-files...")
OPTIONS.input_tmp, input_zip = common.UnzipTemp(
args[0], UNZIP_PATTERN)
+ OPTIONS.target_tmp = OPTIONS.input_tmp
- OPTIONS.target_tmp = OPTIONS.input_tmp
- OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
-
- if OPTIONS.verbose:
- print("--- target info ---")
- common.DumpInfoDict(OPTIONS.info_dict)
-
- # If the caller explicitly specified the device-specific extensions
- # path via -s/--device_specific, use that. Otherwise, use
- # META/releasetools.py if it is present in the target target_files.
- # Otherwise, take the path of the file from 'tool_extensions' in the
- # info dict and look for that in the local filesystem, relative to
- # the current directory.
-
+ # If the caller explicitly specified the device-specific extensions path via
+ # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it
+ # is present in the target target_files. Otherwise, take the path of the file
+ # from 'tool_extensions' in the info dict and look for that in the local
+ # filesystem, relative to the current directory.
if OPTIONS.device_specific is None:
from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
if os.path.exists(from_input):
print("(using device-specific extensions from target_files)")
OPTIONS.device_specific = from_input
else:
- OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+ OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
if OPTIONS.device_specific is not None:
OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
- if OPTIONS.info_dict.get("no_recovery") == "true":
- raise common.ExternalError(
- "--- target build has specified no recovery ---")
-
# Set up the output zip. Create a temporary zip file if signing is needed.
if OPTIONS.no_signing:
if os.path.exists(args[1]):
@@ -1451,12 +1468,6 @@
output_zip = zipfile.ZipFile(temp_zip_file, "w",
compression=zipfile.ZIP_DEFLATED)
- # Non A/B OTAs rely on /cache partition to store temporary files.
- cache_size = OPTIONS.info_dict.get("cache_size", None)
- if cache_size is None:
- print("--- can't determine the cache partition size ---")
- OPTIONS.cache_size = cache_size
-
# Generate a full OTA.
if OPTIONS.incremental_source is None:
WriteFullOTAPackage(input_zip, output_zip)
@@ -1467,12 +1478,6 @@
OPTIONS.source_tmp, source_zip = common.UnzipTemp(
OPTIONS.incremental_source,
UNZIP_PATTERN)
- OPTIONS.target_info_dict = OPTIONS.info_dict
- OPTIONS.source_info_dict = common.LoadInfoDict(source_zip,
- OPTIONS.source_tmp)
- if OPTIONS.verbose:
- print("--- source info ---")
- common.DumpInfoDict(OPTIONS.source_info_dict)
WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
@@ -1482,6 +1487,7 @@
target_files_diff.recursiveDiff(
'', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
+ common.ZipClose(input_zip)
common.ZipClose(output_zip)
# Sign the generated zip package unless no_signing is specified.
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
new file mode 100644
index 0000000..e449ca8
--- /dev/null
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -0,0 +1,168 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import os.path
+import unittest
+import zipfile
+
+import common
+from add_img_to_target_files import AddPackRadioImages, AddRadioImagesForAbOta
+
+
+OPTIONS = common.OPTIONS
+
+
+class AddImagesToTargetFilesTest(unittest.TestCase):
+
+ def setUp(self):
+ OPTIONS.input_tmp = common.MakeTempDir()
+
+ def tearDown(self):
+ common.Cleanup()
+
+ @staticmethod
+ def _create_images(images, prefix):
+ """Creates images under OPTIONS.input_tmp/prefix."""
+ path = os.path.join(OPTIONS.input_tmp, prefix)
+ if not os.path.exists(path):
+ os.mkdir(path)
+
+ for image in images:
+ image_path = os.path.join(path, image + '.img')
+ with open(image_path, 'wb') as image_fp:
+ image_fp.write(image.encode())
+
+ images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+ if not os.path.exists(images_path):
+ os.mkdir(images_path)
+ return images, images_path
+
+ def test_AddRadioImagesForAbOta_imageExists(self):
+ """Tests the case with existing images under IMAGES/."""
+ images, images_path = self._create_images(['aboot', 'xbl'], 'IMAGES')
+ AddRadioImagesForAbOta(None, images)
+
+ for image in images:
+ self.assertTrue(
+ os.path.exists(os.path.join(images_path, image + '.img')))
+
+ def test_AddRadioImagesForAbOta_copyFromRadio(self):
+ """Tests the case that copies images from RADIO/."""
+ images, images_path = self._create_images(['aboot', 'xbl'], 'RADIO')
+ AddRadioImagesForAbOta(None, images)
+
+ for image in images:
+ self.assertTrue(
+ os.path.exists(os.path.join(images_path, image + '.img')))
+
+ def test_AddRadioImagesForAbOta_copyFromRadio_zipOutput(self):
+ images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+ # Set up the output zip.
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(output_file, 'w') as output_zip:
+ AddRadioImagesForAbOta(output_zip, images)
+
+ with zipfile.ZipFile(output_file, 'r') as verify_zip:
+ for image in images:
+ self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+ def test_AddRadioImagesForAbOta_copyFromVendorImages(self):
+ """Tests the case that copies images from VENDOR_IMAGES/."""
+ vendor_images_path = os.path.join(OPTIONS.input_tmp, 'VENDOR_IMAGES')
+ os.mkdir(vendor_images_path)
+
+ partitions = ['aboot', 'xbl']
+ for index, partition in enumerate(partitions):
+ subdir = os.path.join(vendor_images_path, 'subdir-{}'.format(index))
+ os.mkdir(subdir)
+
+ partition_image_path = os.path.join(subdir, partition + '.img')
+ with open(partition_image_path, 'wb') as partition_fp:
+ partition_fp.write(partition.encode())
+
+ # Set up the output dir.
+ images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+ os.mkdir(images_path)
+
+ AddRadioImagesForAbOta(None, partitions)
+
+ for partition in partitions:
+ self.assertTrue(
+ os.path.exists(os.path.join(images_path, partition + '.img')))
+
+ def test_AddRadioImagesForAbOta_missingImages(self):
+ images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+ self.assertRaises(AssertionError, AddRadioImagesForAbOta, None,
+ images + ['baz'])
+
+ def test_AddRadioImagesForAbOta_missingImages_zipOutput(self):
+ images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+ # Set up the output zip.
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(output_file, 'w') as output_zip:
+ self.assertRaises(AssertionError, AddRadioImagesForAbOta, output_zip,
+ images + ['baz'])
+
+ def test_AddPackRadioImages(self):
+ images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+ AddPackRadioImages(None, images)
+
+ for image in images:
+ self.assertTrue(
+ os.path.exists(os.path.join(images_path, image + '.img')))
+
+ def test_AddPackRadioImages_with_suffix(self):
+ images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+ images_with_suffix = [image + '.img' for image in images]
+ AddPackRadioImages(None, images_with_suffix)
+
+ for image in images:
+ self.assertTrue(
+ os.path.exists(os.path.join(images_path, image + '.img')))
+
+ def test_AddPackRadioImages_zipOutput(self):
+ images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+
+ # Set up the output zip.
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(output_file, 'w') as output_zip:
+ AddPackRadioImages(output_zip, images)
+
+ with zipfile.ZipFile(output_file, 'r') as verify_zip:
+ for image in images:
+ self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+ def test_AddPackRadioImages_imageExists(self):
+ images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+
+ # Additionally create images under IMAGES/ so that they should be skipped.
+ images, images_path = self._create_images(['foo', 'bar'], 'IMAGES')
+
+ AddPackRadioImages(None, images)
+
+ for image in images:
+ self.assertTrue(
+ os.path.exists(os.path.join(images_path, image + '.img')))
+
+ def test_AddPackRadioImages_missingImages(self):
+ images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+ AddPackRadioImages(None, images)
+
+ self.assertRaises(AssertionError, AddPackRadioImages, None,
+ images + ['baz'])
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 0948c61..5f6c5d0 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -19,7 +19,7 @@
import common
from ota_from_target_files import (
- _LoadOemDicts, BuildInfo, WriteFingerprintAssertion)
+ _LoadOemDicts, BuildInfo, GetPackageMetadata, WriteFingerprintAssertion)
class MockScriptWriter(object):
@@ -300,3 +300,179 @@
self.assertEqual('foo', oem_dict['xyz'])
self.assertEqual('bar', oem_dict['a.b.c'])
self.assertEqual('{}'.format(i), oem_dict['ro.build.index'])
+
+
+class OtaFromTargetFilesTest(unittest.TestCase):
+
+ TEST_TARGET_INFO_DICT = {
+ 'build.prop' : {
+ 'ro.product.device' : 'product-device',
+ 'ro.build.fingerprint' : 'build-fingerprint-target',
+ 'ro.build.version.incremental' : 'build-version-incremental-target',
+ 'ro.build.date.utc' : '1500000000',
+ },
+ }
+
+ TEST_SOURCE_INFO_DICT = {
+ 'build.prop' : {
+ 'ro.product.device' : 'product-device',
+ 'ro.build.fingerprint' : 'build-fingerprint-source',
+ 'ro.build.version.incremental' : 'build-version-incremental-source',
+ 'ro.build.date.utc' : '1400000000',
+ },
+ }
+
+ def setUp(self):
+ # Reset the global options as in ota_from_target_files.py.
+ common.OPTIONS.incremental_source = None
+ common.OPTIONS.downgrade = False
+ common.OPTIONS.timestamp = False
+ common.OPTIONS.wipe_user_data = False
+
+ def test_GetPackageMetadata_abOta_full(self):
+ target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+ target_info_dict['ab_update'] = 'true'
+ target_info = BuildInfo(target_info_dict, None)
+ metadata = GetPackageMetadata(target_info)
+ self.assertDictEqual(
+ {
+ 'ota-type' : 'AB',
+ 'ota-required-cache' : '0',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-timestamp' : '1500000000',
+ 'pre-device' : 'product-device',
+ },
+ metadata)
+
+ def test_GetPackageMetadata_abOta_incremental(self):
+ target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+ target_info_dict['ab_update'] = 'true'
+ target_info = BuildInfo(target_info_dict, None)
+ source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+ common.OPTIONS.incremental_source = ''
+ metadata = GetPackageMetadata(target_info, source_info)
+ self.assertDictEqual(
+ {
+ 'ota-type' : 'AB',
+ 'ota-required-cache' : '0',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-timestamp' : '1500000000',
+ 'pre-device' : 'product-device',
+ 'pre-build' : 'build-fingerprint-source',
+ 'pre-build-incremental' : 'build-version-incremental-source',
+ },
+ metadata)
+
+ def test_GetPackageMetadata_nonAbOta_full(self):
+ target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ metadata = GetPackageMetadata(target_info)
+ self.assertDictEqual(
+ {
+ 'ota-type' : 'BLOCK',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-timestamp' : '1500000000',
+ 'pre-device' : 'product-device',
+ },
+ metadata)
+
+ def test_GetPackageMetadata_nonAbOta_incremental(self):
+ target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+ common.OPTIONS.incremental_source = ''
+ metadata = GetPackageMetadata(target_info, source_info)
+ self.assertDictEqual(
+ {
+ 'ota-type' : 'BLOCK',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-timestamp' : '1500000000',
+ 'pre-device' : 'product-device',
+ 'pre-build' : 'build-fingerprint-source',
+ 'pre-build-incremental' : 'build-version-incremental-source',
+ },
+ metadata)
+
+ def test_GetPackageMetadata_wipe(self):
+ target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+ common.OPTIONS.wipe_user_data = True
+ metadata = GetPackageMetadata(target_info)
+ self.assertDictEqual(
+ {
+ 'ota-type' : 'BLOCK',
+ 'ota-wipe' : 'yes',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-timestamp' : '1500000000',
+ 'pre-device' : 'product-device',
+ },
+ metadata)
+
+ @staticmethod
+ def _test_GetPackageMetadata_swapBuildTimestamps(target_info, source_info):
+ (target_info['build.prop']['ro.build.date.utc'],
+ source_info['build.prop']['ro.build.date.utc']) = (
+ source_info['build.prop']['ro.build.date.utc'],
+ target_info['build.prop']['ro.build.date.utc'])
+
+ def test_GetPackageMetadata_unintentionalDowngradeDetected(self):
+ target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+ source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+ self._test_GetPackageMetadata_swapBuildTimestamps(
+ target_info_dict, source_info_dict)
+
+ target_info = BuildInfo(target_info_dict, None)
+ source_info = BuildInfo(source_info_dict, None)
+ common.OPTIONS.incremental_source = ''
+ self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+ source_info)
+
+ def test_GetPackageMetadata_downgrade(self):
+ target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+ source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+ self._test_GetPackageMetadata_swapBuildTimestamps(
+ target_info_dict, source_info_dict)
+
+ target_info = BuildInfo(target_info_dict, None)
+ source_info = BuildInfo(source_info_dict, None)
+ common.OPTIONS.incremental_source = ''
+ common.OPTIONS.downgrade = True
+ common.OPTIONS.wipe_user_data = True
+ metadata = GetPackageMetadata(target_info, source_info)
+ self.assertDictEqual(
+ {
+ 'ota-downgrade' : 'yes',
+ 'ota-type' : 'BLOCK',
+ 'ota-wipe' : 'yes',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'pre-device' : 'product-device',
+ 'pre-build' : 'build-fingerprint-source',
+ 'pre-build-incremental' : 'build-version-incremental-source',
+ },
+ metadata)
+
+ def test_GetPackageMetadata_overrideTimestamp(self):
+ target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+ source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+ self._test_GetPackageMetadata_swapBuildTimestamps(
+ target_info_dict, source_info_dict)
+
+ target_info = BuildInfo(target_info_dict, None)
+ source_info = BuildInfo(source_info_dict, None)
+ common.OPTIONS.incremental_source = ''
+ common.OPTIONS.timestamp = True
+ metadata = GetPackageMetadata(target_info, source_info)
+ self.assertDictEqual(
+ {
+ 'ota-type' : 'BLOCK',
+ 'post-build' : 'build-fingerprint-target',
+ 'post-build-incremental' : 'build-version-incremental-target',
+ 'post-timestamp' : '1500000001',
+ 'pre-device' : 'product-device',
+ 'pre-build' : 'build-fingerprint-source',
+ 'pre-build-incremental' : 'build-version-incremental-source',
+ },
+ metadata)
diff --git a/tools/warn.py b/tools/warn.py
index 62feac3..f42fb96 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -1010,12 +1010,7 @@
'severity': Severity.HIGH,
'description':
'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
- 'patterns': [r".*: warning: \[GuardedByChecker\] .+"]},
- {'category': 'java',
- 'severity': Severity.HIGH,
- 'description':
- 'Java: Invalid @GuardedBy expression',
- 'patterns': [r".*: warning: \[GuardedByValidator\] .+"]},
+ 'patterns': [r".*: warning: \[GuardedBy\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
'description':