Merge changes Iee17fa80,Id9ee50be into lmp-dev am: 9e63540d01 am: 12e0186aa1 am: 724c3d4bd9 am: f73b5cbd1f am: f84d2ddb50 am: d236029c61 am: a34ba7f284 am: 72c26c936b am: fcac1e2dda
am: 51c0c361a6
Change-Id: I389c07f81d5ddfd34b139f5e83b988ae0c80fc16
diff --git a/core/Makefile b/core/Makefile
index 6a70b49..8570d00 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1092,7 +1092,8 @@
@echo Installed file list: $@
@mkdir -p $(dir $@)
@rm -f $@
- $(hide) build/tools/fileslist.py $(TARGET_OUT) > $@
+ $(hide) build/tools/fileslist.py $(TARGET_OUT) > $(@:.txt=.json)
+ $(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
.PHONY: installed-file-list
installed-file-list: $(INSTALLED_FILES_FILE)
@@ -1495,7 +1496,8 @@
@echo Installed file list: $@
@mkdir -p $(dir $@)
@rm -f $@
- $(hide) build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $@
+ $(hide) build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
+ $(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
vendorimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,vendor)
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 6722af4..6fc2935 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -210,7 +210,10 @@
# Apk and its attachments reside in its own subdir.
ifeq ($(LOCAL_MODULE_CLASS),APPS)
# framework-res.apk doesn't like the additional layer.
- ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+ ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+ # Neither do Runtime Resource Overlay apks, which contain just the overlaid resources.
+ else ifeq ($(LOCAL_IS_RUNTIME_RESOURCE_OVERLAY),true)
+ else
my_module_path := $(my_module_path)/$(LOCAL_MODULE)
endif
endif
diff --git a/core/build_rro_package.mk b/core/build_rro_package.mk
new file mode 100644
index 0000000..9865b33
--- /dev/null
+++ b/core/build_rro_package.mk
@@ -0,0 +1,25 @@
+#############################################################################
+## Standard rules for installing runtime resource overlay APKs.
+##
+## Set LOCAL_RRO_THEME to the theme name if the package should apply only to
+## a particular theme as set by ro.boot.vendor.overlay.theme system property.
+##
+## If LOCAL_RRO_THEME is not set, the package will apply always, independent
+## of themes.
+##
+#############################################################################
+
+LOCAL_IS_RUNTIME_RESOURCE_OVERLAY := true
+
+ifneq ($(LOCAL_SRC_FILES),)
+ $(error runtime resource overlay package should not contain sources)
+endif
+
+ifeq ($(LOCAL_RRO_THEME),)
+ LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay
+else
+ LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay/$(LOCAL_RRO_THEME)
+endif
+
+include $(BUILD_SYSTEM)/package.mk
+
diff --git a/core/clang/HOST_x86_common.mk b/core/clang/HOST_x86_common.mk
index 9e71750..690c0f6 100644
--- a/core/clang/HOST_x86_common.mk
+++ b/core/clang/HOST_x86_common.mk
@@ -13,7 +13,8 @@
ifeq ($(HOST_OS),linux)
CLANG_CONFIG_x86_LINUX_HOST_EXTRA_ASFLAGS := \
--gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
- --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot
+ --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
+ -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin
CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CFLAGS := \
--gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 5886610..f7567b4 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -363,6 +363,8 @@
LOCAL_INIT_RC_32:=
LOCAL_INIT_RC_64:=
LOCAL_JAVA_LANGUAGE_VERSION:=
+LOCAL_IS_RUNTIME_RESOURCE_OVERLAY:=
+LOCAL_RRO_THEME:=
# Trim MAKEFILE_LIST so that $(call my-dir) doesn't need to
# iterate over thousands of entries every time.
diff --git a/core/config.mk b/core/config.mk
index 5b9f1f8..2847d34 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -85,6 +85,7 @@
BUILD_HOST_EXECUTABLE:= $(BUILD_SYSTEM)/host_executable.mk
BUILD_PACKAGE:= $(BUILD_SYSTEM)/package.mk
BUILD_PHONY_PACKAGE:= $(BUILD_SYSTEM)/phony_package.mk
+BUILD_RRO_PACKAGE:= $(BUILD_SYSTEM)/build_rro_package.mk
BUILD_HOST_PREBUILT:= $(BUILD_SYSTEM)/host_prebuilt.mk
BUILD_PREBUILT:= $(BUILD_SYSTEM)/prebuilt.mk
BUILD_MULTI_PREBUILT:= $(BUILD_SYSTEM)/multi_prebuilt.mk
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index b08a28a..afa8389 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -61,6 +61,7 @@
device/generic/goldfish/init.ranchu.rc:root/init.ranchu.rc \
device/generic/goldfish/fstab.ranchu:root/fstab.ranchu \
device/generic/goldfish/ueventd.ranchu.rc:root/ueventd.ranchu.rc \
+ device/generic/goldfish/input/goldfish_rotary.idc:system/usr/idc/goldfish_rotary.idc \
frameworks/native/data/etc/android.hardware.usb.accessory.xml:system/etc/permissions/android.hardware.usb.accessory.xml
PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
diff --git a/tools/fileslist.py b/tools/fileslist.py
index a11efaa..b9e7350 100755
--- a/tools/fileslist.py
+++ b/tools/fileslist.py
@@ -15,12 +15,24 @@
# limitations under the License.
#
-import operator, os, sys
+import json, hashlib, operator, os, sys
def get_file_size(path):
st = os.lstat(path)
return st.st_size;
+def get_file_digest(path):
+ if os.path.isfile(path) == False:
+ return "----------------------------------------------------------------"
+ digest = hashlib.sha256()
+ with open(path, 'rb') as f:
+ while True:
+ buf = f.read(1024*1024)
+ if not buf:
+ break
+ digest.update(buf)
+ return digest.hexdigest();
+
def main(argv):
output = []
roots = argv[1:]
@@ -30,16 +42,17 @@
relative = dir[base:]
for f in files:
try:
- row = (
- get_file_size(os.path.sep.join((dir, f))),
- os.path.sep.join((relative, f)),
- )
+ path = os.path.sep.join((dir, f))
+ row = {
+ "Size": get_file_size(path),
+ "Name": os.path.sep.join((relative, f)),
+ "SHA256": get_file_digest(path),
+ }
output.append(row)
except os.error:
pass
- output.sort(key=operator.itemgetter(0), reverse=True)
- for row in output:
- print "%12d %s" % row
+ output.sort(key=operator.itemgetter("Size", "Name"), reverse=True)
+ print json.dumps(output, indent=2, separators=(',',': '))
if __name__ == '__main__':
main(sys.argv)
diff --git a/tools/fileslist_util.py b/tools/fileslist_util.py
new file mode 100755
index 0000000..ff40d51
--- /dev/null
+++ b/tools/fileslist_util.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import getopt, json, sys
+
+def PrintFileNames(path):
+ with open(path) as jf:
+ data = json.load(jf)
+ for line in data:
+ print(line["Name"])
+
+def PrintCanonicalList(path):
+ with open(path) as jf:
+ data = json.load(jf)
+ for line in data:
+ print "{0:12d} {1}".format(line["Size"], line["Name"])
+
+def PrintUsage(name):
+ print("""
+Usage: %s -[nc] json_files_list
+ -n produces list of files only
+ -c produces classic installed-files.txt
+""" % (name))
+
+def main(argv):
+ try:
+ opts, args = getopt.getopt(argv[1:], "nc", "")
+ except getopt.GetoptError, err:
+ print(err)
+ PrintUsage(argv[0])
+ sys.exit(2)
+
+ if len(opts) == 0:
+ print("No conversion option specified")
+ PrintUsage(argv[0])
+ sys.exit(2)
+
+ if len(args) == 0:
+ print("No input file specified")
+ PrintUsage(argv[0])
+ sys.exit(2)
+
+ for o, a in opts:
+ if o == ("-n"):
+ PrintFileNames(args[0])
+ sys.exit()
+ elif o == ("-c"):
+ PrintCanonicalList(args[0])
+ sys.exit()
+ else:
+ assert False, "Unsupported option"
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 0bbd8f6..5a0a411 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -19,7 +19,31 @@
not have an IMAGES/ top-level subdirectory), produce the images and
add them to the zipfile.
-Usage: add_img_to_target_files target_files
+Usage: add_img_to_target_files [flag] target_files
+
+ -a (--add_missing)
+ Build and add missing images to "IMAGES/". If this option is
+ not specified, this script will simply exit when "IMAGES/"
+ directory exists in the target file.
+
+ -r (--rebuild_recovery)
+ Rebuild the recovery patch and write it to the system image. Only
+ meaningful when system image needs to be rebuilt.
+
+ --replace_verity_private_key
+ Replace the private key used for verity signing. (same as the option
+ in sign_target_files_apks)
+
+ --replace_verity_public_key
+ Replace the certificate (public key) used for verity verification. (same
+ as the option in sign_target_files_apks)
+
+ --is_signing
+ Skip building & adding the images for "userdata" and "cache" if we
+ are signing the target files.
+
+ --verity_signer_path
+ Specify the signer path to build verity metadata.
"""
import sys
@@ -45,6 +69,7 @@
OPTIONS.rebuild_recovery = False
OPTIONS.replace_verity_public_key = False
OPTIONS.replace_verity_private_key = False
+OPTIONS.is_signing = False
OPTIONS.verity_signer_path = None
def GetCareMap(which, imgname):
@@ -364,10 +389,11 @@
if has_system_other:
banner("system_other")
AddSystemOther(output_zip)
- banner("userdata")
- AddUserdata(output_zip)
- banner("cache")
- AddCache(output_zip)
+ if not OPTIONS.is_signing:
+ banner("userdata")
+ AddUserdata(output_zip)
+ banner("cache")
+ AddCache(output_zip)
# For devices using A/B update, copy over images from RADIO/ to IMAGES/ and
# make sure we have all the needed images ready under IMAGES/.
@@ -414,6 +440,8 @@
OPTIONS.replace_verity_private_key = (True, a)
elif o == "--replace_verity_public_key":
OPTIONS.replace_verity_public_key = (True, a)
+ elif o == "--is_signing":
+ OPTIONS.is_signing = True
elif o == "--verity_signer_path":
OPTIONS.verity_signer_path = a
else:
@@ -425,6 +453,7 @@
extra_long_opts=["add_missing", "rebuild_recovery",
"replace_verity_public_key=",
"replace_verity_private_key=",
+ "is_signing",
"verity_signer_path="],
extra_option_handler=option_handler)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 24ecd15..e141da0 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -109,7 +109,8 @@
Args:
partition_size: the size of the partition to be verified.
Returns:
- The size of the partition adjusted for verity metadata.
+ A tuple of the size of the partition adjusted for verity metadata, and
+ the size of verity metadata.
"""
key = "%d %d" % (partition_size, fec_supported)
if key in AdjustPartitionSizeForVerity.results:
@@ -121,27 +122,31 @@
# verity tree and fec sizes depend on the partition size, which
# means this estimate is always going to be unnecessarily small
- lo = partition_size - GetVeritySize(hi, fec_supported)
+ verity_size = GetVeritySize(hi, fec_supported)
+ lo = partition_size - verity_size
result = lo
# do a binary search for the optimal size
while lo < hi:
i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
- size = i + GetVeritySize(i, fec_supported)
- if size <= partition_size:
+ v = GetVeritySize(i, fec_supported)
+ if i + v <= partition_size:
if result < i:
result = i
+ verity_size = v
lo = i + BLOCK_SIZE
else:
hi = i
- AdjustPartitionSizeForVerity.results[key] = result
- return result
+ AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
+ return (result, verity_size)
AdjustPartitionSizeForVerity.results = {}
-def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path):
- cmd = "fec -e %s %s %s" % (sparse_image_path, verity_path, verity_fec_path)
+def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
+ padding_size):
+ cmd = "fec -e -p %d %s %s %s" % (padding_size, sparse_image_path,
+ verity_path, verity_fec_path)
print cmd
status, output = commands.getstatusoutput(cmd)
if status:
@@ -207,7 +212,7 @@
def BuildVerifiedImage(data_image_path, verity_image_path,
verity_metadata_path, verity_fec_path,
- fec_supported):
+ padding_size, fec_supported):
if not Append(verity_image_path, verity_metadata_path,
"Could not append verity metadata!"):
return False
@@ -215,7 +220,7 @@
if fec_supported:
# build FEC for the entire partition, including metadata
if not BuildVerityFEC(data_image_path, verity_image_path,
- verity_fec_path):
+ verity_fec_path, padding_size):
return False
if not Append(verity_image_path, verity_fec_path, "Could not append FEC!"):
@@ -253,7 +258,7 @@
True on success, False otherwise.
"""
# get properties
- image_size = prop_dict["partition_size"]
+ image_size = int(prop_dict["partition_size"])
block_dev = prop_dict["verity_block_device"]
signer_key = prop_dict["verity_key"] + ".pk8"
if OPTIONS.verity_signer_path is not None:
@@ -284,10 +289,17 @@
return False
# build the full verified image
+ target_size = int(prop_dict["original_partition_size"])
+ verity_size = int(prop_dict["verity_size"])
+
+ padding_size = target_size - image_size - verity_size
+ assert padding_size >= 0
+
if not BuildVerifiedImage(out_file,
verity_image_path,
verity_metadata_path,
verity_fec_path,
+ padding_size,
fec_supported):
shutil.rmtree(tempdir_name, ignore_errors=True)
return False
@@ -358,12 +370,13 @@
# verified.
if verity_supported and is_verity_partition:
partition_size = int(prop_dict.get("partition_size"))
- adjusted_size = AdjustPartitionSizeForVerity(partition_size,
- verity_fec_supported)
+ (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(partition_size,
+ verity_fec_supported)
if not adjusted_size:
return False
prop_dict["partition_size"] = str(adjusted_size)
prop_dict["original_partition_size"] = str(partition_size)
+ prop_dict["verity_size"] = str(verity_size)
if fs_type.startswith("ext"):
build_command = ["mkuserimg.sh"]
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 2ecc5cb..1e7d873 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -77,26 +77,28 @@
with temporary=True) to this one."""
self.script.extend(other.script)
- def AssertOemProperty(self, name, value):
- """Assert that a property on the OEM paritition matches a value."""
+ def AssertOemProperty(self, name, values):
+ """Assert that a property on the OEM paritition matches allowed values."""
if not name:
raise ValueError("must specify an OEM property")
- if not value:
+ if not values:
raise ValueError("must specify the OEM value")
+ get_prop_command = None
if common.OPTIONS.oem_no_mount:
- cmd = ('getprop("{name}") == "{value}" || '
- 'abort("E{code}: This package expects the value \\"{value}\\" for '
- '\\"{name}\\"; this has value \\"" + '
- 'getprop("{name}") + "\\".");').format(
- code=common.ErrorCode.OEM_PROP_MISMATCH,
- name=name, value=value)
+ get_prop_command = 'getprop("%s")' % name
else:
- cmd = ('file_getprop("/oem/oem.prop", "{name}") == "{value}" || '
- 'abort("E{code}: This package expects the value \\"{value}\\" for '
- '\\"{name}\\" on the OEM partition; this has value \\"" + '
- 'file_getprop("/oem/oem.prop", "{name}") + "\\".");').format(
- code=common.ErrorCode.OEM_PROP_MISMATCH,
- name=name, value=value)
+ get_prop_command = 'file_getprop("/oem/oem.prop", "%s")' % name
+
+ cmd = ''
+ for value in values:
+ cmd += '%s == "%s" || ' % (get_prop_command, value)
+ cmd += (
+ 'abort("E{code}: This package expects the value \\"{values}\\" for '
+ '\\"{name}\\"; this has value \\"" + '
+ '{get_prop_command} + "\\".");').format(
+ code=common.ErrorCode.OEM_PROP_MISMATCH,
+ get_prop_command=get_prop_command, name=name,
+ values='\\" or \\"'.join(values))
self.script.append(cmd)
def AssertSomeFingerprint(self, *fp):
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 24b42ee..4b6d394 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -50,9 +50,11 @@
Remount and verify the checksums of the files written to the
system and vendor (if used) partitions. Incremental builds only.
- -o (--oem_settings) <file>
- Use the file to specify the expected OEM-specific properties
- on the OEM partition of the intended device.
+ -o (--oem_settings) <main_file[,additional_files...]>
+ Comma separated list of files used to specify the expected OEM-specific
+ properties on the OEM partition of the intended device.
+ Multiple expected values can be used by providing multiple files.
+
--oem_no_mount
For devices with OEM-specific properties but without an OEM partition,
@@ -464,20 +466,38 @@
whole_file=True)
-def AppendAssertions(script, info_dict, oem_dict=None):
+def AppendAssertions(script, info_dict, oem_dicts=None):
oem_props = info_dict.get("oem_fingerprint_properties")
if oem_props is None or len(oem_props) == 0:
device = GetBuildProp("ro.product.device", info_dict)
script.AssertDevice(device)
else:
- if oem_dict is None:
+ if not oem_dicts:
raise common.ExternalError(
"No OEM file provided to answer expected assertions")
for prop in oem_props.split():
- if oem_dict.get(prop) is None:
+ values = []
+ for oem_dict in oem_dicts:
+ if oem_dict.get(prop):
+ values.append(oem_dict[prop])
+ if not values:
raise common.ExternalError(
"The OEM file is missing the property %s" % prop)
- script.AssertOemProperty(prop, oem_dict.get(prop))
+ script.AssertOemProperty(prop, values)
+
+
+def _LoadOemDicts(script, recovery_mount_options):
+ """Returns the list of loaded OEM properties dict."""
+ oem_dicts = None
+ if OPTIONS.oem_source is None:
+ raise common.ExternalError("OEM source required for this build")
+ if not OPTIONS.oem_no_mount:
+ script.Mount("/oem", recovery_mount_options)
+ oem_dicts = []
+ for oem_file in OPTIONS.oem_source:
+ oem_dicts.append(common.LoadDictionaryFromLines(
+ open(oem_file).readlines()))
+ return oem_dicts
def _WriteRecoveryImageToBoot(script, output_zip):
@@ -590,19 +610,15 @@
oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
- oem_dict = None
- if oem_props is not None and len(oem_props) > 0:
- if OPTIONS.oem_source is None:
- raise common.ExternalError("OEM source required for this build")
- if not OPTIONS.oem_no_mount:
- script.Mount("/oem", recovery_mount_options)
- oem_dict = common.LoadDictionaryFromLines(
- open(OPTIONS.oem_source).readlines())
+ oem_dicts = None
+ if oem_props:
+ oem_dicts = _LoadOemDicts(script, recovery_mount_options)
metadata = {
- "post-build": CalculateFingerprint(oem_props, oem_dict,
+ "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
OPTIONS.info_dict),
- "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+ "pre-device": GetOemProperty("ro.product.device", oem_props,
+ oem_dicts and oem_dicts[0],
OPTIONS.info_dict),
"post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
}
@@ -626,7 +642,7 @@
ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
script.AssertOlderBuild(ts, ts_text)
- AppendAssertions(script, OPTIONS.info_dict, oem_dict)
+ AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
device_specific.FullOTA_Assertions()
# Two-step package strategy (in chronological order, which is *not*
@@ -677,7 +693,7 @@
# Dump fingerprints
script.Print("Target: %s" % CalculateFingerprint(
- oem_props, oem_dict, OPTIONS.info_dict))
+ oem_props, oem_dicts and oem_dicts[0], OPTIONS.info_dict))
device_specific.FullOTA_InstallBegin()
@@ -879,17 +895,13 @@
oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
recovery_mount_options = OPTIONS.source_info_dict.get(
"recovery_mount_options")
- oem_dict = None
- if oem_props is not None and len(oem_props) > 0:
- if OPTIONS.oem_source is None:
- raise common.ExternalError("OEM source required for this build")
- if not OPTIONS.oem_no_mount:
- script.Mount("/oem", recovery_mount_options)
- oem_dict = common.LoadDictionaryFromLines(
- open(OPTIONS.oem_source).readlines())
+ oem_dicts = None
+ if oem_props:
+ oem_dicts = _LoadOemDicts(script, recovery_mount_options)
metadata = {
- "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+ "pre-device": GetOemProperty("ro.product.device", oem_props,
+ oem_dicts and oem_dicts[0],
OPTIONS.source_info_dict),
"ota-type": "BLOCK",
}
@@ -906,9 +918,9 @@
metadata=metadata,
info_dict=OPTIONS.source_info_dict)
- source_fp = CalculateFingerprint(oem_props, oem_dict,
+ source_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
OPTIONS.source_info_dict)
- target_fp = CalculateFingerprint(oem_props, oem_dict,
+ target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
OPTIONS.target_info_dict)
metadata["pre-build"] = source_fp
metadata["post-build"] = target_fp
@@ -973,7 +985,7 @@
else:
vendor_diff = None
- AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
+ AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
device_specific.IncrementalOTA_Assertions()
# Two-step incremental package strategy (in chronological order,
@@ -1025,9 +1037,9 @@
# Dump fingerprints
script.Print("Source: %s" % CalculateFingerprint(
- oem_props, oem_dict, OPTIONS.source_info_dict))
+ oem_props, oem_dicts and oem_dicts[0], OPTIONS.source_info_dict))
script.Print("Target: %s" % CalculateFingerprint(
- oem_props, oem_dict, OPTIONS.target_info_dict))
+ oem_props, oem_dicts and oem_dicts[0], OPTIONS.target_info_dict))
script.Print("Verifying current system...")
@@ -1176,18 +1188,16 @@
oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
recovery_mount_options = OPTIONS.info_dict.get(
"recovery_mount_options")
- oem_dict = None
- if oem_props is not None and len(oem_props) > 0:
- if OPTIONS.oem_source is None:
- raise common.ExternalError("OEM source required for this build")
- script.Mount("/oem", recovery_mount_options)
- oem_dict = common.LoadDictionaryFromLines(
- open(OPTIONS.oem_source).readlines())
+ oem_dicts = None
+ if oem_props:
+ oem_dicts = _LoadOemDicts(script, recovery_mount_options)
- target_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.info_dict)
+ target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
+ OPTIONS.info_dict)
metadata = {
"post-build": target_fp,
- "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+ "pre-device": GetOemProperty("ro.product.device", oem_props,
+ oem_dicts and oem_dicts[0],
OPTIONS.info_dict),
"post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
}
@@ -1201,7 +1211,7 @@
metadata=metadata,
info_dict=OPTIONS.info_dict)
- AppendAssertions(script, OPTIONS.info_dict, oem_dict)
+ AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
script.Print("Verifying device images against %s..." % target_fp)
script.AppendExtra("")
@@ -1273,26 +1283,25 @@
# Metadata to comply with Android OTA package format.
oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
- oem_dict = None
+ oem_dicts = None
if oem_props:
- if OPTIONS.oem_source is None:
- raise common.ExternalError("OEM source required for this build")
- oem_dict = common.LoadDictionaryFromLines(
- open(OPTIONS.oem_source).readlines())
+ oem_dicts = _LoadOemDicts(script, None)
metadata = {
- "post-build": CalculateFingerprint(oem_props, oem_dict,
+ "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
OPTIONS.info_dict),
"post-build-incremental" : GetBuildProp("ro.build.version.incremental",
OPTIONS.info_dict),
- "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+ "pre-device": GetOemProperty("ro.product.device", oem_props,
+ oem_dicts and oem_dicts[0],
OPTIONS.info_dict),
"ota-required-cache": "0",
"ota-type": "AB",
}
if source_file is not None:
- metadata["pre-build"] = CalculateFingerprint(oem_props, oem_dict,
+ metadata["pre-build"] = CalculateFingerprint(oem_props,
+ oem_dicts and oem_dicts[0],
OPTIONS.source_info_dict)
metadata["pre-build-incremental"] = GetBuildProp(
"ro.build.version.incremental", OPTIONS.source_info_dict)
@@ -1565,17 +1574,13 @@
oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
recovery_mount_options = OPTIONS.source_info_dict.get(
"recovery_mount_options")
- oem_dict = None
- if oem_props is not None and len(oem_props) > 0:
- if OPTIONS.oem_source is None:
- raise common.ExternalError("OEM source required for this build")
- if not OPTIONS.oem_no_mount:
- script.Mount("/oem", recovery_mount_options)
- oem_dict = common.LoadDictionaryFromLines(
- open(OPTIONS.oem_source).readlines())
+ oem_dicts = None
+ if oem_props:
+ oem_dicts = _LoadOemDicts(script, recovery_mount_options)
metadata = {
- "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+ "pre-device": GetOemProperty("ro.product.device", oem_props,
+ oem_dicts and oem_dicts[0],
OPTIONS.source_info_dict),
"ota-type": "FILE",
}
@@ -1600,9 +1605,9 @@
else:
vendor_diff = None
- target_fp = CalculateFingerprint(oem_props, oem_dict,
+ target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
OPTIONS.target_info_dict)
- source_fp = CalculateFingerprint(oem_props, oem_dict,
+ source_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
OPTIONS.source_info_dict)
if oem_props is None:
@@ -1640,7 +1645,7 @@
# 0.1 for unpacking verbatim files, symlinking, and doing the
# device-specific commands.
- AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
+ AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
device_specific.IncrementalOTA_Assertions()
# Two-step incremental package strategy (in chronological order,
@@ -1980,7 +1985,7 @@
elif o == "--override_timestamp":
OPTIONS.timestamp = True
elif o in ("-o", "--oem_settings"):
- OPTIONS.oem_source = a
+ OPTIONS.oem_source = a.split(',')
elif o == "--oem_no_mount":
OPTIONS.oem_no_mount = True
elif o in ("-e", "--extra_script"):
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 1677a44..52b526c 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -713,7 +713,9 @@
common.ZipClose(input_zip)
common.ZipClose(output_zip)
- add_img_to_target_files.AddImagesToTargetFiles(args[1])
+ # Skip building userdata.img and cache.img when signing the target files.
+ new_args = ["--is_signing", args[1]]
+ add_img_to_target_files.main(new_args)
print "done."