Merge "Revert "Remove option to create boot image based on preloaded classes""
diff --git a/core/Makefile b/core/Makefile
index 690c9c1..ec9029b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1310,26 +1310,25 @@
ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
endif
-
-INTERNAL_USERIMAGES_DEPS := $(SIMG2IMG)
-INTERNAL_USERIMAGES_DEPS += $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(E2FSCK) $(TUNE2FS)
-ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
-INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG) $(MAKE_F2FS)
-endif
-
-ifeq ($(BOARD_AVB_ENABLE),true)
-INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
-endif
-
ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
endif
-ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE) $(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
-INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
+
+INTERNAL_USERIMAGES_DEPS := \
+ $(BLK_ALLOC_TO_BASE_FS) \
+ $(E2FSCK) \
+ $(MKE2FS_CONF) \
+ $(MKEXTUSERIMG) \
+ $(SIMG2IMG) \
+ $(TUNE2FS)
+
+ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
+INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG)
endif
-# Get a colon-separated list of search paths.
-INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
+ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE) $(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
+INTERNAL_USERIMAGES_DEPS += $(MKSQUASHFSUSERIMG)
+endif
ifeq (true,$(PRODUCT_SUPPORTS_VERITY))
INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_METADATA) $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
@@ -1338,13 +1337,16 @@
endif
endif
+ifeq ($(BOARD_AVB_ENABLE),true)
+INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
+endif
+
+# Get a colon-separated list of search paths.
+INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
+
SELINUX_FC := $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.bin
INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
-INTERNAL_USERIMAGES_DEPS += $(BLK_ALLOC_TO_BASE_FS)
-
-INTERNAL_USERIMAGES_DEPS += $(MKE2FS_CONF)
-
ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
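A rough Python analogue of the search-path expression above (`$(subst $(space),:,$(sort $(dir ...)))`), purely for illustration — the build itself stays in Make and the tool paths here are hypothetical:

```python
import os

# Hypothetical host tool paths standing in for $(INTERNAL_USERIMAGES_DEPS).
deps = ['out/host/linux-x86/bin/e2fsck',
        'out/host/linux-x86/bin/tune2fs',
        'out/host/linux-x86/bin/avbtool']
# $(dir ...) keeps each directory part, $(sort ...) sorts and deduplicates,
# and $(subst $(space),:,...) joins with ':' to form a PATH-style value.
binary_paths = ':'.join(sorted({os.path.dirname(p) for p in deps}))
print(binary_paths)  # out/host/linux-x86/bin
```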
@@ -3616,6 +3618,7 @@
imgdiff \
libconscrypt_openjdk_jni \
lpmake \
+ lpunpack \
make_f2fs \
minigzip \
mkbootfs \
@@ -4261,14 +4264,14 @@
# $(1): output file
# $(2): additional args
define build-ota-package-target
-PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
- build/make/tools/releasetools/ota_from_target_files -v \
- --block \
- --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
- -p $(HOST_OUT) \
- $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \
- $(2) \
- $(BUILT_TARGET_FILES_PACKAGE) $(1)
+PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
+ build/make/tools/releasetools/ota_from_target_files \
+ --verbose \
+ --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
+ --path $(HOST_OUT) \
+ $(if $(OEM_OTA_CONFIG), --oem_settings $(OEM_OTA_CONFIG)) \
+ $(2) \
+ $(BUILT_TARGET_FILES_PACKAGE) $(1)
endef
name := $(TARGET_PRODUCT)
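The rewritten rule above trades short flags for their long-form equivalents (`-v`/`--verbose`, `-p`/`--path`, `-o`/`--oem_settings`), which makes the invocation self-documenting. A hypothetical argparse sketch of that short/long pairing — the real ota_from_target_files has its own option handling, so this only illustrates the equivalence, not the tool's actual interface:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-p', '--path', help='prepend to the tool search path')
parser.add_argument('-o', '--oem_settings', help='OEM property config file')
# Long and short spellings parse identically.
args = parser.parse_args(['--verbose', '--path', 'out/host/linux-x86'])
assert args.verbose and args.path == 'out/host/linux-x86'
```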
diff --git a/core/config.mk b/core/config.mk
index 92efd34..4ce664a 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -577,16 +577,12 @@
endif
APICHECK := $(HOST_OUT_JAVA_LIBRARIES)/metalava$(COMMON_JAVA_PACKAGE_SUFFIX)
FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX)
-MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/mke2fs$(HOST_EXECUTABLE_SUFFIX)
MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
BLK_ALLOC_TO_BASE_FS := $(HOST_OUT_EXECUTABLES)/blk_alloc_to_base_fs$(HOST_EXECUTABLE_SUFFIX)
-MAKE_SQUASHFS := $(HOST_OUT_EXECUTABLES)/mksquashfs$(HOST_EXECUTABLE_SUFFIX)
MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
-MAKE_F2FS := $(HOST_OUT_EXECUTABLES)/make_f2fs$(HOST_EXECUTABLE_SUFFIX)
MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
-IMG2SIMG := $(HOST_OUT_EXECUTABLES)/img2simg$(HOST_EXECUTABLE_SUFFIX)
E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
MKTARBALL := build/make/tools/mktarball.sh
TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
diff --git a/core/main.mk b/core/main.mk
index 5cb1d34..73aa649 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -832,6 +832,9 @@
$(call update-host-shared-libs-deps-for-suites)
ifdef HOST_CROSS_OS
$(call resolve-shared-libs-depes,HOST_CROSS_,,true)
+ifdef HOST_CROSS_2ND_ARCH
+$(call resolve-shared-libs-depes,HOST_CROSS_,true,true)
+endif
endif
# Pass the shared libraries dependencies to prebuilt ELF file check.
diff --git a/core/product.mk b/core/product.mk
index 838673c..9ec3257 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -541,10 +541,15 @@
endef
#
-# Strip the variables in _product_strip_var_list
+# Strip the variables in _product_var_list and a few build-system
+# internal variables, and assign the ones for the current product
+# to a shorthand that is more convenient to read from elsewhere.
#
define strip-product-vars
-$(foreach v,$(_product_var_list), \
+$(foreach v,\
+ $(_product_var_list) \
+ PRODUCT_ENFORCE_PACKAGES_EXIST \
+ PRODUCT_ENFORCE_PACKAGES_EXIST_WHITELIST, \
$(eval $(v) := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).$(v)))) \
)
endef
diff --git a/target/product/generic.mk b/target/product/generic.mk
index 6fe4818..68130e3 100644
--- a/target/product/generic.mk
+++ b/target/product/generic.mk
@@ -25,4 +25,5 @@
PRODUCT_DEVICE := generic
PRODUCT_NAME := generic
-$(call enforce-product-packages-exist,)
+whitelist := product_manifest.xml
+$(call enforce-product-packages-exist,$(whitelist))
diff --git a/target/product/mainline_arm64.mk b/target/product/mainline_arm64.mk
index 6050924..c098c9f 100644
--- a/target/product/mainline_arm64.mk
+++ b/target/product/mainline_arm64.mk
@@ -16,7 +16,8 @@
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline.mk)
-$(call enforce-product-packages-exist,)
+whitelist := product_manifest.xml
+$(call enforce-product-packages-exist,$(whitelist))
PRODUCT_NAME := mainline_arm64
PRODUCT_DEVICE := mainline_arm64
diff --git a/tools/mktarball.sh b/tools/mktarball.sh
index ef0fe86..ced7e17 100755
--- a/tools/mktarball.sh
+++ b/tools/mktarball.sh
@@ -37,7 +37,7 @@
# echo "$f: dir: $is_dir curr: $curr_perms uid: $new_uid gid: $new_gid "\
# "perms: $new_perms"
tar --no-recursion --numeric-owner --owner $new_uid \
- --group $new_gid --mode $new_perms -p -rf ${target_tar} ${f}
+ --group $new_gid --mode $new_perms -rf ${target_tar} ${f}
done
if [ $? -eq 0 ] ; then
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index fb4ca76..0751125 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -59,9 +59,9 @@
try:
common.RunAndCheckOutput(cmd)
except common.ExternalError as e:
- raise ApexSigningError, \
+ raise ApexSigningError(
'Failed to sign APEX payload {} with {}:\n{}'.format(
- payload_file, payload_key_path, e), sys.exc_info()[2]
+ payload_file, payload_key_path, e))
# Verify the signed payload image with specified public key.
logger.info('Verifying %s', payload_file)
@@ -75,9 +75,9 @@
try:
common.RunAndCheckOutput(cmd)
except common.ExternalError as e:
- raise ApexSigningError, \
+ raise ApexSigningError(
'Failed to validate payload signing for {} with {}:\n{}'.format(
- payload_file, payload_key, e), sys.exc_info()[2]
+ payload_file, payload_key, e))
def ParseApexPayloadInfo(payload_path):
@@ -100,9 +100,9 @@
try:
output = common.RunAndCheckOutput(cmd)
except common.ExternalError as e:
- raise ApexInfoError, \
+ raise ApexInfoError(
'Failed to get APEX payload info for {}:\n{}'.format(
- payload_path, e), sys.exc_info()[2]
+ payload_path, e))
# Extract the Algorithm / Salt / Prop info from payload (i.e. an image signed
# with avbtool). For example,
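The three apex_utils hunks above replace Python 2's `raise Exc, value, traceback` statement — a syntax error under Python 3 — with the call form, dropping the explicit traceback argument since Python 3 keeps the original exception on `__context__` automatically. A minimal sketch of the pattern, with hypothetical names:

```python
import subprocess

class ApexSigningError(Exception):
    pass

def sign_payload(cmd):
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError as e:
        # Python 2 only:  raise ApexSigningError, msg, sys.exc_info()[2]
        # The call form below parses on both 2 and 3; on 3 the original
        # exception is preserved automatically as __context__.
        raise ApexSigningError('Failed to sign payload: {}'.format(e))

sign_payload(['true'])  # assumes a POSIX 'true' binary; no-op on success
```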
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 7cff831..107b6f5 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -14,6 +14,7 @@
from __future__ import print_function
+import base64
import collections
import copy
import errno
@@ -30,7 +31,6 @@
import re
import shlex
import shutil
-import string
import subprocess
import sys
import tempfile
@@ -190,6 +190,8 @@
kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
stdin, etc. stdout and stderr will default to subprocess.PIPE and
subprocess.STDOUT respectively unless caller specifies any of them.
+ universal_newlines will default to True, as most of the users in
+ releasetools expect string output.
Returns:
A subprocess.Popen object.
@@ -197,6 +199,8 @@
if 'stdout' not in kwargs and 'stderr' not in kwargs:
kwargs['stdout'] = subprocess.PIPE
kwargs['stderr'] = subprocess.STDOUT
+ if 'universal_newlines' not in kwargs:
+ kwargs['universal_newlines'] = True
# Don't log any if caller explicitly says so.
if verbose != False:
logger.info(" Running: \"%s\"", " ".join(args))
@@ -314,7 +318,7 @@
def read_helper(fn):
if isinstance(input_file, zipfile.ZipFile):
- return input_file.read(fn)
+ return input_file.read(fn).decode()
else:
path = os.path.join(input_file, *fn.split("/"))
try:
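`ZipFile.read()` returns `bytes` on Python 3, so the read helper above decodes at the read site to keep returning text. A sketch with an in-memory archive:

```python
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('META/misc_info.txt', 'recovery_api_version=3\n')
with zipfile.ZipFile(buf) as zf:
    text = zf.read('META/misc_info.txt').decode()  # bytes -> str
assert text.startswith('recovery_api_version')
```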
@@ -526,7 +530,7 @@
# system. Other areas assume system is always at "/system" so point /system
# at /.
if system_root_image:
- assert not d.has_key("/system") and d.has_key("/")
+ assert '/system' not in d and '/' in d
d["/system"] = d["/"]
return d
@@ -953,7 +957,7 @@
# filename listed in system.map may contain an additional leading slash
# (i.e. "//system/framework/am.jar"). Using lstrip to get consistent
# results.
- arcname = string.replace(entry, which, which.upper(), 1).lstrip('/')
+ arcname = entry.replace(which, which.upper(), 1).lstrip('/')
# Special handling another case, where files not under /system
# (e.g. "/sbin/charger") are packed under ROOT/ in a target_files.zip.
@@ -1223,7 +1227,7 @@
if basename:
installed_files.add(basename)
- for line in tf_zip.read("META/apkcerts.txt").split("\n"):
+ for line in tf_zip.read('META/apkcerts.txt').decode().split('\n'):
line = line.strip()
if not line:
continue
@@ -1433,6 +1437,8 @@
if not first:
print("key file %s still missing some passwords." % (self.pwfile,))
+ if sys.version_info[0] >= 3:
+ raw_input = input # pylint: disable=redefined-builtin
answer = raw_input("try to edit again? [y]> ").strip()
if answer and answer[0] not in 'yY':
raise RuntimeError("key passwords unavailable")
@@ -2185,7 +2191,7 @@
This gives the same result as `openssl x509 -in <filename> -outform DER`.
Returns:
- The decoded certificate string.
+ The decoded certificate bytes.
"""
cert_buffer = []
save = False
@@ -2196,7 +2202,7 @@
cert_buffer.append(line)
if "--BEGIN CERTIFICATE--" in line:
save = True
- cert = "".join(cert_buffer).decode('base64')
+ cert = base64.b64decode("".join(cert_buffer))
return cert
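`str.decode('base64')` relied on Python 2 codec aliases that Python 3 removed; `base64.b64decode()` behaves the same on both and returns `bytes`, matching the updated docstring. For example:

```python
import base64

pem_body = 'aGVsbG8='             # base64 text gathered between the markers
der = base64.b64decode(pem_body)  # returns bytes on both Python 2 and 3
assert der == b'hello'
```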
@@ -2338,7 +2344,7 @@
logger.info("putting script in %s", sh_location)
- output_sink(sh_location, sh)
+ output_sink(sh_location, sh.encode())
class DynamicPartitionUpdate(object):
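The recovery script is assembled as text, but the sink it is handed to writes in binary mode, hence the `encode()` above. A sketch with a hypothetical sink:

```python
def output_sink(path, data):
    # Hypothetical stand-in for the zip/file writer, which is opened in
    # binary mode and therefore expects bytes.
    assert isinstance(data, bytes)

sh = '#!/system/bin/sh\necho applying patch\n'
output_sink('bin/install-recovery.sh', sh.encode())  # str -> UTF-8 bytes
```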
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 11ac9f5..914e58e 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -579,7 +579,7 @@
def test_ExtractPublicKey(self):
cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
- with open(pubkey, 'rb') as pubkey_fp:
+ with open(pubkey) as pubkey_fp:
self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))
def test_ExtractPublicKey_invalidInput(self):
@@ -590,15 +590,16 @@
def test_ExtractAvbPublicKey(self):
privkey = os.path.join(self.testdata_dir, 'testkey.key')
pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
- with open(common.ExtractAvbPublicKey(privkey)) as privkey_fp, \
- open(common.ExtractAvbPublicKey(pubkey)) as pubkey_fp:
+ with open(common.ExtractAvbPublicKey(privkey), 'rb') as privkey_fp, \
+ open(common.ExtractAvbPublicKey(pubkey), 'rb') as pubkey_fp:
self.assertEqual(privkey_fp.read(), pubkey_fp.read())
def test_ParseCertificate(self):
cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
- proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ universal_newlines=False)
expected, _ = proc.communicate()
self.assertEqual(0, proc.returncode)
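Because `Run()` now defaults to text pipes, the one test that needs raw DER output opts back out with `universal_newlines=False`. The same distinction in plain subprocess terms, assuming `openssl` is installed:

```python
import subprocess

# check_output keeps the pipe binary by default, which matches
# universal_newlines=False; DER output must stay bytes.
out = subprocess.check_output(['openssl', 'version'])
assert isinstance(out, bytes)
```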
@@ -914,7 +915,7 @@
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
with zipfile.ZipFile(target_files, 'w') as target_files_zip:
info_values = ''.join(
- ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.iteritems())])
+ ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
@@ -1085,7 +1086,7 @@
loc = os.path.join(self._tempdir, prefix, name)
if not os.path.exists(os.path.dirname(loc)):
os.makedirs(os.path.dirname(loc))
- with open(loc, "w+") as f:
+ with open(loc, "wb") as f:
f.write(data)
def test_full_recovery(self):
@@ -1110,7 +1111,7 @@
validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
self._info)
# Validate 'recovery-from-boot' with bonus argument.
- self._out_tmp_sink("etc/recovery-resource.dat", "bonus", "SYSTEM")
+ self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
recovery_image, boot_image, self._info)
validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
@@ -1118,25 +1119,30 @@
class MockScriptWriter(object):
- """A class that mocks edify_generator.EdifyGenerator.
- """
+ """A class that mocks edify_generator.EdifyGenerator."""
+
def __init__(self, enable_comments=False):
self.lines = []
self.enable_comments = enable_comments
+
def Comment(self, comment):
if self.enable_comments:
- self.lines.append("# {}".format(comment))
+ self.lines.append('# {}'.format(comment))
+
def AppendExtra(self, extra):
self.lines.append(extra)
+
def __str__(self):
- return "\n".join(self.lines)
+ return '\n'.join(self.lines)
class MockBlockDifference(object):
+
def __init__(self, partition, tgt, src=None):
self.partition = partition
self.tgt = tgt
self.src = src
+
def WriteScript(self, script, _, progress=None,
write_verify_script=False):
if progress:
@@ -1144,11 +1150,13 @@
script.AppendExtra("patch({});".format(self.partition))
if write_verify_script:
self.WritePostInstallVerifyScript(script)
+
def WritePostInstallVerifyScript(self, script):
script.AppendExtra("verify({});".format(self.partition))
class FakeSparseImage(object):
+
def __init__(self, size):
self.blocksize = 4096
self.total_blocks = size // 4096
@@ -1156,12 +1164,13 @@
class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
+
@staticmethod
def get_op_list(output_path):
with zipfile.ZipFile(output_path) as output_zip:
- with output_zip.open("dynamic_partitions_op_list") as op_list:
- return [line.strip() for line in op_list.readlines()
- if not line.startswith("#")]
+ with output_zip.open('dynamic_partitions_op_list') as op_list:
+ return [line.decode().strip() for line in op_list.readlines()
+ if not line.startswith(b'#')]
def setUp(self):
self.script = MockScriptWriter()
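Iterating a zip member file object yields `bytes` lines on Python 3, hence the `decode()` and the `b'#'` prefix test in `get_op_list()` above. Sketch:

```python
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('dynamic_partitions_op_list',
                '# comment\nresize system 1024\n')
with zipfile.ZipFile(buf) as zf, zf.open('dynamic_partitions_op_list') as fp:
    ops = [line.decode().strip() for line in fp
           if not line.startswith(b'#')]  # lines are bytes: compare to b'#'
assert ops == ['resize system 1024']
```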
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index 1cc539f..d02bc7f 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -47,25 +47,22 @@
}
self.hash_algorithm = "sha256"
- self.fixed_salt = \
- "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
- self.expected_root_hash = \
- "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d"
+ self.fixed_salt = (
+ "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7")
+ self.expected_root_hash = (
+ "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d")
- def _create_simg(self, raw_data):
+ def _CreateSimg(self, raw_data): # pylint: disable=no-self-use
output_file = common.MakeTempFile()
raw_image = common.MakeTempFile()
with open(raw_image, 'wb') as f:
f.write(raw_data)
cmd = ["img2simg", raw_image, output_file, '4096']
- p = common.Run(cmd)
- p.communicate()
- self.assertEqual(0, p.returncode)
-
+ common.RunAndCheckOutput(cmd)
return output_file
- def _generate_image(self):
+ def _GenerateImage(self):
partition_size = 1024 * 1024
prop_dict = {
'partition_size': str(partition_size),
@@ -79,11 +76,11 @@
self.assertIsNotNone(verity_image_builder)
adjusted_size = verity_image_builder.CalculateMaxImageSize()
- raw_image = ""
+ raw_image = bytearray(adjusted_size)
for i in range(adjusted_size):
- raw_image += str(i % 10)
+ raw_image[i] = ord('0') + i % 10
- output_file = self._create_simg(raw_image)
+ output_file = self._CreateSimg(raw_image)
# Append the verity metadata.
verity_image_builder.Build(output_file)
@@ -92,7 +89,7 @@
@SkipIfExternalToolsUnavailable()
def test_CreateHashtreeInfoGenerator(self):
- image_file = sparse_img.SparseImage(self._generate_image())
+ image_file = sparse_img.SparseImage(self._GenerateImage())
generator = CreateHashtreeInfoGenerator(
'system', image_file, self.prop_dict)
@@ -103,7 +100,7 @@
@SkipIfExternalToolsUnavailable()
def test_DecomposeSparseImage(self):
- image_file = sparse_img.SparseImage(self._generate_image())
+ image_file = sparse_img.SparseImage(self._GenerateImage())
generator = VerifiedBootVersion1HashtreeInfoGenerator(
self.partition_size, 4096, True)
@@ -114,7 +111,7 @@
@SkipIfExternalToolsUnavailable()
def test_ParseHashtreeMetadata(self):
- image_file = sparse_img.SparseImage(self._generate_image())
+ image_file = sparse_img.SparseImage(self._GenerateImage())
generator = VerifiedBootVersion1HashtreeInfoGenerator(
self.partition_size, 4096, True)
generator.DecomposeSparseImage(image_file)
@@ -131,12 +128,12 @@
def test_ValidateHashtree_smoke(self):
generator = VerifiedBootVersion1HashtreeInfoGenerator(
self.partition_size, 4096, True)
- generator.image = sparse_img.SparseImage(self._generate_image())
+ generator.image = sparse_img.SparseImage(self._GenerateImage())
generator.hashtree_info = info = HashtreeInfo()
- info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+ info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
info.hashtree_range = RangeSet(
- data=[991232 / 4096, (991232 + 12288) / 4096])
+ data=[991232 // 4096, (991232 + 12288) // 4096])
info.hash_algorithm = self.hash_algorithm
info.salt = self.fixed_salt
info.root_hash = self.expected_root_hash
@@ -147,12 +144,12 @@
def test_ValidateHashtree_failure(self):
generator = VerifiedBootVersion1HashtreeInfoGenerator(
self.partition_size, 4096, True)
- generator.image = sparse_img.SparseImage(self._generate_image())
+ generator.image = sparse_img.SparseImage(self._GenerateImage())
generator.hashtree_info = info = HashtreeInfo()
- info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+ info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
info.hashtree_range = RangeSet(
- data=[991232 / 4096, (991232 + 12288) / 4096])
+ data=[991232 // 4096, (991232 + 12288) // 4096])
info.hash_algorithm = self.hash_algorithm
info.salt = self.fixed_salt
info.root_hash = "a" + self.expected_root_hash[1:]
@@ -161,12 +158,12 @@
@SkipIfExternalToolsUnavailable()
def test_Generate(self):
- image_file = sparse_img.SparseImage(self._generate_image())
+ image_file = sparse_img.SparseImage(self._GenerateImage())
generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
info = generator.Generate(image_file)
- self.assertEqual(RangeSet(data=[0, 991232 / 4096]), info.filesystem_range)
- self.assertEqual(RangeSet(data=[991232 / 4096, (991232 + 12288) / 4096]),
+ self.assertEqual(RangeSet(data=[0, 991232 // 4096]), info.filesystem_range)
+ self.assertEqual(RangeSet(data=[991232 // 4096, (991232 + 12288) // 4096]),
info.hashtree_range)
self.assertEqual(self.hash_algorithm, info.hash_algorithm)
self.assertEqual(self.fixed_salt, info.salt)
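All of the `/` → `//` swaps in this file (and in verity_utils.py below) exist because Python 3's `/` is true division and returns a float, while RangeSet boundaries must be ints:

```python
assert 991232 / 4096 == 242.0           # Python 3: true division -> float
assert 991232 // 4096 == 242            # floor division -> int
assert isinstance(991232 // 4096, int)  # RangeSet boundaries must be ints
```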
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 37d5d27..5d99c99 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -44,7 +44,7 @@
"""Constructs and returns a File object. Rounds up its size if needed."""
assert os.path.exists(unpacked_name)
- with open(unpacked_name, 'r') as f:
+ with open(unpacked_name, 'rb') as f:
file_data = f.read()
file_size = len(file_data)
if round_up:
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 3063800..e7f84f5 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -131,7 +131,8 @@
BuildVerityImageError: On error.
"""
try:
- with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
+ with open(target, 'ab') as out_file, \
+ open(file_to_append, 'rb') as input_file:
for line in input_file:
out_file.write(line)
except IOError:
@@ -178,6 +179,8 @@
# key_path and algorithm are only available when chain partition is used.
key_path = prop_dict.get("avb_key_path")
algorithm = prop_dict.get("avb_algorithm")
+
+ # Image uses hash footer.
if prop_dict.get("avb_hash_enable") == "true":
return VerifiedBootVersion2VerityImageBuilder(
prop_dict["partition_name"],
@@ -188,16 +191,17 @@
algorithm,
prop_dict.get("avb_salt"),
prop_dict["avb_add_hash_footer_args"])
- else:
- return VerifiedBootVersion2VerityImageBuilder(
- prop_dict["partition_name"],
- partition_size,
- VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
- prop_dict["avb_avbtool"],
- key_path,
- algorithm,
- prop_dict.get("avb_salt"),
- prop_dict["avb_add_hashtree_footer_args"])
+
+ # Image uses hashtree footer.
+ return VerifiedBootVersion2VerityImageBuilder(
+ prop_dict["partition_name"],
+ partition_size,
+ VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
+ prop_dict["avb_avbtool"],
+ key_path,
+ algorithm,
+ prop_dict.get("avb_salt"),
+ prop_dict["avb_add_hashtree_footer_args"])
return None
@@ -605,19 +609,19 @@
self.metadata_size = metadata_size
self.hashtree_info.filesystem_range = RangeSet(
- data=[0, adjusted_size / self.block_size])
+ data=[0, adjusted_size // self.block_size])
self.hashtree_info.hashtree_range = RangeSet(
- data=[adjusted_size / self.block_size,
- (adjusted_size + verity_tree_size) / self.block_size])
+ data=[adjusted_size // self.block_size,
+ (adjusted_size + verity_tree_size) // self.block_size])
def _ParseHashtreeMetadata(self):
"""Parses the hash_algorithm, root_hash, salt from the metadata block."""
metadata_start = self.filesystem_size + self.hashtree_size
metadata_range = RangeSet(
- data=[metadata_start / self.block_size,
- (metadata_start + self.metadata_size) / self.block_size])
- meta_data = ''.join(self.image.ReadRangeSet(metadata_range))
+ data=[metadata_start // self.block_size,
+ (metadata_start + self.metadata_size) // self.block_size])
+ meta_data = b''.join(self.image.ReadRangeSet(metadata_range))
# More info about the metadata structure available in:
# system/extras/verity/build_verity_metadata.py
@@ -640,9 +644,9 @@
assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
int(table_entries[6]) * self.block_size == self.filesystem_size)
- self.hashtree_info.hash_algorithm = table_entries[7]
- self.hashtree_info.root_hash = table_entries[8]
- self.hashtree_info.salt = table_entries[9]
+ self.hashtree_info.hash_algorithm = table_entries[7].decode()
+ self.hashtree_info.root_hash = table_entries[8].decode()
+ self.hashtree_info.salt = table_entries[9].decode()
def ValidateHashtree(self):
"""Checks that we can reconstruct the verity hash tree."""
@@ -669,8 +673,8 @@
# Reads the generated hash tree and checks if it has the exact same bytes
# as the one in the sparse image.
- with open(generated_verity_tree, "rb") as fd:
- return fd.read() == ''.join(self.image.ReadRangeSet(
+ with open(generated_verity_tree, 'rb') as fd:
+ return fd.read() == b''.join(self.image.ReadRangeSet(
self.hashtree_info.hashtree_range))
def Generate(self, image):
diff --git a/tools/warn.py b/tools/warn.py
index c710164..9389b7d 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -1,5 +1,5 @@
#!/usr/bin/python
-# This file uses the following encoding: utf-8
+# Prefer python3 but work also with python2.
"""Grep warnings messages and output HTML tables or warning counts in CSV.
@@ -74,9 +74,11 @@
# escape_string, strip_escape_string, emit_warning_arrays
# emit_js_data():
+from __future__ import print_function
import argparse
import cgi
import csv
+import io
import multiprocessing
import os
import re
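Importing `print_function` turns `print(...)` into a genuine function call on Python 2 as well, which is what allows every print statement below to be converted to the parenthesized form:

```python
from __future__ import print_function  # no-op on Python 3

print('total warnings:', 42)  # the function form behaves the same on 2 and 3
```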
@@ -540,7 +542,7 @@
{'category': 'java',
'severity': Severity.LOW,
'description':
- 'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
+ u'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
'patterns': [r".*: warning: \[LambdaFunctionalInterface\] .+"]},
{'category': 'java',
'severity': Severity.LOW,
@@ -1270,7 +1272,7 @@
{'category': 'java',
'severity': Severity.MEDIUM,
'description':
- 'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
+ u'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
'patterns': [r".*: warning: \[ShortCircuitBoolean\] .+"]},
{'category': 'java',
'severity': Severity.MEDIUM,
@@ -1535,7 +1537,7 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
+ u'Java: Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
'patterns': [r".*: warning: \[ComparableType\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -1790,7 +1792,7 @@
{'category': 'java',
'severity': Severity.HIGH,
'description':
- 'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
+ u'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
'patterns': [r".*: warning: \[IterablePathParameter\] .+"]},
{'category': 'java',
'severity': Severity.HIGH,
@@ -2922,17 +2924,17 @@
def dump_html_prologue(title):
- print '<html>\n<head>'
- print '<title>' + title + '</title>'
- print html_head_scripts
+ print('<html>\n<head>')
+ print('<title>' + title + '</title>')
+ print(html_head_scripts)
emit_stats_by_project()
- print '</head>\n<body>'
- print html_big(title)
- print '<p>'
+ print('</head>\n<body>')
+ print(html_big(title))
+ print('<p>')
def dump_html_epilogue():
- print '</body>\n</head>\n</html>'
+ print('</body>\n</head>\n</html>')
def sort_warnings():
@@ -2943,6 +2945,7 @@
def emit_stats_by_project():
"""Dump a google chart table of warnings per project and severity."""
# warnings[p][s] is number of warnings in project p of severity s.
+ # pylint:disable=g-complex-comprehension
warnings = {p: {s: 0 for s in Severity.range} for p in project_names}
for i in warn_patterns:
s = i['severity']
@@ -2988,11 +2991,11 @@
total_all_severities += total_by_severity[s]
one_row.append(total_all_projects)
stats_rows.append(one_row)
- print '<script>'
+ print('<script>')
emit_const_string_array('StatsHeader', stats_header)
emit_const_object_array('StatsRows', stats_rows)
- print draw_table_javascript
- print '</script>'
+ print(draw_table_javascript)
+ print('</script>')
def dump_stats():
@@ -3008,14 +3011,14 @@
skipped += len(i['members'])
else:
known += len(i['members'])
- print 'Number of classified warnings: <b>' + str(known) + '</b><br>'
- print 'Number of skipped warnings: <b>' + str(skipped) + '</b><br>'
- print 'Number of unclassified warnings: <b>' + str(unknown) + '</b><br>'
+ print('Number of classified warnings: <b>' + str(known) + '</b><br>')
+ print('Number of skipped warnings: <b>' + str(skipped) + '</b><br>')
+ print('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
total = unknown + known + skipped
extra_msg = ''
if total < 1000:
extra_msg = ' (low count may indicate incremental build)'
- print 'Total number of warnings: <b>' + str(total) + '</b>' + extra_msg
+ print('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
# New base table of warnings, [severity, warn_id, project, warning_message]
@@ -3029,14 +3032,14 @@
# id for each warning pattern
# sort by project, severity, warn_id, warning_message
def emit_buttons():
- print ('<button class="button" onclick="expandCollapse(1);">'
- 'Expand all warnings</button>\n'
- '<button class="button" onclick="expandCollapse(0);">'
- 'Collapse all warnings</button>\n'
- '<button class="button" onclick="groupBySeverity();">'
- 'Group warnings by severity</button>\n'
- '<button class="button" onclick="groupByProject();">'
- 'Group warnings by project</button><br>')
+ print('<button class="button" onclick="expandCollapse(1);">'
+ 'Expand all warnings</button>\n'
+ '<button class="button" onclick="expandCollapse(0);">'
+ 'Collapse all warnings</button>\n'
+ '<button class="button" onclick="groupBySeverity();">'
+ 'Group warnings by severity</button>\n'
+ '<button class="button" onclick="groupByProject();">'
+ 'Group warnings by project</button><br>')
def all_patterns(category):
@@ -3051,14 +3054,14 @@
"""Show which warnings no longer occur."""
anchor = 'fixed_warnings'
mark = anchor + '_mark'
- print ('\n<br><p style="background-color:lightblue"><b>'
- '<button id="' + mark + '" '
- 'class="bt" onclick="expand(\'' + anchor + '\');">'
- '⊕</button> Fixed warnings. '
- 'No more occurrences. Please consider turning these into '
- 'errors if possible, before they are reintroduced in to the build'
- ':</b></p>')
- print '<blockquote>'
+ print('\n<br><p style="background-color:lightblue"><b>'
+ '<button id="' + mark + '" '
+ 'class="bt" onclick="expand(\'' + anchor + '\');">'
+ '⊕</button> Fixed warnings. '
+ 'No more occurrences. Please consider turning these into '
+ 'errors if possible, before they are reintroduced into the build'
+ ':</b></p>')
+ print('<blockquote>')
fixed_patterns = []
for i in warn_patterns:
if not i['members']:
@@ -3066,16 +3069,16 @@
all_patterns(i) + ')')
if i['option']:
fixed_patterns.append(' ' + i['option'])
- fixed_patterns.sort()
- print '<div id="' + anchor + '" style="display:none;"><table>'
+ fixed_patterns = sorted(fixed_patterns)
+ print('<div id="' + anchor + '" style="display:none;"><table>')
cur_row_class = 0
for text in fixed_patterns:
cur_row_class = 1 - cur_row_class
# remove last '\n'
t = text[:-1] if text[-1] == '\n' else text
- print '<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>'
- print '</table></div>'
- print '</blockquote>'
+ print('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
+ print('</table></div>')
+ print('</blockquote>')
def find_project_index(line):
@@ -3187,8 +3190,9 @@
def normalize_warning_line(line):
"""Normalize file path relative to android_root in a warning line."""
# replace fancy quotes with plain ol' quotes
- line = line.replace('‘', "'")
- line = line.replace('’', "'")
+ line = re.sub(u'[\u2018\u2019]', '\'', line)
+ # replace non-ASCII chars with spaces
+ line = re.sub(u'[^\x00-\x7f]', ' ', line)
line = line.strip()
first_column = line.find(':')
if first_column > 0:
@@ -3246,21 +3250,22 @@
def emit_warning_array(name):
- print 'var warning_{} = ['.format(name)
+ print('var warning_{} = ['.format(name))
for i in range(len(warn_patterns)):
- print '{},'.format(warn_patterns[i][name])
- print '];'
+ print('{},'.format(warn_patterns[i][name]))
+ print('];')
def emit_warning_arrays():
emit_warning_array('severity')
- print 'var warning_description = ['
+ print('var warning_description = [')
for i in range(len(warn_patterns)):
if warn_patterns[i]['members']:
- print '"{}",'.format(escape_string(warn_patterns[i]['description']))
+ print('"{}",'.format(escape_string(warn_patterns[i]['description'])))
else:
- print '"",' # no such warning
- print '];'
+ print('"",') # no such warning
+ print('];')
+
scripts_for_warning_groups = """
function compareMessages(x1, x2) { // of the same warning type
@@ -3393,39 +3398,42 @@
# Emit a JavaScript const string
def emit_const_string(name, value):
- print 'const ' + name + ' = "' + escape_string(value) + '";'
+ print('const ' + name + ' = "' + escape_string(value) + '";')
# Emit a JavaScript const integer array.
def emit_const_int_array(name, array):
- print 'const ' + name + ' = ['
+ print('const ' + name + ' = [')
for n in array:
- print str(n) + ','
- print '];'
+ print(str(n) + ',')
+ print('];')
# Emit a JavaScript const string array.
def emit_const_string_array(name, array):
- print 'const ' + name + ' = ['
+ print('const ' + name + ' = [')
for s in array:
- print '"' + strip_escape_string(s) + '",'
- print '];'
+ print('"' + strip_escape_string(s) + '",')
+ print('];')
# Emit a JavaScript const string array for HTML.
def emit_const_html_string_array(name, array):
- print 'const ' + name + ' = ['
+ print('const ' + name + ' = [')
for s in array:
- print '"' + cgi.escape(strip_escape_string(s)) + '",'
- print '];'
+ # Not using html.escape yet, to work for both python 2 and 3,
+ # until all users switch to python 3.
+ # pylint:disable=deprecated-method
+ print('"' + cgi.escape(strip_escape_string(s)) + '",')
+ print('];')
# Emit a JavaScript const object array.
def emit_const_object_array(name, array):
- print 'const ' + name + ' = ['
+ print('const ' + name + ' = [')
for x in array:
- print str(x) + ','
- print '];'
+ print(str(x) + ',')
+ print('];')
def emit_js_data():
@@ -3471,18 +3479,18 @@
dump_html_prologue('Warnings for ' + platform_version + ' - ' +
target_product + ' - ' + target_variant)
dump_stats()
- print '<br><div id="stats_table"></div><br>'
- print '\n<script>'
+ print('<br><div id="stats_table"></div><br>')
+ print('\n<script>')
emit_js_data()
- print scripts_for_warning_groups
- print '</script>'
+ print(scripts_for_warning_groups)
+ print('</script>')
emit_buttons()
# Warning messages are grouped by severities or project names.
- print '<br><div id="warning_groups"></div>'
+ print('<br><div id="warning_groups"></div>')
if args.byproject:
- print '<script>groupByProject();</script>'
+ print('<script>groupByProject();</script>')
else:
- print '<script>groupBySeverity();</script>'
+ print('<script>groupBySeverity();</script>')
dump_fixed()
dump_html_epilogue()
@@ -3506,8 +3514,7 @@
warning = kind + ': ' + description_for_csv(i)
writer.writerow([n, '', warning])
# print number of warnings for each project, ordered by project name.
- projects = i['projects'].keys()
- projects.sort()
+ projects = sorted(i['projects'].keys())
for p in projects:
writer.writerow([i['projects'][p], p, warning])
writer.writerow([total, '', kind + ' warnings'])
@@ -3526,7 +3533,9 @@
def main():
- warning_lines = parse_input_file(open(args.buildlog, 'r'))
+ # We must use 'utf-8' codec to parse some non-ASCII code in warnings.
+ warning_lines = parse_input_file(
+ io.open(args.buildlog, mode='r', encoding='utf-8'))
parallel_classify_warnings(warning_lines)
# If a user passes a csv path, save the file output to the path
# If the user also passed gencsv, write the output to stdout
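`io.open()` is the Python 3 built-in `open()` backported to Python 2; with `encoding='utf-8'` it yields unicode text on both, so the non-ASCII characters handled by `normalize_warning_line()` parse cleanly. A self-contained sketch (the temp file stands in for the build log):

```python
import io
import tempfile

path = tempfile.mktemp()
with io.open(path, 'w', encoding='utf-8') as f:
    f.write(u'warning: bad \u2018quote\u2019\n')
with io.open(path, 'r', encoding='utf-8') as f:
    lines = f.readlines()  # unicode text on both Python 2 and 3
assert u'\u2018' in lines[0]
```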