releasetools: Clean up additional modules for Python 3 compatibility.
It also addresses a few linter warnings, mostly about indentation.
Bug: 131631303
Test: TreeHugger
Test: `python -m unittest test_ota_from_target_files`
Test: `python3 -m unittest test_ota_from_target_files`
Test: `python build/make/tools/releasetools/check_ota_package_signature.py
build/make/target/product/security/testkey.x509.pem
test-keys-signed-ota.zip`
Test: `python3 build/make/tools/releasetools/check_ota_package_signature.py
build/make/target/product/security/testkey.x509.pem
test-keys-signed-ota.zip`
Change-Id: I11a745dfec052081ab0282bb028dc27debf71f57
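
For context, a minimal sketch of the Python 2 vs. Python 3 behavior differences
this change accounts for (illustrative only; the sample values are made up):

  # Illustrative only -- mirrors the idioms touched by this change.
  metadata = {"post-build": "bar", "pre-device": "foo"}

  # dict.iteritems() is gone in Python 3; dict.items() works in both.
  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])

  # "/" is true division in Python 3; "//" keeps the integral key size
  # that the assertion on 256/512 expects.
  key_size = len("00" * 256) // 2    # == 256 in both Python 2 and 3

  # Python 3 unified int and long, so long() no longer exists.
  is_downgrade = int("1557868800") < int("1557955200")

  # dict.values() returns a view in Python 3; wrap it in list() before
  # concatenating it onto another list.
  to_delete = []
  to_delete += list(metadata.values())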
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index f686ca0..3442b27 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -38,8 +38,8 @@
-k (--package_key) <key>
Key to use to sign the package (default is the value of
default_system_dev_certificate from the input target-files's
- META/misc_info.txt, or "build/make/target/product/security/testkey" if that
- value is not specified).
+ META/misc_info.txt, or "build/make/target/product/security/testkey" if
+ that value is not specified).
For incremental OTAs, the default value is based on the source
target-file, not the target build.
@@ -371,7 +371,7 @@
return prop_val
source_order_val = self.info_dict.get("build.prop", {}).get(
- "ro.product.property_source_order")
+ "ro.product.property_source_order")
if source_order_val:
source_order = source_order_val.split(",")
else:
@@ -381,13 +381,13 @@
if any([x not in BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
for x in source_order]):
raise common.ExternalError(
- "Invalid ro.product.property_source_order '{}'".format(source_order))
+ "Invalid ro.product.property_source_order '{}'".format(source_order))
for source in source_order:
- source_prop = prop.replace("ro.product", "ro.product.{}".format(source),
- 1)
- prop_val = self.info_dict.get("{}.build.prop".format(source), {}).get(
- source_prop)
+ source_prop = prop.replace(
+ "ro.product", "ro.product.{}".format(source), 1)
+ prop_val = self.info_dict.get(
+ "{}.build.prop".format(source), {}).get(source_prop)
if prop_val:
return prop_val
@@ -412,14 +412,14 @@
return self.GetBuildProp("ro.build.fingerprint")
except common.ExternalError:
return "{}/{}/{}:{}/{}/{}:{}/{}".format(
- self.GetBuildProp("ro.product.brand"),
- self.GetBuildProp("ro.product.name"),
- self.GetBuildProp("ro.product.device"),
- self.GetBuildProp("ro.build.version.release"),
- self.GetBuildProp("ro.build.id"),
- self.GetBuildProp("ro.build.version.incremental"),
- self.GetBuildProp("ro.build.type"),
- self.GetBuildProp("ro.build.tags"))
+ self.GetBuildProp("ro.product.brand"),
+ self.GetBuildProp("ro.product.name"),
+ self.GetBuildProp("ro.product.device"),
+ self.GetBuildProp("ro.build.version.release"),
+ self.GetBuildProp("ro.build.id"),
+ self.GetBuildProp("ro.build.version.incremental"),
+ self.GetBuildProp("ro.build.type"),
+ self.GetBuildProp("ro.build.tags"))
return "%s/%s/%s:%s" % (
self.GetOemProperty("ro.product.brand"),
self.GetOemProperty("ro.product.name"),
@@ -508,7 +508,7 @@
MODULUS_PREFIX = "Modulus="
assert modulus_string.startswith(MODULUS_PREFIX)
modulus_string = modulus_string[len(MODULUS_PREFIX):]
- key_size = len(modulus_string) / 2
+ key_size = len(modulus_string) // 2
assert key_size == 256 or key_size == 512, \
"Unsupported key size {}".format(key_size)
return key_size
@@ -1051,7 +1051,7 @@
output: A ZipFile object or a string of the output file path.
"""
- value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.iteritems())])
+ value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
if isinstance(output, zipfile.ZipFile):
common.ZipWriteStr(output, METADATA_NAME, value,
compress_type=zipfile.ZIP_STORED)
@@ -1067,7 +1067,7 @@
post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
- is_downgrade = long(post_timestamp) < long(pre_timestamp)
+ is_downgrade = int(post_timestamp) < int(pre_timestamp)
if OPTIONS.downgrade:
if not is_downgrade:
@@ -1392,7 +1392,7 @@
payload_offset += len(payload_info.extra) + len(payload_info.filename)
payload_size = payload_info.file_size
- with input_zip.open('payload.bin', 'r') as payload_fp:
+ with input_zip.open('payload.bin') as payload_fp:
header_bin = payload_fp.read(24)
# network byte order (big-endian)
@@ -1864,7 +1864,6 @@
with zipfile.ZipFile(input_file, 'r') as input_zip:
infolist = input_zip.infolist()
- namelist = input_zip.namelist()
input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
for info in infolist:
@@ -1963,8 +1962,8 @@
for partition in ab_partitions:
if (partition in dynamic_partition_list and
partition not in super_block_devices):
- logger.info("Dropping %s from ab_partitions.txt", partition)
- continue
+ logger.info("Dropping %s from ab_partitions.txt", partition)
+ continue
f.write(partition + "\n")
to_delete = [AB_PARTITIONS]
@@ -1976,7 +1975,7 @@
to_delete += [DYNAMIC_PARTITION_INFO]
# Remove the existing partition images as well as the map files.
- to_delete += replace.values()
+ to_delete += list(replace.values())
to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
common.ZipDelete(target_file, to_delete)
@@ -1986,7 +1985,7 @@
# Write super_{foo}.img as {foo}.img.
for src, dst in replace.items():
assert src in namelist, \
- 'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
+ 'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
unzipped_file = os.path.join(input_tmp, *src.split('/'))
common.ZipWrite(target_zip, unzipped_file, arcname=dst)
@@ -2291,7 +2290,8 @@
OPTIONS.cache_size = cache_size
if OPTIONS.extra_script is not None:
- OPTIONS.extra_script = open(OPTIONS.extra_script).read()
+ with open(OPTIONS.extra_script) as fp:
+ OPTIONS.extra_script = fp.read()
if OPTIONS.extracted_input is not None:
OPTIONS.input_tmp = OPTIONS.extracted_input