Merge "Allow files generated by droidstubs to be copied in the SDK"
diff --git a/core/binary.mk b/core/binary.mk
index d9763f9..51259b2 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -120,8 +120,6 @@
$(error $(LOCAL_PATH): LOCAL_SDK_VERSION cannot be used in host module)
endif
- my_cflags += -D__ANDROID_NDK__
-
# Make sure we've built the NDK.
my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
diff --git a/core/config.mk b/core/config.mk
index 49b9329..241ac8d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -630,9 +630,11 @@
USE_OPENJDK9 := true
ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),)
-TARGET_OPENJDK9 :=
+TARGET_OPENJDK9 := true
else ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),true)
TARGET_OPENJDK9 := true
+else ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),false)
+TARGET_OPENJDK9 :=
endif
# Path to tools.jar
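
The net effect of the config.mk hunk above is that the OpenJDK 9 language level is now on by default and must be opted out of explicitly. A rough Python model of the new conditional chain, purely for illustration (function and value names are made up):

    def resolve_target_openjdk9(experimental_java_language_level_9=""):
        # Mirrors the rewritten ifeq chain: unset or "true" enables
        # TARGET_OPENJDK9, "false" disables it, and any other value leaves
        # the variable untouched (the chain has no final else).
        if experimental_java_language_level_9 in ("", "true"):
            return "true"
        if experimental_java_language_level_9 == "false":
            return ""
        return None

    assert resolve_target_openjdk9() == "true"      # new default
    assert resolve_target_openjdk9("false") == ""   # explicit opt-out
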
diff --git a/core/android_vts_host_config.mk b/core/suite_host_config.mk
similarity index 89%
rename from core/android_vts_host_config.mk
rename to core/suite_host_config.mk
index 38ba19d..d575c5b 100644
--- a/core/android_vts_host_config.mk
+++ b/core/suite_host_config.mk
@@ -16,11 +16,9 @@
LOCAL_MODULE_CLASS := FAKE
LOCAL_IS_HOST_MODULE := true
-LOCAL_COMPATIBILITY_SUITE := vts
include $(BUILD_SYSTEM)/base_rules.mk
$(LOCAL_BUILT_MODULE):
- @echo "VTS host-driven test target: $(PRIVATE_MODULE)"
+ @echo "$(LOCAL_COMPATIBILITY_SUITE) host-driven test target: $(PRIVATE_MODULE)"
$(hide) touch $@
-
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index f49218a..7cc3270 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -364,6 +364,7 @@
logpersist.start \
logtagd.rc \
procrank \
+ remount \
showmap \
sqlite3 \
ss \
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 93b0b15..2b288e6 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -154,8 +154,7 @@
vndkprivate.libraries.txt \
vndkcorevariant.libraries.txt \
$(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
- $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
- com.android.vndk.current
+ $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES))
endif
include $(BUILD_PHONY_PACKAGE)
@@ -167,11 +166,8 @@
_binder32 := _binder32
endif
endif
-# Phony targets are installed for **.libraries.txt files.
-# TODO(b/141450808): remove following VNDK phony targets when **.libraries.txt files are provided by apexes.
LOCAL_REQUIRED_MODULES := \
$(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH)$(_binder32))
-LOCAL_REQUIRED_MODULES += $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver))
_binder32 :=
include $(BUILD_PHONY_PACKAGE)
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 3e44349..8fed53c 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -137,6 +137,7 @@
VNDK-core: android.hardware.neuralnetworks@1.0.so
VNDK-core: android.hardware.neuralnetworks@1.1.so
VNDK-core: android.hardware.neuralnetworks@1.2.so
+VNDK-core: android.hardware.neuralnetworks@1.3.so
VNDK-core: android.hardware.nfc@1.0.so
VNDK-core: android.hardware.nfc@1.1.so
VNDK-core: android.hardware.nfc@1.2.so
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index a71f9b8..cd6a0f7 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -78,6 +78,7 @@
android.hardware.secure_element@1.0 \
android.hardware.wifi@1.0 \
libaudio-resampler \
+ libaudiohal \
libdrm \
liblogwrap \
liblz4 \
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 3631cfd..d6a8b53 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -80,30 +80,3 @@
$(extra_recovery_keys)
$(SOONG_ZIP) -o $@ -j \
$(foreach key_file, $(PRIVATE_CERT) $(PRIVATE_EXTRA_RECOVERY_KEYS), -f $(key_file))
-
-
-#######################################
-# update_engine_payload_key, used by update_engine. We use the same key as otacerts but in RSA
-# public key format.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := update_engine_payload_key
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_STEM := update-payload-key.pub.pem
-LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/update_engine
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
- openssl x509 -pubkey -noout -in $< > $@
-
-
-#######################################
-# update_engine_payload_key for recovery image, used by update_engine.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := update_engine_payload_key.recovery
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_STEM := update-payload-key.pub.pem
-LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/update_engine
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
- openssl x509 -pubkey -noout -in $< > $@
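
For context, the rules removed above derived the update_engine payload verification key by extracting the public key from the OTA signing certificate. A minimal Python sketch of that step, assuming openssl is on PATH and using placeholder paths (the removed make recipe runs the same openssl command directly):

    import subprocess

    def extract_payload_pubkey(cert_pem, out_pem):
        # Equivalent of the removed recipe:
        #   openssl x509 -pubkey -noout -in $< > $@
        pubkey = subprocess.check_output(
            ["openssl", "x509", "-pubkey", "-noout", "-in", cert_pem])
        with open(out_pem, "wb") as f:
            f.write(pubkey)

    # extract_payload_pubkey("releasekey.x509.pem", "update-payload-key.pub.pem")
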
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index f846b18..031db1d 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -882,7 +882,7 @@
# boot chain, to be verified with AVB_SLOT_VERIFY_FLAGS_NO_VBMETA_PARTITION.
# See details at
# https://android.googlesource.com/platform/external/avb/+/master/README.md#booting-into-recovery.
- if OPTIONS.info_dict.get("ab_update") != "true" and partition == "recovery":
+ if info_dict.get("ab_update") != "true" and partition == "recovery":
return []
# Otherwise chain the partition into vbmeta.
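
The one-line change above makes the non-A/B recovery check read ab_update from the info_dict argument instead of the global OPTIONS, which is what the new tests in test_common.py below exercise. A simplified sketch of the branch being touched (not the full common.GetAvbPartitionArg; the real code resolves the chained public key via avbtool):

    def get_avb_partition_arg(partition, image, info_dict):
        if info_dict.get("ab_update") != "true" and partition == "recovery":
            # Non-A/B recovery carries its own AVB footer and is verified with
            # AVB_SLOT_VERIFY_FLAGS_NO_VBMETA_PARTITION, so it is not chained.
            return []
        # Otherwise chain the partition into vbmeta (placeholder key path).
        index = info_dict["avb_{}_rollback_index_location".format(partition)]
        pubkey = info_dict["avb_{}_key_path".format(partition)]
        return ["--chain_partition",
                "{}:{}:{}".format(partition, index, pubkey)]
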
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 1e7bb3a..dfcfb49 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -171,8 +171,16 @@
--payload_signer_args <args>
Specify the arguments needed for payload signer.
+ --payload_signer_maximum_signature_size <signature_size>
+ The maximum signature size (in bytes) that would be generated by the given
+ payload signer. Only meaningful when a custom payload signer is specified
+ via '--payload_signer'.
+ If the signer uses an RSA key, this should be the number of bytes needed
+ to represent the modulus. If it uses an EC key, this is the size of a
+ DER-encoded ECDSA signature.
+
--payload_signer_key_size <key_size>
- Specify the key size in bytes of the payload signer.
+ Deprecated. Use '--payload_signer_maximum_signature_size' instead.
--skip_postinstall
Skip the postinstall hooks when generating an A/B OTA package (default:
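
A rough sketch of where the two numbers in that description come from, using the cryptography package only for illustration (the tool itself asks delta_generator, as the hunk further down shows):

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import ec, rsa

    def estimate_max_signature_size(key_pem_path):
        with open(key_pem_path, "rb") as f:
            key = serialization.load_pem_private_key(f.read(), password=None)
        if isinstance(key, rsa.RSAPrivateKey):
            # An RSA signature is exactly as long as the modulus:
            # 256 bytes for RSA-2048, 512 bytes for RSA-4096.
            return key.key_size // 8
        if isinstance(key, ec.EllipticCurvePrivateKey):
            # A DER-encoded ECDSA signature is a SEQUENCE of two INTEGERs,
            # each up to coordinate size + 1 (leading zero) + 2 (tag, length),
            # plus a 2-byte SEQUENCE header: 72 bytes for P-256.
            n = (key.curve.key_size + 7) // 8
            return 2 * (n + 3) + 2
        raise ValueError("unsupported key type")
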
@@ -231,7 +239,7 @@
OPTIONS.log_diff = None
OPTIONS.payload_signer = None
OPTIONS.payload_signer_args = []
-OPTIONS.payload_signer_key_size = None
+OPTIONS.payload_signer_maximum_signature_size = None
OPTIONS.extracted_input = None
OPTIONS.key_passwords = []
OPTIONS.skip_postinstall = False
@@ -288,35 +296,31 @@
self.signer = "openssl"
self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
"-pkeyopt", "digest:sha256"]
- self.key_size = self._GetKeySizeInBytes(signing_key)
+ self.maximum_signature_size = self._GetMaximumSignatureSizeInBytes(
+ signing_key)
else:
self.signer = OPTIONS.payload_signer
self.signer_args = OPTIONS.payload_signer_args
- if OPTIONS.payload_signer_key_size:
- self.key_size = int(OPTIONS.payload_signer_key_size)
- assert self.key_size == 256 or self.key_size == 512, \
- "Unsupported key size {}".format(OPTIONS.payload_signer_key_size)
+ if OPTIONS.payload_signer_maximum_signature_size:
+ self.maximum_signature_size = int(
+ OPTIONS.payload_signer_maximum_signature_size)
else:
- self.key_size = 256
+ # The legacy config uses RSA2048 keys.
+ logger.warning("The maximum signature size for payload signer is not"
+ " set, default to 256 bytes.")
+ self.maximum_signature_size = 256
@staticmethod
- def _GetKeySizeInBytes(signing_key):
- modulus_file = common.MakeTempFile(prefix="modulus-")
- cmd = ["openssl", "rsa", "-inform", "PEM", "-in", signing_key, "-modulus",
- "-noout", "-out", modulus_file]
- common.RunAndCheckOutput(cmd, verbose=False)
-
- with open(modulus_file) as f:
- modulus_string = f.read()
- # The modulus string has the format "Modulus=$data", where $data is the
- # concatenation of hex dump of the modulus.
- MODULUS_PREFIX = "Modulus="
- assert modulus_string.startswith(MODULUS_PREFIX)
- modulus_string = modulus_string[len(MODULUS_PREFIX):]
- key_size = len(modulus_string) // 2
- assert key_size == 256 or key_size == 512, \
- "Unsupported key size {}".format(key_size)
- return key_size
+ def _GetMaximumSignatureSizeInBytes(signing_key):
+ out_signature_size_file = common.MakeTempFile("signature_size")
+ cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
+ out_signature_size_file), "--private_key={}".format(signing_key)]
+ common.RunAndCheckOutput(cmd)
+ with open(out_signature_size_file) as f:
+ signature_size = f.read().rstrip()
+ logger.info("% outputs the maximum signature size: %", cmd[0],
+ signature_size)
+ return int(signature_size)
def Sign(self, in_file):
"""Signs the given input file. Returns the output filename."""
@@ -396,7 +400,7 @@
metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
cmd = ["brillo_update_payload", "hash",
"--unsigned_payload", self.payload_file,
- "--signature_size", str(payload_signer.key_size),
+ "--signature_size", str(payload_signer.maximum_signature_size),
"--metadata_hash_file", metadata_sig_file,
"--payload_hash_file", payload_sig_file]
self._Run(cmd)
@@ -411,7 +415,7 @@
cmd = ["brillo_update_payload", "sign",
"--unsigned_payload", self.payload_file,
"--payload", signed_payload_file,
- "--signature_size", str(payload_signer.key_size),
+ "--signature_size", str(payload_signer.maximum_signature_size),
"--metadata_signature_file", signed_metadata_sig_file,
"--payload_signature_file", signed_payload_sig_file]
self._Run(cmd)
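
Both hunks above only rename key_size to maximum_signature_size; the signing flow is unchanged. Roughly, assuming brillo_update_payload is on PATH and sign_blob stands in for the external signer, the sequence is:

    import subprocess

    def sign_payload(payload_file, payload_signer, sign_blob):
        # Illustrative outline of the hash-then-sign steps shown above;
        # file names are placeholders.
        sig_size = str(payload_signer.maximum_signature_size)
        # 1. Ask brillo_update_payload which hashes need signing.
        subprocess.check_call([
            "brillo_update_payload", "hash",
            "--unsigned_payload", payload_file,
            "--signature_size", sig_size,
            "--metadata_hash_file", "metadata.hash",
            "--payload_hash_file", "payload.hash"])
        # 2. Sign both hashes with the configured signer.
        sign_blob("metadata.hash", "metadata.sig")
        sign_blob("payload.hash", "payload.sig")
        # 3. Attach the signatures to produce the signed payload.
        subprocess.check_call([
            "brillo_update_payload", "sign",
            "--unsigned_payload", payload_file,
            "--payload", "payload_signed.bin",
            "--signature_size", sig_size,
            "--metadata_signature_file", "metadata.sig",
            "--payload_signature_file", "payload.sig"])
        return "payload_signed.bin"
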
@@ -2005,8 +2009,13 @@
OPTIONS.payload_signer = a
elif o == "--payload_signer_args":
OPTIONS.payload_signer_args = shlex.split(a)
+ elif o == "--payload_signer_maximum_signature_size":
+ OPTIONS.payload_signer_maximum_signature_size = a
elif o == "--payload_signer_key_size":
- OPTIONS.payload_signer_key_size = a
+ # TODO(Xunchang) remove this option after cleaning up the callers.
+ logger.warning("The option '--payload_signer_key_size' is deprecated."
+ " Use '--payload_signer_maximum_signature_size' instead.")
+ OPTIONS.payload_signer_maximum_signature_size = a
elif o == "--extracted_input_target_files":
OPTIONS.extracted_input = a
elif o == "--skip_postinstall":
@@ -2047,6 +2056,7 @@
"log_diff=",
"payload_signer=",
"payload_signer_args=",
+ "payload_signer_maximum_signature_size=",
"payload_signer_key_size=",
"extracted_input_target_files=",
"skip_postinstall",
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 0ef698e..0f4f1da 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -557,14 +557,13 @@
OPTIONS.rebuild_recovery = True
# Don't copy OTA certs if we're replacing them.
+ # Replacement of update-payload-key.pub.pem was removed in b/116660991.
elif (
OPTIONS.replace_ota_keys and
filename in (
"BOOT/RAMDISK/system/etc/security/otacerts.zip",
- "BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
"RECOVERY/RAMDISK/system/etc/security/otacerts.zip",
- "SYSTEM/etc/security/otacerts.zip",
- "SYSTEM/etc/update_engine/update-payload-key.pub.pem")):
+ "SYSTEM/etc/security/otacerts.zip")):
pass
# Skip META/misc_info.txt since we will write back the new values later.
@@ -832,24 +831,6 @@
# We DO NOT include the extra_recovery_keys (if any) here.
WriteOtacerts(output_tf_zip, "SYSTEM/etc/security/otacerts.zip", mapped_keys)
- # For A/B devices, update the payload verification key.
- if misc_info.get("ab_update") == "true":
- # Unlike otacerts.zip that may contain multiple keys, we can only specify
- # ONE payload verification key.
- if len(mapped_keys) > 1:
- print("\n WARNING: Found more than one OTA keys; Using the first one"
- " as payload verification key.\n\n")
-
- print("Using %s for payload verification." % (mapped_keys[0],))
- pubkey = common.ExtractPublicKey(mapped_keys[0])
- common.ZipWriteStr(
- output_tf_zip,
- "SYSTEM/etc/update_engine/update-payload-key.pub.pem",
- pubkey)
- common.ZipWriteStr(
- output_tf_zip,
- "BOOT/RAMDISK/system/etc/update_engine/update-payload-key.pub.pem",
- pubkey)
def ReplaceVerityPublicKey(output_zip, filename, key_path):
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 59b05e9..8a52419 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1373,6 +1373,39 @@
self.assertEqual('5', chained_partition_args[1])
self.assertTrue(os.path.exists(chained_partition_args[2]))
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendVBMetaArgsForPartition_recoveryAsChainedPartition_nonAb(self):
+ testdata_dir = test_utils.get_testdata_dir()
+ pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
+ info_dict = {
+ 'avb_avbtool': 'avbtool',
+ 'avb_recovery_key_path': pubkey,
+ 'avb_recovery_rollback_index_location': 3,
+ }
+ cmd = common.GetAvbPartitionArg(
+ 'recovery', '/path/to/recovery.img', info_dict)
+ self.assertFalse(cmd)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendVBMetaArgsForPartition_recoveryAsChainedPartition_ab(self):
+ testdata_dir = test_utils.get_testdata_dir()
+ pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
+ info_dict = {
+ 'ab_update': 'true',
+ 'avb_avbtool': 'avbtool',
+ 'avb_recovery_key_path': pubkey,
+ 'avb_recovery_rollback_index_location': 3,
+ }
+ cmd = common.GetAvbPartitionArg(
+ 'recovery', '/path/to/recovery.img', info_dict)
+ self.assertEqual(2, len(cmd))
+ self.assertEqual('--chain_partition', cmd[0])
+ chained_partition_args = cmd[1].split(':')
+ self.assertEqual(3, len(chained_partition_args))
+ self.assertEqual('recovery', chained_partition_args[0])
+ self.assertEqual('3', chained_partition_args[1])
+ self.assertTrue(os.path.exists(chained_partition_args[2]))
+
class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index c3021a1..38faf64 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -885,10 +885,28 @@
payload_offset, metadata_total = (
property_files._GetPayloadMetadataOffsetAndSize(input_zip))
- # Read in the metadata signature directly.
+ # The signature proto has the following format (details in
+ # /platform/system/update_engine/update_metadata.proto):
+ # message Signature {
+ # optional uint32 version = 1;
+ # optional bytes data = 2;
+ # optional fixed32 unpadded_signature_size = 3;
+ # }
+ #
+ # According to the protobuf encoding, the tail of the signature message will
+ # be [signature string (256 bytes) + encoding of the fixed32 number 256], and
+ # 256 is encoded as '\x1d\x00\x01\x00\x00': tag (3 << 3 | 5 = 0x1d) followed
+ # by 0x100 (256) as little-endian fixed32 bytes.
+ # Details in (https://developers.google.com/protocol-buffers/docs/encoding)
+ signature_tail_length = self.SIGNATURE_SIZE + 5
+ self.assertGreater(metadata_total, signature_tail_length)
with open(output_file, 'rb') as verify_fp:
- verify_fp.seek(payload_offset + metadata_total - self.SIGNATURE_SIZE)
- metadata_signature = verify_fp.read(self.SIGNATURE_SIZE)
+ verify_fp.seek(payload_offset + metadata_total - signature_tail_length)
+ metadata_signature_proto_tail = verify_fp.read(signature_tail_length)
+
+ self.assertEqual(b'\x1d\x00\x01\x00\x00',
+ metadata_signature_proto_tail[-5:])
+ metadata_signature = metadata_signature_proto_tail[:-5]
# Now we extract the metadata hash via brillo_update_payload script, which
# will serve as the oracle result.
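
The 5-byte tail asserted above can be reproduced with the struct module; a short worked example for the 256-byte signature size the test assumes:

    import struct

    def signature_proto_tail(unpadded_signature_size):
        # Field 3, wire type 5 (fixed32): tag byte = (3 << 3) | 5 = 0x1d,
        # followed by the value as 4 little-endian bytes.
        return bytes([(3 << 3) | 5]) + struct.pack("<I", unpadded_signature_size)

    assert signature_proto_tail(256) == b"\x1d\x00\x01\x00\x00"
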
@@ -1050,11 +1068,13 @@
with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2:
self.assertEqual(fp1.read(), fp2.read())
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_init(self):
payload_signer = PayloadSigner()
self.assertEqual('openssl', payload_signer.signer)
- self.assertEqual(256, payload_signer.key_size)
+ self.assertEqual(256, payload_signer.maximum_signature_size)
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_init_withPassword(self):
common.OPTIONS.package_key = os.path.join(
self.testdata_dir, 'testkey_with_passwd')
@@ -1067,18 +1087,27 @@
def test_init_withExternalSigner(self):
common.OPTIONS.payload_signer = 'abc'
common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
- common.OPTIONS.payload_signer_key_size = '512'
+ common.OPTIONS.payload_signer_maximum_signature_size = '512'
payload_signer = PayloadSigner()
self.assertEqual('abc', payload_signer.signer)
self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
- self.assertEqual(512, payload_signer.key_size)
+ self.assertEqual(512, payload_signer.maximum_signature_size)
- def test_GetKeySizeInBytes_512Bytes(self):
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetMaximumSignatureSizeInBytes_512Bytes(self):
signing_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
# pylint: disable=protected-access
- key_size = PayloadSigner._GetKeySizeInBytes(signing_key)
- self.assertEqual(512, key_size)
+ signature_size = PayloadSigner._GetMaximumSignatureSizeInBytes(signing_key)
+ self.assertEqual(512, signature_size)
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_GetMaximumSignatureSizeInBytes_ECKey(self):
+ signing_key = os.path.join(self.testdata_dir, 'testkey_EC.key')
+ # pylint: disable=protected-access
+ signature_size = PayloadSigner._GetMaximumSignatureSizeInBytes(signing_key)
+ self.assertEqual(72, signature_size)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
def test_Sign(self):
payload_signer = PayloadSigner()
input_file = os.path.join(self.testdata_dir, self.SIGFILE)
diff --git a/tools/releasetools/testdata/testkey_EC.key b/tools/releasetools/testdata/testkey_EC.key
new file mode 100644
index 0000000..9e65a68
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_EC.key
@@ -0,0 +1,5 @@
+-----BEGIN PRIVATE KEY-----
+MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgGaguGj8Yb1KkqKHd
+ISblUsjtOCbzAuVpX81i02sm8FWhRANCAARBnuotwKOsuvjH6iwTDhOAi7Q5pLWz
+xDkZjg2pcfbfi9FFTvLYETas7B2W6fx9PUezUmHTFTDV2JZuMYYFdZOw
+-----END PRIVATE KEY-----
diff --git a/tools/warn.py b/tools/warn.py
index 5994124..6218f93 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -2264,7 +2264,7 @@
i['compiled_patterns'].append(re.compile(pat))
-def find_android_root(path):
+def find_warn_py_and_android_root(path):
"""Set and return android_root path if it is found."""
global android_root
parts = path.split('/')
@@ -2273,8 +2273,36 @@
# Android root directory should contain this script.
if os.path.exists(root_path + '/build/make/tools/warn.py'):
android_root = root_path
- return root_path
- return ''
+ return True
+ return False
+
+
+def find_android_root():
+ """Guess android_root from common prefix of file paths."""
+ # Use the longest common prefix of the absolute file paths
+ # of the first 10000 warning messages as the android_root.
+ global android_root
+ warning_lines = set()
+ warning_pattern = re.compile('^/[^ ]*/[^ ]*: warning: .*')
+ count = 0
+ infile = io.open(args.buildlog, mode='r', encoding='utf-8')
+ for line in infile:
+ if warning_pattern.match(line):
+ warning_lines.add(line)
+ count += 1
+ if count > 9999:
+ break
+ # Try to find warn.py and use its location to find
+ # the source tree root.
+ if count < 100:
+ path = os.path.normpath(re.sub(':.*$', '', line))
+ if find_warn_py_and_android_root(path):
+ return
+ # Do not use common prefix of a small number of paths.
+ if count > 10:
+ root_path = os.path.commonprefix(warning_lines)
+ if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
+ android_root = root_path[:-1]
def remove_android_root_prefix(path):
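
One subtlety in the new heuristic: os.path.commonprefix is a character-wise prefix, not a path-component prefix, which is why the result is only accepted when it ends with '/'. A small illustration with made-up warning lines:

    import os

    warning_lines = [
        "/src/aosp/frameworks/base/core/java/Foo.java: warning: unused variable",
        "/src/aosp/system/core/init/init.cpp: warning: comparison of integers",
    ]
    prefix = os.path.commonprefix(warning_lines)   # "/src/aosp/"
    # A character-wise prefix can end mid-component, so only accept it when it
    # ends with '/', then drop that slash, as find_android_root() does.
    if len(prefix) > 2 and prefix.endswith("/"):
        android_root = prefix[:-1]                 # "/src/aosp"
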
@@ -2289,13 +2317,10 @@
"""Normalize file path relative to android_root."""
# If path is not an absolute path, just normalize it.
path = os.path.normpath(path)
- if path[0] != '/':
- return path
# Remove known prefix of root path and normalize the suffix.
- if android_root or find_android_root(path):
+ if path[0] == '/' and android_root:
return remove_android_root_prefix(path)
- else:
- return path
+ return path
def normalize_warning_line(line):
@@ -2670,6 +2695,7 @@
def main():
+ find_android_root()
# We must use 'utf-8' codec to parse some non-ASCII code in warnings.
warning_lines = parse_input_file(
io.open(args.buildlog, mode='r', encoding='utf-8'))