Merge "Fix the following issues mentioned in Pixel SBOM review."
diff --git a/core/main.mk b/core/main.mk
index df42baa..e84dfaa 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1252,6 +1252,7 @@
     $(if $(filter tests,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
     $(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
     $(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+    $(if $(filter arm64,$(TARGET_ARCH) $(TARGET_2ND_ARCH)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ARM64)) \
     $(call auto-included-modules) \
   ) \
   $(eval ### Filter out the overridden packages and executables before doing expansion) \
diff --git a/core/product.mk b/core/product.mk
index 5f1e145..cdc3d09 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -36,6 +36,7 @@
 _product_list_vars += PRODUCT_PACKAGES
 _product_list_vars += PRODUCT_PACKAGES_DEBUG
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_ASAN
+_product_list_vars += PRODUCT_PACKAGES_ARM64
 # Packages included only for eng/userdebug builds, when building with EMMA_INSTRUMENT=true
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
 _product_list_vars += PRODUCT_PACKAGES_ENG
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 94b5c16..0f5b8a4 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -296,11 +296,9 @@
     system_manifest.xml \
     system_compatibility_matrix.xml \
 
-# HWASAN runtime for SANITIZE_TARGET=hwaddress builds
-ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
-  PRODUCT_PACKAGES += \
-   libclang_rt.hwasan.bootstrap
-endif
+PRODUCT_PACKAGES_ARM64 := libclang_rt.hwasan \
+    libclang_rt.hwasan.bootstrap \
+    libc_hwasan
 
 # Jacoco agent JARS to be built and installed, if any.
 ifeq ($(EMMA_INSTRUMENT),true)
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index e154a0f..e0bcbb7 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -842,13 +842,14 @@
   SYSTEM/ after rebuilding recovery.
   """
   common.ZipDelete(zip_filename, files_list)
-  with zipfile.ZipFile(zip_filename, "a",
+  output_zip = zipfile.ZipFile(zip_filename, "a",
                                compression=zipfile.ZIP_DEFLATED,
-                               allowZip64=True) as output_zip:
-    for item in files_list:
-      file_path = os.path.join(OPTIONS.input_tmp, item)
-      assert os.path.exists(file_path)
-      common.ZipWrite(output_zip, file_path, arcname=item)
+                               allowZip64=True)
+  for item in files_list:
+    file_path = os.path.join(OPTIONS.input_tmp, item)
+    assert os.path.exists(file_path)
+    common.ZipWrite(output_zip, file_path, arcname=item)
+  common.ZipClose(output_zip)
 
 
 def HasPartition(partition_name):
@@ -1191,7 +1192,7 @@
   AddVbmetaDigest(output_zip)
 
   if output_zip:
-    output_zip.close()
+    common.ZipClose(output_zip)
     if OPTIONS.replace_updated_files_list:
       ReplaceUpdatedFiles(output_zip.filename,
                           OPTIONS.replace_updated_files_list)
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index d523701..59c712e 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -431,7 +431,7 @@
   apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
   common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
   common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
-  apex_zip.close()
+  common.ZipClose(apex_zip)
 
   # 3. Sign the APEX container with container_key.
   signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 97957be..b395c19 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -142,7 +142,7 @@
   """Verifies the payload and metadata signatures in an A/B OTA payload."""
   package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
   if 'payload.bin' not in package_zip.namelist():
-    package_zip.close()
+    common.ZipClose(package_zip)
     return
 
   print('Verifying A/B OTA payload signatures...')
@@ -160,7 +160,7 @@
          '--in_file=' + payload_file,
          '--public_key=' + pubkey]
   common.RunAndCheckOutput(cmd)
-  package_zip.close()
+  common.ZipClose(package_zip)
 
   # Verified successfully upon reaching here.
   print('\nPayload signatures VERIFIED\n\n')
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 3904a78..01bc6e1 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -461,6 +461,25 @@
     return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
 
   @property
+  def vendor_api_level(self):
+    vendor_prop = self.info_dict.get("vendor.build.prop")
+    if not vendor_prop:
+      return -1
+
+    props = [
+        "ro.board.api_level",
+        "ro.board.first_api_level",
+        "ro.product.first_api_level",
+    ]
+    for prop in props:
+      value = vendor_prop.GetProp(prop)
+      try:
+        return int(value)
+      except (ValueError, TypeError):
+        pass
+    return -1
+
+  @property
   def is_vabc_xor(self):
     vendor_prop = self.info_dict.get("vendor.build.prop")
     vabc_xor_enabled = vendor_prop and \
@@ -2809,6 +2828,18 @@
 def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
              compress_type=None):
 
+  # http://b/18015246
+  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
+  # for files larger than 2GiB. We can work around this by adjusting their
+  # limit. Note that `zipfile.writestr()` will not work for strings larger than
+  # 2GiB. The Python interpreter sometimes rejects strings that large (though
+  # it isn't clear to me exactly what circumstances cause this).
+  # `zipfile.write()` must be used directly to work around this.
+  #
+  # This mess can be avoided if we port to python3.
+  saved_zip64_limit = zipfile.ZIP64_LIMIT
+  zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
   if compress_type is None:
     compress_type = zip_file.compression
   if arcname is None:
@@ -2834,13 +2865,14 @@
   finally:
     os.chmod(filename, saved_stat.st_mode)
     os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
+    zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
 def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
                 compress_type=None):
   """Wrap zipfile.writestr() function to work around the zip64 limit.
 
-  Python's zip implementation won't allow writing a string
+  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
   longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
   when calling crc32(bytes).
 
@@ -2849,6 +2881,9 @@
   when we know the string won't be too long.
   """
 
+  saved_zip64_limit = zipfile.ZIP64_LIMIT
+  zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
   if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
     zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
     zinfo.compress_type = zip_file.compression
@@ -2881,6 +2916,7 @@
   zinfo.date_time = (2009, 1, 1, 0, 0, 0)
 
   zip_file.writestr(zinfo, data)
+  zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
 def ZipDelete(zip_filename, entries, force=False):
@@ -2913,6 +2949,18 @@
   os.replace(new_zipfile, zip_filename)
 
 
+def ZipClose(zip_file):
+  # http://b/18015246
+  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
+  # central directory.
+  saved_zip64_limit = zipfile.ZIP64_LIMIT
+  zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
+  zip_file.close()
+
+  zipfile.ZIP64_LIMIT = saved_zip64_limit
+
+
 class DeviceSpecificParams(object):
   module = None
 
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index 8f93688..ba2b14f 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -165,6 +165,19 @@
     pass
 
 
+def include_meta_in_list(item_list):
+  """Include all `META/*` files in the item list.
+
+  To ensure that `AddImagesToTargetFiles` can still be used with vendor item
+  lists that do not specify all of the required META/ files, those files should
+  be included by default. This preserves the backward compatibility of
+  `rebuild_image_with_sepolicy`.
+  """
+  if not item_list:
+    return None
+  return list(item_list) + ['META/*']
+
+
 def create_merged_package(temp_dir):
   """Merges two target files packages into one target files structure.
 
@@ -276,7 +289,7 @@
   merge_utils.CollectTargetFiles(
       input_zipfile_or_dir=OPTIONS.vendor_target_files,
       output_dir=vendor_target_files_dir,
-      item_list=OPTIONS.vendor_item_list)
+      item_list=include_meta_in_list(OPTIONS.vendor_item_list))
 
   # Copy the partition contents from the merged target-files archive to the
   # vendor target-files archive.
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 7078d67..c4fd809 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -272,7 +272,7 @@
 
   # We haven't written the metadata entry, which will be done in
   # FinalizeMetadata.
-  output_zip.close()
+  common.ZipClose(output_zip)
 
   needed_property_files = (
       NonAbOtaPropertyFiles(),
@@ -526,7 +526,7 @@
 
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
-  output_zip.close()
+  common.ZipClose(output_zip)
 
   # Sign the generated zip package unless no_signing is specified.
   needed_property_files = (
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 2458244..df283d6 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -495,7 +495,7 @@
       else:
         common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
 
-  target_zip.close()
+  common.ZipClose(target_zip)
 
   return target_file
 
@@ -632,7 +632,7 @@
 
     # TODO(xunchang) handle META/postinstall_config.txt'
 
-  partial_target_zip.close()
+  common.ZipClose(partial_target_zip)
 
   return partial_target_file
 
@@ -717,7 +717,7 @@
   # Write new ab_partitions.txt file
   common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
 
-  target_zip.close()
+  common.ZipClose(target_zip)
 
   return target_file
 
@@ -909,6 +909,19 @@
     logger.info(
         "VABC Compression algorithm is set to 'none', disabling VABC xor")
     OPTIONS.enable_vabc_xor = False
+
+  if OPTIONS.enable_vabc_xor:
+    api_level = -1
+    if source_info is not None:
+      api_level = source_info.vendor_api_level
+    if api_level == -1:
+      api_level = target_info.vendor_api_level
+
+    # XOR is only supported on T and higher.
+    if api_level < 33:
+      logger.error("VABC XOR not supported on this vendor, disabling")
+      OPTIONS.enable_vabc_xor = False
+
   additional_args = []
 
   # Prepare custom images.
@@ -1052,11 +1065,11 @@
     common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
                        compress_type=zipfile.ZIP_STORED)
 
-  target_zip.close()
+  common.ZipClose(target_zip)
 
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
-  output_zip.close()
+  common.ZipClose(output_zip)
 
   FinalizeMetadata(metadata, staging_file, output_file,
                    package_key=OPTIONS.package_key)
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 8c26114..985aeda 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -22,7 +22,7 @@
 
 import ota_metadata_pb2
 import common
-from common import (ZipDelete, OPTIONS, MakeTempFile,
+from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
                     SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
                     GetRamdiskFormat, ParseUpdateEngineConfig)
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 4a12e74..8291448 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -908,7 +908,7 @@
   certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
   for k in keys:
     common.ZipWrite(certs_zip, k)
-  certs_zip.close()
+  common.ZipClose(certs_zip)
   common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
 
 
@@ -1545,8 +1545,8 @@
                      platform_api_level, codename_to_api_level_map,
                      compressed_extension)
 
-  input_zip.close()
-  output_zip.close()
+  common.ZipClose(input_zip)
+  common.ZipClose(output_zip)
 
   if OPTIONS.vendor_partitions and OPTIONS.vendor_otatools:
     BuildVendorPartitions(args[1])
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 0e4626b..2dfd8c7 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -222,17 +222,17 @@
     info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
     build_info = common.BuildInfo(info_dict)
     self.assertEqual(
-        'product-brand/product-name/product-device:version-release/build-id/'
-        'version-incremental:build-type/build-tags', build_info.fingerprint)
+      'product-brand/product-name/product-device:version-release/build-id/'
+      'version-incremental:build-type/build-tags', build_info.fingerprint)
 
     build_props = info_dict['build.prop'].build_props
     del build_props['ro.build.id']
     build_props['ro.build.legacy.id'] = 'legacy-build-id'
     build_info = common.BuildInfo(info_dict, use_legacy_id=True)
     self.assertEqual(
-        'product-brand/product-name/product-device:version-release/'
-        'legacy-build-id/version-incremental:build-type/build-tags',
-        build_info.fingerprint)
+      'product-brand/product-name/product-device:version-release/'
+      'legacy-build-id/version-incremental:build-type/build-tags',
+      build_info.fingerprint)
 
     self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
                       False)
@@ -241,9 +241,9 @@
     info_dict['vbmeta_digest'] = 'abcde12345'
     build_info = common.BuildInfo(info_dict, use_legacy_id=False)
     self.assertEqual(
-        'product-brand/product-name/product-device:version-release/'
-        'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
-        build_info.fingerprint)
+      'product-brand/product-name/product-device:version-release/'
+      'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+      build_info.fingerprint)
 
   def test___getitem__(self):
     target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +376,7 @@
     info_dict['build.prop'].build_props[
         'ro.product.property_source_order'] = 'bad-source'
     with self.assertRaisesRegexp(common.ExternalError,
-                                 'Invalid ro.product.property_source_order'):
+        'Invalid ro.product.property_source_order'):
       info = common.BuildInfo(info_dict, None)
       info.GetBuildProp('ro.product.device')
 
@@ -461,7 +461,7 @@
       os.utime(test_file_name, (1234567, 1234567))
       expected_stat = os.stat(test_file_name)
       common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                    test_file_name, expected_stat, expected_mode,
@@ -494,7 +494,7 @@
         expected_mode = extra_args.get("perms", zinfo_perms)
 
       common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                    expected_mode=expected_mode,
@@ -538,7 +538,7 @@
 
       common.ZipWrite(zip_file, test_file_name, **extra_args)
       common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       # Verify the contents written by ZipWrite().
       self._verify(zip_file, zip_file_name, arcname_large,
@@ -553,6 +553,12 @@
       os.remove(zip_file_name)
       os.remove(test_file_name)
 
+  def _test_reset_ZIP64_LIMIT(self, func, *args):
+    default_limit = (1 << 31) - 1
+    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+    func(*args)
+    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+
   def test_ZipWrite(self):
     file_contents = os.urandom(1024)
     self._test_ZipWrite(file_contents)
@@ -577,7 +583,7 @@
     })
 
   def test_ZipWrite_resets_ZIP64_LIMIT(self):
-    self._test_ZipWrite("")
+    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
 
   def test_ZipWriteStr(self):
     random_string = os.urandom(1024)
@@ -628,9 +634,9 @@
     })
 
   def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
-    self._test_ZipWriteStr('foo', b'')
+    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
     zinfo = zipfile.ZipInfo(filename="foo")
-    self._test_ZipWriteStr(zinfo, b'')
+    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')
 
   def test_bug21309935(self):
     zip_file = tempfile.NamedTemporaryFile(delete=False)
@@ -652,7 +658,7 @@
       zinfo = zipfile.ZipInfo(filename="qux")
       zinfo.external_attr = 0o700 << 16
       common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       self._verify(zip_file, zip_file_name, "foo",
                    sha1(random_string).hexdigest(),
@@ -679,7 +685,7 @@
       common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
       common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
       common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
-      output_zip.close()
+      common.ZipClose(output_zip)
     zip_file.close()
 
     try:
@@ -727,8 +733,8 @@
       common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
       common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
       common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
-      output_zip.close()
-    output_zip.close()
+      common.ZipClose(output_zip)
+    common.ZipClose(output_zip)
     return zip_file
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -815,9 +821,9 @@
   )
 
   APKCERTS_CERTMAP1 = {
-      'RecoveryLocalizer.apk': 'certs/devkey',
-      'Settings.apk': 'build/make/target/product/security/platform',
-      'TV.apk': 'PRESIGNED',
+      'RecoveryLocalizer.apk' : 'certs/devkey',
+      'Settings.apk' : 'build/make/target/product/security/platform',
+      'TV.apk' : 'PRESIGNED',
   }
 
   APKCERTS_TXT2 = (
@@ -832,10 +838,10 @@
   )
 
   APKCERTS_CERTMAP2 = {
-      'Compressed1.apk': 'certs/compressed1',
-      'Compressed2a.apk': 'certs/compressed2',
-      'Compressed2b.apk': 'certs/compressed2',
-      'Compressed3.apk': 'certs/compressed3',
+      'Compressed1.apk' : 'certs/compressed1',
+      'Compressed2a.apk' : 'certs/compressed2',
+      'Compressed2b.apk' : 'certs/compressed2',
+      'Compressed3.apk' : 'certs/compressed3',
   }
 
   APKCERTS_TXT3 = (
@@ -844,7 +850,7 @@
   )
 
   APKCERTS_CERTMAP3 = {
-      'Compressed4.apk': 'certs/compressed4',
+      'Compressed4.apk' : 'certs/compressed4',
   }
 
   # Test parsing with no optional fields, both optional fields, and only the
@@ -861,9 +867,9 @@
   )
 
   APKCERTS_CERTMAP4 = {
-      'RecoveryLocalizer.apk': 'certs/devkey',
-      'Settings.apk': 'build/make/target/product/security/platform',
-      'TV.apk': 'PRESIGNED',
+      'RecoveryLocalizer.apk' : 'certs/devkey',
+      'Settings.apk' : 'build/make/target/product/security/platform',
+      'TV.apk' : 'PRESIGNED',
   }
 
   def setUp(self):
@@ -967,7 +973,7 @@
     extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
     extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
     with open(extracted_from_privkey, 'rb') as privkey_fp, \
-            open(extracted_from_pubkey, 'rb') as pubkey_fp:
+        open(extracted_from_pubkey, 'rb') as pubkey_fp:
       self.assertEqual(privkey_fp.read(), pubkey_fp.read())
 
   def test_ParseCertificate(self):
@@ -1231,8 +1237,7 @@
     self.assertEqual(
         '1-5 9-10',
         sparse_image.file_map['//system/file1'].extra['text_str'])
-    self.assertTrue(
-        sparse_image.file_map['//system/file2'].extra['incomplete'])
+    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
     self.assertTrue(
         sparse_image.file_map['/system/app/file3'].extra['incomplete'])
 
@@ -1340,7 +1345,7 @@
       'recovery_api_version': 3,
       'fstab_version': 2,
       'system_root_image': 'true',
-      'no_recovery': 'true',
+      'no_recovery' : 'true',
       'recovery_as_boot': 'true',
   }
 
@@ -1661,7 +1666,6 @@
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                       test_file.name, 'generic_kernel')
 
-
 class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
   """Checks the format of install-recovery.sh.
 
@@ -1671,7 +1675,7 @@
   def setUp(self):
     self._tempdir = common.MakeTempDir()
     # Create a fake dict that contains the fstab info for boot&recovery.
-    self._info = {"fstab": {}}
+    self._info = {"fstab" : {}}
     fake_fstab = [
         "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
         "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2018,11 +2022,11 @@
           input_zip, 'odm', placeholder_values)
 
     self.assertEqual({
-        'ro.odm.build.date.utc': '1578430045',
-        'ro.odm.build.fingerprint':
-        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-        'ro.product.odm.device': 'coral',
-        'ro.product.odm.name': 'product1',
+      'ro.odm.build.date.utc': '1578430045',
+      'ro.odm.build.fingerprint':
+      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+      'ro.product.odm.device': 'coral',
+      'ro.product.odm.name': 'product1',
     }, partition_props.build_props)
 
     with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2205,8 +2209,8 @@
 
     copied_props = copy.deepcopy(partition_props)
     self.assertEqual({
-        'ro.odm.build.date.utc': '1578430045',
-        'ro.odm.build.fingerprint':
-        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-        'ro.product.odm.device': 'coral',
+      'ro.odm.build.date.utc': '1578430045',
+      'ro.odm.build.fingerprint':
+      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+      'ro.product.odm.device': 'coral',
     }, copied_props.build_props)