Clean up unused code for VB in verity_utils.py

We are removing support for VB from the release tools. This change
removes code that is unreachable after removing Version1ImageBuilder.

Bug: 242672222
Test: atest under build/make
Change-Id: Iaae39e4c49045f6c34351140cd43087dabd63e9c
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index 32f7cce..ada70e6 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -28,149 +28,11 @@
     get_testdata_dir, ReleaseToolsTestCase, SkipIfExternalToolsUnavailable)
 from verity_utils import (
     CalculateVbmetaDigest, CreateHashtreeInfoGenerator,
-    CreateVerityImageBuilder, HashtreeInfo,
-    VerifiedBootVersion1HashtreeInfoGenerator)
+    CreateVerityImageBuilder, HashtreeInfo)
 
 BLOCK_SIZE = common.BLOCK_SIZE
 
 
-class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase):
-
-  def setUp(self):
-    self.testdata_dir = get_testdata_dir()
-
-    self.partition_size = 1024 * 1024
-    self.prop_dict = {
-        'verity': 'true',
-        'verity_fec': 'true',
-        'system_verity_block_device': '/dev/block/system',
-        'system_size': self.partition_size
-    }
-
-    self.hash_algorithm = "sha256"
-    self.fixed_salt = (
-        "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7")
-    self.expected_root_hash = (
-        "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d")
-
-  def _CreateSimg(self, raw_data):  # pylint: disable=no-self-use
-    output_file = common.MakeTempFile()
-    raw_image = common.MakeTempFile()
-    with open(raw_image, 'wb') as f:
-      f.write(raw_data)
-
-    cmd = ["img2simg", raw_image, output_file, '4096']
-    common.RunAndCheckOutput(cmd)
-    return output_file
-
-  def _GenerateImage(self):
-    partition_size = 1024 * 1024
-    prop_dict = {
-        'partition_size': str(partition_size),
-        'verity': 'true',
-        'verity_block_device': '/dev/block/system',
-        'verity_key': os.path.join(self.testdata_dir, 'testkey'),
-        'verity_fec': 'true',
-        'verity_signer_cmd': 'verity_signer',
-    }
-    verity_image_builder = CreateVerityImageBuilder(prop_dict)
-    self.assertIsNotNone(verity_image_builder)
-    adjusted_size = verity_image_builder.CalculateMaxImageSize()
-
-    raw_image = bytearray(adjusted_size)
-    for i in range(adjusted_size):
-      raw_image[i] = ord('0') + i % 10
-
-    output_file = self._CreateSimg(raw_image)
-
-    # Append the verity metadata.
-    verity_image_builder.Build(output_file)
-
-    return output_file
-
-  @SkipIfExternalToolsUnavailable()
-  def test_CreateHashtreeInfoGenerator(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-
-    generator = CreateHashtreeInfoGenerator(
-        'system', image_file, self.prop_dict)
-    self.assertEqual(
-        VerifiedBootVersion1HashtreeInfoGenerator, type(generator))
-    self.assertEqual(self.partition_size, generator.partition_size)
-    self.assertTrue(generator.fec_supported)
-
-  @SkipIfExternalToolsUnavailable()
-  def test_DecomposeSparseImage(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.DecomposeSparseImage(image_file)
-    self.assertEqual(991232, generator.filesystem_size)
-    self.assertEqual(12288, generator.hashtree_size)
-    self.assertEqual(32768, generator.metadata_size)
-
-  @SkipIfExternalToolsUnavailable()
-  def test_ParseHashtreeMetadata(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.DecomposeSparseImage(image_file)
-
-    # pylint: disable=protected-access
-    generator._ParseHashtreeMetadata()
-
-    self.assertEqual(
-        self.hash_algorithm, generator.hashtree_info.hash_algorithm)
-    self.assertEqual(self.fixed_salt, generator.hashtree_info.salt)
-    self.assertEqual(self.expected_root_hash, generator.hashtree_info.root_hash)
-
-  @SkipIfExternalToolsUnavailable()
-  def test_ValidateHashtree_smoke(self):
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._GenerateImage())
-
-    generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
-    info.hashtree_range = RangeSet(
-        data=[991232 // 4096, (991232 + 12288) // 4096])
-    info.hash_algorithm = self.hash_algorithm
-    info.salt = self.fixed_salt
-    info.root_hash = self.expected_root_hash
-
-    self.assertTrue(generator.ValidateHashtree())
-
-  @SkipIfExternalToolsUnavailable()
-  def test_ValidateHashtree_failure(self):
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._GenerateImage())
-
-    generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
-    info.hashtree_range = RangeSet(
-        data=[991232 // 4096, (991232 + 12288) // 4096])
-    info.hash_algorithm = self.hash_algorithm
-    info.salt = self.fixed_salt
-    info.root_hash = "a" + self.expected_root_hash[1:]
-
-    self.assertFalse(generator.ValidateHashtree())
-
-  @SkipIfExternalToolsUnavailable()
-  def test_Generate(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-    generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
-    info = generator.Generate(image_file)
-
-    self.assertEqual(RangeSet(data=[0, 991232 // 4096]), info.filesystem_range)
-    self.assertEqual(RangeSet(data=[991232 // 4096, (991232 + 12288) // 4096]),
-                     info.hashtree_range)
-    self.assertEqual(self.hash_algorithm, info.hash_algorithm)
-    self.assertEqual(self.fixed_salt, info.salt)
-    self.assertEqual(self.expected_root_hash, info.root_hash)
-
-
 class VerifiedBootVersion2VerityImageBuilderTest(ReleaseToolsTestCase):
 
   DEFAULT_PROP_DICT = {
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index efb3008..ee197e0 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -49,107 +49,6 @@
     Exception.__init__(self, message)
 
 
-def GetVerityFECSize(image_size):
-  cmd = ["fec", "-s", str(image_size)]
-  output = common.RunAndCheckOutput(cmd, verbose=False)
-  return int(output)
-
-
-def GetVerityTreeSize(image_size):
-  cmd = ["build_verity_tree", "-s", str(image_size)]
-  output = common.RunAndCheckOutput(cmd, verbose=False)
-  return int(output)
-
-
-def GetVerityMetadataSize(image_size):
-  cmd = ["build_verity_metadata", "size", str(image_size)]
-  output = common.RunAndCheckOutput(cmd, verbose=False)
-  return int(output)
-
-
-def GetVeritySize(image_size, fec_supported):
-  verity_tree_size = GetVerityTreeSize(image_size)
-  verity_metadata_size = GetVerityMetadataSize(image_size)
-  verity_size = verity_tree_size + verity_metadata_size
-  if fec_supported:
-    fec_size = GetVerityFECSize(image_size + verity_size)
-    return verity_size + fec_size
-  return verity_size
-
-
-def GetSimgSize(image_file):
-  simg = sparse_img.SparseImage(image_file, build_map=False)
-  return simg.blocksize * simg.total_blocks
-
-
-def ZeroPadSimg(image_file, pad_size):
-  blocks = pad_size // BLOCK_SIZE
-  logger.info("Padding %d blocks (%d bytes)", blocks, pad_size)
-  simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
-  simg.AppendFillChunk(0, blocks)
-
-
-def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
-                   padding_size):
-  cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
-         verity_path, verity_fec_path]
-  common.RunAndCheckOutput(cmd)
-
-
-def BuildVerityTree(sparse_image_path, verity_image_path):
-  cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
-         verity_image_path]
-  output = common.RunAndCheckOutput(cmd)
-  root, salt = output.split()
-  return root, salt
-
-
-def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
-                        block_device, signer_path, key, signer_args,
-                        verity_disable):
-  cmd = ["build_verity_metadata", "build", str(image_size),
-         verity_metadata_path, root_hash, salt, block_device, signer_path, key]
-  if signer_args:
-    cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
-  if verity_disable:
-    cmd.append("--verity_disable")
-  common.RunAndCheckOutput(cmd)
-
-
-def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
-  """Appends the unsparse image to the given sparse image.
-
-  Args:
-    sparse_image_path: the path to the (sparse) image
-    unsparse_image_path: the path to the (unsparse) image
-
-  Raises:
-    BuildVerityImageError: On error.
-  """
-  cmd = ["append2simg", sparse_image_path, unsparse_image_path]
-  try:
-    common.RunAndCheckOutput(cmd)
-  except:
-    logger.exception(error_message)
-    raise BuildVerityImageError(error_message)
-
-
-def Append(target, file_to_append, error_message):
-  """Appends file_to_append to target.
-
-  Raises:
-    BuildVerityImageError: On error.
-  """
-  try:
-    with open(target, 'ab') as out_file, \
-        open(file_to_append, 'rb') as input_file:
-      for line in input_file:
-        out_file.write(line)
-  except IOError:
-    logger.exception(error_message)
-    raise BuildVerityImageError(error_message)
-
-
 def CreateVerityImageBuilder(prop_dict):
   """Returns a verity image builder based on the given build properties.
 
@@ -414,160 +313,6 @@
     raise NotImplementedError
 
 
-class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
-  """A class that parses the metadata of hashtree for a given partition."""
-
-  def __init__(self, partition_size, block_size, fec_supported):
-    """Initialize VerityTreeInfo with the sparse image and input property.
-
-    Arguments:
-      partition_size: The whole size in bytes of a partition, including the
-          filesystem size, padding size, and verity size.
-      block_size: Expected size in bytes of each block for the sparse image.
-      fec_supported: True if the verity section contains fec data.
-    """
-
-    self.block_size = block_size
-    self.partition_size = partition_size
-    self.fec_supported = fec_supported
-
-    self.image = None
-    self.filesystem_size = None
-    self.hashtree_size = None
-    self.metadata_size = None
-
-    prop_dict = {
-        'partition_size': str(partition_size),
-        'verity': 'true',
-        'verity_fec': 'true' if fec_supported else None,
-        # 'verity_block_device' needs to be present to indicate a verity-enabled
-        # partition.
-        'verity_block_device': '',
-        # We don't need the following properties that are needed for signing the
-        # verity metadata.
-        'verity_key': '',
-        'verity_signer_cmd': None,
-    }
-    self.verity_image_builder = CreateVerityImageBuilder(prop_dict)
-
-    self.hashtree_info = HashtreeInfo()
-
-  def DecomposeSparseImage(self, image):
-    """Calculate the verity size based on the size of the input image.
-
-    Since we already know the structure of a verity enabled image to be:
-    [filesystem, verity_hashtree, verity_metadata, fec_data]. We can then
-    calculate the size and offset of each section.
-    """
-
-    self.image = image
-    assert self.block_size == image.blocksize
-    assert self.partition_size == image.total_blocks * self.block_size, \
-        "partition size {} doesn't match with the calculated image size." \
-        " total_blocks: {}".format(self.partition_size, image.total_blocks)
-
-    adjusted_size = self.verity_image_builder.CalculateMaxImageSize()
-    assert adjusted_size % self.block_size == 0
-
-    verity_tree_size = GetVerityTreeSize(adjusted_size)
-    assert verity_tree_size % self.block_size == 0
-
-    metadata_size = GetVerityMetadataSize(adjusted_size)
-    assert metadata_size % self.block_size == 0
-
-    self.filesystem_size = adjusted_size
-    self.hashtree_size = verity_tree_size
-    self.metadata_size = metadata_size
-
-    self.hashtree_info.filesystem_range = RangeSet(
-        data=[0, adjusted_size // self.block_size])
-    self.hashtree_info.hashtree_range = RangeSet(
-        data=[adjusted_size // self.block_size,
-              (adjusted_size + verity_tree_size) // self.block_size])
-
-  def _ParseHashtreeMetadata(self):
-    """Parses the hash_algorithm, root_hash, salt from the metadata block."""
-
-    metadata_start = self.filesystem_size + self.hashtree_size
-    metadata_range = RangeSet(
-        data=[metadata_start // self.block_size,
-              (metadata_start + self.metadata_size) // self.block_size])
-    meta_data = b''.join(self.image.ReadRangeSet(metadata_range))
-
-    # More info about the metadata structure available in:
-    # system/extras/verity/build_verity_metadata.py
-    META_HEADER_SIZE = 268
-    header_bin = meta_data[0:META_HEADER_SIZE]
-    header = struct.unpack("II256sI", header_bin)
-
-    # header: magic_number, version, signature, table_len
-    assert header[0] == 0xb001b001, header[0]
-    table_len = header[3]
-    verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
-    table_entries = verity_table.rstrip().split()
-
-    # Expected verity table format: "1 block_device block_device block_size
-    # block_size data_blocks data_blocks hash_algorithm root_hash salt"
-    assert len(table_entries) == 10, "Unexpected verity table size {}".format(
-        len(table_entries))
-    assert (int(table_entries[3]) == self.block_size and
-            int(table_entries[4]) == self.block_size)
-    assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
-            int(table_entries[6]) * self.block_size == self.filesystem_size)
-
-    self.hashtree_info.hash_algorithm = table_entries[7].decode()
-    self.hashtree_info.root_hash = table_entries[8].decode()
-    self.hashtree_info.salt = table_entries[9].decode()
-
-  def ValidateHashtree(self):
-    """Checks that we can reconstruct the verity hash tree."""
-
-    # Writes the filesystem section to a temp file; and calls the executable
-    # build_verity_tree to construct the hash tree.
-    adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
-    with open(adjusted_partition, "wb") as fd:
-      self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)
-
-    generated_verity_tree = common.MakeTempFile(prefix="verity")
-    root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)
-
-    # The salt should be always identical, as we use fixed value.
-    assert salt == self.hashtree_info.salt, \
-        "Calculated salt {} doesn't match the one in metadata {}".format(
-            salt, self.hashtree_info.salt)
-
-    if root_hash != self.hashtree_info.root_hash:
-      logger.warning(
-          "Calculated root hash %s doesn't match the one in metadata %s",
-          root_hash, self.hashtree_info.root_hash)
-      return False
-
-    # Reads the generated hash tree and checks if it has the exact same bytes
-    # as the one in the sparse image.
-    with open(generated_verity_tree, 'rb') as fd:
-      return fd.read() == b''.join(self.image.ReadRangeSet(
-          self.hashtree_info.hashtree_range))
-
-  def Generate(self, image):
-    """Parses and validates the hashtree info in a sparse image.
-
-    Returns:
-      hashtree_info: The information needed to reconstruct the hashtree.
-
-    Raises:
-      HashtreeInfoGenerationError: If we fail to generate the exact bytes of
-          the hashtree.
-    """
-
-    self.DecomposeSparseImage(image)
-    self._ParseHashtreeMetadata()
-
-    if not self.ValidateHashtree():
-      raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")
-
-    return self.hashtree_info
-
-
 def CreateCustomImageBuilder(info_dict, partition_name, partition_size,
                             key_path, algorithm, signing_args):
   builder = None