Tianjie Xu | 67c7cbb | 2018-08-30 00:32:07 -0700 | [diff] [blame] | 1 | #!/usr/bin/env python |
| 2 | # |
| 3 | # Copyright (C) 2018 The Android Open Source Project |
| 4 | # |
| 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
| 6 | # you may not use this file except in compliance with the License. |
| 7 | # You may obtain a copy of the License at |
| 8 | # |
| 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
| 10 | # |
| 11 | # Unless required by applicable law or agreed to in writing, software |
| 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
| 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 14 | # See the License for the specific language governing permissions and |
| 15 | # limitations under the License. |
| 16 | |
| 17 | from __future__ import print_function |
| 18 | |
Tao Bao | 32fcdab | 2018-10-12 10:30:39 -0700 | [diff] [blame^] | 19 | import logging |
Tao Bao | 7119751 | 2018-10-11 14:08:45 -0700 | [diff] [blame] | 20 | import os.path |
| 21 | import shlex |
Tianjie Xu | 67c7cbb | 2018-08-30 00:32:07 -0700 | [diff] [blame] | 22 | import struct |
| 23 | |
| 24 | import common |
Tao Bao | 7119751 | 2018-10-11 14:08:45 -0700 | [diff] [blame] | 25 | import sparse_img |
Tianjie Xu | 67c7cbb | 2018-08-30 00:32:07 -0700 | [diff] [blame] | 26 | from rangelib import RangeSet |
| 27 | |
Tao Bao | 32fcdab | 2018-10-12 10:30:39 -0700 | [diff] [blame^] | 28 | logger = logging.getLogger(__name__) |
| 29 | |
Tao Bao | 7119751 | 2018-10-11 14:08:45 -0700 | [diff] [blame] | 30 | OPTIONS = common.OPTIONS |
| 31 | BLOCK_SIZE = common.BLOCK_SIZE |
| 32 | FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7" |
| 33 | |
| 34 | |
class BuildVerityImageError(Exception):
  """Raised when building the verity image fails."""

  def __init__(self, message):
    super(BuildVerityImageError, self).__init__(message)
| 40 | |
| 41 | |
def GetVerityFECSize(partition_size):
  """Returns the size of the FEC data needed for the given partition size."""
  fec_cmd = ["fec", "-s", str(partition_size)]
  return int(common.RunAndCheckOutput(fec_cmd, verbose=False))
| 46 | |
| 47 | |
def GetVerityTreeSize(partition_size):
  """Returns the size of the verity hash tree for the given partition size."""
  tree_cmd = ["build_verity_tree", "-s", str(partition_size)]
  return int(common.RunAndCheckOutput(tree_cmd, verbose=False))
| 52 | |
| 53 | |
def GetVerityMetadataSize(partition_size):
  """Returns the size of the verity metadata for the given partition size."""
  metadata_cmd = ["build_verity_metadata.py", "size", str(partition_size)]
  return int(common.RunAndCheckOutput(metadata_cmd, verbose=False))
| 58 | |
| 59 | |
def GetVeritySize(partition_size, fec_supported):
  """Returns the total verity overhead: hash tree + metadata (+ FEC).

  The FEC region, when enabled, covers both the filesystem data and the
  verity sections, so its size is computed over partition_size plus the
  tree/metadata overhead.
  """
  overhead = (GetVerityTreeSize(partition_size) +
              GetVerityMetadataSize(partition_size))
  if not fec_supported:
    return overhead
  return overhead + GetVerityFECSize(partition_size + overhead)
| 68 | |
| 69 | |
def GetSimgSize(image_file):
  """Returns the raw (unsparsed) byte size of the given sparse image."""
  sparse = sparse_img.SparseImage(image_file, build_map=False)
  return sparse.total_blocks * sparse.blocksize
| 73 | |
| 74 | |
def ZeroPadSimg(image_file, pad_size):
  """Appends pad_size bytes worth of zero-fill blocks to a sparse image."""
  num_blocks = pad_size // BLOCK_SIZE
  logger.info("Padding %d blocks (%d bytes)", num_blocks, pad_size)
  sparse = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
  sparse.AppendFillChunk(0, num_blocks)
| 80 | |
| 81 | |
def AdjustPartitionSizeForVerity(partition_size, fec_supported):
  """Modifies the provided partition size to account for the verity metadata.

  This information is used to size the created image appropriately.

  Args:
    partition_size: the size of the partition to be verified.
    fec_supported: truthy when FEC (forward error correction) data must also
        fit inside the partition.

  Returns:
    A tuple of the size of the partition adjusted for verity metadata, and
    the size of verity metadata.
  """
  # Results are memoized per (partition_size, fec_supported) in the
  # function-attribute dict initialized below the function body.
  key = "%d %d" % (partition_size, fec_supported)
  if key in AdjustPartitionSizeForVerity.results:
    return AdjustPartitionSizeForVerity.results[key]

  # Upper bound: the partition size rounded down to a block boundary.
  hi = partition_size
  if hi % BLOCK_SIZE != 0:
    hi = (hi // BLOCK_SIZE) * BLOCK_SIZE

  # verity tree and fec sizes depend on the partition size, which
  # means this estimate is always going to be unnecessarily small
  verity_size = GetVeritySize(hi, fec_supported)
  lo = partition_size - verity_size
  result = lo

  # do a binary search for the optimal size, on BLOCK_SIZE-aligned
  # candidates; the largest i with i + verity(i) <= partition_size wins.
  while lo < hi:
    i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
    v = GetVeritySize(i, fec_supported)
    if i + v <= partition_size:
      if result < i:
        result = i
        verity_size = v
      lo = i + BLOCK_SIZE
    else:
      hi = i

  logger.info(
      "Adjusted partition size for verity, partition_size: %s, verity_size: %s",
      result, verity_size)
  AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
  return (result, verity_size)


# Memoization cache for AdjustPartitionSizeForVerity (see above).
AdjustPartitionSizeForVerity.results = {}
| 128 | |
| 129 | |
def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
                   padding_size):
  """Generates FEC data covering the image and verity data into a file."""
  fec_cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
             verity_path, verity_fec_path]
  common.RunAndCheckOutput(fec_cmd)
| 135 | |
| 136 | |
def BuildVerityTree(sparse_image_path, verity_image_path):
  """Builds the verity hash tree; returns (root_hash, salt) strings."""
  tree_cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
              verity_image_path]
  # build_verity_tree prints "<root_hash> <salt>" on stdout.
  root_hash, salt = common.RunAndCheckOutput(tree_cmd).split()
  return root_hash, salt
| 143 | |
| 144 | |
def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                        block_device, signer_path, key, signer_args,
                        verity_disable):
  """Writes the verity metadata block via build_verity_metadata.py."""
  metadata_cmd = [
      "build_verity_metadata.py", "build", str(image_size),
      verity_metadata_path, root_hash, salt, block_device, signer_path, key,
  ]
  if signer_args:
    metadata_cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
  if verity_disable:
    metadata_cmd.append("--verity_disable")
  common.RunAndCheckOutput(metadata_cmd)
| 155 | |
| 156 | |
def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
  """Appends the unsparse image to the given sparse image.

  Args:
    sparse_image_path: the path to the (sparse) image
    unsparse_image_path: the path to the (unsparse) image
    error_message: the message used in the raised exception on failure

  Raises:
    BuildVerityImageError: On error.
  """
  cmd = ["append2simg", sparse_image_path, unsparse_image_path]
  try:
    common.RunAndCheckOutput(cmd)
  except Exception:
    # A bare "except:" would also swallow KeyboardInterrupt / SystemExit;
    # only convert real errors from the external command.
    raise BuildVerityImageError(error_message)
| 172 | |
| 173 | |
def Append(target, file_to_append, error_message):
  """Appends the contents of file_to_append to the end of target.

  Raises:
    BuildVerityImageError: On error.
  """
  try:
    with open(target, "a") as sink, open(file_to_append, "r") as source:
      for chunk in source:
        sink.write(chunk)
  except IOError:
    raise BuildVerityImageError(error_message)
| 186 | |
| 187 | |
def BuildVerifiedImage(data_image_path, verity_image_path,
                       verity_metadata_path, verity_fec_path,
                       padding_size, fec_supported):
  """Assembles the verified image in place at data_image_path.

  Concatenates the metadata (and optionally FEC data) onto the verity
  image, then appends the combined verity data to the sparse image.

  Args:
    data_image_path: Path to the (sparse) filesystem image; modified in place.
    verity_image_path: Path to the verity hash tree image; modified in place.
    verity_metadata_path: Path to the verity metadata blob.
    verity_fec_path: Path where the FEC data is written (if fec_supported).
    padding_size: Number of padding bytes passed to the FEC generator.
    fec_supported: Whether to generate and append FEC data.

  Raises:
    BuildVerityImageError: If any append step fails.
  """
  Append(
      verity_image_path, verity_metadata_path,
      "Could not append verity metadata!")

  if fec_supported:
    # Build FEC for the entire partition, including metadata.
    BuildVerityFEC(
        data_image_path, verity_image_path, verity_fec_path, padding_size)
    Append(verity_image_path, verity_fec_path, "Could not append FEC!")

  Append2Simg(
      data_image_path, verity_image_path, "Could not append verity data!")
| 203 | |
| 204 | |
def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
  """Creates an image that is verifiable using dm-verity.

  Args:
    out_file: the location to write the verifiable image at
    fec_supported: True if FEC data should be generated and appended.
    prop_dict: a dictionary of properties required for image creation and
               verification

  Raises:
    AssertionError: On invalid partition sizes.
  """
  # Pull the required properties; the signer path from OPTIONS, when set,
  # overrides the one from the property dictionary.
  image_size = int(prop_dict["image_size"])
  block_dev = prop_dict["verity_block_device"]
  signer_key = prop_dict["verity_key"] + ".pk8"
  signer_path = (OPTIONS.verity_signer_path
                 if OPTIONS.verity_signer_path is not None
                 else prop_dict["verity_signer_cmd"])
  signer_args = OPTIONS.verity_signer_args

  # Scratch directory holding the intermediate verity artifacts.
  work_dir = common.MakeTempDir(suffix="_verity_images")
  verity_image_path = os.path.join(work_dir, "verity.img")
  verity_metadata_path = os.path.join(work_dir, "verity_metadata.img")
  verity_fec_path = os.path.join(work_dir, "verity_fec.img")

  # The hash tree comes first; its root hash and salt feed the metadata.
  root_hash, salt = BuildVerityTree(out_file, verity_image_path)

  BuildVerityMetadata(
      image_size, verity_metadata_path, root_hash, salt, block_dev,
      signer_path, signer_key, signer_args, "verity_disable" in prop_dict)

  # Whatever space remains between filesystem and verity data is padding;
  # a negative value means the partition cannot fit the image.
  padding_size = (int(prop_dict["partition_size"]) - image_size -
                  int(prop_dict["verity_size"]))
  assert padding_size >= 0

  BuildVerifiedImage(
      out_file, verity_image_path, verity_metadata_path, verity_fec_path,
      padding_size, fec_supported)
| 252 | |
| 253 | |
def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
  """Calculates max image size for a given partition size.

  Args:
    avbtool: String with path to avbtool.
    footer_type: 'hash' or 'hashtree' for generating footer.
    partition_size: The size of the partition in question.
    additional_args: Additional arguments to pass to "avbtool add_hash_footer"
        or "avbtool add_hashtree_footer".

  Returns:
    The maximum image size.

  Raises:
    BuildVerityImageError: On invalid image size.
  """
  cmd = [avbtool, "add_%s_footer" % footer_type,
         "--partition_size", str(partition_size), "--calc_max_image_size"]
  cmd.extend(shlex.split(additional_args))

  output = common.RunAndCheckOutput(cmd)
  image_size = int(output)
  if image_size > 0:
    return image_size
  raise BuildVerityImageError(
      "Invalid max image size: {}".format(output))
| 280 | |
| 281 | |
def AVBCalcMinPartitionSize(image_size, size_calculator):
  """Calculates min partition size for a given image size.

  Args:
    image_size: The size of the image in question.
    size_calculator: The function to calculate max image size
        for a given partition size.

  Returns:
    The minimum partition size required to accommodate the image size.
  """
  # Use image size as partition size to approximate final partition size.
  image_ratio = size_calculator(image_size) / float(image_size)

  # Prepare a binary search for the optimal partition size.
  lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - BLOCK_SIZE

  # Ensure lo is small enough: max_image_size should <= image_size.
  # Each iteration refines the ratio from the latest measurement and
  # doubles the step (delta) until the lower bound is truly below target.
  delta = BLOCK_SIZE
  max_image_size = size_calculator(lo)
  while max_image_size > image_size:
    image_ratio = max_image_size / float(lo)
    lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - delta
    delta *= 2
    max_image_size = size_calculator(lo)

  hi = lo + BLOCK_SIZE

  # Ensure hi is large enough: max_image_size should >= image_size.
  # Mirror image of the loop above: grow hi until it can fit the image.
  delta = BLOCK_SIZE
  max_image_size = size_calculator(hi)
  while max_image_size < image_size:
    image_ratio = max_image_size / float(hi)
    hi = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE + delta
    delta *= 2
    max_image_size = size_calculator(hi)

  partition_size = hi

  # Start to binary search on BLOCK_SIZE-aligned candidates between lo
  # (known too small) and hi (known large enough).
  while lo < hi:
    mid = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
    max_image_size = size_calculator(mid)
    if max_image_size >= image_size:  # if mid can accommodate image_size
      if mid < partition_size:  # if a smaller partition size is found
        partition_size = mid
      hi = mid
    else:
      lo = mid + BLOCK_SIZE

  logger.info(
      "AVBCalcMinPartitionSize(%d): partition_size: %d.",
      image_size, partition_size)

  return partition_size
| 337 | |
| 338 | |
def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
                 partition_name, key_path, algorithm, salt,
                 additional_args):
  """Adds dm-verity hashtree and AVB metadata to an image.

  Args:
    image_path: Path to image to modify.
    avbtool: String with path to avbtool.
    footer_type: 'hash' or 'hashtree' for generating footer.
    partition_size: The size of the partition in question.
    partition_name: The name of the partition - will be embedded in metadata.
    key_path: Path to key to use or None.
    algorithm: Name of algorithm to use or None.
    salt: The salt to use (a hexadecimal string) or None.
    additional_args: Additional arguments to pass to "avbtool add_hash_footer"
        or "avbtool add_hashtree_footer".
  """
  cmd = [avbtool, "add_%s_footer" % footer_type,
         # str() so callers may pass an int or a string, consistent with
         # AVBCalcMaxImageSize; command-line arguments must be strings.
         "--partition_size", str(partition_size),
         "--partition_name", partition_name,
         "--image", image_path]

  if key_path and algorithm:
    cmd.extend(["--key", key_path, "--algorithm", algorithm])
  if salt:
    cmd.extend(["--salt", salt])

  cmd.extend(shlex.split(additional_args))

  common.RunAndCheckOutput(cmd)
| 369 | |
Tianjie Xu | 67c7cbb | 2018-08-30 00:32:07 -0700 | [diff] [blame] | 370 | |
class HashtreeInfoGenerationError(Exception):
  """Raised when hashtree info generation fails."""

  def __init__(self, message):
    super(HashtreeInfoGenerationError, self).__init__(message)
| 376 | |
| 377 | |
class HashtreeInfo(object):
  """Plain container for the data needed to reconstruct a verity hashtree."""

  def __init__(self):
    # Block ranges of the hashtree and the filesystem within the image.
    self.hashtree_range = None
    self.filesystem_range = None
    # Parameters the hashtree was computed with.
    self.hash_algorithm = None
    self.salt = None
    self.root_hash = None
| 385 | |
| 386 | |
def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
  """Returns a hashtree info generator for the partition, or None.

  A generator is only created for Verified Boot v1 partitions, i.e. when
  "verity" is enabled and the partition has a verity block device defined.
  """
  if info_dict.get("verity") != "true":
    return None
  if not info_dict.get("{}_verity_block_device".format(partition_name)):
    return None

  return VerifiedBootVersion1HashtreeInfoGenerator(
      info_dict["{}_size".format(partition_name)], block_size,
      info_dict.get("verity_fec") == "true")
| 397 | |
| 398 | |
class HashtreeInfoGenerator(object):
  """Abstract interface for extracting hashtree info from a sparse image."""

  def Generate(self, image):
    """Parses and validates the hashtree info; returns a HashtreeInfo."""
    raise NotImplementedError

  def DecomposeSparseImage(self, image):
    """Computes the size/offset of each section of the verified image."""
    raise NotImplementedError

  def ValidateHashtree(self):
    """Returns True if the hashtree can be reconstructed from the image."""
    raise NotImplementedError
| 408 | |
| 409 | |
class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
  """A class that parses the metadata of hashtree for a given partition."""

  def __init__(self, partition_size, block_size, fec_supported):
    """Initialize VerityTreeInfo with the sparse image and input property.

    Arguments:
      partition_size: The whole size in bytes of a partition, including the
          filesystem size, padding size, and verity size.
      block_size: Expected size in bytes of each block for the sparse image.
      fec_supported: True if the verity section contains fec data.
    """

    self.block_size = block_size
    self.partition_size = partition_size
    self.fec_supported = fec_supported

    # Populated by DecomposeSparseImage().
    self.image = None
    self.filesystem_size = None
    self.hashtree_size = None
    self.metadata_size = None

    self.hashtree_info = HashtreeInfo()

  def DecomposeSparseImage(self, image):
    """Calculate the verity size based on the size of the input image.

    Since we already know the structure of a verity enabled image to be:
    [filesystem, verity_hashtree, verity_metadata, fec_data]. We can then
    calculate the size and offset of each section.
    """

    self.image = image
    assert self.block_size == image.blocksize
    assert self.partition_size == image.total_blocks * self.block_size, \
        "partition size {} doesn't match with the calculated image size." \
        " total_blocks: {}".format(self.partition_size, image.total_blocks)

    adjusted_size, _ = AdjustPartitionSizeForVerity(
        self.partition_size, self.fec_supported)
    assert adjusted_size % self.block_size == 0

    verity_tree_size = GetVerityTreeSize(adjusted_size)
    assert verity_tree_size % self.block_size == 0

    metadata_size = GetVerityMetadataSize(adjusted_size)
    assert metadata_size % self.block_size == 0

    self.filesystem_size = adjusted_size
    self.hashtree_size = verity_tree_size
    self.metadata_size = metadata_size

    # Use floor division ("//") so the RangeSet endpoints stay ints; plain
    # "/" yields floats under Python 3. The asserts above guarantee all
    # sizes are block-aligned, so the values are unchanged.
    self.hashtree_info.filesystem_range = RangeSet(
        data=[0, adjusted_size // self.block_size])
    self.hashtree_info.hashtree_range = RangeSet(
        data=[adjusted_size // self.block_size,
              (adjusted_size + verity_tree_size) // self.block_size])

  def _ParseHashtreeMetadata(self):
    """Parses the hash_algorithm, root_hash, salt from the metadata block."""

    metadata_start = self.filesystem_size + self.hashtree_size
    # Floor division keeps the block indices as ints (see note in
    # DecomposeSparseImage); both operands are block-aligned.
    metadata_range = RangeSet(
        data=[metadata_start // self.block_size,
              (metadata_start + self.metadata_size) // self.block_size])
    meta_data = ''.join(self.image.ReadRangeSet(metadata_range))

    # More info about the metadata structure available in:
    # system/extras/verity/build_verity_metadata.py
    META_HEADER_SIZE = 268
    header_bin = meta_data[0:META_HEADER_SIZE]
    header = struct.unpack("II256sI", header_bin)

    # header: magic_number, version, signature, table_len
    assert header[0] == 0xb001b001, header[0]
    table_len = header[3]
    verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
    table_entries = verity_table.rstrip().split()

    # Expected verity table format: "1 block_device block_device block_size
    # block_size data_blocks data_blocks hash_algorithm root_hash salt"
    assert len(table_entries) == 10, "Unexpected verity table size {}".format(
        len(table_entries))
    assert (int(table_entries[3]) == self.block_size and
            int(table_entries[4]) == self.block_size)
    assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
            int(table_entries[6]) * self.block_size == self.filesystem_size)

    self.hashtree_info.hash_algorithm = table_entries[7]
    self.hashtree_info.root_hash = table_entries[8]
    self.hashtree_info.salt = table_entries[9]

  def ValidateHashtree(self):
    """Checks that we can reconstruct the verity hash tree."""

    # Writes the file system section to a temp file; and calls the executable
    # build_verity_tree to construct the hash tree.
    adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
    with open(adjusted_partition, "wb") as fd:
      self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)

    generated_verity_tree = common.MakeTempFile(prefix="verity")
    root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)

    # The salt should be always identical, as we use fixed value.
    assert salt == self.hashtree_info.salt, \
        "Calculated salt {} doesn't match the one in metadata {}".format(
            salt, self.hashtree_info.salt)

    if root_hash != self.hashtree_info.root_hash:
      logger.warning(
          "Calculated root hash %s doesn't match the one in metadata %s",
          root_hash, self.hashtree_info.root_hash)
      return False

    # Reads the generated hash tree and checks if it has the exact same bytes
    # as the one in the sparse image.
    with open(generated_verity_tree, "rb") as fd:
      return fd.read() == ''.join(self.image.ReadRangeSet(
          self.hashtree_info.hashtree_range))

  def Generate(self, image):
    """Parses and validates the hashtree info in a sparse image.

    Returns:
      hashtree_info: The information needed to reconstruct the hashtree.

    Raises:
      HashtreeInfoGenerationError: If we fail to generate the exact bytes of
          the hashtree.
    """

    self.DecomposeSparseImage(image)
    self._ParseHashtreeMetadata()

    if not self.ValidateHashtree():
      raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")

    return self.hashtree_info