blob: b85e2b8336686a4b0d8bda7c767a18aced0ce241 [file] [log] [blame]
Gilad Arnold553b0ec2013-01-26 01:00:39 -08001# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5"""Verifying the integrity of a Chrome OS update payload.
6
7This module is used internally by the main Payload class for verifying the
8integrity of an update payload. The interface for invoking the checks is as
9follows:
10
11 checker = PayloadChecker(payload)
12 checker.Run(...)
13
14"""
15
16import array
17import base64
18import hashlib
19import subprocess
20
21import common
22from error import PayloadError
23import format_utils
24import histogram
25import update_metadata_pb2
26
27
#
# Constants / helper functions.
#
# Payload type tags; assigned to PayloadChecker.payload_type once the
# manifest reveals whether this is a full or a delta update.
_TYPE_FULL = 'full'
_TYPE_DELTA = 'delta'

# Expected update block size in bytes; the manifest's block_size must match.
_DEFAULT_BLOCK_SIZE = 4096
35
36
37#
38# Helper functions.
39#
40def _IsPowerOfTwo(val):
41 """Returns True iff val is a power of two."""
42 return val > 0 and (val & (val - 1)) == 0
43
44
45def _AddFormat(format_func, value):
46 """Adds a custom formatted representation to ordinary string representation.
47
48 Args:
49 format_func: a value formatter
50 value: value to be formatted and returned
51 Returns:
52 A string 'x (y)' where x = str(value) and y = format_func(value).
53
54 """
55 return '%s (%s)' % (value, format_func(value))
56
57
def _AddHumanReadableSize(size):
  """Couples a byte size value with its human readable representation."""
  converter = format_utils.BytesToHumanReadable
  return _AddFormat(converter, size)
61
62
63#
64# Payload report generator.
65#
class _PayloadReport(object):
  """A payload report generator.

  A report is essentially a sequence of nodes, which represent data points. It
  is initialized to have a "global", untitled section. A node may be a
  sub-report itself.

  """

  # Report nodes: field, sub-report, section.
  class Node(object):
    """A report node interface."""

    @staticmethod
    def _Indent(indent, line):
      """Indents a line by a given indentation amount.

      Args:
        indent: the indentation amount
        line: the line content (string)
      Returns:
        The properly indented line (string).

      """
      return '%*s%s' % (indent, '', line)

    def GenerateLines(self, base_indent, sub_indent, curr_section):
      """Generates the report lines for this node.

      Args:
        base_indent: base indentation for each line
        sub_indent: additional indentation for sub-nodes
        curr_section: the current report section object
      Returns:
        A pair consisting of a list of properly indented report lines and a new
        current section object.

      """
      raise NotImplementedError()

  class FieldNode(Node):
    """A field report node, representing a (name, value) pair."""

    def __init__(self, name, value, linebreak, indent):
      super(_PayloadReport.FieldNode, self).__init__()
      self.name = name  # field name; may be None/empty for anonymous values
      self.value = value  # arbitrary value, rendered via str()
      self.linebreak = linebreak  # whether the value starts on its own line
      self.indent = indent  # extra indent applied to value lines

    def GenerateLines(self, base_indent, sub_indent, curr_section):
      """Generates a properly formatted 'name : value' entry."""
      report_output = ''
      if self.name:
        # Pad the name to the widest field name in the current section, so
        # the ':' separators line up.
        report_output += self.name.ljust(curr_section.max_field_name_len) + ' :'
      value_lines = str(self.value).splitlines()
      if self.linebreak and self.name:
        # Value starts on a new line; each value line gets self.indent.
        report_output += '\n' + '\n'.join(
            ['%*s%s' % (self.indent, '', line) for line in value_lines])
      else:
        if self.name:
          report_output += ' '
        report_output += '%*s' % (self.indent, '')
        # Align continuation lines under the first character of the value.
        cont_line_indent = len(report_output)
        indented_value_lines = [value_lines[0]]
        indented_value_lines.extend(['%*s%s' % (cont_line_indent, '', line)
                                     for line in value_lines[1:]])
        report_output += '\n'.join(indented_value_lines)

      report_lines = [self._Indent(base_indent, line + '\n')
                      for line in report_output.split('\n')]
      return report_lines, curr_section

  class SubReportNode(Node):
    """A sub-report node, representing a nested report."""

    def __init__(self, title, report):
      super(_PayloadReport.SubReportNode, self).__init__()
      self.title = title
      self.report = report

    def GenerateLines(self, base_indent, sub_indent, curr_section):
      """Recurse with indentation."""
      report_lines = [self._Indent(base_indent, self.title + ' =>\n')]
      report_lines.extend(self.report.GenerateLines(base_indent + sub_indent,
                                                    sub_indent))
      # A sub-report does not change the caller's current section.
      return report_lines, curr_section

  class SectionNode(Node):
    """A section header node."""

    def __init__(self, title=None):
      super(_PayloadReport.SectionNode, self).__init__()
      self.title = title
      # Longest field name added under this section; used by FieldNode for
      # column alignment.
      self.max_field_name_len = 0

    def GenerateLines(self, base_indent, sub_indent, curr_section):
      """Dump a title line, return self as the (new) current section."""
      report_lines = []
      if self.title:
        report_lines.append(self._Indent(base_indent,
                                         '=== %s ===\n' % self.title))
      return report_lines, self

  def __init__(self):
    self.report = []  # ordered sequence of Node objects
    # Fields added before any AddSection() call land in this untitled
    # "global" section.
    self.last_section = self.global_section = self.SectionNode()
    self.is_finalized = False

  def GenerateLines(self, base_indent, sub_indent):
    """Generates the lines in the report, properly indented.

    Args:
      base_indent: the indentation used for root-level report lines
      sub_indent: the indentation offset used for sub-reports
    Returns:
      A list of indented report lines.

    """
    report_lines = []
    curr_section = self.global_section
    for node in self.report:
      node_report_lines, curr_section = node.GenerateLines(
          base_indent, sub_indent, curr_section)
      report_lines.extend(node_report_lines)

    return report_lines

  def Dump(self, out_file, base_indent=0, sub_indent=2):
    """Dumps the report to a file.

    Args:
      out_file: file object to output the content to
      base_indent: base indentation for report lines
      sub_indent: added indentation for sub-reports

    """

    report_lines = self.GenerateLines(base_indent, sub_indent)
    # Flag reports that are dumped before Finalize() was called.
    if report_lines and not self.is_finalized:
      report_lines.append('(incomplete report)\n')

    for line in report_lines:
      out_file.write(line)

  def AddField(self, name, value, linebreak=False, indent=0):
    """Adds a field/value pair to the payload report.

    Args:
      name: the field's name
      value: the field's value
      linebreak: whether the value should be printed on a new line
      indent: amount of extra indent for each line of the value

    """
    assert not self.is_finalized
    # Keep the current section's name-column width up to date for alignment.
    if name and self.last_section.max_field_name_len < len(name):
      self.last_section.max_field_name_len = len(name)
    self.report.append(self.FieldNode(name, value, linebreak, indent))

  def AddSubReport(self, title):
    """Adds and returns a sub-report with a title."""
    assert not self.is_finalized
    sub_report = self.SubReportNode(title, type(self)())
    self.report.append(sub_report)
    # Return the nested report object so the caller can populate it directly.
    return sub_report.report

  def AddSection(self, title):
    """Adds a new section title."""
    assert not self.is_finalized
    self.last_section = self.SectionNode(title)
    self.report.append(self.last_section)

  def Finalize(self):
    """Seals the report, marking it as complete."""
    self.is_finalized = True
242
243
244#
245# Payload verification.
246#
247class PayloadChecker(object):
248 """Checking the integrity of an update payload.
249
250 This is a short-lived object whose purpose is to isolate the logic used for
251 verifying the integrity of an update payload.
252
253 """
254
255 def __init__(self, payload):
256 assert payload.is_init, 'uninitialized update payload'
257 self.payload = payload
258
259 # Reset state; these will be assigned when the manifest is checked.
260 self.block_size = _DEFAULT_BLOCK_SIZE
261 self.sigs_offset = 0
262 self.sigs_size = 0
263 self.old_rootfs_size = 0
264 self.old_kernel_size = 0
265 self.new_rootfs_size = 0
266 self.new_kernel_size = 0
267 self.payload_type = None
268
269 @staticmethod
270 def _CheckElem(msg, name, report, is_mandatory, is_submsg, convert=str,
271 msg_name=None, linebreak=False, indent=0):
272 """Adds an element from a protobuf message to the payload report.
273
274 Checks to see whether a message contains a given element, and if so adds
275 the element value to the provided report. A missing mandatory element
276 causes an exception to be raised.
277
278 Args:
279 msg: the message containing the element
280 name: the name of the element
281 report: a report object to add the element name/value to
282 is_mandatory: whether or not this element must be present
283 is_submsg: whether this element is itself a message
284 convert: a function for converting the element value for reporting
285 msg_name: the name of the message object (for error reporting)
286 linebreak: whether the value report should induce a line break
287 indent: amount of indent used for reporting the value
288 Returns:
289 A pair consisting of the element value and the generated sub-report for
290 it (if the element is a sub-message, None otherwise). If the element is
291 missing, returns (None, None).
292 Raises:
293 PayloadError if a mandatory element is missing.
294
295 """
296 if not msg.HasField(name):
297 if is_mandatory:
298 raise PayloadError("%smissing mandatory %s '%s'" %
299 (msg_name + ' ' if msg_name else '',
300 'sub-message' if is_submsg else 'field',
301 name))
302 return (None, None)
303
304 value = getattr(msg, name)
305 if is_submsg:
306 return (value, report and report.AddSubReport(name))
307 else:
308 if report:
309 report.AddField(name, convert(value), linebreak=linebreak,
310 indent=indent)
311 return (value, None)
312
313 @staticmethod
314 def _CheckMandatoryField(msg, field_name, report, msg_name, convert=str,
315 linebreak=False, indent=0):
316 """Adds a mandatory field; returning first component from _CheckElem."""
317 return PayloadChecker._CheckElem(msg, field_name, report, True, False,
318 convert=convert, msg_name=msg_name,
319 linebreak=linebreak, indent=indent)[0]
320
321 @staticmethod
322 def _CheckOptionalField(msg, field_name, report, convert=str,
323 linebreak=False, indent=0):
324 """Adds an optional field; returning first component from _CheckElem."""
325 return PayloadChecker._CheckElem(msg, field_name, report, False, False,
326 convert=convert, linebreak=linebreak,
327 indent=indent)[0]
328
329 @staticmethod
330 def _CheckMandatorySubMsg(msg, submsg_name, report, msg_name):
331 """Adds a mandatory sub-message; wrapper for _CheckElem."""
332 return PayloadChecker._CheckElem(msg, submsg_name, report, True, True,
333 msg_name)
334
335 @staticmethod
336 def _CheckOptionalSubMsg(msg, submsg_name, report):
337 """Adds an optional sub-message; wrapper for _CheckElem."""
338 return PayloadChecker._CheckElem(msg, submsg_name, report, False, True)
339
340 @staticmethod
341 def _CheckPresentIff(val1, val2, name1, name2, obj_name):
342 """Checks that val1 is None iff val2 is None.
343
344 Args:
345 val1: first value to be compared
346 val2: second value to be compared
347 name1: name of object holding the first value
348 name2: name of object holding the second value
349 obj_name: name of the object containing these values
350 Raises:
351 PayloadError if assertion does not hold.
352
353 """
354 if None in (val1, val2) and val1 is not val2:
355 present, missing = (name1, name2) if val2 is None else (name2, name1)
356 raise PayloadError("'%s' present without '%s'%s" %
357 (present, missing,
358 ' in ' + obj_name if obj_name else ''))
359
360 @staticmethod
361 def _Run(cmd, send_data=None):
362 """Runs a subprocess, returns its output.
363
364 Args:
365 cmd: list of command-line argument for invoking the subprocess
366 send_data: data to feed to the process via its stdin
367 Returns:
368 A tuple containing the stdout and stderr output of the process.
369
370 """
371 run_process = subprocess.Popen(cmd, stdin=subprocess.PIPE,
372 stdout=subprocess.PIPE)
373 return run_process.communicate(input=send_data)
374
  @staticmethod
  def _CheckSha256Signature(sig_data, pubkey_file_name, actual_hash, sig_name):
    """Verifies an actual hash against a signed one.

    Args:
      sig_data: the raw signature data
      pubkey_file_name: public key used for verifying signature
      actual_hash: the actual hash digest
      sig_name: signature name for error reporting
    Raises:
      PayloadError if signature could not be verified.

    """
    # Expected signature size is exactly 256 bytes (consistent with a
    # 2048-bit RSA signature).
    if len(sig_data) != 256:
      raise PayloadError('%s: signature size (%d) not as expected (256)' %
                         (sig_name, len(sig_data)))
    # Recover the signed blob using the public key via openssl; the output
    # should be the ASN.1-prefixed digest that was originally signed.
    signed_data, _ = PayloadChecker._Run(
        ['openssl', 'rsautl', '-verify', '-pubin', '-inkey', pubkey_file_name],
        send_data=sig_data)

    # The recovered blob must be the standard ASN.1 header followed by a
    # 32-byte (SHA-256) digest.
    if len(signed_data) != len(common.SIG_ASN1_HEADER) + 32:
      raise PayloadError('%s: unexpected signed data length (%d)' %
                         (sig_name, len(signed_data)))

    if not signed_data.startswith(common.SIG_ASN1_HEADER):
      raise PayloadError('%s: not containing standard ASN.1 prefix' % sig_name)

    # Strip the header and compare the embedded digest to the actual one.
    signed_hash = signed_data[len(common.SIG_ASN1_HEADER):]
    if signed_hash != actual_hash:
      # NOTE: str.encode('hex') is a Python 2 idiom (binascii.hexlify in
      # Python 3) — this module targets Python 2.
      raise PayloadError('%s: signed hash (%s) different from actual (%s)' %
                         (sig_name, signed_hash.encode('hex'),
                          actual_hash.encode('hex')))
407
408 @staticmethod
409 def _CheckBlocksFitLength(length, num_blocks, block_size, length_name,
410 block_name=None):
411 """Checks that a given length fits given block space.
412
413 This ensures that the number of blocks allocated is appropriate for the
414 length of the data residing in these blocks.
415
416 Args:
417 length: the actual length of the data
418 num_blocks: the number of blocks allocated for it
419 block_size: the size of each block in bytes
420 length_name: name of length (used for error reporting)
421 block_name: name of block (used for error reporting)
422 Raises:
423 PayloadError if the aforementioned invariant is not satisfied.
424
425 """
426 # Check: length <= num_blocks * block_size.
427 if not length <= num_blocks * block_size:
428 raise PayloadError(
429 '%s (%d) > num %sblocks (%d) * block_size (%d)' %
430 (length_name, length, block_name or '', num_blocks, block_size))
431
432 # Check: length > (num_blocks - 1) * block_size.
433 if not length > (num_blocks - 1) * block_size:
434 raise PayloadError(
435 '%s (%d) <= (num %sblocks - 1 (%d)) * block_size (%d)' %
436 (length_name, length, block_name or '', num_blocks - 1, block_size))
437
  def _CheckManifest(self, report):
    """Checks the payload manifest.

    Verifies the block size, the signatures offset/size pairing, the
    old/new kernel and rootfs info blocks, and that at least one operation
    is present. Results are recorded into instance attributes (block_size,
    sigs_offset, sigs_size, payload_type and the four partition sizes)
    rather than returned.

    Args:
      report: a report object to add to
    Raises:
      PayloadError if any of the checks fail.

    """
    manifest = self.payload.manifest
    report.AddSection('manifest')

    # Check: block_size must exist and match the expected value.
    actual_block_size = self._CheckMandatoryField(manifest, 'block_size',
                                                  report, 'manifest')
    if actual_block_size != self.block_size:
      raise PayloadError('block_size (%d) not as expected (%d)' %
                         (actual_block_size, self.block_size))

    # Check: signatures_offset <==> signatures_size.
    self.sigs_offset = self._CheckOptionalField(manifest, 'signatures_offset',
                                                report)
    self.sigs_size = self._CheckOptionalField(manifest, 'signatures_size',
                                              report)
    self._CheckPresentIff(self.sigs_offset, self.sigs_size,
                          'signatures_offset', 'signatures_size', 'manifest')

    # Check: old_kernel_info <==> old_rootfs_info.
    oki_msg, oki_report = self._CheckOptionalSubMsg(manifest,
                                                    'old_kernel_info', report)
    ori_msg, ori_report = self._CheckOptionalSubMsg(manifest,
                                                    'old_rootfs_info', report)
    self._CheckPresentIff(oki_msg, ori_msg, 'old_kernel_info',
                          'old_rootfs_info', 'manifest')
    if oki_msg:  # equivalently, ori_msg
      # Old partition info present implies a delta payload; it must not have
      # already been classified as full.
      if self.payload_type == _TYPE_FULL:
        raise PayloadError(
            'apparent full payload contains old_{kernel,rootfs}_info')
      self.payload_type = _TYPE_DELTA

      # Check: {size, hash} present in old_{kernel,rootfs}_info.
      self.old_kernel_size = self._CheckMandatoryField(
          oki_msg, 'size', oki_report, 'old_kernel_info')
      self._CheckMandatoryField(oki_msg, 'hash', oki_report, 'old_kernel_info',
                                convert=common.FormatSha256)
      self.old_rootfs_size = self._CheckMandatoryField(
          ori_msg, 'size', ori_report, 'old_rootfs_info')
      self._CheckMandatoryField(ori_msg, 'hash', ori_report, 'old_rootfs_info',
                                convert=common.FormatSha256)
    else:
      # No old partition info implies a full payload; it must not have
      # already been classified as delta.
      if self.payload_type == _TYPE_DELTA:
        raise PayloadError(
            'apparent delta payload missing old_{kernel,rootfs}_info')
      self.payload_type = _TYPE_FULL

    # Check: new_kernel_info present; contains {size, hash}.
    nki_msg, nki_report = self._CheckMandatorySubMsg(
        manifest, 'new_kernel_info', report, 'manifest')
    self.new_kernel_size = self._CheckMandatoryField(
        nki_msg, 'size', nki_report, 'new_kernel_info')
    self._CheckMandatoryField(nki_msg, 'hash', nki_report, 'new_kernel_info',
                              convert=common.FormatSha256)

    # Check: new_rootfs_info present; contains {size, hash}.
    nri_msg, nri_report = self._CheckMandatorySubMsg(
        manifest, 'new_rootfs_info', report, 'manifest')
    self.new_rootfs_size = self._CheckMandatoryField(
        nri_msg, 'size', nri_report, 'new_rootfs_info')
    self._CheckMandatoryField(nri_msg, 'hash', nri_report, 'new_rootfs_info',
                              convert=common.FormatSha256)

    # Check: payload must contain at least one operation.
    if not(len(manifest.install_operations) or
           len(manifest.kernel_install_operations)):
      raise PayloadError('payload contains no operations')
518
519 def _CheckLength(self, length, total_blocks, op_name, length_name):
520 """Checks whether a length matches the space designated in extents.
521
522 Args:
523 length: the total length of the data
524 total_blocks: the total number of blocks in extents
525 op_name: operation name (for error reporting)
526 length_name: length name (for error reporting)
527 Raises:
528 PayloadError is there a problem with the length.
529
530 """
531 # Check: length is non-zero.
532 if length == 0:
533 raise PayloadError('%s: %s is zero' % (op_name, length_name))
534
535 # Check that length matches number of blocks.
536 self._CheckBlocksFitLength(length, total_blocks, self.block_size,
537 '%s: %s' % (op_name, length_name))
538
  def _CheckExtents(self, extents, part_size, block_counters, name,
                    allow_pseudo=False, allow_signature=False):
    """Checks a sequence of extents.

    Args:
      extents: the sequence of extents to check
      part_size: the total size of the partition to which the extents apply
      block_counters: an array of counters corresponding to the number of blocks
      name: the name of the extent block
      allow_pseudo: whether or not pseudo block numbers are allowed
      allow_signature: whether or not the extents are used for a signature
    Returns:
      The total number of blocks in the extents.
    Raises:
      PayloadError if any of the entailed checks fails.

    """
    total_num_blocks = 0
    for ex, ex_name in common.ExtentIter(extents, name):
      # Check: mandatory fields.
      start_block = PayloadChecker._CheckMandatoryField(ex, 'start_block',
                                                        None, ex_name)
      num_blocks = PayloadChecker._CheckMandatoryField(ex, 'num_blocks', None,
                                                       ex_name)
      end_block = start_block + num_blocks

      # Check: num_blocks > 0.
      if num_blocks == 0:
        raise PayloadError('%s: extent length is zero' % ex_name)

      if start_block != common.PSEUDO_EXTENT_MARKER:
        # Check: make sure we're within the partition limit. Skipped when
        # part_size is zero/None (size unknown).
        if part_size and end_block * self.block_size > part_size:
          raise PayloadError(
              '%s: extent (%s) exceeds partition size (%d)' %
              (ex_name, common.FormatExtent(ex, self.block_size), part_size))

        # Record block usage; each touched block's counter is incremented.
        for i in range(start_block, end_block):
          block_counters[i] += 1
      elif not (allow_pseudo or (allow_signature and len(extents) == 1)):
        # Pseudo-extents must be allowed explicitly, or otherwise be part of a
        # signature operation (in which case there has to be exactly one).
        raise PayloadError('%s: unexpected pseudo-extent' % ex_name)

      total_num_blocks += num_blocks

    return total_num_blocks
587
588 def _CheckReplaceOperation(self, op, data_length, total_dst_blocks, op_name):
589 """Specific checks for REPLACE/REPLACE_BZ operations.
590
591 Args:
592 op: the operation object from the manifest
593 data_length: the length of the data blob associated with the operation
594 total_dst_blocks: total number of blocks in dst_extents
595 op_name: operation name for error reporting
596 Raises:
597 PayloadError if any check fails.
598
599 """
Gilad Arnold5502b562013-03-08 13:22:31 -0800600 # Check: does not contain src extents.
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800601 if op.src_extents:
602 raise PayloadError('%s: contains src_extents' % op_name)
603
Gilad Arnold5502b562013-03-08 13:22:31 -0800604 # Check: contains data.
605 if data_length is None:
606 raise PayloadError('%s: missing data_{offset,length}' % op_name)
607
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800608 if op.type == common.OpType.REPLACE:
609 PayloadChecker._CheckBlocksFitLength(data_length, total_dst_blocks,
610 self.block_size,
611 op_name + '.data_length', 'dst')
612 else:
613 # Check: data_length must be smaller than the alotted dst blocks.
614 if data_length >= total_dst_blocks * self.block_size:
615 raise PayloadError(
616 '%s: data_length (%d) must be less than allotted dst block '
617 'space (%d * %d)' %
618 (op_name, data_length, total_dst_blocks, self.block_size))
619
  def _CheckMoveOperation(self, op, data_offset, total_src_blocks,
                          total_dst_blocks, op_name):
    """Specific checks for MOVE operations.

    Args:
      op: the operation object from the manifest
      data_offset: the offset of a data blob for the operation
      total_src_blocks: total number of blocks in src_extents
      total_dst_blocks: total number of blocks in dst_extents
      op_name: operation name for error reporting
    Raises:
      PayloadError if any check fails.

    """
    # Check: no data_{offset,length}.
    if data_offset is not None:
      raise PayloadError('%s: contains data_{offset,length}' % op_name)

    # Check: total src blocks == total dst blocks.
    if total_src_blocks != total_dst_blocks:
      raise PayloadError(
          '%s: total src blocks (%d) != total dst blocks (%d)' %
          (op_name, total_src_blocks, total_dst_blocks))

    # Check: for all i, i-th src block index != i-th dst block index.
    # Walks the src and dst extent lists in lockstep, advancing by the length
    # of the shorter remaining run each iteration.
    i = 0
    src_extent_iter = iter(op.src_extents)
    dst_extent_iter = iter(op.dst_extents)
    src_extent = dst_extent = None
    src_idx = src_num = dst_idx = dst_num = 0
    while i < total_src_blocks:
      # Get the next source extent, if needed.
      if not src_extent:
        try:
          # NOTE: iterator.next() is the Python 2 protocol (next() in py3).
          src_extent = src_extent_iter.next()
        except StopIteration:
          raise PayloadError('%s: ran out of src extents (%d/%d)' %
                             (op_name, i, total_src_blocks))
        src_idx = src_extent.start_block
        src_num = src_extent.num_blocks

      # Get the next dest extent, if needed.
      if not dst_extent:
        try:
          dst_extent = dst_extent_iter.next()
        except StopIteration:
          raise PayloadError('%s: ran out of dst extents (%d/%d)' %
                             (op_name, i, total_dst_blocks))
        dst_idx = dst_extent.start_block
        dst_num = dst_extent.num_blocks

      # Comparing the run starts suffices: both indices advance by the same
      # amount below, so equality anywhere in a run implies equality here.
      if src_idx == dst_idx:
        raise PayloadError('%s: src/dst block number %d is the same (%d)' %
                           (op_name, i, src_idx))

      advance = min(src_num, dst_num)
      i += advance

      src_idx += advance
      src_num -= advance
      if src_num == 0:
        src_extent = None

      dst_idx += advance
      dst_num -= advance
      if dst_num == 0:
        dst_extent = None

    # Make sure we've exhausted all src/dst extents.
    if src_extent:
      raise PayloadError('%s: excess src blocks' % op_name)
    if dst_extent:
      raise PayloadError('%s: excess dst blocks' % op_name)
693
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800694 def _CheckBsdiffOperation(self, data_length, total_dst_blocks, op_name):
695 """Specific checks for BSDIFF operations.
696
697 Args:
698 data_length: the length of the data blob associated with the operation
699 total_dst_blocks: total number of blocks in dst_extents
700 op_name: operation name for error reporting
701 Raises:
702 PayloadError if any check fails.
703
704 """
Gilad Arnold5502b562013-03-08 13:22:31 -0800705 # Check: data_{offset,length} present.
706 if data_length is None:
707 raise PayloadError('%s: missing data_{offset,length}' % op_name)
708
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800709 # Check: data_length is strictly smaller than the alotted dst blocks.
710 if data_length >= total_dst_blocks * self.block_size:
711 raise PayloadError(
Gilad Arnold5502b562013-03-08 13:22:31 -0800712 '%s: data_length (%d) must be smaller than allotted dst space '
713 '(%d * %d = %d)' %
714 (op_name, data_length, total_dst_blocks, self.block_size,
715 total_dst_blocks * self.block_size))
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800716
  def _CheckOperation(self, op, op_name, is_last, old_block_counters,
                      new_block_counters, old_part_size, new_part_size,
                      prev_data_offset, allow_signature, allow_unhashed,
                      blob_hash_counts):
    """Checks a single update operation.

    Args:
      op: the operation object
      op_name: operation name string for error reporting
      is_last: whether this is the last operation in the sequence
      old_block_counters: arrays of block read counters
      new_block_counters: arrays of block write counters
      old_part_size: the source partition size in bytes
      new_part_size: the target partition size in bytes
      prev_data_offset: offset of last used data bytes
      allow_signature: whether this may be a signature operation
      allow_unhashed: allow operations with unhashed data blobs
      blob_hash_counts: counters for hashed/unhashed blobs
    Returns:
      The amount of data blob associated with the operation.
    Raises:
      PayloadError if any check has failed.

    """
    # Check extents; src extents may contain pseudo-extents, dst extents may
    # only when this is the trailing signature REPLACE operation.
    total_src_blocks = self._CheckExtents(
        op.src_extents, old_part_size, old_block_counters,
        op_name + '.src_extents', allow_pseudo=True)
    allow_signature_in_extents = (allow_signature and is_last and
                                  op.type == common.OpType.REPLACE)
    total_dst_blocks = self._CheckExtents(
        op.dst_extents, new_part_size, new_block_counters,
        op_name + '.dst_extents', allow_signature=allow_signature_in_extents)

    # Check: data_offset present <==> data_length present.
    data_offset = self._CheckOptionalField(op, 'data_offset', None)
    data_length = self._CheckOptionalField(op, 'data_length', None)
    self._CheckPresentIff(data_offset, data_length, 'data_offset',
                          'data_length', op_name)

    # Check: at least one dst_extent.
    if not op.dst_extents:
      raise PayloadError('%s: dst_extents is empty' % op_name)

    # Check {src,dst}_length, if present.
    if op.HasField('src_length'):
      self._CheckLength(op.src_length, total_src_blocks, op_name, 'src_length')
    if op.HasField('dst_length'):
      self._CheckLength(op.dst_length, total_dst_blocks, op_name, 'dst_length')

    if op.HasField('data_sha256_hash'):
      blob_hash_counts['hashed'] += 1

      # Check: operation carries data.
      if data_offset is None:
        raise PayloadError(
            '%s: data_sha256_hash present but no data_{offset,length}' %
            op_name)

      # Check: hash verifies correctly.
      # pylint: disable=E1101
      actual_hash = hashlib.sha256(self.payload.ReadDataBlob(data_offset,
                                                             data_length))
      if op.data_sha256_hash != actual_hash.digest():
        raise PayloadError(
            '%s: data_sha256_hash (%s) does not match actual hash (%s)' %
            (op_name, op.data_sha256_hash.encode('hex'),
             actual_hash.hexdigest()))
    elif data_offset is not None:
      # Unhashed data blob: only allowed for the signature operation, or when
      # unhashed blobs are explicitly permitted.
      if allow_signature_in_extents:
        blob_hash_counts['signature'] += 1
      elif allow_unhashed:
        blob_hash_counts['unhashed'] += 1
      else:
        raise PayloadError('%s: unhashed operation not allowed' % op_name)

    if data_offset is not None:
      # Check: contiguous use of data section.
      if data_offset != prev_data_offset:
        raise PayloadError(
            '%s: data offset (%d) not matching amount used so far (%d)' %
            (op_name, data_offset, prev_data_offset))

    # Type-specific checks; non-REPLACE types are only valid in deltas.
    if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ):
      self._CheckReplaceOperation(op, data_length, total_dst_blocks, op_name)
    elif self.payload_type == _TYPE_FULL:
      raise PayloadError('%s: non-REPLACE operation in a full payload' %
                         op_name)
    elif op.type == common.OpType.MOVE:
      self._CheckMoveOperation(op, data_offset, total_src_blocks,
                               total_dst_blocks, op_name)
    elif op.type == common.OpType.BSDIFF:
      self._CheckBsdiffOperation(data_length, total_dst_blocks, op_name)
    else:
      assert False, 'cannot get here'

    return data_length if data_length is not None else 0
815
Gilad Arnold5502b562013-03-08 13:22:31 -0800816 def _AllocBlockCounters(self, part_size):
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800817 """Returns a freshly initialized array of block counters.
818
819 Args:
820 part_size: the size of the partition
821 Returns:
822 An array of unsigned char elements initialized to zero, one for each of
823 the blocks necessary for containing the partition.
824
825 """
826 num_blocks = (part_size + self.block_size - 1) / self.block_size
827 return array.array('B', [0] * num_blocks)
828
829 def _CheckOperations(self, operations, report, base_name, old_part_size,
830 new_part_size, prev_data_offset, allow_unhashed,
831 allow_signature):
832 """Checks a sequence of update operations.
833
834 Args:
835 operations: the sequence of operations to check
836 report: the report object to add to
837 base_name: the name of the operation block
838 old_part_size: the old partition size in bytes
839 new_part_size: the new partition size in bytes
840 prev_data_offset: offset of last used data bytes
841 allow_unhashed: allow operations with unhashed data blobs
842 allow_signature: whether this sequence may contain signature operations
843 Returns:
Gilad Arnold5502b562013-03-08 13:22:31 -0800844 The total data blob size used.
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800845 Raises:
846 PayloadError if any of the checks fails.
847
848 """
849 # The total size of data blobs used by operations scanned thus far.
850 total_data_used = 0
851 # Counts of specific operation types.
852 op_counts = {
853 common.OpType.REPLACE: 0,
854 common.OpType.REPLACE_BZ: 0,
855 common.OpType.MOVE: 0,
856 common.OpType.BSDIFF: 0,
857 }
858 # Total blob sizes for each operation type.
859 op_blob_totals = {
860 common.OpType.REPLACE: 0,
861 common.OpType.REPLACE_BZ: 0,
862 # MOVE operations don't have blobs
863 common.OpType.BSDIFF: 0,
864 }
865 # Counts of hashed vs unhashed operations.
866 blob_hash_counts = {
867 'hashed': 0,
868 'unhashed': 0,
869 }
870 if allow_signature:
871 blob_hash_counts['signature'] = 0
872
873 # Allocate old and new block counters.
Gilad Arnold5502b562013-03-08 13:22:31 -0800874 old_block_counters = (self._AllocBlockCounters(old_part_size)
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800875 if old_part_size else None)
Gilad Arnold5502b562013-03-08 13:22:31 -0800876 new_block_counters = self._AllocBlockCounters(new_part_size)
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800877
878 # Process and verify each operation.
879 op_num = 0
880 for op, op_name in common.OperationIter(operations, base_name):
881 op_num += 1
882
883 # Check: type is valid.
884 if op.type not in op_counts.keys():
885 raise PayloadError('%s: invalid type (%d)' % (op_name, op.type))
886 op_counts[op.type] += 1
887
888 is_last = op_num == len(operations)
889 curr_data_used = self._CheckOperation(
890 op, op_name, is_last, old_block_counters, new_block_counters,
891 old_part_size, new_part_size, prev_data_offset + total_data_used,
892 allow_signature, allow_unhashed, blob_hash_counts)
893 if curr_data_used:
894 op_blob_totals[op.type] += curr_data_used
895 total_data_used += curr_data_used
896
897 # Report totals and breakdown statistics.
898 report.AddField('total operations', op_num)
899 report.AddField(
900 None,
901 histogram.Histogram.FromCountDict(op_counts,
902 key_names=common.OpType.NAMES),
903 indent=1)
904 report.AddField('total blobs', sum(blob_hash_counts.values()))
905 report.AddField(None,
906 histogram.Histogram.FromCountDict(blob_hash_counts),
907 indent=1)
908 report.AddField('total blob size', _AddHumanReadableSize(total_data_used))
909 report.AddField(
910 None,
911 histogram.Histogram.FromCountDict(op_blob_totals,
912 formatter=_AddHumanReadableSize,
913 key_names=common.OpType.NAMES),
914 indent=1)
915
916 # Report read/write histograms.
917 if old_block_counters:
918 report.AddField('block read hist',
919 histogram.Histogram.FromKeyList(old_block_counters),
920 linebreak=True, indent=1)
921
922 new_write_hist = histogram.Histogram.FromKeyList(new_block_counters)
923 # Check: full update must write each dst block once.
924 if self.payload_type == _TYPE_FULL and new_write_hist.GetKeys() != [1]:
925 raise PayloadError(
926 '%s: not all blocks written exactly once during full update' %
927 base_name)
928
929 report.AddField('block write hist', new_write_hist, linebreak=True,
930 indent=1)
931
932 return total_data_used
933
934 def _CheckSignatures(self, report, pubkey_file_name):
935 """Checks a payload's signature block."""
936 sigs_raw = self.payload.ReadDataBlob(self.sigs_offset, self.sigs_size)
937 sigs = update_metadata_pb2.Signatures()
938 sigs.ParseFromString(sigs_raw)
939 report.AddSection('signatures')
940
941 # Check: at least one signature present.
942 # pylint: disable=E1101
943 if not sigs.signatures:
944 raise PayloadError('signature block is empty')
945
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800946 last_ops_section = (self.payload.manifest.kernel_install_operations or
947 self.payload.manifest.install_operations)
948 fake_sig_op = last_ops_section[-1]
Gilad Arnold5502b562013-03-08 13:22:31 -0800949 # Check: signatures_{offset,size} must match the last (fake) operation.
950 if not (fake_sig_op.type == common.OpType.REPLACE and
951 self.sigs_offset == fake_sig_op.data_offset and
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800952 self.sigs_size == fake_sig_op.data_length):
953 raise PayloadError(
954 'signatures_{offset,size} (%d+%d) does not match last operation '
955 '(%d+%d)' %
956 (self.sigs_offset, self.sigs_size, fake_sig_op.data_offset,
957 fake_sig_op.data_length))
958
959 # Compute the checksum of all data up to signature blob.
960 # TODO(garnold) we're re-reading the whole data section into a string
961 # just to compute the checksum; instead, we could do it incrementally as
962 # we read the blobs one-by-one, under the assumption that we're reading
963 # them in order (which currently holds). This should be reconsidered.
964 payload_hasher = self.payload.manifest_hasher.copy()
965 common.Read(self.payload.payload_file, self.sigs_offset,
966 offset=self.payload.data_offset, hasher=payload_hasher)
967
968 for sig, sig_name in common.SignatureIter(sigs.signatures, 'signatures'):
969 sig_report = report.AddSubReport(sig_name)
970
971 # Check: signature contains mandatory fields.
972 self._CheckMandatoryField(sig, 'version', sig_report, sig_name)
973 self._CheckMandatoryField(sig, 'data', None, sig_name)
974 sig_report.AddField('data len', len(sig.data))
975
976 # Check: signatures pertains to actual payload hash.
977 if sig.version == 1:
978 self._CheckSha256Signature(sig.data, pubkey_file_name,
979 payload_hasher.digest(), sig_name)
980 else:
981 raise PayloadError('unknown signature version (%d)' % sig.version)
982
  def Run(self, pubkey_file_name=None, metadata_sig_file=None,
          report_out_file=None, assert_type=None, block_size=0,
          allow_unhashed=False):
    """Checker entry point, invoking all checks.

    Runs the checks in a fixed order: metadata signature (optional), file
    header, manifest, rootfs operations, kernel operations, total-size
    consistency, payload signatures, summary. The report is dumped even if a
    check fails partway through.

    Args:
      pubkey_file_name: public key used for signature verification
      metadata_sig_file: metadata signature, if verification is desired
      report_out_file: file object to dump the report to
      assert_type: assert that payload is either 'full' or 'delta' (optional)
      block_size: expected filesystem / payload block size
      allow_unhashed: allow operations with unhashed data blobs
    Raises:
      PayloadError if payload verification failed.

    """
    report = _PayloadReport()

    # Validate arguments before doing any work.
    if assert_type not in (None, _TYPE_FULL, _TYPE_DELTA):
      raise PayloadError("invalid assert_type value (`%s')" % assert_type)
    self.payload_type = assert_type

    if block_size:
      self.block_size = block_size
    if not _IsPowerOfTwo(self.block_size):
      raise PayloadError('expected block (%d) size is not a power of two' %
                         self.block_size)

    # Get payload file size: seek to EOF (whence=2), read position, rewind.
    self.payload.payload_file.seek(0, 2)
    payload_file_size = self.payload.payload_file.tell()
    self.payload.ResetFile()

    try:
      # Check metadata signature (if provided).
      if metadata_sig_file:
        if not pubkey_file_name:
          raise PayloadError(
              'no public key provided, cannot verify metadata signature')
        # Metadata signature file content is base64-encoded.
        metadata_sig = base64.b64decode(metadata_sig_file.read())
        self._CheckSha256Signature(metadata_sig, pubkey_file_name,
                                   self.payload.manifest_hasher.digest(),
                                   'metadata signature')

      # Part 1: check the file header.
      report.AddSection('header')
      # Check: payload version is valid (only version 1 is known).
      if self.payload.header.version != 1:
        raise PayloadError('unknown payload version (%d)' %
                           self.payload.header.version)
      report.AddField('version', self.payload.header.version)
      report.AddField('manifest len', self.payload.header.manifest_len)

      # Part 2: check the manifest (also infers payload type if not asserted).
      self._CheckManifest(report)
      assert self.payload_type, 'payload type should be known by now'

      # Part 3: examine rootfs operations.
      report.AddSection('rootfs operations')
      total_blob_size = self._CheckOperations(
          self.payload.manifest.install_operations, report,
          'install_operations', self.old_rootfs_size,
          self.new_rootfs_size, 0, allow_unhashed, False)

      # Part 4: examine kernel operations. Kernel blobs follow rootfs blobs,
      # hence the running total is passed as the previous data offset; only
      # this section may contain the fake signature operation.
      report.AddSection('kernel operations')
      total_blob_size += self._CheckOperations(
          self.payload.manifest.kernel_install_operations, report,
          'kernel_install_operations', self.old_kernel_size,
          self.new_kernel_size, total_blob_size, allow_unhashed, True)

      # Check: operations data reach the end of the payload file.
      used_payload_size = self.payload.data_offset + total_blob_size
      if used_payload_size != payload_file_size:
        raise PayloadError(
            'used payload size (%d) different from actual file size (%d)' %
            (used_payload_size, payload_file_size))

      # Part 5: handle payload signatures message.
      if self.sigs_size:
        if not pubkey_file_name:
          raise PayloadError(
              'no public key provided, cannot verify payload signature')
        self._CheckSignatures(report, pubkey_file_name)

      # Part 6: summary.
      report.AddSection('summary')
      report.AddField('update type', self.payload_type)

      report.Finalize()
    finally:
      # Dump the (possibly partial) report even when a check raised.
      if report_out_file:
        report.Dump(report_out_file)