paycheck: allow disabling specific checks
This became necessary because the delta generator appears to
produce payloads that fail certain checks (e.g. during
update_engine unit testing).
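For illustration only (nothing below is part of this change), the new
knob could be exercised roughly as follows. The payload file name and
the exact paycheck.py invocation are placeholder assumptions; the -d
flag, the check names and the disabled_tests keyword argument are the
ones introduced here:

  # Hypothetical command line (check names from CHECKS_TO_DISABLE):
  #   paycheck.py -d move-same-src-dst-block,payload-sig update.payload
  #
  # Rough Python API sketch against the updated Payload.Check():
  import update_payload

  payload = update_payload.Payload(open('update.payload', 'rb'))
  payload.Init()
  payload.Check(disabled_tests=['move-same-src-dst-block', 'payload-sig'])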
BUG=None
TEST=Verified that disabled checks are not triggered
Change-Id: I4491e0cb32ef44f85e11ffb0402b40d1371525ae
Reviewed-on: https://gerrit.chromium.org/gerrit/49676
Tested-by: Gilad Arnold <garnold@chromium.org>
Reviewed-by: Chris Sosa <sosa@chromium.org>
Commit-Queue: Gilad Arnold <garnold@chromium.org>
diff --git a/scripts/paycheck.py b/scripts/paycheck.py
index 7fd4b17..245b63e 100755
--- a/scripts/paycheck.py
+++ b/scripts/paycheck.py
@@ -62,6 +62,11 @@
help='assert a non-default (4096) payload block size')
check_opts.add_option('-u', '--allow-unhashed', action='store_true',
default=False, help='allow unhashed operations')
+ check_opts.add_option('-d', '--disabled_tests', metavar='TESTLIST',
+ default=(),
+ help=('comma-separated list of tests to disable; '
+ 'available values: ' +
+ ', '.join(update_payload.CHECKS_TO_DISABLE)))
check_opts.add_option('-k', '--key', metavar='FILE',
help='public key to be used for signature verification')
check_opts.add_option('-m', '--meta-sig', metavar='FILE',
@@ -84,6 +89,13 @@
if opts.assert_type not in (None, _TYPE_FULL, _TYPE_DELTA):
parser.error('invalid argument to --type: %s' % opts.assert_type)
+ # Convert and validate --disabled_tests value list, if provided.
+ if opts.disabled_tests:
+ opts.disabled_tests = opts.disabled_tests.split(',')
+ for test in opts.disabled_tests:
+ if test not in update_payload.CHECKS_TO_DISABLE:
+ parser.error('invalid argument to --disabled_tests: %s' % test)
+
# Ensure consistent use of block tracing options.
do_block_trace = opts.root_block or opts.kern_block
if opts.skip and not do_block_trace:
@@ -145,7 +157,8 @@
report_out_file=report_file,
assert_type=options.assert_type,
block_size=int(options.block_size),
- allow_unhashed=options.allow_unhashed)
+ allow_unhashed=options.allow_unhashed,
+ disabled_tests=options.disabled_tests)
finally:
if do_close_report_file:
report_file.close()
diff --git a/scripts/update_payload/__init__.py b/scripts/update_payload/__init__.py
index e437f40..eae23af 100644
--- a/scripts/update_payload/__init__.py
+++ b/scripts/update_payload/__init__.py
@@ -8,3 +8,4 @@
# pylint: disable=W0401
from error import PayloadError
from payload import Payload
+from checker import CHECKS_TO_DISABLE
diff --git a/scripts/update_payload/checker.py b/scripts/update_payload/checker.py
index b85e2b8..703b166 100644
--- a/scripts/update_payload/checker.py
+++ b/scripts/update_payload/checker.py
@@ -28,6 +28,15 @@
#
# Constants / helper functions.
#
+_CHECK_DST_PSEUDO_EXTENTS = 'dst-pseudo-extents'
+_CHECK_MOVE_SAME_SRC_DST_BLOCK = 'move-same-src-dst-block'
+_CHECK_PAYLOAD_SIG = 'payload-sig'
+CHECKS_TO_DISABLE = (
+ _CHECK_DST_PSEUDO_EXTENTS,
+ _CHECK_MOVE_SAME_SRC_DST_BLOCK,
+ _CHECK_PAYLOAD_SIG,
+)
+
_TYPE_FULL = 'full'
_TYPE_DELTA = 'delta'
@@ -252,19 +261,45 @@
"""
- def __init__(self, payload):
+ def __init__(self, payload, assert_type=None, block_size=0,
+ allow_unhashed=False, disabled_tests=()):
+ """Initialize the checker object.
+
+ Args:
+ payload: the payload object to check
+ assert_type: assert that payload is either 'full' or 'delta' (optional)
+ block_size: expected filesystem / payload block size (optional)
+ allow_unhashed: allow operations with unhashed data blobs
+ disabled_tests: list of tests to disable
+
+ """
assert payload.is_init, 'uninitialized update payload'
+
+ # Set checker configuration.
self.payload = payload
+ self.block_size = block_size if block_size else _DEFAULT_BLOCK_SIZE
+ if not _IsPowerOfTwo(self.block_size):
+ raise PayloadError('expected block (%d) size is not a power of two' %
+ self.block_size)
+ if assert_type not in (None, _TYPE_FULL, _TYPE_DELTA):
+ raise PayloadError("invalid assert_type value (`%s')" % assert_type)
+ self.payload_type = assert_type
+ self.allow_unhashed = allow_unhashed
+
+ # Disable specific tests.
+ self.check_dst_pseudo_extents = (
+ _CHECK_DST_PSEUDO_EXTENTS not in disabled_tests)
+ self.check_move_same_src_dst_block = (
+ _CHECK_MOVE_SAME_SRC_DST_BLOCK not in disabled_tests)
+ self.check_payload_sig = _CHECK_PAYLOAD_SIG not in disabled_tests
# Reset state; these will be assigned when the manifest is checked.
- self.block_size = _DEFAULT_BLOCK_SIZE
self.sigs_offset = 0
self.sigs_size = 0
self.old_rootfs_size = 0
self.old_kernel_size = 0
self.new_rootfs_size = 0
self.new_kernel_size = 0
- self.payload_type = None
@staticmethod
def _CheckElem(msg, name, report, is_mandatory, is_submsg, convert=str,
@@ -668,7 +703,7 @@
dst_idx = dst_extent.start_block
dst_num = dst_extent.num_blocks
- if src_idx == dst_idx:
+ if self.check_move_same_src_dst_block and src_idx == dst_idx:
raise PayloadError('%s: src/dst block number %d is the same (%d)' %
(op_name, i, src_idx))
@@ -716,8 +751,7 @@
def _CheckOperation(self, op, op_name, is_last, old_block_counters,
new_block_counters, old_part_size, new_part_size,
- prev_data_offset, allow_signature, allow_unhashed,
- blob_hash_counts):
+ prev_data_offset, allow_signature, blob_hash_counts):
"""Checks a single update operation.
Args:
@@ -730,7 +764,6 @@
new_part_size: the target partition size in bytes
prev_data_offset: offset of last used data bytes
allow_signature: whether this may be a signature operation
- allow_unhashed: allow operations with unhashed data blobs
blob_hash_counts: counters for hashed/unhashed blobs
Returns:
The amount of data blob associated with the operation.
@@ -746,7 +779,9 @@
op.type == common.OpType.REPLACE)
total_dst_blocks = self._CheckExtents(
op.dst_extents, new_part_size, new_block_counters,
- op_name + '.dst_extents', allow_signature=allow_signature_in_extents)
+ op_name + '.dst_extents',
+ allow_pseudo=(not self.check_dst_pseudo_extents),
+ allow_signature=allow_signature_in_extents)
# Check: data_offset present <==> data_length present.
data_offset = self._CheckOptionalField(op, 'data_offset', None)
@@ -785,7 +820,7 @@
elif data_offset is not None:
if allow_signature_in_extents:
blob_hash_counts['signature'] += 1
- elif allow_unhashed:
+ elif self.allow_unhashed:
blob_hash_counts['unhashed'] += 1
else:
raise PayloadError('%s: unhashed operation not allowed' % op_name)
@@ -827,8 +862,7 @@
return array.array('B', [0] * num_blocks)
def _CheckOperations(self, operations, report, base_name, old_part_size,
- new_part_size, prev_data_offset, allow_unhashed,
- allow_signature):
+ new_part_size, prev_data_offset, allow_signature):
"""Checks a sequence of update operations.
Args:
@@ -838,7 +872,6 @@
old_part_size: the old partition size in bytes
new_part_size: the new partition size in bytes
prev_data_offset: offset of last used data bytes
- allow_unhashed: allow operations with unhashed data blobs
allow_signature: whether this sequence may contain signature operations
Returns:
The total data blob size used.
@@ -889,7 +922,7 @@
curr_data_used = self._CheckOperation(
op, op_name, is_last, old_block_counters, new_block_counters,
old_part_size, new_part_size, prev_data_offset + total_data_used,
- allow_signature, allow_unhashed, blob_hash_counts)
+ allow_signature, blob_hash_counts)
if curr_data_used:
op_blob_totals[op.type] += curr_data_used
total_data_used += curr_data_used
@@ -981,33 +1014,19 @@
raise PayloadError('unknown signature version (%d)' % sig.version)
def Run(self, pubkey_file_name=None, metadata_sig_file=None,
- report_out_file=None, assert_type=None, block_size=0,
- allow_unhashed=False):
+ report_out_file=None):
"""Checker entry point, invoking all checks.
Args:
pubkey_file_name: public key used for signature verification
metadata_sig_file: metadata signature, if verification is desired
report_out_file: file object to dump the report to
- assert_type: assert that payload is either 'full' or 'delta' (optional)
- block_size: expected filesystem / payload block size
- allow_unhashed: allow operations with unhashed data blobs
Raises:
PayloadError if payload verification failed.
"""
report = _PayloadReport()
- if assert_type not in (None, _TYPE_FULL, _TYPE_DELTA):
- raise PayloadError("invalid assert_type value (`%s')" % assert_type)
- self.payload_type = assert_type
-
- if block_size:
- self.block_size = block_size
- if not _IsPowerOfTwo(self.block_size):
- raise PayloadError('expected block (%d) size is not a power of two' %
- self.block_size)
-
# Get payload file size.
self.payload.payload_file.seek(0, 2)
payload_file_size = self.payload.payload_file.tell()
@@ -1041,15 +1060,15 @@
report.AddSection('rootfs operations')
total_blob_size = self._CheckOperations(
self.payload.manifest.install_operations, report,
- 'install_operations', self.old_rootfs_size,
- self.new_rootfs_size, 0, allow_unhashed, False)
+ 'install_operations', self.old_rootfs_size, self.new_rootfs_size, 0,
+ False)
# Part 4: examine kernel operations.
report.AddSection('kernel operations')
total_blob_size += self._CheckOperations(
self.payload.manifest.kernel_install_operations, report,
'kernel_install_operations', self.old_kernel_size,
- self.new_kernel_size, total_blob_size, allow_unhashed, True)
+ self.new_kernel_size, total_blob_size, True)
# Check: operations data reach the end of the payload file.
used_payload_size = self.payload.data_offset + total_blob_size
@@ -1059,7 +1078,7 @@
(used_payload_size, payload_file_size))
# Part 5: handle payload signatures message.
- if self.sigs_size:
+ if self.check_payload_sig and self.sigs_size:
if not pubkey_file_name:
raise PayloadError(
'no public key provided, cannot verify payload signature')
diff --git a/scripts/update_payload/checker_unittest.py b/scripts/update_payload/checker_unittest.py
index 681a920..8d134fc 100755
--- a/scripts/update_payload/checker_unittest.py
+++ b/scripts/update_payload/checker_unittest.py
@@ -54,14 +54,20 @@
return count << 30
-def _GetPayloadChecker(payload_gen_write_to_file_func, *largs, **dargs):
+def _GetPayloadChecker(payload_gen_write_to_file_func, payload_gen_dargs=None,
+ checker_init_dargs=None):
"""Returns a payload checker from a given payload generator."""
+ if payload_gen_dargs is None:
+ payload_gen_dargs = {}
+ if checker_init_dargs is None:
+ checker_init_dargs = {}
+
payload_file = cStringIO.StringIO()
- payload_gen_write_to_file_func(payload_file, *largs, **dargs)
+ payload_gen_write_to_file_func(payload_file, **payload_gen_dargs)
payload_file.seek(0)
payload = update_payload.Payload(payload_file)
payload.Init()
- return checker.PayloadChecker(payload)
+ return checker.PayloadChecker(payload, **checker_init_dargs)
def _GetPayloadCheckerWithData(payload_gen):
@@ -783,7 +789,8 @@
# Create the test object.
payload = self.MockPayload()
- payload_checker = checker.PayloadChecker(payload)
+ payload_checker = checker.PayloadChecker(payload,
+ allow_unhashed=allow_unhashed)
block_size = payload_checker.block_size
# Create auxiliary arguments.
@@ -865,7 +872,7 @@
fail_dst_length or fail_data_hash or fail_prev_data_offset)
largs = [op, 'foo', is_last, old_block_counters, new_block_counters,
old_part_size, new_part_size, prev_data_offset, allow_signature,
- allow_unhashed, blob_hash_counts]
+ blob_hash_counts]
if should_fail:
self.assertRaises(update_payload.PayloadError,
payload_checker._CheckOperation, *largs)
@@ -919,13 +926,15 @@
data_length=rootfs_data_length)
# Create the test object.
- payload_checker = _GetPayloadChecker(payload_gen.WriteToFile)
+ payload_checker = _GetPayloadChecker(payload_gen.WriteToFile,
+ checker_init_dargs={
+ 'allow_unhashed': True})
payload_checker.payload_type = checker._TYPE_FULL
report = checker._PayloadReport()
should_fail = (fail_bad_type or fail_nonexhaustive_full_update)
largs = (payload_checker.payload.manifest.install_operations, report,
- 'foo', 0, rootfs_part_size, 0, True, False)
+ 'foo', 0, rootfs_part_size, 0, False)
if should_fail:
self.assertRaises(update_payload.PayloadError,
payload_checker._CheckOperations, *largs)
@@ -982,9 +991,11 @@
# Generate payload (complete w/ signature) and create the test object.
payload_checker = _GetPayloadChecker(
- payload_gen.WriteToFileWithData, sigs_data=sigs_data,
- privkey_file_name=_PRIVKEY_FILE_NAME,
- do_add_pseudo_operation=(not do_forge_pseudo_op))
+ payload_gen.WriteToFileWithData,
+ payload_gen_dargs={
+ 'sigs_data': sigs_data,
+ 'privkey_file_name': _PRIVKEY_FILE_NAME,
+ 'do_add_pseudo_operation': not do_forge_pseudo_op})
payload_checker.payload_type = checker._TYPE_FULL
report = checker._PayloadReport()
@@ -1004,7 +1015,7 @@
def DoRunTest(self, fail_wrong_payload_type, fail_invalid_block_size,
fail_mismatched_block_size, fail_excess_data):
# Generate a test payload. For this test, we generate a full update that
- # has samle kernel and rootfs operations. Since most testing is done with
+ # has sample kernel and rootfs operations. Since most testing is done with
# internal PayloadChecker methods that are tested elsewhere, here we only
# tamper with what's actually being manipulated and/or tested in the Run()
# method itself. Note that the checker doesn't verify partition hashes, so
@@ -1028,30 +1039,36 @@
data_blob=os.urandom(kernel_part_size))
# Generate payload (complete w/ signature) and create the test object.
- payload_checker = _GetPayloadChecker(
- payload_gen.WriteToFileWithData,
- privkey_file_name=_PRIVKEY_FILE_NAME,
- do_add_pseudo_operation=True, is_pseudo_in_kernel=True,
- padding=os.urandom(1024) if fail_excess_data else None)
-
if fail_invalid_block_size:
use_block_size = block_size + 5 # not a power of two
elif fail_mismatched_block_size:
      use_block_size = block_size * 2  # different than payload stated
else:
use_block_size = block_size
- dargs = {
- 'pubkey_file_name': _PUBKEY_FILE_NAME,
- 'assert_type': 'delta' if fail_wrong_payload_type else 'full',
- 'block_size': use_block_size}
- should_fail = (fail_wrong_payload_type or fail_invalid_block_size or
- fail_mismatched_block_size or fail_excess_data)
- if should_fail:
- self.assertRaises(update_payload.PayloadError,
- payload_checker.Run, **dargs)
+ dargs = {
+ 'payload_gen_dargs': {
+ 'privkey_file_name': _PRIVKEY_FILE_NAME,
+ 'do_add_pseudo_operation': True,
+ 'is_pseudo_in_kernel': True,
+ 'padding': os.urandom(1024) if fail_excess_data else None},
+ 'checker_init_dargs': {
+ 'assert_type': 'delta' if fail_wrong_payload_type else 'full',
+ 'block_size': use_block_size}}
+ if fail_invalid_block_size:
+ self.assertRaises(update_payload.PayloadError, _GetPayloadChecker,
+ payload_gen.WriteToFileWithData, **dargs)
else:
- self.assertIsNone(payload_checker.Run(**dargs))
+ payload_checker = _GetPayloadChecker(payload_gen.WriteToFileWithData,
+ **dargs)
+ dargs = {'pubkey_file_name': _PUBKEY_FILE_NAME}
+ should_fail = (fail_wrong_payload_type or fail_mismatched_block_size or
+ fail_excess_data)
+ if should_fail:
+ self.assertRaises(update_payload.PayloadError,
+ payload_checker.Run, **dargs)
+ else:
+ self.assertIsNone(payload_checker.Run(**dargs))
# This implements a generic API, hence the occasional unused args.
diff --git a/scripts/update_payload/payload.py b/scripts/update_payload/payload.py
index dbb385a..b4760b2 100644
--- a/scripts/update_payload/payload.py
+++ b/scripts/update_payload/payload.py
@@ -175,7 +175,7 @@
def Check(self, pubkey_file_name=None, metadata_sig_file=None,
report_out_file=None, assert_type=None, block_size=0,
- allow_unhashed=False):
+ allow_unhashed=False, disabled_tests=()):
"""Checks the payload integrity.
Args:
@@ -185,6 +185,7 @@
assert_type: assert that payload is either 'full' or 'delta'
block_size: expected filesystem / payload block size
allow_unhashed: allow unhashed operation blobs
+ disabled_tests: list of tests to disable
Raises:
PayloadError if payload verification failed.
@@ -192,11 +193,12 @@
self._AssertInit()
# Create a short-lived payload checker object and run it.
- helper = checker.PayloadChecker(self)
+ helper = checker.PayloadChecker(
+ self, assert_type=assert_type, block_size=block_size,
+ allow_unhashed=allow_unhashed, disabled_tests=disabled_tests)
helper.Run(pubkey_file_name=pubkey_file_name,
metadata_sig_file=metadata_sig_file,
- report_out_file=report_out_file, assert_type=assert_type,
- block_size=block_size, allow_unhashed=allow_unhashed)
+ report_out_file=report_out_file)
def Apply(self, dst_kernel_part, dst_rootfs_part, src_kernel_part=None,
src_rootfs_part=None):