Gilad Arnold | 553b0ec | 2013-01-26 01:00:39 -0800 | [diff] [blame^] | 1 | # Copyright (c) 2013 The Chromium OS Authors. All rights reserved. |
| 2 | # Use of this source code is governed by a BSD-style license that can be |
| 3 | # found in the LICENSE file. |
| 4 | |
| 5 | """Tools for reading, verifying and applying Chrome OS update payloads.""" |
| 6 | |
| 7 | import hashlib |
| 8 | import struct |
| 9 | |
| 10 | import applier |
| 11 | import block_tracer |
| 12 | import checker |
| 13 | import common |
| 14 | from error import PayloadError |
| 15 | import update_metadata_pb2 |
| 16 | |
| 17 | |
| 18 | # |
| 19 | # Helper functions. |
| 20 | # |
def _ReadInt(file_obj, size, is_unsigned, hasher=None):
  """Read a binary-encoded integer from a file.

  It will do the correct conversion based on the reported size and whether or
  not a signed number is expected. Assumes a network (big-endian) byte
  ordering.

  Args:
    file_obj: a file object
    size: the integer size in bytes (2, 4 or 8)
    is_unsigned: whether it is signed or not
    hasher: an optional hasher to pass the value through
  Returns:
    An "unpacked" (Python) integer value.
  Raises:
    PayloadError if the field size is not supported or a read error occurred.

  """
  # Map the field size to the base (signed) struct conversion format.
  size_to_fmt = {2: 'h', 4: 'i', 8: 'q'}
  if size not in size_to_fmt:
    raise PayloadError('unsupported numeric field size (%s)' % size)
  fmt = size_to_fmt[size]

  # Signed or unsigned? Upper-case struct codes denote the unsigned variants.
  if is_unsigned:
    fmt = fmt.upper()

  # Our numeric values are in network byte order (big-endian).
  fmt = '!' + fmt

  return struct.unpack(fmt, common.Read(file_obj, size, hasher=hasher))[0]
| 57 | |
| 58 | |
| 59 | # |
| 60 | # Update payload. |
| 61 | # |
class Payload(object):
  """Chrome OS update payload processor."""

  class _PayloadHeader(object):
    """Update payload header struct."""

    def __init__(self, version, manifest_len):
      self.version = version
      self.manifest_len = manifest_len

  # Header constants; sizes are in bytes.
  _MAGIC = 'CrAU'
  _VERSION_SIZE = 8
  _MANIFEST_LEN_SIZE = 8

  def __init__(self, payload_file):
    """Initialize the payload object.

    Args:
      payload_file: update payload file object open for reading

    """
    self.payload_file = payload_file
    self.is_init = False
    self.manifest_hasher = None
    self.header = None
    self.manifest = None
    self.data_offset = 0

  def _ReadHeader(self):
    """Reads and returns the payload header.

    Returns:
      A payload header object.
    Raises:
      PayloadError if a read error occurred.

    """
    # The payload must open with the expected magic string.
    file_magic = common.Read(self.payload_file, len(self._MAGIC),
                             hasher=self.manifest_hasher)
    if file_magic != self._MAGIC:
      raise PayloadError('invalid payload magic: %s' % file_magic)

    # Read the version and manifest-length fields (in that order), then wrap
    # them in a header struct.
    version = _ReadInt(self.payload_file, self._VERSION_SIZE, True,
                       hasher=self.manifest_hasher)
    manifest_len = _ReadInt(self.payload_file, self._MANIFEST_LEN_SIZE, True,
                            hasher=self.manifest_hasher)
    return self._PayloadHeader(version, manifest_len)

  def _ReadManifest(self):
    """Reads and returns the payload manifest.

    Returns:
      A string containing the payload manifest in binary form.
    Raises:
      PayloadError if a read error occurred.

    """
    if not self.header:
      raise PayloadError('payload header not present')

    return common.Read(self.payload_file, self.header.manifest_len,
                       hasher=self.manifest_hasher)

  def ReadDataBlob(self, offset, length):
    """Reads and returns a single data blob from the update payload.

    Args:
      offset: offset to the beginning of the blob from the end of the manifest
      length: the blob's length
    Returns:
      A string containing the raw blob data.
    Raises:
      PayloadError if a read error occurred.

    """
    # Blob offsets are relative to the start of the data section.
    blob_pos = self.data_offset + offset
    return common.Read(self.payload_file, length, offset=blob_pos)

  def Init(self):
    """Initializes the payload object.

    This is a prerequisite for any other public API call.

    Raises:
      PayloadError if object already initialized or fails to initialize
      correctly.

    """
    if self.is_init:
      raise PayloadError('payload object already initialized')

    # Set up the hash context used to digest the header and manifest bytes.
    # pylint: disable=E1101
    self.manifest_hasher = hashlib.sha256()

    # Read the file header.
    self.header = self._ReadHeader()

    # Read and parse the manifest protobuf.
    raw_manifest = self._ReadManifest()
    self.manifest = update_metadata_pb2.DeltaArchiveManifest()
    self.manifest.ParseFromString(raw_manifest)

    # The data section starts right past the fixed-size header fields and the
    # variable-size manifest.
    self.data_offset = (len(self._MAGIC) + self._VERSION_SIZE +
                        self._MANIFEST_LEN_SIZE + self.header.manifest_len)

    self.is_init = True

  def _AssertInit(self):
    """Raises an exception if the object was not initialized."""
    if not self.is_init:
      raise PayloadError('payload object not initialized')

  def ResetFile(self):
    """Resets the offset of the payload file to right past the manifest."""
    self.payload_file.seek(self.data_offset)

  def IsDelta(self):
    """Returns True iff the payload appears to be a delta."""
    self._AssertInit()
    # A delta payload carries info about the partitions it patches against.
    return any(self.manifest.HasField(field)
               for field in ('old_kernel_info', 'old_rootfs_info'))

  def IsFull(self):
    """Returns True iff the payload appears to be a full."""
    return not self.IsDelta()

  def Check(self, pubkey_file_name=None, metadata_sig_file=None,
            report_out_file=None, assert_type=None, block_size=0,
            allow_unhashed=False):
    """Checks the payload integrity.

    Args:
      pubkey_file_name: public key used for signature verification
      metadata_sig_file: metadata signature, if verification is desired
      report_out_file: file object to dump the report to
      assert_type: assert that payload is either 'full' or 'delta'
      block_size: expected filesystem / payload block size
      allow_unhashed: allow unhashed operation blobs
    Raises:
      PayloadError if payload verification failed.

    """
    self._AssertInit()

    # Delegate to a short-lived payload checker object.
    payload_checker = checker.PayloadChecker(self)
    payload_checker.Run(pubkey_file_name=pubkey_file_name,
                        metadata_sig_file=metadata_sig_file,
                        report_out_file=report_out_file,
                        assert_type=assert_type, block_size=block_size,
                        allow_unhashed=allow_unhashed)

  def Apply(self, dst_kernel_part, dst_rootfs_part, src_kernel_part=None,
            src_rootfs_part=None):
    """Applies the update payload.

    Args:
      dst_kernel_part: name of dest kernel partition file
      dst_rootfs_part: name of dest rootfs partition file
      src_kernel_part: name of source kernel partition file (optional)
      src_rootfs_part: name of source rootfs partition file (optional)
    Raises:
      PayloadError if payload application failed.

    """
    self._AssertInit()

    # Delegate to a short-lived payload applier object.
    payload_applier = applier.PayloadApplier(self)
    payload_applier.Run(dst_kernel_part, dst_rootfs_part,
                        src_kernel_part=src_kernel_part,
                        src_rootfs_part=src_rootfs_part)

  def TraceBlock(self, block, skip, trace_out_file, is_kernel):
    """Traces the origin(s) of a given dest partition block.

    The tracing tries to find origins transitively, when possible (it currently
    only works for move operations, where the mapping of src/dst is
    one-to-one). It will dump a list of operations and source blocks
    responsible for the data in the given dest block.

    Args:
      block: the block number whose origin to trace
      skip: the number of first origin mappings to skip
      trace_out_file: file object to dump the trace to
      is_kernel: trace through kernel (True) or rootfs (False) operations

    """
    self._AssertInit()

    # Delegate to a short-lived payload block tracer object.
    tracer = block_tracer.PayloadBlockTracer(self)
    tracer.Run(block, skip, trace_out_file, is_kernel)