#
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Applying a Chrome OS update payload.

This module is used internally by the main Payload class for applying an update
payload. The interface for invoking the applier is as follows:

  applier = PayloadApplier(payload)
  applier.Run(...)
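
A fuller sketch of a typical invocation (illustrative only; it assumes the
Payload class exported by this package, and the partition names and image
paths below are made-up examples):

  from update_payload import Payload

  with open('payload.bin', 'rb') as payload_file:
    payload = Payload(payload_file)
    payload.Init()
    applier = PayloadApplier(payload)
    applier.Run({'root': 'new_root.img', 'kernel': 'new_kernel.img'},
                old_parts={'root': 'old_root.img', 'kernel': 'old_kernel.img'})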

"""

from __future__ import absolute_import
from __future__ import print_function

import array
import bz2
import hashlib
# The lzma module is not available everywhere, so ignore a failed import when
# lzma is not actually going to be used. For example, 'cros flash' uses
# devserver code which eventually loads this file, but the lzma library is not
# included on the client test devices and does not need to be, because
# 'cros flash' never exercises lzma. Python 3.x ships lzma in the standard
# library; for backward compatibility with Python 2.7, backports-lzma is
# needed.
try:
  import lzma
except ImportError:
  try:
    from backports import lzma
  except ImportError:
    pass
import os
import subprocess
import sys
import tempfile

from update_payload import common
from update_payload.error import PayloadError

#
# Helper functions.
#
def _VerifySha256(file_obj, expected_hash, name, length=-1):
  """Verifies the SHA256 hash of a file.

  Args:
    file_obj: file object to read
    expected_hash: the hash digest we expect to be getting
    name: name string of this hash, for error reporting
    length: precise length of data to verify (optional)

  Raises:
    PayloadError if the computed hash doesn't match the expected one, or if it
    fails to read the specified length of data.
  """
  hasher = hashlib.sha256()
  block_length = 1024 * 1024
  max_length = length if length >= 0 else sys.maxsize

  while max_length > 0:
    read_length = min(max_length, block_length)
    data = file_obj.read(read_length)
    if not data:
      break
    max_length -= len(data)
    hasher.update(data)

  if length >= 0 and max_length > 0:
    raise PayloadError(
        'insufficient data (%d instead of %d) when verifying %s' %
        (length - max_length, length, name))

  actual_hash = hasher.digest()
  if actual_hash != expected_hash:
    raise PayloadError('%s hash (%s) not as expected (%s)' %
                       (name, common.FormatSha256(actual_hash),
                        common.FormatSha256(expected_hash)))


def _ReadExtents(file_obj, extents, block_size, max_length=-1):
  """Reads data from file as defined by extent sequence.

  This tries to be efficient by not copying data as it is read in chunks.

  Args:
    file_obj: file object
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    max_length: maximum length to read (optional)

  Returns:
    A byte array containing the concatenated read data.
  """
  data = array.array('B')
  if max_length < 0:
    max_length = sys.maxsize
  for ex in extents:
    if max_length == 0:
      break
    read_length = min(max_length, ex.num_blocks * block_size)

    file_obj.seek(ex.start_block * block_size)
    data.fromfile(file_obj, read_length)

    max_length -= read_length

  return data


def _WriteExtents(file_obj, data, extents, block_size, base_name):
  """Writes data to file as defined by extent sequence.

  This tries to be efficient by not copying data as it is written in chunks.

  Args:
    file_obj: file object
    data: data to write
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    base_name: name string of extent sequence for error reporting

  Raises:
    PayloadError when things don't add up.
  """
  data_offset = 0
  data_length = len(data)
  for ex, ex_name in common.ExtentIter(extents, base_name):
    if not data_length:
      raise PayloadError('%s: more write extents than data' % ex_name)
    write_length = min(data_length, ex.num_blocks * block_size)
    file_obj.seek(ex.start_block * block_size)
    file_obj.write(data[data_offset:(data_offset + write_length)])

    data_offset += write_length
    data_length -= write_length

  if data_length:
    raise PayloadError('%s: more data than write extents' % base_name)


def _ExtentsToBspatchArg(extents, block_size, base_name, data_length=-1):
  """Translates an extent sequence into a bspatch-compatible string argument.

  Args:
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    base_name: name string of extent sequence for error reporting
    data_length: the actual total length of the data in bytes (optional)

  Returns:
    A tuple consisting of (i) a string of the form
    "off_1:len_1,...,off_n:len_n", (ii) an offset where zero padding is needed
    for filling the last extent, (iii) the length of the padding (zero means no
    padding is needed and the extents cover the full length of data).

  Raises:
    PayloadError if data_length is too short or too long.
  """
  arg = ''
  pad_off = pad_len = 0
  if data_length < 0:
    data_length = sys.maxsize
  for ex, ex_name in common.ExtentIter(extents, base_name):
    if not data_length:
      raise PayloadError('%s: more extents than total data length' % ex_name)

    start_byte = ex.start_block * block_size
    num_bytes = ex.num_blocks * block_size
    if data_length < num_bytes:
      # We're only padding a real extent.
      pad_off = start_byte + data_length
      pad_len = num_bytes - data_length
      num_bytes = data_length

    arg += '%s%d:%d' % (arg and ',', start_byte, num_bytes)
    data_length -= num_bytes

  if data_length:
    raise PayloadError('%s: extents not covering full data length' % base_name)

  return arg, pad_off, pad_len

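# A worked example of _ExtentsToBspatchArg (illustrative numbers only): with
# block_size=4096, extents [{start_block: 2, num_blocks: 2},
# {start_block: 10, num_blocks: 1}] and data_length=10000, the first extent
# consumes 8192 bytes and the second is clipped to the remaining 1808 bytes,
# yielding ('8192:8192,40960:1808', 42768, 2288): the returned string encodes
# byte offset:length pairs, and the last 2288 bytes of the second extent must
# be zero-padded starting at byte offset 42768.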

#
# Payload application.
#
class PayloadApplier(object):
  """Applying an update payload.

  This is a short-lived object whose purpose is to isolate the logic used for
  applying an update payload.
  """

  def __init__(self, payload, bsdiff_in_place=True, bspatch_path=None,
               puffpatch_path=None, truncate_to_expected_size=True):
    """Initialize the applier.

    Args:
      payload: the payload object to apply
      bsdiff_in_place: whether to perform BSDIFF operation in-place (optional)
      bspatch_path: path to the bspatch binary (optional)
      puffpatch_path: path to the puffpatch binary (optional)
      truncate_to_expected_size: whether to truncate the resulting partitions
                                 to their expected sizes, as specified in the
                                 payload (optional)
    """
    assert payload.is_init, 'uninitialized update payload'
    self.payload = payload
    self.block_size = payload.manifest.block_size
    self.minor_version = payload.manifest.minor_version
    self.bsdiff_in_place = bsdiff_in_place
    self.bspatch_path = bspatch_path or 'bspatch'
    self.puffpatch_path = puffpatch_path or 'puffin'
    self.truncate_to_expected_size = truncate_to_expected_size

  def _ApplyReplaceOperation(self, op, op_name, out_data, part_file, part_size):
    """Applies a REPLACE{,_BZ,_XZ} operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      out_data: the data to be written
      part_file: the partition file object
      part_size: the size of the partition

    Raises:
      PayloadError if something goes wrong.
    """
    block_size = self.block_size
    data_length = len(out_data)

    # Decompress data if needed.
    if op.type == common.OpType.REPLACE_BZ:
      out_data = bz2.decompress(out_data)
      data_length = len(out_data)
    elif op.type == common.OpType.REPLACE_XZ:
      # pylint: disable=no-member
      out_data = lzma.decompress(out_data)
      data_length = len(out_data)

    # Write data to blocks specified in dst extents.
    data_start = 0
    for ex, ex_name in common.ExtentIter(op.dst_extents,
                                         '%s.dst_extents' % op_name):
      start_block = ex.start_block
      num_blocks = ex.num_blocks
      count = num_blocks * block_size

      data_end = data_start + count

      # Make sure we're not running past partition boundary.
      if (start_block + num_blocks) * block_size > part_size:
        raise PayloadError(
            '%s: extent (%s) exceeds partition size (%d)' %
            (ex_name, common.FormatExtent(ex, block_size),
             part_size))

      # Make sure that we have enough data to write.
      if data_end >= data_length + block_size:
        raise PayloadError(
            '%s: more dst blocks than data (even with padding)' % op_name)

      # Pad with zeros if necessary.
      if data_end > data_length:
        padding = data_end - data_length
        out_data += b'\0' * padding

      self.payload.payload_file.seek(start_block * block_size)
      part_file.seek(start_block * block_size)
      part_file.write(out_data[data_start:data_end])

      data_start += count

    # Make sure we wrote all data.
    if data_start < data_length:
      raise PayloadError('%s: wrote fewer bytes (%d) than expected (%d)' %
                         (op_name, data_start, data_length))

  def _ApplyZeroOperation(self, op, op_name, part_file):
    """Applies a ZERO operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      part_file: the partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    block_size = self.block_size
    base_name = '%s.dst_extents' % op_name

    # Iterate over the extents and write zeros.
    # pylint: disable=unused-variable
    for ex, ex_name in common.ExtentIter(op.dst_extents, base_name):
      part_file.seek(ex.start_block * block_size)
      part_file.write(b'\0' * (ex.num_blocks * block_size))

  def _ApplySourceCopyOperation(self, op, op_name, old_part_file,
                                new_part_file):
    """Applies a SOURCE_COPY operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      old_part_file: the old partition file object
      new_part_file: the new partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    if not old_part_file:
      raise PayloadError(
          '%s: no source partition file provided for operation type (%d)' %
          (op_name, op.type))

    block_size = self.block_size

    # Gather input raw data from src extents.
    in_data = _ReadExtents(old_part_file, op.src_extents, block_size)

    # Dump extracted data to dst extents.
    _WriteExtents(new_part_file, in_data, op.dst_extents, block_size,
                  '%s.dst_extents' % op_name)

  def _BytesInExtents(self, extents, base_name):
    """Counts the length of extents in bytes.

    Args:
      extents: The list of Extents.
      base_name: For error reporting.

    Returns:
      The number of bytes in extents.
    """

    length = 0
    # pylint: disable=unused-variable
    for ex, ex_name in common.ExtentIter(extents, base_name):
      length += ex.num_blocks * self.block_size
    return length

  def _ApplyDiffOperation(self, op, op_name, patch_data, old_part_file,
                          new_part_file):
    """Applies a SOURCE_BSDIFF, BROTLI_BSDIFF or PUFFDIFF operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      patch_data: the binary patch content
      old_part_file: the source partition file object
      new_part_file: the target partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    if not old_part_file:
      raise PayloadError(
          '%s: no source partition file provided for operation type (%d)' %
          (op_name, op.type))

    block_size = self.block_size

    # Dump patch data to file.
    with tempfile.NamedTemporaryFile(delete=False) as patch_file:
      patch_file_name = patch_file.name
      patch_file.write(patch_data)

    if (hasattr(new_part_file, 'fileno') and
        ((not old_part_file) or hasattr(old_part_file, 'fileno'))):
      # Construct input and output extents argument for bspatch.

      in_extents_arg, _, _ = _ExtentsToBspatchArg(
          op.src_extents, block_size, '%s.src_extents' % op_name,
          data_length=op.src_length if op.src_length else
          self._BytesInExtents(op.src_extents, '%s.src_extents' % op_name))
      out_extents_arg, pad_off, pad_len = _ExtentsToBspatchArg(
          op.dst_extents, block_size, '%s.dst_extents' % op_name,
          data_length=op.dst_length if op.dst_length else
          self._BytesInExtents(op.dst_extents, '%s.dst_extents' % op_name))

      new_file_name = '/dev/fd/%d' % new_part_file.fileno()
      # Diff from source partition.
      old_file_name = '/dev/fd/%d' % old_part_file.fileno()

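      # For illustration only: with the default binaries, the commands built
      # below look roughly like the following (the /dev/fd paths, temp file
      # name and extent strings are example values, not from a real run):
      #   bspatch /dev/fd/4 /dev/fd/5 /tmp/tmpXXXXXX 8192:8192 0:16384
      #   puffin --operation=puffpatch --src_file=/dev/fd/4 --dst_file=/dev/fd/5
      #       --patch_file=/tmp/tmpXXXXXX --src_extents=8192:8192
      #       --dst_extents=0:16384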
      if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF):
        # Invoke bspatch on partition file with extents args.
        bspatch_cmd = [self.bspatch_path, old_file_name, new_file_name,
                       patch_file_name, in_extents_arg, out_extents_arg]
        subprocess.check_call(bspatch_cmd)
      elif op.type == common.OpType.PUFFDIFF:
        # Invoke puffpatch on partition file with extents args.
        puffpatch_cmd = [self.puffpatch_path,
                         "--operation=puffpatch",
                         "--src_file=%s" % old_file_name,
                         "--dst_file=%s" % new_file_name,
                         "--patch_file=%s" % patch_file_name,
                         "--src_extents=%s" % in_extents_arg,
                         "--dst_extents=%s" % out_extents_arg]
        subprocess.check_call(puffpatch_cmd)
      else:
        raise PayloadError("Unknown operation %s" % op.type)

      # Pad with zeros past the total output length.
      if pad_len:
        new_part_file.seek(pad_off)
        new_part_file.write(b'\0' * pad_len)
    else:
      # Gather input raw data and write to a temp file.
      input_part_file = old_part_file if old_part_file else new_part_file
      in_data = _ReadExtents(input_part_file, op.src_extents, block_size,
                             max_length=op.src_length if op.src_length else
                             self._BytesInExtents(op.src_extents,
                                                  '%s.src_extents' % op_name))
      with tempfile.NamedTemporaryFile(delete=False) as in_file:
        in_file_name = in_file.name
        in_file.write(in_data)

      # Allocate temporary output file.
      with tempfile.NamedTemporaryFile(delete=False) as out_file:
        out_file_name = out_file.name

      if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF):
        # Invoke bspatch.
        bspatch_cmd = [self.bspatch_path, in_file_name, out_file_name,
                       patch_file_name]
        subprocess.check_call(bspatch_cmd)
      elif op.type == common.OpType.PUFFDIFF:
        # Invoke puffpatch.
        puffpatch_cmd = [self.puffpatch_path,
                         "--operation=puffpatch",
                         "--src_file=%s" % in_file_name,
                         "--dst_file=%s" % out_file_name,
                         "--patch_file=%s" % patch_file_name]
        subprocess.check_call(puffpatch_cmd)
      else:
        raise PayloadError("Unknown operation %s" % op.type)

      # Read output.
      with open(out_file_name, 'rb') as out_file:
        out_data = out_file.read()
        if len(out_data) != op.dst_length:
          raise PayloadError(
              '%s: actual patched data length (%d) not as expected (%d)' %
              (op_name, len(out_data), op.dst_length))

      # Write output back to partition, with padding.
      unaligned_out_len = len(out_data) % block_size
      if unaligned_out_len:
        out_data += b'\0' * (block_size - unaligned_out_len)
      _WriteExtents(new_part_file, out_data, op.dst_extents, block_size,
                    '%s.dst_extents' % op_name)

      # Delete input/output files.
      os.remove(in_file_name)
      os.remove(out_file_name)

    # Delete patch file.
    os.remove(patch_file_name)

  def _ApplyOperations(self, operations, base_name, old_part_file,
                       new_part_file, part_size):
    """Applies a sequence of update operations to a partition.

    Args:
      operations: the sequence of operations
      base_name: the name of the operation sequence
      old_part_file: the old partition file object, open for reading/writing
      new_part_file: the new partition file object, open for reading/writing
      part_size: the partition size

    Raises:
      PayloadError if anything goes wrong while processing the payload.
    """
    for op, op_name in common.OperationIter(operations, base_name):
      # Read data blob.
      data = self.payload.ReadDataBlob(op.data_offset, op.data_length)

      if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ,
                     common.OpType.REPLACE_XZ):
        self._ApplyReplaceOperation(op, op_name, data, new_part_file, part_size)
      elif op.type == common.OpType.ZERO:
        self._ApplyZeroOperation(op, op_name, new_part_file)
      elif op.type == common.OpType.SOURCE_COPY:
        self._ApplySourceCopyOperation(op, op_name, old_part_file,
                                       new_part_file)
      elif op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.PUFFDIFF,
                       common.OpType.BROTLI_BSDIFF):
        self._ApplyDiffOperation(op, op_name, data, old_part_file,
                                 new_part_file)
      else:
        raise PayloadError('%s: unknown operation type (%d)' %
                           (op_name, op.type))

  def _ApplyToPartition(self, operations, part_name, base_name,
                        new_part_file_name, new_part_info,
                        old_part_file_name=None, old_part_info=None):
    """Applies an update to a partition.

    Args:
      operations: the sequence of update operations to apply
      part_name: the name of the partition, for error reporting
      base_name: the name of the operation sequence
      new_part_file_name: file name to write partition data to
      new_part_info: size and expected hash of dest partition
      old_part_file_name: file name of source partition (optional)
      old_part_info: size and expected hash of source partition (optional)

    Raises:
      PayloadError if anything goes wrong with the update.
    """
    # Do we have a source partition?
    if old_part_file_name:
      # Verify the source partition.
      with open(old_part_file_name, 'rb') as old_part_file:
        _VerifySha256(old_part_file, old_part_info.hash,
                      'old ' + part_name, length=old_part_info.size)
      new_part_file_mode = 'r+b'
      open(new_part_file_name, 'w').close()

    else:
      # We need to create/truncate the dst partition file.
      new_part_file_mode = 'w+b'

    # Apply operations.
    with open(new_part_file_name, new_part_file_mode) as new_part_file:
      old_part_file = (open(old_part_file_name, 'r+b')
                       if old_part_file_name else None)
      try:
        self._ApplyOperations(operations, base_name, old_part_file,
                              new_part_file, new_part_info.size)
      finally:
        if old_part_file:
          old_part_file.close()

      # Truncate the result, if so instructed.
      if self.truncate_to_expected_size:
        new_part_file.seek(0, 2)
        if new_part_file.tell() > new_part_info.size:
          new_part_file.seek(new_part_info.size)
          new_part_file.truncate()

    # Verify the resulting partition.
    with open(new_part_file_name, 'rb') as new_part_file:
      _VerifySha256(new_part_file, new_part_info.hash,
                    'new ' + part_name, length=new_part_info.size)

  def Run(self, new_parts, old_parts=None):
    """Applier entry point, invoking all update operations.

    Args:
      new_parts: map of partition name to destination partition file name
      old_parts: map of partition name to source partition file name (optional)

    Raises:
      PayloadError if payload application failed.
    """
    if old_parts is None:
      old_parts = {}

    self.payload.ResetFile()

    new_part_info = {}
    old_part_info = {}
    install_operations = []

    manifest = self.payload.manifest
    for part in manifest.partitions:
      name = part.partition_name
      new_part_info[name] = part.new_partition_info
      old_part_info[name] = part.old_partition_info
      install_operations.append((name, part.operations))

    part_names = set(new_part_info.keys())  # Equivalently, old_part_info.keys()

    # Make sure the arguments are sane and match the payload.
    new_part_names = set(new_parts.keys())
    if new_part_names != part_names:
      raise PayloadError('missing dst partition(s) %s' %
                         ', '.join(part_names - new_part_names))

    old_part_names = set(old_parts.keys())
    if part_names - old_part_names:
      if self.payload.IsDelta():
        raise PayloadError('trying to apply a delta update without src '
                           'partition(s) %s' %
                           ', '.join(part_names - old_part_names))
    elif old_part_names == part_names:
      if self.payload.IsFull():
        raise PayloadError('trying to apply a full update onto src partitions')
    else:
      raise PayloadError('not all src partitions provided')

    for name, operations in install_operations:
      # Apply update to partition.
      self._ApplyToPartition(
          operations, name, '%s_install_operations' % name, new_parts[name],
          new_part_info[name], old_parts.get(name, None), old_part_info[name])