#
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

17"""Applying a Chrome OS update payload.
18
19This module is used internally by the main Payload class for applying an update
20payload. The interface for invoking the applier is as follows:
21
22 applier = PayloadApplier(payload)
23 applier.Run(...)
24
25"""
26
Allie Wood12f59aa2015-04-06 11:05:12 -070027from __future__ import print_function
28
Gilad Arnold553b0ec2013-01-26 01:00:39 -080029import array
30import bz2
31import hashlib
# The lzma module is not available everywhere, so ignore a failed import; in
# those environments it is not going to be used anyway. For example,
# 'cros flash' uses devserver code which eventually loads this file, but the
# lzma library is not included on the client test devices and does not need to
# be, since 'cros flash' never exercises it. Python 3.x includes lzma, but for
# backward compatibility with Python 2.7, backports-lzma is needed.
try:
  import lzma
except ImportError:
  try:
    from backports import lzma
  except ImportError:
    pass
import os
import subprocess
import sys
import tempfile

from update_payload import common
from update_payload.error import PayloadError


#
# Helper functions.
#
def _VerifySha256(file_obj, expected_hash, name, length=-1):
  """Verifies the SHA256 hash of a file.

  Args:
    file_obj: file object to read
    expected_hash: the hash digest we expect to be getting
    name: name string of this hash, for error reporting
    length: precise length of data to verify (optional)

  Raises:
    PayloadError if the computed hash doesn't match the expected one, or if it
    fails to read the specified length of data.
  """
  hasher = hashlib.sha256()
  block_length = 1024 * 1024
  max_length = length if length >= 0 else sys.maxint

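  # Hash in 1 MiB chunks so arbitrarily large partitions never have to be
  # held in memory all at once.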
  while max_length > 0:
    read_length = min(max_length, block_length)
    data = file_obj.read(read_length)
    if not data:
      break
    max_length -= len(data)
    hasher.update(data)

  if length >= 0 and max_length > 0:
    raise PayloadError(
        'insufficient data (%d instead of %d) when verifying %s' %
        (length - max_length, length, name))

  actual_hash = hasher.digest()
  if actual_hash != expected_hash:
    raise PayloadError('%s hash (%s) not as expected (%s)' %
                       (name, common.FormatSha256(actual_hash),
                        common.FormatSha256(expected_hash)))


def _ReadExtents(file_obj, extents, block_size, max_length=-1):
  """Reads data from file as defined by extent sequence.

  This tries to be efficient by not copying data as it is read in chunks.

  Args:
    file_obj: file object
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    max_length: maximum length to read (optional)

  Returns:
    A character array containing the concatenated read data.
  """
  data = array.array('c')
  if max_length < 0:
    max_length = sys.maxint
  for ex in extents:
    if max_length == 0:
      break
    read_length = min(max_length, ex.num_blocks * block_size)

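    # array.fromfile() appends the bytes straight from the file into the
    # array, avoiding an intermediate string copy.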
    file_obj.seek(ex.start_block * block_size)
    data.fromfile(file_obj, read_length)

    max_length -= read_length

  return data


def _WriteExtents(file_obj, data, extents, block_size, base_name):
  """Writes data to file as defined by extent sequence.

  This tries to be efficient by not copying data as it is written in chunks.

  Args:
    file_obj: file object
    data: data to write
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    base_name: name string of extent sequence for error reporting

  Raises:
    PayloadError when things don't add up.
  """
  data_offset = 0
  data_length = len(data)
  for ex, ex_name in common.ExtentIter(extents, base_name):
    if not data_length:
      raise PayloadError('%s: more write extents than data' % ex_name)
    write_length = min(data_length, ex.num_blocks * block_size)

    file_obj.seek(ex.start_block * block_size)
    data_view = buffer(data, data_offset, write_length)
    file_obj.write(data_view)

    data_offset += write_length
    data_length -= write_length

  if data_length:
    raise PayloadError('%s: more data than write extents' % base_name)


def _ExtentsToBspatchArg(extents, block_size, base_name, data_length=-1):
  """Translates an extent sequence into a bspatch-compatible string argument.

  Args:
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    base_name: name string of extent sequence for error reporting
    data_length: the actual total length of the data in bytes (optional)

  Returns:
    A tuple consisting of (i) a string of the form
    "off_1:len_1,...,off_n:len_n", (ii) an offset where zero padding is needed
    for filling the last extent, (iii) the length of the padding (zero means no
    padding is needed and the extents cover the full length of data).

  Raises:
    PayloadError if data_length is too short or too long.
  """
  arg = ''
  pad_off = pad_len = 0
  if data_length < 0:
    data_length = sys.maxint
  for ex, ex_name in common.ExtentIter(extents, base_name):
    if not data_length:
      raise PayloadError('%s: more extents than total data length' % ex_name)

    start_byte = ex.start_block * block_size
    num_bytes = ex.num_blocks * block_size
    if data_length < num_bytes:
      # We're only padding a real extent.
      pad_off = start_byte + data_length
      pad_len = num_bytes - data_length
      num_bytes = data_length

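    # Append this extent as "offset:length"; the expression (arg and ',')
    # yields '' while arg is still empty and ',' otherwise, so entries end up
    # comma-separated.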
    arg += '%s%d:%d' % (arg and ',', start_byte, num_bytes)
    data_length -= num_bytes

  if data_length:
    raise PayloadError('%s: extents not covering full data length' % base_name)

  return arg, pad_off, pad_len


#
# Payload application.
#
class PayloadApplier(object):
  """Applying an update payload.

  This is a short-lived object whose purpose is to isolate the logic used for
  applying an update payload.
  """

  def __init__(self, payload, bsdiff_in_place=True, bspatch_path=None,
               puffpatch_path=None, truncate_to_expected_size=True):
    """Initialize the applier.

    Args:
      payload: the payload object to check
      bsdiff_in_place: whether to perform BSDIFF operation in-place (optional)
      bspatch_path: path to the bspatch binary (optional)
      puffpatch_path: path to the puffpatch binary (optional)
      truncate_to_expected_size: whether to truncate the resulting partitions
                                 to their expected sizes, as specified in the
                                 payload (optional)
    """
    assert payload.is_init, 'uninitialized update payload'
    self.payload = payload
    self.block_size = payload.manifest.block_size
    self.minor_version = payload.manifest.minor_version
    self.bsdiff_in_place = bsdiff_in_place
    self.bspatch_path = bspatch_path or 'bspatch'
    self.puffpatch_path = puffpatch_path or 'puffin'
    self.truncate_to_expected_size = truncate_to_expected_size

  def _ApplyReplaceOperation(self, op, op_name, out_data, part_file, part_size):
    """Applies a REPLACE{,_BZ,_XZ} operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      out_data: the data to be written
      part_file: the partition file object
      part_size: the size of the partition

    Raises:
      PayloadError if something goes wrong.
    """
    block_size = self.block_size
    data_length = len(out_data)

    # Decompress data if needed.
    if op.type == common.OpType.REPLACE_BZ:
      out_data = bz2.decompress(out_data)
      data_length = len(out_data)
    elif op.type == common.OpType.REPLACE_XZ:
      # pylint: disable=no-member
      out_data = lzma.decompress(out_data)
      data_length = len(out_data)

    # Write data to blocks specified in dst extents.
    data_start = 0
    for ex, ex_name in common.ExtentIter(op.dst_extents,
                                         '%s.dst_extents' % op_name):
      start_block = ex.start_block
      num_blocks = ex.num_blocks
      count = num_blocks * block_size

      data_end = data_start + count

      # Make sure we're not running past partition boundary.
      if (start_block + num_blocks) * block_size > part_size:
        raise PayloadError(
            '%s: extent (%s) exceeds partition size (%d)' %
            (ex_name, common.FormatExtent(ex, block_size),
             part_size))

      # Make sure that we have enough data to write.
      if data_end >= data_length + block_size:
        raise PayloadError(
            '%s: more dst blocks than data (even with padding)' % op_name)

      # Pad with zeros if necessary.
      if data_end > data_length:
        padding = data_end - data_length
        out_data += '\0' * padding

      self.payload.payload_file.seek(start_block * block_size)
      part_file.seek(start_block * block_size)
      part_file.write(out_data[data_start:data_end])

      data_start += count

    # Make sure we wrote all data.
    if data_start < data_length:
      raise PayloadError('%s: wrote fewer bytes (%d) than expected (%d)' %
                         (op_name, data_start, data_length))

  def _ApplyZeroOperation(self, op, op_name, part_file):
    """Applies a ZERO operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      part_file: the partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    block_size = self.block_size
    base_name = '%s.dst_extents' % op_name

    # Iterate over the extents and write zeros.
    # pylint: disable=unused-variable
    for ex, ex_name in common.ExtentIter(op.dst_extents, base_name):
      part_file.seek(ex.start_block * block_size)
      part_file.write('\0' * (ex.num_blocks * block_size))

  def _ApplySourceCopyOperation(self, op, op_name, old_part_file,
                                new_part_file):
    """Applies a SOURCE_COPY operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      old_part_file: the old partition file object
      new_part_file: the new partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    if not old_part_file:
      raise PayloadError(
          '%s: no source partition file provided for operation type (%d)' %
          (op_name, op.type))

    block_size = self.block_size

    # Gather input raw data from src extents.
    in_data = _ReadExtents(old_part_file, op.src_extents, block_size)

    # Dump extracted data to dst extents.
    _WriteExtents(new_part_file, in_data, op.dst_extents, block_size,
                  '%s.dst_extents' % op_name)

  def _BytesInExtents(self, extents, base_name):
    """Counts the length of extents in bytes.

    Args:
      extents: The list of Extents.
      base_name: For error reporting.

    Returns:
      The number of bytes in extents.
    """

    length = 0
    # pylint: disable=unused-variable
    for ex, ex_name in common.ExtentIter(extents, base_name):
      length += ex.num_blocks * self.block_size
    return length

  def _ApplyDiffOperation(self, op, op_name, patch_data, old_part_file,
                          new_part_file):
    """Applies a SOURCE_BSDIFF, BROTLI_BSDIFF or PUFFDIFF operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      patch_data: the binary patch content
      old_part_file: the source partition file object
      new_part_file: the target partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    if not old_part_file:
      raise PayloadError(
          '%s: no source partition file provided for operation type (%d)' %
          (op_name, op.type))

    block_size = self.block_size

    # Dump patch data to file.
    with tempfile.NamedTemporaryFile(delete=False) as patch_file:
      patch_file_name = patch_file.name
      patch_file.write(patch_data)

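    # When both partition objects are backed by real OS file descriptors, the
    # external patch tool can be pointed at them directly via /dev/fd/<n> and
    # patch in place, instead of staging the data through temporary files.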
    if (hasattr(new_part_file, 'fileno') and
        ((not old_part_file) or hasattr(old_part_file, 'fileno'))):
      # Construct input and output extents argument for bspatch.

      in_extents_arg, _, _ = _ExtentsToBspatchArg(
          op.src_extents, block_size, '%s.src_extents' % op_name,
          data_length=op.src_length if op.src_length else
          self._BytesInExtents(op.src_extents, '%s.src_extents' % op_name))
      out_extents_arg, pad_off, pad_len = _ExtentsToBspatchArg(
          op.dst_extents, block_size, '%s.dst_extents' % op_name,
          data_length=op.dst_length if op.dst_length else
          self._BytesInExtents(op.dst_extents, '%s.dst_extents' % op_name))

      new_file_name = '/dev/fd/%d' % new_part_file.fileno()
      # Diff from source partition.
      old_file_name = '/dev/fd/%d' % old_part_file.fileno()

      if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF):
        # Invoke bspatch on partition file with extents args.
        bspatch_cmd = [self.bspatch_path, old_file_name, new_file_name,
                       patch_file_name, in_extents_arg, out_extents_arg]
        subprocess.check_call(bspatch_cmd)
      elif op.type == common.OpType.PUFFDIFF:
        # Invoke puffpatch on partition file with extents args.
        puffpatch_cmd = [self.puffpatch_path,
                         "--operation=puffpatch",
                         "--src_file=%s" % old_file_name,
                         "--dst_file=%s" % new_file_name,
                         "--patch_file=%s" % patch_file_name,
                         "--src_extents=%s" % in_extents_arg,
                         "--dst_extents=%s" % out_extents_arg]
        subprocess.check_call(puffpatch_cmd)
      else:
        raise PayloadError('Unknown operation %s' % op.type)

      # Pad with zeros past the total output length.
      if pad_len:
        new_part_file.seek(pad_off)
        new_part_file.write('\0' * pad_len)
    else:
      # Gather input raw data and write to a temp file.
      input_part_file = old_part_file if old_part_file else new_part_file
      in_data = _ReadExtents(input_part_file, op.src_extents, block_size,
                             max_length=op.src_length if op.src_length else
                             self._BytesInExtents(op.src_extents,
                                                  '%s.src_extents' % op_name))
      with tempfile.NamedTemporaryFile(delete=False) as in_file:
        in_file_name = in_file.name
        in_file.write(in_data)

      # Allocate temporary output file.
      with tempfile.NamedTemporaryFile(delete=False) as out_file:
        out_file_name = out_file.name

      if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF):
        # Invoke bspatch.
        bspatch_cmd = [self.bspatch_path, in_file_name, out_file_name,
                       patch_file_name]
        subprocess.check_call(bspatch_cmd)
      elif op.type == common.OpType.PUFFDIFF:
        # Invoke puffpatch.
        puffpatch_cmd = [self.puffpatch_path,
                         "--operation=puffpatch",
                         "--src_file=%s" % in_file_name,
                         "--dst_file=%s" % out_file_name,
                         "--patch_file=%s" % patch_file_name]
        subprocess.check_call(puffpatch_cmd)
      else:
        raise PayloadError('Unknown operation %s' % op.type)

      # Read output.
      with open(out_file_name, 'rb') as out_file:
        out_data = out_file.read()
        if len(out_data) != op.dst_length:
          raise PayloadError(
              '%s: actual patched data length (%d) not as expected (%d)' %
              (op_name, len(out_data), op.dst_length))

      # Write output back to partition, with padding.
      unaligned_out_len = len(out_data) % block_size
      if unaligned_out_len:
        out_data += '\0' * (block_size - unaligned_out_len)
      _WriteExtents(new_part_file, out_data, op.dst_extents, block_size,
                    '%s.dst_extents' % op_name)

      # Delete input/output files.
      os.remove(in_file_name)
      os.remove(out_file_name)

    # Delete patch file.
    os.remove(patch_file_name)

  def _ApplyOperations(self, operations, base_name, old_part_file,
                       new_part_file, part_size):
    """Applies a sequence of update operations to a partition.

    Args:
      operations: the sequence of operations
      base_name: the name of the operation sequence
      old_part_file: the old partition file object, open for reading/writing
      new_part_file: the new partition file object, open for reading/writing
      part_size: the partition size

    Raises:
      PayloadError if anything goes wrong while processing the payload.
    """
    for op, op_name in common.OperationIter(operations, base_name):
      # Read data blob.
      data = self.payload.ReadDataBlob(op.data_offset, op.data_length)

      if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ,
                     common.OpType.REPLACE_XZ):
        self._ApplyReplaceOperation(op, op_name, data, new_part_file, part_size)
      elif op.type == common.OpType.ZERO:
        self._ApplyZeroOperation(op, op_name, new_part_file)
      elif op.type == common.OpType.SOURCE_COPY:
        self._ApplySourceCopyOperation(op, op_name, old_part_file,
                                       new_part_file)
      elif op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.PUFFDIFF,
                       common.OpType.BROTLI_BSDIFF):
        self._ApplyDiffOperation(op, op_name, data, old_part_file,
                                 new_part_file)
      else:
        raise PayloadError('%s: unknown operation type (%d)' %
                           (op_name, op.type))

  def _ApplyToPartition(self, operations, part_name, base_name,
                        new_part_file_name, new_part_info,
                        old_part_file_name=None, old_part_info=None):
    """Applies an update to a partition.

    Args:
      operations: the sequence of update operations to apply
      part_name: the name of the partition, for error reporting
      base_name: the name of the operation sequence
      new_part_file_name: file name to write partition data to
      new_part_info: size and expected hash of dest partition
      old_part_file_name: file name of source partition (optional)
      old_part_info: size and expected hash of source partition (optional)

    Raises:
      PayloadError if anything goes wrong with the update.
    """
    # Do we have a source partition?
    if old_part_file_name:
      # Verify the source partition.
      with open(old_part_file_name, 'rb') as old_part_file:
        _VerifySha256(old_part_file, old_part_info.hash,
                      'old ' + part_name, length=old_part_info.size)
      new_part_file_mode = 'r+b'
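      # Create (or truncate) the destination file so that it exists before it
      # is reopened with mode 'r+b' below.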
      open(new_part_file_name, 'w').close()

    else:
      # We need to create/truncate the dst partition file.
      new_part_file_mode = 'w+b'

    # Apply operations.
    with open(new_part_file_name, new_part_file_mode) as new_part_file:
      old_part_file = (open(old_part_file_name, 'r+b')
                       if old_part_file_name else None)
      try:
        self._ApplyOperations(operations, base_name, old_part_file,
                              new_part_file, new_part_info.size)
      finally:
        if old_part_file:
          old_part_file.close()

      # Truncate the result, if so instructed.
      if self.truncate_to_expected_size:
        new_part_file.seek(0, 2)
        if new_part_file.tell() > new_part_info.size:
          new_part_file.seek(new_part_info.size)
          new_part_file.truncate()

    # Verify the resulting partition.
    with open(new_part_file_name, 'rb') as new_part_file:
      _VerifySha256(new_part_file, new_part_info.hash,
                    'new ' + part_name, length=new_part_info.size)

  def Run(self, new_parts, old_parts=None):
    """Applier entry point, invoking all update operations.

    Args:
      new_parts: map of partition name to dest partition file
      old_parts: map of partition name to source partition file (optional)

    Raises:
      PayloadError if payload application failed.
    """
    if old_parts is None:
      old_parts = {}

    self.payload.ResetFile()

    new_part_info = {}
    old_part_info = {}
    install_operations = []

    manifest = self.payload.manifest
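    # Collect the expected partition info and the per-partition install
    # operations from the payload manifest.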
    for part in manifest.partitions:
      name = part.partition_name
      new_part_info[name] = part.new_partition_info
      old_part_info[name] = part.old_partition_info
      install_operations.append((name, part.operations))

    part_names = set(new_part_info.keys())  # Equivalently, old_part_info.keys()

    # Make sure the arguments are sane and match the payload.
    new_part_names = set(new_parts.keys())
    if new_part_names != part_names:
      raise PayloadError('missing dst partition(s) %s' %
                         ', '.join(part_names - new_part_names))

    old_part_names = set(old_parts.keys())
    if part_names - old_part_names:
      if self.payload.IsDelta():
        raise PayloadError('trying to apply a delta update without src '
                           'partition(s) %s' %
                           ', '.join(part_names - old_part_names))
    elif old_part_names == part_names:
      if self.payload.IsFull():
        raise PayloadError('trying to apply a full update onto src partitions')
    else:
      raise PayloadError('not all src partitions provided')

    for name, operations in install_operations:
      # Apply update to partition.
      self._ApplyToPartition(
          operations, name, '%s_install_operations' % name, new_parts[name],
          new_part_info[name], old_parts.get(name, None), old_part_info[name])