#
# Copyright (C) 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import copy
import os
import zipfile

import common
import test_utils
import validate_target_files

from images import EmptyImage, DataImage
from non_ab_ota import NonAbOtaPropertyFiles, WriteFingerprintAssertion, BlockDifference, DynamicPartitionsDifference, MakeRecoveryPatch
from test_utils import PropertyFilesTestCase


class NonAbOtaPropertyFilesTest(PropertyFilesTestCase):
  """Additional validity checks specialized for NonAbOtaPropertyFiles."""

  def setUp(self):
    common.OPTIONS.no_signing = False

  def test_init(self):
    property_files = NonAbOtaPropertyFiles()
    self.assertEqual('ota-property-files', property_files.name)
    self.assertEqual((), property_files.required)
    self.assertEqual((), property_files.optional)

  def test_Compute(self):
    entries = ()
    zip_file = self.construct_zip_package(entries)
    property_files = NonAbOtaPropertyFiles()
    with zipfile.ZipFile(zip_file) as zip_fp:
      property_files_string = property_files.Compute(zip_fp)

    tokens = self._parse_property_files_string(property_files_string)
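    # Even with no other entries, the property-files string is expected to
    # reserve tokens for 'metadata' and 'metadata.pb' (see test_Finalize).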
    self.assertEqual(2, len(tokens))
    self._verify_entries(zip_file, tokens, entries)

  def test_Finalize(self):
    entries = [
        'META-INF/com/android/metadata',
        'META-INF/com/android/metadata.pb',
    ]
    zip_file = self.construct_zip_package(entries)
    property_files = NonAbOtaPropertyFiles()
    with zipfile.ZipFile(zip_file) as zip_fp:
      raw_metadata = property_files.GetPropertyFilesString(
          zip_fp, reserve_space=False)
      property_files_string = property_files.Finalize(
          zip_fp, len(raw_metadata))
    tokens = self._parse_property_files_string(property_files_string)

    self.assertEqual(2, len(tokens))
    # 'META-INF/com/android/metadata' will be key'd as 'metadata'.
    entries[0] = 'metadata'
    entries[1] = 'metadata.pb'
    self._verify_entries(zip_file, tokens, entries)

  def test_Verify(self):
    entries = (
        'META-INF/com/android/metadata',
        'META-INF/com/android/metadata.pb',
    )
    zip_file = self.construct_zip_package(entries)
    property_files = NonAbOtaPropertyFiles()
    with zipfile.ZipFile(zip_file) as zip_fp:
      raw_metadata = property_files.GetPropertyFilesString(
          zip_fp, reserve_space=False)

      property_files.Verify(zip_fp, raw_metadata)


class NonAbOTATest(test_utils.ReleaseToolsTestCase):
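  """Tests for WriteFingerprintAssertion with and without OEM properties."""
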
  TEST_TARGET_INFO_DICT = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.device': 'product-device',
              'ro.build.fingerprint': 'build-fingerprint-target',
              'ro.build.version.incremental': 'build-version-incremental-target',
              'ro.build.version.sdk': '27',
              'ro.build.version.security_patch': '2017-12-01',
              'ro.build.date.utc': '1500000000'}
      )
  }
  TEST_INFO_DICT_USES_OEM_PROPS = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.name': 'product-name',
              'ro.build.thumbprint': 'build-thumbprint',
              'ro.build.bar': 'build-bar'}
      ),
      'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
          'vendor', {
              'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
      ),
      'property1': 'value1',
      'property2': 4096,
      'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
  }
  TEST_OEM_DICTS = [
      {
          'ro.product.brand': 'brand1',
          'ro.product.device': 'device1',
      },
      {
          'ro.product.brand': 'brand2',
          'ro.product.device': 'device2',
      },
      {
          'ro.product.brand': 'brand3',
          'ro.product.device': 'device3',
      },
  ]

  def test_WriteFingerprintAssertion_without_oem_props(self):
    target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
    source_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
    source_info_dict['build.prop'].build_props['ro.build.fingerprint'] = (
        'source-build-fingerprint')
    source_info = common.BuildInfo(source_info_dict, None)

    script_writer = test_utils.MockScriptWriter()
    WriteFingerprintAssertion(script_writer, target_info, source_info)
    self.assertEqual(
        [('AssertSomeFingerprint', 'source-build-fingerprint',
          'build-fingerprint-target')],
        script_writer.lines)

  def test_WriteFingerprintAssertion_with_source_oem_props(self):
    target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
    source_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)

    script_writer = test_utils.MockScriptWriter()
    WriteFingerprintAssertion(script_writer, target_info, source_info)
    self.assertEqual(
        [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
          'build-thumbprint')],
        script_writer.lines)

  def test_WriteFingerprintAssertion_with_target_oem_props(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    source_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)

    script_writer = test_utils.MockScriptWriter()
    WriteFingerprintAssertion(script_writer, target_info, source_info)
    self.assertEqual(
        [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
          'build-thumbprint')],
        script_writer.lines)

  def test_WriteFingerprintAssertion_with_both_oem_props(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
    source_info_dict['build.prop'].build_props['ro.build.thumbprint'] = (
        'source-build-thumbprint')
    source_info = common.BuildInfo(source_info_dict, self.TEST_OEM_DICTS)

    script_writer = test_utils.MockScriptWriter()
    WriteFingerprintAssertion(script_writer, target_info, source_info)
    self.assertEqual(
        [('AssertSomeThumbprint', 'build-thumbprint',
          'source-build-thumbprint')],
        script_writer.lines)


KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB


class MockBlockDifference(object):
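  """Mock BlockDifference; appends patch()/verify() markers to the script."""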

  def __init__(self, partition, tgt, src=None):
    self.partition = partition
    self.tgt = tgt
    self.src = src

  def WriteScript(self, script, _, progress=None,
                  write_verify_script=False):
    if progress:
      script.AppendExtra("progress({})".format(progress))
    script.AppendExtra("patch({});".format(self.partition))
    if write_verify_script:
      self.WritePostInstallVerifyScript(script)

  def WritePostInstallVerifyScript(self, script):
    script.AppendExtra("verify({});".format(self.partition))


class FakeSparseImage(object):
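  """Minimal stand-in for a sparse image, tracking only its size in blocks."""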

  def __init__(self, size):
    self.blocksize = 4096
    self.total_blocks = size // 4096
    assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)


class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
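  """Tests the script and op list written by DynamicPartitionsDifference."""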

  @staticmethod
  def get_op_list(output_path):
    with zipfile.ZipFile(output_path, allowZip64=True) as output_zip:
      with output_zip.open('dynamic_partitions_op_list') as op_list:
        return [line.decode().strip() for line in op_list.readlines()
                if not line.startswith(b'#')]

  def setUp(self):
    self.script = test_utils.MockScriptWriter()
    self.output_path = common.MakeTempFile(suffix='.zip')

  def test_full(self):
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor
super_partition_groups=group_foo
super_group_foo_group_size={group_size}
super_group_foo_partition_list=system vendor
""".format(group_size=4 * GiB).split("\n"))
    block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
                   MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]

    dp_diff = DynamicPartitionsDifference(target_info, block_diffs)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertEqual(str(self.script).strip(), """
assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
patch(system);
verify(system);
unmap_partition("system");
patch(vendor);
verify(vendor);
unmap_partition("vendor");
""".strip())

    lines = self.get_op_list(self.output_path)

    remove_all_groups = lines.index("remove_all_groups")
    add_group = lines.index("add_group group_foo 4294967296")
    add_vendor = lines.index("add vendor group_foo")
    add_system = lines.index("add system group_foo")
    resize_vendor = lines.index("resize vendor 1073741824")
    resize_system = lines.index("resize system 3221225472")

    self.assertLess(remove_all_groups, add_group,
                    "Should add groups after removing all groups")
    self.assertLess(add_group, min(add_vendor, add_system),
                    "Should add partitions after adding group")
    self.assertLess(add_system, resize_system,
                    "Should resize system after adding it")
    self.assertLess(add_vendor, resize_vendor,
                    "Should resize vendor after adding it")

  def test_inc_groups(self):
    source_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_bar group_baz
super_group_foo_group_size={group_foo_size}
super_group_bar_group_size={group_bar_size}
""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_baz group_qux
super_group_foo_group_size={group_foo_size}
super_group_baz_group_size={group_baz_size}
super_group_qux_group_size={group_qux_size}
""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
           group_qux_size=1 * GiB).split("\n"))

    dp_diff = DynamicPartitionsDifference(target_info,
                                          block_diffs=[],
                                          source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    lines = self.get_op_list(self.output_path)

    removed = lines.index("remove_group group_bar")
    shrunk = lines.index("resize_group group_foo 3221225472")
    grown = lines.index("resize_group group_baz 4294967296")
    added = lines.index("add_group group_qux 1073741824")

    self.assertLess(max(removed, shrunk),
                    min(grown, added),
                    "ops that remove / shrink partitions must precede ops that "
                    "grow / add partitions")

  def test_incremental(self):
    source_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product system_ext
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor product system_ext
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product odm
super_partition_groups=group_foo group_bar
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor odm
super_group_bar_group_size={group_bar_size}
super_group_bar_partition_list=product
""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))

    block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product", FakeSparseImage(1024 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("system_ext", None,
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
                                       src=None)]

    dp_diff = DynamicPartitionsDifference(target_info, block_diffs,
                                          source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    metadata_idx = self.script.lines.index(
        'assert(update_dynamic_partitions(package_extract_file('
        '"dynamic_partitions_op_list")));')
    self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
    self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
    for p in ("product", "system", "odm"):
      patch_idx = self.script.lines.index("patch({});".format(p))
      verify_idx = self.script.lines.index("verify({});".format(p))
      self.assertLess(metadata_idx, patch_idx,
                      "Should patch {} after updating metadata".format(p))
      self.assertLess(patch_idx, verify_idx,
                      "Should verify {} after patching".format(p))

    self.assertNotIn("patch(system_ext);", self.script.lines)

    lines = self.get_op_list(self.output_path)

    remove = lines.index("remove system_ext")
    move_product_out = lines.index("move product default")
    shrink = lines.index("resize vendor 536870912")
    shrink_group = lines.index("resize_group group_foo 3221225472")
    add_group_bar = lines.index("add_group group_bar 1073741824")
    add_odm = lines.index("add odm group_foo")
    grow_existing = lines.index("resize system 1610612736")
    grow_added = lines.index("resize odm 1073741824")
    move_product_in = lines.index("move product group_bar")

    max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
    min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)

    self.assertLess(max_idx_move_partition_out_foo, shrink_group,
                    "Must shrink group after partitions inside group are shrunk"
                    " / removed")

    self.assertLess(add_group_bar, move_product_in,
                    "Must add partitions to group after group is added")

    self.assertLess(max_idx_move_partition_out_foo,
                    min_idx_move_partition_in_foo,
                    "Must shrink partitions / remove partitions from group "
                    "before adding / moving partitions into group")

  def test_remove_partition(self):
    source_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
dynamic_partition_list=foo
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=foo
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
""".format(group_foo_size=4 * GiB).split("\n"))

    common.OPTIONS.info_dict = target_info
    common.OPTIONS.target_info_dict = target_info
    common.OPTIONS.source_info_dict = source_info
    common.OPTIONS.cache_size = 4 * 4096

    block_diffs = [BlockDifference("foo", EmptyImage(),
                                   src=DataImage("source", pad=True))]

    dp_diff = DynamicPartitionsDifference(target_info, block_diffs,
                                          source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertNotIn("block_image_update", str(self.script),
                     "Removed partition should not be patched.")

    lines = self.get_op_list(self.output_path)
    self.assertEqual(lines, ["remove foo"])


class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
414 """Checks the format of install-recovery.sh.
415
416 Its format should match between common.py and validate_target_files.py.
417 """
418
419 def setUp(self):
420 self._tempdir = common.MakeTempDir()
421 # Create a fake dict that contains the fstab info for boot&recovery.
422 self._info = {"fstab": {}}
423 fake_fstab = [
424 "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
425 "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
426 self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, fake_fstab)
427 # Construct the gzipped recovery.img and boot.img
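    # Likely generated with: echo -n "recovery" | gzip -f | hd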
    self.recovery_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
        0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
        0x08, 0x00, 0x00, 0x00
    ])
    # echo -n "boot" | gzip -f | hd
    self.boot_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
        0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
    ])

  def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
    loc = os.path.join(self._tempdir, prefix, name)
    if not os.path.exists(os.path.dirname(loc)):
      os.makedirs(os.path.dirname(loc))
    with open(loc, "wb") as f:
      f.write(data)

  def test_full_recovery(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    boot_image = common.File("boot.img", self.boot_data)
    self._info["full_recovery_image"] = "true"

    MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                      recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_recovery_from_boot(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
    boot_image = common.File("boot.img", self.boot_data)
    self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")

    MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                      recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
    # Validate 'recovery-from-boot' with bonus argument.
    self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
    MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                      recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
473