#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import copy
import os
import subprocess
import tempfile
import time
import zipfile
from hashlib import sha1

import common
import test_utils
import validate_target_files
from rangelib import RangeSet


KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB

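# Yields a bit over 2 GiB of data without holding it all in memory: each 4 MiB
# step produces one 4 KiB random block followed by a zero-filled remainder, so
# the stream has "holes" while still exceeding the 2 GiB zip threshold.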
def get_2gb_string():
  size = int(2 * GiB + 1)
  block_size = 4 * KiB
  step_size = 4 * MiB
  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
  for _ in range(0, size, step_size):
    yield os.urandom(block_size)
    yield '\0' * (step_size - block_size)


class CommonZipTest(test_utils.ReleaseToolsTestCase):

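  # Checks a written entry against expectations: POSIX mode bits, the fixed
  # (2009-01-01) timestamp, the compression type and the SHA-1 of the
  # contents, re-reading the archive from disk to make sure it was flushed.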
  def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
              test_file_name=None, expected_stat=None, expected_mode=0o644,
              expected_compress_type=zipfile.ZIP_STORED):
    # Verify the stat if present.
    if test_file_name is not None:
      new_stat = os.stat(test_file_name)
      self.assertEqual(int(expected_stat.st_mode), int(new_stat.st_mode))
      self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))

    # Reopen the zip file to verify.
    zip_file = zipfile.ZipFile(zip_file_name, "r")

    # Verify the timestamp.
    info = zip_file.getinfo(arcname)
    self.assertEqual(info.date_time, (2009, 1, 1, 0, 0, 0))

    # Verify the file mode.
    mode = (info.external_attr >> 16) & 0o777
    self.assertEqual(mode, expected_mode)

    # Verify the compress type.
    self.assertEqual(info.compress_type, expected_compress_type)

    # Verify the zip contents.
    entry = zip_file.open(arcname)
    sha1_hash = sha1()
    for chunk in iter(lambda: entry.read(4 * MiB), ''):
      sha1_hash.update(chunk)
    self.assertEqual(expected_hash, sha1_hash.hexdigest())
    self.assertIsNone(zip_file.testzip())

  def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
    extra_zipwrite_args = dict(extra_zipwrite_args or {})

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    # File names within an archive strip the leading slash.
    arcname = extra_zipwrite_args.get("arcname", test_file_name)
    if arcname[0] == "/":
      arcname = arcname[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in contents:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = extra_zipwrite_args.get("perms", 0o644)
      expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                       zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                   test_file_name, expected_stat, expected_mode,
                   expected_compress_type)
    finally:
      os.remove(test_file_name)
      os.remove(zip_file_name)

  def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
        arcname = zinfo_or_arcname
        expected_mode = extra_args.get("perms", 0o644)
      else:
        arcname = zinfo_or_arcname.filename
        expected_mode = extra_args.get("perms",
                                       zinfo_or_arcname.external_attr >> 16)

      common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                   expected_mode=expected_mode,
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)

  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    arcname_large = test_file_name
    arcname_small = "bar"

    # File names within an archive strip the leading slash.
    if arcname_large[0] == "/":
      arcname_large = arcname_large[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in large:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = 0o644
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_args)
      common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
      common.ZipClose(zip_file)

      # Verify the contents written by ZipWrite().
      self._verify(zip_file, zip_file_name, arcname_large,
                   sha1_hash.hexdigest(), test_file_name, expected_stat,
                   expected_mode, expected_compress_type)

      # Verify the contents written by ZipWriteStr().
      self._verify(zip_file, zip_file_name, arcname_small,
                   sha1(small).hexdigest(),
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)
      os.remove(test_file_name)

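  # Checks that zipfile.ZIP64_LIMIT is back at its stock value after func
  # runs; the common.py zip helpers are expected to bump the limit temporarily
  # (and restore it) so that entries larger than 2 GiB can be written.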
  def _test_reset_ZIP64_LIMIT(self, func, *args):
    default_limit = (1 << 31) - 1
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
    func(*args)
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)

  def test_ZipWrite(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents)

  def test_ZipWrite_with_opts(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o777,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o700,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWrite_large_file(self):
    file_contents = get_2gb_string()
    self._test_ZipWrite(file_contents, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWrite_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")

  def test_ZipWriteStr(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string)

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string)

    # Timestamp in the zinfo should be overwritten.
    zinfo.date_time = (2015, 3, 1, 15, 30, 0)
    self._test_ZipWriteStr(zinfo, random_string)

  def test_ZipWriteStr_with_opts(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string, {
        "perms": 0o700,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr("bar", random_string, {
        "compress_type": zipfile.ZIP_STORED,
    })

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o600,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWriteStr_large_file(self):
    # zipfile.writestr() doesn't work when the str size is over 2GiB even with
    # the workaround. We will only test the case of writing a string into a
    # large archive.
    long_string = get_2gb_string()
    short_string = os.urandom(1024)
    self._test_ZipWriteStr_large_file(long_string, short_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, "foo", "")
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, "")

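  # Regression test for b/21309935: ZipWriteStr() should pick a sensible
  # default mode for both string arcnames and ZipInfo inputs, and an explicit
  # perms= argument should win over whatever the ZipInfo carries.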
  def test_bug21309935(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    try:
      random_string = os.urandom(1024)
      zip_file = zipfile.ZipFile(zip_file_name, "w")
      # Default perms should be 0o644 when passing the filename.
      common.ZipWriteStr(zip_file, "foo", random_string)
      # Honor the specified perms.
      common.ZipWriteStr(zip_file, "bar", random_string, perms=0o755)
      # The perms in zinfo should be untouched.
      zinfo = zipfile.ZipInfo(filename="baz")
      zinfo.external_attr = 0o740 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string)
      # Explicitly specified perms take precedence.
      zinfo = zipfile.ZipInfo(filename="qux")
      zinfo.external_attr = 0o700 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, "foo",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o644)
      self._verify(zip_file, zip_file_name, "bar",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o755)
      self._verify(zip_file, zip_file_name, "baz",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o740)
      self._verify(zip_file, zip_file_name, "qux",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o400)
    finally:
      os.remove(zip_file_name)

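  # ZipDelete() removes entries from an existing archive in place, and is
  # expected to raise common.ExternalError when asked to delete an entry that
  # is no longer present, leaving the remaining entries untouched.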
  def test_ZipDelete(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    output_zip = zipfile.ZipFile(zip_file.name, 'w',
                                 compression=zipfile.ZIP_DEFLATED)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(os.urandom(1024))
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
      common.ZipClose(output_zip)
    zip_file.close()

    try:
      common.ZipDelete(zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      self.assertRaises(
          common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test3'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertFalse('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)
    finally:
      os.remove(zip_file.name)


class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase):
  """Tests the APK utils related functions."""

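  # Sample META/apkcerts.txt contents. Each line maps an APK name to the
  # certificate / private key pair used to sign it; "PRESIGNED" means the APK
  # ships as-is, and the optional compressed="..." attribute records the
  # extension used for compressed (stored) APKs.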
  APKCERTS_TXT1 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/target/product/security/platform.x509.pem"'
      ' private_key="build/target/product/security/platform.pk8"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
  )

  APKCERTS_CERTMAP1 = {
      'RecoveryLocalizer.apk' : 'certs/devkey',
      'Settings.apk' : 'build/target/product/security/platform',
      'TV.apk' : 'PRESIGNED',
  }

  APKCERTS_TXT2 = (
      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
  )

  APKCERTS_CERTMAP2 = {
      'Compressed1.apk' : 'certs/compressed1',
      'Compressed2a.apk' : 'certs/compressed2',
      'Compressed2b.apk' : 'certs/compressed2',
      'Compressed3.apk' : 'certs/compressed3',
  }

  APKCERTS_TXT3 = (
      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
  )

  APKCERTS_CERTMAP3 = {
      'Compressed4.apk' : 'certs/compressed4',
  }

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @staticmethod
  def _write_apkcerts_txt(apkcerts_txt, additional=None):
    if additional is None:
      additional = []
    target_files = common.MakeTempFile(suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
      for entry in additional:
        target_files_zip.writestr(entry, '')
    return target_files

  def test_ReadApkCerts_NoncompressedApks(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
    self.assertIsNone(ext)

  def test_ReadApkCerts_CompressedApks(self):
    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
    # not stored in '.gz' format, so it shouldn't be considered installed.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
    self.assertEqual('.gz', ext)

    # Alternative case with '.xz'.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
    self.assertEqual('.xz', ext)

  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    certmap_merged = self.APKCERTS_CERTMAP1.copy()
    certmap_merged.update(self.APKCERTS_CERTMAP2)
    self.assertDictEqual(certmap_merged, certmap)
    self.assertEqual('.gz', ext)

  def test_ReadApkCerts_MultipleCompressionMethods(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_MismatchingKeys(self):
    malformed_apkcerts_txt = (
        'name="App1.apk" certificate="certs/cert1.x509.pem"'
        ' private_key="certs/cert2.pk8"\n'
    )
    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

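  # ExtractPublicKey() should return the PEM public key that corresponds to an
  # X.509 certificate; the expected output is checked in as
  # testdata/testkey.pubkey.pem.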
  def test_ExtractPublicKey(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(pubkey, 'rb') as pubkey_fp:
      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))

  def test_ExtractPublicKey_invalidInput(self):
    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)

  def test_ParseCertificate(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')

    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    expected, _ = proc.communicate()
    self.assertEqual(0, proc.returncode)

    with open(cert) as cert_fp:
      actual = common.ParseCertificate(cert_fp.read())
    self.assertEqual(expected, actual)

  def test_GetMinSdkVersion(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual('24', common.GetMinSdkVersion(test_app))

  def test_GetMinSdkVersion_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')

  def test_GetMinSdkVersionInt(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))

  def test_GetMinSdkVersionInt_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
        {})


class CommonUtilsTest(test_utils.ReleaseToolsTestCase):

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

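  # The (chunk_type, block_count) tuples below feed
  # test_utils.construct_sparse_image(); in the Android sparse image format
  # 0xCAC1 is a raw chunk, 0xCAC2 a fill chunk and 0xCAC3 a don't-care chunk,
  # all counted in 4096-byte blocks.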
  def test_GetSparseImage_emptyBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('IMAGES/system.map', '')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("1-5 9-12"),
        },
        sparse_image.file_map)

  def test_GetSparseImage_invalidImageName(self):
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'system2', None, None, False)
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'unknown', None, None, False)

  def test_GetSparseImage_missingBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  def test_GetSparseImage_sharedBlocks_notAllowed(self):
    """Tests the case of having overlapping blocks but disallowed."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  def test_GetSparseImage_sharedBlocks_allowed(self):
    """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      # Construct an image with a care_map of "0-5 9-12".
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("6-8 13-15"),
            '/system/file1': RangeSet("1-5 9-10"),
            '/system/file2': RangeSet("11-12"),
        },
        sparse_image.file_map)

    # '/system/file2' should be marked with 'uses_shared_blocks', but not with
    # 'incomplete'.
    self.assertTrue(
        sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
    self.assertNotIn(
        'incomplete', sparse_image.file_map['/system/file2'].extra)

    # All other entries should look normal without any tags.
    self.assertFalse(sparse_image.file_map['__COPY'].extra)
    self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
    self.assertFalse(sparse_image.file_map['/system/file1'].extra)

  def test_GetSparseImage_incompleteRanges(self):
    """Tests the case of ext4 images with holes."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])

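  # On system-as-root targets the block map may list paths with an extra
  # leading slash (e.g. '//system/file1'); GetSparseImage() should still match
  # them up with the files under SYSTEM/ (or ROOT/ for non-system entries).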
  def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12',
              '/system/app/file3 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
      # '/system/app/file3' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
    self.assertTrue(
        sparse_image.file_map['/system/app/file3'].extra['incomplete'])

  def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//init.rc 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/init.rc' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])

  def test_GetSparseImage_fileNotFound(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

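  # GetAvbChainedPartitionArg() should produce a string of the form
  # "<partition>:<rollback_index_location>:<path-to-public-key>", deriving the
  # AVB public key from a private key when only that is configured.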
  def test_GetAvbChainedPartitionArg(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('system', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  def test_GetAvbChainedPartitionArg_withPrivateKey(self):
    key = os.path.join(self.testdata_dir, 'testkey.key')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_product_key_path': key,
        'avb_product_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('product', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('product', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': 'does-not-exist',
        'avb_system_rollback_index_location': 2,
    }
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    args = common.GetAvbChainedPartitionArg(
        'system', info_dict, pubkey).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  def test_GetAvbChainedPartitionArg_invalidKey(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    self.assertRaises(
        common.ExternalError, common.GetAvbChainedPartitionArg, 'system',
        info_dict)

  INFO_DICT_DEFAULT = {
      'recovery_api_version': 3,
      'fstab_version': 2,
      'system_root_image': 'true',
      'no_recovery' : 'true',
      'recovery_as_boot': 'true',
  }

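  # Builds a minimal target_files zip for the LoadInfoDict() tests:
  # META/misc_info.txt generated from info_dict, a one-line recovery fstab at
  # fstab_path, and a placeholder META/file_contexts entry.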
  @staticmethod
  def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      info_values = ''.join(
          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.iteritems())])
      common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)

      FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
      if info_dict.get('system_root_image') == 'true':
        fstab_values = FSTAB_TEMPLATE.format('/')
      else:
        fstab_values = FSTAB_TEMPLATE.format('/system')
      common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)

      common.ZipWriteStr(
          target_files_zip, 'META/file_contexts', 'file-contexts')
    return target_files

  def test_LoadInfoDict(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_dirInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self):
    # Use the legacy fstab location, matching the zip-input variant above.
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_systemRootImageFalse(self):
    # Devices using neither system-as-root nor recovery-as-boot. Non-A/B
    # devices launched prior to P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['system_root_image']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertNotIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_recoveryAsBootFalse(self):
    # Devices using system-as-root, but with a standalone recovery image.
    # Non-A/B devices launched since P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_noRecoveryTrue(self):
    # Device doesn't have a recovery partition at all.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIsNone(loaded_dict['fstab'])

  def test_LoadInfoDict_missingMetaMiscInfoTxt(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    common.ZipDelete(target_files, 'META/misc_info.txt')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)

  def test_LoadInfoDict_repacking(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped, True)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])
    self.assertEqual(
        os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
    self.assertEqual(
        os.path.join(unzipped, 'META', 'root_filesystem_config.txt'),
        loaded_dict['root_fs_config'])

  def test_LoadInfoDict_repackingWithZipFileInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(
          AssertionError, common.LoadInfoDict, target_files_zip, True)


class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
  """Checks the format of install-recovery.sh.

  Its format should match between common.py and validate_target_files.py.
  """
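
  # For reference (not asserted verbatim here): install-recovery.sh is built
  # around applypatch. It first checks whether the recovery partition already
  # holds the expected image; if not, it either flashes the full recovery
  # image or recreates it from the boot partition plus a recovery-from-boot
  # patch (optionally with a bonus resource file). The exact command layout is
  # whatever common.MakeRecoveryPatch() emits and
  # validate_target_files.ValidateInstallRecoveryScript() accepts.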

  def setUp(self):
    self._tempdir = common.MakeTempDir()
    # Create a dummy dict that contains the fstab info for boot & recovery.
    self._info = {"fstab" : {}}
    dummy_fstab = [
        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
    self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab)
    # Construct the gzipped recovery.img and boot.img.
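    # (Both payloads appear to be generated the same way as boot_data below:
    # a short marker string piped through gzip.)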
    self.recovery_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
        0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
        0x08, 0x00, 0x00, 0x00
    ])
    # echo -n "boot" | gzip -f | hd
    self.boot_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
        0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
    ])

  def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
    loc = os.path.join(self._tempdir, prefix, name)
    if not os.path.exists(os.path.dirname(loc)):
      os.makedirs(os.path.dirname(loc))
    with open(loc, "w+") as f:
      f.write(data)

  def test_full_recovery(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    boot_image = common.File("boot.img", self.boot_data)
    self._info["full_recovery_image"] = "true"

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  def test_recovery_from_boot(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
    boot_image = common.File("boot.img", self.boot_data)
    self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
    # Validate 'recovery-from-boot' with bonus argument.
    self._out_tmp_sink("etc/recovery-resource.dat", "bonus", "SYSTEM")
    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

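# Lightweight stand-ins used by DynamicPartitionsDifferenceTest below, so the
# dynamic-partition script generation can be exercised without real edify
# scripts, block differences or sparse images.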
class MockScriptWriter(object):
  """A class that mocks edify_generator.EdifyGenerator."""

  def __init__(self, enable_comments=False):
    self.lines = []
    self.enable_comments = enable_comments

  def Comment(self, comment):
    if self.enable_comments:
      self.lines.append("# {}".format(comment))

  def AppendExtra(self, extra):
    self.lines.append(extra)

  def __str__(self):
    return "\n".join(self.lines)
|  | 1006 |  | 
|  | 1007 |  | 
class MockBlockDifference(object):
  """A class that mocks common.BlockDifference."""

  def __init__(self, partition, tgt, src=None):
    self.partition = partition
    self.tgt = tgt
    self.src = src

  def WriteScript(self, script, _, progress=None,
                  write_verify_script=False):
    if progress:
      script.AppendExtra("progress({})".format(progress))
    script.AppendExtra("patch({});".format(self.partition))
    if write_verify_script:
      self.WritePostInstallVerifyScript(script)

  def WritePostInstallVerifyScript(self, script):
    script.AppendExtra("verify({});".format(self.partition))
|  | 1022 |  | 
|  | 1023 |  | 
class FakeSparseImage(object):
  """A minimal stand-in for a sparse image; these tests only need the block
  size and the total block count."""

  def __init__(self, size):
    assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)
    self.blocksize = 4096
    self.total_blocks = size // 4096
|  | 1029 |  | 
|  | 1030 |  | 
|  | 1031 | class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase): | 
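  """Checks the edify script and the dynamic_partitions_op_list generated by
  common.DynamicPartitionsDifference."""
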
  @staticmethod
  def get_op_list(output_path):
    with zipfile.ZipFile(output_path, 'r') as output_zip:
      with output_zip.open("dynamic_partitions_op_list") as op_list:
        # ZipFile.open() yields bytes; decode each line so the comparisons
        # against str op names below work on both Python 2 and 3.
        return [line.decode().strip() for line in op_list.readlines()
                if not line.startswith(b"#")]
|  | 1038 |  | 
|  | 1039 | def setUp(self): | 
|  | 1040 | self.script = MockScriptWriter() | 
|  | 1041 | self.output_path = common.MakeTempFile(suffix='.zip') | 
|  | 1042 |  | 
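  # Full update: the op list must rebuild the layout from scratch, i.e. remove
  # all existing groups before adding groups and partitions and resizing them.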
|  | 1043 | def test_full(self): | 
|  | 1044 | target_info = common.LoadDictionaryFromLines(""" | 
|  | 1045 | dynamic_partition_list=system vendor | 
|  | 1046 | super_partition_groups=group_foo | 
|  | 1047 | super_group_foo_group_size={group_size} | 
|  | 1048 | super_group_foo_partition_list=system vendor | 
|  | 1049 | """.format(group_size=4 * GiB).split("\n")) | 
|  | 1050 | block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)), | 
|  | 1051 | MockBlockDifference("vendor", FakeSparseImage(1 * GiB))] | 
|  | 1052 |  | 
|  | 1053 | dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs) | 
|  | 1054 | with zipfile.ZipFile(self.output_path, 'w') as output_zip: | 
|  | 1055 | dp_diff.WriteScript(self.script, output_zip, write_verify_script=True) | 
|  | 1056 |  | 
|  | 1057 | self.assertEqual(str(self.script).strip(), """ | 
|  | 1058 | assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list"))); | 
|  | 1059 | patch(vendor); | 
|  | 1060 | verify(vendor); | 
|  | 1061 | unmap_partition("vendor"); | 
|  | 1062 | patch(system); | 
|  | 1063 | verify(system); | 
|  | 1064 | unmap_partition("system"); | 
|  | 1065 | """.strip()) | 
|  | 1066 |  | 
|  | 1067 | lines = self.get_op_list(self.output_path) | 
|  | 1068 |  | 
|  | 1069 | remove_all_groups = lines.index("remove_all_groups") | 
|  | 1070 | add_group = lines.index("add_group group_foo 4294967296") | 
|  | 1071 | add_vendor = lines.index("add vendor group_foo") | 
|  | 1072 | add_system = lines.index("add system group_foo") | 
|  | 1073 | resize_vendor = lines.index("resize vendor 1073741824") | 
|  | 1074 | resize_system = lines.index("resize system 3221225472") | 
|  | 1075 |  | 
|  | 1076 | self.assertLess(remove_all_groups, add_group, | 
|  | 1077 | "Should add groups after removing all groups") | 
|  | 1078 | self.assertLess(add_group, min(add_vendor, add_system), | 
|  | 1079 | "Should add partitions after adding group") | 
|  | 1080 | self.assertLess(add_system, resize_system, | 
|  | 1081 | "Should resize system after adding it") | 
|  | 1082 | self.assertLess(add_vendor, resize_vendor, | 
|  | 1083 | "Should resize vendor after adding it") | 
|  | 1084 |  | 
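  # Incremental update that only changes group definitions: group_bar is
  # removed, group_foo shrinks, group_baz grows and group_qux is added.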
|  | 1085 | def test_inc_groups(self): | 
|  | 1086 | source_info = common.LoadDictionaryFromLines(""" | 
|  | 1087 | super_partition_groups=group_foo group_bar group_baz | 
|  | 1088 | super_group_foo_group_size={group_foo_size} | 
|  | 1089 | super_group_bar_group_size={group_bar_size} | 
|  | 1090 | """.format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n")) | 
|  | 1091 | target_info = common.LoadDictionaryFromLines(""" | 
|  | 1092 | super_partition_groups=group_foo group_baz group_qux | 
|  | 1093 | super_group_foo_group_size={group_foo_size} | 
|  | 1094 | super_group_baz_group_size={group_baz_size} | 
|  | 1095 | super_group_qux_group_size={group_qux_size} | 
|  | 1096 | """.format(group_foo_size=3 * GiB, group_baz_size=4 * GiB, | 
|  | 1097 | group_qux_size=1 * GiB).split("\n")) | 
|  | 1098 |  | 
|  | 1099 | dp_diff = common.DynamicPartitionsDifference(target_info, | 
|  | 1100 | block_diffs=[], | 
|  | 1101 | source_info_dict=source_info) | 
|  | 1102 | with zipfile.ZipFile(self.output_path, 'w') as output_zip: | 
|  | 1103 | dp_diff.WriteScript(self.script, output_zip, write_verify_script=True) | 
|  | 1104 |  | 
|  | 1105 | lines = self.get_op_list(self.output_path) | 
|  | 1106 |  | 
|  | 1107 | removed = lines.index("remove_group group_bar") | 
|  | 1108 | shrunk = lines.index("resize_group group_foo 3221225472") | 
|  | 1109 | grown = lines.index("resize_group group_baz 4294967296") | 
|  | 1110 | added = lines.index("add_group group_qux 1073741824") | 
|  | 1111 |  | 
    self.assertLess(max(removed, shrunk), min(grown, added),
                    "ops that remove / shrink partitions must precede ops that "
                    "grow / add partitions")
|  | 1115 |  | 
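  # Incremental update that adds, removes and moves partitions across groups,
  # in addition to resizing partitions and groups.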
|  | 1116 | def test_inc_partitions(self): | 
|  | 1117 | source_info = common.LoadDictionaryFromLines(""" | 
|  | 1118 | dynamic_partition_list=system vendor product product_services | 
|  | 1119 | super_partition_groups=group_foo | 
|  | 1120 | super_group_foo_group_size={group_foo_size} | 
|  | 1121 | super_group_foo_partition_list=system vendor product product_services | 
|  | 1122 | """.format(group_foo_size=4 * GiB).split("\n")) | 
|  | 1123 | target_info = common.LoadDictionaryFromLines(""" | 
|  | 1124 | dynamic_partition_list=system vendor product odm | 
|  | 1125 | super_partition_groups=group_foo group_bar | 
|  | 1126 | super_group_foo_group_size={group_foo_size} | 
|  | 1127 | super_group_foo_partition_list=system vendor odm | 
|  | 1128 | super_group_bar_group_size={group_bar_size} | 
|  | 1129 | super_group_bar_partition_list=product | 
|  | 1130 | """.format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n")) | 
|  | 1131 |  | 
|  | 1132 | block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB), | 
|  | 1133 | src=FakeSparseImage(1024 * MiB)), | 
|  | 1134 | MockBlockDifference("vendor", FakeSparseImage(512 * MiB), | 
|  | 1135 | src=FakeSparseImage(1024 * MiB)), | 
|  | 1136 | MockBlockDifference("product", FakeSparseImage(1024 * MiB), | 
|  | 1137 | src=FakeSparseImage(1024 * MiB)), | 
|  | 1138 | MockBlockDifference("product_services", None, | 
|  | 1139 | src=FakeSparseImage(1024 * MiB)), | 
|  | 1140 | MockBlockDifference("odm", FakeSparseImage(1024 * MiB), | 
|  | 1141 | src=None)] | 
|  | 1142 |  | 
|  | 1143 | dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs, | 
|  | 1144 | source_info_dict=source_info) | 
|  | 1145 | with zipfile.ZipFile(self.output_path, 'w') as output_zip: | 
|  | 1146 | dp_diff.WriteScript(self.script, output_zip, write_verify_script=True) | 
|  | 1147 |  | 
|  | 1148 | metadata_idx = self.script.lines.index( | 
|  | 1149 | 'assert(update_dynamic_partitions(package_extract_file(' | 
|  | 1150 | '"dynamic_partitions_op_list")));') | 
|  | 1151 | self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx) | 
|  | 1152 | self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);')) | 
|  | 1153 | for p in ("product", "system", "odm"): | 
|  | 1154 | patch_idx = self.script.lines.index("patch({});".format(p)) | 
|  | 1155 | verify_idx = self.script.lines.index("verify({});".format(p)) | 
|  | 1156 | self.assertLess(metadata_idx, patch_idx, | 
|  | 1157 | "Should patch {} after updating metadata".format(p)) | 
|  | 1158 | self.assertLess(patch_idx, verify_idx, | 
|  | 1159 | "Should verify {} after patching".format(p)) | 
|  | 1160 |  | 
|  | 1161 | self.assertNotIn("patch(product_services);", self.script.lines) | 
|  | 1162 |  | 
|  | 1163 | lines = self.get_op_list(self.output_path) | 
|  | 1164 |  | 
|  | 1165 | remove = lines.index("remove product_services") | 
|  | 1166 | move_product_out = lines.index("move product default") | 
|  | 1167 | shrink = lines.index("resize vendor 536870912") | 
|  | 1168 | shrink_group = lines.index("resize_group group_foo 3221225472") | 
|  | 1169 | add_group_bar = lines.index("add_group group_bar 1073741824") | 
|  | 1170 | add_odm = lines.index("add odm group_foo") | 
|  | 1171 | grow_existing = lines.index("resize system 1610612736") | 
|  | 1172 | grow_added = lines.index("resize odm 1073741824") | 
|  | 1173 | move_product_in = lines.index("move product group_bar") | 
|  | 1174 |  | 
|  | 1175 | max_idx_move_partition_out_foo = max(remove, move_product_out, shrink) | 
|  | 1176 | min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added) | 
|  | 1177 |  | 
|  | 1178 | self.assertLess(max_idx_move_partition_out_foo, shrink_group, | 
|  | 1179 | "Must shrink group after partitions inside group are shrunk" | 
|  | 1180 | " / removed") | 
|  | 1181 |  | 
|  | 1182 | self.assertLess(add_group_bar, move_product_in, | 
|  | 1183 | "Must add partitions to group after group is added") | 
|  | 1184 |  | 
    self.assertLess(max_idx_move_partition_out_foo,
                    min_idx_move_partition_in_foo,
                    "Must shrink partitions / remove partitions from group "
                    "before adding / moving partitions into group")