#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import copy
import os
import subprocess
import tempfile
import time
import zipfile
from hashlib import sha1

import common
import test_utils
import validate_target_files
from rangelib import RangeSet

from blockimgdiff import EmptyImage, DataImage

KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB


def get_2gb_string():
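  """Yields roughly 2 GiB of sparse data in 4 MiB steps.

  Each step yields 4 KiB of random bytes followed by zeros, so the large-file
  zip tests can stream ~2 GiB of content without holding it all in memory.
  """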
  size = int(2 * GiB + 1)
  block_size = 4 * KiB
  step_size = 4 * MiB
  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
  for _ in range(0, size, step_size):
    yield os.urandom(block_size)
    yield b'\0' * (step_size - block_size)


class CommonZipTest(test_utils.ReleaseToolsTestCase):

  def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
              test_file_name=None, expected_stat=None, expected_mode=0o644,
              expected_compress_type=zipfile.ZIP_STORED):
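    """Checks that a written zip entry matches the expectations.

    Verifies the on-disk stat (when a test file is given), plus the archived
    entry's fixed timestamp, file mode, compress type and SHA-1 digest.
    """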
    # Verify the stat if present.
    if test_file_name is not None:
      new_stat = os.stat(test_file_name)
      self.assertEqual(int(expected_stat.st_mode), int(new_stat.st_mode))
      self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))

    # Reopen the zip file to verify.
    zip_file = zipfile.ZipFile(zip_file_name, "r")

    # Verify the timestamp.
    info = zip_file.getinfo(arcname)
    self.assertEqual(info.date_time, (2009, 1, 1, 0, 0, 0))

    # Verify the file mode.
    mode = (info.external_attr >> 16) & 0o777
    self.assertEqual(mode, expected_mode)

    # Verify the compress type.
    self.assertEqual(info.compress_type, expected_compress_type)

    # Verify the zip contents.
    entry = zip_file.open(arcname)
    sha1_hash = sha1()
    for chunk in iter(lambda: entry.read(4 * MiB), b''):
      sha1_hash.update(chunk)
    self.assertEqual(expected_hash, sha1_hash.hexdigest())
    self.assertIsNone(zip_file.testzip())

  def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
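    """Writes a temp file into a zip via common.ZipWrite() and verifies it.

    `contents` may be a bytes object or an iterable of byte chunks (e.g. the
    generator from get_2gb_string()); extra args are passed to ZipWrite().
    """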
    extra_zipwrite_args = dict(extra_zipwrite_args or {})

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    # File names within an archive strip the leading slash.
    arcname = extra_zipwrite_args.get("arcname", test_file_name)
    if arcname[0] == "/":
      arcname = arcname[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in contents:
        sha1_hash.update(bytes(data))
        test_file.write(bytes(data))
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = extra_zipwrite_args.get("perms", 0o644)
      expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                       zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                   test_file_name, expected_stat, expected_mode,
                   expected_compress_type)
    finally:
      os.remove(test_file_name)
      os.remove(zip_file_name)

  def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
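    """Writes in-memory contents via common.ZipWriteStr() and verifies them.

    `zinfo_or_arcname` is either an archive name string or a zipfile.ZipInfo,
    mirroring the two ways common.ZipWriteStr() may be called.
    """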
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
        arcname = zinfo_or_arcname
        expected_mode = extra_args.get("perms", 0o644)
      else:
        arcname = zinfo_or_arcname.filename
        if zinfo_or_arcname.external_attr:
          zinfo_perms = zinfo_or_arcname.external_attr >> 16
        else:
          zinfo_perms = 0o600
        expected_mode = extra_args.get("perms", zinfo_perms)

      common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                   expected_mode=expected_mode,
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)

  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
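    """Writes a large entry with ZipWrite() and a small one with ZipWriteStr().

    ZipWriteStr() cannot take a string over 2 GiB, so the large contents go in
    through ZipWrite() and only the small string uses ZipWriteStr().
    """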
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    arcname_large = test_file_name
    arcname_small = "bar"

    # File names within an archive strip the leading slash.
    if arcname_large[0] == "/":
      arcname_large = arcname_large[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in large:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = 0o644
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_args)
      common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
      common.ZipClose(zip_file)

      # Verify the contents written by ZipWrite().
      self._verify(zip_file, zip_file_name, arcname_large,
                   sha1_hash.hexdigest(), test_file_name, expected_stat,
                   expected_mode, expected_compress_type)

      # Verify the contents written by ZipWriteStr().
      self._verify(zip_file, zip_file_name, arcname_small,
                   sha1(small).hexdigest(),
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)
      os.remove(test_file_name)

  def _test_reset_ZIP64_LIMIT(self, func, *args):
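    """Asserts that zipfile.ZIP64_LIMIT is restored after calling func."""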
    default_limit = (1 << 31) - 1
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
    func(*args)
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)

  def test_ZipWrite(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents)

  def test_ZipWrite_with_opts(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o777,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o700,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWrite_large_file(self):
    file_contents = get_2gb_string()
    self._test_ZipWrite(file_contents, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWrite_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")

  def test_ZipWriteStr(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string)

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string)

    # Timestamp in the zinfo should be overwritten.
    zinfo.date_time = (2015, 3, 1, 15, 30, 0)
    self._test_ZipWriteStr(zinfo, random_string)

  def test_ZipWriteStr_with_opts(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string, {
        "perms": 0o700,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr("bar", random_string, {
        "compress_type": zipfile.ZIP_STORED,
    })

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o600,
        "compress_type": zipfile.ZIP_STORED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o000,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWriteStr_large_file(self):
    # zipfile.writestr() doesn't work when the str size is over 2GiB even with
    # the workaround. We will only test the case of writing a string into a
    # large archive.
    long_string = get_2gb_string()
    short_string = os.urandom(1024)
    self._test_ZipWriteStr_large_file(long_string, short_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')

  def test_bug21309935(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    try:
      random_string = os.urandom(1024)
      zip_file = zipfile.ZipFile(zip_file_name, "w")
      # Default perms should be 0o644 when passing the filename.
      common.ZipWriteStr(zip_file, "foo", random_string)
      # Honor the specified perms.
      common.ZipWriteStr(zip_file, "bar", random_string, perms=0o755)
      # The perms in zinfo should be untouched.
      zinfo = zipfile.ZipInfo(filename="baz")
      zinfo.external_attr = 0o740 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string)
      # Explicitly specified perms take priority.
      zinfo = zipfile.ZipInfo(filename="qux")
      zinfo.external_attr = 0o700 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, "foo",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o644)
      self._verify(zip_file, zip_file_name, "bar",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o755)
      self._verify(zip_file, zip_file_name, "baz",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o740)
      self._verify(zip_file, zip_file_name, "qux",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o400)
    finally:
      os.remove(zip_file_name)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ZipDelete(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    output_zip = zipfile.ZipFile(zip_file.name, 'w',
                                 compression=zipfile.ZIP_DEFLATED)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(os.urandom(1024))
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
      common.ZipClose(output_zip)
    zip_file.close()

    try:
      common.ZipDelete(zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      self.assertRaises(
          common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test3'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertFalse('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)
    finally:
      os.remove(zip_file.name)

  @staticmethod
  def _test_UnzipTemp_createZipFile():
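    """Creates a temporary zip with five entries for the UnzipTemp tests."""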
    zip_file = common.MakeTempFile(suffix='.zip')
    output_zip = zipfile.ZipFile(
        zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
    contents = os.urandom(1024)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(contents)
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
      common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
      common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
      common.ZipClose(output_zip)
    return zip_file

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file)
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1', 'Foo3'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Foo3*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['*Test1', '*Baz*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withEmptyPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, [])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPartiallyMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withNoMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Foo4', 'Nonexistent*'])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))


class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase):
  """Tests the APK-related utility functions."""

  APKCERTS_TXT1 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/make/target/product/security/platform.x509.pem"'
      ' private_key="build/make/target/product/security/platform.pk8"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
  )

  APKCERTS_CERTMAP1 = {
      'RecoveryLocalizer.apk' : 'certs/devkey',
      'Settings.apk' : 'build/make/target/product/security/platform',
      'TV.apk' : 'PRESIGNED',
  }

  APKCERTS_TXT2 = (
      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
  )

  APKCERTS_CERTMAP2 = {
      'Compressed1.apk' : 'certs/compressed1',
      'Compressed2a.apk' : 'certs/compressed2',
      'Compressed2b.apk' : 'certs/compressed2',
      'Compressed3.apk' : 'certs/compressed3',
  }

  APKCERTS_TXT3 = (
      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
  )

  APKCERTS_CERTMAP3 = {
      'Compressed4.apk' : 'certs/compressed4',
  }

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @staticmethod
  def _write_apkcerts_txt(apkcerts_txt, additional=None):
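    """Returns a target-files zip that contains the given apkcerts.txt.

    Entries listed in `additional` are written as empty files, so tests can
    simulate the presence of installed (possibly compressed) APKs.
    """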
    if additional is None:
      additional = []
    target_files = common.MakeTempFile(suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
      for entry in additional:
        target_files_zip.writestr(entry, '')
    return target_files

  def test_ReadApkCerts_NoncompressedApks(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
    self.assertIsNone(ext)

  def test_ReadApkCerts_CompressedApks(self):
    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
    # not stored in '.gz' format, so it shouldn't be considered as installed.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
    self.assertEqual('.gz', ext)

    # Alternative case with '.xz'.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
    self.assertEqual('.xz', ext)

  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    certmap_merged = self.APKCERTS_CERTMAP1.copy()
    certmap_merged.update(self.APKCERTS_CERTMAP2)
    self.assertDictEqual(certmap_merged, certmap)
    self.assertEqual('.gz', ext)

  def test_ReadApkCerts_MultipleCompressionMethods(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_MismatchingKeys(self):
    malformed_apkcerts_txt = (
        'name="App1.apk" certificate="certs/cert1.x509.pem"'
        ' private_key="certs/cert2.pk8"\n'
    )
    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ExtractPublicKey(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(pubkey) as pubkey_fp:
      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))

  def test_ExtractPublicKey_invalidInput(self):
    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ExtractAvbPublicKey(self):
    privkey = os.path.join(self.testdata_dir, 'testkey.key')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
    extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
    with open(extracted_from_privkey, 'rb') as privkey_fp, \
        open(extracted_from_pubkey, 'rb') as pubkey_fp:
      self.assertEqual(privkey_fp.read(), pubkey_fp.read())

  def test_ParseCertificate(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')

    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                      universal_newlines=False)
    expected, _ = proc.communicate()
    self.assertEqual(0, proc.returncode)

    with open(cert) as cert_fp:
      actual = common.ParseCertificate(cert_fp.read())
    self.assertEqual(expected, actual)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual('24', common.GetMinSdkVersion(test_app))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
        {})


class CommonUtilsTest(test_utils.ReleaseToolsTestCase):

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_emptyBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('IMAGES/system.map', '')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("1-5 9-12"),
        },
        sparse_image.file_map)

  def test_GetSparseImage_missingImageFile(self):
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'system2', self.testdata_dir,
        None, False)
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir,
        None, False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_missingBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_notAllowed(self):
    """Tests the case of having overlapping blocks but disallowed."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_allowed(self):
    """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      # Construct an image with a care_map of "0-5 9-12".
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("6-8 13-15"),
            '/system/file1': RangeSet("1-5 9-10"),
            '/system/file2': RangeSet("11-12"),
        },
        sparse_image.file_map)

    # '/system/file2' should be marked with 'uses_shared_blocks', but not with
    # 'incomplete'.
    self.assertTrue(
        sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
    self.assertNotIn(
        'incomplete', sparse_image.file_map['/system/file2'].extra)

    # All other entries should look normal without any tags.
    self.assertFalse(sparse_image.file_map['__COPY'].extra)
    self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
    self.assertFalse(sparse_image.file_map['/system/file1'].extra)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_incompleteRanges(self):
    """Tests the case of ext4 images with holes."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12',
              '/system/app/file3 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
      # '/system/app/file3' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
    self.assertTrue(
        sparse_image.file_map['/system/app/file3'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//init.rc 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/init.rc' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_fileNotFound(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('system', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withPrivateKey(self):
    key = os.path.join(self.testdata_dir, 'testkey.key')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_product_key_path': key,
        'avb_product_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('product', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('product', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': 'does-not-exist',
        'avb_system_rollback_index_location': 2,
    }
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    args = common.GetAvbChainedPartitionArg(
        'system', info_dict, pubkey).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_invalidKey(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    self.assertRaises(
        common.ExternalError, common.GetAvbChainedPartitionArg, 'system',
        info_dict)

  INFO_DICT_DEFAULT = {
      'recovery_api_version': 3,
      'fstab_version': 2,
      'system_root_image': 'true',
      'no_recovery' : 'true',
      'recovery_as_boot': 'true',
  }

  @staticmethod
  def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
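    """Builds a minimal target-files zip for the LoadInfoDict tests.

    Writes the given info_dict as META/misc_info.txt, a one-line fstab at
    fstab_path, and a placeholder META/file_contexts entry.
    """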
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      info_values = ''.join(
          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
      common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)

      FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
      if info_dict.get('system_root_image') == 'true':
        fstab_values = FSTAB_TEMPLATE.format('/')
      else:
        fstab_values = FSTAB_TEMPLATE.format('/system')
      common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)

      common.ZipWriteStr(
          target_files_zip, 'META/file_contexts', 'file-contexts')
    return target_files

| 934 | def test_LoadInfoDict(self): |
| 935 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 936 | self.INFO_DICT_DEFAULT, |
| 937 | 'BOOT/RAMDISK/system/etc/recovery.fstab') |
| 938 | with zipfile.ZipFile(target_files, 'r') as target_files_zip: |
| 939 | loaded_dict = common.LoadInfoDict(target_files_zip) |
| 940 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 941 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 942 | self.assertIn('/', loaded_dict['fstab']) |
| 943 | self.assertIn('/system', loaded_dict['fstab']) |
| 944 | |
| 945 | def test_LoadInfoDict_legacyRecoveryFstabPath(self): |
| 946 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 947 | self.INFO_DICT_DEFAULT, |
| 948 | 'BOOT/RAMDISK/etc/recovery.fstab') |
| 949 | with zipfile.ZipFile(target_files, 'r') as target_files_zip: |
| 950 | loaded_dict = common.LoadInfoDict(target_files_zip) |
| 951 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 952 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 953 | self.assertIn('/', loaded_dict['fstab']) |
| 954 | self.assertIn('/system', loaded_dict['fstab']) |
| 955 | |
Tao Bao | 82490d3 | 2019-04-09 00:12:30 -0700 | [diff] [blame] | 956 | @test_utils.SkipIfExternalToolsUnavailable() |
Tao Bao | a57ab9f | 2018-08-24 12:08:38 -0700 | [diff] [blame] | 957 | def test_LoadInfoDict_dirInput(self): |
| 958 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 959 | self.INFO_DICT_DEFAULT, |
| 960 | 'BOOT/RAMDISK/system/etc/recovery.fstab') |
| 961 | unzipped = common.UnzipTemp(target_files) |
| 962 | loaded_dict = common.LoadInfoDict(unzipped) |
| 963 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 964 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 965 | self.assertIn('/', loaded_dict['fstab']) |
| 966 | self.assertIn('/system', loaded_dict['fstab']) |
| 967 | |
Tao Bao | 82490d3 | 2019-04-09 00:12:30 -0700 | [diff] [blame] | 968 | @test_utils.SkipIfExternalToolsUnavailable() |
Tao Bao | a57ab9f | 2018-08-24 12:08:38 -0700 | [diff] [blame] | 969 | def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self): |
| 970 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 971 | self.INFO_DICT_DEFAULT, |
| 972 | 'BOOT/RAMDISK/system/etc/recovery.fstab') |
| 973 | unzipped = common.UnzipTemp(target_files) |
| 974 | loaded_dict = common.LoadInfoDict(unzipped) |
| 975 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 976 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 977 | self.assertIn('/', loaded_dict['fstab']) |
| 978 | self.assertIn('/system', loaded_dict['fstab']) |
| 979 | |
| 980 | def test_LoadInfoDict_systemRootImageFalse(self): |
| 981 | # Devices not using system-as-root nor recovery-as-boot. Non-A/B devices |
| 982 | # launched prior to P will likely have this config. |
| 983 | info_dict = copy.copy(self.INFO_DICT_DEFAULT) |
| 984 | del info_dict['no_recovery'] |
| 985 | del info_dict['system_root_image'] |
| 986 | del info_dict['recovery_as_boot'] |
| 987 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 988 | info_dict, |
| 989 | 'RECOVERY/RAMDISK/system/etc/recovery.fstab') |
| 990 | with zipfile.ZipFile(target_files, 'r') as target_files_zip: |
| 991 | loaded_dict = common.LoadInfoDict(target_files_zip) |
| 992 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 993 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 994 | self.assertNotIn('/', loaded_dict['fstab']) |
| 995 | self.assertIn('/system', loaded_dict['fstab']) |
| 996 | |
| 997 | def test_LoadInfoDict_recoveryAsBootFalse(self): |
 | 998 | # Devices using system-as-root but with a standalone recovery image. Non-A/B
| 999 | # devices launched since P will likely have this config. |
| 1000 | info_dict = copy.copy(self.INFO_DICT_DEFAULT) |
| 1001 | del info_dict['no_recovery'] |
| 1002 | del info_dict['recovery_as_boot'] |
| 1003 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 1004 | info_dict, |
| 1005 | 'RECOVERY/RAMDISK/system/etc/recovery.fstab') |
| 1006 | with zipfile.ZipFile(target_files, 'r') as target_files_zip: |
| 1007 | loaded_dict = common.LoadInfoDict(target_files_zip) |
| 1008 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 1009 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 1010 | self.assertIn('/', loaded_dict['fstab']) |
| 1011 | self.assertIn('/system', loaded_dict['fstab']) |
| 1012 | |
| 1013 | def test_LoadInfoDict_noRecoveryTrue(self): |
| 1014 | # Device doesn't have a recovery partition at all. |
| 1015 | info_dict = copy.copy(self.INFO_DICT_DEFAULT) |
| 1016 | del info_dict['recovery_as_boot'] |
| 1017 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 1018 | info_dict, |
| 1019 | 'RECOVERY/RAMDISK/system/etc/recovery.fstab') |
| 1020 | with zipfile.ZipFile(target_files, 'r') as target_files_zip: |
| 1021 | loaded_dict = common.LoadInfoDict(target_files_zip) |
| 1022 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 1023 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 1024 | self.assertIsNone(loaded_dict['fstab']) |
| 1025 | |
Tao Bao | 82490d3 | 2019-04-09 00:12:30 -0700 | [diff] [blame] | 1026 | @test_utils.SkipIfExternalToolsUnavailable() |
Tao Bao | 410ad8b | 2018-08-24 12:08:38 -0700 | [diff] [blame] | 1027 | def test_LoadInfoDict_missingMetaMiscInfoTxt(self): |
| 1028 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 1029 | self.INFO_DICT_DEFAULT, |
| 1030 | 'BOOT/RAMDISK/system/etc/recovery.fstab') |
| 1031 | common.ZipDelete(target_files, 'META/misc_info.txt') |
| 1032 | with zipfile.ZipFile(target_files, 'r') as target_files_zip: |
| 1033 | self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip) |
| 1034 | |
Tao Bao | 82490d3 | 2019-04-09 00:12:30 -0700 | [diff] [blame] | 1035 | @test_utils.SkipIfExternalToolsUnavailable() |
Tao Bao | 410ad8b | 2018-08-24 12:08:38 -0700 | [diff] [blame] | 1036 | def test_LoadInfoDict_repacking(self): |
| 1037 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 1038 | self.INFO_DICT_DEFAULT, |
| 1039 | 'BOOT/RAMDISK/system/etc/recovery.fstab') |
| 1040 | unzipped = common.UnzipTemp(target_files) |
| 1041 | loaded_dict = common.LoadInfoDict(unzipped, True) |
| 1042 | self.assertEqual(3, loaded_dict['recovery_api_version']) |
| 1043 | self.assertEqual(2, loaded_dict['fstab_version']) |
| 1044 | self.assertIn('/', loaded_dict['fstab']) |
| 1045 | self.assertIn('/system', loaded_dict['fstab']) |
| 1046 | self.assertEqual( |
| 1047 | os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir']) |
| 1048 | self.assertEqual( |
| 1049 | os.path.join(unzipped, 'META', 'root_filesystem_config.txt'), |
| 1050 | loaded_dict['root_fs_config']) |
| 1051 | |
| 1052 | def test_LoadInfoDict_repackingWithZipFileInput(self): |
| 1053 | target_files = self._test_LoadInfoDict_createTargetFiles( |
| 1054 | self.INFO_DICT_DEFAULT, |
| 1055 | 'BOOT/RAMDISK/system/etc/recovery.fstab') |
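 | | # Loading with repacking=True needs the unzipped input directory (it records
 | | # root_dir / root_fs_config for later repacking, as checked above), so a zip
 | | # input is expected to be rejected.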
| 1056 | with zipfile.ZipFile(target_files, 'r') as target_files_zip: |
| 1057 | self.assertRaises( |
| 1058 | AssertionError, common.LoadInfoDict, target_files_zip, True) |
| 1059 | |
Tao Bao | fc7e0e0 | 2018-02-13 13:54:02 -0800 | [diff] [blame] | 1060 | |
Tao Bao | 65b94e9 | 2018-10-11 21:57:26 -0700 | [diff] [blame] | 1061 | class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase): |
Tao Bao | 1c830bf | 2017-12-25 10:43:47 -0800 | [diff] [blame] | 1062 | """Checks the format of install-recovery.sh. |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1063 | |
Tao Bao | 1c830bf | 2017-12-25 10:43:47 -0800 | [diff] [blame] | 1064 | Its format should match between common.py and validate_target_files.py. |
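 | | 
 | | Roughly speaking, when full_recovery_image is true the generated script
 | | reflashes the full recovery image; otherwise it re-creates recovery by
 | | applying a recovery-from-boot patch on top of the boot image (optionally
 | | with a bonus resource file). Either way, common.py must emit exactly the
 | | format that validate_target_files.py knows how to parse.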
| 1065 | """ |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1066 | |
| 1067 | def setUp(self): |
Tao Bao | 1c830bf | 2017-12-25 10:43:47 -0800 | [diff] [blame] | 1068 | self._tempdir = common.MakeTempDir() |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1069 | # Create a dummy info dict with the fstab entries for boot and recovery.
 | 1070 | self._info = {"fstab": {}}
Tao Bao | 1c830bf | 2017-12-25 10:43:47 -0800 | [diff] [blame] | 1071 | dummy_fstab = [ |
| 1072 | "/dev/soc.0/by-name/boot /boot emmc defaults defaults", |
| 1073 | "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"] |
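 | | # Passing "\n".join as the read_helper makes LoadRecoveryFSTab see
 | | # dummy_fstab joined into a single file-like string.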
Tao Bao | 31b0807 | 2017-11-08 15:50:59 -0800 | [diff] [blame] | 1074 | self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab) |
Tianjie Xu | df05558 | 2017-11-07 12:22:58 -0800 | [diff] [blame] | 1075 | # Construct the gzipped recovery.img and boot.img |
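 | | # echo -n "recovery" | gzip -f | hd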
| 1076 | self.recovery_data = bytearray([ |
| 1077 | 0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a, |
| 1078 | 0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3, |
| 1079 | 0x08, 0x00, 0x00, 0x00 |
| 1080 | ]) |
| 1081 | # echo -n "boot" | gzip -f | hd |
| 1082 | self.boot_data = bytearray([ |
| 1083 | 0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca, |
| 1084 | 0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00 |
| 1085 | ]) |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1086 | |
| 1087 | def _out_tmp_sink(self, name, data, prefix="SYSTEM"): |
| 1088 | loc = os.path.join(self._tempdir, prefix, name) |
| 1089 | if not os.path.exists(os.path.dirname(loc)): |
| 1090 | os.makedirs(os.path.dirname(loc)) |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1091 | with open(loc, "wb") as f: |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1092 | f.write(data) |
| 1093 | |
| 1094 | def test_full_recovery(self): |
Tao Bao | 31b0807 | 2017-11-08 15:50:59 -0800 | [diff] [blame] | 1095 | recovery_image = common.File("recovery.img", self.recovery_data) |
| 1096 | boot_image = common.File("boot.img", self.boot_data) |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1097 | self._info["full_recovery_image"] = "true" |
| 1098 | |
| 1099 | common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink, |
| 1100 | recovery_image, boot_image, self._info) |
| 1101 | validate_target_files.ValidateInstallRecoveryScript(self._tempdir, |
| 1102 | self._info) |
| 1103 | |
Tao Bao | 82490d3 | 2019-04-09 00:12:30 -0700 | [diff] [blame] | 1104 | @test_utils.SkipIfExternalToolsUnavailable() |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1105 | def test_recovery_from_boot(self): |
Tao Bao | 31b0807 | 2017-11-08 15:50:59 -0800 | [diff] [blame] | 1106 | recovery_image = common.File("recovery.img", self.recovery_data) |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1107 | self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES") |
Tao Bao | 31b0807 | 2017-11-08 15:50:59 -0800 | [diff] [blame] | 1108 | boot_image = common.File("boot.img", self.boot_data) |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1109 | self._out_tmp_sink("boot.img", boot_image.data, "IMAGES") |
| 1110 | |
| 1111 | common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink, |
| 1112 | recovery_image, boot_image, self._info) |
| 1113 | validate_target_files.ValidateInstallRecoveryScript(self._tempdir, |
| 1114 | self._info) |
| 1115 | # Validate 'recovery-from-boot' with bonus argument. |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1116 | self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM") |
Tianjie Xu | 9c384d2 | 2017-06-20 17:00:55 -0700 | [diff] [blame] | 1117 | common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink, |
| 1118 | recovery_image, boot_image, self._info) |
| 1119 | validate_target_files.ValidateInstallRecoveryScript(self._tempdir, |
| 1120 | self._info) |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1121 | |
| 1122 | |
| 1123 | class MockScriptWriter(object): |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1124 | """A class that mocks edify_generator.EdifyGenerator.""" |
| 1125 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1126 | def __init__(self, enable_comments=False): |
| 1127 | self.lines = [] |
| 1128 | self.enable_comments = enable_comments |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1129 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1130 | def Comment(self, comment): |
| 1131 | if self.enable_comments: |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1132 | self.lines.append('# {}'.format(comment)) |
| 1133 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1134 | def AppendExtra(self, extra): |
| 1135 | self.lines.append(extra) |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1136 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1137 | def __str__(self): |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1138 | return '\n'.join(self.lines) |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1139 | |
| 1140 | |
| 1141 | class MockBlockDifference(object): |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1142 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1143 | def __init__(self, partition, tgt, src=None): |
| 1144 | self.partition = partition |
| 1145 | self.tgt = tgt |
| 1146 | self.src = src |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1147 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1148 | def WriteScript(self, script, _, progress=None, |
| 1149 | write_verify_script=False): |
| 1150 | if progress: |
| 1151 | script.AppendExtra("progress({})".format(progress)) |
| 1152 | script.AppendExtra("patch({});".format(self.partition)) |
| 1153 | if write_verify_script: |
| 1154 | self.WritePostInstallVerifyScript(script) |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1155 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1156 | def WritePostInstallVerifyScript(self, script): |
| 1157 | script.AppendExtra("verify({});".format(self.partition)) |
| 1158 | |
| 1159 | |
| 1160 | class FakeSparseImage(object): |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1161 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1162 | def __init__(self, size):
 | 1163 | self.blocksize = 4096
 | 1164 | assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)
 | 1165 | self.total_blocks = size // 4096
| 1166 | |
| 1167 | |
| 1168 | class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase): |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1169 | |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1170 | @staticmethod |
| 1171 | def get_op_list(output_path): |
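 | | """Returns the non-comment lines of dynamic_partitions_op_list in output_path."""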
Tao Bao | f1113e9 | 2019-06-18 12:10:14 -0700 | [diff] [blame] | 1172 | with zipfile.ZipFile(output_path) as output_zip: |
Tao Bao | da30cfa | 2017-12-01 16:19:46 -0800 | [diff] [blame] | 1173 | with output_zip.open('dynamic_partitions_op_list') as op_list: |
| 1174 | return [line.decode().strip() for line in op_list.readlines() |
| 1175 | if not line.startswith(b'#')] |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1176 | |
| 1177 | def setUp(self): |
| 1178 | self.script = MockScriptWriter() |
| 1179 | self.output_path = common.MakeTempFile(suffix='.zip') |
| 1180 | |
| 1181 | def test_full(self): |
| 1182 | target_info = common.LoadDictionaryFromLines(""" |
| 1183 | dynamic_partition_list=system vendor |
| 1184 | super_partition_groups=group_foo |
| 1185 | super_group_foo_group_size={group_size} |
| 1186 | super_group_foo_partition_list=system vendor |
| 1187 | """.format(group_size=4 * GiB).split("\n")) |
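 | | # system (3 GiB) + vendor (1 GiB) below exactly fill the 4 GiB group_foo.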
| 1188 | block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)), |
| 1189 | MockBlockDifference("vendor", FakeSparseImage(1 * GiB))] |
| 1190 | |
| 1191 | dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs) |
| 1192 | with zipfile.ZipFile(self.output_path, 'w') as output_zip: |
| 1193 | dp_diff.WriteScript(self.script, output_zip, write_verify_script=True) |
| 1194 | |
| 1195 | self.assertEqual(str(self.script).strip(), """ |
| 1196 | assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list"))); |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1197 | patch(system); |
| 1198 | verify(system); |
| 1199 | unmap_partition("system"); |
Tao Bao | f1113e9 | 2019-06-18 12:10:14 -0700 | [diff] [blame] | 1200 | patch(vendor); |
| 1201 | verify(vendor); |
| 1202 | unmap_partition("vendor"); |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1203 | """.strip()) |
| 1204 | |
| 1205 | lines = self.get_op_list(self.output_path) |
| 1206 | |
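 | | # Sizes in the op list: 4294967296 = 4 GiB (group_foo), 3221225472 = 3 GiB
 | | # (system), 1073741824 = 1 GiB (vendor).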
| 1207 | remove_all_groups = lines.index("remove_all_groups") |
| 1208 | add_group = lines.index("add_group group_foo 4294967296") |
| 1209 | add_vendor = lines.index("add vendor group_foo") |
| 1210 | add_system = lines.index("add system group_foo") |
| 1211 | resize_vendor = lines.index("resize vendor 1073741824") |
| 1212 | resize_system = lines.index("resize system 3221225472") |
| 1213 | |
| 1214 | self.assertLess(remove_all_groups, add_group, |
| 1215 | "Should add groups after removing all groups") |
| 1216 | self.assertLess(add_group, min(add_vendor, add_system), |
| 1217 | "Should add partitions after adding group") |
| 1218 | self.assertLess(add_system, resize_system, |
| 1219 | "Should resize system after adding it") |
| 1220 | self.assertLess(add_vendor, resize_vendor, |
| 1221 | "Should resize vendor after adding it") |
| 1222 | |
| 1223 | def test_inc_groups(self): |
| 1224 | source_info = common.LoadDictionaryFromLines(""" |
| 1225 | super_partition_groups=group_foo group_bar group_baz |
| 1226 | super_group_foo_group_size={group_foo_size} |
| 1227 | super_group_bar_group_size={group_bar_size} |
| 1228 | """.format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n")) |
| 1229 | target_info = common.LoadDictionaryFromLines(""" |
| 1230 | super_partition_groups=group_foo group_baz group_qux |
| 1231 | super_group_foo_group_size={group_foo_size} |
| 1232 | super_group_baz_group_size={group_baz_size} |
| 1233 | super_group_qux_group_size={group_qux_size} |
| 1234 | """.format(group_foo_size=3 * GiB, group_baz_size=4 * GiB, |
| 1235 | group_qux_size=1 * GiB).split("\n")) |
| 1236 | |
| 1237 | dp_diff = common.DynamicPartitionsDifference(target_info, |
| 1238 | block_diffs=[], |
| 1239 | source_info_dict=source_info) |
| 1240 | with zipfile.ZipFile(self.output_path, 'w') as output_zip: |
| 1241 | dp_diff.WriteScript(self.script, output_zip, write_verify_script=True) |
| 1242 | |
| 1243 | lines = self.get_op_list(self.output_path) |
| 1244 | |
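 | | # Sizes in the op list: 3221225472 = 3 GiB, 4294967296 = 4 GiB,
 | | # 1073741824 = 1 GiB.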
| 1245 | removed = lines.index("remove_group group_bar") |
| 1246 | shrunk = lines.index("resize_group group_foo 3221225472") |
| 1247 | grown = lines.index("resize_group group_baz 4294967296") |
| 1248 | added = lines.index("add_group group_qux 1073741824") |
| 1249 | |
Tao Bao | f1113e9 | 2019-06-18 12:10:14 -0700 | [diff] [blame] | 1250 | self.assertLess(max(removed, shrunk), |
| 1251 | min(grown, added), |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1252 | "ops that remove / shrink partitions must precede ops that " |
| 1253 | "grow / add partitions") |
| 1254 | |
Yifan Hong | bb2658d | 2019-01-25 12:30:58 -0800 | [diff] [blame] | 1255 | def test_incremental(self): |
Yifan Hong | 45433e4 | 2019-01-18 13:55:25 -0800 | [diff] [blame] | 1256 | source_info = common.LoadDictionaryFromLines(""" |
| 1257 | dynamic_partition_list=system vendor product product_services |
| 1258 | super_partition_groups=group_foo |
| 1259 | super_group_foo_group_size={group_foo_size} |
| 1260 | super_group_foo_partition_list=system vendor product product_services |
| 1261 | """.format(group_foo_size=4 * GiB).split("\n")) |
| 1262 | target_info = common.LoadDictionaryFromLines(""" |
| 1263 | dynamic_partition_list=system vendor product odm |
| 1264 | super_partition_groups=group_foo group_bar |
| 1265 | super_group_foo_group_size={group_foo_size} |
| 1266 | super_group_foo_partition_list=system vendor odm |
| 1267 | super_group_bar_group_size={group_bar_size} |
| 1268 | super_group_bar_partition_list=product |
| 1269 | """.format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n")) |
| 1270 | |
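 | | # Scenario: group_foo shrinks from 4 GiB to 3 GiB and a new 1 GiB group_bar
 | | # appears; product moves from group_foo to group_bar, product_services goes
 | | # away (tgt is None below) and odm is newly added (src is None).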
| 1271 | block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB), |
| 1272 | src=FakeSparseImage(1024 * MiB)), |
| 1273 | MockBlockDifference("vendor", FakeSparseImage(512 * MiB), |
| 1274 | src=FakeSparseImage(1024 * MiB)), |
| 1275 | MockBlockDifference("product", FakeSparseImage(1024 * MiB), |
| 1276 | src=FakeSparseImage(1024 * MiB)), |
| 1277 | MockBlockDifference("product_services", None, |
| 1278 | src=FakeSparseImage(1024 * MiB)), |
| 1279 | MockBlockDifference("odm", FakeSparseImage(1024 * MiB), |
| 1280 | src=None)] |
| 1281 | |
| 1282 | dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs, |
| 1283 | source_info_dict=source_info) |
| 1284 | with zipfile.ZipFile(self.output_path, 'w') as output_zip: |
| 1285 | dp_diff.WriteScript(self.script, output_zip, write_verify_script=True) |
| 1286 | |
| 1287 | metadata_idx = self.script.lines.index( |
| 1288 | 'assert(update_dynamic_partitions(package_extract_file(' |
| 1289 | '"dynamic_partitions_op_list")));') |
| 1290 | self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx) |
| 1291 | self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);')) |
| 1292 | for p in ("product", "system", "odm"): |
| 1293 | patch_idx = self.script.lines.index("patch({});".format(p)) |
| 1294 | verify_idx = self.script.lines.index("verify({});".format(p)) |
| 1295 | self.assertLess(metadata_idx, patch_idx, |
| 1296 | "Should patch {} after updating metadata".format(p)) |
| 1297 | self.assertLess(patch_idx, verify_idx, |
| 1298 | "Should verify {} after patching".format(p)) |
| 1299 | |
| 1300 | self.assertNotIn("patch(product_services);", self.script.lines) |
| 1301 | |
| 1302 | lines = self.get_op_list(self.output_path) |
| 1303 | |
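 | | # Sizes in the op list: 536870912 = 512 MiB, 1610612736 = 1536 MiB,
 | | # 1073741824 = 1 GiB, 3221225472 = 3 GiB.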
| 1304 | remove = lines.index("remove product_services") |
| 1305 | move_product_out = lines.index("move product default") |
| 1306 | shrink = lines.index("resize vendor 536870912") |
| 1307 | shrink_group = lines.index("resize_group group_foo 3221225472") |
| 1308 | add_group_bar = lines.index("add_group group_bar 1073741824") |
| 1309 | add_odm = lines.index("add odm group_foo") |
| 1310 | grow_existing = lines.index("resize system 1610612736") |
| 1311 | grow_added = lines.index("resize odm 1073741824") |
| 1312 | move_product_in = lines.index("move product group_bar") |
| 1313 | |
| 1314 | max_idx_move_partition_out_foo = max(remove, move_product_out, shrink) |
| 1315 | min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added) |
| 1316 | |
| 1317 | self.assertLess(max_idx_move_partition_out_foo, shrink_group, |
| 1318 | "Must shrink group after partitions inside group are shrunk" |
| 1319 | " / removed") |
| 1320 | |
| 1321 | self.assertLess(add_group_bar, move_product_in, |
| 1322 | "Must add partitions to group after group is added") |
| 1323 | |
| 1324 | self.assertLess(max_idx_move_partition_out_foo, |
| 1325 | min_idx_move_partition_in_foo, |
 | 1326 | "Must shrink partitions / remove partitions from group "
| 1327 | "before adding / moving partitions into group") |
Yifan Hong | bb2658d | 2019-01-25 12:30:58 -0800 | [diff] [blame] | 1328 | |
| 1329 | def test_remove_partition(self): |
| 1330 | source_info = common.LoadDictionaryFromLines(""" |
| 1331 | blockimgdiff_versions=3,4 |
| 1332 | use_dynamic_partitions=true |
| 1333 | dynamic_partition_list=foo |
| 1334 | super_partition_groups=group_foo |
| 1335 | super_group_foo_group_size={group_foo_size} |
| 1336 | super_group_foo_partition_list=foo |
| 1337 | """.format(group_foo_size=4 * GiB).split("\n")) |
| 1338 | target_info = common.LoadDictionaryFromLines(""" |
| 1339 | blockimgdiff_versions=3,4 |
| 1340 | use_dynamic_partitions=true |
| 1341 | super_partition_groups=group_foo |
| 1342 | super_group_foo_group_size={group_foo_size} |
| 1343 | """.format(group_foo_size=4 * GiB).split("\n")) |
| 1344 | |
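 | | # BlockDifference and blockimgdiff consult these module-level OPTIONS (e.g.
 | | # cache_size caps the stash size), so set them up before building the diff.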
| 1345 | common.OPTIONS.info_dict = target_info |
| 1346 | common.OPTIONS.target_info_dict = target_info |
| 1347 | common.OPTIONS.source_info_dict = source_info |
| 1348 | common.OPTIONS.cache_size = 4 * 4096 |
| 1349 | |
| 1350 | block_diffs = [common.BlockDifference("foo", EmptyImage(), |
| 1351 | src=DataImage("source", pad=True))] |
| 1352 | |
| 1353 | dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs, |
| 1354 | source_info_dict=source_info) |
| 1355 | with zipfile.ZipFile(self.output_path, 'w') as output_zip: |
| 1356 | dp_diff.WriteScript(self.script, output_zip, write_verify_script=True) |
| 1357 | |
| 1358 | self.assertNotIn("block_image_update", str(self.script), |
Tao Bao | 2cc0ca1 | 2019-03-15 10:44:43 -0700 | [diff] [blame] | 1359 | "Removed partition should not be patched.") |
Yifan Hong | bb2658d | 2019-01-25 12:30:58 -0800 | [diff] [blame] | 1360 | |
| 1361 | lines = self.get_op_list(self.output_path) |
| 1362 | self.assertEqual(lines, ["remove foo"]) |