#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import copy
import os
import subprocess
import tempfile
import time
import zipfile
from hashlib import sha1

import common
import test_utils
import validate_target_files
from rangelib import RangeSet

from blockimgdiff import EmptyImage, DataImage

KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB

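# Helper for the large-file tests below: lazily yields a little over 2 GiB of
# data (a random 4 KiB block followed by zero padding for each 4 MiB step), so
# the tests can exercise the large-entry code paths without holding the whole
# payload in memory.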
def get_2gb_string():
  size = int(2 * GiB + 1)
  block_size = 4 * KiB
  step_size = 4 * MiB
  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
  for _ in range(0, size, step_size):
    yield os.urandom(block_size)
    yield b'\0' * (step_size - block_size)


class CommonZipTest(test_utils.ReleaseToolsTestCase):

  def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
              test_file_name=None, expected_stat=None, expected_mode=0o644,
              expected_compress_type=zipfile.ZIP_STORED):
    # Verify the stat if present.
    if test_file_name is not None:
      new_stat = os.stat(test_file_name)
      self.assertEqual(int(expected_stat.st_mode), int(new_stat.st_mode))
      self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))

    # Reopen the zip file to verify.
    zip_file = zipfile.ZipFile(zip_file_name, "r")

    # Verify the timestamp.
    info = zip_file.getinfo(arcname)
    self.assertEqual(info.date_time, (2009, 1, 1, 0, 0, 0))

    # Verify the file mode.
    mode = (info.external_attr >> 16) & 0o777
    self.assertEqual(mode, expected_mode)

    # Verify the compress type.
    self.assertEqual(info.compress_type, expected_compress_type)

    # Verify the zip contents.
    entry = zip_file.open(arcname)
    sha1_hash = sha1()
    for chunk in iter(lambda: entry.read(4 * MiB), b''):
      sha1_hash.update(chunk)
    self.assertEqual(expected_hash, sha1_hash.hexdigest())
    self.assertIsNone(zip_file.testzip())

  def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
    extra_zipwrite_args = dict(extra_zipwrite_args or {})

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    # File names within an archive strip the leading slash.
    arcname = extra_zipwrite_args.get("arcname", test_file_name)
    if arcname[0] == "/":
      arcname = arcname[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in contents:
        sha1_hash.update(bytes(data))
        test_file.write(bytes(data))
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = extra_zipwrite_args.get("perms", 0o644)
      expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                       zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                   test_file_name, expected_stat, expected_mode,
                   expected_compress_type)
    finally:
      os.remove(test_file_name)
      os.remove(zip_file_name)

  def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
        arcname = zinfo_or_arcname
        expected_mode = extra_args.get("perms", 0o644)
      else:
        arcname = zinfo_or_arcname.filename
        if zinfo_or_arcname.external_attr:
          zinfo_perms = zinfo_or_arcname.external_attr >> 16
        else:
          zinfo_perms = 0o600
        expected_mode = extra_args.get("perms", zinfo_perms)

      common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                   expected_mode=expected_mode,
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)

  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    arcname_large = test_file_name
    arcname_small = "bar"

    # File names within an archive strip the leading slash.
    if arcname_large[0] == "/":
      arcname_large = arcname_large[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in large:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = 0o644
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_args)
      common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
      common.ZipClose(zip_file)

      # Verify the contents written by ZipWrite().
      self._verify(zip_file, zip_file_name, arcname_large,
                   sha1_hash.hexdigest(), test_file_name, expected_stat,
                   expected_mode, expected_compress_type)

      # Verify the contents written by ZipWriteStr().
      self._verify(zip_file, zip_file_name, arcname_small,
                   sha1(small).hexdigest(),
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)
      os.remove(test_file_name)

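  # The ZipWrite()/ZipWriteStr() workaround temporarily bumps
  # zipfile.ZIP64_LIMIT while writing large entries; this helper checks that
  # |func| leaves the limit back at the stock value of (1 << 31) - 1 once it
  # returns.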
  def _test_reset_ZIP64_LIMIT(self, func, *args):
    default_limit = (1 << 31) - 1
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
    func(*args)
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)

  def test_ZipWrite(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents)

  def test_ZipWrite_with_opts(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o777,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o700,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWrite_large_file(self):
    file_contents = get_2gb_string()
    self._test_ZipWrite(file_contents, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWrite_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")

  def test_ZipWriteStr(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string)

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string)

    # Timestamp in the zinfo should be overwritten.
    zinfo.date_time = (2015, 3, 1, 15, 30, 0)
    self._test_ZipWriteStr(zinfo, random_string)

  def test_ZipWriteStr_with_opts(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string, {
        "perms": 0o700,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr("bar", random_string, {
        "compress_type": zipfile.ZIP_STORED,
    })

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o600,
        "compress_type": zipfile.ZIP_STORED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o000,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWriteStr_large_file(self):
    # zipfile.writestr() doesn't work when the str size is over 2GiB even with
    # the workaround. We will only test the case of writing a string into a
    # large archive.
    long_string = get_2gb_string()
    short_string = os.urandom(1024)
    self._test_ZipWriteStr_large_file(long_string, short_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')

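  # Regression test for bug 21309935: covers ZipWriteStr() permission handling
  # with the default perms, explicitly passed perms, and perms carried in a
  # ZipInfo's external_attr.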
  def test_bug21309935(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    try:
      random_string = os.urandom(1024)
      zip_file = zipfile.ZipFile(zip_file_name, "w")
      # Default perms should be 0o644 when passing the filename.
      common.ZipWriteStr(zip_file, "foo", random_string)
      # Honor the specified perms.
      common.ZipWriteStr(zip_file, "bar", random_string, perms=0o755)
      # The perms in zinfo should be untouched.
      zinfo = zipfile.ZipInfo(filename="baz")
      zinfo.external_attr = 0o740 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string)
      # Explicitly specified perms take priority.
      zinfo = zipfile.ZipInfo(filename="qux")
      zinfo.external_attr = 0o700 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, "foo",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o644)
      self._verify(zip_file, zip_file_name, "bar",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o755)
      self._verify(zip_file, zip_file_name, "baz",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o740)
      self._verify(zip_file, zip_file_name, "qux",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o400)
    finally:
      os.remove(zip_file_name)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ZipDelete(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    output_zip = zipfile.ZipFile(zip_file.name, 'w',
                                 compression=zipfile.ZIP_DEFLATED)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(os.urandom(1024))
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
      common.ZipClose(output_zip)
    zip_file.close()

    try:
      common.ZipDelete(zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      self.assertRaises(
          common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test3'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertFalse('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)
    finally:
      os.remove(zip_file.name)

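  # Builds a temporary zip with five entries (Test1, Test2, Foo3, Bar4 and
  # Dir5/Baz5), all holding the same random payload; shared by the UnzipTemp
  # tests below.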
  @staticmethod
  def _test_UnzipTemp_createZipFile():
    zip_file = common.MakeTempFile(suffix='.zip')
    output_zip = zipfile.ZipFile(
        zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
    contents = os.urandom(1024)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(contents)
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
      common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
      common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
      common.ZipClose(output_zip)
    return zip_file

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file)
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1', 'Foo3'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Foo3*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['*Test1', '*Baz*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withEmptyPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, [])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPartiallyMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withNoMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Foo4', 'Nonexistent*'])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))


class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase):
  """Tests the APK-related utility functions."""

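  # Sample META/apkcerts.txt payloads and the certificate maps that
  # ReadApkCerts() is expected to extract from them.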
  APKCERTS_TXT1 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/make/target/product/security/platform.x509.pem"'
      ' private_key="build/make/target/product/security/platform.pk8"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
  )

  APKCERTS_CERTMAP1 = {
      'RecoveryLocalizer.apk' : 'certs/devkey',
      'Settings.apk' : 'build/make/target/product/security/platform',
      'TV.apk' : 'PRESIGNED',
  }

  APKCERTS_TXT2 = (
      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
  )

  APKCERTS_CERTMAP2 = {
      'Compressed1.apk' : 'certs/compressed1',
      'Compressed2a.apk' : 'certs/compressed2',
      'Compressed2b.apk' : 'certs/compressed2',
      'Compressed3.apk' : 'certs/compressed3',
  }

  APKCERTS_TXT3 = (
      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
  )

  APKCERTS_CERTMAP3 = {
      'Compressed4.apk' : 'certs/compressed4',
  }

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @staticmethod
  def _write_apkcerts_txt(apkcerts_txt, additional=None):
    if additional is None:
      additional = []
    target_files = common.MakeTempFile(suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
      for entry in additional:
        target_files_zip.writestr(entry, '')
    return target_files

  def test_ReadApkCerts_NoncompressedApks(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
    self.assertIsNone(ext)

  def test_ReadApkCerts_CompressedApks(self):
    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
    # not stored in '.gz' format, so it shouldn't be considered as installed.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
    self.assertEqual('.gz', ext)

    # Alternative case with '.xz'.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
    self.assertEqual('.xz', ext)

  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    certmap_merged = self.APKCERTS_CERTMAP1.copy()
    certmap_merged.update(self.APKCERTS_CERTMAP2)
    self.assertDictEqual(certmap_merged, certmap)
    self.assertEqual('.gz', ext)

  def test_ReadApkCerts_MultipleCompressionMethods(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_MismatchingKeys(self):
    malformed_apkcerts_txt = (
        'name="App1.apk" certificate="certs/cert1.x509.pem"'
        ' private_key="certs/cert2.pk8"\n'
    )
    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ExtractPublicKey(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(pubkey) as pubkey_fp:
      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))

  def test_ExtractPublicKey_invalidInput(self):
    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ExtractAvbPublicKey(self):
    privkey = os.path.join(self.testdata_dir, 'testkey.key')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(common.ExtractAvbPublicKey(privkey), 'rb') as privkey_fp, \
        open(common.ExtractAvbPublicKey(pubkey), 'rb') as pubkey_fp:
      self.assertEqual(privkey_fp.read(), pubkey_fp.read())

  def test_ParseCertificate(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')

    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                      universal_newlines=False)
    expected, _ = proc.communicate()
    self.assertEqual(0, proc.returncode)

    with open(cert) as cert_fp:
      actual = common.ParseCertificate(cert_fp.read())
    self.assertEqual(expected, actual)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual('24', common.GetMinSdkVersion(test_app))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
        {})


class CommonUtilsTest(test_utils.ReleaseToolsTestCase):

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

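  # The GetSparseImage tests below build their images via
  # test_utils.construct_sparse_image(), which takes (chunk_type, block_count)
  # pairs. In the Android sparse image format, 0xCAC1 is a raw chunk, 0xCAC2 a
  # fill chunk and 0xCAC3 a don't-care chunk.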
  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_emptyBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('IMAGES/system.map', '')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("1-5 9-12"),
        },
        sparse_image.file_map)

  def test_GetSparseImage_missingImageFile(self):
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'system2', self.testdata_dir,
        None, False)
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir,
        None, False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_missingBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_notAllowed(self):
    """Tests the case of having overlapping blocks but disallowed."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_allowed(self):
    """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      # Construct an image with a care_map of "0-5 9-12".
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("6-8 13-15"),
            '/system/file1': RangeSet("1-5 9-10"),
            '/system/file2': RangeSet("11-12"),
        },
        sparse_image.file_map)

    # '/system/file2' should be marked with 'uses_shared_blocks', but not with
    # 'incomplete'.
    self.assertTrue(
        sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
    self.assertNotIn(
        'incomplete', sparse_image.file_map['/system/file2'].extra)

    # All other entries should look normal without any tags.
    self.assertFalse(sparse_image.file_map['__COPY'].extra)
    self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
    self.assertFalse(sparse_image.file_map['/system/file1'].extra)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_incompleteRanges(self):
    """Tests the case of ext4 images with holes."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12',
              '/system/app/file3 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
      # '/system/app/file3' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
    self.assertTrue(
        sparse_image.file_map['/system/app/file3'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//init.rc 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/init.rc' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_fileNotFound(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

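  # GetAvbChainedPartitionArg() is expected to produce a single
  # "partition:rollback_index_location:key_path" string; the tests below split
  # on ':' and check for exactly three fields.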
  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('system', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withPrivateKey(self):
    key = os.path.join(self.testdata_dir, 'testkey.key')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_product_key_path': key,
        'avb_product_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('product', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('product', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': 'does-not-exist',
        'avb_system_rollback_index_location': 2,
    }
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    args = common.GetAvbChainedPartitionArg(
        'system', info_dict, pubkey).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_invalidKey(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    self.assertRaises(
        common.ExternalError, common.GetAvbChainedPartitionArg, 'system',
        info_dict)

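  # Baseline misc_info values for a system-as-root, recovery-as-boot target
  # with no standalone recovery partition; individual tests copy this dict and
  # delete keys to model other device configurations.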
  INFO_DICT_DEFAULT = {
      'recovery_api_version': 3,
      'fstab_version': 2,
      'system_root_image': 'true',
      'no_recovery' : 'true',
      'recovery_as_boot': 'true',
  }

  @staticmethod
  def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      info_values = ''.join(
          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
      common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)

      FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
      if info_dict.get('system_root_image') == 'true':
        fstab_values = FSTAB_TEMPLATE.format('/')
      else:
        fstab_values = FSTAB_TEMPLATE.format('/system')
      common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)

      common.ZipWriteStr(
          target_files_zip, 'META/file_contexts', 'file-contexts')
    return target_files

  def test_LoadInfoDict(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_systemRootImageFalse(self):
    # Devices that use neither system-as-root nor recovery-as-boot. Non-A/B
    # devices launched prior to P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['system_root_image']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertNotIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_recoveryAsBootFalse(self):
    # Devices using system-as-root, but with a standalone recovery image.
    # Non-A/B devices launched since P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_noRecoveryTrue(self):
    # Device doesn't have a recovery partition at all.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIsNone(loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_missingMetaMiscInfoTxt(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    common.ZipDelete(target_files, 'META/misc_info.txt')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_repacking(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped, True)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])
    self.assertEqual(
        os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
    self.assertEqual(
        os.path.join(unzipped, 'META', 'root_filesystem_config.txt'),
        loaded_dict['root_fs_config'])

  def test_LoadInfoDict_repackingWithZipFileInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(
          AssertionError, common.LoadInfoDict, target_files_zip, True)


class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
  """Checks the format of install-recovery.sh.

  Its format should match between common.py and validate_target_files.py.
  """

  def setUp(self):
    self._tempdir = common.MakeTempDir()
    # Create a dummy dict that contains the fstab info for boot & recovery.
    self._info = {"fstab" : {}}
    dummy_fstab = [
        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
    self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab)
    # Construct the gzipped recovery.img and boot.img.
    self.recovery_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
        0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
        0x08, 0x00, 0x00, 0x00
    ])
    # echo -n "boot" | gzip -f | hd
    self.boot_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
        0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
    ])

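  # Writes |data| to <tempdir>/<prefix>/<name>, creating parent directories as
  # needed; used as the output sink for common.MakeRecoveryPatch() in the tests
  # below.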
  def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
    loc = os.path.join(self._tempdir, prefix, name)
    if not os.path.exists(os.path.dirname(loc)):
      os.makedirs(os.path.dirname(loc))
    with open(loc, "wb") as f:
      f.write(data)

  def test_full_recovery(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    boot_image = common.File("boot.img", self.boot_data)
    self._info["full_recovery_image"] = "true"

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_recovery_from_boot(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
    boot_image = common.File("boot.img", self.boot_data)
    self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
    # Validate 'recovery-from-boot' with bonus argument.
    self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)


class MockScriptWriter(object):
  """A class that mocks edify_generator.EdifyGenerator."""

  def __init__(self, enable_comments=False):
    self.lines = []
    self.enable_comments = enable_comments

  def Comment(self, comment):
    if self.enable_comments:
      self.lines.append('# {}'.format(comment))

  def AppendExtra(self, extra):
    self.lines.append(extra)

  def __str__(self):
    return '\n'.join(self.lines)


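# Mock replacement for the block-difference objects consumed by
# common.DynamicPartitionsDifference(): it records placeholder patch()/verify()
# edify lines via the script writer instead of producing real patches, so the
# tests can assert on their ordering.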
class MockBlockDifference(object):

  def __init__(self, partition, tgt, src=None):
    self.partition = partition
    self.tgt = tgt
    self.src = src

  def WriteScript(self, script, _, progress=None,
                  write_verify_script=False):
    if progress:
      script.AppendExtra("progress({})".format(progress))
    script.AppendExtra("patch({});".format(self.partition))
    if write_verify_script:
      self.WritePostInstallVerifyScript(script)

  def WritePostInstallVerifyScript(self, script):
    script.AppendExtra("verify({});".format(self.partition))


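# Minimal sparse image stub carrying just blocksize and total_blocks, with the
# size required to be 4096-aligned. The tests below size these stubs (1 GiB,
# 3 GiB, ...) to match the resize/add_group values they assert on.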
class FakeSparseImage(object):

  def __init__(self, size):
    self.blocksize = 4096
    self.total_blocks = size // 4096
    assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)


class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):

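  # Reads the dynamic_partitions_op_list entry back out of the generated zip,
  # dropping comment lines, so the tests can assert on the presence and
  # ordering of the ops.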
Yifan Hong45433e42019-01-18 13:55:25 -08001168 @staticmethod
1169 def get_op_list(output_path):
Tao Baof1113e92019-06-18 12:10:14 -07001170 with zipfile.ZipFile(output_path) as output_zip:
Tao Baoda30cfa2017-12-01 16:19:46 -08001171 with output_zip.open('dynamic_partitions_op_list') as op_list:
1172 return [line.decode().strip() for line in op_list.readlines()
1173 if not line.startswith(b'#')]
Yifan Hong45433e42019-01-18 13:55:25 -08001174
1175 def setUp(self):
1176 self.script = MockScriptWriter()
1177 self.output_path = common.MakeTempFile(suffix='.zip')
1178
  def test_full(self):
    """Tests the script and the op list generated for a full OTA."""
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor
super_partition_groups=group_foo
super_group_foo_group_size={group_size}
super_group_foo_partition_list=system vendor
""".format(group_size=4 * GiB).split("\n"))
    block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
                   MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertEqual(str(self.script).strip(), """
assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
patch(system);
verify(system);
unmap_partition("system");
patch(vendor);
verify(vendor);
unmap_partition("vendor");
""".strip())

    lines = self.get_op_list(self.output_path)

    remove_all_groups = lines.index("remove_all_groups")
    add_group = lines.index("add_group group_foo 4294967296")
    add_vendor = lines.index("add vendor group_foo")
    add_system = lines.index("add system group_foo")
    resize_vendor = lines.index("resize vendor 1073741824")
    resize_system = lines.index("resize system 3221225472")

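    # The byte counts above are the GiB constants written out in full:
    # 4 * GiB == 4294967296, 3 * GiB == 3221225472, 1 * GiB == 1073741824.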
    self.assertLess(remove_all_groups, add_group,
                    "Should add groups after removing all groups")
    self.assertLess(add_group, min(add_vendor, add_system),
                    "Should add partitions after adding group")
    self.assertLess(add_system, resize_system,
                    "Should resize system after adding it")
    self.assertLess(add_vendor, resize_vendor,
                    "Should resize vendor after adding it")

  def test_inc_groups(self):
    """Tests incremental group changes: remove, shrink, grow and add."""
    source_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_bar group_baz
super_group_foo_group_size={group_foo_size}
super_group_bar_group_size={group_bar_size}
""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_baz group_qux
super_group_foo_group_size={group_foo_size}
super_group_baz_group_size={group_baz_size}
super_group_qux_group_size={group_qux_size}
""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
           group_qux_size=1 * GiB).split("\n"))

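    # Between source and target: group_foo shrinks from 4 GiB to 3 GiB,
    # group_bar (3 GiB) is dropped, group_baz is sized at 4 GiB and group_qux
    # (1 GiB) is new.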
    dp_diff = common.DynamicPartitionsDifference(target_info,
                                                 block_diffs=[],
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    lines = self.get_op_list(self.output_path)

    removed = lines.index("remove_group group_bar")
    shrunk = lines.index("resize_group group_foo 3221225472")
    grown = lines.index("resize_group group_baz 4294967296")
    added = lines.index("add_group group_qux 1073741824")

    self.assertLess(max(removed, shrunk),
                    min(grown, added),
                    "Ops that remove / shrink groups must precede ops that "
                    "grow / add groups")

  def test_incremental(self):
    """Tests an incremental OTA that adds, removes, resizes and moves
    partitions across groups."""
    source_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product product_services
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor product product_services
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product odm
super_partition_groups=group_foo group_bar
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor odm
super_group_bar_group_size={group_bar_size}
super_group_bar_partition_list=product
""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))

    block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product", FakeSparseImage(1024 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product_services", None,
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
                                       src=None)]

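    # Between source and target: system grows (1024 -> 1536 MiB), vendor
    # shrinks (1024 -> 512 MiB), product keeps its size but moves from
    # group_foo to group_bar, product_services is dropped and odm is added.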
    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    metadata_idx = self.script.lines.index(
        'assert(update_dynamic_partitions(package_extract_file('
        '"dynamic_partitions_op_list")));')
    self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
    self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
    for p in ("product", "system", "odm"):
      patch_idx = self.script.lines.index("patch({});".format(p))
      verify_idx = self.script.lines.index("verify({});".format(p))
      self.assertLess(metadata_idx, patch_idx,
                      "Should patch {} after updating metadata".format(p))
      self.assertLess(patch_idx, verify_idx,
                      "Should verify {} after patching".format(p))

    self.assertNotIn("patch(product_services);", self.script.lines)

    lines = self.get_op_list(self.output_path)

    remove = lines.index("remove product_services")
    move_product_out = lines.index("move product default")
    shrink = lines.index("resize vendor 536870912")
    shrink_group = lines.index("resize_group group_foo 3221225472")
    add_group_bar = lines.index("add_group group_bar 1073741824")
    add_odm = lines.index("add odm group_foo")
    grow_existing = lines.index("resize system 1610612736")
    grow_added = lines.index("resize odm 1073741824")
    move_product_in = lines.index("move product group_bar")

    max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
    min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)

    self.assertLess(max_idx_move_partition_out_foo, shrink_group,
                    "Must shrink group after partitions inside group are shrunk"
                    " / removed")

    self.assertLess(add_group_bar, move_product_in,
                    "Must add partitions to group after group is added")

    self.assertLess(max_idx_move_partition_out_foo,
                    min_idx_move_partition_in_foo,
                    "Must shrink partitions / remove partitions from group "
                    "before adding / moving partitions into group")

  def test_remove_partition(self):
    """Tests that a partition dropped from the target is removed, not
    patched."""
    source_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
dynamic_partition_list=foo
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=foo
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
""".format(group_foo_size=4 * GiB).split("\n"))

    common.OPTIONS.info_dict = target_info
    common.OPTIONS.target_info_dict = target_info
    common.OPTIONS.source_info_dict = source_info
    common.OPTIONS.cache_size = 4 * 4096

    block_diffs = [common.BlockDifference("foo", EmptyImage(),
                                          src=DataImage("source", pad=True))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertNotIn("block_image_update", str(self.script),
                     "Removed partition should not be patched.")

    lines = self.get_op_list(self.output_path)
    self.assertEqual(lines, ["remove foo"])
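
  # The following is an additional minimal sketch, NOT part of the original
  # suite: it mirrors test_full above with a single, arbitrarily named and
  # sized partition, and only re-checks ordering invariants already asserted
  # there. The expected op strings are assumed to follow the same format that
  # test_full exercises.
  def test_full_single_partition(self):
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system
super_partition_groups=group_foo
super_group_foo_group_size={group_size}
super_group_foo_partition_list=system
""".format(group_size=4 * GiB).split("\n"))
    block_diffs = [MockBlockDifference("system", FakeSparseImage(2 * GiB))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    lines = self.get_op_list(self.output_path)
    self.assertLess(lines.index("remove_all_groups"),
                    lines.index("add_group group_foo 4294967296"),
                    "Should add groups after removing all groups")
    self.assertLess(lines.index("add_group group_foo 4294967296"),
                    lines.index("add system group_foo"),
                    "Should add partitions after adding group")
    self.assertLess(lines.index("add system group_foo"),
                    lines.index("resize system 2147483648"),
                    "Should resize system after adding it")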