#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
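
"""Unit tests for the releasetools common.py helpers: zip writing and deletion,
apkcerts parsing, sparse image handling, misc_info loading, and the
install-recovery script format."""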

import copy
import os
import subprocess
import tempfile
import time
import zipfile
from hashlib import sha1

import common
import test_utils
import validate_target_files
from rangelib import RangeSet

from blockimgdiff import EmptyImage, DataImage

KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB


def get_2gb_string():
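  """Yields roughly 2 GiB of data lazily (4 KiB of random bytes followed by
  zeros in each 4 MiB step), so large-file paths can be exercised without
  holding the whole payload in memory."""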
  size = int(2 * GiB + 1)
  block_size = 4 * KiB
  step_size = 4 * MiB
  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
  for _ in range(0, size, step_size):
    yield os.urandom(block_size)
    yield b'\0' * (step_size - block_size)


class CommonZipTest(test_utils.ReleaseToolsTestCase):

  def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
              test_file_name=None, expected_stat=None, expected_mode=0o644,
              expected_compress_type=zipfile.ZIP_STORED):
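    """Reopens the written zip and checks the entry's timestamp, mode,
    compression type and SHA-1, plus the on-disk stat when provided."""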
    # Verify the stat if present.
    if test_file_name is not None:
      new_stat = os.stat(test_file_name)
      self.assertEqual(int(expected_stat.st_mode), int(new_stat.st_mode))
      self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))

    # Reopen the zip file to verify.
    zip_file = zipfile.ZipFile(zip_file_name, "r")

    # Verify the timestamp.
    info = zip_file.getinfo(arcname)
    self.assertEqual(info.date_time, (2009, 1, 1, 0, 0, 0))

    # Verify the file mode.
    mode = (info.external_attr >> 16) & 0o777
    self.assertEqual(mode, expected_mode)

    # Verify the compress type.
    self.assertEqual(info.compress_type, expected_compress_type)

    # Verify the zip contents.
    entry = zip_file.open(arcname)
    sha1_hash = sha1()
    for chunk in iter(lambda: entry.read(4 * MiB), b''):
      sha1_hash.update(chunk)
    self.assertEqual(expected_hash, sha1_hash.hexdigest())
    self.assertIsNone(zip_file.testzip())

  def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
    extra_zipwrite_args = dict(extra_zipwrite_args or {})

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    # File names within an archive strip the leading slash.
    arcname = extra_zipwrite_args.get("arcname", test_file_name)
    if arcname[0] == "/":
      arcname = arcname[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in contents:
        sha1_hash.update(bytes(data))
        test_file.write(bytes(data))
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = extra_zipwrite_args.get("perms", 0o644)
      expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                       zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                   test_file_name, expected_stat, expected_mode,
                   expected_compress_type)
    finally:
      os.remove(test_file_name)
      os.remove(zip_file_name)

  def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
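    """Exercises common.ZipWriteStr() with either an arcname or a ZipInfo and
    verifies the resulting entry."""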
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
        arcname = zinfo_or_arcname
        expected_mode = extra_args.get("perms", 0o644)
      else:
        arcname = zinfo_or_arcname.filename
        if zinfo_or_arcname.external_attr:
          zinfo_perms = zinfo_or_arcname.external_attr >> 16
        else:
          zinfo_perms = 0o600
        expected_mode = extra_args.get("perms", zinfo_perms)

      common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                   expected_mode=expected_mode,
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)

  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    arcname_large = test_file_name
    arcname_small = "bar"

    # File names within an archive strip the leading slash.
    if arcname_large[0] == "/":
      arcname_large = arcname_large[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in large:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = 0o644
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_args)
      common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
      common.ZipClose(zip_file)

      # Verify the contents written by ZipWrite().
      self._verify(zip_file, zip_file_name, arcname_large,
                   sha1_hash.hexdigest(), test_file_name, expected_stat,
                   expected_mode, expected_compress_type)

      # Verify the contents written by ZipWriteStr().
      self._verify(zip_file, zip_file_name, arcname_small,
                   sha1(small).hexdigest(),
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)
      os.remove(test_file_name)

  def _test_reset_ZIP64_LIMIT(self, func, *args):
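    """Checks that zipfile.ZIP64_LIMIT is left at its default value after
    calling func, which may raise the limit temporarily."""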
    default_limit = (1 << 31) - 1
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
    func(*args)
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)

  def test_ZipWrite(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents)

  def test_ZipWrite_with_opts(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o777,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o700,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWrite_large_file(self):
    file_contents = get_2gb_string()
    self._test_ZipWrite(file_contents, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWrite_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")

  def test_ZipWriteStr(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string)

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string)

    # Timestamp in the zinfo should be overwritten.
    zinfo.date_time = (2015, 3, 1, 15, 30, 0)
    self._test_ZipWriteStr(zinfo, random_string)

  def test_ZipWriteStr_with_opts(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string, {
        "perms": 0o700,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr("bar", random_string, {
        "compress_type": zipfile.ZIP_STORED,
    })

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o600,
        "compress_type": zipfile.ZIP_STORED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o000,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWriteStr_large_file(self):
    # zipfile.writestr() doesn't work when the str size is over 2GiB even with
    # the workaround. We will only test the case of writing a string into a
    # large archive.
    long_string = get_2gb_string()
    short_string = os.urandom(1024)
    self._test_ZipWriteStr_large_file(long_string, short_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')

  def test_bug21309935(self):
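    """Checks default and explicitly specified permission handling in
    ZipWriteStr() (regression test for b/21309935)."""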
    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    try:
      random_string = os.urandom(1024)
      zip_file = zipfile.ZipFile(zip_file_name, "w")
      # Default perms should be 0o644 when passing the filename.
      common.ZipWriteStr(zip_file, "foo", random_string)
      # Honor the specified perms.
      common.ZipWriteStr(zip_file, "bar", random_string, perms=0o755)
      # The perms in zinfo should be untouched.
      zinfo = zipfile.ZipInfo(filename="baz")
      zinfo.external_attr = 0o740 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string)
      # Explicitly specified perms take priority.
      zinfo = zipfile.ZipInfo(filename="qux")
      zinfo.external_attr = 0o700 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, "foo",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o644)
      self._verify(zip_file, zip_file_name, "bar",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o755)
      self._verify(zip_file, zip_file_name, "baz",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o740)
      self._verify(zip_file, zip_file_name, "qux",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o400)
    finally:
      os.remove(zip_file_name)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ZipDelete(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    output_zip = zipfile.ZipFile(zip_file.name, 'w',
                                 compression=zipfile.ZIP_DEFLATED)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(os.urandom(1024))
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
      common.ZipClose(output_zip)
    zip_file.close()

    try:
      common.ZipDelete(zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      self.assertRaises(
          common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test3'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertFalse('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)
    finally:
      os.remove(zip_file.name)

  @staticmethod
  def _test_UnzipTemp_createZipFile():
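    """Creates a temp zip with entries Test1, Test2, Foo3, Bar4 and Dir5/Baz5
    for the UnzipTemp tests."""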
    zip_file = common.MakeTempFile(suffix='.zip')
    output_zip = zipfile.ZipFile(
        zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
    contents = os.urandom(1024)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(contents)
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
      common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
      common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
      common.ZipClose(output_zip)
    return zip_file

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file)
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1', 'Foo3'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Foo3*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['*Test1', '*Baz*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withEmptyPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, [])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPartiallyMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withNoMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Foo4', 'Nonexistent*'])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))


class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase):
  """Tests the APK utils related functions."""

  APKCERTS_TXT1 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/make/target/product/security/platform.x509.pem"'
      ' private_key="build/make/target/product/security/platform.pk8"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
  )

  APKCERTS_CERTMAP1 = {
      'RecoveryLocalizer.apk' : 'certs/devkey',
      'Settings.apk' : 'build/make/target/product/security/platform',
      'TV.apk' : 'PRESIGNED',
  }

  APKCERTS_TXT2 = (
      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
  )

  APKCERTS_CERTMAP2 = {
      'Compressed1.apk' : 'certs/compressed1',
      'Compressed2a.apk' : 'certs/compressed2',
      'Compressed2b.apk' : 'certs/compressed2',
      'Compressed3.apk' : 'certs/compressed3',
  }

  APKCERTS_TXT3 = (
      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
  )

  APKCERTS_CERTMAP3 = {
      'Compressed4.apk' : 'certs/compressed4',
  }

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @staticmethod
  def _write_apkcerts_txt(apkcerts_txt, additional=None):
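    """Builds a target_files zip containing META/apkcerts.txt and any
    additional entries (written as empty files)."""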
    if additional is None:
      additional = []
    target_files = common.MakeTempFile(suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
      for entry in additional:
        target_files_zip.writestr(entry, '')
    return target_files

  def test_ReadApkCerts_NoncompressedApks(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
    self.assertIsNone(ext)

  def test_ReadApkCerts_CompressedApks(self):
    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
    # not stored in '.gz' format, so it shouldn't be considered as installed.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
    self.assertEqual('.gz', ext)

    # Alternative case with '.xz'.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
    self.assertEqual('.xz', ext)

  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    certmap_merged = self.APKCERTS_CERTMAP1.copy()
    certmap_merged.update(self.APKCERTS_CERTMAP2)
    self.assertDictEqual(certmap_merged, certmap)
    self.assertEqual('.gz', ext)

  def test_ReadApkCerts_MultipleCompressionMethods(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_MismatchingKeys(self):
    malformed_apkcerts_txt = (
        'name="App1.apk" certificate="certs/cert1.x509.pem"'
        ' private_key="certs/cert2.pk8"\n'
    )
    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ExtractPublicKey(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(pubkey) as pubkey_fp:
      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))

  def test_ExtractPublicKey_invalidInput(self):
    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ExtractAvbPublicKey(self):
    privkey = os.path.join(self.testdata_dir, 'testkey.key')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
    extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
    with open(extracted_from_privkey, 'rb') as privkey_fp, \
        open(extracted_from_pubkey, 'rb') as pubkey_fp:
      self.assertEqual(privkey_fp.read(), pubkey_fp.read())

  def test_ParseCertificate(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')

    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                      universal_newlines=False)
    expected, _ = proc.communicate()
    self.assertEqual(0, proc.returncode)

    with open(cert) as cert_fp:
      actual = common.ParseCertificate(cert_fp.read())
    self.assertEqual(expected, actual)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual('24', common.GetMinSdkVersion(test_app))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
        {})


class CommonUtilsTest(test_utils.ReleaseToolsTestCase):

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_emptyBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('IMAGES/system.map', '')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("1-5 9-12"),
        },
        sparse_image.file_map)

  def test_GetSparseImage_missingImageFile(self):
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'system2', self.testdata_dir,
        None, False)
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir,
        None, False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_missingBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_notAllowed(self):
    """Tests the case of having overlapping blocks but disallowed."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_allowed(self):
    """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      # Construct an image with a care_map of "0-5 9-12".
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("6-8 13-15"),
            '/system/file1': RangeSet("1-5 9-10"),
            '/system/file2': RangeSet("11-12"),
        },
        sparse_image.file_map)

    # '/system/file2' should be marked with 'uses_shared_blocks', but not with
    # 'incomplete'.
    self.assertTrue(
        sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
    self.assertNotIn(
        'incomplete', sparse_image.file_map['/system/file2'].extra)

    # All other entries should look normal without any tags.
    self.assertFalse(sparse_image.file_map['__COPY'].extra)
    self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
    self.assertFalse(sparse_image.file_map['/system/file1'].extra)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_incompleteRanges(self):
    """Tests the case of ext4 images with holes."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12',
              '/system/app/file3 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
      # '/system/app/file3' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
    self.assertTrue(
        sparse_image.file_map['/system/app/file3'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//init.rc 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/init.rc' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_fileNotFound(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('system', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withPrivateKey(self):
    key = os.path.join(self.testdata_dir, 'testkey.key')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_product_key_path': key,
        'avb_product_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('product', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('product', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': 'does-not-exist',
        'avb_system_rollback_index_location': 2,
    }
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    args = common.GetAvbChainedPartitionArg(
        'system', info_dict, pubkey).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_invalidKey(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    self.assertRaises(
        common.ExternalError, common.GetAvbChainedPartitionArg, 'system',
        info_dict)

  INFO_DICT_DEFAULT = {
      'recovery_api_version': 3,
      'fstab_version': 2,
      'system_root_image': 'true',
      'no_recovery' : 'true',
      'recovery_as_boot': 'true',
  }

  @staticmethod
  def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
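    """Builds a minimal target_files zip with META/misc_info.txt, a recovery
    fstab at fstab_path, and META/file_contexts."""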
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      info_values = ''.join(
          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
      common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)

      FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
      if info_dict.get('system_root_image') == 'true':
        fstab_values = FSTAB_TEMPLATE.format('/')
      else:
        fstab_values = FSTAB_TEMPLATE.format('/system')
      common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)

      common.ZipWriteStr(
          target_files_zip, 'META/file_contexts', 'file-contexts')
    return target_files

  def test_LoadInfoDict(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_systemRootImageFalse(self):
    # Devices not using system-as-root nor recovery-as-boot. Non-A/B devices
    # launched prior to P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['system_root_image']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertNotIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_recoveryAsBootFalse(self):
    # Devices using system-as-root, but with a standalone recovery image.
    # Non-A/B devices launched since P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_noRecoveryTrue(self):
    # Device doesn't have a recovery partition at all.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIsNone(loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_missingMetaMiscInfoTxt(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    common.ZipDelete(target_files, 'META/misc_info.txt')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_repacking(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped, True)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])
    self.assertEqual(
        os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
    self.assertEqual(
        os.path.join(unzipped, 'META', 'root_filesystem_config.txt'),
        loaded_dict['root_fs_config'])

  def test_LoadInfoDict_repackingWithZipFileInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(
          AssertionError, common.LoadInfoDict, target_files_zip, True)


class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
  """Checks the format of install-recovery.sh.

  Its format should match between common.py and validate_target_files.py.
  """

  def setUp(self):
    self._tempdir = common.MakeTempDir()
    # Create a dummy dict that contains the fstab info for boot&recovery.
    self._info = {"fstab" : {}}
    dummy_fstab = [
        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
    self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab)
    # Construct the gzipped recovery.img and boot.img
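    # Presumably: echo -n "recovery" | gzip -f | hd (mirrors the boot.img
    # comment below).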
    self.recovery_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
        0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
        0x08, 0x00, 0x00, 0x00
    ])
    # echo -n "boot" | gzip -f | hd
    self.boot_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
        0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
    ])

  def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
    loc = os.path.join(self._tempdir, prefix, name)
    if not os.path.exists(os.path.dirname(loc)):
      os.makedirs(os.path.dirname(loc))
    with open(loc, "wb") as f:
      f.write(data)

  def test_full_recovery(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    boot_image = common.File("boot.img", self.boot_data)
    self._info["full_recovery_image"] = "true"

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_recovery_from_boot(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
    boot_image = common.File("boot.img", self.boot_data)
    self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
    # Validate 'recovery-from-boot' with bonus argument.
    self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)


class MockScriptWriter(object):
  """A class that mocks edify_generator.EdifyGenerator."""

  def __init__(self, enable_comments=False):
    self.lines = []
    self.enable_comments = enable_comments

  def Comment(self, comment):
    if self.enable_comments:
      self.lines.append('# {}'.format(comment))

  def AppendExtra(self, extra):
    self.lines.append(extra)

  def __str__(self):
    return '\n'.join(self.lines)


class MockBlockDifference(object):
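  """A minimal stand-in for a block difference object; WriteScript() just
  records patch/verify markers via the mock script writer."""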

  def __init__(self, partition, tgt, src=None):
    self.partition = partition
    self.tgt = tgt
    self.src = src

  def WriteScript(self, script, _, progress=None,
                  write_verify_script=False):
    if progress:
      script.AppendExtra("progress({})".format(progress))
    script.AppendExtra("patch({});".format(self.partition))
    if write_verify_script:
      self.WritePostInstallVerifyScript(script)

  def WritePostInstallVerifyScript(self, script):
    script.AppendExtra("verify({});".format(self.partition))


class FakeSparseImage(object):
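  """A fake sparse image exposing only blocksize and total_blocks."""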

  def __init__(self, size):
    self.blocksize = 4096
    self.total_blocks = size // 4096
    assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)


class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):

  @staticmethod
  def get_op_list(output_path):
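    """Returns the non-comment lines of dynamic_partitions_op_list from the
    given output zip."""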
Tao Baof1113e92019-06-18 12:10:14 -07001172 with zipfile.ZipFile(output_path) as output_zip:
Tao Baoda30cfa2017-12-01 16:19:46 -08001173 with output_zip.open('dynamic_partitions_op_list') as op_list:
1174 return [line.decode().strip() for line in op_list.readlines()
1175 if not line.startswith(b'#')]
Yifan Hong45433e42019-01-18 13:55:25 -08001176
1177 def setUp(self):
1178 self.script = MockScriptWriter()
1179 self.output_path = common.MakeTempFile(suffix='.zip')
1180
1181 def test_full(self):
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor
super_partition_groups=group_foo
super_group_foo_group_size={group_size}
super_group_foo_partition_list=system vendor
""".format(group_size=4 * GiB).split("\n"))
    block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
                   MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertEqual(str(self.script).strip(), """
assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
patch(system);
verify(system);
unmap_partition("system");
patch(vendor);
verify(vendor);
unmap_partition("vendor");
""".strip())

    lines = self.get_op_list(self.output_path)

    remove_all_groups = lines.index("remove_all_groups")
    add_group = lines.index("add_group group_foo 4294967296")
    add_vendor = lines.index("add vendor group_foo")
    add_system = lines.index("add system group_foo")
    resize_vendor = lines.index("resize vendor 1073741824")
    resize_system = lines.index("resize system 3221225472")

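    # The op list must tear down the old groups first, then add the group,
    # then add each partition to it, and only then resize the partitions.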
    self.assertLess(remove_all_groups, add_group,
                    "Should add groups after removing all groups")
    self.assertLess(add_group, min(add_vendor, add_system),
                    "Should add partitions after adding group")
    self.assertLess(add_system, resize_system,
                    "Should resize system after adding it")
    self.assertLess(add_vendor, resize_vendor,
                    "Should resize vendor after adding it")

  def test_inc_groups(self):
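    """Tests an incremental update that only changes groups, not partitions."""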
    source_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_bar group_baz
super_group_foo_group_size={group_foo_size}
super_group_bar_group_size={group_bar_size}
""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_baz group_qux
super_group_foo_group_size={group_foo_size}
super_group_baz_group_size={group_baz_size}
super_group_qux_group_size={group_qux_size}
""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
           group_qux_size=1 * GiB).split("\n"))

    dp_diff = common.DynamicPartitionsDifference(target_info,
                                                 block_diffs=[],
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    lines = self.get_op_list(self.output_path)

    removed = lines.index("remove_group group_bar")
    shrunk = lines.index("resize_group group_foo 3221225472")
    grown = lines.index("resize_group group_baz 4294967296")
    added = lines.index("add_group group_qux 1073741824")

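    # Freeing space (removing or shrinking a group) has to happen before
    # consuming it (growing or adding a group).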
    self.assertLess(max(removed, shrunk),
                    min(grown, added),
                    "ops that remove / shrink partitions must precede ops that "
                    "grow / add partitions")

  def test_incremental(self):
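    """Tests an incremental update with partitions added, removed and moved."""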
    source_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product product_services
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor product product_services
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product odm
super_partition_groups=group_foo group_bar
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor odm
super_group_bar_group_size={group_bar_size}
super_group_bar_partition_list=product
""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))

    block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product", FakeSparseImage(1024 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product_services", None,
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
                                       src=None)]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

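    # vendor shrinks, so it must be patched before the dynamic partition
    # metadata is updated; everything else is patched (then verified) after.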
    metadata_idx = self.script.lines.index(
        'assert(update_dynamic_partitions(package_extract_file('
        '"dynamic_partitions_op_list")));')
    self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
    self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
    for p in ("product", "system", "odm"):
      patch_idx = self.script.lines.index("patch({});".format(p))
      verify_idx = self.script.lines.index("verify({});".format(p))
      self.assertLess(metadata_idx, patch_idx,
                      "Should patch {} after updating metadata".format(p))
      self.assertLess(patch_idx, verify_idx,
                      "Should verify {} after patching".format(p))

    self.assertNotIn("patch(product_services);", self.script.lines)

    lines = self.get_op_list(self.output_path)

    remove = lines.index("remove product_services")
    move_product_out = lines.index("move product default")
    shrink = lines.index("resize vendor 536870912")
    shrink_group = lines.index("resize_group group_foo 3221225472")
    add_group_bar = lines.index("add_group group_bar 1073741824")
    add_odm = lines.index("add odm group_foo")
    grow_existing = lines.index("resize system 1610612736")
    grow_added = lines.index("resize odm 1073741824")
    move_product_in = lines.index("move product group_bar")

    max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
    min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)

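    # Partitions must be shrunk, removed, or moved out of group_foo before the
    # group itself shrinks and before anything is added, grown, or moved in;
    # group_bar must exist before product moves into it.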
    self.assertLess(max_idx_move_partition_out_foo, shrink_group,
                    "Must shrink group after partitions inside group are shrunk"
                    " / removed")

    self.assertLess(add_group_bar, move_product_in,
                    "Must add partitions to group after group is added")

    self.assertLess(max_idx_move_partition_out_foo,
                    min_idx_move_partition_in_foo,
                    "Must shrink partitions / remove partitions from group "
                    "before adding / moving partitions into group")

  def test_remove_partition(self):
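    """Tests that a dropped partition is removed, never patched."""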
    source_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
dynamic_partition_list=foo
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=foo
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
""".format(group_foo_size=4 * GiB).split("\n"))

    common.OPTIONS.info_dict = target_info
    common.OPTIONS.target_info_dict = target_info
    common.OPTIONS.source_info_dict = source_info
    common.OPTIONS.cache_size = 4 * 4096

    block_diffs = [common.BlockDifference("foo", EmptyImage(),
                                          src=DataImage("source", pad=True))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

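    # A real BlockDifference emits block_image_update when patching; a removed
    # partition must not produce any such command.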
    self.assertNotIn("block_image_update", str(self.script),
                     "Removed partition should not be patched.")

    lines = self.get_op_list(self.output_path)
    self.assertEqual(lines, ["remove foo"])