#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
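
# Unit tests for the helpers in common.py: zip writing (ZipWrite, ZipWriteStr,
# ZipDelete, UnzipTemp), APK certificate parsing and key extraction, sparse
# image handling (GetSparseImage), target-files info dict loading
# (LoadInfoDict), install-recovery.sh generation, and the
# DynamicPartitionsDifference script generation.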

import copy
import os
import subprocess
import tempfile
import time
import zipfile
from hashlib import sha1

import common
import test_utils
import validate_target_files
from rangelib import RangeSet

from blockimgdiff import EmptyImage, DataImage

KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB


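# get_2gb_string() streams its payload instead of building one huge string:
# each 4 MiB step yields 4 KiB of random bytes followed by a zero fill, so
# callers (e.g. _test_ZipWrite) can hash and write the ~2 GiB of content chunk
# by chunk without ever holding it all in memory.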
def get_2gb_string():
  size = int(2 * GiB + 1)
  block_size = 4 * KiB
  step_size = 4 * MiB
  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
  for _ in range(0, size, step_size):
    yield os.urandom(block_size)
    yield b'\0' * (step_size - block_size)


class CommonZipTest(test_utils.ReleaseToolsTestCase):

  def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
              test_file_name=None, expected_stat=None, expected_mode=0o644,
              expected_compress_type=zipfile.ZIP_STORED):
    # Verify the stat if present.
    if test_file_name is not None:
      new_stat = os.stat(test_file_name)
      self.assertEqual(int(expected_stat.st_mode), int(new_stat.st_mode))
      self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))

    # Reopen the zip file to verify.
    zip_file = zipfile.ZipFile(zip_file_name, "r")

    # Verify the timestamp.
    info = zip_file.getinfo(arcname)
    self.assertEqual(info.date_time, (2009, 1, 1, 0, 0, 0))

    # Verify the file mode.
    mode = (info.external_attr >> 16) & 0o777
    self.assertEqual(mode, expected_mode)

    # Verify the compress type.
    self.assertEqual(info.compress_type, expected_compress_type)

    # Verify the zip contents.
    entry = zip_file.open(arcname)
    sha1_hash = sha1()
    for chunk in iter(lambda: entry.read(4 * MiB), b''):
      sha1_hash.update(chunk)
    self.assertEqual(expected_hash, sha1_hash.hexdigest())
    self.assertIsNone(zip_file.testzip())

  def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
    extra_zipwrite_args = dict(extra_zipwrite_args or {})

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    # File names within an archive strip the leading slash.
    arcname = extra_zipwrite_args.get("arcname", test_file_name)
    if arcname[0] == "/":
      arcname = arcname[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in contents:
        sha1_hash.update(bytes(data))
        test_file.write(bytes(data))
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = extra_zipwrite_args.get("perms", 0o644)
      expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                       zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                   test_file_name, expected_stat, expected_mode,
                   expected_compress_type)
    finally:
      os.remove(test_file_name)
      os.remove(zip_file_name)

  def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
        arcname = zinfo_or_arcname
        expected_mode = extra_args.get("perms", 0o644)
      else:
        arcname = zinfo_or_arcname.filename
        if zinfo_or_arcname.external_attr:
          zinfo_perms = zinfo_or_arcname.external_attr >> 16
        else:
          zinfo_perms = 0o600
        expected_mode = extra_args.get("perms", zinfo_perms)

      common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname,
                   sha1(contents).hexdigest(),
                   expected_mode=expected_mode,
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)

  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    arcname_large = test_file_name
    arcname_small = "bar"

    # File names within an archive strip the leading slash.
    if arcname_large[0] == "/":
      arcname_large = arcname_large[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in large:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = 0o644
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_args)
      common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
      common.ZipClose(zip_file)

      # Verify the contents written by ZipWrite().
      self._verify(zip_file, zip_file_name, arcname_large,
                   sha1_hash.hexdigest(), test_file_name, expected_stat,
                   expected_mode, expected_compress_type)

      # Verify the contents written by ZipWriteStr().
      self._verify(zip_file, zip_file_name, arcname_small,
                   sha1(small).hexdigest(),
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)
      os.remove(test_file_name)

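  # common.ZipWrite() and common.ZipWriteStr() temporarily raise
  # zipfile.ZIP64_LIMIT while writing large entries; this helper checks that
  # the stock limit of (1 << 31) - 1 is back in place once |func| returns.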
  def _test_reset_ZIP64_LIMIT(self, func, *args):
    default_limit = (1 << 31) - 1
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
    func(*args)
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)

  def test_ZipWrite(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents)

  def test_ZipWrite_with_opts(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o777,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o700,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWrite_large_file(self):
    file_contents = get_2gb_string()
    self._test_ZipWrite(file_contents, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWrite_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")

  def test_ZipWriteStr(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string)

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string)

    # Timestamp in the zinfo should be overwritten.
    zinfo.date_time = (2015, 3, 1, 15, 30, 0)
    self._test_ZipWriteStr(zinfo, random_string)

  def test_ZipWriteStr_with_opts(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string, {
        "perms": 0o700,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr("bar", random_string, {
        "compress_type": zipfile.ZIP_STORED,
    })

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o600,
        "compress_type": zipfile.ZIP_STORED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o000,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWriteStr_large_file(self):
    # zipfile.writestr() doesn't work when the str size is over 2GiB even with
    # the workaround. We will only test the case of writing a string into a
    # large archive.
    long_string = get_2gb_string()
    short_string = os.urandom(1024)
    self._test_ZipWriteStr_large_file(long_string, short_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')

  def test_bug21309935(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    try:
      random_string = os.urandom(1024)
      zip_file = zipfile.ZipFile(zip_file_name, "w")
      # Default perms should be 0o644 when passing the filename.
      common.ZipWriteStr(zip_file, "foo", random_string)
      # Honor the specified perms.
      common.ZipWriteStr(zip_file, "bar", random_string, perms=0o755)
      # The perms in zinfo should be untouched.
      zinfo = zipfile.ZipInfo(filename="baz")
      zinfo.external_attr = 0o740 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string)
304 # Explicitly specified perms has the priority.
      zinfo = zipfile.ZipInfo(filename="qux")
      zinfo.external_attr = 0o700 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, "foo",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o644)
      self._verify(zip_file, zip_file_name, "bar",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o755)
      self._verify(zip_file, zip_file_name, "baz",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o740)
      self._verify(zip_file, zip_file_name, "qux",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o400)
    finally:
      os.remove(zip_file_name)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ZipDelete(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    output_zip = zipfile.ZipFile(zip_file.name, 'w',
                                 compression=zipfile.ZIP_DEFLATED)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(os.urandom(1024))
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
      common.ZipClose(output_zip)
    zip_file.close()

    try:
      common.ZipDelete(zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      self.assertRaises(
          common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test3'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertFalse('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)
    finally:
      os.remove(zip_file.name)

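  # Creates a temp zip with the entries Test1, Test2, Foo3, Bar4 and
  # Dir5/Baz5; shared by the UnzipTemp tests below.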
  @staticmethod
  def _test_UnzipTemp_createZipFile():
    zip_file = common.MakeTempFile(suffix='.zip')
    output_zip = zipfile.ZipFile(
        zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
    contents = os.urandom(1024)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(contents)
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
      common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
      common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
      common.ZipClose(output_zip)
    return zip_file

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file)
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1', 'Foo3'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Foo3*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['*Test1', '*Baz*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withEmptyPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, [])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPartiallyMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withNoMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Foo4', 'Nonexistent*'])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))


class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase):
  """Tests the APK utils related functions."""

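  # Sample META/apkcerts.txt contents. Each line names an APK and its signing
  # cert/key pair; compressed APKs additionally carry a compressed="<ext>"
  # attribute, and pre-signed APKs use certificate="PRESIGNED".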
  APKCERTS_TXT1 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/make/target/product/security/platform.x509.pem"'
      ' private_key="build/make/target/product/security/platform.pk8"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
  )

  APKCERTS_CERTMAP1 = {
      'RecoveryLocalizer.apk' : 'certs/devkey',
      'Settings.apk' : 'build/make/target/product/security/platform',
      'TV.apk' : 'PRESIGNED',
  }

  APKCERTS_TXT2 = (
      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
  )

  APKCERTS_CERTMAP2 = {
      'Compressed1.apk' : 'certs/compressed1',
      'Compressed2a.apk' : 'certs/compressed2',
      'Compressed2b.apk' : 'certs/compressed2',
      'Compressed3.apk' : 'certs/compressed3',
  }

  APKCERTS_TXT3 = (
      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
  )

  APKCERTS_CERTMAP3 = {
      'Compressed4.apk' : 'certs/compressed4',
  }

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @staticmethod
  def _write_apkcerts_txt(apkcerts_txt, additional=None):
    if additional is None:
      additional = []
    target_files = common.MakeTempFile(suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
      for entry in additional:
        target_files_zip.writestr(entry, '')
    return target_files

  def test_ReadApkCerts_NoncompressedApks(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
    self.assertIsNone(ext)

  def test_ReadApkCerts_CompressedApks(self):
    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
    # not stored in '.gz' format, so it shouldn't be considered as installed.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
    self.assertEqual('.gz', ext)

    # Alternative case with '.xz'.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
    self.assertEqual('.xz', ext)

  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    certmap_merged = self.APKCERTS_CERTMAP1.copy()
    certmap_merged.update(self.APKCERTS_CERTMAP2)
    self.assertDictEqual(certmap_merged, certmap)
    self.assertEqual('.gz', ext)

  def test_ReadApkCerts_MultipleCompressionMethods(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_MismatchingKeys(self):
    malformed_apkcerts_txt = (
        'name="App1.apk" certificate="certs/cert1.x509.pem"'
        ' private_key="certs/cert2.pk8"\n'
    )
    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ExtractPublicKey(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(pubkey, 'rb') as pubkey_fp:
      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))

  def test_ExtractPublicKey_invalidInput(self):
    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ExtractAvbPublicKey(self):
    privkey = os.path.join(self.testdata_dir, 'testkey.key')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(common.ExtractAvbPublicKey(privkey)) as privkey_fp, \
        open(common.ExtractAvbPublicKey(pubkey)) as pubkey_fp:
      self.assertEqual(privkey_fp.read(), pubkey_fp.read())

  def test_ParseCertificate(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')

    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    expected, _ = proc.communicate()
    self.assertEqual(0, proc.returncode)

    with open(cert) as cert_fp:
      actual = common.ParseCertificate(cert_fp.read())
    self.assertEqual(expected, actual)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual('24', common.GetMinSdkVersion(test_app))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
        {})


class CommonUtilsTest(test_utils.ReleaseToolsTestCase):

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_emptyBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('IMAGES/system.map', '')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("1-5 9-12"),
        },
        sparse_image.file_map)

  def test_GetSparseImage_missingImageFile(self):
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'system2', self.testdata_dir,
        None, False)
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir,
        None, False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_missingBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_notAllowed(self):
    """Tests the case of having overlapping blocks but disallowed."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_allowed(self):
    """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      # Construct an image with a care_map of "0-5 9-12".
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("6-8 13-15"),
            '/system/file1': RangeSet("1-5 9-10"),
            '/system/file2': RangeSet("11-12"),
        },
        sparse_image.file_map)

    # '/system/file2' should be marked with 'uses_shared_blocks', but not with
    # 'incomplete'.
    self.assertTrue(
        sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
    self.assertNotIn(
        'incomplete', sparse_image.file_map['/system/file2'].extra)

    # All other entries should look normal without any tags.
    self.assertFalse(sparse_image.file_map['__COPY'].extra)
    self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
    self.assertFalse(sparse_image.file_map['/system/file1'].extra)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_incompleteRanges(self):
    """Tests the case of ext4 images with holes."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12',
              '/system/app/file3 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
      # '/system/app/file3' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
    self.assertTrue(
        sparse_image.file_map['/system/app/file3'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//init.rc 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/init.rc' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertFalse(sparse_image.file_map['//system/file1'].extra)
    self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_fileNotFound(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('system', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withPrivateKey(self):
    key = os.path.join(self.testdata_dir, 'testkey.key')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_product_key_path': key,
        'avb_product_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('product', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('product', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': 'does-not-exist',
        'avb_system_rollback_index_location': 2,
    }
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    args = common.GetAvbChainedPartitionArg(
        'system', info_dict, pubkey).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_invalidKey(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    self.assertRaises(
        common.ExternalError, common.GetAvbChainedPartitionArg, 'system',
        info_dict)

  INFO_DICT_DEFAULT = {
      'recovery_api_version': 3,
      'fstab_version': 2,
      'system_root_image': 'true',
      'no_recovery' : 'true',
      'recovery_as_boot': 'true',
  }

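  # Builds a minimal target_files zip for the LoadInfoDict tests: it contains
  # META/misc_info.txt rendered from |info_dict|, a one-line recovery fstab at
  # |fstab_path|, and a placeholder META/file_contexts entry.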
  @staticmethod
  def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      info_values = ''.join(
          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.iteritems())])
      common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)

      FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
      if info_dict.get('system_root_image') == 'true':
        fstab_values = FSTAB_TEMPLATE.format('/')
      else:
        fstab_values = FSTAB_TEMPLATE.format('/system')
      common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)

      common.ZipWriteStr(
          target_files_zip, 'META/file_contexts', 'file-contexts')
    return target_files

  def test_LoadInfoDict(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_systemRootImageFalse(self):
    # Devices not using system-as-root nor recovery-as-boot. Non-A/B devices
    # launched prior to P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['system_root_image']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertNotIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_recoveryAsBootFalse(self):
    # Devices using system-as-root, but with standalone recovery image. Non-A/B
    # devices launched since P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_noRecoveryTrue(self):
    # Device doesn't have a recovery partition at all.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['recovery_as_boot']
    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIsNone(loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_missingMetaMiscInfoTxt(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    common.ZipDelete(target_files, 'META/misc_info.txt')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_repacking(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped, True)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])
    self.assertEqual(
        os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
    self.assertEqual(
        os.path.join(unzipped, 'META', 'root_filesystem_config.txt'),
        loaded_dict['root_fs_config'])

  def test_LoadInfoDict_repackingWithZipFileInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r') as target_files_zip:
      self.assertRaises(
          AssertionError, common.LoadInfoDict, target_files_zip, True)


class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
  """Checks the format of install-recovery.sh.

  Its format should match between common.py and validate_target_files.py.
  """

  def setUp(self):
    self._tempdir = common.MakeTempDir()
    # Create a dummy dict that contains the fstab info for boot&recovery.
    self._info = {"fstab" : {}}
    dummy_fstab = [
        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
    self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab)
    # Construct the gzipped recovery.img and boot.img
    self.recovery_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
        0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
        0x08, 0x00, 0x00, 0x00
    ])
    # echo -n "boot" | gzip -f | hd
    self.boot_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
        0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
    ])

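  # Emulates the output sink that common.MakeRecoveryPatch() writes through:
  # stores |data| under <tempdir>/<prefix>/<name>, creating directories as
  # needed.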
  def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
    loc = os.path.join(self._tempdir, prefix, name)
    if not os.path.exists(os.path.dirname(loc)):
      os.makedirs(os.path.dirname(loc))
    with open(loc, "w+") as f:
      f.write(data)

  def test_full_recovery(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    boot_image = common.File("boot.img", self.boot_data)
    self._info["full_recovery_image"] = "true"

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_recovery_from_boot(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
    boot_image = common.File("boot.img", self.boot_data)
    self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
    # Validate 'recovery-from-boot' with bonus argument.
    self._out_tmp_sink("etc/recovery-resource.dat", "bonus", "SYSTEM")
    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)


class MockScriptWriter(object):
  """A class that mocks edify_generator.EdifyGenerator.
  """
  def __init__(self, enable_comments=False):
    self.lines = []
    self.enable_comments = enable_comments
  def Comment(self, comment):
    if self.enable_comments:
      self.lines.append("# {}".format(comment))
  def AppendExtra(self, extra):
    self.lines.append(extra)
  def __str__(self):
    return "\n".join(self.lines)


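# Stand-in for common.BlockDifference: WriteScript() records the edify-style
# progress/patch/verify calls it would emit on the given script writer.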
class MockBlockDifference(object):
  def __init__(self, partition, tgt, src=None):
    self.partition = partition
    self.tgt = tgt
    self.src = src
  def WriteScript(self, script, _, progress=None,
                  write_verify_script=False):
    if progress:
      script.AppendExtra("progress({})".format(progress))
    script.AppendExtra("patch({});".format(self.partition))
    if write_verify_script:
      self.WritePostInstallVerifyScript(script)
  def WritePostInstallVerifyScript(self, script):
    script.AppendExtra("verify({});".format(self.partition))


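# Minimal sparse image stand-in, exposing just the blocksize and total_blocks
# attributes that DynamicPartitionsDifference uses to compute partition sizes.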
class FakeSparseImage(object):
  def __init__(self, size):
    self.blocksize = 4096
    self.total_blocks = size // 4096
    assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)


class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
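  # Reads back dynamic_partitions_op_list from the OTA zip written by
  # dp_diff.WriteScript(), dropping comment lines.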
  @staticmethod
  def get_op_list(output_path):
    with zipfile.ZipFile(output_path) as output_zip:
      with output_zip.open("dynamic_partitions_op_list") as op_list:
        return [line.strip() for line in op_list.readlines()
                if not line.startswith("#")]

  def setUp(self):
    self.script = MockScriptWriter()
    self.output_path = common.MakeTempFile(suffix='.zip')

  def test_full(self):
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor
super_partition_groups=group_foo
super_group_foo_group_size={group_size}
super_group_foo_partition_list=system vendor
""".format(group_size=4 * GiB).split("\n"))
    block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
                   MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertEqual(str(self.script).strip(), """
assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
patch(system);
verify(system);
unmap_partition("system");
patch(vendor);
verify(vendor);
unmap_partition("vendor");
""".strip())

    lines = self.get_op_list(self.output_path)

    remove_all_groups = lines.index("remove_all_groups")
    add_group = lines.index("add_group group_foo 4294967296")
    add_vendor = lines.index("add vendor group_foo")
    add_system = lines.index("add system group_foo")
    resize_vendor = lines.index("resize vendor 1073741824")
    resize_system = lines.index("resize system 3221225472")

    self.assertLess(remove_all_groups, add_group,
                    "Should add groups after removing all groups")
    self.assertLess(add_group, min(add_vendor, add_system),
                    "Should add partitions after adding group")
    self.assertLess(add_system, resize_system,
                    "Should resize system after adding it")
    self.assertLess(add_vendor, resize_vendor,
                    "Should resize vendor after adding it")

  def test_inc_groups(self):
    source_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_bar group_baz
super_group_foo_group_size={group_foo_size}
super_group_bar_group_size={group_bar_size}
""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_baz group_qux
super_group_foo_group_size={group_foo_size}
super_group_baz_group_size={group_baz_size}
super_group_qux_group_size={group_qux_size}
""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
           group_qux_size=1 * GiB).split("\n"))

    dp_diff = common.DynamicPartitionsDifference(target_info,
                                                 block_diffs=[],
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    lines = self.get_op_list(self.output_path)

    removed = lines.index("remove_group group_bar")
    shrunk = lines.index("resize_group group_foo 3221225472")
    grown = lines.index("resize_group group_baz 4294967296")
    added = lines.index("add_group group_qux 1073741824")

    self.assertLess(max(removed, shrunk),
                    min(grown, added),
                    "ops that remove / shrink partitions must precede ops that "
                    "grow / add partitions")

  def test_incremental(self):
    source_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product product_services
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor product product_services
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product odm
super_partition_groups=group_foo group_bar
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor odm
super_group_bar_group_size={group_bar_size}
super_group_bar_partition_list=product
""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))

    block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product", FakeSparseImage(1024 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product_services", None,
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
                                       src=None)]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w') as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    metadata_idx = self.script.lines.index(
        'assert(update_dynamic_partitions(package_extract_file('
        '"dynamic_partitions_op_list")));')
    self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
    self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
    for p in ("product", "system", "odm"):
      patch_idx = self.script.lines.index("patch({});".format(p))
      verify_idx = self.script.lines.index("verify({});".format(p))
      self.assertLess(metadata_idx, patch_idx,
                      "Should patch {} after updating metadata".format(p))
      self.assertLess(patch_idx, verify_idx,
                      "Should verify {} after patching".format(p))

    self.assertNotIn("patch(product_services);", self.script.lines)

    lines = self.get_op_list(self.output_path)

    remove = lines.index("remove product_services")
    move_product_out = lines.index("move product default")
    shrink = lines.index("resize vendor 536870912")
    shrink_group = lines.index("resize_group group_foo 3221225472")
    add_group_bar = lines.index("add_group group_bar 1073741824")
    add_odm = lines.index("add odm group_foo")
    grow_existing = lines.index("resize system 1610612736")
    grow_added = lines.index("resize odm 1073741824")
    move_product_in = lines.index("move product group_bar")

    max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
    min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)

    self.assertLess(max_idx_move_partition_out_foo, shrink_group,
                    "Must shrink group after partitions inside group are shrunk"
                    " / removed")

    self.assertLess(add_group_bar, move_product_in,
                    "Must add partitions to group after group is added")

    self.assertLess(max_idx_move_partition_out_foo,
                    min_idx_move_partition_in_foo,
1315 "Must shrink partitions / remove partitions from group"
1316 "before adding / moving partitions into group")
Yifan Hongbb2658d2019-01-25 12:30:58 -08001317
1318 def test_remove_partition(self):
1319 source_info = common.LoadDictionaryFromLines("""
1320blockimgdiff_versions=3,4
1321use_dynamic_partitions=true
1322dynamic_partition_list=foo
1323super_partition_groups=group_foo
1324super_group_foo_group_size={group_foo_size}
1325super_group_foo_partition_list=foo
1326""".format(group_foo_size=4 * GiB).split("\n"))
1327 target_info = common.LoadDictionaryFromLines("""
1328blockimgdiff_versions=3,4
1329use_dynamic_partitions=true
1330super_partition_groups=group_foo
1331super_group_foo_group_size={group_foo_size}
1332""".format(group_foo_size=4 * GiB).split("\n"))
1333
1334 common.OPTIONS.info_dict = target_info
1335 common.OPTIONS.target_info_dict = target_info
1336 common.OPTIONS.source_info_dict = source_info
1337 common.OPTIONS.cache_size = 4 * 4096
1338
1339 block_diffs = [common.BlockDifference("foo", EmptyImage(),
1340 src=DataImage("source", pad=True))]
1341
1342 dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
1343 source_info_dict=source_info)
1344 with zipfile.ZipFile(self.output_path, 'w') as output_zip:
1345 dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
1346
1347 self.assertNotIn("block_image_update", str(self.script),
Tao Bao2cc0ca12019-03-15 10:44:43 -07001348 "Removed partition should not be patched.")
Yifan Hongbb2658d2019-01-25 12:30:58 -08001349
1350 lines = self.get_op_list(self.output_path)
1351 self.assertEqual(lines, ["remove foo"])