#!/usr/bin/env python
#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import os.path
import shlex
import struct

import common
import sparse_img
from rangelib import RangeSet

OPTIONS = common.OPTIONS
BLOCK_SIZE = common.BLOCK_SIZE
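# Fixed salt passed to build_verity_tree (via "-A") when generating the
# hashtree. Using a fixed value keeps the generated verity data deterministic;
# the validation code below relies on it when comparing a reconstructed tree
# against the metadata embedded in an image.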
FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"


class BuildVerityImageError(Exception):
  """An Exception raised during verity image building."""

  def __init__(self, message):
    Exception.__init__(self, message)


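# The helpers below shell out to the corresponding host tools in "size query"
# mode; each is expected to print the number of bytes of FEC data, hashtree or
# verity metadata needed for a partition of the given size, which we parse as
# an integer.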
def GetVerityFECSize(partition_size):
  cmd = ["fec", "-s", str(partition_size)]
  output = common.RunAndCheckOutput(cmd, verbose=False)
  return int(output)


def GetVerityTreeSize(partition_size):
  cmd = ["build_verity_tree", "-s", str(partition_size)]
  output = common.RunAndCheckOutput(cmd, verbose=False)
  return int(output)


def GetVerityMetadataSize(partition_size):
  cmd = ["build_verity_metadata.py", "size", str(partition_size)]
  output = common.RunAndCheckOutput(cmd, verbose=False)
  return int(output)


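# The total verity overhead for a partition is the hashtree plus the metadata
# block; when FEC is enabled, the FEC data additionally covers both the
# filesystem image and the verity section (hence it is sized for
# partition_size + verity_size).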
def GetVeritySize(partition_size, fec_supported):
  verity_tree_size = GetVerityTreeSize(partition_size)
  verity_metadata_size = GetVerityMetadataSize(partition_size)
  verity_size = verity_tree_size + verity_metadata_size
  if fec_supported:
    fec_size = GetVerityFECSize(partition_size + verity_size)
    return verity_size + fec_size
  return verity_size


def GetSimgSize(image_file):
  simg = sparse_img.SparseImage(image_file, build_map=False)
  return simg.blocksize * simg.total_blocks


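# Note that pad_size is expected to be a multiple of BLOCK_SIZE; any remainder
# would be silently dropped by the integer division below.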
def ZeroPadSimg(image_file, pad_size):
  blocks = pad_size // BLOCK_SIZE
  print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
  simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
  simg.AppendFillChunk(0, blocks)


def AdjustPartitionSizeForVerity(partition_size, fec_supported):
  """Modifies the provided partition size to account for the verity metadata.

  This information is used to size the created image appropriately.

  Args:
    partition_size: the size of the partition to be verified.
    fec_supported: True if forward error correction (FEC) is enabled.

  Returns:
    A tuple of the size of the partition adjusted for verity metadata, and
    the size of verity metadata.
  """
  key = "%d %d" % (partition_size, fec_supported)
  if key in AdjustPartitionSizeForVerity.results:
    return AdjustPartitionSizeForVerity.results[key]

  hi = partition_size
  if hi % BLOCK_SIZE != 0:
    hi = (hi // BLOCK_SIZE) * BLOCK_SIZE

  # The verity tree and FEC sizes depend on the partition size; computing them
  # for the full partition overestimates the overhead, so this first estimate
  # of the usable size is always unnecessarily small. It serves as the lower
  # bound of the search below.
  verity_size = GetVeritySize(hi, fec_supported)
  lo = partition_size - verity_size
  result = lo

  # Do a binary search for the optimal size.
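  # Invariant: "result" is the largest block-aligned image size found so far
  # that, together with its verity overhead, still fits in partition_size;
  # "verity_size" is the overhead computed for that size. The overhead is
  # non-decreasing in the image size, so a binary search over block-aligned
  # candidates converges on the optimum.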
  while lo < hi:
    i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
    v = GetVeritySize(i, fec_supported)
    if i + v <= partition_size:
      if result < i:
        result = i
        verity_size = v
      lo = i + BLOCK_SIZE
    else:
      hi = i

  if OPTIONS.verbose:
    print("Adjusted partition size for verity, partition_size: {},"
          " verity_size: {}".format(result, verity_size))
  AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
  return (result, verity_size)


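# Memoization cache for AdjustPartitionSizeForVerity, keyed by
# (partition_size, fec_supported), to avoid repeatedly shelling out to the
# size-query tools for the same inputs.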
AdjustPartitionSizeForVerity.results = {}


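# Generates the forward error correction data for a verity-enabled image.
# padding_size is forwarded to fec via "-p"; MakeVerityEnabledImage computes
# it as partition_size - image_size - verity_size, i.e. the space left in the
# partition after the filesystem image and verity data.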
def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
                   padding_size):
  cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
         verity_path, verity_fec_path]
  common.RunAndCheckOutput(cmd)


def BuildVerityTree(sparse_image_path, verity_image_path):
  cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
         verity_image_path]
  output = common.RunAndCheckOutput(cmd)
  root, salt = output.split()
  return root, salt


def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                        block_device, signer_path, key, signer_args,
                        verity_disable):
  cmd = ["build_verity_metadata.py", "build", str(image_size),
         verity_metadata_path, root_hash, salt, block_device, signer_path, key]
  if signer_args:
    cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
  if verity_disable:
    cmd.append("--verity_disable")
  common.RunAndCheckOutput(cmd)


def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
  """Appends the unsparse image to the given sparse image.

  Args:
    sparse_image_path: the path to the (sparse) image
    unsparse_image_path: the path to the (unsparse) image
    error_message: the message to raise the error with on failure

  Raises:
    BuildVerityImageError: On error.
  """
  cmd = ["append2simg", sparse_image_path, unsparse_image_path]
  try:
    common.RunAndCheckOutput(cmd)
  except Exception:
    raise BuildVerityImageError(error_message)


def Append(target, file_to_append, error_message):
  """Appends file_to_append to target.

  Raises:
    BuildVerityImageError: On error.
  """
  try:
    with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
      for line in input_file:
        out_file.write(line)
  except IOError:
    raise BuildVerityImageError(error_message)


def BuildVerifiedImage(data_image_path, verity_image_path,
                       verity_metadata_path, verity_fec_path,
                       padding_size, fec_supported):
  Append(
      verity_image_path, verity_metadata_path,
      "Could not append verity metadata!")

  if fec_supported:
    # Build FEC for the entire partition, including metadata.
    BuildVerityFEC(
        data_image_path, verity_image_path, verity_fec_path, padding_size)
    Append(verity_image_path, verity_fec_path, "Could not append FEC!")

  Append2Simg(
      data_image_path, verity_image_path, "Could not append verity data!")


def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
  """Creates an image that is verifiable using dm-verity.

  Args:
    out_file: the location to write the verifiable image at
    fec_supported: True if forward error correction (FEC) should be added.
    prop_dict: a dictionary of properties required for image creation and
               verification

  Raises:
    AssertionError: On invalid partition sizes.
  """
  # Get properties.
  image_size = int(prop_dict["image_size"])
  block_dev = prop_dict["verity_block_device"]
  signer_key = prop_dict["verity_key"] + ".pk8"
  if OPTIONS.verity_signer_path is not None:
    signer_path = OPTIONS.verity_signer_path
  else:
    signer_path = prop_dict["verity_signer_cmd"]
  signer_args = OPTIONS.verity_signer_args

  tempdir_name = common.MakeTempDir(suffix="_verity_images")

  # Get partial image paths.
  verity_image_path = os.path.join(tempdir_name, "verity.img")
  verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
  verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")

  # Build the verity tree and get the root hash and salt.
  root_hash, salt = BuildVerityTree(out_file, verity_image_path)

  # Build the metadata blocks.
  verity_disable = "verity_disable" in prop_dict
  BuildVerityMetadata(
      image_size, verity_metadata_path, root_hash, salt, block_dev, signer_path,
      signer_key, signer_args, verity_disable)

  # Build the full verified image.
  partition_size = int(prop_dict["partition_size"])
  verity_size = int(prop_dict["verity_size"])

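  # The verity-enabled image must fill the partition exactly: the filesystem
  # image, the verity data (hashtree + metadata, and FEC if enabled) and the
  # zero padding together account for partition_size.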
  padding_size = partition_size - image_size - verity_size
  assert padding_size >= 0

  BuildVerifiedImage(
      out_file, verity_image_path, verity_metadata_path, verity_fec_path,
      padding_size, fec_supported)


def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
  """Calculates max image size for a given partition size.

  Args:
    avbtool: String with path to avbtool.
    footer_type: 'hash' or 'hashtree' for generating footer.
    partition_size: The size of the partition in question.
    additional_args: Additional arguments to pass to "avbtool add_hash_footer"
        or "avbtool add_hashtree_footer".

  Returns:
    The maximum image size.

  Raises:
    BuildVerityImageError: On invalid image size.
  """
  cmd = [avbtool, "add_%s_footer" % footer_type,
         "--partition_size", str(partition_size), "--calc_max_image_size"]
  cmd.extend(shlex.split(additional_args))

  output = common.RunAndCheckOutput(cmd)
  image_size = int(output)
  if image_size <= 0:
    raise BuildVerityImageError(
        "Invalid max image size: {}".format(output))
  return image_size


def AVBCalcMinPartitionSize(image_size, size_calculator):
  """Calculates min partition size for a given image size.

  Args:
    image_size: The size of the image in question.
    size_calculator: The function to calculate max image size
        for a given partition size.

  Returns:
    The minimum partition size required to accommodate the image size.
  """
  # Use image size as partition size to approximate final partition size.
  image_ratio = size_calculator(image_size) / float(image_size)

  # Prepare a binary search for the optimal partition size.
  lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - BLOCK_SIZE

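  # The initial estimate may be off in either direction, so the two loops
  # below expand the [lo, hi] bracket (with an exponentially growing step)
  # until it is guaranteed to contain the answer, before the binary search.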
  # Ensure lo is small enough: max_image_size should be <= image_size.
  delta = BLOCK_SIZE
  max_image_size = size_calculator(lo)
  while max_image_size > image_size:
    image_ratio = max_image_size / float(lo)
    lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - delta
    delta *= 2
    max_image_size = size_calculator(lo)

  hi = lo + BLOCK_SIZE

  # Ensure hi is large enough: max_image_size should be >= image_size.
  delta = BLOCK_SIZE
  max_image_size = size_calculator(hi)
  while max_image_size < image_size:
    image_ratio = max_image_size / float(hi)
    hi = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE + delta
    delta *= 2
    max_image_size = size_calculator(hi)

  partition_size = hi

  # Start the binary search.
  while lo < hi:
    mid = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
    max_image_size = size_calculator(mid)
    if max_image_size >= image_size:  # if mid can accommodate image_size
      if mid < partition_size:  # if a smaller partition size is found
        partition_size = mid
      hi = mid
    else:
      lo = mid + BLOCK_SIZE

  if OPTIONS.verbose:
    print("AVBCalcMinPartitionSize({}): partition_size: {}.".format(
        image_size, partition_size))

  return partition_size


def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
                 partition_name, key_path, algorithm, salt,
                 additional_args):
  """Adds dm-verity hashtree and AVB metadata to an image.

  Args:
    image_path: Path to image to modify.
    avbtool: String with path to avbtool.
    footer_type: 'hash' or 'hashtree' for generating footer.
    partition_size: The size of the partition in question.
    partition_name: The name of the partition - will be embedded in metadata.
    key_path: Path to key to use or None.
    algorithm: Name of algorithm to use or None.
    salt: The salt to use (a hexadecimal string) or None.
    additional_args: Additional arguments to pass to "avbtool add_hash_footer"
        or "avbtool add_hashtree_footer".
  """
  cmd = [avbtool, "add_%s_footer" % footer_type,
         "--partition_size", partition_size,
         "--partition_name", partition_name,
         "--image", image_path]

  if key_path and algorithm:
    cmd.extend(["--key", key_path, "--algorithm", algorithm])
  if salt:
    cmd.extend(["--salt", salt])

  cmd.extend(shlex.split(additional_args))

  common.RunAndCheckOutput(cmd)


class HashtreeInfoGenerationError(Exception):
  """An Exception raised during hashtree info generation."""

  def __init__(self, message):
    Exception.__init__(self, message)


class HashtreeInfo(object):
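  """Holds the hashtree information parsed from a verity-enabled image.

  This includes the block ranges of the filesystem and hashtree sections, as
  well as the hash algorithm, salt and root hash of the tree.
  """
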
  def __init__(self):
    self.hashtree_range = None
    self.filesystem_range = None
    self.hash_algorithm = None
    self.salt = None
    self.root_hash = None


def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
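  """Returns a hashtree info generator for the given partition.

  Currently only Verified Boot 1.0 style verity metadata is handled; None is
  returned if info_dict doesn't enable verity for this partition.
  """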
  generator = None
  if (info_dict.get("verity") == "true" and
      info_dict.get("{}_verity_block_device".format(partition_name))):
    partition_size = info_dict["{}_size".format(partition_name)]
    fec_supported = info_dict.get("verity_fec") == "true"
    generator = VerifiedBootVersion1HashtreeInfoGenerator(
        partition_size, block_size, fec_supported)

  return generator


class HashtreeInfoGenerator(object):
  def Generate(self, image):
    raise NotImplementedError

  def DecomposeSparseImage(self, image):
    raise NotImplementedError

  def ValidateHashtree(self):
    raise NotImplementedError


class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
  """A class that parses the hashtree metadata of a given partition."""

  def __init__(self, partition_size, block_size, fec_supported):
    """Initializes the generator with the partition layout properties.

    Args:
      partition_size: The whole size in bytes of a partition, including the
          filesystem size, padding size, and verity size.
      block_size: Expected size in bytes of each block for the sparse image.
      fec_supported: True if the verity section contains fec data.
    """

    self.block_size = block_size
    self.partition_size = partition_size
    self.fec_supported = fec_supported

    self.image = None
    self.filesystem_size = None
    self.hashtree_size = None
    self.metadata_size = None

    self.hashtree_info = HashtreeInfo()

  def DecomposeSparseImage(self, image):
    """Calculates the verity size based on the size of the input image.

    Since we already know the structure of a verity-enabled image, i.e.
    [filesystem, verity_hashtree, verity_metadata, fec_data], we can
    calculate the size and offset of each section.
    """

    self.image = image
    assert self.block_size == image.blocksize
    assert self.partition_size == image.total_blocks * self.block_size, \
        "partition size {} doesn't match the calculated image size." \
        " total_blocks: {}".format(self.partition_size, image.total_blocks)

    adjusted_size, _ = AdjustPartitionSizeForVerity(
        self.partition_size, self.fec_supported)
    assert adjusted_size % self.block_size == 0

    verity_tree_size = GetVerityTreeSize(adjusted_size)
    assert verity_tree_size % self.block_size == 0

    metadata_size = GetVerityMetadataSize(adjusted_size)
    assert metadata_size % self.block_size == 0

    self.filesystem_size = adjusted_size
    self.hashtree_size = verity_tree_size
    self.metadata_size = metadata_size

    self.hashtree_info.filesystem_range = RangeSet(
        data=[0, adjusted_size / self.block_size])
    self.hashtree_info.hashtree_range = RangeSet(
        data=[adjusted_size / self.block_size,
              (adjusted_size + verity_tree_size) / self.block_size])

  def _ParseHashtreeMetadata(self):
    """Parses the hash_algorithm, root_hash, salt from the metadata block."""

    metadata_start = self.filesystem_size + self.hashtree_size
    metadata_range = RangeSet(
        data=[metadata_start / self.block_size,
              (metadata_start + self.metadata_size) / self.block_size])
    meta_data = ''.join(self.image.ReadRangeSet(metadata_range))

    # More info about the metadata structure available in:
    # system/extras/verity/build_verity_metadata.py
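    # The header is 268 bytes: a 4-byte magic number, a 4-byte version, a
    # 256-byte signature and a 4-byte table length, which is what the
    # "II256sI" struct format below unpacks.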
    META_HEADER_SIZE = 268
    header_bin = meta_data[0:META_HEADER_SIZE]
    header = struct.unpack("II256sI", header_bin)

    # header: magic_number, version, signature, table_len
    assert header[0] == 0xb001b001, header[0]
    table_len = header[3]
    verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
    table_entries = verity_table.rstrip().split()

    # Expected verity table format: "1 block_device block_device block_size
    # block_size data_blocks data_blocks hash_algorithm root_hash salt"
    assert len(table_entries) == 10, "Unexpected verity table size {}".format(
        len(table_entries))
    assert (int(table_entries[3]) == self.block_size and
            int(table_entries[4]) == self.block_size)
    assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
            int(table_entries[6]) * self.block_size == self.filesystem_size)

    self.hashtree_info.hash_algorithm = table_entries[7]
    self.hashtree_info.root_hash = table_entries[8]
    self.hashtree_info.salt = table_entries[9]

  def ValidateHashtree(self):
    """Checks that we can reconstruct the verity hash tree."""

    # Writes the filesystem section to a temp file, and calls the
    # build_verity_tree executable to reconstruct the hash tree.
    adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
    with open(adjusted_partition, "wb") as fd:
      self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)

    generated_verity_tree = common.MakeTempFile(prefix="verity")
    root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)

    # The salt should always be identical, as we use a fixed value.
    assert salt == self.hashtree_info.salt, \
        "Calculated salt {} doesn't match the one in metadata {}".format(
            salt, self.hashtree_info.salt)

    if root_hash != self.hashtree_info.root_hash:
      print(
          "Calculated root hash {} doesn't match the one in metadata {}".format(
              root_hash, self.hashtree_info.root_hash))
      return False

    # Reads the generated hash tree and checks if it has the exact same bytes
    # as the one in the sparse image.
    with open(generated_verity_tree, "rb") as fd:
      return fd.read() == ''.join(self.image.ReadRangeSet(
          self.hashtree_info.hashtree_range))

  def Generate(self, image):
    """Parses and validates the hashtree info in a sparse image.

    Returns:
      hashtree_info: The information needed to reconstruct the hashtree.

    Raises:
      HashtreeInfoGenerationError: If we fail to generate the exact bytes of
          the hashtree.
    """

    self.DecomposeSparseImage(image)
    self._ParseHashtreeMetadata()

    if not self.ValidateHashtree():
      raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")

    return self.hashtree_info