#!/usr/bin/env python
#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import struct

import common
from build_image import (AdjustPartitionSizeForVerity, GetVerityTreeSize,
                         GetVerityMetadataSize, BuildVerityTree)
from rangelib import RangeSet


class HashtreeInfoGenerationError(Exception):
  """An Exception raised during hashtree info generation."""

  def __init__(self, message):
    Exception.__init__(self, message)


class HashtreeInfo(object):
  def __init__(self):
    self.hashtree_range = None
    self.filesystem_range = None
    self.hash_algorithm = None
    self.salt = None
    self.root_hash = None


def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
  """Returns a hashtree info generator for the given partition.

  Returns a VerifiedBootVersion1HashtreeInfoGenerator if the info dict enables
  verity for this partition; otherwise returns None.
  """
  generator = None
  if (info_dict.get("verity") == "true" and
      info_dict.get("{}_verity_block_device".format(partition_name))):
    partition_size = info_dict["{}_size".format(partition_name)]
    fec_supported = info_dict.get("verity_fec") == "true"
    generator = VerifiedBootVersion1HashtreeInfoGenerator(
        partition_size, block_size, fec_supported)

  return generator


class HashtreeInfoGenerator(object):
  """Abstract class to generate the hashtree info for a partition image."""

  def Generate(self, image):
    raise NotImplementedError

  def DecomposeSparseImage(self, image):
    raise NotImplementedError

  def ValidateHashtree(self):
    raise NotImplementedError


class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
  """A class that parses the hashtree metadata of a given partition."""

  def __init__(self, partition_size, block_size, fec_supported):
    """Initializes the hashtree info generator with the partition properties.

    Arguments:
      partition_size: The whole size in bytes of a partition, including the
          filesystem size, padding size, and verity size.
      block_size: Expected size in bytes of each block for the sparse image.
      fec_supported: True if the verity section contains fec data.
    """

    self.block_size = block_size
    self.partition_size = partition_size
    self.fec_supported = fec_supported

    self.image = None
    self.filesystem_size = None
    self.hashtree_size = None
    self.metadata_size = None

    self.hashtree_info = HashtreeInfo()

  def DecomposeSparseImage(self, image):
    """Calculates the verity size based on the size of the input image.

    Since we already know the structure of a verity-enabled image to be
    [filesystem, verity_hashtree, verity_metadata, fec_data], we can
    calculate the size and offset of each section.
    """

    self.image = image
    assert self.block_size == image.blocksize
    assert self.partition_size == image.total_blocks * self.block_size, \
        "partition size {} doesn't match the calculated image size;" \
        " total_blocks: {}".format(self.partition_size, image.total_blocks)

    adjusted_size, _ = AdjustPartitionSizeForVerity(
        self.partition_size, self.fec_supported)
    assert adjusted_size % self.block_size == 0

    verity_tree_size = GetVerityTreeSize(adjusted_size)
    assert verity_tree_size % self.block_size == 0

    metadata_size = GetVerityMetadataSize(adjusted_size)
    assert metadata_size % self.block_size == 0

    self.filesystem_size = adjusted_size
    self.hashtree_size = verity_tree_size
    self.metadata_size = metadata_size

    self.hashtree_info.filesystem_range = RangeSet(
        data=[0, adjusted_size / self.block_size])
    self.hashtree_info.hashtree_range = RangeSet(
        data=[adjusted_size / self.block_size,
              (adjusted_size + verity_tree_size) / self.block_size])

  def _ParseHashtreeMetadata(self):
    """Parses the hash_algorithm, root_hash, salt from the metadata block."""

    metadata_start = self.filesystem_size + self.hashtree_size
    metadata_range = RangeSet(
        data=[metadata_start / self.block_size,
              (metadata_start + self.metadata_size) / self.block_size])
    meta_data = ''.join(self.image.ReadRangeSet(metadata_range))

    # More info about the metadata structure is available in:
    # system/extras/verity/build_verity_metadata.py
    # The header occupies the first 268 bytes: a 4-byte magic number, a 4-byte
    # version, a 256-byte signature, and a 4-byte table length ("II256sI").
    META_HEADER_SIZE = 268
    header_bin = meta_data[0:META_HEADER_SIZE]
    header = struct.unpack("II256sI", header_bin)

    # header: magic_number, version, signature, table_len
    assert header[0] == 0xb001b001, header[0]
    table_len = header[3]
    verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
    table_entries = verity_table.rstrip().split()

    # Expected verity table format: "1 block_device block_device block_size
    # block_size data_blocks data_blocks hash_algorithm root_hash salt"
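    # For illustration only, a parsed table of that shape might look like the
    # following (the device path, block counts and digests are made up):
    #   "1 /dev/block/system /dev/block/system 4096 4096 262144 262144 sha256
    #    5f3c... 8e41..."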
    assert len(table_entries) == 10, "Unexpected verity table size {}".format(
        len(table_entries))
    assert (int(table_entries[3]) == self.block_size and
            int(table_entries[4]) == self.block_size)
    assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
            int(table_entries[6]) * self.block_size == self.filesystem_size)

    self.hashtree_info.hash_algorithm = table_entries[7]
    self.hashtree_info.root_hash = table_entries[8]
    self.hashtree_info.salt = table_entries[9]

157
158 def ValidateHashtree(self):
159 """Checks that we can reconstruct the verity hash tree."""
160
161 # Writes the file system section to a temp file; and calls the executable
162 # build_verity_tree to construct the hash tree.
163 adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
164 with open(adjusted_partition, "wb") as fd:
165 self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)
166
167 generated_verity_tree = common.MakeTempFile(prefix="verity")
Tao Bao2f057462018-10-03 16:31:18 -0700168 root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)
Tianjie Xu67c7cbb2018-08-30 00:32:07 -0700169
Tao Bao2f057462018-10-03 16:31:18 -0700170 # The salt should be always identical, as we use fixed value.
171 assert salt == self.hashtree_info.salt, \
172 "Calculated salt {} doesn't match the one in metadata {}".format(
173 salt, self.hashtree_info.salt)
174
175 if root_hash != self.hashtree_info.root_hash:
176 print(
177 "Calculated root hash {} doesn't match the one in metadata {}".format(
178 root_hash, self.hashtree_info.root_hash))
Tianjie Xu67c7cbb2018-08-30 00:32:07 -0700179 return False
180
181 # Reads the generated hash tree and checks if it has the exact same bytes
182 # as the one in the sparse image.
183 with open(generated_verity_tree, "rb") as fd:
184 return fd.read() == ''.join(self.image.ReadRangeSet(
185 self.hashtree_info.hashtree_range))
186
187 def Generate(self, image):
188 """Parses and validates the hashtree info in a sparse image.
189
190 Returns:
191 hashtree_info: The information needed to reconstruct the hashtree.
Tao Bao2f057462018-10-03 16:31:18 -0700192
Tianjie Xu67c7cbb2018-08-30 00:32:07 -0700193 Raises:
194 HashtreeInfoGenerationError: If we fail to generate the exact bytes of
195 the hashtree.
196 """
197
198 self.DecomposeSparseImage(image)
199 self._ParseHashtreeMetadata()
200
201 if not self.ValidateHashtree():
202 raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")
203
204 return self.hashtree_info
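

# A minimal usage sketch, with hypothetical inputs: info_dict would typically
# be the dict returned by common.LoadInfoDict(), system_image a sparse image
# object (e.g. sparse_img.SparseImage), and "system" / 4096 are example values.
#
#   generator = CreateHashtreeInfoGenerator("system", 4096, info_dict)
#   if generator:
#     hashtree_info = generator.Generate(system_image)
#     print(hashtree_info.hash_algorithm, hashtree_info.root_hash,
#           hashtree_info.salt)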