#!/usr/bin/env python
#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

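"""Utilities for parsing the hashtree info from a verity-enabled sparse image.

The generator decomposes the image into its filesystem, hashtree and metadata
sections, extracts the hash algorithm, root hash and salt from the verity
metadata, and validates the result by rebuilding the hashtree.

A minimal usage sketch (assuming `image` is a sparse image object such as
sparse_img.SparseImage, and `info_dict` is the build's info dict):

  generator = CreateHashtreeInfoGenerator(
      "system", image.blocksize, info_dict)
  if generator:
    hashtree_info = generator.Generate(image)
"""
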
from __future__ import print_function

import struct

import common
from build_image import (AdjustPartitionSizeForVerity, GetVerityTreeSize,
                         GetVerityMetadataSize, BuildVerityTree)
from rangelib import RangeSet


class HashtreeInfoGenerationError(Exception):
  """An Exception raised during hashtree info generation."""

  def __init__(self, message):
    Exception.__init__(self, message)


class HashtreeInfo(object):
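  """Holds the parsed hashtree properties of a verity-enabled image.

  The filesystem and hashtree ranges are block ranges (RangeSet) within the
  image; the hash algorithm, salt and root hash come from the verity metadata.
  """
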
  def __init__(self):
    self.hashtree_range = None
    self.filesystem_range = None
    self.hash_algorithm = None
    self.salt = None
    self.root_hash = None


def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
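  """Returns a hashtree info generator for the given partition, or None.

  Only Verified Boot 1.0 (verity) partitions that define a verity block device
  in info_dict are supported; None is returned for everything else.
  """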
  generator = None
  if (info_dict.get("verity") == "true" and
      info_dict.get("{}_verity_block_device".format(partition_name))):
    partition_size = info_dict["{}_size".format(partition_name)]
    fec_supported = info_dict.get("verity_fec") == "true"
    generator = VerifiedBootVersion1HashtreeInfoGenerator(
        partition_size, block_size, fec_supported)

  return generator


class HashtreeInfoGenerator(object):
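  """The base class for parsing the hashtree info in a partition image."""
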
  def Generate(self, image):
    raise NotImplementedError

  def DecomposeSparseImage(self, image):
    raise NotImplementedError

  def ValidateHashtree(self):
    raise NotImplementedError


class VerifiedBootVersion2HashtreeInfoGenerator(HashtreeInfoGenerator):
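  """A placeholder for Verified Boot 2.0 (AVB) images; not implemented here."""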
  pass


class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
  """A class that parses the hashtree metadata of a given partition."""

  def __init__(self, partition_size, block_size, fec_supported):
    """Initializes the generator with the partition's size properties.

    Arguments:
      partition_size: The whole size in bytes of a partition, including the
          filesystem size, padding size, and verity size.
      block_size: Expected size in bytes of each block for the sparse image.
      fec_supported: True if the verity section contains fec data.
    """

    self.block_size = block_size
    self.partition_size = partition_size
    self.fec_supported = fec_supported

    self.image = None
    self.filesystem_size = None
    self.hashtree_size = None
    self.metadata_size = None

    self.hashtree_info = HashtreeInfo()

  def DecomposeSparseImage(self, image):
    """Calculates the verity size based on the size of the input image.

    Since we already know the structure of a verity-enabled image, i.e.
    [filesystem, verity_hashtree, verity_metadata, fec_data], we can
    calculate the size and offset of each section.
    """

    self.image = image
    assert self.block_size == image.blocksize
    assert self.partition_size == image.total_blocks * self.block_size, \
        "partition size {} doesn't match the calculated image size;" \
        " total_blocks: {}".format(self.partition_size, image.total_blocks)

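    # The adjusted size is the space left for the filesystem after reserving
    # room for the hashtree, the verity metadata and, if enabled, the FEC
    # data.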
    adjusted_size, _ = AdjustPartitionSizeForVerity(
        self.partition_size, self.fec_supported)
    assert adjusted_size % self.block_size == 0

    verity_tree_size = GetVerityTreeSize(adjusted_size)
    assert verity_tree_size % self.block_size == 0

    metadata_size = GetVerityMetadataSize(adjusted_size)
    assert metadata_size % self.block_size == 0

    self.filesystem_size = adjusted_size
    self.hashtree_size = verity_tree_size
    self.metadata_size = metadata_size

    self.hashtree_info.filesystem_range = RangeSet(
        data=[0, adjusted_size / self.block_size])
    self.hashtree_info.hashtree_range = RangeSet(
        data=[adjusted_size / self.block_size,
              (adjusted_size + verity_tree_size) / self.block_size])

  def _ParseHashtreeMetadata(self):
    """Parses the hash_algorithm, root_hash, salt from the metadata block."""

    metadata_start = self.filesystem_size + self.hashtree_size
    metadata_range = RangeSet(
        data=[metadata_start / self.block_size,
              (metadata_start + self.metadata_size) / self.block_size])
    meta_data = ''.join(self.image.ReadRangeSet(metadata_range))

    # More info about the metadata structure available in:
    # system/extras/verity/build_verity_metadata.py
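    # The 268-byte header below is packed as "II256sI": a 4-byte magic number,
    # a 4-byte version, a 256-byte signature and a 4-byte table length
    # (4 + 4 + 256 + 4 = 268).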
    META_HEADER_SIZE = 268
    header_bin = meta_data[0:META_HEADER_SIZE]
    header = struct.unpack("II256sI", header_bin)

    # header: magic_number, version, signature, table_len
    assert header[0] == 0xb001b001, header[0]
    table_len = header[3]
    verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
    table_entries = verity_table.rstrip().split()

    # Expected verity table format: "1 block_device block_device block_size
    # block_size data_blocks data_blocks hash_algorithm root_hash salt"
    assert len(table_entries) == 10, "Unexpected verity table size {}".format(
        len(table_entries))
    assert (int(table_entries[3]) == self.block_size and
            int(table_entries[4]) == self.block_size)
    assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
            int(table_entries[6]) * self.block_size == self.filesystem_size)

    self.hashtree_info.hash_algorithm = table_entries[7]
    self.hashtree_info.root_hash = table_entries[8]
    self.hashtree_info.salt = table_entries[9]

  def ValidateHashtree(self):
    """Checks that we can reconstruct the verity hash tree."""

    # Writes the filesystem section to a temp file, and calls the
    # build_verity_tree executable to construct the hash tree.
    adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
    with open(adjusted_partition, "wb") as fd:
      self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)

    generated_verity_tree = common.MakeTempFile(prefix="verity")
    prop_dict = {}
    BuildVerityTree(adjusted_partition, generated_verity_tree, prop_dict)

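    # BuildVerityTree fills prop_dict with the salt and root hash of the tree
    # it just built; both are expected to match what we parsed from the
    # metadata earlier.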
    assert prop_dict["verity_salt"] == self.hashtree_info.salt
    if prop_dict["verity_root_hash"] != self.hashtree_info.root_hash:
      print("Calculated verity root hash {} doesn't match the one in metadata"
            " {}".format(prop_dict["verity_root_hash"],
                         self.hashtree_info.root_hash))
      return False

    # Reads the generated hash tree and checks if it has the exact same bytes
    # as the one in the sparse image.
    with open(generated_verity_tree, "rb") as fd:
      return fd.read() == ''.join(self.image.ReadRangeSet(
          self.hashtree_info.hashtree_range))

  def Generate(self, image):
    """Parses and validates the hashtree info in a sparse image.

    Returns:
      hashtree_info: The information needed to reconstruct the hashtree.

    Raises:
      HashtreeInfoGenerationError: If we fail to generate the exact bytes of
          the hashtree.
    """

    self.DecomposeSparseImage(image)
    self._ParseHashtreeMetadata()

    if not self.ValidateHashtree():
      raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")

    return self.hashtree_info