#!/usr/bin/env python

# Copyright 2016, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Command-line tool for working with Brillo Verified Boot images."""
18
19import argparse
20import hashlib
21import os
22import struct
23import subprocess
24import sys
25
26import Crypto.PublicKey.RSA
27
# Version of the vbmeta on-disk format produced by this tool.
# Keep in sync with avb_vbmeta_header.h.
AVB_VERSION_MAJOR = 1
AVB_VERSION_MINOR = 0
31
32
class AvbError(Exception):
  """Application-specific errors.

  These errors represent issues for which a stack-trace should not be
  presented.

  Attributes:
    message: Error message.
  """

  def __init__(self, message):
    # Delegate to Exception so str() and .args behave as usual.
    super(AvbError, self).__init__(message)
45
46
class Algorithm(object):
  """Value object holding the parameters of one signing algorithm.

  See the avb_vbmeta_header.h file for more details about
  algorithms.

  The constant |ALGORITHMS| maps human-readable names
  (e.g 'SHA256_RSA2048') to instances of this class.

  Attributes:
    algorithm_type: Integer code corresponding to |AvbAlgorithmType|.
    hash_num_bytes: Number of bytes used to store the hash.
    signature_num_bytes: Number of bytes used to store the signature.
    public_key_num_bytes: Number of bytes used to store the public key.
    padding: Padding used for signature, if any.
  """

  def __init__(self, algorithm_type, hash_num_bytes, signature_num_bytes,
               public_key_num_bytes, padding):
    # Plain value object: every constructor argument is stored verbatim.
    self.padding = padding
    self.public_key_num_bytes = public_key_num_bytes
    self.signature_num_bytes = signature_num_bytes
    self.hash_num_bytes = hash_num_bytes
    self.algorithm_type = algorithm_type
71
# This must be kept in sync with the avb_crypto.h file.
#
# The PKCS1-v1.5 padding is a blob of binary DER of ASN.1 and is
# obtained from section 5.2.2 of RFC 4880.
#
# public_key_num_bytes is an 8-byte header plus the modulus and r^2,
# each key-size bits long. Floor division ('//') is identical to '/'
# for ints under Python 2 but keeps the value an int under Python 3.
ALGORITHMS = {
    'NONE': Algorithm(
        algorithm_type=0,        # AVB_ALGORITHM_TYPE_NONE
        hash_num_bytes=0,
        signature_num_bytes=0,
        public_key_num_bytes=0,
        padding=[]),
    'SHA256_RSA2048': Algorithm(
        algorithm_type=1,        # AVB_ALGORITHM_TYPE_SHA256_RSA2048
        hash_num_bytes=32,
        signature_num_bytes=256,
        public_key_num_bytes=8 + 2*2048//8,
        padding=[
            # PKCS1-v1_5 padding
            0x00, 0x01] + [0xff]*202 + [0x00] + [
                # ASN.1 header
                0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
                0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01, 0x05,
                0x00, 0x04, 0x20,
            ]),
    'SHA256_RSA4096': Algorithm(
        algorithm_type=2,        # AVB_ALGORITHM_TYPE_SHA256_RSA4096
        hash_num_bytes=32,
        signature_num_bytes=512,
        public_key_num_bytes=8 + 2*4096//8,
        padding=[
            # PKCS1-v1_5 padding
            0x00, 0x01] + [0xff]*458 + [0x00] + [
                # ASN.1 header
                0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
                0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01, 0x05,
                0x00, 0x04, 0x20,
            ]),
    'SHA256_RSA8192': Algorithm(
        algorithm_type=3,        # AVB_ALGORITHM_TYPE_SHA256_RSA8192
        hash_num_bytes=32,
        signature_num_bytes=1024,
        public_key_num_bytes=8 + 2*8192//8,
        padding=[
            # PKCS1-v1_5 padding
            0x00, 0x01] + [0xff]*970 + [0x00] + [
                # ASN.1 header
                0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
                0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01, 0x05,
                0x00, 0x04, 0x20,
            ]),
    'SHA512_RSA2048': Algorithm(
        algorithm_type=4,        # AVB_ALGORITHM_TYPE_SHA512_RSA2048
        hash_num_bytes=64,
        signature_num_bytes=256,
        public_key_num_bytes=8 + 2*2048//8,
        padding=[
            # PKCS1-v1_5 padding
            0x00, 0x01] + [0xff]*170 + [0x00] + [
                # ASN.1 header
                0x30, 0x51, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
                0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03, 0x05,
                0x00, 0x04, 0x40
            ]),
    'SHA512_RSA4096': Algorithm(
        algorithm_type=5,        # AVB_ALGORITHM_TYPE_SHA512_RSA4096
        hash_num_bytes=64,
        signature_num_bytes=512,
        public_key_num_bytes=8 + 2*4096//8,
        padding=[
            # PKCS1-v1_5 padding
            0x00, 0x01] + [0xff]*426 + [0x00] + [
                # ASN.1 header
                0x30, 0x51, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
                0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03, 0x05,
                0x00, 0x04, 0x40
            ]),
    'SHA512_RSA8192': Algorithm(
        algorithm_type=6,        # AVB_ALGORITHM_TYPE_SHA512_RSA8192
        hash_num_bytes=64,
        signature_num_bytes=1024,
        public_key_num_bytes=8 + 2*8192//8,
        padding=[
            # PKCS1-v1_5 padding
            0x00, 0x01] + [0xff]*938 + [0x00] + [
                # ASN.1 header
                0x30, 0x51, 0x30, 0x0d, 0x06, 0x09, 0x60, 0x86,
                0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03, 0x05,
                0x00, 0x04, 0x40
            ]),
}
162
163
def round_to_multiple(number, size):
  """Rounds |number| up to the nearest multiple of |size|.

  Args:
    number: The number to round up.
    size: The multiple to round up to.

  Returns:
    |number| unchanged when it is already a multiple of |size|,
    otherwise the smallest multiple of |size| greater than |number|.
  """
  leftover = number % size
  if not leftover:
    return number
  return number + (size - leftover)
179
180
def round_to_pow2(number):
  """Rounds |number| up to the next power of 2.

  Args:
    number: The number to round up.

  Returns:
    |number| itself if it is already a power of 2, otherwise the
    smallest power of 2 greater than |number|.
  """
  # (number - 1).bit_length() is the exponent of the smallest power of
  # two that is >= |number|; shifting avoids recomputing 2**exponent.
  exponent = (number - 1).bit_length()
  return 1 << exponent
193
194
def write_long(output, num_bits, value):
  """Writes a big-endian |num_bits|-bit integer to a stream.

  The most significant octet is emitted first.

  Arguments:
    output: The object to write the output to.
    num_bits: The number of bits to write, e.g. 2048.
    value: The value to write.
  """
  for pos in range(num_bits, 0, -8):
    # One octet per iteration, most significant first.
    output.write(struct.pack('!B', (value >> (pos - 8)) & 0xff))
209
210
def encode_long(num_bits, value):
  """Encodes an integer as a big-endian |num_bits|-bit bytearray().

  The most significant octet comes first.

  Arguments:
    num_bits: The number of bits to write, e.g. 2048.
    value: The value to write.

  Returns:
    A bytearray() with the encoded long.
  """
  encoded = bytearray()
  for pos in range(num_bits, 0, -8):
    # One octet per iteration, most significant first.
    encoded.append((value >> (pos - 8)) & 0xff)
  return encoded
229
230
def egcd(a, b):
  """Calculate greatest common divisor of two numbers.

  This implementation uses a recursive version of the extended
  Euclidian algorithm.

  Arguments:
    a: First number.
    b: Second number.

  Returns:
    A tuple (gcd, x, y) where |gcd| is the greatest common divisor of
    |a| and |b| and |a|*|x| + |b|*|y| == |gcd|.
  """
  # Base case: gcd(0, b) == b == 0*0 + 1*b.
  if not a:
    return (b, 0, 1)
  divisor, coeff_b, coeff_a = egcd(b % a, a)
  return (divisor, coeff_a - (b // a) * coeff_b, coeff_b)
250
251
def modinv(a, m):
  """Calculate modular multiplicative inverse of |a| modulo |m|.

  This calculates the number |x| such that |a| * |x| == 1 (modulo
  |m|). This number only exists if |a| and |m| are co-prime - |None|
  is returned if this isn't true.

  Arguments:
    a: The number to calculate a modular inverse of.
    m: The modulo to use.

  Returns:
    The modular multiplicative inverse of |a| and |m| or |None| if
    these numbers are not co-prime.
  """
  gcd_value, inverse, _ = egcd(a, m)
  if gcd_value != 1:
    # No inverse exists unless |a| and |m| are co-prime.
    return None
  return inverse % m
272
273
def parse_number(string):
  """Parse a string as an integer, auto-detecting the base.

  This is just a short-hand for int(string, 0) suitable for use in the
  |type| parameter of |ArgumentParser|'s add_argument() function. An
  improvement to just using type=int is that this function supports
  numbers in other bases, e.g. "0x1234".

  Arguments:
    string: The string to parse.

  Returns:
    The parsed integer.

  Raises:
    ValueError: If the number could not be parsed.
  """
  return int(string, 0)
292
293
def write_rsa_key(output, key):
  """Writes a public RSA key in |AvbRSAPublicKeyHeader| format.

  This writes the |AvbRSAPublicKeyHeader| as well as the two large
  numbers (|key_num_bits| bits long) following it.

  Arguments:
    output: The object to write the output to.
    key: A Crypto.PublicKey.RSA object.
  """
  # key.e is exponent
  # key.n is modulus
  key_num_bits = key.size() + 1
  # Calculate n0inv = -1/n[0] (mod 2^32). Note: plain int literals
  # auto-promote to arbitrary precision, so the Python 2-only 'L'
  # suffix is unnecessary (and breaks Python 3 parsing).
  b = 2**32
  n0inv = b - modinv(key.n, b)
  # Calculate rr = r^2 (mod N), where r = 2^(# of key bits)
  r = 2**key.n.bit_length()
  rrmodn = r * r % key.n
  output.write(struct.pack('!II', key_num_bits, n0inv))
  write_long(output, key_num_bits, key.n)
  write_long(output, key_num_bits, rrmodn)
316
317
def encode_rsa_key(key):
  """Encodes a public RSA key in |AvbRSAPublicKeyHeader| format.

  This creates a |AvbRSAPublicKeyHeader| as well as the two large
  numbers (|key_num_bits| bits long) following it.

  Arguments:
    key: A Crypto.PublicKey.RSA object.

  Returns:
    A bytearray() with the |AvbRSAPublicKeyHeader|.
  """
  ret = bytearray()
  # key.e is exponent
  # key.n is modulus
  key_num_bits = key.size() + 1
  # Calculate n0inv = -1/n[0] (mod 2^32). Note: plain int literals
  # auto-promote to arbitrary precision, so the Python 2-only 'L'
  # suffix is unnecessary (and breaks Python 3 parsing).
  b = 2**32
  n0inv = b - modinv(key.n, b)
  # Calculate rr = r^2 (mod N), where r = 2^(# of key bits)
  r = 2**key.n.bit_length()
  rrmodn = r * r % key.n
  ret.extend(struct.pack('!II', key_num_bits, n0inv))
  ret.extend(encode_long(key_num_bits, key.n))
  ret.extend(encode_long(key_num_bits, rrmodn))
  return ret
344
345
def lookup_algorithm_by_type(alg_type):
  """Looks up algorithm by type.

  Arguments:
    alg_type: The integer representing the type.

  Returns:
    A tuple with the algorithm name and an |Algorithm| instance.

  Raises:
    AvbError: If the algorithm cannot be found.
  """
  for name, details in ALGORITHMS.items():
    if details.algorithm_type == alg_type:
      return (name, details)
  raise AvbError('Unknown algorithm type {}'.format(alg_type))
363
364
class AvbDescriptor(object):
  """Class for AVB descriptor.

  This base class is also used as-is for descriptors whose tag is not
  recognized; |data| then holds the raw payload.

  See the |AvbDescriptor| C struct for more information.

  Attributes:
    tag: The tag identifying what kind of descriptor this is.
    data: The data in the descriptor.
  """

  SIZE = 16
  FORMAT_STRING = ('!QQ')  # tag, num_bytes_following (descriptor header)

  def __init__(self, data):
    """Initializes a new descriptor.

    Arguments:
      data: If not None, must be a bytearray().
    """
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      # Header is two big-endian uint64s: the tag and the number of
      # payload bytes that follow the 16-byte header.
      (self.tag, num_bytes_following) = (
          struct.unpack(self.FORMAT_STRING, data[0:self.SIZE]))
      self.data = data[self.SIZE:self.SIZE + num_bytes_following]
    else:
      self.tag = None
      self.data = None

  def print_desc(self, o):
    """Print the descriptor.

    Arguments:
      o: The object to write the output to.
    """
    o.write('    Unknown descriptor:\n')
    o.write('      Tag:  {}\n'.format(self.tag))
    # Only dump small payloads verbatim; for large ones just the size.
    if len(self.data) < 256:
      o.write('      Data: {} ({} bytes)\n'.format(
          repr(str(self.data)), len(self.data)))
    else:
      o.write('      Data: {} bytes\n'.format(len(self.data)))

  def encode(self):
    """Serializes the descriptor.

    Returns:
      A bytearray() with the descriptor data.
    """
    num_bytes_following = len(self.data)
    # The payload is padded up to an 8-byte boundary; the padded size is
    # what is stored in the num_bytes_following header field.
    nbf_with_padding = round_to_multiple(num_bytes_following, 8)
    padding_size = nbf_with_padding - num_bytes_following
    desc = struct.pack(self.FORMAT_STRING, self.tag, nbf_with_padding)
    padding = struct.pack(str(padding_size) + 'x')
    ret = desc + self.data + padding
    return bytearray(ret)
424
425
class AvbPropertyDescriptor(AvbDescriptor):
  """A class for property descriptors.

  See the |AvbPropertyDescriptor| C struct for more information.

  Attributes:
    key: The key.
    value: The value.
  """

  TAG = 0
  SIZE = 32
  FORMAT_STRING = ('!QQ'  # tag, num_bytes_following (descriptor header)
                   'Q'  # key size (bytes)
                   'Q')  # value size (bytes)

  def __init__(self, data=None):
    """Initializes a new property descriptor.

    Arguments:
      data: If not None, must be a bytearray of size |SIZE|.

    Raises:
      LookupError: If the given descriptor is malformed.
    """
    AvbDescriptor.__init__(self, None)
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      (tag, num_bytes_following, key_size,
       value_size) = struct.unpack(self.FORMAT_STRING, data[0:self.SIZE])
      # Key and value are each NUL-terminated (the '+ 1's) and the whole
      # payload is padded to an 8-byte boundary. SIZE - 16 is the fixed
      # part beyond the 16-byte header counted in num_bytes_following.
      expected_size = round_to_multiple(
          self.SIZE - 16 + key_size + 1 + value_size + 1, 8)
      if tag != self.TAG or num_bytes_following != expected_size:
        raise LookupError('Given data does not look like a property '
                          'descriptor.')
      self.key = data[self.SIZE:(self.SIZE + key_size)]
      # The '+ 1' skips the NUL byte terminating the key.
      self.value = data[(self.SIZE + key_size + 1):(self.SIZE + key_size + 1 +
                                                    value_size)]
    else:
      self.key = ''
      self.value = ''

  def print_desc(self, o):
    """Print the descriptor.

    Arguments:
      o: The object to write the output to.
    """
    # Only show small values verbatim; for large ones just the size.
    if len(self.value) < 256:
      o.write('    Prop: {} -> {}\n'.format(self.key, repr(str(self.value))))
    else:
      o.write('    Prop: {} -> ({} bytes)\n'.format(self.key, len(self.value)))

  def encode(self):
    """Serializes the descriptor.

    Returns:
      A bytearray() with the descriptor data.
    """
    # The '+ 2' accounts for the NUL byte after the key and the value.
    num_bytes_following = self.SIZE + len(self.key) + len(self.value) + 2 - 16
    nbf_with_padding = round_to_multiple(num_bytes_following, 8)
    padding_size = nbf_with_padding - num_bytes_following
    desc = struct.pack(self.FORMAT_STRING, self.TAG, nbf_with_padding,
                       len(self.key), len(self.value))
    padding = struct.pack(str(padding_size) + 'x')
    ret = desc + self.key + '\0' + self.value + '\0' + padding
    return bytearray(ret)
494
495
class AvbHashtreeDescriptor(AvbDescriptor):
  """A class for hashtree descriptors.

  See the |AvbHashtreeDescriptor| C struct for more information.

  Attributes:
    dm_verity_version: dm-verity version used.
    image_size: Size of the image, after rounding up to |block_size|.
    tree_offset: Offset of the hash tree in the file.
    tree_size: Size of the tree.
    data_block_size: Data block size.
    hash_block_size: Hash block size.
    hash_algorithm: Hash algorithm used.
    partition_name: Partition name.
    salt: Salt used.
    root_digest: Root digest.
  """

  TAG = 1
  SIZE = 96
  FORMAT_STRING = ('!QQ'  # tag, num_bytes_following (descriptor header)
                   'L'  # dm-verity version used
                   'Q'  # image size (bytes)
                   'Q'  # tree offset (bytes)
                   'Q'  # tree size (bytes)
                   'L'  # data block size (bytes)
                   'L'  # hash block size (bytes)
                   '32s'  # hash algorithm used
                   'L'  # partition name (bytes)
                   'L'  # salt length (bytes)
                   'L')  # root digest length (bytes)

  def __init__(self, data=None):
    """Initializes a new hashtree descriptor.

    Arguments:
      data: If not None, must be a bytearray of size |SIZE|.

    Raises:
      LookupError: If the given descriptor is malformed.
    """
    AvbDescriptor.__init__(self, None)
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      (tag, num_bytes_following, self.dm_verity_version, self.image_size,
       self.tree_offset, self.tree_size, self.data_block_size,
       self.hash_block_size, self.hash_algorithm, partition_name_len, salt_len,
       root_digest_len) = struct.unpack(self.FORMAT_STRING, data[0:self.SIZE])
      # Variable-length payload (name, salt, root digest) is padded up
      # to an 8-byte boundary; SIZE - 16 is the fixed part beyond the
      # 16-byte header counted in num_bytes_following.
      expected_size = round_to_multiple(
          self.SIZE - 16 + partition_name_len + salt_len + root_digest_len, 8)
      if tag != self.TAG or num_bytes_following != expected_size:
        raise LookupError('Given data does not look like a hashtree '
                          'descriptor.')
      # Nuke NUL-bytes at the end (the field is a fixed 32-byte string).
      self.hash_algorithm = self.hash_algorithm.split('\0', 1)[0]
      # Walk the variable-length payload with a running offset |o|.
      o = 0
      self.partition_name = str(data[(self.SIZE + o):(self.SIZE + o +
                                                      partition_name_len)])
      # Validate UTF-8 - decode() raises UnicodeDecodeError if not valid UTF-8.
      self.partition_name.decode('utf-8')
      o += partition_name_len
      self.salt = data[(self.SIZE + o):(self.SIZE + o + salt_len)]
      o += salt_len
      self.root_digest = data[(self.SIZE + o):(self.SIZE + o + root_digest_len)]
      # Sanity check: the digest length must match the declared algorithm.
      if root_digest_len != len(hashlib.new(name=self.hash_algorithm).digest()):
        raise LookupError('root_digest_len doesn\'t match hash algorithm')

    else:
      self.dm_verity_version = 0
      self.image_size = 0
      self.tree_offset = 0
      self.tree_size = 0
      self.data_block_size = 0
      self.hash_block_size = 0
      self.hash_algorithm = ''
      self.partition_name = ''
      self.salt = bytearray()
      self.root_digest = bytearray()

  def print_desc(self, o):
    """Print the descriptor.

    Arguments:
      o: The object to write the output to.
    """
    o.write('    Hashtree descriptor:\n')
    o.write('      Version of dm-verity:  {}\n'.format(self.dm_verity_version))
    o.write('      Image Size:            {} bytes\n'.format(self.image_size))
    o.write('      Tree Offset:           {}\n'.format(self.tree_offset))
    o.write('      Tree Size:             {} bytes\n'.format(self.tree_size))
    o.write('      Data Block Size:       {} bytes\n'.format(
        self.data_block_size))
    o.write('      Hash Block Size:       {} bytes\n'.format(
        self.hash_block_size))
    o.write('      Hash Algorithm:        {}\n'.format(self.hash_algorithm))
    o.write('      Partition Name:        {}\n'.format(self.partition_name))
    o.write('      Salt:                  {}\n'.format(str(self.salt).encode(
        'hex')))
    o.write('      Root Digest:           {}\n'.format(str(
        self.root_digest).encode('hex')))

  def encode(self):
    """Serializes the descriptor.

    Returns:
      A bytearray() with the descriptor data.
    """
    encoded_name = self.partition_name.encode('utf-8')
    # num_bytes_following excludes the 16-byte descriptor header.
    num_bytes_following = (self.SIZE + len(encoded_name) + len(self.salt) +
                           len(self.root_digest) - 16)
    nbf_with_padding = round_to_multiple(num_bytes_following, 8)
    padding_size = nbf_with_padding - num_bytes_following
    desc = struct.pack(self.FORMAT_STRING, self.TAG, nbf_with_padding,
                       self.dm_verity_version, self.image_size,
                       self.tree_offset, self.tree_size, self.data_block_size,
                       self.hash_block_size, self.hash_algorithm,
                       len(encoded_name), len(self.salt), len(self.root_digest))
    padding = struct.pack(str(padding_size) + 'x')
    ret = desc + encoded_name + self.salt + self.root_digest + padding
    return bytearray(ret)
617
618
class AvbHashDescriptor(AvbDescriptor):
  """A class for hash descriptors.

  See the |AvbHashDescriptor| C struct for more information.

  Attributes:
    image_size: Image size, in bytes.
    hash_algorithm: Hash algorithm used.
    partition_name: Partition name.
    salt: Salt used.
    digest: The hash value of salt and data combined.
  """

  TAG = 2
  SIZE = 68
  FORMAT_STRING = ('!QQ'  # tag, num_bytes_following (descriptor header)
                   'Q'  # image size (bytes)
                   '32s'  # hash algorithm used
                   'L'  # partition name (bytes)
                   'L'  # salt length (bytes)
                   'L')  # digest length (bytes)

  def __init__(self, data=None):
    """Initializes a new hash descriptor.

    Arguments:
      data: If not None, must be a bytearray of size |SIZE|.

    Raises:
      LookupError: If the given descriptor is malformed.
    """
    AvbDescriptor.__init__(self, None)
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      (tag, num_bytes_following, self.image_size, self.hash_algorithm,
       partition_name_len, salt_len,
       digest_len) = struct.unpack(self.FORMAT_STRING, data[0:self.SIZE])
      # Variable-length payload (name, salt, digest) is padded up to an
      # 8-byte boundary; SIZE - 16 is the fixed part beyond the 16-byte
      # header counted in num_bytes_following.
      expected_size = round_to_multiple(
          self.SIZE - 16 + partition_name_len + salt_len + digest_len, 8)
      if tag != self.TAG or num_bytes_following != expected_size:
        raise LookupError('Given data does not look like a hash ' 'descriptor.')
      # Nuke NUL-bytes at the end (the field is a fixed 32-byte string).
      self.hash_algorithm = self.hash_algorithm.split('\0', 1)[0]
      # Walk the variable-length payload with a running offset |o|.
      o = 0
      self.partition_name = str(data[(self.SIZE + o):(self.SIZE + o +
                                                      partition_name_len)])
      # Validate UTF-8 - decode() raises UnicodeDecodeError if not valid UTF-8.
      self.partition_name.decode('utf-8')
      o += partition_name_len
      self.salt = data[(self.SIZE + o):(self.SIZE + o + salt_len)]
      o += salt_len
      self.digest = data[(self.SIZE + o):(self.SIZE + o + digest_len)]
      # Sanity check: the digest length must match the declared algorithm.
      if digest_len != len(hashlib.new(name=self.hash_algorithm).digest()):
        raise LookupError('digest_len doesn\'t match hash algorithm')

    else:
      self.image_size = 0
      self.hash_algorithm = ''
      self.partition_name = ''
      self.salt = bytearray()
      self.digest = bytearray()

  def print_desc(self, o):
    """Print the descriptor.

    Arguments:
      o: The object to write the output to.
    """
    o.write('    Hash descriptor:\n')
    o.write('      Image Size:            {} bytes\n'.format(self.image_size))
    o.write('      Hash Algorithm:        {}\n'.format(self.hash_algorithm))
    o.write('      Partition Name:        {}\n'.format(self.partition_name))
    o.write('      Salt:                  {}\n'.format(str(self.salt).encode(
        'hex')))
    o.write('      Digest:                {}\n'.format(str(self.digest).encode(
        'hex')))

  def encode(self):
    """Serializes the descriptor.

    Returns:
      A bytearray() with the descriptor data.
    """
    encoded_name = self.partition_name.encode('utf-8')
    # num_bytes_following excludes the 16-byte descriptor header.
    num_bytes_following = (
        self.SIZE + len(encoded_name) + len(self.salt) + len(self.digest) - 16)
    nbf_with_padding = round_to_multiple(num_bytes_following, 8)
    padding_size = nbf_with_padding - num_bytes_following
    desc = struct.pack(self.FORMAT_STRING, self.TAG, nbf_with_padding,
                       self.image_size, self.hash_algorithm, len(encoded_name),
                       len(self.salt), len(self.digest))
    padding = struct.pack(str(padding_size) + 'x')
    ret = desc + encoded_name + self.salt + self.digest + padding
    return bytearray(ret)
714
715
class AvbKernelCmdlineDescriptor(AvbDescriptor):
  """A class for kernel command-line descriptors.

  See the |AvbKernelCmdlineDescriptor| C struct for more information.

  Attributes:
    kernel_cmdline: The kernel command-line.
  """

  TAG = 3
  SIZE = 20
  FORMAT_STRING = ('!QQ'  # tag, num_bytes_following (descriptor header)
                   'L')  # cmdline length (bytes)

  def __init__(self, data=None):
    """Initializes a new kernel cmdline descriptor.

    Arguments:
      data: If not None, must be a bytearray of size |SIZE|.

    Raises:
      LookupError: If the given descriptor is malformed.
    """
    AvbDescriptor.__init__(self, None)
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      (tag, num_bytes_following, kernel_cmdline_length) = (
          struct.unpack(self.FORMAT_STRING, data[0:self.SIZE]))
      # The cmdline payload is padded up to an 8-byte boundary;
      # SIZE - 16 is the fixed part beyond the 16-byte header counted
      # in num_bytes_following.
      expected_size = round_to_multiple(self.SIZE - 16 + kernel_cmdline_length,
                                        8)
      if tag != self.TAG or num_bytes_following != expected_size:
        raise LookupError('Given data does not look like a kernel cmdline '
                          'descriptor.')
      # Nuke NUL-bytes at the end.
      self.kernel_cmdline = str(data[self.SIZE:(self.SIZE +
                                                kernel_cmdline_length)])
      # Validate UTF-8 - decode() raises UnicodeDecodeError if not valid UTF-8.
      self.kernel_cmdline.decode('utf-8')
    else:
      self.kernel_cmdline = ''

  def print_desc(self, o):
    """Print the descriptor.

    Arguments:
      o: The object to write the output to.
    """
    o.write('    Kernel Cmdline descriptor:\n')
    o.write('      Kernel Cmdline:        {}\n'.format(repr(
        self.kernel_cmdline)))

  def encode(self):
    """Serializes the descriptor.

    Returns:
      A bytearray() with the descriptor data.
    """
    encoded_str = self.kernel_cmdline.encode('utf-8')
    # num_bytes_following excludes the 16-byte descriptor header.
    num_bytes_following = (self.SIZE + len(encoded_str) - 16)
    nbf_with_padding = round_to_multiple(num_bytes_following, 8)
    padding_size = nbf_with_padding - num_bytes_following
    desc = struct.pack(self.FORMAT_STRING, self.TAG, nbf_with_padding,
                       len(encoded_str))
    padding = struct.pack(str(padding_size) + 'x')
    ret = desc + encoded_str + padding
    return bytearray(ret)
783
784
class AvbChainPartitionDescriptor(AvbDescriptor):
  """A class for chained partition descriptors.

  See the |AvbChainPartitionDescriptor| C struct for more information.

  Attributes:
    rollback_index_slot: The rollback index slot to use.
    partition_name: Partition name.
    public_key: Bytes for the public key.
  """

  TAG = 4
  SIZE = 28
  FORMAT_STRING = ('!QQ'  # tag, num_bytes_following (descriptor header)
                   'L'  # rollback_index_slot
                   'L'  # partition_name_size (bytes)
                   'L')  # public_key_size (bytes)

  def __init__(self, data=None):
    """Initializes a new chain partition descriptor.

    Arguments:
      data: If not None, must be a bytearray of size |SIZE|.

    Raises:
      LookupError: If the given descriptor is malformed.
    """
    AvbDescriptor.__init__(self, None)
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      (tag, num_bytes_following, self.rollback_index_slot, partition_name_len,
       public_key_len) = struct.unpack(self.FORMAT_STRING, data[0:self.SIZE])
      # Variable-length payload (name, public key) is padded up to an
      # 8-byte boundary; SIZE - 16 is the fixed part beyond the 16-byte
      # header counted in num_bytes_following.
      expected_size = round_to_multiple(
          self.SIZE - 16 + partition_name_len + public_key_len, 8)
      if tag != self.TAG or num_bytes_following != expected_size:
        raise LookupError('Given data does not look like a chain partition '
                          'descriptor.')
      # Walk the variable-length payload with a running offset |o|.
      o = 0
      self.partition_name = str(data[(self.SIZE + o):(self.SIZE + o +
                                                      partition_name_len)])
      # Validate UTF-8 - decode() raises UnicodeDecodeError if not valid UTF-8.
      self.partition_name.decode('utf-8')
      o += partition_name_len
      self.public_key = data[(self.SIZE + o):(self.SIZE + o + public_key_len)]

    else:
      self.rollback_index_slot = 0
      self.partition_name = ''
      self.public_key = bytearray()

  def print_desc(self, o):
    """Print the descriptor.

    Arguments:
      o: The object to write the output to.
    """
    o.write('    Chain Partition descriptor:\n')
    o.write('      Partition Name:          {}\n'.format(self.partition_name))
    o.write('      Rollback Index Slot:     {}\n'.format(
        self.rollback_index_slot))
    # Just show the SHA1 of the key, for size reasons.
    hexdig = hashlib.sha1(self.public_key).hexdigest()
    o.write('      Public key (sha1):       {}\n'.format(hexdig))

  def encode(self):
    """Serializes the descriptor.

    Returns:
      A bytearray() with the descriptor data.
    """
    encoded_name = self.partition_name.encode('utf-8')
    # num_bytes_following excludes the 16-byte descriptor header.
    num_bytes_following = (
        self.SIZE + len(encoded_name) + len(self.public_key) - 16)
    nbf_with_padding = round_to_multiple(num_bytes_following, 8)
    padding_size = nbf_with_padding - num_bytes_following
    desc = struct.pack(self.FORMAT_STRING, self.TAG, nbf_with_padding,
                       self.rollback_index_slot, len(encoded_name),
                       len(self.public_key))
    padding = struct.pack(str(padding_size) + 'x')
    ret = desc + encoded_name + self.public_key + padding
    return bytearray(ret)
867
868
# Descriptor parser classes indexed by tag: the list index must equal
# each class' |TAG| value. Used by parse_descriptors() to dispatch.
DESCRIPTOR_CLASSES = [
    AvbPropertyDescriptor, AvbHashtreeDescriptor, AvbHashDescriptor,
    AvbKernelCmdlineDescriptor, AvbChainPartitionDescriptor
]
873
874
def parse_descriptors(data):
  """Parses a blob of data into descriptors.

  Arguments:
    data: A bytearray() with encoded descriptors.

  Returns:
    A list of instances of objects derived from AvbDescriptor. For
    unknown descriptors, the class AvbDescriptor is used.
  """
  descriptors = []
  offset = 0
  while offset < len(data):
    # Peek at the 16-byte header to learn the tag and payload size.
    tag, num_following = struct.unpack('!2Q', data[offset:offset + 16])
    # Dispatch on the tag; unknown tags fall back to the base class.
    if tag < len(DESCRIPTOR_CLASSES):
      desc_class = DESCRIPTOR_CLASSES[tag]
    else:
      desc_class = AvbDescriptor
    descriptors.append(
        desc_class(bytearray(data[offset:offset + 16 + num_following])))
    offset += 16 + num_following
  return descriptors
896
897
class AvbFooter(object):
  """A class for parsing and writing footers.

  Footers are stored at the end of partitions and point to where the
  AvbVBMeta blob is located. They also contain the original size of
  the image before AVB information was added.

  Attributes:
    magic: Magic for identifying the footer, see |MAGIC|.
    version_major: The major version of avbtool that wrote the footer.
    version_minor: The minor version of avbtool that wrote the footer.
    original_image_size: Original image size.
    vbmeta_offset: Offset of where the AvbVBMeta blob is stored.
    vbmeta_size: Size of the AvbVBMeta blob.
  """

  MAGIC = 'AVBf'
  SIZE = 64
  RESERVED = 28
  FORMAT_STRING = ('!4s2L'  # magic, 2 x version.
                   'Q'  # Original image size.
                   'Q'  # Offset of VBMeta blob.
                   'Q' +  # Size of VBMeta blob.
                   str(RESERVED) + 'x')  # padding for reserved bytes

  def __init__(self, data=None):
    """Initializes a new footer object.

    Arguments:
      data: If not None, must be a bytearray of size |SIZE| (64 bytes);
          struct.unpack() requires exactly that many bytes.

    Raises:
      LookupError: If the given footer is malformed.
      struct.error: If the given data has no footer.
    """
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      (self.magic, self.version_major, self.version_minor,
       self.original_image_size, self.vbmeta_offset,
       self.vbmeta_size) = struct.unpack(self.FORMAT_STRING, data)
      if self.magic != self.MAGIC:
        raise LookupError('Given data does not look like a Brillo footer.')
    else:
      # No data: start an empty footer stamped with this tool's version.
      self.magic = self.MAGIC
      self.version_major = AVB_VERSION_MAJOR
      self.version_minor = AVB_VERSION_MINOR
      self.original_image_size = 0
      self.vbmeta_offset = 0
      self.vbmeta_size = 0

  def save(self, output):
    """Serializes the footer to disk.

    Arguments:
      output: The object to write the output to.
    """
    output.write(struct.pack(self.FORMAT_STRING, self.magic, self.version_major,
                             self.version_minor, self.original_image_size,
                             self.vbmeta_offset, self.vbmeta_size))
958
959
class AvbVBMetaHeader(object):
  """A class for parsing and writing Brillo Verified Boot vbmeta images.

  Attributes:
    The attributes correspond to the |AvbVBMetaHeader| struct
    defined in avb_vbmeta_header.h.
  """

  SIZE = 256

  # Keep in sync with |reserved| field of |AvbVBMetaImageHeader|.
  RESERVED = 152

  # Keep in sync with |AvbVBMetaImageHeader|.
  FORMAT_STRING = ('!4s2L'  # magic, 2 x version
                   '2Q'  # 2 x block size
                   'L'  # algorithm type
                   '2Q'  # offset, size (hash)
                   '2Q'  # offset, size (signature)
                   '2Q'  # offset, size (public key)
                   '2Q'  # offset, size (descriptors)
                   'Q' +  # rollback_index
                   str(RESERVED) + 'x')  # padding for reserved bytes

  def __init__(self, data=None):
    """Initializes a new header object.

    Arguments:
      data: If not None, must be a bytearray of size |SIZE| (256 bytes);
          struct.unpack() requires exactly that many bytes.

    Raises:
      AvbError: If the given data is malformed.
    """
    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE

    if data:
      (self.magic, self.header_version_major, self.header_version_minor,
       self.authentication_data_block_size, self.auxiliary_data_block_size,
       self.algorithm_type, self.hash_offset, self.hash_size,
       self.signature_offset, self.signature_size, self.public_key_offset,
       self.public_key_size, self.descriptors_offset, self.descriptors_size,
       self.rollback_index) = struct.unpack(self.FORMAT_STRING, data)
      # Only the magic is validated; the version fields are parsed but
      # not checked here.
      if self.magic != 'AVB0':
        raise AvbError('Given image does not look like a Brillo boot image')
    else:
      # No data: start an empty header stamped with this tool's version.
      self.magic = 'AVB0'
      self.header_version_major = AVB_VERSION_MAJOR
      self.header_version_minor = AVB_VERSION_MINOR
      self.authentication_data_block_size = 0
      self.auxiliary_data_block_size = 0
      self.algorithm_type = 0
      self.hash_offset = 0
      self.hash_size = 0
      self.signature_offset = 0
      self.signature_size = 0
      self.public_key_offset = 0
      self.public_key_size = 0
      self.descriptors_offset = 0
      self.descriptors_size = 0
      self.rollback_index = 0

  def save(self, output):
    """Serializes the header (256 bytes) to disk.

    Arguments:
      output: The object to write the output to.
    """
    output.write(struct.pack(
        self.FORMAT_STRING, self.magic, self.header_version_major,
        self.header_version_minor, self.authentication_data_block_size,
        self.auxiliary_data_block_size, self.algorithm_type, self.hash_offset,
        self.hash_size, self.signature_offset, self.signature_size,
        self.public_key_offset, self.public_key_size, self.descriptors_offset,
        self.descriptors_size, self.rollback_index))

  def encode(self):
    """Serializes the header (256 bytes) to a bytearray().

    Returns:
      A bytearray() with the encoded header.
    """
    return struct.pack(self.FORMAT_STRING, self.magic,
                       self.header_version_major, self.header_version_minor,
                       self.authentication_data_block_size,
                       self.auxiliary_data_block_size, self.algorithm_type,
                       self.hash_offset, self.hash_size, self.signature_offset,
                       self.signature_size, self.public_key_offset,
                       self.public_key_size, self.descriptors_offset,
                       self.descriptors_size, self.rollback_index)
1050
1051
class Avb(object):
  """Business logic for avbtool command-line tool."""

  def erase_footer(self, image, keep_hashtree):
    """Implements the 'erase_footer' command.

    Arguments:
      image: File to erase a footer from.
      keep_hashtree: If True, keep the hashtree around.

    Raises:
      AvbError: If there's no footer in the image.
    """

    (footer, _, descriptors, _) = self._parse_image(image)

    if not footer:
      raise AvbError('Given image does not have a footer.')

    new_image_size = None
    if not keep_hashtree:
      new_image_size = footer.original_image_size
    else:
      # If requested to keep the hashtree, search for a hashtree
      # descriptor to figure out the location and size of the hashtree.
      for desc in descriptors:
        if isinstance(desc, AvbHashtreeDescriptor):
          # The hashtree is always just following the main data so the
          # new size is easily derived.
          new_image_size = desc.tree_offset + desc.tree_size
          break
      if not new_image_size:
        raise AvbError('Requested to keep hashtree but no hashtree '
                       'descriptor was found.')

    # And cut...
    image.truncate(new_image_size)

  def info_image(self, image, output):
    """Implements the 'info_image' command.

    Arguments:
      image: Image file to get information from (file object).
      output: Output file to write human-readable information to (file object).
    """

    o = output

    (footer, header, descriptors, image_size) = self._parse_image(image)

    if footer:
      o.write('Footer version: {}.{}\n'.format(footer.version_major,
                                               footer.version_minor))
      o.write('Image size: {} bytes\n'.format(image_size))
      o.write('Original image size: {} bytes\n'.format(
          footer.original_image_size))
      o.write('VBMeta offset: {}\n'.format(footer.vbmeta_offset))
      o.write('VBMeta size: {} bytes\n'.format(footer.vbmeta_size))
      o.write('--\n')

    (alg_name, _) = lookup_algorithm_by_type(header.algorithm_type)

    o.write('VBMeta image version: {}.{}\n'.format(
        header.header_version_major, header.header_version_minor))
    o.write('Header Block: {} bytes\n'.format(AvbVBMetaHeader.SIZE))
    o.write('Authentication Block: {} bytes\n'.format(
        header.authentication_data_block_size))
    o.write('Auxiliary Block: {} bytes\n'.format(
        header.auxiliary_data_block_size))
    o.write('Algorithm: {}\n'.format(alg_name))
    o.write('Rollback Index: {}\n'.format(header.rollback_index))

    # Print descriptors.
    num_printed = 0
    o.write('Descriptors:\n')
    for desc in descriptors:
      desc.print_desc(o)
      num_printed += 1
    if num_printed == 0:
      o.write('    (none)\n')

  def _parse_image(self, image):
    """Gets information about an image.

    The image can either be a vbmeta or an image with a footer.

    Arguments:
      image: An image (vbmeta or footer) with a hashtree descriptor.

    Returns:
      A tuple where the first argument is a AvbFooter (None if there
      is no footer on the image), the second argument is a
      AvbVBMetaHeader, the third argument is a list of
      AvbDescriptor-derived instances, and the fourth argument is the
      size of |image|.
    """
    footer = None
    image.seek(0, os.SEEK_END)
    image_size = image.tell()
    image.seek(image_size - AvbFooter.SIZE)
    try:
      footer = AvbFooter(image.read(AvbFooter.SIZE))
    except (LookupError, struct.error):
      # Nope, just seek back to the start.
      image.seek(0)

    vbmeta_offset = 0
    if footer:
      vbmeta_offset = footer.vbmeta_offset

    image.seek(vbmeta_offset)
    h = AvbVBMetaHeader(image.read(AvbVBMetaHeader.SIZE))

    auth_block_offset = vbmeta_offset + AvbVBMetaHeader.SIZE
    aux_block_offset = auth_block_offset + h.authentication_data_block_size
    desc_start_offset = aux_block_offset + h.descriptors_offset
    image.seek(desc_start_offset)
    descriptors = parse_descriptors(image.read(h.descriptors_size))

    return footer, h, descriptors, image_size

  def _get_cmdline_descriptor_for_dm_verity(self, image):
    """Generate kernel cmdline descriptor for dm-verity.

    Arguments:
      image: An image (vbmeta or footer) with a hashtree descriptor.

    Returns:
      A AvbKernelCmdlineDescriptor with dm-verity kernel cmdline
      instructions for the hashtree.

    Raises:
      AvbError: If |image| doesn't have a hashtree descriptor.

    """

    (_, _, descriptors, _) = self._parse_image(image)

    ht = None
    for desc in descriptors:
      if isinstance(desc, AvbHashtreeDescriptor):
        ht = desc
        break

    if not ht:
      raise AvbError('No hashtree descriptor in given image')

    c = 'dm="1 vroot none ro 1,'
    c += '0 '  # start
    c += '{} '.format((ht.image_size / 512))  # size (# sectors)
    c += 'verity {} '.format(ht.dm_verity_version)  # type and version
    c += 'PARTUUID=$(ANDROID_SYSTEM_PARTUUID) '  # data_dev
    c += 'PARTUUID=$(ANDROID_SYSTEM_PARTUUID) '  # hash_dev
    c += '{} '.format(ht.data_block_size)  # data_block
    c += '{} '.format(ht.hash_block_size)  # hash_block
    c += '{} '.format(ht.image_size / ht.data_block_size)  # #blocks
    # hash_start: the tree begins right after the data; value coincides
    # with #blocks because data_block_size == hash_block_size here.
    c += '{} '.format(ht.image_size / ht.data_block_size)  # hash_offset
    c += '{} '.format(ht.hash_algorithm)  # hash_alg
    c += '{} '.format(str(ht.root_digest).encode('hex'))  # root_digest
    c += '{}'.format(str(ht.salt).encode('hex'))  # salt
    c += '"'

    desc = AvbKernelCmdlineDescriptor()
    desc.kernel_cmdline = c
    return desc

  def make_vbmeta_image(self, output, chain_partitions, algorithm_name,
                        key_path, rollback_index, props, props_from_file,
                        kernel_cmdlines,
                        generate_dm_verity_cmdline_from_hashtree,
                        include_descriptors_from_image):
    """Implements the 'make_vbmeta_image' command.

    Arguments:
      output: File to write the image to.
      chain_partitions: List of partitions to chain.
      algorithm_name: Name of algorithm to use.
      key_path: Path to key to use or None.
      rollback_index: The rollback index to use.
      props: Properties to insert (list of strings of the form 'key:value').
      props_from_file: Properties to insert (list of strings 'key:<path>').
      kernel_cmdlines: Kernel cmdlines to insert (list of strings).
      generate_dm_verity_cmdline_from_hashtree: None or file to generate from.
      include_descriptors_from_image: List of file objects with descriptors.

    Raises:
      AvbError: If a chained partition is malformed.
    """

    descriptors = []

    # Insert chained partition descriptors.
    if chain_partitions:
      for cp in chain_partitions:
        cp_tokens = cp.split(':')
        if len(cp_tokens) != 3:
          raise AvbError('Malformed chained partition "{}".'.format(cp))
        desc = AvbChainPartitionDescriptor()
        desc.partition_name = cp_tokens[0]
        desc.rollback_index_slot = int(cp_tokens[1])
        if desc.rollback_index_slot < 1:
          raise AvbError('Rollback index slot must be 1 or larger.')
        file_path = cp_tokens[2]
        desc.public_key = open(file_path, 'rb').read()
        descriptors.append(desc)

    vbmeta_blob = self._generate_vbmeta_blob(
        algorithm_name, key_path, descriptors, rollback_index, props,
        props_from_file, kernel_cmdlines,
        generate_dm_verity_cmdline_from_hashtree,
        include_descriptors_from_image)

    # Write entire vbmeta blob (header, authentication, auxiliary).
    output.seek(0)
    output.write(vbmeta_blob)

  def _generate_vbmeta_blob(self, algorithm_name, key_path, descriptors,
                            rollback_index, props, props_from_file,
                            kernel_cmdlines,
                            generate_dm_verity_cmdline_from_hashtree,
                            include_descriptors_from_image):
    """Generates a VBMeta blob.

    This blob contains the header (struct AvbVBMetaHeader), the
    authentication data block (which contains the hash and signature
    for the header and auxiliary block), and the auxiliary block
    (which contains descriptors, the public key used, and other data).

    The |key| parameter can |None| only if the |algorithm_name| is
    'NONE'.

    Arguments:
      algorithm_name: The algorithm name as per the ALGORITHMS dict.
      key_path: The path to the .pem file used to sign the blob.
      descriptors: A list of descriptors to insert or None.
      rollback_index: The rollback index to use.
      props: Properties to insert (List of strings of the form 'key:value').
      props_from_file: Properties to insert (List of strings 'key:<path>').
      kernel_cmdlines: Kernel cmdlines to insert (list of strings).
      generate_dm_verity_cmdline_from_hashtree: None or file to generate
        dm-verity kernel cmdline from.
      include_descriptors_from_image: List of file objects for which
        to insert descriptors from.

    Returns:
      A bytearray() with the VBMeta blob.

    Raises:
      Exception: If the |algorithm_name| is not found, if no key has
        been given and the given algorithm requires one, or the key is
        of the wrong size.

    """
    try:
      alg = ALGORITHMS[algorithm_name]
    except KeyError:
      raise AvbError('Unknown algorithm with name {}'.format(algorithm_name))

    # Descriptors.
    encoded_descriptors = bytearray()
    if descriptors:
      for desc in descriptors:
        encoded_descriptors.extend(desc.encode())

    # Add properties.
    if props:
      for prop in props:
        idx = prop.find(':')
        if idx == -1:
          raise AvbError('Malformed property "{}".'.format(prop))
        desc = AvbPropertyDescriptor()
        desc.key = prop[0:idx]
        desc.value = prop[(idx + 1):]
        encoded_descriptors.extend(desc.encode())
    if props_from_file:
      for prop in props_from_file:
        idx = prop.find(':')
        if idx == -1:
          raise AvbError('Malformed property "{}".'.format(prop))
        desc = AvbPropertyDescriptor()
        desc.key = prop[0:idx]
        # The value is the contents of the file at the given path.
        file_path = prop[(idx + 1):]
        desc.value = open(file_path, 'rb').read()
        encoded_descriptors.extend(desc.encode())

    # Add AvbKernelCmdline descriptor for dm-verity, if requested.
    if generate_dm_verity_cmdline_from_hashtree:
      encoded_descriptors.extend(self._get_cmdline_descriptor_for_dm_verity(
          generate_dm_verity_cmdline_from_hashtree).encode())

    # Add kernel command-lines.
    if kernel_cmdlines:
      for i in kernel_cmdlines:
        desc = AvbKernelCmdlineDescriptor()
        desc.kernel_cmdline = i
        encoded_descriptors.extend(desc.encode())

    # Add descriptors from other images.
    if include_descriptors_from_image:
      for image in include_descriptors_from_image:
        (_, _, image_descriptors, _) = self._parse_image(image)
        for desc in image_descriptors:
          encoded_descriptors.extend(desc.encode())

    key = None
    encoded_key = bytearray()
    if alg.public_key_num_bytes > 0:
      if not key_path:
        raise AvbError('Key is required for algorithm {}'.format(
            algorithm_name))
      key = Crypto.PublicKey.RSA.importKey(open(key_path).read())
      encoded_key = encode_rsa_key(key)
      if len(encoded_key) != alg.public_key_num_bytes:
        raise AvbError('Key is wrong size for algorithm {}'.format(
            algorithm_name))

    h = AvbVBMetaHeader()

    # For the Auxiliary data block, descriptors are stored at offset 0
    # and the public key is immediately after that.
    h.auxiliary_data_block_size = round_to_multiple(
        len(encoded_descriptors) + len(encoded_key), 64)
    h.descriptors_offset = 0
    h.descriptors_size = len(encoded_descriptors)
    h.public_key_offset = h.descriptors_size
    h.public_key_size = len(encoded_key)

    # For the Authentication data block, the hash is first and then
    # the signature. The block holds only hash + signature (the public
    # key lives in the Auxiliary data block above), so size it from
    # signature_num_bytes, not public_key_num_bytes.
    h.authentication_data_block_size = round_to_multiple(
        alg.hash_num_bytes + alg.signature_num_bytes, 64)
    h.algorithm_type = alg.algorithm_type
    h.hash_offset = 0
    h.hash_size = alg.hash_num_bytes
    # Signature offset and size - it's stored right after the hash
    # (in Authentication data block).
    h.signature_offset = alg.hash_num_bytes
    h.signature_size = alg.signature_num_bytes

    h.rollback_index = rollback_index

    # Generate Header data block.
    header_data_blob = h.encode()

    # Generate Auxiliary data block.
    aux_data_blob = bytearray()
    aux_data_blob.extend(encoded_descriptors)
    aux_data_blob.extend(encoded_key)
    padding_bytes = h.auxiliary_data_block_size - len(aux_data_blob)
    aux_data_blob.extend('\0' * padding_bytes)

    # Calculate the hash.
    binary_hash = bytearray()
    binary_signature = bytearray()
    if algorithm_name != 'NONE':
      if algorithm_name[0:6] == 'SHA256':
        ha = hashlib.sha256()
      elif algorithm_name[0:6] == 'SHA512':
        ha = hashlib.sha512()
      else:
        raise AvbError('Unsupported algorithm {}.'.format(algorithm_name))
      ha.update(header_data_blob)
      ha.update(aux_data_blob)
      binary_hash.extend(ha.digest())

      # Calculate the signature.
      p = subprocess.Popen(
          ['openssl', 'rsautl', '-sign', '-inkey', key_path, '-raw'],
          stdin=subprocess.PIPE,
          stdout=subprocess.PIPE,
          stderr=subprocess.PIPE)
      padding_and_hash = str(bytearray(alg.padding)) + binary_hash
      (pout, perr) = p.communicate(padding_and_hash)
      retcode = p.wait()
      if retcode != 0:
        raise AvbError('Error signing: {}'.format(perr))
      binary_signature.extend(pout)

    # Generate Authentication data block.
    auth_data_blob = bytearray()
    auth_data_blob.extend(binary_hash)
    auth_data_blob.extend(binary_signature)
    padding_bytes = h.authentication_data_block_size - len(auth_data_blob)
    auth_data_blob.extend('\0' * padding_bytes)

    return header_data_blob + auth_data_blob + aux_data_blob

  def extract_public_key(self, key_path, output):
    """Implements the 'extract_public_key' command.

    Arguments:
      key_path: The path to a RSA private key file.
      output: The file to write to.
    """
    key = Crypto.PublicKey.RSA.importKey(open(key_path).read())
    write_rsa_key(output, key)

  def add_hash_footer(self, image, partition_size, partition_name,
                      hash_algorithm, salt, algorithm_name, key_path,
                      rollback_index, props, props_from_file, kernel_cmdlines,
                      generate_dm_verity_cmdline_from_hashtree,
                      include_descriptors_from_image):
    """Implements the 'add_hash_footer' command.

    Arguments:
      image: File to add the footer to.
      partition_size: Size of partition.
      partition_name: Name of partition (without A/B suffix).
      hash_algorithm: Hash algorithm to use.
      salt: Salt to use as a hexadecimal string or None to use /dev/urandom.
      algorithm_name: Name of algorithm to use.
      key_path: Path to key to use or None.
      rollback_index: Rollback index.
      props: Properties to insert (List of strings of the form 'key:value').
      props_from_file: Properties to insert (List of strings 'key:<path>').
      kernel_cmdlines: Kernel cmdlines to insert (list of strings).
      generate_dm_verity_cmdline_from_hashtree: None or file to generate
        dm-verity kernel cmdline from.
      include_descriptors_from_image: List of file objects for which
        to insert descriptors from.
    """
    # If there's already a footer, truncate the image to its original
    # size. This way 'avbtool add_hash_footer' is idempotent (modulo
    # salts).
    image.seek(0, os.SEEK_END)
    image_size = image.tell()
    image.seek(image_size - AvbFooter.SIZE)
    try:
      footer = AvbFooter(image.read(AvbFooter.SIZE))
      # Existing footer found. Just truncate.
      original_image_size = footer.original_image_size
      image_size = footer.original_image_size
      image.truncate(image_size)
    except (LookupError, struct.error):
      original_image_size = image_size

    # If anything goes wrong from here-on, restore the image back to
    # its original size.
    try:
      digest_size = len(hashlib.new(name=hash_algorithm).digest())
      if salt:
        salt = salt.decode('hex')
      else:
        if salt is None:
          # If salt is not explicitly specified, choose a random salt
          # that's the same size as the digest.
          hash_size = digest_size
          salt = open('/dev/urandom').read(hash_size)
        else:
          salt = ''

      hasher = hashlib.new(name=hash_algorithm, string=salt)
      # TODO(zeuthen): might want to read this in chunks to avoid
      # memory pressure, then again, this is only supposed to be used
      # on kernel/initramfs partitions. Possible optimization.
      image.seek(0)
      hasher.update(image.read(image_size))
      digest = hasher.digest()

      h_desc = AvbHashDescriptor()
      h_desc.image_size = image_size
      h_desc.hash_algorithm = hash_algorithm
      h_desc.partition_name = partition_name
      h_desc.salt = salt
      h_desc.digest = digest

      # Generate the VBMeta footer.
      vbmeta_offset = image_size
      vbmeta_blob = self._generate_vbmeta_blob(
          algorithm_name, key_path, [h_desc], rollback_index, props,
          props_from_file, kernel_cmdlines,
          generate_dm_verity_cmdline_from_hashtree,
          include_descriptors_from_image)

      image.seek(vbmeta_offset)
      image.write(vbmeta_blob)

      # Check that |partition_size| is going to be big enough
      footer_offset = partition_size - AvbFooter.SIZE
      if image.tell() > footer_offset:
        raise AvbError('Partition size of {} is {} bytes too small.'.format(
            partition_size, image.tell() - footer_offset))

      # Generate the Footer that tells where the VBMeta footer is.
      footer = AvbFooter()
      footer.original_image_size = original_image_size
      footer.vbmeta_offset = vbmeta_offset
      footer.vbmeta_size = len(vbmeta_blob)
      image.seek(footer_offset)
      footer.save(image)
    except:
      # Truncate back to original size, then re-raise
      image.truncate(original_image_size)
      raise

  def add_hashtree_footer(self, image, partition_size, partition_name,
                          hash_algorithm, block_size, salt, algorithm_name,
                          key_path, rollback_index, props, props_from_file,
                          kernel_cmdlines,
                          generate_dm_verity_cmdline_from_hashtree,
                          include_descriptors_from_image):
    """Implements the 'add_hashtree_footer' command.

    See https://gitlab.com/cryptsetup/cryptsetup/wikis/DMVerity for
    more information about dm-verity and these hashes.

    Arguments:
      image: File to add the footer to.
      partition_size: Size of partition.
      partition_name: Name of partition (without A/B suffix).
      hash_algorithm: Hash algorithm to use.
      block_size: Block size to use.
      salt: Salt to use as a hexadecimal string or None to use /dev/urandom.
      algorithm_name: Name of algorithm to use.
      key_path: Path to key to use or None.
      rollback_index: Rollback index.
      props: Properties to insert (List of strings of the form 'key:value').
      props_from_file: Properties to insert (List of strings 'key:<path>').
      kernel_cmdlines: Kernel cmdlines to insert (list of strings).
      generate_dm_verity_cmdline_from_hashtree: None or file to generate
        dm-verity kernel cmdline from.
      include_descriptors_from_image: List of file objects for which
        to insert descriptors from.
    """
    # If there's already a footer, truncate the image to its original
    # size. This way 'avbtool add_hashtree_footer' is idempotent
    # (modulo salts).
    image.seek(0, os.SEEK_END)
    image_size = image.tell()
    image.seek(image_size - AvbFooter.SIZE)
    try:
      footer = AvbFooter(image.read(AvbFooter.SIZE))
      # Existing footer found. Just truncate.
      original_image_size = footer.original_image_size
      image_size = footer.original_image_size
      image.truncate(image_size)
    except (LookupError, struct.error):
      original_image_size = image_size

    # If anything goes wrong from here-on, restore the image back to
    # its original size.
    try:
      # Ensure image is multiple of block_size.
      rounded_image_size = round_to_multiple(image_size, block_size)
      if rounded_image_size > image_size:
        image.write('\0' * (rounded_image_size - image_size))
        image_size = rounded_image_size

      tree_offset = image_size

      digest_size = len(hashlib.new(name=hash_algorithm).digest())
      digest_padding = round_to_pow2(digest_size) - digest_size

      if salt:
        salt = salt.decode('hex')
      else:
        if salt is None:
          # If salt is not explicitly specified, choose a random salt
          # that's the same size as the digest.
          hash_size = digest_size
          salt = open('/dev/urandom').read(hash_size)
        else:
          salt = ''

      # Hashes are stored upside down so we need to calcuate hash
      # offsets in advance.
      (hash_level_offsets, tree_size) = calc_hash_level_offsets(
          image_size, block_size, digest_size + digest_padding)

      # Make room for the tree.
      image.truncate(image_size + tree_size)

      # Generate the tree.
      root_digest = generate_hash_tree(image, image_size, block_size,
                                       hash_algorithm, salt, digest_padding,
                                       tree_offset, hash_level_offsets)

      ht_desc = AvbHashtreeDescriptor()
      ht_desc.dm_verity_version = 1
      ht_desc.image_size = image_size
      ht_desc.tree_offset = tree_offset
      ht_desc.tree_size = tree_size
      ht_desc.data_block_size = block_size
      ht_desc.hash_block_size = block_size
      ht_desc.hash_algorithm = hash_algorithm
      ht_desc.partition_name = partition_name
      ht_desc.salt = salt
      ht_desc.root_digest = root_digest

      # Generate the VBMeta footer.
      vbmeta_offset = tree_offset + tree_size
      vbmeta_blob = self._generate_vbmeta_blob(
          algorithm_name, key_path, [ht_desc], rollback_index, props,
          props_from_file, kernel_cmdlines,
          generate_dm_verity_cmdline_from_hashtree,
          include_descriptors_from_image)
      image.seek(vbmeta_offset)
      image.write(vbmeta_blob)

      # Check that |partition_size| is going to be big enough
      footer_offset = partition_size - AvbFooter.SIZE
      if image.tell() > footer_offset:
        raise AvbError('Partition size of {} is {} bytes too small.'.format(
            partition_size, image.tell() - footer_offset))

      # Generate the Footer that tells where the VBMeta footer is.
      footer = AvbFooter()
      footer.original_image_size = original_image_size
      footer.vbmeta_offset = vbmeta_offset
      footer.vbmeta_size = len(vbmeta_blob)
      image.seek(footer_offset)
      footer.save(image)
    except:
      # Truncate back to original size, then re-raise
      image.truncate(original_image_size)
      raise
1669
1670
def calc_hash_level_offsets(image_size, block_size, digest_size):
  """Calculate the offsets of all the hash-levels in a Merkle-tree.

  Arguments:
    image_size: The size of the image to calculate a Merkle-tree for.
    block_size: The block size, e.g. 4096.
    digest_size: The size of each hash, e.g. 32 for SHA-256.

  Returns:
    A tuple where the first argument is an array of offsets and the
    second is size of the tree, in bytes.
  """
  level_offsets = []
  level_sizes = []
  tree_size = 0

  num_levels = 0
  size = image_size
  while size > block_size:
    # Use // so the arithmetic stays integral under Python 3 as well.
    num_blocks = (size + block_size - 1) // block_size
    # Each level is padded up to a whole number of blocks.
    level_size = ((num_blocks * digest_size + block_size - 1) //
                  block_size) * block_size

    level_sizes.append(level_size)
    tree_size += level_size
    num_levels += 1

    size = level_size

  # Levels are stored top-down: level n starts after all higher levels.
  for n in range(0, num_levels):
    level_offsets.append(sum(level_sizes[n + 1:]))

  return (level_offsets, tree_size)
1706
1707
def generate_hash_tree(image, image_size, block_size, hash_alg_name, salt,
                       digest_padding, tree_offset, hash_level_offsets):
  """Generates a Merkle-tree for a file.

  Args:
    image: The image, as a file.
    image_size: The size of the image.
    block_size: The block size, e.g. 4096.
    hash_alg_name: The hash algorithm, e.g. 'sha256' or 'sha1'.
    salt: The salt to use.
    digest_padding: The padding for each digest.
    tree_offset: The offset of where to store the Merkle tree in |image|.
    hash_level_offsets: The offsets from calc_hash_level_offsets().

  Returns:
    The top-level hash.
  """
  def _salted_hasher():
    # Feed the salt via update() instead of the Python 2-only
    # hashlib.new(..., string=salt) keyword; behavior is identical.
    hasher = hashlib.new(hash_alg_name)
    hasher.update(salt)
    return hasher

  def _hash_block(data):
    # Hash one block, zero-padding short (final) reads up to block_size.
    hasher = _salted_hasher()
    hasher.update(data)
    if len(data) < block_size:
      hasher.update(b'\0' * (block_size - len(data)))
    return hasher.digest()

  if image_size <= block_size:
    # Degenerate case: the data fits in a single block so there are no
    # intermediate levels (previously this crashed with level_output
    # unbound). The root is simply the hash of the one (padded) block.
    image.seek(0)
    return _hash_block(image.read(image_size))

  hash_src_offset = 0
  hash_src_size = image_size
  level_num = 0
  while hash_src_size > block_size:
    level_output = b''
    image.seek(hash_src_offset)
    remaining = hash_src_size
    while remaining > 0:
      data = image.read(min(remaining, block_size))
      assert data
      remaining -= len(data)
      level_output += _hash_block(data)
      if digest_padding > 0:
        level_output += b'\0' * digest_padding

    # Pad the level up to a whole number of blocks.
    padding_needed = -len(level_output) % block_size
    level_output += b'\0' * padding_needed

    # Levels are stored "upside down": this level goes at its
    # precomputed offset inside the tree area.
    hash_dest_offset = hash_level_offsets[level_num] + tree_offset

    image.seek(hash_dest_offset)
    image.write(level_output)

    # The level just written is the input for the next level.
    hash_src_offset = hash_dest_offset
    hash_src_size = len(level_output)

    level_num += 1

  hasher = _salted_hasher()
  hasher.update(level_output)
  return hasher.digest()
1761
1762
1763class AvbTool(object):
1764 """Object for avbtool command-line tool."""
1765
  def __init__(self):
    """Initializer method."""
    # Single Avb instance that carries out the work for every sub-command.
    self.avb = Avb()
1769
1770 def _add_common_args(self, sub_parser):
1771 """Adds arguments used by several sub-commands.
1772
1773 Arguments:
1774 sub_parser: The parser to add arguments to.
1775 """
1776 sub_parser.add_argument('--algorithm',
1777 help='Algorithm to use (default: NONE)',
1778 metavar='ALGORITHM',
1779 default='NONE')
1780 sub_parser.add_argument('--key',
1781 help='Path to RSA private key file',
1782 metavar='KEY',
1783 required=False)
1784 sub_parser.add_argument('--rollback_index',
1785 help='Rollback Index',
1786 type=parse_number,
1787 default=0)
1788 sub_parser.add_argument('--prop',
1789 help='Add property',
1790 metavar='KEY:VALUE',
1791 action='append')
1792 sub_parser.add_argument('--prop_from_file',
1793 help='Add property from file',
1794 metavar='KEY:PATH',
1795 action='append')
1796 sub_parser.add_argument('--kernel_cmdline',
1797 help='Add kernel cmdline',
1798 metavar='CMDLINE',
1799 action='append')
1800 sub_parser.add_argument('--generate_dm_verity_cmdline_from_hashtree',
1801 metavar='IMAGE',
1802 help='Generate kernel cmdline for dm-verity',
1803 type=argparse.FileType('rb'))
1804 sub_parser.add_argument('--include_descriptors_from_image',
1805 help='Include descriptors from image',
1806 metavar='IMAGE',
1807 action='append',
1808 type=argparse.FileType('rb'))
1809
1810 def run(self, argv):
1811 """Command-line processor.
1812
1813 Arguments:
1814 argv: Pass sys.argv from main.
1815 """
1816 parser = argparse.ArgumentParser()
1817 subparsers = parser.add_subparsers(title='subcommands')
1818
1819 sub_parser = subparsers.add_parser('version',
1820 help='Prints version of avbtool.')
1821 sub_parser.set_defaults(func=self.version)
1822
1823 sub_parser = subparsers.add_parser('extract_public_key',
1824 help='Extract public key.')
1825 sub_parser.add_argument('--key',
1826 help='Path to RSA private key file',
1827 required=True)
1828 sub_parser.add_argument('--output',
1829 help='Output file name',
1830 type=argparse.FileType('wb'),
1831 required=True)
1832 sub_parser.set_defaults(func=self.extract_public_key)
1833
1834 sub_parser = subparsers.add_parser('make_vbmeta_image',
1835 help='Makes a vbmeta image.')
1836 sub_parser.add_argument('--output',
1837 help='Output file name',
1838 type=argparse.FileType('wb'),
1839 required=True)
1840 self._add_common_args(sub_parser)
1841 sub_parser.add_argument('--chain_partition',
1842 help='Allow signed integrity-data for partition',
1843 metavar='PART_NAME:ROLLBACK_SLOT:KEY_PATH',
1844 action='append')
1845 sub_parser.set_defaults(func=self.make_vbmeta_image)
1846
1847 sub_parser = subparsers.add_parser('add_hash_footer',
1848 help='Add hashes and footer to image.')
1849 sub_parser.add_argument('--image',
1850 help='Brillo boot image to add hashes to',
1851 type=argparse.FileType('rab+'))
1852 sub_parser.add_argument('--partition_size',
1853 help='Partition size',
1854 type=parse_number,
1855 required=True)
1856 sub_parser.add_argument('--partition_name',
1857 help='Partition name',
1858 required=True)
1859 sub_parser.add_argument('--hash_algorithm',
1860 help='Hash algorithm to use (default: sha256)',
1861 default='sha256')
1862 sub_parser.add_argument('--salt',
1863 help='Salt in hex (default: /dev/urandom)')
1864 self._add_common_args(sub_parser)
1865 sub_parser.set_defaults(func=self.add_hash_footer)
1866
1867 sub_parser = subparsers.add_parser('add_hashtree_footer',
1868 help='Add hashtree and footer to image.')
1869 sub_parser.add_argument('--image',
1870 help='Brillo boot image to add hashes to',
1871 type=argparse.FileType('rab+'))
1872 sub_parser.add_argument('--partition_size',
1873 help='Partition size',
1874 type=parse_number,
1875 required=True)
1876 sub_parser.add_argument('--partition_name',
1877 help='Partition name',
1878 required=True)
1879 sub_parser.add_argument('--hash_algorithm',
1880 help='Hash algorithm to use (default: sha1)',
1881 default='sha1')
1882 sub_parser.add_argument('--salt',
1883 help='Salt in hex (default: /dev/urandom)')
1884 sub_parser.add_argument('--block_size',
1885 help='Block size (default: 4096)',
1886 type=parse_number,
1887 default=4096)
1888 self._add_common_args(sub_parser)
1889 sub_parser.set_defaults(func=self.add_hashtree_footer)
1890
1891 sub_parser = subparsers.add_parser('erase_footer',
1892 help='Erase footer from an image.')
1893 sub_parser.add_argument('--image',
1894 help='Brillo image with a footer',
1895 type=argparse.FileType('rwb+'),
1896 required=True)
1897 sub_parser.add_argument('--keep_hashtree',
1898 help='Keep the hashtree in the image',
1899 action='store_true')
1900 sub_parser.set_defaults(func=self.erase_footer)
1901
1902 sub_parser = subparsers.add_parser(
1903 'info_image',
1904 help='Show information about vbmeta or footer.')
1905 sub_parser.add_argument('--image',
1906 help='Brillo boot image to use',
1907 type=argparse.FileType('rb'),
1908 required=True)
1909 sub_parser.add_argument('--output',
1910 help='Write info to file',
1911 type=argparse.FileType('wt'),
1912 default=sys.stdout)
1913 sub_parser.set_defaults(func=self.info_image)
1914
1915 args = parser.parse_args(argv[1:])
1916 try:
1917 args.func(args)
1918 except AvbError as e:
1919 sys.stderr.write('{}\n'.format(e.message))
1920 sys.exit(1)
1921
1922 def version(self, _):
1923 """Implements the 'version' sub-command."""
1924 print '{}.{}'.format(AVB_VERSION_MAJOR, AVB_VERSION_MINOR)
1925
1926 def extract_public_key(self, args):
1927 """Implements the 'extract_public_key' sub-command."""
1928 self.avb.extract_public_key(args.key, args.output)
1929
1930 def make_vbmeta_image(self, args):
1931 """Implements the 'make_vbmeta_image' sub-command."""
1932 self.avb.make_vbmeta_image(args.output, args.chain_partition,
1933 args.algorithm, args.key, args.rollback_index,
1934 args.prop, args.prop_from_file,
1935 args.kernel_cmdline,
1936 args.generate_dm_verity_cmdline_from_hashtree,
1937 args.include_descriptors_from_image)
1938
1939 def add_hash_footer(self, args):
1940 """Implements the 'add_hash_footer' sub-command."""
1941 self.avb.add_hash_footer(args.image, args.partition_size,
1942 args.partition_name, args.hash_algorithm,
1943 args.salt, args.algorithm, args.key,
1944 args.rollback_index, args.prop,
1945 args.prop_from_file, args.kernel_cmdline,
1946 args.generate_dm_verity_cmdline_from_hashtree,
1947 args.include_descriptors_from_image)
1948
1949 def add_hashtree_footer(self, args):
1950 """Implements the 'add_hashtree_footer' sub-command."""
1951 self.avb.add_hashtree_footer(args.image, args.partition_size,
1952 args.partition_name, args.hash_algorithm,
1953 args.block_size, args.salt, args.algorithm,
1954 args.key, args.rollback_index, args.prop,
1955 args.prop_from_file, args.kernel_cmdline,
1956 args.generate_dm_verity_cmdline_from_hashtree,
1957 args.include_descriptors_from_image)
1958
1959 def erase_footer(self, args):
1960 """Implements the 'erase_footer' sub-command."""
1961 self.avb.erase_footer(args.image, args.keep_hashtree)
1962
1963 def info_image(self, args):
1964 """Implements the 'info_image' sub-command."""
1965 self.avb.info_image(args.image, args.output)
1966
1967
if __name__ == '__main__':
  # Script entry point: AvbTool.run() parses argv, dispatches to the chosen
  # sub-command, and handles AvbError / the process exit code itself.
  AvbTool().run(sys.argv)