refine AVB support and ramdisk gzip

- avbtool only works with Python 2.x, so systems whose default python is
  Python 3.x fail to run avbtool; pin the shebang to python2.7
- avbtool: sync with AOSP master
- avb salt: record the salt of the AVB signature when unpacking, and reuse it in the re-packing process
- ramdisk gzip: java.util.zip.GZIPOutputStream doesn't support parameters, so use
      org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream instead (see the sketch below)
cfig 7 years ago
parent 5d97d46c62
commit 48fd99d1c1
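
As a hedged illustration of the gzip point above (file names here are placeholders, not the project's actual paths), commons-compress exposes the gzip OS header byte through GzipParameters, which plain java.util.zip.GZIPOutputStream does not:

    import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream
    import org.apache.commons.compress.compressors.gzip.GzipParameters
    import java.io.File

    fun main() {
        // GzipParameters is what GZIPOutputStream lacks: it lets us set the
        // OS byte of the gzip header (3 = Unix per RFC 1952, matching AOSP images).
        val params = GzipParameters().apply { operatingSystem = 3 }
        // "ramdisk.cpio" / "ramdisk.img.gz" are hypothetical file names for this sketch.
        File("ramdisk.img.gz").outputStream().use { fos ->
            GzipCompressorOutputStream(fos, params).use { gos ->
                File("ramdisk.cpio").inputStream().use { fis -> fis.copyTo(gos) }
            }
        }
    }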

@ -2,50 +2,50 @@
### 1. header part
item size in bytes
+----------------------------------------------------------+
item size in bytes position
+----------------------------------------------------------+ --> 0
|<MAGIC HEADER> | 8 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 8
|<kernel length> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 12
|<kernel offset> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 16 (0x10)
|<ramdisk length> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 20
|<ramdisk offset> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 24
|<second bootloader length> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 28
|<second bootloader offset> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 32 (0x20)
|<tags offset> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 36
|<page size> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 40
|<header version> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 44
|<os version& os patch level> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 48 (0x30)
|<board name> | 16 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 64 (0x40)
|<cmdline part 1> | 512 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 576 (0x240)
|<hash digest> | 32 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 608 (0x260)
|<cmdline part 2> | 1024 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 1632 (0x660)
|<dtbo length> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 1636
|<dtbo offset> | 8 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 1644
|<header size> | 4 |
|--------------------------------+-------------------------|
|--------------------------------+-------------------------| --> 1648 (0x670)
|<padding> | min(n * page_size - 1648)|
+----------------------------------------------------------+
+----------------------------------------------------------+ --> pagesize
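
For orientation, a minimal Kotlin read-back of the header laid out above (a sketch only: the field names and the "boot.img" path are illustrative, and little-endian byte order is assumed as in the AOSP boot_img_hdr struct):

    import java.io.File
    import java.nio.ByteBuffer
    import java.nio.ByteOrder

    fun main() {
        // Read the first 1648 bytes (the v1 header) of a hypothetical boot.img.
        val buf = ByteBuffer.wrap(File("boot.img").readBytes(), 0, 1648)
                .order(ByteOrder.LITTLE_ENDIAN)
        val magic = ByteArray(8).also { buf.get(it) }           // "ANDROID!" at offset 0
        val kernelLen = buf.int;  val kernelOffset = buf.int    // offsets 8, 12
        val ramdiskLen = buf.int; val ramdiskOffset = buf.int   // offsets 16, 20
        val secondLen = buf.int;  val secondOffset = buf.int    // offsets 24, 28
        val tagsOffset = buf.int                                 // offset 32
        val pageSize = buf.int                                   // offset 36
        val headerVersion = buf.int                              // offset 40
        val osVersion = buf.int                                  // offset 44
        val boardName = ByteArray(16).also { buf.get(it) }       // offset 48
        val cmdline1 = ByteArray(512).also { buf.get(it) }       // offset 64
        val hashDigest = ByteArray(32).also { buf.get(it) }      // offset 576
        val cmdline2 = ByteArray(1024).also { buf.get(it) }      // offset 608
        val dtboLen = buf.int                                     // offset 1632
        val dtboOffset = buf.long                                 // offset 1636, 8 bytes
        val headerSize = buf.int                                  // offset 1644, = 1648 for v1
        println("${String(magic)} pageSize=$pageSize headerVersion=$headerVersion headerSize=$headerSize")
    }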
### 2. data part
+----------------------------------------------------------+
+----------------------------------------------------------+ --> pagesize
|<kernel> | kernel length |
|--------------------------------+-------------------------|
|<padding> | min(n * page_size - len)|

@ -8,7 +8,7 @@ This tool focuses on editing Android boot.img(also recovery.img and recovery-two
#### Host OS requirement:
Linux or Mac.
Also need python and java 8.
Also need python 2.x (required by avbtool) and java 8.
#### Target Android requirement:

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python2.7
# Copyright 2016, The Android Open Source Project
#
@ -38,9 +38,13 @@ import time
# Keep in sync with libavb/avb_version.h.
AVB_VERSION_MAJOR = 1
AVB_VERSION_MINOR = 0
AVB_VERSION_MINOR = 1
AVB_VERSION_SUB = 0
# Keep in sync with libavb/avb_footer.h.
AVB_FOOTER_VERSION_MAJOR = 1
AVB_FOOTER_VERSION_MINOR = 0
AVB_VBMETA_IMAGE_FLAGS_HASHTREE_DISABLED = 1
@ -1195,11 +1199,12 @@ class AvbHashtreeDescriptor(AvbDescriptor):
partition_name: Partition name.
salt: Salt used.
root_digest: Root digest.
flags: Descriptor flags (see avb_hashtree_descriptor.h).
"""
TAG = 1
RESERVED = 64
SIZE = 116 + RESERVED
RESERVED = 60
SIZE = 120 + RESERVED
FORMAT_STRING = ('!QQ' # tag, num_bytes_following (descriptor header)
'L' # dm-verity version used
'Q' # image size (bytes)
@ -1213,7 +1218,8 @@ class AvbHashtreeDescriptor(AvbDescriptor):
'32s' # hash algorithm used
'L' # partition name (bytes)
'L' # salt length (bytes)
'L' + # root digest length (bytes)
'L' # root digest length (bytes)
'L' + # flags
str(RESERVED) + 's') # reserved
def __init__(self, data=None):
@ -1233,8 +1239,8 @@ class AvbHashtreeDescriptor(AvbDescriptor):
self.tree_offset, self.tree_size, self.data_block_size,
self.hash_block_size, self.fec_num_roots, self.fec_offset, self.fec_size,
self.hash_algorithm, partition_name_len, salt_len,
root_digest_len, _) = struct.unpack(self.FORMAT_STRING,
data[0:self.SIZE])
root_digest_len, self.flags, _) = struct.unpack(self.FORMAT_STRING,
data[0:self.SIZE])
expected_size = round_to_multiple(
self.SIZE - 16 + partition_name_len + salt_len + root_digest_len, 8)
if tag != self.TAG or num_bytes_following != expected_size:
@ -1252,7 +1258,8 @@ class AvbHashtreeDescriptor(AvbDescriptor):
o += salt_len
self.root_digest = data[(self.SIZE + o):(self.SIZE + o + root_digest_len)]
if root_digest_len != len(hashlib.new(name=self.hash_algorithm).digest()):
raise LookupError('root_digest_len doesn\'t match hash algorithm')
if root_digest_len != 0:
raise LookupError('root_digest_len doesn\'t match hash algorithm')
else:
self.dm_verity_version = 0
@ -1268,6 +1275,7 @@ class AvbHashtreeDescriptor(AvbDescriptor):
self.partition_name = ''
self.salt = bytearray()
self.root_digest = bytearray()
self.flags = 0
def print_desc(self, o):
"""Print the descriptor.
@ -1293,6 +1301,7 @@ class AvbHashtreeDescriptor(AvbDescriptor):
'hex')))
o.write(' Root Digest: {}\n'.format(str(
self.root_digest).encode('hex')))
o.write(' Flags: {}\n'.format(self.flags))
def encode(self):
"""Serializes the descriptor.
@ -1311,7 +1320,7 @@ class AvbHashtreeDescriptor(AvbDescriptor):
self.hash_block_size, self.fec_num_roots,
self.fec_offset, self.fec_size, self.hash_algorithm,
len(encoded_name), len(self.salt), len(self.root_digest),
self.RESERVED*'\0')
self.flags, self.RESERVED*'\0')
padding = struct.pack(str(padding_size) + 'x')
ret = desc + encoded_name + self.salt + self.root_digest + padding
return bytearray(ret)
@ -1341,8 +1350,8 @@ class AvbHashtreeDescriptor(AvbDescriptor):
digest_padding,
hash_level_offsets,
tree_size)
# The root digest must match...
if root_digest != self.root_digest:
# The root digest must match unless it is not embedded in the descriptor.
if len(self.root_digest) != 0 and root_digest != self.root_digest:
sys.stderr.write('hashtree of {} does not match descriptor\n'.
format(image_filename))
return False
@ -1374,17 +1383,19 @@ class AvbHashDescriptor(AvbDescriptor):
partition_name: Partition name.
salt: Salt used.
digest: The hash value of salt and data combined.
flags: The descriptor flags (see avb_hash_descriptor.h).
"""
TAG = 2
RESERVED = 64
SIZE = 68 + RESERVED
RESERVED = 60
SIZE = 72 + RESERVED
FORMAT_STRING = ('!QQ' # tag, num_bytes_following (descriptor header)
'Q' # image size (bytes)
'32s' # hash algorithm used
'L' # partition name (bytes)
'L' # salt length (bytes)
'L' + # digest length (bytes)
'L' # digest length (bytes)
'L' + # flags
str(RESERVED) + 's') # reserved
def __init__(self, data=None):
@ -1402,7 +1413,8 @@ class AvbHashDescriptor(AvbDescriptor):
if data:
(tag, num_bytes_following, self.image_size, self.hash_algorithm,
partition_name_len, salt_len,
digest_len, _) = struct.unpack(self.FORMAT_STRING, data[0:self.SIZE])
digest_len, self.flags, _) = struct.unpack(self.FORMAT_STRING,
data[0:self.SIZE])
expected_size = round_to_multiple(
self.SIZE - 16 + partition_name_len + salt_len + digest_len, 8)
if tag != self.TAG or num_bytes_following != expected_size:
@ -1419,7 +1431,8 @@ class AvbHashDescriptor(AvbDescriptor):
o += salt_len
self.digest = data[(self.SIZE + o):(self.SIZE + o + digest_len)]
if digest_len != len(hashlib.new(name=self.hash_algorithm).digest()):
raise LookupError('digest_len doesn\'t match hash algorithm')
if digest_len != 0:
raise LookupError('digest_len doesn\'t match hash algorithm')
else:
self.image_size = 0
@ -1427,6 +1440,7 @@ class AvbHashDescriptor(AvbDescriptor):
self.partition_name = ''
self.salt = bytearray()
self.digest = bytearray()
self.flags = 0
def print_desc(self, o):
"""Print the descriptor.
@ -1442,6 +1456,7 @@ class AvbHashDescriptor(AvbDescriptor):
'hex')))
o.write(' Digest: {}\n'.format(str(self.digest).encode(
'hex')))
o.write(' Flags: {}\n'.format(self.flags))
def encode(self):
"""Serializes the descriptor.
@ -1456,7 +1471,8 @@ class AvbHashDescriptor(AvbDescriptor):
padding_size = nbf_with_padding - num_bytes_following
desc = struct.pack(self.FORMAT_STRING, self.TAG, nbf_with_padding,
self.image_size, self.hash_algorithm, len(encoded_name),
len(self.salt), len(self.digest), self.RESERVED*'\0')
len(self.salt), len(self.digest), self.flags,
self.RESERVED*'\0')
padding = struct.pack(str(padding_size) + 'x')
ret = desc + encoded_name + self.salt + self.digest + padding
return bytearray(ret)
@ -1480,7 +1496,8 @@ class AvbHashDescriptor(AvbDescriptor):
ha.update(self.salt)
ha.update(data)
digest = ha.digest()
if digest != self.digest:
# The digest must match unless there is no digest in the descriptor.
if len(self.digest) != 0 and digest != self.digest:
sys.stderr.write('{} digest of {} does not match digest in descriptor\n'.
format(self.hash_algorithm, image_filename))
return False
@ -1754,8 +1771,8 @@ class AvbFooter(object):
MAGIC = 'AVBf'
SIZE = 64
RESERVED = 28
FOOTER_VERSION_MAJOR = 1
FOOTER_VERSION_MINOR = 0
FOOTER_VERSION_MAJOR = AVB_FOOTER_VERSION_MAJOR
FOOTER_VERSION_MINOR = AVB_FOOTER_VERSION_MINOR
FORMAT_STRING = ('!4s2L' # magic, 2 x version.
'Q' # Original image size.
'Q' # Offset of VBMeta blob.
@ -1889,7 +1906,7 @@ class AvbVBMetaHeader(object):
minor: The minor version of libavb that has support for the feature.
"""
self.required_libavb_version_minor = (
min(self.required_libavb_version_minor, minor))
max(self.required_libavb_version_minor, minor))
def save(self, output):
"""Serializes the header (256 bytes) to disk.
@ -2385,10 +2402,19 @@ class Avb(object):
"""
# If we're asked to calculate minimum required libavb version, we're done.
#
# NOTE: When we get to 1.1 and later this will get more complicated.
if print_required_libavb_version:
print '1.0'
if include_descriptors_from_image:
# Use the bump logic in AvbVBMetaHeader to calculate the max required
# version of all included descriptors.
tmp_header = AvbVBMetaHeader()
for image in include_descriptors_from_image:
(_, image_header, _, _) = self._parse_image(ImageHandler(image.name))
tmp_header.bump_required_libavb_version_minor(
image_header.required_libavb_version_minor)
print '1.{}'.format(tmp_header.required_libavb_version_minor)
else:
# Descriptors aside, all vbmeta features are supported in 1.0.
print '1.0'
return
if not output:
@ -2402,7 +2428,7 @@ class Avb(object):
kernel_cmdlines, setup_rootfs_from_kernel, ht_desc_to_setup,
include_descriptors_from_image, signing_helper,
signing_helper_with_files, release_string,
append_to_release_string)
append_to_release_string, 0)
# Write entire vbmeta blob (header, authentication, auxiliary).
output.seek(0)
@ -2422,7 +2448,8 @@ class Avb(object):
ht_desc_to_setup,
include_descriptors_from_image, signing_helper,
signing_helper_with_files,
release_string, append_to_release_string):
release_string, append_to_release_string,
required_libavb_version_minor):
"""Generates a VBMeta blob.
This blob contains the header (struct AvbVBMetaHeader), the
@ -2454,6 +2481,7 @@ class Avb(object):
signing_helper_with_files: Same as signing_helper but uses files instead.
release_string: None or avbtool release string.
append_to_release_string: None or string to append.
required_libavb_version_minor: Use at least this required minor version.
Returns:
A bytearray() with the VBMeta blob.
@ -2472,6 +2500,9 @@ class Avb(object):
if not descriptors:
descriptors = []
h = AvbVBMetaHeader()
h.bump_required_libavb_version_minor(required_libavb_version_minor)
# Insert chained partition descriptors, if any
if chain_partitions:
used_locations = {}
@ -2547,11 +2578,27 @@ class Avb(object):
# Add descriptors from other images.
if include_descriptors_from_image:
descriptors_dict = dict()
for image in include_descriptors_from_image:
image_handler = ImageHandler(image.name)
(_, _, image_descriptors, _) = self._parse_image(image_handler)
(_, image_vbmeta_header, image_descriptors, _) = self._parse_image(
image_handler)
# Bump the required libavb version to support all included descriptors.
h.bump_required_libavb_version_minor(
image_vbmeta_header.required_libavb_version_minor)
for desc in image_descriptors:
encoded_descriptors.extend(desc.encode())
# The --include_descriptors_from_image option is used in some setups
# with images A and B where both A and B contain a descriptor
# for a partition with the same name. Since it's not meaningful
# to include both descriptors, only include the last seen descriptor.
# See bug 76386656 for details.
if hasattr(desc, 'partition_name'):
key = type(desc).__name__ + '_' + desc.partition_name
descriptors_dict[key] = desc.encode()
else:
encoded_descriptors.extend(desc.encode())
for key in sorted(descriptors_dict.keys()):
encoded_descriptors.extend(descriptors_dict[key])
# Load public key metadata blob, if requested.
pkmd_blob = []
@ -2570,8 +2617,6 @@ class Avb(object):
raise AvbError('Key is wrong size for algorithm {}'.format(
algorithm_name))
h = AvbVBMetaHeader()
# Override release string, if requested.
if isinstance(release_string, (str, unicode)):
h.release_string = release_string
@ -2744,7 +2789,8 @@ class Avb(object):
signing_helper, signing_helper_with_files,
release_string, append_to_release_string,
output_vbmeta_image, do_not_append_vbmeta_image,
print_required_libavb_version):
print_required_libavb_version, use_persistent_digest,
do_not_use_ab):
"""Implementation of the add_hash_footer on unsparse images.
Arguments:
@ -2776,16 +2822,20 @@ class Avb(object):
output_vbmeta_image: If not None, also write vbmeta struct to this file.
do_not_append_vbmeta_image: If True, don't append vbmeta struct.
print_required_libavb_version: True to only print required libavb version.
use_persistent_digest: Use a persistent digest on device.
do_not_use_ab: This partition does not use A/B.
Raises:
AvbError: If an argument is incorrect.
"""
required_libavb_version_minor = 0
if use_persistent_digest or do_not_use_ab:
required_libavb_version_minor = 1
# If we're asked to calculate minimum required libavb version, we're done.
#
# NOTE: When we get to 1.1 and later this will get more complicated.
if print_required_libavb_version:
print '1.0'
print '1.{}'.format(required_libavb_version_minor)
return
# First, calculate the maximum image size such that an image
@ -2861,7 +2911,11 @@ class Avb(object):
h_desc.hash_algorithm = hash_algorithm
h_desc.partition_name = partition_name
h_desc.salt = salt
h_desc.digest = digest
h_desc.flags = 0
if do_not_use_ab:
h_desc.flags |= 1 # AVB_HASH_DESCRIPTOR_FLAGS_DO_NOT_USE_AB
if not use_persistent_digest:
h_desc.digest = digest
# Generate the VBMeta footer.
ht_desc_to_setup = None
@ -2871,7 +2925,7 @@ class Avb(object):
kernel_cmdlines, setup_rootfs_from_kernel, ht_desc_to_setup,
include_descriptors_from_image, signing_helper,
signing_helper_with_files, release_string,
append_to_release_string)
append_to_release_string, required_libavb_version_minor)
# Write vbmeta blob, if requested.
if output_vbmeta_image:
@ -2935,7 +2989,8 @@ class Avb(object):
signing_helper_with_files,
release_string, append_to_release_string,
output_vbmeta_image, do_not_append_vbmeta_image,
print_required_libavb_version):
print_required_libavb_version,
use_persistent_root_digest, do_not_use_ab):
"""Implements the 'add_hashtree_footer' command.
See https://gitlab.com/cryptsetup/cryptsetup/wikis/DMVerity for
@ -2975,16 +3030,20 @@ class Avb(object):
output_vbmeta_image: If not None, also write vbmeta struct to this file.
do_not_append_vbmeta_image: If True, don't append vbmeta struct.
print_required_libavb_version: True to only print required libavb version.
use_persistent_root_digest: Use a persistent root digest on device.
do_not_use_ab: The partition does not use A/B.
Raises:
AvbError: If an argument is incorrect.
"""
required_libavb_version_minor = 0
if use_persistent_root_digest or do_not_use_ab:
required_libavb_version_minor = 1
# If we're asked to calculate minimum required libavb version, we're done.
#
# NOTE: When we get to 1.1 and later this will get more complicated.
if print_required_libavb_version:
print '1.0'
print '1.{}'.format(required_libavb_version_minor)
return
digest_size = len(hashlib.new(name=hash_algorithm).digest())
@ -3091,7 +3150,10 @@ class Avb(object):
ht_desc.hash_algorithm = hash_algorithm
ht_desc.partition_name = partition_name
ht_desc.salt = salt
ht_desc.root_digest = root_digest
if do_not_use_ab:
ht_desc.flags |= 1 # AVB_HASHTREE_DESCRIPTOR_FLAGS_DO_NOT_USE_AB
if not use_persistent_root_digest:
ht_desc.root_digest = root_digest
# Write the hash tree
padding_needed = (round_to_multiple(len(hash_tree), image.block_size) -
@ -3126,7 +3188,7 @@ class Avb(object):
kernel_cmdlines, setup_rootfs_from_kernel, ht_desc_to_setup,
include_descriptors_from_image, signing_helper,
signing_helper_with_files, release_string,
append_to_release_string)
append_to_release_string, required_libavb_version_minor)
padding_needed = (round_to_multiple(len(vbmeta_blob), image.block_size) -
len(vbmeta_blob))
vbmeta_blob_with_padding = vbmeta_blob + '\0'*padding_needed
@ -3163,7 +3225,7 @@ class Avb(object):
def make_atx_certificate(self, output, authority_key_path, subject_key_path,
subject_key_version, subject,
is_intermediate_authority, signing_helper,
is_intermediate_authority, usage, signing_helper,
signing_helper_with_files):
"""Implements the 'make_atx_certificate' command.
@ -3185,6 +3247,7 @@ class Avb(object):
should be the same Product ID found in the permanent attributes.
is_intermediate_authority: True if the certificate is for an intermediate
authority.
usage: If not empty, overrides the cert usage with a hash of this value.
signing_helper: Program which signs a hash and returns the signature.
signing_helper_with_files: Same as signing_helper but uses files instead.
"""
@ -3194,9 +3257,10 @@ class Avb(object):
hasher = hashlib.sha256()
hasher.update(subject)
signed_data.extend(hasher.digest())
usage = 'com.google.android.things.vboot'
if is_intermediate_authority:
usage += '.ca'
if not usage:
usage = 'com.google.android.things.vboot'
if is_intermediate_authority:
usage += '.ca'
hasher = hashlib.sha256()
hasher.update(usage)
signed_data.extend(hasher.digest())
@ -3272,6 +3336,67 @@ class Avb(object):
output.write(intermediate_key_certificate)
output.write(product_key_certificate)
def make_atx_unlock_credential(self, output, intermediate_key_certificate,
unlock_key_certificate, challenge_path,
unlock_key_path, signing_helper,
signing_helper_with_files):
"""Implements the 'make_atx_unlock_credential' command.
Android Things unlock credentials can be used to authorize the unlock of AVB
on a device. These credentials are presented to an Android Things bootloader
via the fastboot interface in response to a 16-byte challenge. This method
creates all fields of the credential except the challenge signature field
(which is the last field) and can optionally create the challenge signature
field as well if a challenge and the unlock_key_path is provided.
Arguments:
output: The credential will be written to this file on success.
intermediate_key_certificate: A certificate file as output by
make_atx_certificate with
is_intermediate_authority set to true.
unlock_key_certificate: A certificate file as output by
make_atx_certificate with
is_intermediate_authority set to false and the
usage set to
'com.google.android.things.vboot.unlock'.
challenge_path: [optional] A path to the challenge to sign.
unlock_key_path: [optional] A PEM file path with the unlock private key.
signing_helper: Program which signs a hash and returns the signature.
signing_helper_with_files: Same as signing_helper but uses files instead.
Raises:
AvbError: If an argument is incorrect.
"""
EXPECTED_CERTIFICATE_SIZE = 1620
EXPECTED_CHALLENGE_SIZE = 16
if len(intermediate_key_certificate) != EXPECTED_CERTIFICATE_SIZE:
raise AvbError('Invalid intermediate key certificate length.')
if len(unlock_key_certificate) != EXPECTED_CERTIFICATE_SIZE:
raise AvbError('Invalid product key certificate length.')
challenge = bytearray()
if challenge_path:
with open(challenge_path, 'r') as f:
challenge = f.read()
if len(challenge) != EXPECTED_CHALLENGE_SIZE:
raise AvbError('Invalid unlock challenge length.')
output.write(struct.pack('<I', 1)) # Format Version
output.write(intermediate_key_certificate)
output.write(unlock_key_certificate)
if challenge_path and unlock_key_path:
signature = bytearray()
padding_and_hash = bytearray()
algorithm_name = 'SHA512_RSA4096'
alg = ALGORITHMS[algorithm_name]
hasher = hashlib.sha512()
padding_and_hash.extend(alg.padding)
hasher.update(challenge)
padding_and_hash.extend(hasher.digest())
signature.extend(raw_sign(signing_helper, signing_helper_with_files,
algorithm_name,
alg.signature_num_bytes, unlock_key_path,
padding_and_hash))
output.write(signature)
def calc_hash_level_offsets(image_size, block_size, digest_size):
"""Calculate the offsets of all the hash-levels in a Merkle-tree.
@ -3518,6 +3643,25 @@ class AvbTool(object):
help='Set the HASHTREE_DISABLED flag',
action='store_true')
def _add_common_footer_args(self, sub_parser):
"""Adds arguments used by add_*_footer sub-commands.
Arguments:
sub_parser: The parser to add arguments to.
"""
sub_parser.add_argument('--use_persistent_digest',
help='Use a persistent digest on device instead of '
'storing the digest in the descriptor. This '
'cannot be used with A/B so must be combined '
'with --do_not_use_ab when an A/B suffix is '
'expected at runtime.',
action='store_true')
sub_parser.add_argument('--do_not_use_ab',
help='The partition does not use A/B even when an '
'A/B suffix is present. This must not be used '
'for vbmeta or chained partitions.',
action='store_true')
def _fixup_common_args(self, args):
"""Common fixups needed by subcommands.
@ -3599,6 +3743,7 @@ class AvbTool(object):
'to the image'),
action='store_true')
self._add_common_args(sub_parser)
self._add_common_footer_args(sub_parser)
sub_parser.set_defaults(func=self.add_hash_footer)
sub_parser = subparsers.add_parser('append_vbmeta_image',
@ -3671,6 +3816,7 @@ class AvbTool(object):
action='store_true',
help='Adds kernel cmdline for setting up rootfs')
self._add_common_args(sub_parser)
self._add_common_footer_args(sub_parser)
sub_parser.set_defaults(func=self.add_hashtree_footer)
sub_parser = subparsers.add_parser('erase_footer',
@ -3764,6 +3910,10 @@ class AvbTool(object):
help=('Generate an intermediate authority '
'certificate'),
action='store_true')
sub_parser.add_argument('--usage',
help=('Override usage with a hash of the provided '
'string'),
required=False)
sub_parser.add_argument('--authority_key',
help='Path to authority RSA private key file',
required=False)
@ -3813,6 +3963,43 @@ class AvbTool(object):
required=True)
sub_parser.set_defaults(func=self.make_atx_metadata)
sub_parser = subparsers.add_parser(
'make_atx_unlock_credential',
help='Create an Android Things eXtension (ATX) unlock credential.')
sub_parser.add_argument('--output',
help='Write credential to file',
type=argparse.FileType('wb'),
default=sys.stdout)
sub_parser.add_argument('--intermediate_key_certificate',
help='Path to intermediate key certificate file',
type=argparse.FileType('rb'),
required=True)
sub_parser.add_argument('--unlock_key_certificate',
help='Path to unlock key certificate file',
type=argparse.FileType('rb'),
required=True)
sub_parser.add_argument('--challenge',
help='Path to the challenge to sign (optional). If '
'this is not provided the challenge signature '
'field is omitted and can be concatenated '
'later.',
required=False)
sub_parser.add_argument('--unlock_key',
help='Path to unlock key (optional). Must be '
'provided if using --challenge.',
required=False)
sub_parser.add_argument('--signing_helper',
help='Path to helper used for signing',
metavar='APP',
default=None,
required=False)
sub_parser.add_argument('--signing_helper_with_files',
help='Path to helper used for signing using files',
metavar='APP',
default=None,
required=False)
sub_parser.set_defaults(func=self.make_atx_unlock_credential)
args = parser.parse_args(argv[1:])
try:
args.func(args)
@ -3870,7 +4057,9 @@ class AvbTool(object):
args.append_to_release_string,
args.output_vbmeta_image,
args.do_not_append_vbmeta_image,
args.print_required_libavb_version)
args.print_required_libavb_version,
args.use_persistent_digest,
args.do_not_use_ab)
def add_hashtree_footer(self, args):
"""Implements the 'add_hashtree_footer' sub-command."""
@ -3901,7 +4090,9 @@ class AvbTool(object):
args.append_to_release_string,
args.output_vbmeta_image,
args.do_not_append_vbmeta_image,
args.print_required_libavb_version)
args.print_required_libavb_version,
args.use_persistent_digest,
args.do_not_use_ab)
def erase_footer(self, args):
"""Implements the 'erase_footer' sub-command."""
@ -3931,6 +4122,7 @@ class AvbTool(object):
args.subject_key_version,
args.subject.read(),
args.subject_is_intermediate_authority,
args.usage,
args.signing_helper,
args.signing_helper_with_files)
@ -3946,6 +4138,17 @@ class AvbTool(object):
args.intermediate_key_certificate.read(),
args.product_key_certificate.read())
def make_atx_unlock_credential(self, args):
"""Implements the 'make_atx_unlock_credential' sub-command."""
self.avb.make_atx_unlock_credential(
args.output,
args.intermediate_key_certificate.read(),
args.unlock_key_certificate.read(),
args.challenge,
args.unlock_key,
args.signing_helper,
args.signing_helper_with_files)
if __name__ == '__main__':
tool = AvbTool()

@ -0,0 +1,10 @@
diff --git a/avb/avbtool b/avb/avbtool
index 2830e20..647d344 100755
--- a/avb/avbtool
+++ b/avb/avbtool
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python2.7
# Copyright 2016, The Android Open Source Project
#

@ -41,6 +41,7 @@ dependencies {
compile("com.fasterxml.jackson.core:jackson-annotations:2.9.4")
compile("com.fasterxml.jackson.core:jackson-databind:2.9.4")
compile("org.apache.commons:commons-exec:1.3")
compile("org.apache.commons:commons-compress:1.16.1")
compile("junit:junit:4.12")
}
@ -53,4 +54,4 @@ jar {
manifest {
attributes "Main-Class": "cfig.RKt"
}
}
}

@ -1,16 +1,15 @@
package cfig
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream
import org.apache.commons.compress.compressors.gzip.GzipParameters
import org.slf4j.LoggerFactory
import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.IOException
import java.io.RandomAccessFile
import java.nio.charset.StandardCharsets
import java.util.zip.GZIPInputStream
import java.util.zip.GZIPOutputStream
import org.junit.Assert.*
import java.io.*
class Helper{
class Helper {
companion object {
fun toHexString(inData: ByteArray): String {
val sb = StringBuilder()
@ -47,7 +46,7 @@ class Helper{
val buffer = ByteArray(1024)
FileOutputStream(compressedFile).use { fos ->
GZIPOutputStream(fos).use { gos ->
FileInputStream(decompressedFile).use { fis->
FileInputStream(decompressedFile).use { fis ->
var bytesRead: Int
while (true) {
bytesRead = fis.read(buffer)
@ -75,12 +74,56 @@ class Helper{
if (bytesRead <= 0) break
fileOutputStream.write(buffer, 0, bytesRead)
}
log.info("un-gzip done: $compressedFile -> $decompressedFile")
log.info("decompress(gz) done: $compressedFile -> $decompressedFile")
}
}
}
}
/*
caution: about the gzip header OS (Operating System) field
According to https://docs.oracle.com/javase/8/docs/api/java/util/zip/package-summary.html and
the GZIP spec RFC-1952 (http://www.ietf.org/rfc/rfc1952.txt), gzip files created by java.util.zip.GZIPOutputStream
mark the OS field with
0 - FAT filesystem (MS-DOS, OS/2, NT/Win32)
while the default images built from the Android source tree have the OS field set to
3 - Unix
This MAY not be a problem; at least we haven't hit one so far.
*/
@Throws(IOException::class)
fun gnuZipFile(compressedFile: String, fis: InputStream) {
val buffer = ByteArray(1024)
FileOutputStream(compressedFile).use {fos ->
GZIPOutputStream(fos).use {gos ->
var bytesRead: Int
while (true) {
bytesRead = fis.read(buffer)
if (bytesRead <= 0) break
gos.write(buffer, 0, bytesRead)
}
log.info("compress(gz) done: $compressedFile")
}
}
}
fun gnuZipFile2(compressedFile: String, fis: InputStream) {
val buffer = ByteArray(1024)
val p = GzipParameters()
p.operatingSystem = 3
FileOutputStream(compressedFile).use {fos ->
GzipCompressorOutputStream(fos, p).use {gos ->
var bytesRead: Int
while (true) {
bytesRead = fis.read(buffer)
if (bytesRead <= 0) break
gos.write(buffer, 0, bytesRead)
}
log.info("compress(gz) done: $compressedFile")
}
}
}
fun extractImageData(fileName: String, outImgName: String, offset: Long, length: Int) {
if (0 == length) {
return

@ -24,7 +24,8 @@ data class ImgInfo(
var type: String = "avb",
var originalImageSize: Int? = null,
var imageSize: Int? = null,
var partName: String? = null)
var partName: String? = null,
var salt: String = "")
data class VeritySignature(
var type: String = "dm-verity",

@ -3,6 +3,7 @@ package cfig
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.commons.exec.CommandLine
import org.apache.commons.exec.DefaultExecutor
import org.apache.commons.exec.PumpStreamHandler
import org.slf4j.LoggerFactory
import java.io.*
import java.nio.ByteBuffer
@ -10,6 +11,7 @@ import java.nio.ByteOrder
import java.security.MessageDigest
import java.util.regex.Pattern
import org.junit.Assert.*
import sun.nio.fs.UnixFileSystemProvider
class Packer {
private val log = LoggerFactory.getLogger("Packer")
@ -209,7 +211,29 @@ class Packer {
return imageId
}
fun pack(mkbootimgBin: String) {
fun packRootfs(args: ImgArgs, mkbootfs: String) {
log.info("Packing rootfs ${UnifiedConfig.workDir}root ...")
val outputStream = ByteArrayOutputStream()
val exec = DefaultExecutor()
exec.streamHandler = PumpStreamHandler(outputStream)
val cmdline = "$mkbootfs ${UnifiedConfig.workDir}root"
log.info(cmdline)
exec.execute(CommandLine.parse(cmdline))
Helper.gnuZipFile2(args.ramdisk!!, ByteArrayInputStream(outputStream.toByteArray()))
log.info("${args.ramdisk} is ready")
}
private fun File.deleteIfExists() {
if (this.exists()) {
if (!this.isFile) {
throw IllegalStateException("${this.canonicalPath} should be regular file")
}
log.info("Deleting ${this.path} ...")
this.delete()
}
}
fun pack(mkbootimgBin: String, mkbootfsBin: String) {
log.info("Loading config from ${workDir}bootimg.json")
val cfg = ObjectMapper().readValue(File(workDir + "bootimg.json"), UnifiedConfig::class.java)
val readBack = cfg.toArgs()
@ -220,9 +244,20 @@ class Packer {
log.debug(info.toString())
//clean
if (File(args.output + ".google").exists()) File(args.output + ".google").delete()
if (File(args.output + ".clear").exists()) File(args.output + ".clear").delete()
if (File(args.output + ".signed").exists()) File(args.output + ".signed").delete()
File(args.output + ".google").deleleIfExists()
File(args.output + ".clear").deleleIfExists()
File(args.output + ".signed").deleleIfExists()
File("${UnifiedConfig.workDir}ramdisk.img").deleleIfExists()
args.ramdisk?.let {
if (File(it).exists() && !File(UnifiedConfig.workDir + "root").exists()) {
//do nothing if we have ramdisk.img.gz but no /root
log.warn("Use prebuilt ramdisk file: $it")
} else {
File(it).deleteIfExists()
packRootfs(args, mkbootfsBin)
}
}
writeHeader(args)
writeData(args)
@ -261,11 +296,12 @@ class Packer {
mapToJson(info.signature as LinkedHashMap<*, *>), ImgInfo.AvbSignature::class.java)
File(args.output + ".clear").copyTo(File(args.output + ".signed"))
DefaultExecutor().execute(CommandLine.parse(
"$avbtool add_hash_footer " +
"--image ${args.output}.signed " +
"--partition_size ${sig.imageSize} " +
"--partition_name ${sig.partName}"))
verifyAVBIntegrity(args, info, avbtool)
"$avbtool add_hash_footer " +
"--image ${args.output}.signed " +
"--partition_size ${sig.imageSize} " +
"--salt ${sig.salt} " +
"--partition_name ${sig.partName}"))
verifyAVBIntegrity(args, avbtool)
}
}
}
@ -293,7 +329,7 @@ class Packer {
assertTrue(0 == p.exitValue())
}
private fun verifyAVBIntegrity(args: ImgArgs, info: ImgInfo, avbtool: String) {
private fun verifyAVBIntegrity(args: ImgArgs, avbtool: String) {
val tgt = args.output + ".signed"
log.info("Verifying AVB: $tgt")
DefaultExecutor().execute(CommandLine.parse("$avbtool verify_image --image $tgt"))

@ -149,15 +149,19 @@ class Parser {
}
}
private fun verifyAVBIntegrity(args: ImgArgs, info: ImgInfo, avbtool: String) {
DefaultExecutor().execute(CommandLine.parse("$avbtool verify_image --image ${args.output}"))
private fun verifyAVBIntegrity(args: ImgArgs, avbtool: String) {
val cmdline = "$avbtool verify_image --image ${args.output}"
log.info(cmdline)
DefaultExecutor().execute(CommandLine.parse(cmdline))
}
private fun parseAVBInfo(args: ImgArgs, info: ImgInfo, avbtool: String) {
val outputStream = ByteArrayOutputStream()
val exec = DefaultExecutor()
exec.streamHandler = PumpStreamHandler(outputStream)
exec.execute(CommandLine.parse("$avbtool info_image --image ${args.output}"))
val cmdline = "$avbtool info_image --image ${args.output}"
log.info(cmdline)
exec.execute(CommandLine.parse(cmdline))
val lines = outputStream.toString().split("\n")
lines.forEach {
val m = Pattern.compile("^Original image size:\\s+(\\d+)\\s*bytes").matcher(it)
@ -175,11 +179,19 @@ class Parser {
(info.signature as ImgInfo.AvbSignature).partName = m3.group(1)
}
val m4 = Pattern.compile("^\\s*Salt:\\s+(\\S+)$").matcher(it)
if (m4.find()) {
(info.signature as ImgInfo.AvbSignature).salt = m4.group(1)
}
log.debug("[" + it + "]")
}
assertNotNull((info.signature as ImgInfo.AvbSignature).imageSize)
assertNotNull((info.signature as ImgInfo.AvbSignature).originalImageSize)
assertTrue(!(info.signature as ImgInfo.AvbSignature).partName.isNullOrBlank())
assertTrue(!(info.signature as ImgInfo.AvbSignature).salt.isNullOrBlank())
}
private fun unpackRamdisk(imgArgs: ImgArgs) {
@ -208,7 +220,7 @@ class Parser {
if (verifiedWithAVB(imgArgs)) {
imgArgs.verifyType = ImgArgs.VerifyType.AVB
imgInfo.signature = ImgInfo.AvbSignature()
verifyAVBIntegrity(imgArgs, imgInfo, avbtool)
verifyAVBIntegrity(imgArgs, avbtool)
parseAVBInfo(imgArgs, imgInfo, avbtool)
} else {
imgArgs.verifyType = ImgArgs.VerifyType.VERIFY

@ -1,22 +1,22 @@
package cfig
fun main(args: Array<String>) {
if ((args.size == 5) && args[0] in setOf("pack", "unpack", "sign")) {
if ((args.size == 6) && args[0] in setOf("pack", "unpack", "sign")) {
when (args[0]) {
"unpack" -> {
Parser().parseAndExtract(args[1], args[3])
Parser().parseAndExtract(fileName = args[1], avbtool = args[3])
}
"pack" -> {
Packer().pack(args[2])
Packer().pack(mkbootimgBin = args[2], mkbootfsBin = args[5])
}
"sign" -> {
Packer().sign(args[3], args[4])
Packer().sign(avbtool = args[3], bootSigner = args[4])
}
}
} else {
println("Usage: unpack <boot_image_path> <mkbootfs_bin_path> <avbtool_path> <boot_signer_path>")
println("Usage: pack <boot_image_path> <mkbootfs_bin_path> <avbtool_path> <boot_signer_path>")
println("Usage: sign <boot_image_path> <mkbootfs_bin_path> <avbtool_path> <boot_signer_path>")
println("Usage: unpack <boot_image_path> <mkbootimg_bin_path> <avbtool_path> <boot_signer_path> <mkbootfs_bin_path>")
println("Usage: pack <boot_image_path> <mkbootimg_bin_path> <avbtool_path> <boot_signer_path> <mkbootfs_bin_path>")
println("Usage: sign <boot_image_path> <mkbootimg_bin_path> <avbtool_path> <boot_signer_path> <mkbootfs_bin_path>")
System.exit(1)
}
}

@ -27,6 +27,7 @@ if (new File("boot.img").exists()) {
}
project.ext.outClearIMg = new File(String.format("%s.clear", activeImg)).getAbsolutePath()
project.ext.mkbootimgBin = new File("src/mkbootimg/mkbootimg").getAbsolutePath()
project.ext.mkbootfsBin = new File("mkbootfs/build/exe/mkbootfs/mkbootfs").getAbsolutePath()
project.ext.avbtool = new File("avb/avbtool").getAbsolutePath()
project.ext.bootSigner = new File("boot_signer/build/libs/boot_signer.jar").getAbsolutePath()
println("Active image target: " + activeImg)
@ -39,15 +40,15 @@ task unpack(type: JavaExec, dependsOn: ["bbootimg:jar"]) {
main = "cfig.RKt"
classpath = files("bbootimg/build/libs/bbootimg.jar")
maxHeapSize '512m'
args "unpack", activeImg, rootProject.mkbootimgBin, rootProject.avbtool, rootProject.bootSigner
args "unpack", activeImg, rootProject.mkbootimgBin, rootProject.avbtool, rootProject.bootSigner, rootProject.mkbootfsBin
}
task packClear(type: JavaExec, dependsOn: ["bbootimg:jar"]) {
task packClear(type: JavaExec, dependsOn: ["bbootimg:jar", "mkbootfs:mkbootfsExecutable"]) {
classpath = sourceSets.main.runtimeClasspath
main = "cfig.RKt"
classpath = files("bbootimg/build/libs/bbootimg.jar")
maxHeapSize '512m'
args "pack", activeImg, rootProject.mkbootimgBin, rootProject.avbtool, rootProject.bootSigner
args "pack", activeImg, rootProject.mkbootimgBin, rootProject.avbtool, rootProject.bootSigner, rootProject.mkbootfsBin
}
task sign(type: JavaExec, dependsOn: ["bbootimg:jar", packClear, "boot_signer:jar"]) {
@ -55,14 +56,14 @@ task sign(type: JavaExec, dependsOn: ["bbootimg:jar", packClear, "boot_signer:ja
main = "cfig.RKt"
classpath = files("bbootimg/build/libs/bbootimg.jar")
maxHeapSize '512m'
args "sign", activeImg, rootProject.mkbootimgBin, rootProject.avbtool, rootProject.bootSigner
args "sign", activeImg, rootProject.mkbootimgBin, rootProject.avbtool, rootProject.bootSigner, rootProject.mkbootfsBin
}
task signTest(type: JavaExec, dependsOn: ["boot_signer:jar"]) {
main = 'com.android.verity.BootSignature'
classpath = files("boot_signer/build/libs/boot_signer.jar")
maxHeapSize '512m'
args activePath, activeImg + '.clear', 'security/verity.pk8', 'security/verity.x509.pem', activeImg + '.signed'
args activePath, activeImg + '.clear', 'security/verity.pk8', 'security/verity.x509.pem', activeImg + '.signed', rootProject.mkbootfsBin
}
task pack(dependsOn: sign) {
