Snap for 8570526 from 5238f6d5fcfde821093b8a4b567fc01e5ce48ab6 to mainline-scheduling-release

Change-Id: I39a85371a0e52613a823e9e00ee6aae5effc71ac
diff --git a/Android.bp b/Android.bp
index e61abe9..b73a924 100644
--- a/Android.bp
+++ b/Android.bp
@@ -147,82 +147,8 @@
     compile_multilib: "first",
 }
 
-python_library_host {
-    name: "aftl_proto",
-    srcs: [
-        "proto/api.proto",
-        "proto/crypto/sigpb/sigpb.proto",
-        "proto/crypto/keyspb/keyspb.proto",
-        "proto/trillian.proto",
-    ],
-    proto: {
-        include_dirs: [
-            "external/protobuf/src",
-        ],
-        local_include_dirs: [
-            "proto",
-        ],
-        canonical_path_from_root: false,
-    },
-    version: {
-        py2: {
-            enabled: false,
-        },
-        py3: {
-            enabled: true,
-        },
-    },
-}
-
-python_binary_host {
-    name: "aftltool",
-    srcs: [
-        "aftltool.py",
-        "avbtool.py",
-    ],
-    libs: [
-        "aftl_proto",
-    ],
-    main: "aftltool.py",
-    version: {
-        py2: {
-            enabled: false,
-        },
-        py3: {
-            enabled: true,
-        },
-    },
-}
-
-python_test_host {
-    name: "aftltool_test",
-    main: "aftltool_test.py",
-    srcs: [
-        "aftltool.py",
-        "aftltool_test.py",
-        "avbtool.py",
-    ],
-    libs: [
-        "aftl_proto",
-    ],
-    data: [
-        "test/data/**/*.*",
-    ],
-    test_options: {
-        unit_test: true,
-    },
-    version: {
-        py2: {
-            enabled: false,
-        },
-        py3: {
-            enabled: true,
-        },
-    },
-}
-
 // Build libavb - this is a static library that depends
-// on only libc and doesn't drag in any other dependencies.
+// on only libc and libcrypto, but no other dependencies.
 cc_library_static {
     name: "libavb",
     defaults: [
@@ -231,6 +157,8 @@
         "avb_crypto_ops_impl_boringssl",
     ],
     host_supported: true,
+    ramdisk_available: true,
+    vendor_ramdisk_available: true,
     recovery_available: true,
     header_libs: [
         "avb_headers",
@@ -240,7 +168,7 @@
         linux: {
             srcs: ["libavb/avb_sysdeps_posix.c"],
         },
-        linux_glibc: {
+        host_linux: {
             cflags: ["-fno-stack-protector"],
         },
     },
@@ -303,67 +231,8 @@
     srcs: ["libavb_ab/avb_ab_flow.c"],
 }
 
-cc_library_static {
-    name: "libavb_aftl",
-    defaults: [
-        "avb_defaults",
-        "avb_sources",
-        "avb_crypto_ops_impl_boringssl",
-    ],
-    host_supported: true,
-    recovery_available: true,
-    header_libs: ["avb_headers"],
-    export_header_lib_headers: ["avb_headers"],
-    cflags: [
-        "-fno-stack-protector",
-    ],
-    srcs: [
-        "libavb_aftl/avb_aftl_util.c",
-        "libavb_aftl/avb_aftl_validate.c",
-        "libavb_aftl/avb_aftl_verify.c",
-    ],
-}
-
-cc_fuzz {
-    name: "libavb_aftl_fuzzer",
-    defaults: ["avb_defaults"],
-    // The fuzzing entry point is declared and defined in the same file.
-    // Overwrite the behaviour introduced by avb_defaults.
-    cflags: [
-        "-Wno-missing-prototypes",
-    ],
-    srcs: [
-        "test/avb_aftl_fuzz.cc",
-    ],
-    static_libs: [
-        "libavb",
-        "libavb_aftl",
-    ],
-    shared_libs: ["libcrypto"],
-    host_supported: true,
-    corpus: ["test/corpus/*"],
-    fuzz_config: {
-        cc: [
-            "tweek@google.com",
-            "jpm@google.com",
-        ],
-        componentid: 685985,
-    },
-}
-
-cc_library_host_static {
-    name: "libavb_atx_host",
-    defaults: [
-        "avb_defaults",
-        "avb_crypto_ops_impl_boringssl",
-    ],
-    header_libs: [
-        "avb_headers",
-    ],
-    export_header_lib_headers: ["avb_headers"],
-    cflags: [
-        "-fno-stack-protector",
-    ],
+cc_defaults {
+    name: "avb_atx_sources",
     srcs: ["libavb_atx/avb_atx_validate.c"],
 }
 
@@ -377,22 +246,19 @@
     srcs: ["libavb/avb_sysdeps_posix.c"],
 }
 
-cc_library_host_static {
-    name: "libavb_things_example",
-    defaults: [
-        "avb_defaults",
-        "avb_crypto_ops_impl_boringssl",
-    ],
-    header_libs: [
-        "avb_headers",
-    ],
-    export_header_lib_headers: ["avb_headers"],
+cc_defaults {
+    name: "avb_things_example_sources",
     srcs: ["examples/things/avb_atx_slot_verify.c"],
 }
 
 cc_defaults {
     name: "libavb_host_unittest_core",
-    defaults: ["avb_defaults"],
+    defaults: [
+        "avb_defaults",
+        "avb_sources",
+        "avb_atx_sources",
+        "avb_things_example_sources",
+    ],
     required: [
         "simg2img",
         "img2simg",
@@ -411,11 +277,7 @@
     test_config: "test/libavb_host_unittest.xml",
     test_suites: ["general-tests"],
     static_libs: [
-        "libavb",
         "libavb_ab_host",
-        "libavb_aftl",
-        "libavb_atx_host",
-        "libavb_things_example",
         "libgmock_host",
         "libgtest_host",
     ],
@@ -430,9 +292,6 @@
     ],
     srcs: [
         "test/avb_ab_flow_unittest.cc",
-        "test/avb_aftl_util_unittest.cc",
-        "test/avb_aftl_validate_unittest.cc",
-        "test/avb_aftl_verify_unittest.cc",
         "test/avb_atx_validate_unittest.cc",
         "test/avb_atx_slot_verify_unittest.cc",
         "test/avb_crypto_ops_unittest.cc",
@@ -503,6 +362,8 @@
 cc_library_headers {
     name: "avb_headers",
     host_supported: true,
+    ramdisk_available: true,
+    vendor_ramdisk_available: true,
     recovery_available: true,
     export_include_dirs: ["."],
     target: {
diff --git a/OWNERS b/OWNERS
index 114cf11..f9893cf 100644
--- a/OWNERS
+++ b/OWNERS
@@ -2,5 +2,5 @@
 samitolvanen@google.com
 zeuthen@google.com
 dkrahn@google.com
-jpm@google.com
 tweek@google.com
+billylau@google.com
diff --git a/README.md b/README.md
index de2f014..a293cb0 100644
--- a/README.md
+++ b/README.md
@@ -429,7 +429,8 @@
         [--calc_max_image_size]                                                    \
         [--do_not_use_ab]                                                          \
         [--no_hashtree]                                                            \
-        [--use_persistent_digest]
+        [--use_persistent_digest]                                                  \
+        [--check_at_most_once]
 
 Valid values for `HASH_ALG` above include `sha1`, `sha256`, and `blake2b-256`.
 
@@ -907,6 +908,13 @@
 }
 ```
 
+This logic should ideally be implemented outside of the HLOS. One
+possible implementation is to update rollback indices in the
+bootloader when booting into a successful slot. This means that
+when booting into a new OS not yet marked as successful, the
+rollback indices would not be updated. The first reboot after the
+slot has been marked successful would then trigger an update of the
+rollback indices.
+
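As a minimal, hypothetical sketch of that bootloader-side policy (the `Slot` record, `update_rollback_indices`, and `write_rollback_index` names below are illustrative only, not part of AVB): stored indices are bumped only once the slot being booted has been marked successful.

```python
from collections import namedtuple

# Hypothetical stand-in for the bootloader's view of a slot.
Slot = namedtuple('Slot', ['successful', 'rollback_indices'])

def update_rollback_indices(slot, stored, write_rollback_index):
    # Only bump stored rollback indices once the slot is marked successful.
    if not slot.successful:
        return
    for location, value in enumerate(slot.rollback_indices):
        if value > stored[location]:
            write_rollback_index(location, value)

stored = [5, 7]
write = lambda loc, val: stored.__setitem__(loc, val)
update_rollback_indices(Slot(False, [6, 7]), stored, write)
assert stored == [5, 7]   # not yet successful: indices unchanged
update_rollback_indices(Slot(True, [6, 7]), stored, write)
assert stored == [6, 7]   # first boot after success: indices bumped
```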
 For an HLOS where it's possible to roll back to a previous version,
 `stored_rollback_index[n]` should be set to the largest possible value
 allowing all bootable slots to boot. This approach is implemented in
@@ -1112,7 +1120,9 @@
 
 ### Version 1.2
 
-Version 1.2 adds support for the `rollback_index_location` field of the main vbmeta header.
+Version 1.2 adds support for the following:
+* `rollback_index_location` field of the main vbmeta header.
+* `check_at_most_once` parameter of dm-verity in a hashtree descriptor.
 
 ### Version 1.1
 
diff --git a/aftltool b/aftltool
deleted file mode 120000
index 11a8ff9..0000000
--- a/aftltool
+++ /dev/null
@@ -1 +0,0 @@
-aftltool.py
\ No newline at end of file
diff --git a/aftltool.py b/aftltool.py
deleted file mode 100755
index 613e83b..0000000
--- a/aftltool.py
+++ /dev/null
@@ -1,2254 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2020, The Android Open Source Project
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation
-# files (the "Software"), to deal in the Software without
-# restriction, including without limitation the rights to use, copy,
-# modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
-# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-#
-"""Command-line tool for AFTL support for Android Verified Boot images."""
-
-import abc
-import argparse
-import enum
-import hashlib
-import io
-import multiprocessing
-import os
-import queue
-import struct
-import subprocess
-import sys
-import tempfile
-import time
-
-# This is a temporary workaround for the issue that python3 no longer permits
-# relative imports. This adds the proto directory relative to the location of
-# aftltool to sys.path.
-# TODO(b/154068467): Implement proper importing of generated *_pb2 modules.
-EXEC_PATH = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(os.path.join(EXEC_PATH, 'proto'))
-
-# pylint: disable=wrong-import-position,import-error
-import avbtool
-import api_pb2
-# pylint: enable=wrong-import-position,import-error
-
-
-class AftlError(Exception):
-  """Application-specific errors.
-
-  These errors represent issues for which a stack-trace should not be
-  presented.
-
-  Attributes:
-    message: Error message.
-  """
-
-  def __init__(self, message):
-    Exception.__init__(self, message)
-
-
-def rsa_key_read_pem_bytes(key_path):
-  """Reads the bytes out of the passed in PEM file.
-
-  Arguments:
-    key_path: A string containing the path to the PEM file.
-
-  Returns:
-    A bytearray containing the DER encoded bytes in the PEM file.
-
-  Raises:
-    AftlError: If openssl cannot decode the PEM file.
-  """
-  # Use openssl to decode the PEM file.
-  args = ['openssl', 'rsa', '-in', key_path, '-pubout', '-outform', 'DER']
-  p = subprocess.Popen(args,
-                       stdin=subprocess.PIPE,
-                       stdout=subprocess.PIPE,
-                       stderr=subprocess.PIPE)
-  (pout, perr) = p.communicate()
-  retcode = p.wait()
-  if retcode != 0:
-    raise AftlError('Error decoding: {}'.format(perr))
-  return pout
-
-
-def check_signature(log_root, log_root_sig,
-                    transparency_log_pub_key):
-  """Validates the signature provided by the transparency log.
-
-  Arguments:
-    log_root: The transparency log_root data structure.
-    log_root_sig: The signature of the transparency log_root data structure.
-    transparency_log_pub_key: The file path to the transparency log public key.
-
-  Returns:
-    True if the signature check passes, otherwise False.
-  """
-
-  logsig_tmp = tempfile.NamedTemporaryFile()
-  logsig_tmp.write(log_root_sig)
-  logsig_tmp.flush()
-  logroot_tmp = tempfile.NamedTemporaryFile()
-  logroot_tmp.write(log_root)
-  logroot_tmp.flush()
-
-  p = subprocess.Popen(['openssl', 'dgst', '-sha256', '-verify',
-                        transparency_log_pub_key,
-                        '-signature', logsig_tmp.name, logroot_tmp.name],
-                       stdin=subprocess.PIPE,
-                       stdout=subprocess.PIPE,
-                       stderr=subprocess.PIPE)
-
-  p.communicate()
-  retcode = p.wait()
-  if not retcode:
-    return True
-  return False
-
-
-# AFTL Merkle Tree Functionality
-def rfc6962_hash_leaf(leaf):
-  """RFC6962 hashing function for hashing leaves of a Merkle tree.
-
-  Arguments:
-    leaf: A bytearray containing the Merkle tree leaf to be hashed.
-
-  Returns:
-    A bytearray containing the RFC6962 SHA256 hash of the leaf.
-  """
-  hasher = hashlib.sha256()
-  # RFC6962 states a '0' byte should be prepended to the data.
-  # This is done in conjunction with the '1' byte for non-leaf
-  # nodes for 2nd preimage attack resistance.
-  hasher.update(b'\x00')
-  hasher.update(leaf)
-  return hasher.digest()
-
-
-def rfc6962_hash_children(l, r):
-  """Calculates the inner Merkle tree node hash of child nodes l and r.
-
-  Arguments:
-    l: A bytearray containing the left child node to be hashed.
-    r: A bytearray containing the right child node to be hashed.
-
-  Returns:
-    A bytearray containing the RFC6962 SHA256 hash of 1|l|r.
-  """
-  hasher = hashlib.sha256()
-  # RFC6962 states a '1' byte should be prepended to the concatenated data.
-  # This is done in conjunction with the '0' byte for leaf
-  # nodes for 2nd preimage attack resistance.
-  hasher.update(b'\x01')
-  hasher.update(l)
-  hasher.update(r)
-  return hasher.digest()
-
-
-def chain_border_right(seed, proof):
-  """Computes a subtree hash along the left-side tree border.
-
-  Arguments:
-    seed: A bytearray containing the starting hash.
-    proof: A list of bytearrays representing the hashes in the inclusion proof.
-
-  Returns:
-    A bytearray containing the left-side subtree hash.
-  """
-  for h in proof:
-    seed = rfc6962_hash_children(h, seed)
-  return seed
-
-
-def chain_inner(seed, proof, leaf_index):
-  """Computes a subtree hash on or below the tree's right border.
-
-  Arguments:
-    seed: A bytearray containing the starting hash.
-    proof: A list of bytearrays representing the hashes in the inclusion proof.
-    leaf_index: The current leaf index.
-
-  Returns:
-    A bytearray containing the subtree hash.
-  """
-  for i, h in enumerate(proof):
-    if leaf_index >> i & 1 == 0:
-      seed = rfc6962_hash_children(seed, h)
-    else:
-      seed = rfc6962_hash_children(h, seed)
-  return seed
-
-
-def root_from_icp(leaf_index, tree_size, proof, leaf_hash):
-  """Calculates the expected Merkle tree root hash.
-
-  Arguments:
-    leaf_index: The current leaf index.
-    tree_size: The number of nodes in the Merkle tree.
-    proof: A list of bytearrays containing the inclusion proof.
-    leaf_hash: A bytearray containing the initial leaf hash.
-
-  Returns:
-    A bytearray containing the calculated Merkle tree root hash.
-
-  Raises:
-    AftlError: If invalid parameters are passed in.
-  """
-  if leaf_index < 0:
-    raise AftlError('Invalid leaf_index value: {}'.format(leaf_index))
-  if tree_size < 0:
-    raise AftlError('Invalid tree_size value: {}'.format(tree_size))
-  if leaf_index >= tree_size:
-    err_str = 'leaf_index cannot be equal to or larger than tree_size: {}, {}'
-    raise AftlError(err_str.format(leaf_index, tree_size))
-  if proof is None:
-    raise AftlError('Inclusion proof not provided.')
-  if leaf_hash is None:
-    raise AftlError('No leaf hash provided.')
-  # Calculate the point to split the proof into two parts.
-  # The split is where the paths to leaves diverge.
-  inner = (leaf_index ^ (tree_size - 1)).bit_length()
-  result = chain_inner(leaf_hash, proof[:inner], leaf_index)
-  result = chain_border_right(result, proof[inner:])
-  return result
-
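For reference, here is a self-contained sketch of how these RFC 6962 helpers fit together for a two-leaf tree. The hash helpers are reproduced inline so the snippet runs on its own; it mirrors, but is not, the code above.

```python
import hashlib

def _hash_leaf(leaf):
    # RFC 6962 leaf hash: 0x00 prefix for second-preimage resistance.
    return hashlib.sha256(b'\x00' + leaf).digest()

def _hash_children(l, r):
    # RFC 6962 inner-node hash: 0x01 prefix, then left || right.
    return hashlib.sha256(b'\x01' + l + r).digest()

leaf0 = _hash_leaf(b'first entry')
leaf1 = _hash_leaf(b'second entry')
root = _hash_children(leaf0, leaf1)

# The inclusion proof for leaf 0 in a two-leaf tree is just [leaf1]; replaying
# the inner/border split used by root_from_icp() reproduces the root.
leaf_index, tree_size, proof = 0, 2, [leaf1]
inner = (leaf_index ^ (tree_size - 1)).bit_length()  # number of "inner" hashes
calc = leaf0
for i, h in enumerate(proof[:inner]):
    if (leaf_index >> i) & 1 == 0:
        calc = _hash_children(calc, h)
    else:
        calc = _hash_children(h, calc)
for h in proof[inner:]:
    calc = _hash_children(h, calc)
assert calc == root
```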
-
-class AftlImageHeader(object):
-  """A class for representing the AFTL image header.
-
-  Attributes:
-    magic: Magic for identifying the AftlImage.
-    required_icp_version_major: The major version of AVB that wrote the entry.
-    required_icp_version_minor: The minor version of AVB that wrote the entry.
-    aftl_image_size: Total size of the AftlImage.
-    icp_count: Number of inclusion proofs represented in this structure.
-  """
-
-  SIZE = 18  # The size of the structure, in bytes
-  MAGIC = b'AFTL'
-  FORMAT_STRING = ('!4s2L'  # magic, major & minor version.
-                   'L'      # AFTL image size.
-                   'H')     # number of inclusion proof entries.
-
-  def __init__(self, data=None):
-    """Initializes a new AftlImageHeader object.
-
-    Arguments:
-      data: If not None, must be a bytearray of size |SIZE|.
-
-    Raises:
-      AftlError: If invalid structure for AftlImageHeader.
-    """
-    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE
-
-    if data:
-      (self.magic, self.required_icp_version_major,
-       self.required_icp_version_minor, self.aftl_image_size,
-       self.icp_count) = struct.unpack(self.FORMAT_STRING, data)
-    else:
-      self.magic = self.MAGIC
-      self.required_icp_version_major = avbtool.AVB_VERSION_MAJOR
-      self.required_icp_version_minor = avbtool.AVB_VERSION_MINOR
-      self.aftl_image_size = self.SIZE
-      self.icp_count = 0
-    if not self.is_valid():
-      raise AftlError('Invalid structure for AftlImageHeader.')
-
-  def encode(self):
-    """Serializes the AftlImageHeader |SIZE| to bytes.
-
-    Returns:
-      The encoded AftlImageHeader as bytes.
-
-    Raises:
-      AftlError: If invalid structure for AftlImageHeader.
-    """
-    if not self.is_valid():
-      raise AftlError('Invalid structure for AftlImageHeader')
-    return struct.pack(self.FORMAT_STRING, self.magic,
-                       self.required_icp_version_major,
-                       self.required_icp_version_minor,
-                       self.aftl_image_size,
-                       self.icp_count)
-
-  def is_valid(self):
-    """Ensures that values in the AftlImageHeader are sane.
-
-    Returns:
-      True if the values in the AftlImageHeader are sane, False otherwise.
-    """
-    if self.magic != AftlImageHeader.MAGIC:
-      sys.stderr.write(
-          'AftlImageHeader: magic value mismatch: {}\n'
-          .format(repr(self.magic)))
-      return False
-
-    if self.required_icp_version_major > avbtool.AVB_VERSION_MAJOR:
-      sys.stderr.write('AftlImageHeader: major version mismatch: {}\n'.format(
-          self.required_icp_version_major))
-      return False
-
-    if self.required_icp_version_minor > avbtool.AVB_VERSION_MINOR:
-      sys.stderr.write('AftlImageHeader: minor version mismatch: {}\n'.format(
-          self.required_icp_version_minor))
-      return False
-
-    if self.aftl_image_size < self.SIZE:
-      sys.stderr.write('AftlImageHeader: Invalid AFTL image size: {}\n'.format(
-          self.aftl_image_size))
-      return False
-
-    if self.icp_count < 0 or self.icp_count > 65535:
-      sys.stderr.write(
-          'AftlImageHeader: ICP entry count out of range: {}\n'.format(
-              self.icp_count))
-      return False
-    return True
-
-  def print_desc(self, o):
-    """Print the AftlImageHeader.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    o.write('  AFTL image header:\n')
-    i = ' ' * 4
-    fmt = '{}{:25}{}\n'
-    o.write(fmt.format(i, 'Major version:', self.required_icp_version_major))
-    o.write(fmt.format(i, 'Minor version:', self.required_icp_version_minor))
-    o.write(fmt.format(i, 'Image size:', self.aftl_image_size))
-    o.write(fmt.format(i, 'ICP entries count:', self.icp_count))
-
-
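A quick sketch of the 18-byte header layout encoded by FORMAT_STRING above; the field values here are illustrative.

```python
import struct

FORMAT_STRING = '!4s2LLH'  # magic, major/minor version, image size, ICP count
assert struct.calcsize(FORMAT_STRING) == 18

raw = struct.pack(FORMAT_STRING, b'AFTL', 1, 2, 18, 0)
magic, major, minor, image_size, icp_count = struct.unpack(FORMAT_STRING, raw)
assert magic == b'AFTL' and (major, minor) == (1, 2)
assert image_size == 18 and icp_count == 0
```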
-class AftlIcpEntry(object):
-  """A class for the transparency log inclusion proof entries.
-
-  The data that represents each of the components of the ICP entry is stored
-  immediately following the ICP entry header. The format is log_url,
-  SignedLogRoot, and inclusion proof hashes.
-
-  Attributes:
-    log_url_size: Length of the string representing the transparency log URL.
-    leaf_index: Leaf index in the transparency log representing this entry.
-    log_root_descriptor_size: Size of the transparency log's SignedLogRoot.
-    annotation_leaf_size: Size of the SignedVBMetaPrimaryAnnotationLeaf passed
-        to the log.
-    log_root_sig_size: Size in bytes of the log_root_signature
-    proof_hash_count: Number of hashes comprising the inclusion proof.
-    inc_proof_size: The total size of the inclusion proof, in bytes.
-    log_url: The URL for the transparency log that generated this inclusion
-        proof.
-    log_root_descriptor: The data comprising the signed tree head structure.
-    annotation_leaf: The data comprising the SignedVBMetaPrimaryAnnotationLeaf
-        leaf.
-    log_root_signature: The data comprising the log root signature.
-    proofs: The hashes comprising the inclusion proof.
-
-  """
-  SIZE = 27  # The size of the structure, in bytes
-  FORMAT_STRING = ('!L'   # transparency log server url size
-                   'Q'    # leaf index
-                   'L'    # log root descriptor size
-                   'L'    # firmware info leaf size
-                   'H'    # log root signature size
-                   'B'    # number of hashes in the inclusion proof
-                   'L')   # size of the inclusion proof in bytes
-  # This header is followed by the log_url, log_root_descriptor,
-  # annotation leaf, log root signature, and the proofs elements.
-
-  def __init__(self, data=None):
-    """Initializes a new ICP entry object.
-
-    Arguments:
-      data: If not None, must be a bytearray of size >= |SIZE|.
-
-    Raises:
-      AftlError: If data does not represent a well-formed AftlIcpEntry.
-    """
-    # Assert the header structure is of a sane size.
-    assert struct.calcsize(self.FORMAT_STRING) == self.SIZE
-
-    if data:
-      # Deserialize the header from the data.
-      (self._log_url_size_expected,
-       self.leaf_index,
-       self._log_root_descriptor_size_expected,
-       self._annotation_leaf_size_expected,
-       self._log_root_sig_size_expected,
-       self._proof_hash_count_expected,
-       self._inc_proof_size_expected) = struct.unpack(self.FORMAT_STRING,
-                                                      data[0:self.SIZE])
-
-      # Deserialize ICP entry components from the data.
-      expected_format_string = '{}s{}s{}s{}s{}s'.format(
-          self._log_url_size_expected,
-          self._log_root_descriptor_size_expected,
-          self._annotation_leaf_size_expected,
-          self._log_root_sig_size_expected,
-          self._inc_proof_size_expected)
-
-      (log_url, log_root_descriptor_bytes, annotation_leaf_bytes,
-       self.log_root_signature, proof_bytes) = struct.unpack(
-           expected_format_string, data[self.SIZE:self.get_expected_size()])
-
-      self.log_url = log_url.decode('ascii')
-      self.log_root_descriptor = TrillianLogRootDescriptor(
-          log_root_descriptor_bytes)
-
-      self.annotation_leaf = SignedVBMetaPrimaryAnnotationLeaf.parse(
-          annotation_leaf_bytes)
-
-      self.proofs = []
-      if self._proof_hash_count_expected > 0:
-        proof_idx = 0
-        hash_size = (self._inc_proof_size_expected
-                     // self._proof_hash_count_expected)
-        for _ in range(self._proof_hash_count_expected):
-          proof = proof_bytes[proof_idx:(proof_idx+hash_size)]
-          self.proofs.append(proof)
-          proof_idx += hash_size
-    else:
-      self.leaf_index = 0
-      self.log_url = ''
-      self.log_root_descriptor = TrillianLogRootDescriptor()
-      self.annotation_leaf = SignedVBMetaPrimaryAnnotationLeaf()
-      self.log_root_signature = b''
-      self.proofs = []
-    if not self.is_valid():
-      raise AftlError('Invalid structure for AftlIcpEntry')
-
-  @property
-  def log_url_size(self):
-    """Gets the size of the log_url attribute."""
-    if hasattr(self, 'log_url'):
-      return len(self.log_url)
-    return self._log_url_size_expected
-
-  @property
-  def log_root_descriptor_size(self):
-    """Gets the size of the log_root_descriptor attribute."""
-    if hasattr(self, 'log_root_descriptor'):
-      return self.log_root_descriptor.get_expected_size()
-    return self._log_root_descriptor_size_expected
-
-  @property
-  def annotation_leaf_size(self):
-    """Gets the size of the annotation_leaf attribute."""
-    if hasattr(self, 'annotation_leaf'):
-      return self.annotation_leaf.get_expected_size()
-    return self._annotation_leaf_size_expected
-
-  @property
-  def log_root_sig_size(self):
-    """Gets the size of the log_root signature."""
-    if hasattr(self, 'log_root_signature'):
-      return len(self.log_root_signature)
-    return self._log_root_sig_size_expected
-
-  @property
-  def proof_hash_count(self):
-    """Gets the number of proof hashes."""
-    if hasattr(self, 'proofs'):
-      return len(self.proofs)
-    return self._proof_hash_count_expected
-
-  @property
-  def inc_proof_size(self):
-    """Gets the total size of the proof hashes in bytes."""
-    if hasattr(self, 'proofs'):
-      result = 0
-      for proof in self.proofs:
-        result += len(proof)
-      return result
-    return self._inc_proof_size_expected
-
-  def verify_icp(self, transparency_log_pub_key):
-    """Verifies the contained inclusion proof given the public log key.
-
-    Arguments:
-      transparency_log_pub_key: The path to the trusted public key for the log.
-
-    Returns:
-      True if the calculated signature matches AftlIcpEntry's. False otherwise.
-    """
-    if not transparency_log_pub_key:
-      return False
-
-    leaf_hash = rfc6962_hash_leaf(self.annotation_leaf.encode())
-    calc_root = root_from_icp(self.leaf_index,
-                              self.log_root_descriptor.tree_size,
-                              self.proofs,
-                              leaf_hash)
-    if ((calc_root == self.log_root_descriptor.root_hash) and
-        check_signature(
-            self.log_root_descriptor.encode(),
-            self.log_root_signature,
-            transparency_log_pub_key)):
-      return True
-    return False
-
-  def verify_vbmeta_image(self, vbmeta_image, transparency_log_pub_key):
-    """Verify the inclusion proof for the given VBMeta image.
-
-    Arguments:
-      vbmeta_image: A bytearray with the VBMeta image.
-      transparency_log_pub_key: File path to the PEM file containing the trusted
-        transparency log public key.
-
-    Returns:
-      True if the inclusion proof validates and the vbmeta hash of the given
-      VBMeta image matches the one in the annotation leaf; otherwise False.
-    """
-    if not vbmeta_image:
-      return False
-
-    # Calculate the hash of the vbmeta image.
-    vbmeta_hash = hashlib.sha256(vbmeta_image).digest()
-
-    # Validates the inclusion proof and then compares the calculated vbmeta_hash
-    # against the one in the inclusion proof.
-    return (self.verify_icp(transparency_log_pub_key)
-            and self.annotation_leaf.annotation.vbmeta_hash == vbmeta_hash)
-
-  def encode(self):
-    """Serializes the header |SIZE| and data to bytes.
-
-    Returns:
-      bytes with the encoded header.
-
-    Raises:
-      AftlError: If invalid entry structure.
-    """
-    proof_bytes = bytearray()
-    if not self.is_valid():
-      raise AftlError('Invalid AftlIcpEntry structure')
-
-    expected_format_string = '{}{}s{}s{}s{}s{}s'.format(
-        self.FORMAT_STRING,
-        self.log_url_size,
-        self.log_root_descriptor_size,
-        self.annotation_leaf_size,
-        self.log_root_sig_size,
-        self.inc_proof_size)
-
-    for proof in self.proofs:
-      proof_bytes.extend(proof)
-
-    return struct.pack(expected_format_string,
-                       self.log_url_size, self.leaf_index,
-                       self.log_root_descriptor_size, self.annotation_leaf_size,
-                       self.log_root_sig_size, self.proof_hash_count,
-                       self.inc_proof_size, self.log_url.encode('ascii'),
-                       self.log_root_descriptor.encode(),
-                       self.annotation_leaf.encode(),
-                       self.log_root_signature,
-                       proof_bytes)
-
-  def translate_response(self, log_url, avbm_response):
-    """Translates an AddVBMetaResponse object to an AftlIcpEntry.
-
-    Arguments:
-      log_url: String representing the transparency log URL.
-      avbm_response: The AddVBMetaResponse object to translate.
-    """
-    self.log_url = log_url
-
-    # Deserializes from AddVBMetaResponse.
-    proof = avbm_response.annotation_proof
-    self.leaf_index = proof.proof.leaf_index
-    self.log_root_descriptor = TrillianLogRootDescriptor(proof.sth.log_root)
-    self.annotation_leaf = SignedVBMetaPrimaryAnnotationLeaf.parse(
-        avbm_response.annotation_leaf)
-    self.log_root_signature = proof.sth.log_root_signature
-    self.proofs = proof.proof.hashes
-
-  def get_expected_size(self):
-    """Gets the expected size of the full entry out of the header.
-
-    Returns:
-      The expected size of the AftlIcpEntry from the header.
-    """
-    return (self.SIZE + self.log_url_size + self.log_root_descriptor_size +
-            self.annotation_leaf_size + self.log_root_sig_size +
-            self.inc_proof_size)
-
-  def is_valid(self):
-    """Ensures that values in an AftlIcpEntry structure are sane.
-
-    Returns:
-      True if the values in the AftlIcpEntry are sane, False otherwise.
-    """
-    if self.leaf_index < 0:
-      sys.stderr.write('ICP entry: leaf index out of range: '
-                       '{}.\n'.format(self.leaf_index))
-      return False
-
-    if (not self.log_root_descriptor or
-        not isinstance(self.log_root_descriptor, TrillianLogRootDescriptor) or
-        not self.log_root_descriptor.is_valid()):
-      sys.stderr.write('ICP entry: invalid TrillianLogRootDescriptor.\n')
-      return False
-
-    if (not self.annotation_leaf or
-        not isinstance(self.annotation_leaf, Leaf)):
-      sys.stderr.write('ICP entry: invalid Leaf.\n')
-      return False
-    return True
-
-  def print_desc(self, o):
-    """Print the ICP entry.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    i = ' ' * 4
-    fmt = '{}{:25}{}\n'
-    o.write(fmt.format(i, 'Transparency Log:', self.log_url))
-    o.write(fmt.format(i, 'Leaf index:', self.leaf_index))
-    o.write('    ICP hashes:              ')
-    for i, proof_hash in enumerate(self.proofs):
-      if i != 0:
-        o.write(' ' * 29)
-      o.write('{}\n'.format(proof_hash.hex()))
-    self.log_root_descriptor.print_desc(o)
-    self.annotation_leaf.print_desc(o)
-
-
-class TrillianLogRootDescriptor(object):
-  """A class representing the Trillian log_root descriptor.
-
-  Taken from Trillian definitions:
-  https://github.com/google/trillian/blob/master/trillian.proto#L255
-
-  Attributes:
-    version: The version number of the descriptor. Currently only version=1 is
-        supported.
-    tree_size: The size of the tree.
-    root_hash_size: The size of the root hash in bytes. Valid values are between
-        0 and 128.
-    root_hash: The root hash as bytearray().
-    timestamp: The timestamp in nanoseconds.
-    revision: The revision number as long.
-    metadata_size: The size of the metadata in bytes. Valid values are between
-        0 and 65535.
-    metadata: The metadata as bytearray().
-  """
-  FORMAT_STRING_PART_1 = ('!H'  # version
-                          'Q'   # tree_size
-                          'B'   # root_hash_size
-                         )
-
-  FORMAT_STRING_PART_2 = ('!Q'  # timestamp
-                          'Q'   # revision
-                          'H'   # metadata_size
-                         )
-
-  def __init__(self, data=None):
-    """Initializes a new TrillianLogRoot descriptor."""
-    if data:
-      # Parses first part of the log_root descriptor.
-      data_length = struct.calcsize(self.FORMAT_STRING_PART_1)
-      (self.version, self.tree_size, self.root_hash_size) = struct.unpack(
-          self.FORMAT_STRING_PART_1, data[0:data_length])
-      data = data[data_length:]
-
-      # Parses the root_hash bytes if the size indicates existence.
-      if self.root_hash_size > 0:
-        self.root_hash = data[0:self.root_hash_size]
-        data = data[self.root_hash_size:]
-      else:
-        self.root_hash = b''
-
-      # Parses second part of the log_root descriptor.
-      data_length = struct.calcsize(self.FORMAT_STRING_PART_2)
-      (self.timestamp, self.revision, self.metadata_size) = struct.unpack(
-          self.FORMAT_STRING_PART_2, data[0:data_length])
-      data = data[data_length:]
-
-      # Parses the metadata if the size indicates existence.
-      if self.metadata_size > 0:
-        self.metadata = data[0:self.metadata_size]
-      else:
-        self.metadata = b''
-    else:
-      self.version = 1
-      self.tree_size = 0
-      self.root_hash_size = 0
-      self.root_hash = b''
-      self.timestamp = 0
-      self.revision = 0
-      self.metadata_size = 0
-      self.metadata = b''
-
-    if not self.is_valid():
-      raise AftlError('Invalid structure for TrillianLogRootDescriptor.')
-
-  def get_expected_size(self):
-    """Calculates the expected size of the TrillianLogRootDescriptor.
-
-    Returns:
-      The expected size of the TrillianLogRootDescriptor.
-    """
-    return (struct.calcsize(self.FORMAT_STRING_PART_1) + self.root_hash_size +
-            struct.calcsize(self.FORMAT_STRING_PART_2) + self.metadata_size)
-
-  def encode(self):
-    """Serializes the TrillianLogDescriptor to a bytearray().
-
-    Returns:
-      A bytearray() with the encoded header.
-
-    Raises:
-      AftlError: If invalid entry structure.
-    """
-    if not self.is_valid():
-      raise AftlError('Invalid structure for TrillianLogRootDescriptor.')
-
-    expected_format_string = '{}{}s{}{}s'.format(
-        self.FORMAT_STRING_PART_1,
-        self.root_hash_size,
-        self.FORMAT_STRING_PART_2[1:],
-        self.metadata_size)
-
-    return struct.pack(expected_format_string,
-                       self.version, self.tree_size, self.root_hash_size,
-                       self.root_hash, self.timestamp, self.revision,
-                       self.metadata_size, self.metadata)
-
-  def is_valid(self):
-    """Ensures that values in the descritor are sane.
-
-    Returns:
-      True if the values are sane; otherwise False.
-    """
-    cls = self.__class__.__name__
-    if self.version != 1:
-      sys.stderr.write('{}: Bad version value {}.\n'.format(cls, self.version))
-      return False
-    if self.tree_size < 0:
-      sys.stderr.write('{}: Bad tree_size value {}.\n'.format(cls,
-                                                              self.tree_size))
-      return False
-    if self.root_hash_size < 0 or self.root_hash_size > 128:
-      sys.stderr.write('{}: Bad root_hash_size value {}.\n'.format(
-          cls, self.root_hash_size))
-      return False
-    if len(self.root_hash) != self.root_hash_size:
-      sys.stderr.write('{}: root_hash_size {} does not match with length of '
-                       'root_hash {}.\n'.format(cls, self.root_hash_size,
-                                                len(self.root_hash)))
-      return False
-    if self.timestamp < 0:
-      sys.stderr.write('{}: Bad timestamp value {}.\n'.format(cls,
-                                                              self.timestamp))
-      return False
-    if self.revision < 0:
-      sys.stderr.write('{}: Bad revision value {}.\n'.format(cls,
-                                                             self.revision))
-      return False
-    if self.metadata_size < 0 or self.metadata_size > 65535:
-      sys.stderr.write('{}: Bad metadatasize value {}.\n'.format(
-          cls, self.metadata_size))
-      return False
-    if len(self.metadata) != self.metadata_size:
-      sys.stderr.write('{}: metadata_size {} does not match with length of '
-                       'metadata {}.\n'.format(cls, self.metadata_size,
-                                               len(self.metadata)))
-      return False
-    return True
-
-  def print_desc(self, o):
-    """Print the TrillianLogRootDescriptor.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    o.write('    Log Root Descriptor:\n')
-    i = ' ' * 6
-    fmt = '{}{:23}{}\n'
-    o.write(fmt.format(i, 'Version:', self.version))
-    o.write(fmt.format(i, 'Tree size:', self.tree_size))
-    o.write(fmt.format(i, 'Root hash size:', self.root_hash_size))
-    if self.root_hash_size > 0:
-      o.write(fmt.format(i, 'Root hash:', self.root_hash.hex()))
-      o.write(fmt.format(i, 'Timestamp (ns):', self.timestamp))
-    o.write(fmt.format(i, 'Revision:', self.revision))
-    o.write(fmt.format(i, 'Metadata size:', self.metadata_size))
-    if self.metadata_size > 0:
-      o.write(fmt.format(i, 'Metadata:', self.metadata.hex()))
-
-
-def tls_decode_bytes(byte_size, stream):
-  """Decodes a variable-length vector.
-
-  In the TLS presentation language, a variable-length vector is a pair
-  (size, value). |size| describes the size of the |value| to read
-  in bytes. All values are encoded in big-endian.
-  See https://tools.ietf.org/html/rfc8446#section-3 for more details.
-
-  Arguments:
-      byte_size: A format character as described in the struct module
-          which describes the expected length of the size. For
-          instance, "B", "H", "L" or "Q".
-      stream: a BytesIO which contains the value to decode.
-
-  Returns:
-    A bytes containing the value decoded.
-
-  Raises:
-    AftlError: If |byte_size| is not a known format character, or if not
-    enough data is available to decode the size or the value.
-  """
-  byte_size_format = "!" + byte_size
-  try:
-    byte_size_length = struct.calcsize(byte_size_format)
-  except struct.error:
-    raise AftlError("Invalid byte_size character: {}. It must be a "
-                    "format supported by struct.".format(byte_size))
-  try:
-    value_size = struct.unpack(byte_size_format,
-                               stream.read(byte_size_length))[0]
-  except struct.error:
-    raise AftlError("Not enough data to read size: {}".format(byte_size))
-  value = stream.read(value_size)
-  if value_size != len(value):
-    raise AftlError("Not enough data to read value: "
-                    "{} != {}".format(value_size, len(value)))
-  return value
-
-def tls_encode_bytes(byte_size, value, stream):
-  """Encodes a variable-length vector.
-
-  In the TLS presentation language, a variable-length vector is a pair
-  (size, value). |size| describes the size of the |value| to read
-  in bytes. All values are encoded in big-endian.
-  See https://tools.ietf.org/html/rfc8446#section-3 for more details.
-
-  Arguments:
-      byte_size: A format character as described in the struct module
-          which describes the expected length of the size. For
-          instance, "B", "H", "L" or "Q".
-      value: the value to encode. The length of |value| must be
-          representable with |byte_size|.
-      stream: a BytesIO to which the value is encoded to.
-
-  Raises:
-    AftlError: If |byte_size| is not a known format character, or if
-    |value|'s length cannot be represented with |byte_size|.
-  """
-  byte_size_format = "!" + byte_size
-  try:
-    stream.write(struct.pack(byte_size_format, len(value)))
-  except struct.error:
-    # Whether byte_size is invalid or not large enough to represent value,
-    # struct raises a struct.error exception. Instead of matching on the
-    # exception message, capture both cases in a generic message.
-    raise AftlError("Invalid byte_size to store {} bytes".format(len(value)))
-  stream.write(value)
-
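A self-contained round-trip sketch of the TLS variable-length vector encoding these helpers implement: a big-endian length prefix (here two bytes via "H") followed by the raw value.

```python
import io
import struct

value = b'example payload'

# Encode: length prefix then raw bytes.
stream = io.BytesIO()
stream.write(struct.pack('!H', len(value)))
stream.write(value)

# Decode: read the prefix, then exactly that many bytes.
stream.seek(0)
(size,) = struct.unpack('!H', stream.read(struct.calcsize('!H')))
assert stream.read(size) == value
```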
-class HashAlgorithm(enum.Enum):
-  SHA256 = 0
-
-class SignatureAlgorithm(enum.Enum):
-  RSA = 0
-  ECDSA = 1
-
-class Signature(object):
-  """Represents a signature of some data.
-
-  It is usually made using a manufacturer key and used to sign part of a leaf
-  that belongs to the transparency log. The encoding of this structure must
-  match the server expectation.
-
-  Attributes:
-    hash_algorithm: the HashAlgorithm used for the signature.
-    signature_algorithm: the SignatureAlgorithm used.
-    signature: the raw signature in bytes.
-  """
-  FORMAT_STRING = ('!B'    # Hash algorithm
-                   'B'     # Signing algorithm
-                  )
-  # Followed by the raw signature, encoded as a TLS variable-length vector
-  # which size is represented using 2 bytes.
-
-  def __init__(self, hash_algorithm=HashAlgorithm.SHA256,
-               signature_algorithm=SignatureAlgorithm.RSA, signature=b''):
-    self.hash_algorithm = hash_algorithm
-    self.signature_algorithm = signature_algorithm
-    self.signature = signature
-
-  @classmethod
-  def parse(cls, stream):
-    """Parses a TLS-encoded structure and returns a new Signature.
-
-    Arguments:
-      stream: a BytesIO to read the signature from.
-
-    Returns:
-      A new Signature object.
-
-    Raises:
-      AftlError: If the hash algorithm or signature algorithm value is
-        unknown; or if the decoding failed.
-    """
-    data_length = struct.calcsize(cls.FORMAT_STRING)
-    (hash_algorithm, signature_algorithm) = struct.unpack(
-        cls.FORMAT_STRING, stream.read(data_length))
-    try:
-      hash_algorithm = HashAlgorithm(hash_algorithm)
-    except ValueError:
-      raise AftlError('unknown hash algorithm: {}'.format(hash_algorithm))
-    try:
-      signature_algorithm = SignatureAlgorithm(signature_algorithm)
-    except ValueError:
-      raise AftlError('unknown signature algorithm: {}'.format(
-          signature_algorithm))
-    signature = tls_decode_bytes('H', stream)
-    return Signature(hash_algorithm, signature_algorithm, signature)
-
-  def get_expected_size(self):
-    """Returns the size of the encoded Signature."""
-    return struct.calcsize(self.FORMAT_STRING) + \
-        struct.calcsize('H') + len(self.signature)
-
-  def encode(self, stream):
-    """Encodes the Signature.
-
-    Arguments:
-      stream: a BytesIO to which the signature is written.
-    """
-    stream.write(struct.pack(self.FORMAT_STRING, self.hash_algorithm.value,
-                             self.signature_algorithm.value))
-    tls_encode_bytes('H', self.signature, stream)
-
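For illustration, the wire format produced by Signature.encode() is one byte for the hash algorithm, one byte for the signature algorithm, then the raw signature as a 2-byte-length-prefixed vector; the values below are made up.

```python
import io
import struct

raw_signature = b'\xde\xad\xbe\xef'            # placeholder signature bytes
stream = io.BytesIO()
stream.write(struct.pack('!BB', 0, 0))          # SHA256 (0), RSA (0)
stream.write(struct.pack('!H', len(raw_signature)))
stream.write(raw_signature)
assert len(stream.getvalue()) == 2 + 2 + len(raw_signature)
```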
-class VBMetaPrimaryAnnotation(object):
-  """An annotation that contains metadata about a VBMeta image.
-
-  Attributes:
-    vbmeta_hash: the SHA256 of the VBMeta it references.
-    version_incremental: the version incremental of the build, as string.
-    manufacturer_key_hash: the hash of the manufacturer key that will
-        sign this annotation.
-    description: a free-form field.
-  """
-
-  def __init__(self, vbmeta_hash=b'', version_incremental='',
-               manufacturer_key_hash=b'', description=''):
-    """Default constructor."""
-    self.vbmeta_hash = vbmeta_hash
-    self.version_incremental = version_incremental
-    self.manufacturer_key_hash = manufacturer_key_hash
-    self.description = description
-
-  @classmethod
-  def parse(cls, stream):
-    """Parses a VBMetaPrimaryAnnotation from data.
-
-    Arguments:
-      stream: an io.BytesIO to decode the annotation from.
-
-    Returns:
-      A new VBMetaPrimaryAnnotation.
-
-    Raises:
-      AftlError: If an error occurred while parsing the annotation.
-    """
-    vbmeta_hash = tls_decode_bytes("B", stream)
-    version_incremental = tls_decode_bytes("B", stream)
-    try:
-      version_incremental = version_incremental.decode("ascii")
-    except UnicodeError:
-      raise AftlError('Failed to convert version incremental to an ASCII '
-                      'string')
-    manufacturer_key_hash = tls_decode_bytes("B", stream)
-    description = tls_decode_bytes("H", stream)
-    try:
-      description = description.decode("utf-8")
-    except UnicodeError:
-      raise AftlError('Failed to convert description to a UTF-8 string')
-    return cls(vbmeta_hash, version_incremental, manufacturer_key_hash,
-               description)
-
-  def sign(self, manufacturer_key_path, signing_helper=None,
-           signing_helper_with_files=None):
-    """Signs the annotation.
-
-    Arguments:
-      manufacturer_key_path: Path to key used to sign messages sent to the
-        transparency log servers.
-      signing_helper: Program which signs a hash and returns a signature.
-      signing_helper_with_files: Same as signing_helper but uses files instead.
-
-    Returns:
-      A new SignedVBMetaPrimaryAnnotation.
-
-    Raises:
-      AftlError: If an error occurred while signing the annotation.
-    """
-    # AFTL supports SHA256_RSA4096 for now, more will be available.
-    algorithm_name = 'SHA256_RSA4096'
-    encoded_leaf = io.BytesIO()
-    self.encode(encoded_leaf)
-    try:
-      rsa_key = avbtool.RSAPublicKey(manufacturer_key_path)
-      raw_signature = rsa_key.sign(algorithm_name, encoded_leaf.getvalue(),
-                                   signing_helper, signing_helper_with_files)
-    except avbtool.AvbError as e:
-      raise AftlError('Failed to sign VBMetaPrimaryAnnotation with '
-                      '--manufacturer_key: {}'.format(e))
-    signature = Signature(hash_algorithm=HashAlgorithm.SHA256,
-                          signature_algorithm=SignatureAlgorithm.RSA,
-                          signature=raw_signature)
-    return SignedVBMetaPrimaryAnnotation(signature=signature, annotation=self)
-
-  def encode(self, stream):
-    """Encodes the VBMetaPrimaryAnnotation.
-
-    Arguments:
-      stream: a BytesIO to which the signature is written.
-
-    Raises:
-      AftlError: If the encoding failed.
-    """
-    tls_encode_bytes("B", self.vbmeta_hash, stream)
-    try:
-      tls_encode_bytes("B", self.version_incremental.encode("ascii"), stream)
-    except UnicodeError:
-      raise AftlError('Unable to encode version incremental to ASCII')
-    tls_encode_bytes("B", self.manufacturer_key_hash, stream)
-    try:
-      tls_encode_bytes("H", self.description.encode("utf-8"), stream)
-    except UnicodeError:
-      raise AftlError('Unable to encode description to UTF-8')
-
-  def get_expected_size(self):
-    """Returns the size of the encoded annotation."""
-    b = io.BytesIO()
-    self.encode(b)
-    return len(b.getvalue())
-
-  def print_desc(self, o):
-    """Print the VBMetaPrimaryAnnotation.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    o.write('      VBMeta Primary Annotation:\n')
-    i = ' ' * 8
-    fmt = '{}{:23}{}\n'
-    if self.vbmeta_hash:
-      o.write(fmt.format(i, 'VBMeta hash:', self.vbmeta_hash.hex()))
-    if self.version_incremental:
-      o.write(fmt.format(i, 'Version incremental:', self.version_incremental))
-    if self.manufacturer_key_hash:
-      o.write(fmt.format(i, 'Manufacturer key hash:',
-                         self.manufacturer_key_hash.hex()))
-    if self.description:
-      o.write(fmt.format(i, 'Description:', self.description))
-
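For illustration, the on-the-wire layout produced by VBMetaPrimaryAnnotation.encode() can be reproduced with a small stand-alone helper; the field values below are made up.

```python
import hashlib
import io
import struct

def _tls_put(size_fmt, value, out):
    # Length prefix (big-endian) followed by the raw value.
    out.write(struct.pack('!' + size_fmt, len(value)))
    out.write(value)

out = io.BytesIO()
_tls_put('B', hashlib.sha256(b'vbmeta image bytes').digest(), out)   # vbmeta_hash
_tls_put('B', b'12345678', out)                                      # version_incremental
_tls_put('B', hashlib.sha256(b'manufacturer key').digest(), out)     # manufacturer_key_hash
_tls_put('H', 'example build'.encode('utf-8'), out)                  # description
encoded = out.getvalue()
assert len(encoded) == (1 + 32) + (1 + 8) + (1 + 32) + (2 + 13)
```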
-
-class SignedVBMetaPrimaryAnnotation(object):
-  """A Signed VBMetaPrimaryAnnotation.
-
-  Attributes:
-    signature: a Signature.
-    annotation: a VBMetaPrimaryAnnotation.
-  """
-
-  def __init__(self, signature=None, annotation=None):
-    """Default constructor."""
-    if not signature:
-      signature = Signature()
-    self.signature = signature
-    if not annotation:
-      annotation = VBMetaPrimaryAnnotation()
-    self.annotation = annotation
-
-  @classmethod
-  def parse(cls, stream):
-    """Parses a signed annotation."""
-    signature = Signature.parse(stream)
-    annotation = VBMetaPrimaryAnnotation.parse(stream)
-    return cls(signature, annotation)
-
-  def get_expected_size(self):
-    """Returns the size of the encoded signed annotation."""
-    return self.signature.get_expected_size() + \
-             self.annotation.get_expected_size()
-
-  def encode(self, stream):
-    """Encodes the SignedVBMetaPrimaryAnnotation.
-
-    Arguments:
-      stream: a BytesIO to which the object is written.
-
-    Raises:
-      AftlError: If the encoding failed.
-    """
-    self.signature.encode(stream)
-    self.annotation.encode(stream)
-
-  def print_desc(self, o):
-    """Prints the annotation.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    self.annotation.print_desc(o)
-
-class Leaf(abc.ABC):
-  """An abstract class to represent the leaves in the transparency log."""
-  FORMAT_STRING = ('!B'   # Version
-                   'Q'    # Timestamp
-                   'B'    # LeafType
-                  )
-
-  class LeafType(enum.Enum):
-    VBMetaType = 0
-    SignedVBMetaPrimaryAnnotationType = 1
-
-  def __init__(self, version=1, timestamp=0, leaf_type=LeafType.VBMetaType):
-    """Build a new leaf."""
-    self.version = version
-    self.timestamp = timestamp
-    self.leaf_type = leaf_type
-
-  @classmethod
-  def _parse_header(cls, stream):
-    """Parses the header of a leaf.
-
-    This is called with the parse method of the subclasses.
-
-    Arguments:
-      stream: a BytesIO to read the header from.
-
-    Returns:
-      A tuple (version, timestamp, leaf_type).
-
-    Raises:
-      AftlError: If the header cannot be decoded; or if the leaf type is
-          unknown.
-    """
-    data_length = struct.calcsize(cls.FORMAT_STRING)
-    try:
-      (version, timestamp, leaf_type) = struct.unpack(
-          cls.FORMAT_STRING, stream.read(data_length))
-    except struct.error:
-      raise AftlError("Not enough data to parse leaf header")
-    try:
-      leaf_type = cls.LeafType(leaf_type)
-    except ValueError:
-      raise AftlError("Unknown leaf type: {}".format(leaf_type))
-    return version, timestamp, leaf_type
-
-  @classmethod
-  @abc.abstractmethod
-  def parse(cls, data):
-    """Parses a leaf and returned a new object.
-
-    This abstract method must be implemented by the subclass. It may use
-    _parse_header to parse the common fields.
-
-    Arguments:
-      data: a bytes-like object.
-
-    Returns:
-      An object of the type of the particular subclass.
-
-    Raises:
-      AftlError: If the leaf type is incorrect; or if the decoding failed.
-    """
-
-  @abc.abstractmethod
-  def encode(self):
-    """Encodes a leaf.
-
-    This abstract method must be implemented by the subclass. It may use
-    _encode_header to encode the common fields.
-
-    Returns:
-      A bytes with the encoded leaf.
-
-    Raises:
-      AftlError: If the encoding failed.
-    """
-
-  def _get_expected_header_size(self):
-    """Returns the size of the leaf header."""
-    return struct.calcsize(self.FORMAT_STRING)
-
-  def _encode_header(self, stream):
-    """Encodes the header of the leaf.
-
-    This method is called by the encode method in the subclass.
-
-    Arguments:
-      stream: a BytesIO to which the object is written.
-
-    Raises:
-      AftlError: If the encoding failed.
-    """
-    try:
-      stream.write(struct.pack(self.FORMAT_STRING, self.version, self.timestamp,
-                               self.leaf_type.value))
-    except struct.error:
-      raise AftlError('Unable to encode the leaf header')
-
-  def print_desc(self, o):
-    """Prints the leaf header.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    i = ' ' * 6
-    fmt = '{}{:23}{}\n'
-    o.write(fmt.format(i, 'Version:', self.version))
-    o.write(fmt.format(i, 'Timestamp:', self.timestamp))
-    o.write(fmt.format(i, 'Type:', self.leaf_type))
-
-
-class SignedVBMetaPrimaryAnnotationLeaf(Leaf):
-  """A Signed VBMetaPrimaryAnnotation leaf."""
-
-  def __init__(self, version=1, timestamp=0,
-               signed_vbmeta_primary_annotation=None):
-    """Builds a new Signed VBMeta Primary Annotation leaf."""
-    super(SignedVBMetaPrimaryAnnotationLeaf, self).__init__(
-        version=version, timestamp=timestamp,
-        leaf_type=self.LeafType.SignedVBMetaPrimaryAnnotationType)
-    if not signed_vbmeta_primary_annotation:
-      signed_vbmeta_primary_annotation = SignedVBMetaPrimaryAnnotation()
-    self.signed_vbmeta_primary_annotation = signed_vbmeta_primary_annotation
-
-  @property
-  def annotation(self):
-    """Returns the VBMetaPrimaryAnnotation contained in the leaf."""
-    return self.signed_vbmeta_primary_annotation.annotation
-
-  @property
-  def signature(self):
-    """Returns the Signature contained in the leaf."""
-    return self.signed_vbmeta_primary_annotation.signature
-
-  @classmethod
-  def parse(cls, data):
-    """Parses an encoded contained in data.
-
-    Arguments:
-      data: a bytes-like object.
-
-    Returns:
-      A SignedVBMetaPrimaryAnnotationLeaf.
-
-    Raises:
-      AftlError: If the leaf type is incorrect; or if the decoding failed.
-    """
-    encoded_leaf = io.BytesIO(data)
-    version, timestamp, leaf_type = Leaf._parse_header(encoded_leaf)
-    if leaf_type != Leaf.LeafType.SignedVBMetaPrimaryAnnotationType:
-      raise AftlError("Incorrect leaf type")
-    signed_annotation = SignedVBMetaPrimaryAnnotation.parse(encoded_leaf)
-    return cls(version=version, timestamp=timestamp,
-               signed_vbmeta_primary_annotation=signed_annotation)
-
-  def get_expected_size(self):
-    """Returns the size of the leaf."""
-    size = self._get_expected_header_size()
-    if self.signed_vbmeta_primary_annotation:
-      size += self.signed_vbmeta_primary_annotation.get_expected_size()
-    return size
-
-  def encode(self):
-    """Encodes the leaf.
-
-    Returns:
-      bytes which contains the encoded leaf.
-
-    Raises:
-      AftlError: If the encoding failed.
-    """
-    stream = io.BytesIO()
-    self._encode_header(stream)
-    self.signed_vbmeta_primary_annotation.encode(stream)
-    return stream.getvalue()
-
-  def print_desc(self, o):
-    """Prints the leaf.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    i = ' ' * 4
-    fmt = '{}{:25}{}\n'
-    o.write(fmt.format(i, 'Leaf:', ''))
-    super(SignedVBMetaPrimaryAnnotationLeaf, self).print_desc(o)
-    self.signed_vbmeta_primary_annotation.print_desc(o)
-
-
-class AftlImage(object):
-  """A class for the AFTL image, which contains the transparency log ICPs.
-
-  This encapsulates an AFTL ICP section with all information required to
-  validate an inclusion proof.
-
-  Attributes:
-    image_header: A header for the section.
-    icp_entries: A list of AftlIcpEntry objects representing the inclusion
-        proofs.
-  """
-
-  def __init__(self, data=None):
-    """Initializes a new AftlImage section.
-
-    Arguments:
-      data: If not None, must be a bytearray representing an AftlImage.
-
-    Raises:
-      AftlError: If the data does not represent a well-formed AftlImage.
-    """
-    if data:
-      image_header_bytes = data[0:AftlImageHeader.SIZE]
-      self.image_header = AftlImageHeader(image_header_bytes)
-      if not self.image_header.is_valid():
-        raise AftlError('Invalid AftlImageHeader.')
-      icp_count = self.image_header.icp_count
-
-      # Jump past the header for entry deserialization.
-      icp_index = AftlImageHeader.SIZE
-      # Validate each entry.
-      self.icp_entries = []
-      # add_icp_entry() updates entries and header, so set header count to
-      # compensate.
-      self.image_header.icp_count = 0
-      for i in range(icp_count):
-        # Get the entry header from the AftlImage.
-        cur_icp_entry = AftlIcpEntry(data[icp_index:])
-        cur_icp_entry_size = cur_icp_entry.get_expected_size()
-        # Now validate the entry structure.
-        if not cur_icp_entry.is_valid():
-          raise AftlError('Validation of ICP entry {} failed.'.format(i))
-        self.add_icp_entry(cur_icp_entry)
-        icp_index += cur_icp_entry_size
-    else:
-      self.image_header = AftlImageHeader()
-      self.icp_entries = []
-    if not self.is_valid():
-      raise AftlError('Invalid AftlImage.')
-
-  def add_icp_entry(self, icp_entry):
-    """Adds a new AftlIcpEntry to the AftlImage, updating fields as needed.
-
-    Arguments:
-      icp_entry: An AftlIcpEntry structure.
-    """
-    self.icp_entries.append(icp_entry)
-    self.image_header.icp_count += 1
-    self.image_header.aftl_image_size += icp_entry.get_expected_size()
-
-  def verify_vbmeta_image(self, vbmeta_image, transparency_log_pub_keys):
-    """Verifies the contained inclusion proof given the public log key.
-
-    Arguments:
-      vbmeta_image: The vbmeta_image that should be verified against the
-        inclusion proof.
-      transparency_log_pub_keys: List of paths to PEM files containing trusted
-        public keys that correspond with the transparency_logs.
-
-    Returns:
-      True if all the inclusion proofs in the AftlImage validate and are
-      signed by one of the given transparency log public keys; otherwise False.
-    """
-    if not transparency_log_pub_keys or not self.icp_entries:
-      return False
-
-    icp_verified = 0
-    for icp_entry in self.icp_entries:
-      verified = False
-      for pub_key in transparency_log_pub_keys:
-        if icp_entry.verify_vbmeta_image(vbmeta_image, pub_key):
-          verified = True
-          break
-      if verified:
-        icp_verified += 1
-    return icp_verified == len(self.icp_entries)
-
-  def encode(self):
-    """Serialize the AftlImage to a bytearray().
-
-    Returns:
-      A bytearray() with the encoded AFTL image.
-
-    Raises:
-      AftlError: If the AftlImage structure is invalid.
-    """
-    # The header and entries are guaranteed to be valid when encode is called.
-    # Check the entire structure as a whole.
-    if not self.is_valid():
-      raise AftlError('Invalid AftlImage structure.')
-
-    aftl_image = bytearray()
-    aftl_image.extend(self.image_header.encode())
-    for icp_entry in self.icp_entries:
-      aftl_image.extend(icp_entry.encode())
-    return aftl_image
-
-  def is_valid(self):
-    """Ensures that values in the AftlImage are sane.
-
-    Returns:
-      True if the values in the AftlImage are sane, False otherwise.
-    """
-    if not self.image_header.is_valid():
-      return False
-
-    if self.image_header.icp_count != len(self.icp_entries):
-      return False
-
-    for icp_entry in self.icp_entries:
-      if not icp_entry.is_valid():
-        return False
-    return True
-
-  def print_desc(self, o):
-    """Print the AFTL image.
-
-    Arguments:
-      o: The object to write the output to.
-    """
-    o.write('Android Firmware Transparency Image:\n')
-    self.image_header.print_desc(o)
-    for i, icp_entry in enumerate(self.icp_entries):
-      o.write('  Entry #{}:\n'.format(i + 1))
-      icp_entry.print_desc(o)
-
-
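The class above is a plain container: an empty AftlImage is valid, entries are appended with add_icp_entry(), and the bytes produced by encode() can be fed back into the constructor. A minimal round-trip sketch, assuming the classes defined in this module are in scope:

image = AftlImage()            # Header only, zero ICP entries.
encoded = image.encode()       # bytearray beginning with the AftlImageHeader.
reparsed = AftlImage(encoded)  # Parsing the encoding yields an equivalent image.
assert reparsed.is_valid()
assert reparsed.image_header.icp_count == 0
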
-class AftlCommunication(object):
-  """Class to abstract the communication layer with the transparency log."""
-
-  def __init__(self, transparency_log_config, timeout):
-    """Initializes the object.
-
-    Arguments:
-      transparency_log_config: A TransparencyLogConfig instance.
-      timeout: Duration in seconds before requests to the AFTL time out. A
-        value of 0 or None means there will be no timeout.
-    """
-    self.transparency_log_config = transparency_log_config
-    if timeout:
-      self.timeout = timeout
-    else:
-      self.timeout = None
-
-  def add_vbmeta(self, request):
-    """Calls the AddVBMeta RPC on the AFTL server.
-
-    Arguments:
-      request: An AddVBMetaRequest message.
-
-    Returns:
-      An AddVBMetaResponse message.
-
-    Raises:
-      AftlError: If grpc or the proto modules cannot be loaded, or if there is
-        an error communicating with the log.
-    """
-    raise NotImplementedError(
-        'add_vbmeta() needs to be implemented by subclass.')
-
-
-class AftlGrpcCommunication(AftlCommunication):
-  """Class that implements GRPC communication to the AFTL server."""
-
-  def add_vbmeta(self, request):
-    """Calls the AddVBMeta RPC on the AFTL server.
-
-    Arguments:
-      request: An AddVBMetaRequest message.
-
-    Returns:
-      An AddVBMetaResponse message.
-
-    Raises:
-      AftlError: If grpc or the proto modules cannot be loaded, or if there is
-        an error communicating with the log.
-    """
-    # Import grpc now to avoid global dependencies as it otherwise breaks
-    # running unittest with atest.
-    try:
-      import grpc  # pylint: disable=import-outside-toplevel
-      from proto import api_pb2_grpc # pylint: disable=import-outside-toplevel
-    except ImportError as e:
-      err_str = 'grpc can be installed with "pip install grpcio".\n'
-      raise AftlError('Failed to import module: ({}).\n{}'.format(e, err_str))
-
-    # Set up the gRPC channel with the transparency log.
-    sys.stdout.write('Preparing to request inclusion proof from {}. This could '
-                     'take ~30 seconds for the process to complete.\n'.format(
-                         self.transparency_log_config.target))
-    channel = grpc.insecure_channel(self.transparency_log_config.target)
-    stub = api_pb2_grpc.AFTLogStub(channel)
-
-    metadata = []
-    if self.transparency_log_config.api_key:
-      metadata.append(('x-api-key', self.transparency_log_config.api_key))
-
-    # Attempt to transmit to the transparency log.
-    sys.stdout.write('ICP is about to be requested from transparency log '
-                     'with domain {}.\n'.format(
-                         self.transparency_log_config.target))
-    try:
-      response = stub.AddVBMeta(request, timeout=self.timeout,
-                                metadata=metadata)
-    except grpc.RpcError as e:
-      raise AftlError('Error: grpc failure ({})'.format(e))
-    return response
-
-
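Because add_vbmeta() is the only method subclasses must provide, tests or offline tooling can substitute a non-network implementation for AftlGrpcCommunication. A minimal sketch of such a fake, assuming the AftlCommunication base class above is in scope; canned_response stands for a hypothetical pre-built AddVBMetaResponse:

class FakeAftlCommunication(AftlCommunication):
  """Returns a canned AddVBMetaResponse instead of contacting a log over gRPC."""

  def __init__(self, transparency_log_config, timeout, canned_response):
    super(FakeAftlCommunication, self).__init__(transparency_log_config,
                                                timeout)
    self.canned_response = canned_response

  def add_vbmeta(self, request):
    del request  # The canned response is returned regardless of the request.
    return self.canned_response
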
-class Aftl(avbtool.Avb):
-  """Business logic for aftltool command-line tool."""
-
-  def get_vbmeta_image(self, image_filename):
-    """Gets the VBMeta struct bytes from image.
-
-    Arguments:
-      image_filename: Image file to get information from.
-
-    Returns:
-      A tuple with following elements:
-        1. A bytearray with the vbmeta structure or None if the file does not
-           contain a VBMeta structure.
-        2. The VBMeta image footer.
-    """
-    # Reads and parses the vbmeta image.
-    try:
-      image = avbtool.ImageHandler(image_filename, read_only=True)
-    except (IOError, ValueError) as e:
-      sys.stderr.write('The image does not contain a valid VBMeta structure: '
-                       '{}.\n'.format(e))
-      return None, None
-
-    try:
-      (footer, header, _, _) = self._parse_image(image)
-    except avbtool.AvbError as e:
-      sys.stderr.write('The image cannot be parsed: {}.\n'.format(e))
-      return None, None
-
-    # Seeks to the start of the vbmeta image and calculates its size.
-    offset = 0
-    if footer:
-      offset = footer.vbmeta_offset
-    vbmeta_image_size = (offset + header.SIZE
-                         + header.authentication_data_block_size
-                         + header.auxiliary_data_block_size)
-
-    # Reads the vbmeta image bytes.
-    try:
-      image.seek(offset)
-    except RuntimeError as e:
-      sys.stderr.write('Given vbmeta image offset is invalid: {}.\n'.format(e))
-      return None, None
-    return image.read(vbmeta_image_size), footer
-
-  def get_aftl_image(self, image_filename):
-    """Gets the AftlImage from image.
-
-    Arguments:
-      image_filename: Image file to get information from.
-
-    Returns:
-      An AftlImage or None if the file does not contain an AftlImage.
-    """
-    # Reads the vbmeta image bytes.
-    vbmeta_image, _ = self.get_vbmeta_image(image_filename)
-    if not vbmeta_image:
-      return None
-
-    try:
-      image = avbtool.ImageHandler(image_filename, read_only=True)
-    except ValueError as e:
-      sys.stderr.write('The image does not contain a valid VBMeta structure: '
-                       '{}.\n'.format(e))
-      return None
-
-    # Seeks to the start of the AftlImage.
-    try:
-      image.seek(len(vbmeta_image))
-    except RuntimeError as e:
-      sys.stderr.write('Given AftlImage offset is invalid: {}.\n'
-                       .format(e))
-      return None
-
-    # Parses the header for the AftlImage size.
-    tmp_header_bytes = image.read(AftlImageHeader.SIZE)
-    if not tmp_header_bytes or len(tmp_header_bytes) != AftlImageHeader.SIZE:
-      sys.stderr.write('This image does not contain an AftlImage.\n')
-      return None
-
-    try:
-      tmp_header = AftlImageHeader(tmp_header_bytes)
-    except AftlError as e:
-      sys.stderr.write('This image does not contain a valid AftlImage: '
-                       '{}.\n'.format(e))
-      return None
-
-    # Resets to the beginning of the AftlImage.
-    try:
-      image.seek(len(vbmeta_image))
-    except RuntimeError as e:
-      sys.stderr.write('Given AftlImage offset is invalid: {}.\n'
-                       .format(e))
-      return None
-
-    # Parses the full AftlImage.
-    aftl_image_bytes = image.read(tmp_header.aftl_image_size)
-    aftl_image = None
-    try:
-      aftl_image = AftlImage(aftl_image_bytes)
-    except AftlError as e:
-      sys.stderr.write('The image does not contain a valid AftlImage: '
-                       '{}.\n'.format(e))
-    return aftl_image
-
-  def info_image_icp(self, vbmeta_image_path, output):
-    """Implements the 'info_image_icp' command.
-
-    Arguments:
-      vbmeta_image_path: Image file to get information from.
-      output: Output file to write human-readable information to (file object).
-
-    Returns:
-      True if the given image has an AftlImage and could successfully
-      be processed; otherwise False.
-    """
-    aftl_image = self.get_aftl_image(vbmeta_image_path)
-    if not aftl_image:
-      return False
-    aftl_image.print_desc(output)
-    return True
-
-  def verify_image_icp(self, vbmeta_image_path, transparency_log_pub_keys,
-                       output):
-    """Implements the 'verify_image_icp' command.
-
-    Arguments:
-      vbmeta_image_path: Image file to get information from.
-      transparency_log_pub_keys: List of paths to PEM files containing trusted
-        public keys that correspond with the transparency_logs.
-      output: Output file to write human-readable information to (file object).
-
-    Returns:
-      True if the inclusion proofs for the given image validate; otherwise
-      False.
-    """
-    vbmeta_image, _ = self.get_vbmeta_image(vbmeta_image_path)
-    aftl_image = self.get_aftl_image(vbmeta_image_path)
-    if not aftl_image or not vbmeta_image:
-      return False
-    verified = aftl_image.verify_vbmeta_image(vbmeta_image,
-                                              transparency_log_pub_keys)
-    if not verified:
-      output.write('The inclusion proofs for the image do not validate.\n')
-      return False
-    output.write('The inclusion proofs for the image successfully validate.\n')
-    return True
-
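A short sketch of calling the two commands above directly from Python; the image path and key path are hypothetical:

aftl = Aftl()
aftl.info_image_icp('/path/to/vbmeta_with_icp.img', sys.stdout)
ok = aftl.verify_image_icp('/path/to/vbmeta_with_icp.img',
                           ['/path/to/log_pub_key.pem'],
                           sys.stdout)
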
-  def request_inclusion_proof(self, transparency_log_config, vbmeta_image,
-                              version_inc, manufacturer_key_path,
-                              signing_helper, signing_helper_with_files,
-                              timeout, aftl_comms=None):
-    """Packages and sends a request to the specified transparency log.
-
-    Arguments:
-      transparency_log_config: A TransparencyLogConfig instance.
-      vbmeta_image: A bytearray with the VBMeta image.
-      version_inc: Subcomponent of the build fingerprint.
-      manufacturer_key_path: Path to key used to sign messages sent to the
-        transparency log servers.
-      signing_helper: Program which signs a hash and returns a signature.
-      signing_helper_with_files: Same as signing_helper but uses files instead.
-      timeout: Duration in seconds before requests to the transparency log
-        time out.
-      aftl_comms: A subclass of the AftlCommunication class. The default is
-        to use AftlGrpcCommunication.
-
-    Returns:
-      An AftlIcpEntry with the inclusion proof for the log entry.
-
-    Raises:
-      AftlError: If grpc or the proto modules cannot be loaded, if there is an
-         error communicating with the log, if the manufacturer_key_path
-         cannot be decoded, or if the log submission cannot be signed.
-    """
-    # Calculate the hash of the vbmeta image.
-    vbmeta_hash = hashlib.sha256(vbmeta_image).digest()
-
-    # Extract the key data from the PEM file if it is of size 4096.
-    manufacturer_key = avbtool.RSAPublicKey(manufacturer_key_path)
-    if manufacturer_key.num_bits != 4096:
-      raise AftlError('Manufacturer key is not of size 4096: {}'.format(
-          manufacturer_key.num_bits))
-    manufacturer_key_data = rsa_key_read_pem_bytes(manufacturer_key_path)
-
-    # Calculate the hash of the manufacturer key data.
-    m_key_hash = hashlib.sha256(manufacturer_key_data).digest()
-
-    # Build VBMetaPrimaryAnnotation with that data.
-    annotation = VBMetaPrimaryAnnotation(
-        vbmeta_hash=vbmeta_hash, version_incremental=version_inc,
-        manufacturer_key_hash=m_key_hash)
-
-    # Sign annotation and add it to the request.
-    signed_annotation = annotation.sign(
-        manufacturer_key_path, signing_helper=signing_helper,
-        signing_helper_with_files=signing_helper_with_files)
-
-    encoded_signed_annotation = io.BytesIO()
-    signed_annotation.encode(encoded_signed_annotation)
-    request = api_pb2.AddVBMetaRequest(
-        vbmeta=vbmeta_image,
-        signed_vbmeta_primary_annotation=encoded_signed_annotation.getvalue())
-
-    # Submit signed VBMeta annotation to the server.
-    if not aftl_comms:
-      aftl_comms = AftlGrpcCommunication(transparency_log_config, timeout)
-    response = aftl_comms.add_vbmeta(request)
-
-    # Return an AftlIcpEntry representing this response.
-    icp_entry = AftlIcpEntry()
-    icp_entry.translate_response(transparency_log_config.target, response)
-    return icp_entry
-
-  def make_icp_from_vbmeta(self, vbmeta_image_path, output,
-                           signing_helper, signing_helper_with_files,
-                           version_incremental, transparency_log_configs,
-                           manufacturer_key, padding_size, timeout):
-    """Generates a vbmeta image with inclusion proof given a vbmeta image.
-
-    The AftlImage contains the information required to validate an inclusion
-    proof for a specific vbmeta image. It consists of a header (struct
-    AftlImageHeader) and zero or more entry structures (struct AftlIcpEntry)
-    that contain the vbmeta leaf hash, tree size, root hash, inclusion proof
-    hashes, and the signature for the root hash.
-
-    The vbmeta image, its hash, the version_incremental part of the build
-    fingerprint, and the hash of the manufacturer key are sent to the
-    transparency log, with the message signed by the manufacturer key.
-    An inclusion proof is calculated and returned. This inclusion proof is
-    then packaged in an AftlImage structure. The existing vbmeta data is
-    copied to a new file, appended with the AftlImage data, and written to
-    output. Validation of the inclusion proof does not require
-    communication with the transparency log.
-
-    Arguments:
-      vbmeta_image_path: Path to a vbmeta image file.
-      output: File to write the results to.
-      signing_helper: Program which signs a hash and returns a signature.
-      signing_helper_with_files: Same as signing_helper but uses files instead.
-      version_incremental: A string representing the subcomponent of the
-        build fingerprint used to identify the vbmeta in the transparency log.
-      transparency_log_configs: List of TransparencyLogConfig used to request
-        the inclusion proofs.
-      manufacturer_key: Path to PEM file containing the key used to sign
-        messages sent to the transparency log servers.
-      padding_size: If not 0, pads the output so its size is a multiple of
-        this number.
-      timeout: Duration in seconds before requests to the AFTL time out. A
-        value of 0 or None means there will be no timeout.
-
-    Returns:
-      True if the inclusion proofs could be fetched from the transparency log
-      servers and could be successfully validated; otherwise False.
-    """
-    # Retrieves vbmeta structure from given partition image.
-    vbmeta_image, footer = self.get_vbmeta_image(vbmeta_image_path)
-
-    # Fetches inclusion proofs for vbmeta structure from all transparency logs.
-    aftl_image = AftlImage()
-    for log_config in transparency_log_configs:
-      try:
-        icp_entry = self.request_inclusion_proof(log_config, vbmeta_image,
-                                                 version_incremental,
-                                                 manufacturer_key,
-                                                 signing_helper,
-                                                 signing_helper_with_files,
-                                                 timeout)
-        if not icp_entry.verify_vbmeta_image(vbmeta_image, log_config.pub_key):
-          sys.stderr.write('The inclusion proof from {} could not be verified.'
-                           '\n'.format(log_config.target))
-        aftl_image.add_icp_entry(icp_entry)
-      except AftlError as e:
-        # The inclusion proof request failed. Continue and see if others will.
-        sys.stderr.write('Requesting inclusion proof failed: {}.\n'.format(e))
-        continue
-
-    # Checks that the resulting AftlImage is sane.
-    if aftl_image.image_header.icp_count != len(transparency_log_configs):
-      sys.stderr.write('Valid inclusion proofs could only be retrieved from {} '
-                       'out of {} transparency logs.\n'
-                       .format(aftl_image.image_header.icp_count,
-                               len(transparency_log_configs)))
-      return False
-    if not aftl_image.is_valid():
-      sys.stderr.write('Resulting AftlImage structure is malformed.\n')
-      return False
-    keys = [log.pub_key for log in transparency_log_configs]
-    if not aftl_image.verify_vbmeta_image(vbmeta_image, keys):
-      sys.stderr.write('Resulting AftlImage inclusion proofs do not '
-                       'validate.\n')
-      return False
-
-    # Writes original VBMeta image, followed by the AftlImage into the output.
-    if footer:  # Checks if it is a chained partition.
-      # TODO(b/147217370): Determine the best way to handle chained partitions
-      # like the system.img. Currently, we only put the main vbmeta.img in the
-      # transparency log.
-      sys.stderr.write('Image has a footer and ICP for this format is not '
-                       'implemented.\n')
-      return False
-
-    output.seek(0)
-    output.write(vbmeta_image)
-    encoded_aftl_image = aftl_image.encode()
-    output.write(encoded_aftl_image)
-
-    if padding_size > 0:
-      total_image_size = len(vbmeta_image) + len(encoded_aftl_image)
-      padded_size = avbtool.round_to_multiple(total_image_size, padding_size)
-      padding_needed = padded_size - total_image_size
-      output.write(b'\0' * padding_needed)
-
-    sys.stdout.write('VBMeta image with AFTL image successfully created.\n')
-    return True
-
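A sketch of driving the flow described above directly from Python, using the TransparencyLogConfig class defined later in this file. The host name, paths, and version string are hypothetical, and the manufacturer key has to be a 4096-bit RSA key as enforced by request_inclusion_proof():

aftl = Aftl()
log = TransparencyLogConfig('log.example.com:8080', '/path/to/log_pub_key.pem')
with open('/path/to/vbmeta_with_icp.img', 'wb') as out:
  ok = aftl.make_icp_from_vbmeta(
      vbmeta_image_path='/path/to/vbmeta.img',
      output=out,
      signing_helper=None,
      signing_helper_with_files=None,
      version_incremental='example_build_12345',
      transparency_log_configs=[log],
      manufacturer_key='/path/to/manufacturer_key.pem',
      padding_size=0,
      timeout=600)
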
-  def _load_test_process_function(self, vbmeta_image_path,
-                                  transparency_log_config,
-                                  manufacturer_key,
-                                  process_number, submission_count,
-                                  preserve_icp_images, timeout, result_queue):
-    """Function to be used by multiprocessing.Process.
-
-    Arguments:
-      vbmeta_image_path: Path to a vbmeta image file.
-      transparency_log_config: A TransparencyLogConfig instance used to request
-        an inclusion proof.
-      manufacturer_key: Path to PEM file containing the key used to sign
-        messages sent to the transparency log servers.
-      process_number: The number of the process executing the function.
-      submission_count: Number of total submissions to perform per process.
-      preserve_icp_images: Boolean to indicate if the generated vbmeta image
-        files with inclusion proofs should be preserved in the temporary
-        directory.
-      timeout: Duration in seconds before requests to the AFTL time out. A
-        value of 0 or None means there will be no timeout.
-      result_queue: Multiprocessing.Queue object for posting execution results.
-    """
-    for count in range(0, submission_count):
-      version_incremental = 'aftl_load_testing_{}_{}'.format(process_number,
-                                                             count)
-      output_file = os.path.join(tempfile.gettempdir(),
-                                 '{}_icp.img'.format(version_incremental))
-      output = open(output_file, 'wb')
-
-      # Instrumented section.
-      start_time = time.time()
-      result = self.make_icp_from_vbmeta(
-          vbmeta_image_path=vbmeta_image_path,
-          output=output,
-          signing_helper=None,
-          signing_helper_with_files=None,
-          version_incremental=version_incremental,
-          transparency_log_configs=[transparency_log_config],
-          manufacturer_key=manufacturer_key,
-          padding_size=0,
-          timeout=timeout)
-      end_time = time.time()
-
-      output.close()
-      if not preserve_icp_images:
-        os.unlink(output_file)
-
-      # Puts the result onto the result queue.
-      execution_time = end_time - start_time
-      result_queue.put((start_time, end_time, execution_time,
-                        version_incremental, result))
-
-  def load_test_aftl(self, vbmeta_image_path, output, transparency_log_config,
-                     manufacturer_key,
-                     process_count, submission_count, stats_filename,
-                     preserve_icp_images, timeout):
-    """Performs multi-threaded load test on a given AFTL and prints stats.
-
-    Arguments:
-      vbmeta_image_path: Path to a vbmeta image file.
-      output: File to write the report to.
-      transparency_log_config: A TransparencyLogConfig used to request an
-        inclusion proof.
-      manufacturer_key: Path to PEM file containing the key used to sign
-        messages sent to the transparency log servers.
-      process_count: Number of processes used for parallel testing.
-      submission_count: Number of total submissions to perform per process.
-      stats_filename: Path to the stats file to write the raw execution data to.
-        If None, it will be written to the temp directory.
-      preserve_icp_images: Boolean to indicate if the generated vbmeta
-        image files with inclusion proofs should be preserved.
-      timeout: Duration in seconds before requests to the AFTL time out. A
-        value of 0 or None means there will be no timeout.
-
-    Returns:
-      True if the load test succeeded without errors; otherwise False.
-    """
-    if process_count < 1 or submission_count < 1:
-      sys.stderr.write('Values for --processes/--submissions '
-                       'must be at least 1.\n')
-      return False
-
-    if not stats_filename:
-      stats_filename = os.path.join(
-          tempfile.gettempdir(),
-          'load_test_p{}_s{}.csv'.format(process_count, submission_count))
-
-    stats_file = None
-    try:
-      stats_file = open(stats_filename, 'wt')
-      stats_file.write('start_time,end_time,execution_time,version_incremental,'
-                       'result\n')
-    except IOError as e:
-      sys.stderr.write('Could not open stats file {}: {}.\n'
-                       .format(stats_filename, e))
-      return False
-
-    # Launch all the processes with their workloads.
-    result_queue = multiprocessing.Queue()
-    processes = set()
-    execution_times = []
-    results = []
-    for i in range(0, process_count):
-      p = multiprocessing.Process(
-          target=self._load_test_process_function,
-          args=(vbmeta_image_path, transparency_log_config,
-                manufacturer_key, i, submission_count,
-                preserve_icp_images, timeout, result_queue))
-      p.start()
-      processes.add(p)
-
-    while processes:
-      # Processes the results queue and writes these to a stats file.
-      try:
-        (start_time, end_time, execution_time, version_incremental,
-         result) = result_queue.get(timeout=1)
-        stats_file.write('{},{},{},{},{}\n'.format(start_time, end_time,
-                                                   execution_time,
-                                                   version_incremental, result))
-        execution_times.append(execution_time)
-        results.append(result)
-      except queue.Empty:
-        pass
-
-      # Checks if processes are still alive; if not, clean them up. join() would
-      # have been nicer, but we want to continuously stream the stats out to file.
-      for p in processes.copy():
-        if not p.is_alive():
-          processes.remove(p)
-
-    # Prepares stats.
-    executions = sorted(execution_times)
-    execution_count = len(execution_times)
-    median = 0
-
-    # pylint: disable=old-division
-    if execution_count % 2 == 0:
-      median = (executions[execution_count // 2 - 1]
-                + executions[execution_count // 2]) / 2
-    else:
-      median = executions[execution_count // 2]
-
-    # Outputs the stats report.
-    o = output
-    o.write('Load testing results:\n')
-    o.write('  Processes:               {}\n'.format(process_count))
-    o.write('  Submissions per process: {}\n'.format(submission_count))
-    o.write('\n')
-    o.write('  Submissions:\n')
-    o.write('    Total:                 {}\n'.format(len(executions)))
-    o.write('    Succeeded:             {}\n'.format(results.count(True)))
-    o.write('    Failed:                {}\n'.format(results.count(False)))
-    o.write('\n')
-    o.write('  Submission execution durations:\n')
-    o.write('    Average:               {:.2f} sec\n'.format(
-        sum(execution_times) / execution_count))
-    o.write('    Median:                {:.2f} sec\n'.format(median))
-    o.write('    Min:                   {:.2f} sec\n'.format(min(executions)))
-    o.write('    Max:                   {:.2f} sec\n'.format(max(executions)))
-
-    # Close the stats file.
-    stats_file.close()
-    if results.count(False):
-      return False
-    return True
-
-
-class TransparencyLogConfig(object):
-  """Class that gathers the fields representing a transparency log.
-
-  Attributes:
-    target: The hostname and port of the server in hostname:port format.
-    pub_key: A PEM file that contains the public key of the transparency
-      log server.
-    api_key: The API key to use to interact with the transparency log
-      server.
-  """
-
-  @staticmethod
-  def from_argument(arg):
-    """Build an object from a command line argument string.
-
-    Arguments:
-      arg: The transparency log as passed on the command line. It must be in
-        the format host:port,key_file[,api_key] (see the usage sketch after
-        this class).
-
-    Returns:
-      The TransparencyLogConfig instance.
-
-    Raises:
-      argparse.ArgumentTypeError: If the format of arg is invalid.
-    """
-    api_key = None
-    try:
-      target, pub_key, *rest = arg.split(",", maxsplit=2)
-    except ValueError:
-      raise argparse.ArgumentTypeError("incorrect format for transparency log "
-                                       "server, expected "
-                                       "host:port,publickey_file[,api_key].")
-    if not target:
-      raise argparse.ArgumentTypeError("incorrect format for transparency log "
-                                       "server: host:port cannot be empty.")
-    if not pub_key:
-      raise argparse.ArgumentTypeError("incorrect format for transparency log "
-                                       "server: publickey_file cannot be "
-                                       "empty.")
-    if rest:
-      api_key = rest[0]
-    return TransparencyLogConfig(target, pub_key, api_key)
-
-  def __init__(self, target, pub_key, api_key=None):
-    """Initializes a new TransparencyLogConfig object."""
-    self.target = target
-    self.pub_key = pub_key
-    self.api_key = api_key
-
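A minimal sketch, with hypothetical values, of the host:port,publickey_file[,api_key] parsing performed by from_argument() above:

config = TransparencyLogConfig.from_argument(
    'log.example.com:8080,/path/to/log_pub_key.pem,example-api-key')
assert config.target == 'log.example.com:8080'
assert config.pub_key == '/path/to/log_pub_key.pem'
assert config.api_key == 'example-api-key'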
-
-class AftlTool(avbtool.AvbTool):
-  """Object for aftltool command-line tool."""
-
-  def __init__(self):
-    """Initializer method."""
-    self.aftl = Aftl()
-    super(AftlTool, self).__init__()
-
-  def make_icp_from_vbmeta(self, args):
-    """Implements the 'make_icp_from_vbmeta' sub-command."""
-    args = self._fixup_common_args(args)
-    return self.aftl.make_icp_from_vbmeta(args.vbmeta_image_path,
-                                          args.output,
-                                          args.signing_helper,
-                                          args.signing_helper_with_files,
-                                          args.version_incremental,
-                                          args.transparency_log_servers,
-                                          args.manufacturer_key,
-                                          args.padding_size,
-                                          args.timeout)
-
-  def info_image_icp(self, args):
-    """Implements the 'info_image_icp' sub-command."""
-    return self.aftl.info_image_icp(args.vbmeta_image_path.name, args.output)
-
-  def verify_image_icp(self, args):
-    """Implements the 'verify_image_icp' sub-command."""
-    return self.aftl.verify_image_icp(args.vbmeta_image_path.name,
-                                      args.transparency_log_pub_keys,
-                                      args.output)
-
-  def load_test_aftl(self, args):
-    """Implements the 'load_test_aftl' sub-command."""
-    return self.aftl.load_test_aftl(args.vbmeta_image_path,
-                                    args.output,
-                                    args.transparency_log_server,
-                                    args.manufacturer_key,
-                                    args.processes,
-                                    args.submissions,
-                                    args.stats_file,
-                                    args.preserve_icp_images,
-                                    args.timeout)
-
-  def run(self, argv):
-    """Command-line processor.
-
-    Arguments:
-      argv: Pass sys.argv from main.
-    """
-    parser = argparse.ArgumentParser()
-    subparsers = parser.add_subparsers(title='subcommands')
-
-    # Command: make_icp_from_vbmeta
-    sub_parser = subparsers.add_parser('make_icp_from_vbmeta',
-                                       help='Makes an ICP enhanced vbmeta image'
-                                       ' from an existing vbmeta image.')
-    sub_parser.add_argument('--output',
-                            help='Output file name.',
-                            type=argparse.FileType('wb'),
-                            default=sys.stdout.buffer)
-    sub_parser.add_argument('--vbmeta_image_path',
-                            help='Path to a generated vbmeta image file.',
-                            required=True)
-    sub_parser.add_argument('--version_incremental',
-                            help='Current build ID.',
-                            required=True)
-    sub_parser.add_argument('--manufacturer_key',
-                            help='Path to the PEM file containing the '
-                            'manufacturer key for use with the log.',
-                            required=True)
-    sub_parser.add_argument('--transparency_log_servers',
-                            help='List of transparency log servers in '
-                            'host:port,publickey_file[,api_key] format. The '
-                            'publickey_file must be in the PEM format.',
-                            nargs='+', type=TransparencyLogConfig.from_argument)
-    sub_parser.add_argument('--padding_size',
-                            metavar='NUMBER',
-                            help='If non-zero, pads output with NUL bytes so '
-                            'its size is a multiple of NUMBER (default: 0)',
-                            type=avbtool.parse_number,
-                            default=0)
-    sub_parser.add_argument('--timeout',
-                            metavar='SECONDS',
-                            help='Timeout in seconds for transparency log '
-                            'requests (default: 600 sec). A value of 0 means '
-                            'no timeout.',
-                            type=avbtool.parse_number,
-                            default=600)
-    self._add_common_args(sub_parser)
-    sub_parser.set_defaults(func=self.make_icp_from_vbmeta)
-
-    # Command: info_image_icp
-    sub_parser = subparsers.add_parser(
-        'info_image_icp',
-        help='Show information about AFTL ICPs in vbmeta or footer.')
-    sub_parser.add_argument('--vbmeta_image_path',
-                            help='Path to vbmeta image for AFTL information.',
-                            type=argparse.FileType('rb'),
-                            required=True)
-    sub_parser.add_argument('--output',
-                            help='Write info to file',
-                            type=argparse.FileType('wt'),
-                            default=sys.stdout)
-    sub_parser.set_defaults(func=self.info_image_icp)
-
-    # Command: verify_image_icp
-    sub_parser = subparsers.add_parser(
-        'verify_image_icp',
-        help='Verify AFTL ICPs in vbmeta or footer.')
-
-    sub_parser.add_argument('--vbmeta_image_path',
-                            help='Image to verify the inclusion proofs for.',
-                            type=argparse.FileType('rb'),
-                            required=True)
-    sub_parser.add_argument('--transparency_log_pub_keys',
-                            help='Paths to PEM files containing transparency '
-                            'log server key(s). This must not be None.',
-                            nargs='*',
-                            required=True)
-    sub_parser.add_argument('--output',
-                            help='Write info to file',
-                            type=argparse.FileType('wt'),
-                            default=sys.stdout)
-    sub_parser.set_defaults(func=self.verify_image_icp)
-
-    # Command: load_test_aftl
-    sub_parser = subparsers.add_parser(
-        'load_test_aftl',
-        help='Perform load testing against one AFTL log server. Note: This MUST'
-        ' not be performed against a production system.')
-    sub_parser.add_argument('--vbmeta_image_path',
-                            help='Path to a generated vbmeta image file.',
-                            required=True)
-    sub_parser.add_argument('--output',
-                            help='Write report to file.',
-                            type=argparse.FileType('wt'),
-                            default=sys.stdout)
-    sub_parser.add_argument('--manufacturer_key',
-                            help='Path to the PEM file containing the '
-                            'manufacturer key for use with the log.',
-                            required=True)
-    sub_parser.add_argument('--transparency_log_server',
-                            help='Transparency log server to test against in '
-                            'host:port,publickey_file[,api_key] format. The '
-                            'publickey_file must be in the PEM format.',
-                            required=True,
-                            type=TransparencyLogConfig.from_argument)
-    sub_parser.add_argument('--processes',
-                            help='Number of parallel processes to use for '
-                            'testing (default: 1).',
-                            type=avbtool.parse_number,
-                            default=1)
-    sub_parser.add_argument('--submissions',
-                            help='Number of submissions to perform against the '
-                            'log per process (default: 1).',
-                            type=avbtool.parse_number,
-                            default=1)
-    sub_parser.add_argument('--stats_file',
-                            help='Path to the stats file to write the raw '
-                            'execution data to (Default: '
-                            'load_test_p[processes]_s[submissions].csv).')
-    sub_parser.add_argument('--preserve_icp_images',
-                            help='Boolean flag to indicate if the generated '
-                            'vbmeta image files with inclusion proofs should '
-                            'be preserved.',
-                            action='store_true')
-    sub_parser.add_argument('--timeout',
-                            metavar='SECONDS',
-                            help='Timeout in seconds for transparency log '
-                            'requests (default: 0). A value of 0 means '
-                            'no timeout.',
-                            type=avbtool.parse_number,
-                            default=0)
-    sub_parser.set_defaults(func=self.load_test_aftl)
-
-    args = parser.parse_args(argv[1:])
-    if 'func' not in args:
-      # This error gets raised when the command line tool is called without any
-      # arguments. It mimics the original Python 2 behavior.
-      parser.print_usage()
-      print('aftltool: error: too few arguments')
-      sys.exit(2)
-    try:
-      success = args.func(args)
-    except AftlError as e:
-      # Signals to calling tools that an unhandled exception occurred.
-      sys.stderr.write('Unhandled AftlError occurred: {}\n'.format(e))
-      sys.exit(2)
-
-    if not success:
-      # Signals to calling tools that the command has failed.
-      sys.exit(1)
-
-if __name__ == '__main__':
-  tool = AftlTool()
-  tool.run(sys.argv)
diff --git a/aftltool_integration_test.py b/aftltool_integration_test.py
deleted file mode 100755
index 814631b..0000000
--- a/aftltool_integration_test.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2019, The Android Open Source Project
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation
-# files (the "Software"), to deal in the Software without
-# restriction, including without limitation the rights to use, copy,
-# modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
-# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-#
-"""Integration tests for the avbtool with an actual AFTL.
-
-The test cases directly interact with a transparency log. However,
-before using this script the following environment variables
-need to be set:
-
-  AFTL_HOST: host:port of the transparency log to test with.
-  AFTL_PUBKEY: Transparency log public key in PEM format.
-  AFTL_APIKEY: If the transparency log requires an API key to submit data,
-      this should be the complete key.
-  AFTL_VBMETA_IMAGE: VBMeta image that should be used for submission to AFTL.
-  AFTL_MANUFACTURER_KEY: Manufacturer signing key used to sign submissions
-      to the transparency log in PEM format.
-"""
-
-import os
-import unittest
-
-import aftltool
-import aftltool_test
-
-
-class AftlIntegrationTest(aftltool_test.AftlTest):
-  """Test suite for integration testing aftltool with an actual AFTL.
-
-  Note: The actual test cases are implemented in the superclass. This class
-  only contains the configuration for running the unit tests against a live
-  log as a means of integration testing.
-  """
-
-  def set_up_environment(self):
-    """Sets up the environment for integration testing with actual AFTL."""
-    self.aftl_host = os.environ.get('AFTL_HOST')
-    self.aftl_pubkey = os.environ.get('AFTL_PUBKEY')
-    self.aftl_apikey = os.environ.get('AFTL_APIKEY')
-    self.vbmeta_image = os.environ.get('AFTL_VBMETA_IMAGE')
-    self.manufacturer_key = os.environ.get('AFTL_MANUFACTURER_KEY')
-
-    if (not self.aftl_host or not self.aftl_pubkey or not self.vbmeta_image
-        or not self.manufacturer_key):
-      self.fail('Environment variables not correctly set up. See description of'
-                ' this test case for details')
-
-  def get_aftl_implementation(self, canned_response):
-    """Retrieves an instance if aftltool.Aftl for integration testing.
-
-    Arguments:
-      canned_response: Since we are using the actual implementation and not a
-        mock, this gets ignored.
-
-    Returns:
-      An instance of aftltool.Aftl()
-    """
-    return aftltool.Aftl()
-
-
-if __name__ == '__main__':
-  unittest.main(verbosity=2)
diff --git a/aftltool_test.py b/aftltool_test.py
deleted file mode 100755
index 7c8c205..0000000
--- a/aftltool_test.py
+++ /dev/null
@@ -1,1609 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2019, The Android Open Source Project
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation
-# files (the "Software"), to deal in the Software without
-# restriction, including without limitation the rights to use, copy,
-# modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
-# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-#
-"""Unit tests for aftltool."""
-
-import argparse
-import binascii
-import io
-import os
-import struct
-import sys
-import tempfile
-import unittest
-
-import aftltool
-import avbtool
-
-# pylint: disable=import-error
-import api_pb2
-# pylint: enable=import-error
-
-
-# Workaround for b/149307145 in order to pick up the test data from the right
-# location independent where the script is called from.
-# TODO(b/149307145): Remove workaround once the referenced bug is fixed.
-TEST_EXEC_PATH = os.path.dirname(os.path.realpath(__file__))
-
-class TlsDataTest(unittest.TestCase):
-
-  def test_decode(self):
-    data = io.BytesIO(b'\x01\x02')
-    value = aftltool.tls_decode_bytes('B', data)
-    self.assertEqual(value, b'\x02')
-    self.assertEqual(data.read(), b'')
-
-    data = io.BytesIO(b'\x00\x01\x03\xff')
-    value = aftltool.tls_decode_bytes('H', data)
-    self.assertEqual(value, b'\x03')
-    self.assertEqual(data.read(), b'\xff')
-
-    data = io.BytesIO(b'\x00\x00\x00\x02\x04\x05\xff\xff')
-    value = aftltool.tls_decode_bytes('L', data)
-    self.assertEqual(value, b'\x04\x05')
-    self.assertEqual(data.read(), b'\xff\xff')
-
-  def test_decode_invalid(self):
-    # Insufficient data for reading the size.
-    with self.assertRaises(aftltool.AftlError):
-      aftltool.tls_decode_bytes('B', io.BytesIO(b''))
-
-    # Invalid byte_size character.
-    with self.assertRaises(aftltool.AftlError):
-      aftltool.tls_decode_bytes('/o/', io.BytesIO(b'\x01\x02\xff'))
-
-    # Insufficient data for reading the value.
-    with self.assertRaises(aftltool.AftlError):
-      aftltool.tls_decode_bytes('B', io.BytesIO(b'\x01'))
-
-  def test_encode(self):
-    stream = io.BytesIO()
-    aftltool.tls_encode_bytes('B', b'\x01\x02\x03\x04', stream)
-    self.assertEqual(stream.getvalue(), b'\x04\x01\x02\x03\x04')
-
-    stream = io.BytesIO()
-    aftltool.tls_encode_bytes('H', b'\x01\x02\x03\x04', stream)
-    self.assertEqual(stream.getvalue(), b'\x00\x04\x01\x02\x03\x04')
-
-  def test_encode_invalid(self):
-    # Byte size is not large enough to encode the value.
-    stream = io.BytesIO()
-    with self.assertRaises(aftltool.AftlError):
-      aftltool.tls_encode_bytes('B', b'\x01'*256, stream)
-
-    # Invalid byte_size character.
-    stream = io.BytesIO()
-    with self.assertRaises(aftltool.AftlError):
-      aftltool.tls_encode_bytes('/o/', b'\x01\x02', stream)
-
-
-class VBMetaPrimaryAnnotationTest(unittest.TestCase):
-
-  def test_decode(self):
-    stream = io.BytesIO(b'\x00\x00\x00\x00\x00')
-    anno = aftltool.VBMetaPrimaryAnnotation.parse(stream)
-    self.assertEqual(anno.vbmeta_hash, b'')
-    self.assertEqual(anno.version_incremental, '')
-    self.assertEqual(anno.manufacturer_key_hash, b'')
-    self.assertEqual(anno.description, '')
-
-  def test_encode(self):
-    stream = io.BytesIO()
-    anno = aftltool.VBMetaPrimaryAnnotation()
-    anno.encode(stream)
-    self.assertEqual(stream.getvalue(), b'\x00\x00\x00\x00\x00')
-
-  def test_encode_invalid(self):
-    stream = io.BytesIO()
-    anno = aftltool.VBMetaPrimaryAnnotation()
-    # Version incremental should be ASCII only.
-    anno.version_incremental = '☃'
-    with self.assertRaises(aftltool.AftlError):
-      anno.encode(stream)
-
-
-class SignedVBMetaAnnotationLeafTest(unittest.TestCase):
-
-  def test_encode(self):
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf()
-    self.assertEqual(leaf.encode(),
-                     b'\x01'   # Version
-                     b'\x00\x00\x00\x00\x00\x00\x00\x00'  # Timestamp
-                     b'\x01' + # Leaf Type
-                     b'\x00' * 4 + # Empty Signature
-                     b'\x00' * 5) # Empty Annotation
-
-  def test_encode_invalid_type(self):
-    # The version field must be a 1-byte integer.
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf()
-    leaf.version = 'x'
-    with self.assertRaises(aftltool.AftlError):
-      leaf.encode()
-
-  def test_encode_invalid_size(self):
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf()
-    leaf.version = 256
-    with self.assertRaises(aftltool.AftlError):
-      leaf.encode()
-
-
-class AftltoolTestCase(unittest.TestCase):
-
-  def setUp(self):
-    """Sets up the test bed for the unit tests."""
-    super(AftltoolTestCase, self).setUp()
-
-    # Redirects the stderr to /dev/null when running the unittests. The reason
-    # is that soong interprets any output on stderr as an error and marks the
-    # unit test as failed although the test itself succeeded.
-    self.stderr = sys.stderr
-    self.null = open(os.devnull, 'wt')
-    sys.stderr = self.null
-
-    # AFTL public key.
-    self.test_aftl_pub_key = (
-        '-----BEGIN PUBLIC KEY-----\n'
-        'MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4ilqCNsenNA013iCdwgD\n'
-        'YPxZ853nbHG9lMBp9boXiwRcqT/8bUKHIL7YX5z7s+QoRYVY3rkMKppRabclXzyx\n'
-        'H59YnPMaU4uv7NqwWzjgaZo7E+vo7IF+KBjV3cJulId5Av0yIYUCsrwd7MpGtWdC\n'
-        'Q3S+7Vd4zwzCKEhcvliNIhnNlp1U3wNkPCxOyCAsMEn6k8O5ar12ke5TvxDv15db\n'
-        'rPDeHh8G2OYWoCkWL+lSN35L2kOJqKqVbLKWrrOd96RCYrrtbPCi580OADJRcUlG\n'
-        'lgcjwmNwmypBWvQMZ6ITj0P0ksHnl1zZz1DE2rXe1goLI1doghb5KxLaezlR8c2C\n'
-        'E3w/uo9KJgNmNgUVzzqZZ6FE0moyIDNOpP7KtZAL0DvEZj6jqLbB0ccPQElrg52m\n'
-        'Dv2/A3nYSr0mYBKeskT4+Bg7PGgoC8p7WyLSxMyzJEDYdtrj9OFx6eZaA23oqTQx\n'
-        'k3Qq5H8RfNBeeSUEeKF7pKH/7gyqZ2bNzBFMA2EBZgBozwRfaeN/HCv3qbaCnwvu\n'
-        '6caacmAsK+RxiYxSL1QsJqyhCWWGxVyenmxdc1KG/u5ypi7OIioztyzR3t2tAzD3\n'
-        'Nb+2t8lgHBRxbV24yiPlnvPmB1ZYEctXnlRR9Evpl1o9xA9NnybPHKr9rozN39CZ\n'
-        'V/USB8K6ao1y5xPZxa8CZksCAwEAAQ==\n'
-        '-----END PUBLIC KEY-----\n')
-
-    # Test AftlIcpEntry #1
-    self.test_tl_url_1 = 'aftl-test-server.google.com'
-
-    self.test_sth_1 = aftltool.TrillianLogRootDescriptor()
-    self.test_sth_1.tree_size = 2
-    self.test_sth_1.root_hash_size = 32
-    self.test_sth_1.root_hash = b'f' * 32
-    self.test_sth_1.timestamp = 0x1234567890ABCDEF
-    self.test_sth_1.revision = 0xFEDCBA0987654321
-
-    self.test_sth_1_bytes = (
-        b'\x00\x01'                          # version
-        b'\x00\x00\x00\x00\x00\x00\x00\x02'  # tree_size
-        b'\x20'                              # root_hash_size
-        + b'f' * 32 +                        # root_hash
-        b'\x12\x34\x56\x78\x90\xAB\xCD\xEF'  # timestamp
-        b'\xFE\xDC\xBA\x09\x87\x65\x43\x21'  # revision
-        b'\x00\x00'                          # metadata_size
-        b''                                  # metadata (empty)
-    )
-
-    # Test Annotation #1
-    anno_1 = aftltool.VBMetaPrimaryAnnotation(vbmeta_hash=b'w'*32,
-                                              version_incremental='x'*5,
-                                              manufacturer_key_hash=b'y'*32,
-                                              description='z'*51)
-    signed_anno_1 = aftltool.SignedVBMetaPrimaryAnnotation(annotation=anno_1)
-
-    self.test_anno_1 = aftltool.SignedVBMetaPrimaryAnnotationLeaf(
-        signed_vbmeta_primary_annotation=signed_anno_1)
-    self.test_anno_1_bytes = (
-        b'\x01'                              # version
-        b'\x00\x00\x00\x00\x00\x00\x00\x00'  # timestamp
-        b'\x01'                              # leaf_type
-        b'\x00'                              # hash_algorithm
-        b'\x00'                              # signature_algorithm
-        + b'\x00\x00'                        # signature
-        + b'\x20' + b'w' * 32                # vbmeta_hash
-        + b'\x05' + b'x' * 5                 # version_incremental
-        + b'\x20' + b'y' * 32                # manufacturer_key_hash
-        + b'\x00\x33' + b'z' * 51            # description
-    )
-
-    # Fill each structure with an easily observable pattern for easy validation.
-    self.test_proof_hashes_1 = []
-    self.test_proof_hashes_1.append(b'b' * 32)
-    self.test_proof_hashes_1.append(b'c' * 32)
-    self.test_proof_hashes_1.append(b'd' * 32)
-    self.test_proof_hashes_1.append(b'e' * 32)
-
-    # Valid test AftlIcpEntry #1.
-    self.test_entry_1 = aftltool.AftlIcpEntry()
-    self.test_entry_1.log_url = self.test_tl_url_1
-    self.test_entry_1.leaf_index = 1
-    self.test_entry_1.annotation_leaf = self.test_anno_1
-    self.test_entry_1.log_root_descriptor = self.test_sth_1
-    self.test_entry_1.proofs = self.test_proof_hashes_1
-    self.test_entry_1.log_root_signature = b'g' * 512
-
-    self.test_entry_1_bytes = (
-        b'\x00\x00\x00\x1b'                  # Transparency log url size.
-        b'\x00\x00\x00\x00\x00\x00\x00\x01'  # Leaf index.
-        b'\x00\x00\x00\x3d'                  # Log root descriptor size.
-        b'\x00\x00\x00\x8b'                  # Annotation leaf size.
-        b'\x02\x00'                          # Log root signature size.
-        b'\x04'                              # Number of hashes in ICP.
-        b'\x00\x00\x00\x80'                  # Size of ICP in bytes.
-        + self.test_tl_url_1.encode('ascii') # Transparency log url.
-        + self.test_sth_1_bytes
-        + self.test_anno_1_bytes
-        + b'g' * 512                         # Log root signature.
-        + b'b' * 32                          # Hashes...
-        + b'c' * 32
-        + b'd' * 32
-        + b'e' * 32)
-
-    # Valid test AftlIcpEntry #2.
-    self.test_tl_url_2 = 'aftl-test-server.google.ch'
-
-    self.test_sth_2 = aftltool.TrillianLogRootDescriptor()
-    self.test_sth_2.tree_size = 4
-    self.test_sth_2.root_hash_size = 32
-    self.test_sth_2.root_hash = b'e' * 32
-    self.test_sth_2.timestamp = 6
-    self.test_sth_2.revision = 7
-    self.test_sth_2.metadata_size = 2
-    self.test_sth_2.metadata = b'12'
-
-    self.test_sth_2_bytes = (
-        b'\x00\x01'                          # version
-        b'\x00\x00\x00\x00\x00\x00\x00\x04'  # tree_size
-        b'\x20'                              # root_hash_size
-        + b'e' * 32 +                        # root_hash
-        b'\x00\x00\x00\x00\x00\x00\x00\x06'  # timestamp
-        b'\x00\x00\x00\x00\x00\x00\x00\x07'  # revision
-        b'\x00\x02'                          # metadata_size
-        b'12'                                # metadata
-    )
-
-    # Fill each structure with an easily observable pattern for easy validation.
-    self.test_proof_hashes_2 = []
-    self.test_proof_hashes_2.append(b'g' * 32)
-    self.test_proof_hashes_2.append(b'h' * 32)
-
-    self.test_entry_2 = aftltool.AftlIcpEntry()
-    self.test_entry_2.log_url = self.test_tl_url_2
-    self.test_entry_2.leaf_index = 2
-    self.test_entry_2.annotation_leaf = self.test_anno_1
-    self.test_entry_2.log_root_descriptor = self.test_sth_2
-    self.test_entry_2.log_root_signature = b'd' * 512
-    self.test_entry_2.proofs = self.test_proof_hashes_2
-
-    self.test_entry_2_bytes = (
-        b'\x00\x00\x00\x1a'                   # Transparency log url size.
-        b'\x00\x00\x00\x00\x00\x00\x00\x02'   # Leaf index.
-        b'\x00\x00\x00\x3f'                   # Log root descriptor size.
-        b'\x00\x00\x00\x8b'                   # Annotation leaf size.
-        b'\x02\x00'                           # Log root signature size.
-        b'\x02'                               # Number of hashes in ICP.
-        b'\x00\x00\x00\x40'                   # Size of ICP in bytes.
-        + self.test_tl_url_2.encode('ascii')  # Transparency log url.
-        + self.test_sth_2_bytes               # Log root
-        + self.test_anno_1_bytes
-        + b'd' * 512                          # Log root signature.
-        + b'g' * 32                           # Hashes...
-        + b'h' * 32)
-
-    # Valid test AftlImage made out of AftlEntry #1 and #2.
-    self.test_aftl_desc = aftltool.AftlImage()
-    self.test_aftl_desc.add_icp_entry(self.test_entry_1)
-    self.test_aftl_desc.add_icp_entry(self.test_entry_2)
-
-    self.test_expected_aftl_image_bytes = (
-        b'AFTL'                                         # Magic.
-        + struct.pack('!L', avbtool.AVB_VERSION_MAJOR)  # Major version.
-        + struct.pack('!L', avbtool.AVB_VERSION_MINOR)  # Minor version.
-        + b'\x00\x00\x06\xcf'                           # Image size.
-        b'\x00\x02'                                     # Number of ICP entries.
-        + self.test_entry_1_bytes
-        + self.test_entry_2_bytes)
-
-    self.test_avbm_resp = api_pb2.AddVBMetaResponse()
-    self.test_avbm_resp.annotation_proof.proof.leaf_index = 9127
-    hashes = [
-        '61076ca285b4982669e67757f55682ddc43ab5c11ba671260f82a8efa8831f94',
-        '89c2fbcc58da25a65ce5e9b4fb22aaf208b20601f0bc023f73f05d35bc1f3bac',
-        '75d26b5f754b4bed332a3ce2a2bfea0334706a974b7e00ee663f0279fa8b446e',
-        'e1cd9c96feb893b5ef7771e424ac1c6c47509c2b98bc578d22ad07369c9641aa',
-        'e83e0e4dd352b1670a55f93f88781a73bb41efcadb9927399f59459dfa14bc40',
-        '8d5d25996117c88655d66f685baa3c94390867a040507b10587b17fbe92b496a',
-        '5de4c627e9ca712f207d6056f56f0d3286ed4a5381ed7f3cc1aa470217734138',
-        '19acfdb424d7fe28d1f850c76302f78f9a50146a5b9c65f9fdfbbc0173fd6993']
-    for h in hashes:
-      self.test_avbm_resp.annotation_proof.proof.hashes.append(
-          binascii.unhexlify(h))
-    self.test_avbm_resp.annotation_proof.sth.key_hint = binascii.unhexlify(
-        '5af859abce8fe1ea')
-    self.test_avbm_resp.annotation_proof.sth.log_root = binascii.unhexlify(
-        '0001'
-        '00000000000023a8'
-        '20'
-        '9a5f71340f8dc98bdc6320f976dda5f34db8554cb273ba5ab60f1697c519d6f6'
-        '1609ae15024774b1'
-        '0000000000001e5a'
-        '0000'
-    )
-    self.test_avbm_resp.annotation_proof.sth.log_root_signature = (
-        binascii.unhexlify(
-            '7c37903cc76e8689a6b31da9ad56c3daeb6194029510297cc7d147278390da33'
-            '09c4d9eb1f6be0cdcd1de5315b0b3b573cc9fcd8620d3fab956abbe3c597a572'
-            '46e5a5d277c4cc4b590872d0292fa64e1d3285626b1dedeb00b6aa0a7a0717c0'
-            '7d4c89b68fda9091be06180be1369675a7c4ce7f42cca133ef0daf8dcc5ba1ee'
-            '930cef6dcb71b0a7690446e19661c8e18c089a5d6f6fc9299a0592efb33a4db5'
-            '4c640027fa4f0ad0009f8bf75ec5fc17e0fa1091fabe74fe52738443745066ab'
-            '48f99b297809b863c01016abda17a2479fce91f9929c60bc2ce15e474204fc5a'
-            '8e79b2190aadb7c149671e8c76a4da506860f8d6020fb2eaabfee025cc267bad'
-            '3c8257186c8aaf1da9eefe50cae4b3e8deb66033ebc4bfcda2b317f9e7d2dd78'
-            'b47f2d86795815d82058ad4cba8fc7983a3bbf843e9b8c7ec7f1ae137be6848d'
-            '03c76eefdac40ce5e66cc23d9f3e79ad87acbe7ec0c0bb419a7d368ae1e73c85'
-            '742871f847bde69c871e8797638e0e270282fb058ef1cbcba52aded9dcc8249b'
-            '38fbed8424c33b8cfcde4f49797c64dda8d089d73b84062602fd41c66091543c'
-            'e13c18cfa7f8300530ad4b7adb8924bbb86d17bcc5f1d3d74c522a7dcc8c3c1f'
-            '28a999f2fe1bfe5520c66f93f7c90996dc7f52e62dd95ace9ceace90324c3040'
-            '669b7f5aeb5c5a53f217f1de46e32f80d0aaaf7d9cc9d0e8f8fd7026c612103a'
-        )
-    )
-
-    anno = aftltool.VBMetaPrimaryAnnotation(
-        vbmeta_hash=bytes.fromhex(
-            '5623731104bfa1cfdb275df2978d1f93f72f5f0f746f11d06f3091c601c32067'),
-        version_incremental='only_for_testing',
-        manufacturer_key_hash=bytes.fromhex(
-            '83ab3b109b73a1d32dce4153a2de57a1a0485052db8364f3180d98614749d7f7'))
-    raw_signature = bytes.fromhex(
-        '6a523021bc5b933bb58c38c8238be3a5fe1166002f5df8b77dee9dd22d353595'
-        'be7996656d3824ebf4e1411a05ee3652d64669d3d62b167d3290dbdf4f2741ba'
-        '4b6472e1bd71fc1860465fdcdca1ff08c4ab0420d7dcbf4ad144f64e211d8f92'
-        '081ba51192358e2478195e573d000282423b23e6dd945069907dcf11520ff11a'
-        '250e26643b820f8a5d80ccfe7d5d84f58e549cd05630f2254ade8edc88d9aa8a'
-        'ec2089f84643854e1f265a4f746598ce4cae529c4eaa637f6e35fa1d1da9254e'
-        'ec8dfede7a4313f7b151547dcdde98782ce6fb3149326ee5b8e750813d3fd37a'
-        '738fe92f6111bf0dff4091769e216b842980e05716f2e50268a7dcca430e175e'
-        '711f80e41a1a28f20635741ac11a56f97492d30db6d1955a827daf8e83faebe5'
-        'a96e18a13c558ae561a02c90982514c853db0296c2e791e68b77c30e6232a3b7'
-        'ed355441d4706277f33a01735f56cb8279336491731939691683f96f1c3e3183'
-        'a0b77510d6ff0199b7688902044829793106546fd6fd4a5294d63c31c91256ad'
-        'f7be6d053e77875698ad32ffaaeaac5d54b432e537f72549d2543072ae35578f'
-        '138d82afcadd668511ba276ce02b6f9c18ef3b6f2f6ae0d123e9f8cb930f21a9'
-        'c49a6d9e95de741c7860593a956735e1b77e9851ecb1f6572abf6e2c8ba15085'
-        'e37e0f7bab0a30d108b997ed5edd74cf7f89cf082590a6f0af7a3a1f68c0077a')
-    signature = aftltool.Signature(signature=raw_signature)
-    signed_anno = aftltool.SignedVBMetaPrimaryAnnotation(annotation=anno,
-                                                         signature=signature)
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf(
-        timestamp=1587991742919072870,
-        signed_vbmeta_primary_annotation=signed_anno).encode()
-    self.test_avbm_resp.annotation_leaf = leaf
-
-
-  def tearDown(self):
-    """Tears down the test bed for the unit tests."""
-    # Reconnects stderr back to the normal stderr; see setUp() for details.
-    sys.stderr = self.stderr
-    self.null.close()
-
-    super(AftltoolTestCase, self).tearDown()
-
-  def get_testdata_path(self, relative_path):
-    """Retrieves the absolute path for testdata given the relative path.
-
-    Arguments:
-      relative_path: The relative path to the testdata in the testdata
-        directory.
-
-    Returns:
-      The absolute path to the testdata.
-    """
-    rel_path_parts = ['test', 'data']
-    rel_path_parts.extend(relative_path.split(os.path.sep))
-    return os.path.join(TEST_EXEC_PATH, *rel_path_parts)
-
-
-class AftltoolTest(AftltoolTestCase):
-
-  def test_merkle_root_hash(self):
-    """Tests inclusion proof validation and the Merkle tree calculations.
-
-    The test vectors have been taken from the Trillian tests:
-    https://github.com/google/trillian/blob/v1.3.3/merkle/log_verifier_test.go
-    """
-
-    inclusion_proofs = [
-        (1,
-         8,
-         [
-             binascii.unhexlify('96a296d224f285c67bee93c30f8a3091'
-                                '57f0daa35dc5b87e410b78630a09cfc7'),
-             binascii.unhexlify('5f083f0a1a33ca076a95279832580db3'
-                                'e0ef4584bdff1f54c8a360f50de3031e'),
-             binascii.unhexlify('6b47aaf29ee3c2af9af889bc1fb9254d'
-                                'abd31177f16232dd6aab035ca39bf6e4')
-         ]),
-        (6,
-         8,
-         [
-             binascii.unhexlify('bc1a0643b12e4d2d7c77918f44e0f4f7'
-                                '9a838b6cf9ec5b5c283e1f4d88599e6b'),
-             binascii.unhexlify('ca854ea128ed050b41b35ffc1b87b8eb'
-                                '2bde461e9e3b5596ece6b9d5975a0ae0'),
-             binascii.unhexlify('d37ee418976dd95753c1c73862b9398f'
-                                'a2a2cf9b4ff0fdfe8b30cd95209614b7')
-         ]),
-        (3,
-         3,
-         [
-             binascii.unhexlify('fac54203e7cc696cf0dfcb42c92a1d9d'
-                                'baf70ad9e621f4bd8d98662f00e3c125')
-         ]),
-        (2,
-         5,
-         [
-             binascii.unhexlify('6e340b9cffb37a989ca544e6bb780a2c'
-                                '78901d3fb33738768511a30617afa01d'),
-             binascii.unhexlify('5f083f0a1a33ca076a95279832580db3'
-                                'e0ef4584bdff1f54c8a360f50de3031e'),
-             binascii.unhexlify('bc1a0643b12e4d2d7c77918f44e0f4f7'
-                                '9a838b6cf9ec5b5c283e1f4d88599e6b')
-         ]
-        )
-    ]
-
-    leaves = [
-        binascii.unhexlify(''),
-        binascii.unhexlify('00'),
-        binascii.unhexlify('10'),
-        binascii.unhexlify('2021'),
-        binascii.unhexlify('3031'),
-        binascii.unhexlify('40414243'),
-        binascii.unhexlify('5051525354555657'),
-        binascii.unhexlify('606162636465666768696a6b6c6d6e6f'),
-    ]
-
-    roots = [
-        binascii.unhexlify('6e340b9cffb37a989ca544e6bb780a2c'
-                           '78901d3fb33738768511a30617afa01d'),
-        binascii.unhexlify('fac54203e7cc696cf0dfcb42c92a1d9d'
-                           'baf70ad9e621f4bd8d98662f00e3c125'),
-        binascii.unhexlify('aeb6bcfe274b70a14fb067a5e5578264'
-                           'db0fa9b51af5e0ba159158f329e06e77'),
-        binascii.unhexlify('d37ee418976dd95753c1c73862b9398f'
-                           'a2a2cf9b4ff0fdfe8b30cd95209614b7'),
-        binascii.unhexlify('4e3bbb1f7b478dcfe71fb631631519a3'
-                           'bca12c9aefca1612bfce4c13a86264d4'),
-        binascii.unhexlify('76e67dadbcdf1e10e1b74ddc608abd2f'
-                           '98dfb16fbce75277b5232a127f2087ef'),
-        binascii.unhexlify('ddb89be403809e325750d3d263cd7892'
-                           '9c2942b7942a34b77e122c9594a74c8c'),
-        binascii.unhexlify('5dc9da79a70659a9ad559cb701ded9a2'
-                           'ab9d823aad2f4960cfe370eff4604328'),
-    ]
-
-    for icp in inclusion_proofs:
-      leaf_id = icp[0] - 1
-      leaf_hash = aftltool.rfc6962_hash_leaf(leaves[leaf_id])
-      root_hash = aftltool.root_from_icp(leaf_id, icp[1], icp[2], leaf_hash)
-      self.assertEqual(root_hash, roots[icp[1] - 1])
-
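# A minimal sketch of the RFC 6962 hashing and inclusion-proof check that the
# test vectors above exercise. It mirrors the standard Trillian verifier
# algorithm; aftltool's rfc6962_hash_leaf() and root_from_icp() remain the
# implementations actually under test.
import hashlib

def rfc6962_hash_leaf(leaf):
    # Leaf hashes are domain-separated with a 0x00 prefix.
    return hashlib.sha256(b'\x00' + leaf).digest()

def rfc6962_hash_children(left, right):
    # Interior node hashes are domain-separated with a 0x01 prefix.
    return hashlib.sha256(b'\x01' + left + right).digest()

def root_from_inclusion_proof(leaf_index, tree_size, proof, leaf_hash):
    # Walks from the leaf towards the root, combining with the next proof
    # hash on the left or right depending on the node's position.
    node, last = leaf_index, tree_size - 1
    digest, pos = leaf_hash, 0
    while last > 0:
        if node % 2 == 1:
            digest = rfc6962_hash_children(proof[pos], digest)
            pos += 1
        elif node < last:
            digest = rfc6962_hash_children(digest, proof[pos])
            pos += 1
        node, last = node // 2, last // 2
    return digest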
-
-class AftlImageTest(AftltoolTestCase):
-
-  def test__init__(self):
-    """Tests the constructor."""
-    # Calls constructor without data.
-    d = aftltool.AftlImage()
-    self.assertIsInstance(d.image_header, aftltool.AftlImageHeader)
-    self.assertEqual(d.image_header.icp_count, 0)
-    self.assertEqual(d.icp_entries, [])
-    self.assertTrue(d.is_valid())
-
-    # Calls constructor with data.
-    d = aftltool.AftlImage(self.test_expected_aftl_image_bytes)
-    self.assertIsInstance(d.image_header, aftltool.AftlImageHeader)
-    self.assertEqual(d.image_header.icp_count, 2)
-    self.assertEqual(len(d.icp_entries), 2)
-    for entry in d.icp_entries:
-      self.assertIsInstance(entry, aftltool.AftlIcpEntry)
-    self.assertTrue(d.is_valid())
-
-  def test_add_icp_entry(self):
-    """Tests the add_icp_entry method."""
-    d = aftltool.AftlImage()
-
-    # Adds 1st ICP.
-    d.add_icp_entry(self.test_entry_1)
-    self.assertEqual(d.image_header.icp_count, 1)
-    self.assertEqual(len(d.icp_entries), 1)
-    self.assertTrue(d.is_valid())
-
-    # Adds 2nd ICP.
-    d.add_icp_entry(self.test_entry_2)
-    self.assertEqual(d.image_header.icp_count, 2)
-    self.assertEqual(len(d.icp_entries), 2)
-    self.assertTrue(d.is_valid())
-
-  def test_verify_vbmeta_image_with_1_icp(self):
-    """Tests the verify_vbmeta_image method."""
-    # Valid vbmeta image without footer with 1 ICP.
-    tool = aftltool.Aftl()
-    image_path = self.get_testdata_path(
-        'aftl_output_vbmeta_with_1_icp.img')
-    vbmeta_image, _ = tool.get_vbmeta_image(image_path)
-    desc = tool.get_aftl_image(image_path)
-
-    # Valid image checked against correct log key.
-    self.assertTrue(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('aftl_pubkey_1.pem')]))
-
-    # Valid image checked with a key from another log.
-    self.assertFalse(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('testkey_rsa4096_pub.pem')]))
-
-    # Valid image checked with a non-existent key file path.
-    self.assertFalse(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('non_existent_blabli')]))
-
-    # Valid image checked with an invalid key.
-    self.assertFalse(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('large_blob.bin')]))
-
-    # Valid image checked with empty list of keys.
-    self.assertFalse(desc.verify_vbmeta_image(vbmeta_image, []))
-
-    # Valid image checked with keys set to None.
-    self.assertFalse(desc.verify_vbmeta_image(vbmeta_image, None))
-
-  def test_verify_vbmeta_image_with_2_icp_from_same_log(self):
-    """Tests the verify_vbmeta_image method."""
-    # Valid vbmeta image without footer with 2 ICPs from same log.
-    tool = aftltool.Aftl()
-    image_path = self.get_testdata_path(
-        'aftl_output_vbmeta_with_2_icp_same_log.img')
-    vbmeta_image, _ = tool.get_vbmeta_image(image_path)
-    desc = tool.get_aftl_image(image_path)
-
-    # Valid image checked against correct log key.
-    self.assertTrue(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('aftl_pubkey_1.pem')]))
-
-    # Valid vbmeta image checked with key from another log.
-    self.assertFalse(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('testkey_rsa4096_pub.pem')]))
-
-    # Valid image checked with a non-existent key file path.
-    self.assertFalse(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('non_existent_blabli')]))
-
-    # Valid image checked with invalid key.
-    self.assertFalse(desc.verify_vbmeta_image(
-        vbmeta_image, [self.get_testdata_path('large_blob.bin')]))
-
-    # Valid image but checked with empty list of keys.
-    self.assertFalse(desc.verify_vbmeta_image(vbmeta_image, []))
-
-  def test_encode(self):
-    """Tests encode method."""
-    desc_bytes = self.test_aftl_desc.encode()
-    self.assertEqual(desc_bytes, self.test_expected_aftl_image_bytes)
-
-  def test_is_valid(self):
-    """Tests is_valid method."""
-    d = aftltool.AftlImage()
-    d.add_icp_entry(self.test_entry_1)
-    d.add_icp_entry(self.test_entry_2)
-
-    # Force invalid ICP header.
-    old_magic = d.image_header.magic
-    d.image_header.magic = b'YOLO'
-    self.assertFalse(d.is_valid())
-    d.image_header.magic = old_magic
-    self.assertTrue(d.is_valid())
-
-    # Force count mismatch between header and actual entries.
-    old_icp_count = d.image_header.icp_count
-    d.image_header.icp_count = 1
-    self.assertFalse(d.is_valid())
-    d.image_header.icp_count = old_icp_count
-    self.assertTrue(d.is_valid())
-
-    # Force invalid ICP entry.
-    old_leaf_index = d.icp_entries[0].leaf_index
-    d.icp_entries[0].leaf_index = -10
-    self.assertFalse(d.is_valid())
-    d.icp_entries[0].leaf_index = old_leaf_index
-    self.assertTrue(d.is_valid())
-
-  def test_print_desc(self):
-    """Tests print_desc method."""
-    buf = io.StringIO()
-    self.test_aftl_desc.print_desc(buf)
-    desc = buf.getvalue()
-
-    # Cursory check whether the printed description contains something useful.
-    self.assertGreater(len(desc), 0)
-    self.assertIn('Log Root Descriptor:', desc)
-
-
-class AftlImageHeaderTest(AftltoolTestCase):
-  """Test suite for testing the AftlImageHeader descriptor."""
-
-  def setUp(self):
-    """Sets up the test bed for the unit tests."""
-    super(AftlImageHeaderTest, self).setUp()
-
-    self.test_header_valid = aftltool.AftlImageHeader()
-    self.test_header_valid.icp_count = 1
-
-    self.test_header_invalid = aftltool.AftlImageHeader()
-    self.test_header_invalid.icp_count = -34
-
-    self.test_header_bytes = (
-        b'AFTL'                                         # Magic.
-        + struct.pack('!L', avbtool.AVB_VERSION_MAJOR)  # Major version.
-        + struct.pack('!L', avbtool.AVB_VERSION_MINOR)  # Minor version.
-        + b'\x00\x00\x00\x12'                           # Image size.
-        b'\x00\x01')                                    # Number of ICP entries.
-
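# A hedged sketch of the 18-byte header layout implied by test_header_bytes
# above: big-endian magic, major version, minor version, total image size and
# ICP entry count. The struct format string is an illustration only;
# aftltool.AftlImageHeader is the authoritative encoder/decoder.
import struct

AFTL_IMAGE_HEADER_FORMAT = '!4sLLLH'  # magic, major, minor, image size, count
assert struct.calcsize(AFTL_IMAGE_HEADER_FORMAT) == 18  # 0x12, as encoded above

def unpack_aftl_image_header(data):
    magic, major, minor, image_size, icp_count = struct.unpack(
        AFTL_IMAGE_HEADER_FORMAT, data[:18])
    return magic, major, minor, image_size, icp_count
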
-  def test__init__(self):
-    """Tests constructor."""
-
-    # Calls constructor without data.
-    header = aftltool.AftlImageHeader()
-    self.assertEqual(header.magic, b'AFTL')
-    self.assertEqual(header.required_icp_version_major,
-                     avbtool.AVB_VERSION_MAJOR)
-    self.assertEqual(header.required_icp_version_minor,
-                     avbtool.AVB_VERSION_MINOR)
-    self.assertEqual(header.aftl_image_size, aftltool.AftlImageHeader.SIZE)
-    self.assertEqual(header.icp_count, 0)
-    self.assertTrue(header.is_valid())
-
-    # Calls constructor with data.
-    header = aftltool.AftlImageHeader(self.test_header_bytes)
-    self.assertEqual(header.magic, b'AFTL')
-    self.assertEqual(header.required_icp_version_major,
-                     avbtool.AVB_VERSION_MAJOR)
-    self.assertEqual(header.required_icp_version_minor,
-                     avbtool.AVB_VERSION_MINOR)
-    self.assertEqual(header.aftl_image_size, aftltool.AftlImageHeader.SIZE)
-    self.assertEqual(header.icp_count, 1)
-    self.assertTrue(header.is_valid())
-
-  def test_encode(self):
-    """Tests encode method."""
-    # Valid header.
-    header_bytes = self.test_header_valid.encode()
-    self.assertEqual(header_bytes, self.test_header_bytes)
-
-    # Invalid header
-    with self.assertRaises(aftltool.AftlError):
-      header_bytes = self.test_header_invalid.encode()
-
-  def test_is_valid(self):
-    """Tests is_valid method."""
-    # Valid default record.
-    header = aftltool.AftlImageHeader()
-    self.assertTrue(header.is_valid())
-
-    # Invalid magic.
-    header = aftltool.AftlImageHeader()
-    header.magic = b'YOLO'
-    self.assertFalse(header.is_valid())
-
-    # Valid ICP count.
-    self.assertTrue(self.test_header_valid.is_valid())
-
-    # Invalid ICP count.
-    self.assertFalse(self.test_header_invalid.is_valid())
-
-    header = aftltool.AftlImageHeader()
-    header.icp_count = 10000000
-    self.assertFalse(header.is_valid())
-
-    # Invalid ICP major version.
-    header = aftltool.AftlImageHeader()
-    header.required_icp_version_major = avbtool.AVB_VERSION_MAJOR + 1
-    self.assertFalse(header.is_valid())
-
-    # Invalid ICP minor version.
-    header = aftltool.AftlImageHeader()
-    header.required_icp_version_minor = avbtool.AVB_VERSION_MINOR + 1
-    self.assertFalse(header.is_valid())
-
-  def test_print_desc(self):
-    """Tests print_desc method."""
-    buf = io.StringIO()
-    self.test_header_valid.print_desc(buf)
-    desc = buf.getvalue()
-
-    # Cursory check whether the printed description contains something useful.
-    self.assertGreater(len(desc), 0)
-    self.assertIn('Major version:', desc)
-
-
-class AftlIcpEntryTest(AftltoolTestCase):
-  """Test suite for testing the AftlIcpEntry descriptor."""
-
-  def test__init__and_properties(self):
-    """Tests the constructor and the property methods."""
-
-    # Calls constructor without data.
-    entry = aftltool.AftlIcpEntry()
-    self.assertEqual(entry.log_url_size, 0)
-    self.assertEqual(entry.leaf_index, 0)
-    self.assertEqual(entry.log_root_descriptor_size, 29)
-    self.assertEqual(entry.annotation_leaf_size, 19)
-    self.assertEqual(entry.log_root_sig_size, 0)
-    self.assertEqual(entry.proof_hash_count, 0)
-    self.assertEqual(entry.inc_proof_size, 0)
-    self.assertEqual(entry.log_url, '')
-    self.assertIsInstance(entry.log_root_descriptor,
-                          aftltool.TrillianLogRootDescriptor)
-    self.assertEqual(entry.proofs, [])
-    self.assertTrue(entry.is_valid())
-
-    # Calls constructor with data.
-    entry = aftltool.AftlIcpEntry(self.test_entry_1_bytes)
-    self.assertEqual(entry.log_url_size, len(self.test_tl_url_1))
-    self.assertEqual(entry.leaf_index, 1)
-    self.assertEqual(entry.annotation_leaf_size, 139)
-    self.assertEqual(entry.log_root_sig_size, 512)
-    self.assertEqual(entry.proof_hash_count, len(self.test_proof_hashes_1))
-    self.assertEqual(entry.inc_proof_size, 128)
-    self.assertEqual(entry.log_url, self.test_tl_url_1)
-    self.assertEqual(entry.proofs, self.test_proof_hashes_1)
-    self.assertTrue(entry.is_valid())
-
-  def test_encode(self):
-    """Tests encode method."""
-    entry_bytes = self.test_entry_1.encode()
-    self.assertEqual(entry_bytes, self.test_entry_1_bytes)
-
-  def test_get_expected_size(self):
-    """Tests get_expected_size method."""
-    # Default record.
-    entry = aftltool.AftlIcpEntry()
-    self.assertEqual(entry.get_expected_size(), 75)
-    self.assertEqual(entry.get_expected_size(), len(entry.encode()))
-
-    # Test record.
-    self.assertEqual(self.test_entry_1.get_expected_size(), 894)
-    self.assertEqual(self.test_entry_1.get_expected_size(),
-                     len(self.test_entry_1.encode()))
-
-  def test_is_valid(self):
-    """Tests is_valid method."""
-    # Valid default record.
-    entry = aftltool.AftlIcpEntry()
-    entry.leaf_index = 2
-    entry.log_url = self.test_tl_url_1
-    entry.log_root_descriptor = self.test_sth_1
-    entry.proofs = self.test_proof_hashes_1
-    self.assertTrue(entry.is_valid())
-
-    # Invalid leaf index.
-    entry = aftltool.AftlIcpEntry()
-    entry.leaf_index = -1
-    self.assertFalse(entry.is_valid())
-
-    # Invalid log_root_descriptor
-    entry = aftltool.AftlIcpEntry()
-    entry.log_root_descriptor = None
-    self.assertFalse(entry.is_valid())
-
-    entry.log_root_descriptor = b''
-    self.assertFalse(entry.is_valid())
-
-    entry.log_root_descriptor = b'blabli'
-    self.assertFalse(entry.is_valid())
-
-  def test_translate_response(self):
-    """Tests translate_response method."""
-    entry = aftltool.AftlIcpEntry()
-    entry.translate_response('aftl-test.foo.bar:80', self.test_avbm_resp)
-    self.assertEqual(entry.log_url, 'aftl-test.foo.bar:80')
-    self.assertEqual(entry.leaf_index, 9127)
-    self.assertEqual(entry.log_root_descriptor.encode(),
-                     self.test_avbm_resp.annotation_proof.sth.log_root)
-    self.assertEqual(
-        entry.log_root_signature,
-        self.test_avbm_resp.annotation_proof.sth.log_root_signature)
-    self.assertEqual(
-        entry.proofs,
-        self.test_avbm_resp.annotation_proof.proof.hashes)
-
-  def test_verify_icp(self):
-    """Tests verify_icp method."""
-    with tempfile.NamedTemporaryFile('wt+') as key_file:
-      key_file.write(self.test_aftl_pub_key)
-      key_file.flush()
-
-      # Valid ICP.
-      entry = aftltool.AftlIcpEntry()
-      entry.translate_response(self.test_tl_url_1, self.test_avbm_resp)
-      self.assertTrue(entry.verify_icp(key_file.name))
-
-      # Invalid ICP where the annotation_leaf does not match the proofs.
-      # pylint: disable=protected-access
-      entry = aftltool.AftlIcpEntry()
-      entry.translate_response(self.test_tl_url_1, self.test_avbm_resp)
-      vbmeta_hash = entry.annotation_leaf.annotation.vbmeta_hash
-      vbmeta_hash = vbmeta_hash.replace(b"\x56\x23\x73\x11",
-                                        b"\x00\x00\x00\x00")
-      entry.annotation_leaf.annotation.vbmeta_hash = vbmeta_hash
-      self.assertFalse(entry.verify_icp(key_file.name))
-
-  def test_verify_vbmeta_image(self):
-    """Tests the verify_vbmeta_image method."""
-    # Valid vbmeta image without footer with 1 ICP.
-    tool = aftltool.Aftl()
-    image_path = self.get_testdata_path(
-        'aftl_output_vbmeta_with_1_icp.img')
-    vbmeta_image, _ = tool.get_vbmeta_image(image_path)
-    desc = tool.get_aftl_image(image_path)
-
-    # Checks that there is 1 ICP.
-    self.assertEqual(desc.image_header.icp_count, 1)
-    entry = desc.icp_entries[0]
-
-    # Valid vbmeta image checked with correct log key.
-    self.assertTrue(entry.verify_vbmeta_image(
-        vbmeta_image, self.get_testdata_path('aftl_pubkey_1.pem')))
-
-    # Valid vbmeta image checked with public key of another log.
-    self.assertFalse(entry.verify_vbmeta_image(
-        vbmeta_image, self.get_testdata_path('testkey_rsa4096_pub.pem')))
-
-    # Valid vbmeta image checked with invalid key.
-    self.assertFalse(entry.verify_vbmeta_image(
-        vbmeta_image, self.get_testdata_path('large_blob.bin')))
-
-    # Valid vbmeta image checked with no key.
-    self.assertFalse(entry.verify_vbmeta_image(vbmeta_image, None))
-
-  def test_verify_invalid_vbmeta_image(self):
-    """Tests the verify_vbmeta_image method."""
-    # Valid vbmeta image without footer with 1 ICP.
-    tool = aftltool.Aftl()
-    image_path = self.get_testdata_path(
-        'aftl_output_vbmeta_with_1_icp.img')
-    vbmeta_image, _ = tool.get_vbmeta_image(image_path)
-    desc = tool.get_aftl_image(image_path)
-
-    self.assertEqual(desc.image_header.icp_count, 1)
-    entry = desc.icp_entries[0]
-
-    # Modify the vbmeta image to make it invalid.
-    vbmeta_image = b'A' * len(vbmeta_image)
-
-    # Invalid vbmeta image checked with correct log key.
-    self.assertFalse(entry.verify_vbmeta_image(
-        vbmeta_image, self.get_testdata_path('aftl_pubkey_1.pem')))
-
-    # Invalid vbmeta image checked with invalid key.
-    self.assertFalse(entry.verify_vbmeta_image(
-        vbmeta_image, self.get_testdata_path('large_blob.bin')))
-
-    # Invalid vbmeta image checked with no key.
-    self.assertFalse(entry.verify_vbmeta_image(vbmeta_image, None))
-
-    # None image checked with a key.
-    self.assertFalse(entry.verify_vbmeta_image(
-        None, self.get_testdata_path('aftl_pubkey_1.pem')))
-
-  def test_print_desc(self):
-    """Tests print_desc method."""
-    buf = io.StringIO()
-    self.test_entry_1.print_desc(buf)
-    desc = buf.getvalue()
-
-    # Cursory check whether the printed description contains something useful.
-    self.assertGreater(len(desc), 0)
-    self.assertIn('ICP hashes:', desc)
-
-
-class TrillianLogRootDescriptorTest(AftltoolTestCase):
-  """Test suite for testing the TrillianLogRootDescriptor descriptor."""
-
-  def setUp(self):
-    """Sets up the test bed for the unit tests."""
-    super(TrillianLogRootDescriptorTest, self).setUp()
-
-    # Creates basic log root without metadata fields.
-    base_log_root = (
-        '0001'                              # version
-        '00000000000002e5'                  # tree_size
-        '20'                                # root_hash_size
-        '2d614759ad408a111a3351c0cb33c099'  # root_hash
-        '422c30a5c5104788a343332bde2b387b'
-        '15e1c97e3b4bd239'                  # timestamp
-        '00000000000002e4'                  # revision
-    )
-
-    # Creates valid log roots w/ and w/o metadata.
-    self.test_log_root_bytes_wo_metadata = binascii.unhexlify(
-        base_log_root + '0000')
-    self.test_log_root_bytes_with_metadata = binascii.unhexlify(
-        base_log_root + '00023132')
-
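# A hedged sketch of the log_root wire format assembled above: a big-endian
# header (version, tree_size, root_hash_size), the variable-length root hash,
# then timestamp, revision and length-prefixed metadata. Field widths are read
# off the hex comments above; aftltool.TrillianLogRootDescriptor does the real
# parsing and validation.
import struct

def parse_log_root(data):
    version, tree_size, root_hash_size = struct.unpack('!HQB', data[:11])
    offset = 11
    root_hash = data[offset:offset + root_hash_size]
    offset += root_hash_size
    timestamp, revision, metadata_size = struct.unpack(
        '!QQH', data[offset:offset + 18])
    offset += 18
    metadata = data[offset:offset + metadata_size]
    return version, tree_size, root_hash, timestamp, revision, metadata

# For test_log_root_bytes_wo_metadata this yields tree_size 741, a 32-byte
# root hash, revision 740 and empty metadata.
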
-  def test__init__(self):
-    """Tests constructor."""
-    # Calls constructor without data.
-    d = aftltool.TrillianLogRootDescriptor()
-    self.assertTrue(d.is_valid())
-    self.assertEqual(d.version, 1)
-    self.assertEqual(d.tree_size, 0)
-    self.assertEqual(d.root_hash_size, 0)
-    self.assertEqual(d.root_hash, b'')
-    self.assertEqual(d.timestamp, 0)
-    self.assertEqual(d.revision, 0)
-    self.assertEqual(d.metadata_size, 0)
-    self.assertEqual(d.metadata, b'')
-
-    # Calls constructor with log_root w/o metadata
-    d = aftltool.TrillianLogRootDescriptor(self.test_log_root_bytes_wo_metadata)
-    self.assertTrue(d.is_valid())
-    self.assertEqual(d.version, 1)
-    self.assertEqual(d.tree_size, 741)
-    self.assertEqual(d.root_hash_size, 32)
-    self.assertEqual(d.root_hash,
-                     binascii.unhexlify('2d614759ad408a111a3351c0cb33c099'
-                                        '422c30a5c5104788a343332bde2b387b'))
-    self.assertEqual(d.timestamp, 1576762888554271289)
-    self.assertEqual(d.revision, 740)
-    self.assertEqual(d.metadata_size, 0)
-    self.assertEqual(d.metadata, b'')
-
-    # Calls constructor with log_root with metadata
-    d = aftltool.TrillianLogRootDescriptor(
-        self.test_log_root_bytes_with_metadata)
-    self.assertEqual(d.metadata_size, 2)
-    self.assertEqual(d.metadata, b'12')
-
-  def test_get_expected_size(self):
-    """Tests get_expected_size method."""
-    # Default constructor.
-    d = aftltool.TrillianLogRootDescriptor()
-    self.assertEqual(d.get_expected_size(), 11 + 18)
-
-    # Log root without metadata.
-    d = aftltool.TrillianLogRootDescriptor(self.test_log_root_bytes_wo_metadata)
-    self.assertEqual(d.get_expected_size(), 11 + 18 + 32)
-
-    # Log root with metadata.
-    d = aftltool.TrillianLogRootDescriptor(
-        self.test_log_root_bytes_with_metadata)
-    self.assertEqual(d.get_expected_size(), 11 + 18 + 32 + 2)
-
-  def test_encode(self):
-    """Tests encode method."""
-    # Log root from default constructor.
-    d = aftltool.TrillianLogRootDescriptor()
-    expected_bytes = (
-        '0001'                              # version
-        '0000000000000000'                  # tree_size
-        '00'                                # root_hash_size
-        ''                                  # root_hash (empty)
-        '0000000000000000'                  # timestamp
-        '0000000000000000'                  # revision
-        '0000'                              # metadata size
-        ''                                  # metadata (empty)
-    )
-    self.assertEqual(d.encode(), binascii.unhexlify(expected_bytes))
-
-    # Log root without metadata.
-    d = aftltool.TrillianLogRootDescriptor(self.test_log_root_bytes_wo_metadata)
-    self.assertEqual(d.encode(), self.test_log_root_bytes_wo_metadata)
-
-    # Log root with metadata.
-    d = aftltool.TrillianLogRootDescriptor(
-        self.test_log_root_bytes_with_metadata)
-    self.assertEqual(d.encode(), self.test_log_root_bytes_with_metadata)
-
-  def test_is_valid(self):
-    """Tests is_valid method."""
-    d = aftltool.TrillianLogRootDescriptor()
-    self.assertTrue(d.is_valid())
-
-    # Invalid version.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.version = 2
-    self.assertFalse(d.is_valid())
-
-    # Invalid tree_size.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.tree_size = -1
-    self.assertFalse(d.is_valid())
-
-    # Invalid root_hash_size.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.root_hash_size = -1
-    self.assertFalse(d.is_valid())
-    d.root_hash_size = 300
-    self.assertFalse(d.is_valid())
-
-    # Invalid/valid root_hash_size / root_hash combination.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.root_hash_size = 4
-    d.root_hash = b'123'
-    self.assertFalse(d.is_valid())
-    d.root_hash = b'1234'
-    self.assertTrue(d.is_valid())
-
-    # Invalid timestamp.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.timestamp = -1
-    self.assertFalse(d.is_valid())
-
-    # Invalid revision.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.revision = -1
-    self.assertFalse(d.is_valid())
-
-    # Invalid metadata_size.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.metadata_size = -1
-    self.assertFalse(d.is_valid())
-    d.metadata_size = 70000
-    self.assertFalse(d.is_valid())
-
-    # Invalid/valid metadata_size / metadata combination.
-    d = aftltool.TrillianLogRootDescriptor()
-    d.metadata_size = 4
-    d.metadata = b'123'
-    self.assertFalse(d.is_valid())
-    d.metadata = b'1234'
-    self.assertTrue(d.is_valid())
-
-  def test_print_desc(self):
-    """Tests print_desc method."""
-    # Log root without metadata
-    buf = io.StringIO()
-    d = aftltool.TrillianLogRootDescriptor(self.test_log_root_bytes_wo_metadata)
-    d.print_desc(buf)
-    desc = buf.getvalue()
-
-    # Cursory check whether the printed description contains something useful.
-    self.assertGreater(len(desc), 0)
-    self.assertIn('Version:', desc)
-    self.assertNotIn('Metadata:', desc)
-
-    # Log root with metadata
-    buf = io.StringIO()
-    d = aftltool.TrillianLogRootDescriptor(
-        self.test_log_root_bytes_with_metadata)
-    d.print_desc(buf)
-    desc = buf.getvalue()
-
-    # Cursory check whether the printed description contains something useful.
-    self.assertGreater(len(desc), 0)
-    self.assertIn('Version:', desc)
-    self.assertIn('Metadata:', desc)
-
-
-class SignedVBMetaPrimaryAnnotationLeafTest(AftltoolTestCase):
-  """Test suite for testing the Leaf."""
-
-  def test__init__(self):
-    """Tests the constructor and the property methods."""
-    # Calls constructor without data.
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf()
-    self.assertEqual(leaf.version, 1)
-    self.assertEqual(leaf.timestamp, 0)
-    self.assertEqual(leaf.signature.signature, b'')
-    self.assertEqual(leaf.annotation.vbmeta_hash, b'')
-    self.assertEqual(leaf.annotation.description, '')
-
-  def test_parse(self):
-    # Calls parse with valid data.
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf.parse(
-        self.test_anno_1_bytes)
-    self.assertEqual(leaf.annotation.vbmeta_hash, b'w'*32)
-    self.assertEqual(leaf.annotation.version_incremental, 'x'*5)
-    self.assertEqual(leaf.annotation.manufacturer_key_hash, b'y'*32)
-    self.assertEqual(leaf.annotation.description, 'z'*51)
-
-    # Calls parse with invalid data.
-    with self.assertRaises(aftltool.AftlError):
-      leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf.parse(b'Invalid data')
-
-  def test_get_expected_size(self):
-    """Tests get_expected_size method."""
-    # Calls constructor without data.
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf()
-    self.assertEqual(leaf.get_expected_size(), 19)
-
-    # Calls constructor with data.
-    leaf = aftltool.SignedVBMetaPrimaryAnnotationLeaf.parse(
-        self.test_anno_1_bytes)
-    self.assertEqual(leaf.get_expected_size(),
-                     len(self.test_anno_1_bytes))
-
-  def test_encode(self):
-    """Tests encode method."""
-    # Calls constructor with data.
-    self.assertEqual(self.test_anno_1.encode(),
-                     self.test_anno_1_bytes)
-
-  def test_print_desc(self):
-    """Tests print_desc method."""
-    buf = io.StringIO()
-    self.test_anno_1.print_desc(buf)
-    desc = buf.getvalue()
-
-    # Cursory check whether the printed description contains something useful.
-    self.assertGreater(len(desc), 0)
-    self.assertIn('VBMeta hash:', desc)
-
-
-class AftlMockCommunication(aftltool.AftlCommunication):
-  """Testing Mock implementation of AftlCommunication."""
-
-  def __init__(self, transparency_log_config, canned_response):
-    """Initializes the object.
-
-    Arguments:
-      transparency_log_config: An aftltool.TransparencyLogConfig instance.
-      canned_response: AddVBMetaResponse to return or the Exception to
-        raise.
-    """
-    super(AftlMockCommunication, self).__init__(transparency_log_config,
-                                                timeout=None)
-    self.request = None
-    self.canned_response = canned_response
-
-  def add_vbmeta(self, request):
-    """Records the request and returns the canned response."""
-    self.request = request
-
-    if isinstance(self.canned_response, aftltool.AftlError):
-      raise self.canned_response
-    return self.canned_response
-
-
-class AftlMock(aftltool.Aftl):
-  """Mock for aftltool.Aftl to mock the communication piece."""
-
-  def __init__(self, canned_response):
-    """Initializes the object.
-
-    Arguments:
-      canned_response: AddVBMetaResponse to return or the Exception to
-        raise.
-    """
-    self.mock_canned_response = canned_response
-
-  def request_inclusion_proof(self, transparency_log_config, vbmeta_image,
-                              version_inc, manufacturer_key_path,
-                              signing_helper, signing_helper_with_files,
-                              timeout, aftl_comms=None):
-    """Mocked request_inclusion_proof function."""
-    aftl_comms = AftlMockCommunication(transparency_log_config,
-                                       self.mock_canned_response)
-    return super(AftlMock, self).request_inclusion_proof(
-        transparency_log_config, vbmeta_image, version_inc,
-        manufacturer_key_path, signing_helper, signing_helper_with_files,
-        timeout, aftl_comms=aftl_comms)
-
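# A brief usage sketch of the canned-response pattern above: the mock either
# hands back the prepared AddVBMetaResponse or raises the prepared AftlError,
# so both the success and the failure path can be exercised without a network.
# The host name and key file below are placeholders for illustration.
def _example_mock_usage(canned_response):
    log_config = aftltool.TransparencyLogConfig(
        'log.example.com:9000', 'aftl_pubkey_1.pem', None)
    comms = AftlMockCommunication(log_config, canned_response)
    assert comms.add_vbmeta(request=None) is canned_response

    failing = AftlMockCommunication(log_config, aftltool.AftlError('no log'))
    try:
        failing.add_vbmeta(request=None)
    except aftltool.AftlError:
        pass  # Expected: the canned exception is raised instead of returned.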
-
-class AftlTestCase(AftltoolTestCase):
-
-  def setUp(self):
-    """Sets up the test bed for the unit tests."""
-    super(AftlTestCase, self).setUp()
-
-    # Sets up the member variables which are then configured by
-    # set_up_environment() in the subclasses.
-    self.aftl_host = None
-    self.aftl_pubkey = None
-    self.aftl_apikey = None
-    self.vbmeta_image = None
-    self.manufacturer_key = None
-    self.set_up_environment()
-
-    self.transparency_log_config = aftltool.TransparencyLogConfig(
-        self.aftl_host, self.aftl_pubkey, self.aftl_apikey)
-
-    self.make_icp_default_params = {
-        'vbmeta_image_path': self.vbmeta_image,
-        'output': None,
-        'signing_helper': None,
-        'signing_helper_with_files': None,
-        'version_incremental': '1',
-        'transparency_log_configs': [self.transparency_log_config],
-        'manufacturer_key': self.manufacturer_key,
-        'padding_size': 0,
-        'timeout': None
-    }
-
-    self.info_icp_default_params = {
-        'vbmeta_image_path': None,
-        'output': io.StringIO()
-    }
-
-    self.verify_icp_default_params = {
-        'vbmeta_image_path': None,
-        'transparency_log_pub_keys': [self.aftl_pubkey],
-        'output': io.StringIO()
-    }
-
-    self.load_test_aftl_default_params = {
-        'vbmeta_image_path': self.vbmeta_image,
-        'output': io.StringIO(),
-        'transparency_log_config': self.transparency_log_config,
-        'manufacturer_key': self.manufacturer_key,
-        'process_count': 1,
-        'submission_count': 1,
-        'stats_filename': None,
-        'preserve_icp_images': False,
-        'timeout': None
-    }
-
-  def set_up_environment(self):
-    """Sets up member variables for the particular test environment.
-
-    This allows different settings and mocking to be used for unit tests
-    and integration tests.
-    """
-    raise NotImplementedError('set_up_environment() needs to be implemented '
-                              'by subclass.')
-
-  def get_aftl_implementation(self, canned_response):
-    """Gets the aftltool.Aftl implementation used for testing.
-
-    This allows different Aftl implementations to be used for unit tests
-    and integration tests.
-
-    Arguments:
-      canned_response: Ignored because the actual implementation is used
-        rather than a mock.
-
-    Raises:
-      NotImplementedError: If the subclass does not implement this method.
-    """
-    raise NotImplementedError('get_aftl_implementation() needs to be '
-                              'implemented by subclass.')
-
-
-class AftlTest(AftlTestCase):
-
-  def set_up_environment(self):
-    """Sets up the environment for unit testing without networking."""
-    self.aftl_host = 'test.foo.bar:9000'
-    self.aftl_pubkey = self.get_testdata_path('aftl_pubkey_1.pem')
-    self.vbmeta_image = self.get_testdata_path('aftl_input_vbmeta.img')
-    self.manufacturer_key = self.get_testdata_path('testkey_rsa4096.pem')
-
-  def get_aftl_implementation(self, canned_response):
-    """Retrieves the AftlMock for unit testing without networking."""
-    return AftlMock(canned_response)
-
-  def test_get_vbmeta_image(self):
-    """Tests the get_vbmeta_image method."""
-    tool = aftltool.Aftl()
-
-    # Valid vbmeta image without footer and AftlImage.
-    image, footer = tool.get_vbmeta_image(
-        self.get_testdata_path('aftl_input_vbmeta.img'))
-    self.assertIsNotNone(image)
-    self.assertEqual(len(image), 4352)
-    self.assertIsNone(footer)
-
-    # Valid vbmeta image without footer but with AftlImage.
-    image, footer = tool.get_vbmeta_image(
-        self.get_testdata_path('aftl_output_vbmeta_with_1_icp.img'))
-    self.assertIsNotNone(image)
-    self.assertEqual(len(image), 4352)
-    self.assertIsNone(footer)
-
-    # Invalid vbmeta image.
-    image, footer = tool.get_vbmeta_image(
-        self.get_testdata_path('large_blob.bin'))
-    self.assertIsNone(image)
-    self.assertIsNone(footer)
-
-    # Invalid file path.
-    image, footer = tool.get_vbmeta_image(
-        self.get_testdata_path('blabli_not_existing_file'))
-    self.assertIsNone(image)
-    self.assertIsNone(footer)
-
-  def test_get_aftl_image(self):
-    """Tests the get_aftl_image method."""
-    tool = aftltool.Aftl()
-
-    # Valid vbmeta image without footer with AftlImage.
-    desc = tool.get_aftl_image(
-        self.get_testdata_path('aftl_output_vbmeta_with_1_icp.img'))
-    self.assertIsInstance(desc, aftltool.AftlImage)
-
-    # Valid vbmeta image without footer and AftlImage.
-    desc = tool.get_aftl_image(
-        self.get_testdata_path('aftl_input_vbmeta.img'))
-    self.assertIsNone(desc)
-
-    # Invalid vbmeta image.
-    desc = tool.get_aftl_image(self.get_testdata_path('large_blob.bin'))
-    self.assertIsNone(desc)
-
-  # pylint: disable=no-member
-  def test_request_inclusion_proof(self):
-    """Tests the request_inclusion_proof method."""
-    # Always use a mock, whether run as a unit test or an integration test.
-    aftl = AftlMock(self.test_avbm_resp)
-
-    icp = aftl.request_inclusion_proof(
-        self.transparency_log_config, b'a' * 1024, '1',
-        self.get_testdata_path('testkey_rsa4096.pem'), None, None, None)
-    self.assertEqual(icp.leaf_index,
-                     self.test_avbm_resp.annotation_proof.proof.leaf_index)
-    self.assertEqual(icp.proof_hash_count,
-                     len(self.test_avbm_resp.annotation_proof.proof.hashes))
-    self.assertEqual(icp.log_url, self.aftl_host)
-    self.assertEqual(
-        icp.log_root_descriptor.root_hash, binascii.unhexlify(
-            '9a5f71340f8dc98bdc6320f976dda5f34db8554cb273ba5ab60f1697c519d6f6'))
-
-    self.assertEqual(icp.annotation_leaf.annotation.version_incremental,
-                     'only_for_testing')
-    # To calculate the hash of an RSA key, use the following command:
-    # openssl rsa -in test/data/testkey_rsa4096.pem -pubout \
-    #    -outform DER | sha256sum
-    self.assertEqual(
-        icp.annotation_leaf.annotation.manufacturer_key_hash,
-        bytes.fromhex(
-            "83ab3b109b73a1d32dce4153a2de57a1a0485052db8364f3180d98614749d7f7"))
-
-    self.assertEqual(
-        icp.log_root_signature,
-        self.test_avbm_resp.annotation_proof.sth.log_root_signature)
-    self.assertEqual(
-        icp.proofs,
-        self.test_avbm_resp.annotation_proof.proof.hashes)
-
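# A hedged Python rendering of the openssl command referenced in the comment
# above: dump the manufacturer public key in DER form and SHA-256 it. Assumes
# an `openssl` binary on PATH; the key path is the same test key used above.
import hashlib
import subprocess

def manufacturer_key_hash_hex(key_path):
    der = subprocess.run(
        ['openssl', 'rsa', '-in', key_path, '-pubout', '-outform', 'DER'],
        check=True, capture_output=True).stdout
    return hashlib.sha256(der).hexdigest()

# manufacturer_key_hash_hex('test/data/testkey_rsa4096.pem') is expected to
# match the hex digest asserted in test_request_inclusion_proof above.
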
-  # pylint: disable=no-member
-  def test_request_inclusion_proof_failure(self):
-    """Tests the request_inclusion_proof method in case of a comms problem."""
-    # Always use a mock, whether run as a unit test or an integration test.
-    aftl = AftlMock(aftltool.AftlError('Comms error'))
-
-    with self.assertRaises(aftltool.AftlError):
-      aftl.request_inclusion_proof(
-          self.transparency_log_config, b'a' * 1024, 'version_inc',
-          self.get_testdata_path('testkey_rsa4096.pem'), None, None, None)
-
-  def test_request_inclusion_proof_manuf_key_not_4096(self):
-    """Tests request_inclusion_proof with a manufacturer key not of size 4096."""
-    # Always use a mock, whether run as a unit test or an integration test.
-    aftl = AftlMock(self.test_avbm_resp)
-    with self.assertRaises(aftltool.AftlError) as e:
-      aftl.request_inclusion_proof(
-          self.transparency_log_config, b'a' * 1024, 'version_inc',
-          self.get_testdata_path('testkey_rsa2048.pem'), None, None, None)
-    self.assertIn('not of size 4096: 2048', str(e.exception))
-
-  def test_make_and_verify_icp_with_1_log(self):
-    """Tests make_icp_from_vbmeta, verify_image_icp & info_image_icp."""
-    aftl = self.get_aftl_implementation(self.test_avbm_resp)
-
-    # Make a VBmeta image with ICP.
-    with tempfile.NamedTemporaryFile('wb+') as output_file:
-      self.make_icp_default_params['output'] = output_file
-      result = aftl.make_icp_from_vbmeta(**self.make_icp_default_params)
-      output_file.flush()
-      self.assertTrue(result)
-
-      # Checks that there is 1 ICP.
-      aftl_image = aftl.get_aftl_image(output_file.name)
-      self.assertEqual(aftl_image.image_header.icp_count, 1)
-
-      # Verifies the generated image.
-      self.verify_icp_default_params['vbmeta_image_path'] = output_file.name
-      result = aftl.verify_image_icp(**self.verify_icp_default_params)
-      self.assertTrue(result)
-
-      # Prints the image details.
-      self.info_icp_default_params['vbmeta_image_path'] = output_file.name
-      result = aftl.info_image_icp(**self.info_icp_default_params)
-      self.assertTrue(result)
-
-  def test_make_and_verify_icp_with_2_logs(self):
-    """Tests make_icp_from_vbmeta, verify_image_icp & info_image_icp."""
-    aftl = self.get_aftl_implementation(self.test_avbm_resp)
-
-    # Reconfigures default parameters with two transparency logs.
-    self.make_icp_default_params['transparency_log_configs'] = [
-        self.transparency_log_config, self.transparency_log_config]
-
-    # Make a VBmeta image with ICP.
-    with tempfile.NamedTemporaryFile('wb+') as output_file:
-      self.make_icp_default_params['output'] = output_file
-      result = aftl.make_icp_from_vbmeta(
-          **self.make_icp_default_params)
-      output_file.flush()
-      self.assertTrue(result)
-
-      # Checks that there are 2 ICPs.
-      aftl_image = aftl.get_aftl_image(output_file.name)
-      self.assertEqual(aftl_image.image_header.icp_count, 2)
-
-      # Verifies the generated image.
-      self.verify_icp_default_params['vbmeta_image_path'] = output_file.name
-      result = aftl.verify_image_icp(**self.verify_icp_default_params)
-      self.assertTrue(result)
-
-      # Prints the image details.
-      self.info_icp_default_params['vbmeta_image_path'] = output_file.name
-      result = aftl.info_image_icp(**self.info_icp_default_params)
-      self.assertTrue(result)
-
-  def test_info_image_icp(self):
-    """Tests info_image_icp with a vbmeta image with 2 ICPs."""
-    # Always use a mock, whether run as a unit test or an integration test.
-    aftl = AftlMock(self.test_avbm_resp)
-
-    image_path = self.get_testdata_path(
-        'aftl_output_vbmeta_with_2_icp_same_log.img')
-    self.info_icp_default_params['vbmeta_image_path'] = image_path
-
-    # Prints the image details.
-    result = aftl.info_image_icp(**self.info_icp_default_params)
-    self.assertTrue(result)
-
-  def test_info_image_icp_fail(self):
-    """Tests info_image_icp with invalid vbmeta image."""
-    # Always use a mock, whether run as a unit test or an integration test.
-    aftl = AftlMock(self.test_avbm_resp)
-
-    image_path = self.get_testdata_path('large_blob.bin')
-    self.info_icp_default_params['vbmeta_image_path'] = image_path
-
-    # Attempts to print the details of the invalid image.
-    result = aftl.info_image_icp(**self.info_icp_default_params)
-    self.assertFalse(result)
-
-  def test_verify_image_icp(self):
-    """Tests verify_image_icp with 2 ICPs whose log keys all match."""
-    # Always use a mock, whether run as a unit test or an integration test.
-    aftl = AftlMock(self.test_avbm_resp)
-
-    image_path = self.get_testdata_path(
-        'aftl_output_vbmeta_with_2_icp_same_log.img')
-    self.verify_icp_default_params['vbmeta_image_path'] = image_path
-    self.verify_icp_default_params['transparency_log_pub_keys'] = [
-        self.get_testdata_path('aftl_pubkey_1.pem'),
-    ]
-
-    result = aftl.verify_image_icp(**self.verify_icp_default_params)
-    self.assertTrue(result)
-
-  def test_make_icp_with_invalid_grpc_service(self):
-    """Tests make_icp_from_vbmeta command with a host not supporting GRPC."""
-    aftl = self.get_aftl_implementation(aftltool.AftlError('Comms error'))
-    self.make_icp_default_params[
-        'transparency_log_configs'][0].target = 'www.google.com:80'
-    with tempfile.NamedTemporaryFile('wb+') as output_file:
-      self.make_icp_default_params['output'] = output_file
-      result = aftl.make_icp_from_vbmeta(
-          **self.make_icp_default_params)
-      self.assertFalse(result)
-
-  def test_make_icp_grpc_timeout(self):
-    """Tests make_icp_from_vbmeta command when running into GRPC timeout."""
-    aftl = self.get_aftl_implementation(aftltool.AftlError('Comms error'))
-
-    # The timeout is set to 1 second, well below the minimum processing time
-    # of the transparency log (3.43 seconds per the load test results in
-    # b/139407814#2).
-    self.make_icp_default_params['timeout'] = 1
-    with tempfile.NamedTemporaryFile('wb+') as output_file:
-      self.make_icp_default_params['output'] = output_file
-      result = aftl.make_icp_from_vbmeta(
-          **self.make_icp_default_params)
-      self.assertFalse(result)
-
-  def test_load_test_single_process_single_submission(self):
-    """Tests load_test_aftl command with 1 process which does 1 submission."""
-    aftl = self.get_aftl_implementation(self.test_avbm_resp)
-
-    with tempfile.TemporaryDirectory() as tmp_dir:
-      self.load_test_aftl_default_params[
-          'stats_filename'] = os.path.join(tmp_dir, 'load_test.csv')
-      result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
-      self.assertTrue(result)
-
-      output = self.load_test_aftl_default_params['output'].getvalue()
-      self.assertRegex(output, 'Succeeded:.+?1\n')
-      self.assertRegex(output, 'Failed:.+?0\n')
-
-      self.assertTrue(os.path.exists(
-          self.load_test_aftl_default_params['stats_filename']))
-
-  def test_load_test_multi_process_multi_submission(self):
-    """Tests load_test_aftl command with 2 processes and 2 submissions each."""
-    aftl = self.get_aftl_implementation(self.test_avbm_resp)
-
-    self.load_test_aftl_default_params['process_count'] = 2
-    self.load_test_aftl_default_params['submission_count'] = 2
-    with tempfile.TemporaryDirectory() as tmp_dir:
-      self.load_test_aftl_default_params[
-          'stats_filename'] = os.path.join(tmp_dir, 'load_test.csv')
-      result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
-      self.assertTrue(result)
-
-      output = self.load_test_aftl_default_params['output'].getvalue()
-      self.assertRegex(output, 'Succeeded:.+?4\n')
-      self.assertRegex(output, 'Failed:.+?0\n')
-
-      self.assertTrue(os.path.exists(
-          self.load_test_aftl_default_params['stats_filename']))
-
-  def test_load_test_invalid_grpc_service(self):
-    """Tests load_test_aftl command with a host that does not support GRPC."""
-    aftl = self.get_aftl_implementation(aftltool.AftlError('Comms error'))
-
-    self.load_test_aftl_default_params[
-        'transparency_log_config'].target = 'www.google.com:80'
-    result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
-    self.assertFalse(result)
-
-    output = self.load_test_aftl_default_params['output'].getvalue()
-    self.assertRegex(output, 'Succeeded:.+?0\n')
-    self.assertRegex(output, 'Failed:.+?1\n')
-
-  def test_load_test_grpc_timeout(self):
-    """Tests load_test_aftl command when running into timeout."""
-    aftl = self.get_aftl_implementation(aftltool.AftlError('Comms error'))
-
-    self.load_test_aftl_default_params['timeout'] = 1
-    result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
-    self.assertFalse(result)
-
-    output = self.load_test_aftl_default_params['output'].getvalue()
-    self.assertRegex(output, 'Succeeded:.+?0\n')
-    self.assertRegex(output, 'Failed:.+?1\n')
-
-
-class TransparencyLogConfigTestCase(unittest.TestCase):
-
-  def test_from_argument(self):
-    log = aftltool.TransparencyLogConfig.from_argument(
-        "example.com:8080,mykey.pub")
-    self.assertEqual(log.target, "example.com:8080")
-    self.assertEqual(log.pub_key, "mykey.pub")
-
-    with self.assertRaises(argparse.ArgumentTypeError):
-      aftltool.TransparencyLogConfig.from_argument("example.com:8080,")
-
-    with self.assertRaises(argparse.ArgumentTypeError):
-      aftltool.TransparencyLogConfig.from_argument(",")
-
-  def test_from_argument_with_api_key(self):
-    log = aftltool.TransparencyLogConfig.from_argument(
-        "example.com:8080,mykey.pub,Aipl29gj3x9")
-    self.assertEqual(log.target, "example.com:8080")
-    self.assertEqual(log.pub_key, "mykey.pub")
-    self.assertEqual(log.api_key, "Aipl29gj3x9")
-
-if __name__ == '__main__':
-  unittest.main(verbosity=2)
diff --git a/avbtool.py b/avbtool.py
index 8647b29..f944af4 100755
--- a/avbtool.py
+++ b/avbtool.py
@@ -342,6 +342,7 @@
     exponent: The key exponent.
     modulus: The key modulus.
     num_bits: The key size.
+    key_path: The path to a key file.
   """
 
   MODULUS_PREFIX = b'modulus='
@@ -900,7 +901,7 @@
     Arguments:
       num_bytes: Size in number of bytes of the DONT_CARE chunk.
 
-    Raises
+    Raises:
       OSError: If ImageHandler was initialized in read-only mode.
     """
     assert num_bytes % self.block_size == 0
@@ -937,7 +938,7 @@
     Arguments:
       data: Data to append as bytes.
 
-    Raises
+    Raises:
       OSError: If ImageHandler was initialized in read-only mode.
     """
     assert len(data) % self.block_size == 0
@@ -974,7 +975,7 @@
       fill_data: Fill data to append - must be four bytes.
       size: Number of chunk - must be a multiple of four and the block size.
 
-    Raises
+    Raises:
       OSError: If ImageHandler was initialized in read-only mode.
     """
     assert len(fill_data) == 4
@@ -1259,7 +1260,7 @@
     Raises:
       LookupError: If the given descriptor is malformed.
     """
-    super(AvbPropertyDescriptor, self).__init__(None)
+    super().__init__(None)
     assert struct.calcsize(self.FORMAT_STRING) == self.SIZE
 
     if data:
@@ -1273,7 +1274,8 @@
       try:
         self.key = data[self.SIZE:(self.SIZE + key_size)].decode('utf-8')
       except UnicodeDecodeError as e:
-        raise LookupError('Key cannot be decoded as UTF-8: {}.'.format(e))
+        raise LookupError('Key cannot be decoded as UTF-8: {}.'
+                          .format(e)) from e
       self.value = data[(self.SIZE + key_size + 1):(self.SIZE + key_size + 1 +
                                                     value_size)]
     else:
@@ -1377,6 +1379,9 @@
                    'L' +  # flags
                    str(RESERVED) + 's')  # reserved
 
+  FLAGS_DO_NOT_USE_AB = (1 << 0)
+  FLAGS_CHECK_AT_MOST_ONCE = (1 << 1)
+
   def __init__(self, data=None):
     """Initializes a new hashtree descriptor.
 
@@ -1386,7 +1391,7 @@
     Raises:
       LookupError: If the given descriptor is malformed.
     """
-    super(AvbHashtreeDescriptor, self).__init__(None)
+    super().__init__(None)
     assert struct.calcsize(self.FORMAT_STRING) == self.SIZE
 
     if data:
@@ -1410,7 +1415,7 @@
         ].decode('utf-8')
       except UnicodeDecodeError as e:
         raise LookupError('Partition name cannot be decoded as UTF-8: {}.'
-                          .format(e))
+                          .format(e)) from e
       o += partition_name_len
       self.salt = data[(self.SIZE + o):(self.SIZE + o + salt_len)]
       o += salt_len
@@ -1582,7 +1587,7 @@
     Raises:
       LookupError: If the given descriptor is malformed.
     """
-    super(AvbHashDescriptor, self).__init__(None)
+    super().__init__(None)
     assert struct.calcsize(self.FORMAT_STRING) == self.SIZE
 
     if data:
@@ -1603,7 +1608,7 @@
         ].decode('utf-8')
       except UnicodeDecodeError as e:
         raise LookupError('Partition name cannot be decoded as UTF-8: {}.'
-                          .format(e))
+                          .format(e)) from e
       o += partition_name_len
       self.salt = data[(self.SIZE + o):(self.SIZE + o + salt_len)]
       o += salt_len
@@ -1720,7 +1725,7 @@
     Raises:
       LookupError: If the given descriptor is malformed.
     """
-    super(AvbKernelCmdlineDescriptor, self).__init__(None)
+    super().__init__(None)
     assert struct.calcsize(self.FORMAT_STRING) == self.SIZE
 
     if data:
@@ -1737,7 +1742,7 @@
             self.SIZE:(self.SIZE + kernel_cmdline_length)].decode('utf-8')
       except UnicodeDecodeError as e:
         raise LookupError('Kernel command-line cannot be decoded as UTF-8: {}.'
-                          .format(e))
+                          .format(e)) from e
     else:
       self.flags = 0
       self.kernel_cmdline = ''
@@ -1835,7 +1840,7 @@
         ].decode('utf-8')
       except UnicodeDecodeError as e:
         raise LookupError('Partition name cannot be decoded as UTF-8: {}.'
-                          .format(e))
+                          .format(e)) from e
       o += partition_name_len
       self.public_key = data[(self.SIZE + o):(self.SIZE + o + public_key_len)]
 
@@ -2468,8 +2473,8 @@
       o.write('    Metadata version:        {}\n'.format(version))
 
       def print_atx_certificate(cert):
-        version, public_key, subject, usage, key_version, _signature = \
-            struct.unpack('<I1032s32s32sQ512s', cert)
+        version, public_key, subject, usage, key_version, _ = (
+            struct.unpack('<I1032s32s32sQ512s', cert))
         o.write('      Version:               {}\n'.format(version))
         o.write('      Public key (sha1):     {}\n'.format(
             hashlib.sha1(public_key).hexdigest()))
@@ -2832,7 +2837,11 @@
     c += ' {}'.format(ht.root_digest.hex())                 # root_digest
     c += ' {}'.format(ht.salt.hex())                        # salt
     if ht.fec_num_roots > 0:
-      c += ' 10'  # number of optional args
+      if ht.flags & AvbHashtreeDescriptor.FLAGS_CHECK_AT_MOST_ONCE:
+        c += ' 11'  # number of optional args
+        c += ' check_at_most_once'
+      else:
+        c += ' 10'  # number of optional args
       c += ' $(ANDROID_VERITY_MODE)'
       c += ' ignore_zero_blocks'
       c += ' use_fec_from_device PARTUUID=$(ANDROID_SYSTEM_PARTUUID)'
@@ -2843,7 +2852,11 @@
       c += ' fec_blocks {}'.format(ht.fec_offset // ht.data_block_size)
       c += ' fec_start {}'.format(ht.fec_offset // ht.data_block_size)
     else:
-      c += ' 2'  # number of optional args
+      if ht.flags & AvbHashtreeDescriptor.FLAGS_CHECK_AT_MOST_ONCE:
+        c += ' 3'  # number of optional args
+        c += ' check_at_most_once'
+      else:
+        c += ' 2'  # number of optional args
       c += ' $(ANDROID_VERITY_MODE)'
       c += ' ignore_zero_blocks'
     c += '" root=/dev/dm-0'
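# A small sketch of how the optional-argument list for the dm-verity target
# grows when FLAGS_CHECK_AT_MOST_ONCE is set, mirroring the no-FEC branch
# above (the count is emitted first, then the arguments themselves).
def verity_optional_args(check_at_most_once):
    args = []
    if check_at_most_once:
        args.append('check_at_most_once')
    args += ['$(ANDROID_VERITY_MODE)', 'ignore_zero_blocks']
    return '{} {}'.format(len(args), ' '.join(args))

# verity_optional_args(False) -> '2 $(ANDROID_VERITY_MODE) ignore_zero_blocks'
# verity_optional_args(True)  ->
#     '3 check_at_most_once $(ANDROID_VERITY_MODE) ignore_zero_blocks'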
@@ -3025,8 +3038,9 @@
     """
     try:
       alg = ALGORITHMS[algorithm_name]
-    except KeyError:
-      raise AvbError('Unknown algorithm with name {}'.format(algorithm_name))
+    except KeyError as e:
+      raise AvbError('Unknown algorithm with name {}'
+                     .format(algorithm_name)) from e
 
     if not descriptors:
       descriptors = []
@@ -3313,9 +3327,10 @@
     except Exception as e:
       # Truncate back to original size, then re-raise.
       image.truncate(original_image_size)
-      raise AvbError('Appending VBMeta image failed: {}.'.format(e))
+      raise AvbError('Appending VBMeta image failed: {}.'.format(e)) from e
 
-  def add_hash_footer(self, image_filename, partition_size, partition_name,
+  def add_hash_footer(self, image_filename, partition_size,
+                      dynamic_partition_size, partition_name,
                       hash_algorithm, salt, chain_partitions, algorithm_name,
                       key_path,
                       public_key_metadata_path, rollback_index, flags,
@@ -3333,6 +3348,7 @@
     Arguments:
       image_filename: File to add the footer to.
       partition_size: Size of partition.
+      dynamic_partition_size: Calculate partition size based on image size.
       partition_name: Name of partition (without A/B suffix).
       hash_algorithm: Hash algorithm to use.
       salt: Salt to use as a hexadecimal string or None to use /dev/urandom.
@@ -3366,6 +3382,14 @@
     Raises:
       AvbError: If an argument is incorrect or if adding the hash_footer failed.
     """
+    if not partition_size and not dynamic_partition_size:
+      raise AvbError('--dynamic_partition_size required when not specifying a '
+                     'partition size')
+
+    if dynamic_partition_size and calc_max_image_size:
+      raise AvbError('--calc_max_image_size not supported with '
+                     '--dynamic_partition_size')
+
     required_libavb_version_minor = 0
     if use_persistent_digest or do_not_use_ab:
       required_libavb_version_minor = 1
@@ -3381,24 +3405,18 @@
     # this size + metadata (footer + vbmeta struct) fits in
     # |partition_size|.
     max_metadata_size = self.MAX_VBMETA_SIZE + self.MAX_FOOTER_SIZE
-    if partition_size < max_metadata_size:
+    if not dynamic_partition_size and partition_size < max_metadata_size:
       raise AvbError('Partition size of {} is too small. '
                      'Needs to be at least {}'.format(
                          partition_size, max_metadata_size))
-    max_image_size = partition_size - max_metadata_size
 
     # If we're asked to only calculate the maximum image size, we're done.
     if calc_max_image_size:
-      print('{}'.format(max_image_size))
+      print('{}'.format(partition_size - max_metadata_size))
       return
 
     image = ImageHandler(image_filename)
 
-    if partition_size % image.block_size != 0:
-      raise AvbError('Partition size of {} is not a multiple of the image '
-                     'block size {}.'.format(partition_size,
-                                             image.block_size))
-
     # If there's already a footer, truncate the image to its original
     # size. This way 'avbtool add_hash_footer' is idempotent (modulo
     # salts).
@@ -3415,6 +3433,16 @@
       # Image size is too small to possibly contain a footer.
       original_image_size = image.image_size
 
+    if dynamic_partition_size:
+      partition_size = round_to_multiple(
+          original_image_size + max_metadata_size, image.block_size)
+
+    max_image_size = partition_size - max_metadata_size
+    if partition_size % image.block_size != 0:
+      raise AvbError('Partition size of {} is not a multiple of the image '
+                     'block size {}.'.format(partition_size,
+                                             image.block_size))
+
     # If anything goes wrong from here-on, restore the image back to
     # its original size.
     try:
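A rough illustration of the dynamic sizing above; this is a sketch only, and the 64 KiB vbmeta plus 4 KiB footer budget is assumed here for the example (intended to mirror avbtool's usual MAX_VBMETA_SIZE and MAX_FOOTER_SIZE), not taken from this change.

```
def round_to_multiple(size, unit):
  # Round |size| up to a multiple of |unit|, as the tool's helper does.
  remainder = size % unit
  return size if remainder == 0 else size + unit - remainder

# Assumed metadata budget: 64 KiB for vbmeta plus 4 KiB for the footer.
max_metadata_size = 64 * 1024 + 4 * 1024
original_image_size = 9 * 1024 * 1024 + 1      # not block-aligned on purpose
partition_size = round_to_multiple(original_image_size + max_metadata_size,
                                   4096)
max_image_size = partition_size - max_metadata_size
assert partition_size % 4096 == 0
```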
@@ -3514,7 +3542,7 @@
     except Exception as e:
       # Truncate back to original size, then re-raise.
       image.truncate(original_image_size)
-      raise AvbError('Adding hash_footer failed: {}.'.format(e))
+      raise AvbError('Adding hash_footer failed: {}.'.format(e)) from e
 
   def add_hashtree_footer(self, image_filename, partition_size, partition_name,
                           generate_fec, fec_num_roots, hash_algorithm,
@@ -3532,7 +3560,7 @@
                           output_vbmeta_image, do_not_append_vbmeta_image,
                           print_required_libavb_version,
                           use_persistent_root_digest, do_not_use_ab,
-                          no_hashtree):
+                          no_hashtree, check_at_most_once):
     """Implements the 'add_hashtree_footer' command.
 
     See https://gitlab.com/cryptsetup/cryptsetup/wikis/DMVerity for
@@ -3576,13 +3604,15 @@
       use_persistent_root_digest: Use a persistent root digest on device.
       do_not_use_ab: The partition does not use A/B.
       no_hashtree: Do not append hashtree. Set size in descriptor as zero.
+      check_at_most_once: Set to verify data blocks only the first time they
+        are read from the data device.
 
     Raises:
       AvbError: If an argument is incorrect or adding the hashtree footer
           failed.
     """
     required_libavb_version_minor = 0
-    if use_persistent_root_digest or do_not_use_ab:
+    if use_persistent_root_digest or do_not_use_ab or check_at_most_once:
       required_libavb_version_minor = 1
     if rollback_index_location > 0:
       required_libavb_version_minor = 2
@@ -3713,9 +3743,11 @@
       ht_desc.partition_name = partition_name
       ht_desc.salt = salt
       if do_not_use_ab:
-        ht_desc.flags |= 1  # AVB_HASHTREE_DESCRIPTOR_FLAGS_DO_NOT_USE_AB
+        ht_desc.flags |= AvbHashtreeDescriptor.FLAGS_DO_NOT_USE_AB
       if not use_persistent_root_digest:
         ht_desc.root_digest = root_digest
+      if check_at_most_once:
+        ht_desc.flags |= AvbHashtreeDescriptor.FLAGS_CHECK_AT_MOST_ONCE
 
       # Write the hash tree
       padding_needed = (round_to_multiple(len(hash_tree), image.block_size) -
@@ -3788,7 +3820,7 @@
     except Exception as e:
       # Truncate back to original size, then re-raise.
       image.truncate(original_image_size)
-      raise AvbError('Adding hashtree_footer failed: {}.'.format(e))
+      raise AvbError('Adding hashtree_footer failed: {}.'.format(e)) from e
 
   def make_atx_certificate(self, output, authority_key_path, subject_key_path,
                            subject_key_version, subject,
@@ -4040,7 +4072,8 @@
            fec_tmpfile.name],
           stderr=open(os.devnull, 'wb'))
     except subprocess.CalledProcessError as e:
-      raise ValueError('Execution of \'fec\' tool failed: {}.'.format(e))
+      raise ValueError('Execution of \'fec\' tool failed: {}.'
+                       .format(e)) from e
     fec_data = fec_tmpfile.read()
 
   footer_size = struct.calcsize(FEC_FOOTER_FORMAT)
@@ -4074,6 +4107,14 @@
   hash_src_offset = 0
   hash_src_size = image_size
   level_num = 0
+
+  # If there is only one block, returns the top-level hash directly.
+  if hash_src_size == block_size:
+    hasher = create_avb_hashtree_hasher(hash_alg_name, salt)
+    image.seek(0)
+    hasher.update(image.read(block_size))
+    return hasher.digest(), bytes(hash_ret)
+
   while hash_src_size > block_size:
     level_output_list = []
     remaining = hash_src_size
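A minimal standalone sketch of that single-block shortcut, assuming the usual salt-then-data hashing and using hashlib directly instead of the tool's create_avb_hashtree_hasher helper:

```
import hashlib

def single_block_root_digest(image_path, block_size, salt, hash_name='sha256'):
  # With exactly one data block there are no tree levels: the root digest is
  # hash(salt || block) and the serialized hash tree is empty.
  hasher = hashlib.new(hash_name)
  hasher.update(salt)
  with open(image_path, 'rb') as f:
    hasher.update(f.read(block_size))
  return hasher.digest(), b''
```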
@@ -4304,6 +4345,9 @@
     sub_parser.add_argument('--partition_size',
                             help='Partition size',
                             type=parse_number)
+    sub_parser.add_argument('--dynamic_partition_size',
+                            help='Calculate partition size based on image size',
+                            action='store_true')
     sub_parser.add_argument('--partition_name',
                             help='Partition name',
                             default=None)
@@ -4404,6 +4448,9 @@
     sub_parser.add_argument('--no_hashtree',
                             action='store_true',
                             help='Do not append hashtree')
+    sub_parser.add_argument('--check_at_most_once',
+                            action='store_true',
+                            help='Set to verify data blocks only once')
     self._add_common_args(sub_parser)
     self._add_common_footer_args(sub_parser)
     sub_parser.set_defaults(func=self.add_hashtree_footer)
@@ -4729,7 +4776,7 @@
     """Implements the 'add_hash_footer' sub-command."""
     args = self._fixup_common_args(args)
     self.avb.add_hash_footer(args.image.name if args.image else None,
-                             args.partition_size,
+                             args.partition_size, args.dynamic_partition_size,
                              args.partition_name, args.hash_algorithm,
                              args.salt, args.chain_partition, args.algorithm,
                              args.key,
@@ -4784,7 +4831,8 @@
         args.print_required_libavb_version,
         args.use_persistent_digest,
         args.do_not_use_ab,
-        args.no_hashtree)
+        args.no_hashtree,
+        args.check_at_most_once)
 
   def erase_footer(self, args):
     """Implements the 'erase_footer' sub-command."""
diff --git a/docs/aftl-architecture.png b/docs/aftl-architecture.png
deleted file mode 100644
index 5526f92..0000000
--- a/docs/aftl-architecture.png
+++ /dev/null
Binary files differ
diff --git a/docs/aftl-boot-flow.png b/docs/aftl-boot-flow.png
deleted file mode 100644
index f724808..0000000
--- a/docs/aftl-boot-flow.png
+++ /dev/null
Binary files differ
diff --git a/docs/aftl-image-location.png b/docs/aftl-image-location.png
deleted file mode 100644
index 0e609d7..0000000
--- a/docs/aftl-image-location.png
+++ /dev/null
Binary files differ
diff --git a/docs/aftl-image-structure.png b/docs/aftl-image-structure.png
deleted file mode 100644
index 2487314..0000000
--- a/docs/aftl-image-structure.png
+++ /dev/null
Binary files differ
diff --git a/libavb/avb_hashtree_descriptor.h b/libavb/avb_hashtree_descriptor.h
index d0f7e2c..6595f46 100644
--- a/libavb/avb_hashtree_descriptor.h
+++ b/libavb/avb_hashtree_descriptor.h
@@ -40,9 +40,12 @@
  * AVB_HASHTREE_DESCRIPTOR_FLAGS_DO_NOT_USE_AB: Do not apply the default A/B
  *   partition logic to this partition. This is intentionally a negative boolean
  *   because A/B should be both the default and most used in practice.
+ * AVB_HASHTREE_DESCRIPTOR_FLAGS_CHECK_AT_MOST_ONCE: If set, dm-verity
+ *   validates each data block at most once (check_at_most_once).
  */
 typedef enum {
   AVB_HASHTREE_DESCRIPTOR_FLAGS_DO_NOT_USE_AB = (1 << 0),
+  AVB_HASHTREE_DESCRIPTOR_FLAGS_CHECK_AT_MOST_ONCE = (1 << 1),
 } AvbHashtreeDescriptorFlags;
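As a small tool-side illustration of how these bits are consumed (the flag values mirror the enum above; the helper itself is hypothetical):

```
FLAGS_DO_NOT_USE_AB = 1 << 0
FLAGS_CHECK_AT_MOST_ONCE = 1 << 1

def hashtree_flag_names(flags):
  # Decode the AvbHashtreeDescriptorFlags bits defined above.
  names = []
  if flags & FLAGS_DO_NOT_USE_AB:
    names.append('do_not_use_ab')
  if flags & FLAGS_CHECK_AT_MOST_ONCE:
    names.append('check_at_most_once')
  return names

assert hashtree_flag_names(0b11) == ['do_not_use_ab', 'check_at_most_once']
```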
 
 /* A descriptor containing information about a dm-verity hashtree.
diff --git a/libavb/avb_slot_verify.c b/libavb/avb_slot_verify.c
index 6a2745a..8e0721d 100644
--- a/libavb/avb_slot_verify.c
+++ b/libavb/avb_slot_verify.c
@@ -468,6 +468,7 @@
     loaded_partition->data_size = image_size;
     loaded_partition->data = image_buf;
     loaded_partition->preloaded = image_preloaded;
+    loaded_partition->verify_result = ret;
     image_buf = NULL;
   }
 
@@ -1706,7 +1707,7 @@
   return ret;
 }
 
-void avb_slot_verify_data_calculate_vbmeta_digest(AvbSlotVerifyData* data,
+void avb_slot_verify_data_calculate_vbmeta_digest(const AvbSlotVerifyData* data,
                                                   AvbDigestType digest_type,
                                                   uint8_t* out_digest) {
   bool ret = false;
diff --git a/libavb/avb_slot_verify.h b/libavb/avb_slot_verify.h
index 633e7da..c7f3f45 100644
--- a/libavb/avb_slot_verify.h
+++ b/libavb/avb_slot_verify.h
@@ -158,6 +158,7 @@
   uint8_t* data;
   size_t data_size;
   bool preloaded;
+  AvbSlotVerifyResult verify_result;
 } AvbPartitionData;
 
 /* AvbVBMetaData contains a vbmeta struct loaded from a partition when
@@ -311,7 +312,7 @@
  * in |out_digest| which must be large enough to hold a digest
  * of the requested type.
  */
-void avb_slot_verify_data_calculate_vbmeta_digest(AvbSlotVerifyData* data,
+void avb_slot_verify_data_calculate_vbmeta_digest(const AvbSlotVerifyData* data,
                                                   AvbDigestType digest_type,
                                                   uint8_t* out_digest);
 
diff --git a/libavb_aftl/README.md b/libavb_aftl/README.md
deleted file mode 100644
index abf2636..0000000
--- a/libavb_aftl/README.md
+++ /dev/null
@@ -1,305 +0,0 @@
-# Android Firmware Transparency Log 1.0
----
-
-This repository contains tools and libraries for working with the Android
-Firmware Transparency Log components of Android Verified Boot. AFTL will be
-used to refer to these components.
-
-[TOC]
-
-# What is it?
-
-The Android Firmware Transparency Log (AFTL) is an implementation of binary
-transparency that leverages cryptographic proofs of inclusion of build images
-using a public append-only ledger. The device manufacturer stores these
-cryptographic proofs, called inclusion proofs, on-device to allow for offline
-validation of the build during system update and device boot. This ensures that
-only a publicly known build is running on the device. Furthermore, it allows
-device manufacturers and other interested parties to audit the information in
-the log to detect unexpected or malicious uses of the publisher's signing keys.
-
-
-## System overview
-
-An AFTL can be implemented using a
-[Trillian](https://github.com/google/trillian) instance which manages a Merkle
-tree with build metadata (particularly the [VBMeta
-struct](https://android.googlesource.com/platform/external/avb/+/refs/heads/master#The-VBMeta-struct))
-and keeps a repository of other data that were made transparent.
-
-![Android Firmware Transparency Log system overview](../docs/aftl-architecture.png)
-
-As part of the finalizing step in the build process the `aftltool` reaches out
-to the **AFTL personality** to submit a manufacturer-signed message (containing
-VBMeta struct and other metadata) to the AFTL. Then the AFTL personality
-submits that data to Trillian to incorporate it into the Merkle tree. After
-integration into the log, the AFTL personality returns the inclusion proof back
-to the `aftltool`, which in turn incorporates the inclusion proof with the
-VBMeta image.
-
-The AFTL uses two sets of keys for authentication and validation, the
-transparency log key and the manufacturer key.
-
-* **Transparency log key:** Used by Trillian to sign inclusion proofs. The
-  public key is embedded in the device for on-device validation.
-* **Manufacturer key:** Used by OEMs or other build providers to sign
-  submissions sent to the log. This ensures that malicious entries posing as a
-  valid OEM entry cannot be provided to the log. For the log to authenticate
-  messages, the manufacturer key must be shared out-of-band with the AFTL prior
-  to submission.
-
-## The AftlImage struct
-
-The central data structure used for AFTL validation is the `AftlImage` struct.
-The structure is saved on the vbmeta partition, right after the
-`AvbVBMetaImage` struct, as illustrated below.
-
-![Overview of the AftlImage structure](../docs/aftl-image-location.png)
-
-This structure contains the `AftlImageHeader` header that describes the number
-of inclusion proofs (`AftlIcpEntry`) represented by this structure. Each
-inclusion proof has associated metadata, such as the transparency log URL. A
-high-level description of the structures is given below. See
-[aftltool](https://android.googlesource.com/platform/external/avb/+/refs/heads/master/aftltool.py)
-and
-[libavb_aftl](https://android.googlesource.com/platform/external/avb/+/refs/heads/master/libavb_aftl/)
-for more details.
-
-
-![Overview of AftlIcpHeader and AftlIcpEntry](../docs/aftl-image-structure.png)
-
-Each `AftlIcpEntry` structure contains the information required to validate an
-inclusion proof from a specific transparency log server for a given [VBMeta
-structure](https://android.googlesource.com/platform/external/avb/+/master#the-vbmeta-struct)
-given the corresponding transparency log public key. The inclusion proof
-validation process is described in the [inclusion proofs](#inclusion-proofs)
-section of this document.
-
-*Note*: A single `AftlImage` can have multiple inclusion proofs from different
-transparency logs. This allows the device manufacturer to not rely on a single
-transparency log, and ensures that the builds represented by the VBMeta
-structure are deemed transparent in multiple disparate jurisdictions.
-
-## Inclusion proofs
-
-An inclusion proof allows a user to prove that a specific VBMeta structure is
-included in a transparency log. An inclusion proof consists of three parts:
-
-*  A `SignedVBMetaPrimaryAnnotation` structure containing the hash of the
-VBMeta structure (and other build meta information) that is signed with the
-manufacturer key.
-*  A set of sibling node hashes (`Proof`) in a Merkle tree on the path from the
-leaf node in question, which represents the logged annotation, to the root
-node.
-*  A `TrillianLogRootDescriptor` structure containing the log's root hash,
-along with related metadata, which is signed by the transparency log’s private
-key.
-
-Validation of an inclusion proof can be performed with the following steps,
-which are implemented in both `aftltool` and `libavb_aftl` (a Python sketch
-of the root-hash recomputation follows the list).
-
-1.  Calculate the hash of the VBMeta structure stored on the device.
-1.  Determine if the hash matches the hash stored in the
-`SignedVBMetaPrimaryAnnotation` structure inside the `AftlImage` on device. If
-it does, continue validation.
-1.  Given the set of hashes provided from the transparency log as part of the
-inclusion proof, attempt to recalculate the root hash. Details of the process
-can be found [here](https://www.certificate-transparency.org/log-proofs-work)
-in the Merkle Audit Proofs section.
-1.  Check the calculated root hash against the log's root hash from the
-inclusion proof. If it matches, continue validation.
-1.  Finally, verify the log root signature given the calculated root hash and
-the public key of the transparency log that is stored on device. If the
-signature is valid, the inclusion proof is valid.
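A compact Python sketch of steps 3 and 4, using the same split of the audit path into an inner part (ordered by the leaf index bits) and a right-border part as the libavb_aftl implementation further down in this change; the function names are illustrative:

```
import hashlib

def _hash_children(left, right):
  # RFC 6962 interior node: SHA-256 over 0x01 || left || right.
  return hashlib.sha256(b'\x01' + left + right).digest()

def root_from_inclusion_proof(annotation_leaf, leaf_index, tree_size, proof):
  # Recompute the expected log root hash from the leaf and its proof hashes.
  node = hashlib.sha256(b'\x00' + annotation_leaf).digest()  # leaf hash
  inner = (leaf_index ^ (tree_size - 1)).bit_length()
  for i, sibling in enumerate(proof[:inner]):
    if (leaf_index >> i) & 1 == 0:
      node = _hash_children(node, sibling)
    else:
      node = _hash_children(sibling, node)
  for sibling in proof[inner:]:
    node = _hash_children(sibling, node)
  return node
```

The computed value is then compared against the root hash carried in the `TrillianLogRootDescriptor` before the log root signature is checked.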
-
-# Tools and libraries
-
-This section contains information about the tools and libraries added to AVB
-repository or modified to include AFTL support.
-
-## aftltool and libavb\_aftl
-
-The main purpose of
-[aftltool](https://android.googlesource.com/platform/external/avb/+/refs/heads/master/aftltool.py)
-is to add an inclusion proof to an existing `vbmeta.img` to be used
-for transparency checks at boot or system update time. This enhanced image is
-stored in the `vbmeta` partition or in the `vbmeta_a` and
-`vbmeta_b` slots when using A/B and will still be of minimal size
-(for out-of-band updates). Creation, query, and verification tasks can be
-performed with `aftltool`.
-
-In addition to the `aftltool`, the
-[libavb](https://android.googlesource.com/platform/external/avb/+/master/libavb/)
-library comes with an extension called
-[libavb\_aftl](https://android.googlesource.com/platform/external/avb/+/master/libavb_aftl/).
-This component performs all verification on the device side related to AFTL and
-inclusion proofs. That is, it loads the `vbmeta` partition, checks
-the VBMeta structure signature, walks through each inclusion proof stored in
-the `AftlImage`, and validates them against a trusted transparency
-log key stored on the device.
-
-This library is intended to be used in both the boot loader and inside Android
-as part of the OTA client. The main entry point for verification is
-`aftl_slot_verify()`, which is intended to be called after `vbmeta`
-verification is done via `avb_slot_verify()`.
-
-## Files and directories
-
-* `libavb_aftl/`
-  + An implementation of AFTL inclusion proof validation. This
-    code is designed to be highly portable so it can be used in as many contexts
-    as possible. This code requires a C99-compliant C compiler. Only the content
-    declared in `libavb_aftl.h` is considered public. The other files are
-    considered internal to the implementation and may change without notice.
-* `libavb_aftl/README.md`
-  + This document.
-* `test/`
-  + Unit tests for `libavb_aftl.`
-* `test/data/`
-  + Test data for the `aftltool` and `libavb_aftl` unit tests.
-* `aftltool`
-  + A symlink to `aftltool.py`.
-* `aftltool.py`
-  + A tool written in Python for working with images related to AFTL.
-* `Android.bp`
-  + Build rules for `aftltool`, `libavb_aftl` (a static library
-    for use on the device), host-side libraries (for unit tests), and unit
-    tests.
-* `aftltool_test.py`
-  + Source-code for `aftltool` related unit tests.
-* `aftltool_integration_test.py`
-  + Source-code for `aftltool` related integration tests against
-    a live transparency log.
-
-## Portability
-
-The `libavb_aftl` code is intended to be used in bootloaders in devices that
-will load Android or other operating systems. The suggested approach is to copy
-the appropriate header and C files mentioned in the previous section into the
-boot loader and integrate as appropriate. The library is intended to be highly
-portable, working on both little and big endian architectures, as well as
-32-bit and 64-bit variants of each. It is also intended to work in environments
-without the standard C library and runtime.
-
-As in libavb, if the `AVB_ENABLE_DEBUG` preprocessor symbol is set, the code
-will include useful debug information and run-time checks.
-
-## Versioning and compatibility
-
-The `libavb_aftl` library follows the [versioning of
-libavb](https://android.googlesource.com/platform/external/avb/+/master#Versioning-and-Compatibility).
-
-## Using aftltool
-
-The content for the vbmeta partition is assumed to have been generated
-previously using `avbtool`. Instructions can be found in the
-[README.md](https://android.googlesource.com/platform/external/avb/+/master/README.md)
-for libavb. After the VBMeta partition is generated, it can be extended with
-inclusion proofs from transparency logs in the following manner:
-
-
-```
-aftltool make_icp_from_vbmeta \
-      --vbmeta_image_path /path/to/image.bin \
-      --output OUTPUT \
-      [--signing_helper /path/to/external/signer]  \
-      [--signing_helper_with_files /path/to/external/signer_with_files] \
-      --version_incremental STR \
-      --transparency_log_servers host:port,/path/to/log_key.pub \
-      --manufacturer_key /path/to/priv_key \
-      [--padding_size NUM]
-```
-
-The
-[version\_incremental](https://developer.android.com/reference/android/os/Build.VERSION#INCREMENTAL)
-is a part of the build fingerprint which allows for tagging the transparency
-log entry for easier tracking.
-
-An example of how to use the `make_icp_from_vbmeta` command is as follows:
-
-```
-aftltool make_icp_from_vbmeta \
-      --vbmeta_image_path ./vbmeta.img \
-      --output ./vbmeta_icp.img \
-      --version_incremental 99999999 \
-      --transparency_log_servers \
-        log.aftl-android.com:9000,/aftl-log-rsa-pub.pem \
-      --manufacturer_key ./manufacturer-rsa.pem \
-      --algorithm SHA256_RSA4096 \
-      --padding 4096
-```
-
-The AFTL information can be viewed in a human readable format in the following
-manner:
-
-```
-aftltool info_image_icp \
-      --vbmeta_image_path /path/to/image.bin \
-      [--output OUTPUT]
-```
-
-An example using `info_image_icp` is as follows:
-
-```
-aftltool info_image_icp --vbmeta_image_path ./vbmeta.img
-```
-
-Verification of an AFTL enhanced vbmeta image can be performed with the
-following command:
-
-```
-aftltool verify_image_icp \
-      --vbmeta_image_path /path/to/image.bin \
-      --transparency_log_pub_keys [TRANSPARENCY_LOG_PUB_KEYS [TRANSPARENCY_LOG_PUB_KEYS ...]] \
-      [--output OUTPUT]
-```
-
-An example using `verify_image_icp` is as follows:
-
-```
-aftltool verify_image_icp --vbmeta_image_path ./vbmeta.img --transparency_log_pub_keys ./log_pub_key.pem
-```
-
-More information on the options can be found using `aftltool --help`.
-
-# Build system integration
-
-AFTL modifications will only work if AVB is enabled in the build. In Android,
-AVB is enabled in an `Android.mk` file by the `BOARD_AVB_ENABLE` variable as
-described in the AVB
-[README.md](https://android.googlesource.com/platform/external/avb/#Build-System-Integration).
-
-When calling the
-[sign\_target\_files\_apks.py](https://android.googlesource.com/platform/build/+/master/tools/releasetools/sign_target_files_apks.py)
-script, the following parameters must be set:
-
-*  `--aftl_tool_path`, the location of aftltool.py
-*  `--aftl_server`, the address of the transparency log
-*  `--aftl_key_path`, which gives the path to the DER encoded transparency log public key
-*  `--aftl_manufacturer_key_path`, which gives the path to the DER encoded OEM
-  private key. Note: This key is different from the signing key used to sign
-  the VBMeta structure.
-
-Remember that the public part of each transparency log key needs to be
-available to the device's bootloader to validate the inclusion proofs.
-
-# Device integration
-
-This section discusses recommendations and best practices for integrating
-`libavb` AFTL support with a device boot loader. It's important to emphasize
-that these are just recommendations. Most of these recommendations are the same
-as those for AVB.
-
-## Recommended bootflow
-
-The boot flow should ensure checking of the inclusion proofs independent of the
-unlock state of the device. It is recommended to present the user with a
-warning in case transparency checks fail.
-
-AFTL modifies this flow in the following manner: as soon as a valid OS has been
-found, search for an `AftlImage` for each VBMeta image and validate their
-inclusion proofs (this is done by the `aftl_slot_verify` function). The result
-of the verification can be appended to the kernel command line for further
-processing by the OS.
-
-![Recommended boot flow for a device using AFTL](../docs/aftl-boot-flow.png)
diff --git a/libavb_aftl/avb_aftl_types.h b/libavb_aftl/avb_aftl_types.h
deleted file mode 100644
index 028ffda..0000000
--- a/libavb_aftl/avb_aftl_types.h
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#ifdef AVB_INSIDE_LIBAVB_AFTL_H
-#error "You can't include avb_aftl_types.h in the public header libavb_aftl.h."
-#endif
-
-#ifndef AVB_COMPILATION
-#error "Never include this file, it may only be used from internal avb code."
-#endif
-
-#ifndef AVB_AFTL_TYPES_H_
-#define AVB_AFTL_TYPES_H_
-
-#include <libavb/libavb.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#define AVB_AFTL_UINT64_MAX 0xfffffffffffffffful
-#define AVB_AFTL_HASH_SIZE 32ul
-#define AVB_AFTL_SIGNATURE_SIZE 512ul
-/* Raw key size used for signature validation. */
-#define AVB_AFTL_PUB_KEY_SIZE 1032ul
-/* Limit AftlImage size to 64KB. */
-#define AVB_AFTL_MAX_AFTL_IMAGE_SIZE 65536ul
-/* Limit version.incremental size to 256 characters. */
-#define AVB_AFTL_MAX_VERSION_INCREMENTAL_SIZE 256ul
-/* AFTL trees require at most 64 hashes to reconstruct the root */
-#define AVB_AFTL_MAX_PROOF_SIZE 64 * AVB_AFTL_HASH_SIZE
-/* Max URL limit. */
-#define AVB_AFTL_MAX_URL_SIZE 2048ul
-/* Minimum valid size for an Annotation leaf. */
-#define AVB_AFTL_MIN_ANNOTATION_SIZE 18ul
-/* Minimum valid size for a TrillianLogRootDescriptor. See the
-   TrillianLogRootDescriptor struct for details. The values here cover:
-   version: sizeof(uint16_t)
-   tree_size: sizeof(uint64_t)
-   root_hash_size: sizeof(uint8_t)
-   root_hash: AVB_AFTL_HASH_SIZE
-   timestamp: sizeof(uint64_t)
-   revision: sizeof(uint64_t)
-   metadata_size: sizeof(uint16_t)
-   metadata is optional, so it's not required for the minimum size. */
-#define AVB_AFTL_MIN_TLRD_SIZE                                \
-  (sizeof(uint16_t) + sizeof(uint64_t) + sizeof(uint8_t) +    \
-   AVB_AFTL_HASH_SIZE + sizeof(uint64_t) + sizeof(uint64_t) + \
-   sizeof(uint16_t))
-/* Minimum valid size for an AftlIcpEntry structure. See the
-   AftlIcpEntry struct for details. The values here cover:
-   log_url_size: sizeof(uint32_t)
-   leaf_index: sizeof(uint64_t)
-   log_root_descriptor_size: sizeof(uint32_t)
-   annotation_leaf_size: sizeof(uint32_t)
-   log_root_sig_size: sizeof(uint32_t)
-   proof_hash_count: sizeof(uint8_t)
-   inc_proof_size: sizeof(uint32_t)
-   log_url: 4 (shortest practical URL)
-   log_root_descriptor: AVB_AFTL_MIN_TLRD_SIZE
-   annotation_leaf: AVB_AFTL_MIN_ANNOTATION_SIZE
-   log_root_signature: AVB_AFTL_SIGNATURE_SIZE
-   proofs: AVB_AFTL_HASH_SIZE as there must be at least one hash. */
-#define AVB_AFTL_MIN_AFTL_ICP_ENTRY_SIZE                                       \
-  (sizeof(uint32_t) + sizeof(uint64_t) + sizeof(uint32_t) + sizeof(uint32_t) + \
-   sizeof(uint32_t) + sizeof(uint8_t) + sizeof(uint32_t) + 4 +                 \
-   AVB_AFTL_MIN_TLRD_SIZE + AVB_AFTL_MIN_ANNOTATION_SIZE +                     \
-   AVB_AFTL_SIGNATURE_SIZE + AVB_AFTL_HASH_SIZE)
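For reference, with the fixed-width integer sizes listed above these minimums work out to 2 + 8 + 1 + 32 + 8 + 8 + 2 = 61 bytes for AVB_AFTL_MIN_TLRD_SIZE, and 4 + 8 + 4 + 4 + 4 + 1 + 4 + 4 + 61 + 18 + 512 + 32 = 656 bytes for AVB_AFTL_MIN_AFTL_ICP_ENTRY_SIZE.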
-/* The maximum AftlIcpEntrySize is the max AftlImage size minus the size
-   of the AftlImageHeader. */
-#define AVB_AFTL_MAX_AFTL_ICP_ENTRY_SIZE \
-  (AVB_AFTL_MAX_AFTL_IMAGE_SIZE - sizeof(AftlImageHeader))
-/* The maximum Annotation size is the max AftlImage size minus the
-   size of the smallest valid AftlIcpEntry. */
-#define AVB_AFTL_MAX_ANNOTATION_SIZE \
-  (AVB_AFTL_MAX_AFTL_IMAGE_SIZE - AVB_AFTL_MIN_AFTL_ICP_ENTRY_SIZE)
-/* The maximum metadata size in a TrillianLogRootDescriptor for AFTL is the
-   max AftlImage size minus the smallest valid AftlIcpEntry size. */
-#define AVB_AFTL_MAX_METADATA_SIZE \
-  (AVB_AFTL_MAX_AFTL_IMAGE_SIZE - AVB_AFTL_MIN_AFTL_ICP_ENTRY_SIZE)
-/* The maximum TrillianLogRootDescriptor is the size of the smallest valid
-TrillianLogRootDescriptor + the largest possible metadata size. */
-#define AVB_AFTL_MAX_TLRD_SIZE \
-  (AVB_AFTL_MIN_TLRD_SIZE + AVB_AFTL_MAX_METADATA_SIZE)
-
-/* Data structure containing a Trillian LogRootDescriptor, from
-   https://github.com/google/trillian/blob/master/trillian.proto#L255
-   The log_root_signature is calculated over this structure. */
-typedef struct TrillianLogRootDescriptor {
-  uint16_t version;
-  uint64_t tree_size;
-  uint8_t root_hash_size;
-  uint8_t* root_hash;
-  uint64_t timestamp;
-  uint64_t revision;
-  uint16_t metadata_size;
-  uint8_t* metadata;
-} TrillianLogRootDescriptor;
-
-typedef enum {
-  AVB_AFTL_HASH_SHA256,
-  _AVB_AFTL_HASH_ALGORITHM_NUM
-} HashAlgorithm;
-
-typedef enum {
-  AVB_AFTL_SIGNATURE_RSA,    // RSA with PKCS1v15
-  AVB_AFTL_SIGNATURE_ECDSA,  // ECDSA with P256 curve
-  _AVB_AFTL_SIGNATURE_ALGORITHM_NUM
-} SignatureAlgorithm;
-
-/* Data structure containing the signature within a leaf of the VBMeta
- * annotation. This signature is made using the manufacturer key which is
- * generally not available at boot time. Therefore, this structure is not
- * verified by the bootloader. */
-typedef struct {
-  uint8_t hash_algorithm;
-  uint8_t signature_algorithm;
-  uint16_t signature_size;
-  uint8_t* signature;
-} Signature;
-
-/* Data structure containing the VBMeta annotation. */
-typedef struct {
-  uint8_t vbmeta_hash_size;
-  uint8_t* vbmeta_hash;
-  uint8_t version_incremental_size;
-  uint8_t* version_incremental;
-  uint8_t manufacturer_key_hash_size;
-  uint8_t* manufacturer_key_hash;
-  uint16_t description_size;
-  uint8_t* description;
-} VBMetaPrimaryAnnotation;
-
-#define AVB_AFTL_VBMETA_LEAF 0
-#define AVB_AFTL_SIGNED_VBMETA_PRIMARY_ANNOTATION_LEAF 1
-
-/* Data structure containing the leaf that is stored in the
-   transparency log. */
-typedef struct {
-  uint8_t version;
-  uint64_t timestamp;
-  uint8_t leaf_type;
-  Signature* signature;
-  VBMetaPrimaryAnnotation* annotation;
-} SignedVBMetaPrimaryAnnotationLeaf;
-
-/* Data structure containing AFTL inclusion proof data from a single
-   transparency log. */
-typedef struct AftlIcpEntry {
-  uint32_t log_url_size;
-  uint64_t leaf_index;
-  uint32_t log_root_descriptor_size;
-  uint32_t annotation_leaf_size;
-  uint16_t log_root_sig_size;
-  uint8_t proof_hash_count;
-  uint32_t inc_proof_size;
-  uint8_t* log_url;
-  TrillianLogRootDescriptor log_root_descriptor;
-  uint8_t* log_root_descriptor_raw;
-  SignedVBMetaPrimaryAnnotationLeaf* annotation_leaf;
-  uint8_t* annotation_leaf_raw;
-  uint8_t* log_root_signature;
-  uint8_t (*proofs)[AVB_AFTL_HASH_SIZE];
-} AftlIcpEntry;
-
-/* Data structure containing AFTL header information. */
-typedef struct AftlImageHeader {
-  uint32_t magic;
-  uint32_t required_icp_version_major;
-  uint32_t required_icp_version_minor;
-  uint32_t image_size; /* Total size of the AftlImage, including this header */
-  uint16_t icp_count;
-} AVB_ATTR_PACKED AftlImageHeader;
-
-/* Main data structure for an AFTL image. */
-typedef struct AftlImage {
-  AftlImageHeader header;
-  AftlIcpEntry** entries;
-} AftlImage;
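For illustration, the fixed-size header above can be unpacked in Python roughly as follows. Big-endian layout is assumed here to match the byte-order conversions used elsewhere in this parser, and the format string is not taken from aftltool itself.

```
import struct

# magic, required_icp_version_major/minor, image_size, icp_count.
AFTL_IMAGE_HEADER_FORMAT = '!4sIIIH'

def parse_aftl_image_header(blob):
  size = struct.calcsize(AFTL_IMAGE_HEADER_FORMAT)
  magic, major, minor, image_size, icp_count = struct.unpack(
      AFTL_IMAGE_HEADER_FORMAT, blob[:size])
  return {'magic': magic,
          'required_icp_version_major': major,
          'required_icp_version_minor': minor,
          'image_size': image_size,
          'icp_count': icp_count}
```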
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* AVB_AFTL_TYPES_H_ */
diff --git a/libavb_aftl/avb_aftl_util.c b/libavb_aftl/avb_aftl_util.c
deleted file mode 100644
index f780e82..0000000
--- a/libavb_aftl/avb_aftl_util.c
+++ /dev/null
@@ -1,932 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include <libavb/avb_crypto.h>
-#include <libavb/avb_rsa.h>
-#include <libavb/avb_sha.h>
-#include <libavb/avb_util.h>
-
-#include "avb_aftl_types.h"
-#include "avb_aftl_util.h"
-#include "avb_aftl_validate.h"
-
-/* Performs a SHA256 hash operation on data. */
-bool avb_aftl_sha256(uint8_t* data,
-                     uint64_t length,
-                     uint8_t hash[AVB_AFTL_HASH_SIZE]) {
-  AvbSHA256Ctx context;
-  uint8_t* tmp;
-
-  if ((data == NULL) && (length != 0)) return false;
-
-  avb_sha256_init(&context);
-  avb_sha256_update(&context, data, length);
-  tmp = avb_sha256_final(&context);
-  avb_memcpy(hash, tmp, AVB_AFTL_HASH_SIZE);
-  return true;
-}
-
-/* Computes a leaf hash as detailed by https://tools.ietf.org/html/rfc6962. */
-bool avb_aftl_rfc6962_hash_leaf(uint8_t* leaf,
-                                uint64_t leaf_size,
-                                uint8_t* hash) {
-  uint8_t* buffer;
-  bool retval;
-
-  avb_assert(leaf != NULL && hash != NULL);
-  avb_assert(leaf_size != AVB_AFTL_UINT64_MAX);
-
-  buffer = (uint8_t*)avb_malloc(leaf_size + 1);
-
-  if (buffer == NULL) {
-    avb_error("Allocation failure in avb_aftl_rfc6962_hash_leaf.\n");
-    return false;
-  }
-  /* Prefix the data with a '0' for 2nd preimage attack resistance. */
-  buffer[0] = 0;
-
-  if (leaf_size > 0) avb_memcpy(buffer + 1, leaf, leaf_size);
-
-  retval = avb_aftl_sha256(buffer, leaf_size + 1, hash);
-  avb_free(buffer);
-  return retval;
-}
-
-/* Computes an inner hash as detailed by https://tools.ietf.org/html/rfc6962. */
-bool avb_aftl_rfc6962_hash_children(uint8_t* left_child,
-                                    uint64_t left_child_size,
-                                    uint8_t* right_child,
-                                    uint64_t right_child_size,
-                                    uint8_t* hash) {
-  uint8_t* buffer;
-  uint64_t data_size;
-  bool retval;
-
-  avb_assert(left_child != NULL && right_child != NULL && hash != NULL);
-
-  /* Check for integer overflow. */
-  avb_assert(left_child_size < AVB_AFTL_UINT64_MAX - right_child_size);
-
-  data_size = left_child_size + right_child_size + 1;
-  buffer = (uint8_t*)avb_malloc(data_size);
-  if (buffer == NULL) {
-    avb_error("Allocation failure in avb_aftl_rfc6962_hash_children.\n");
-    return false;
-  }
-
-  /* Prefix the data with '1' for 2nd preimage attack resistance. */
-  buffer[0] = 1;
-
-  /* Copy the left child data, if it exists. */
-  if (left_child_size > 0) avb_memcpy(buffer + 1, left_child, left_child_size);
-  /* Copy the right child data, if it exists. */
-  if (right_child_size > 0)
-    avb_memcpy(buffer + 1 + left_child_size, right_child, right_child_size);
-
-  /* Hash the concatenated data and clean up. */
-  retval = avb_aftl_sha256(buffer, data_size, hash);
-  avb_free(buffer);
-  return retval;
-}
-
-/* Computes a subtree hash along tree's right border. */
-bool avb_aftl_chain_border_right(uint8_t* seed,
-                                 uint64_t seed_size,
-                                 uint8_t* proof,
-                                 uint32_t proof_entry_count,
-                                 uint8_t* hash) {
-  size_t i;
-  uint8_t* tmp_hash;
-  uint8_t* tmp = seed;
-  bool retval = true;
-
-  avb_assert(seed_size == AVB_AFTL_HASH_SIZE);
-  avb_assert(seed != NULL && proof != NULL && hash != NULL);
-
-  tmp_hash = (uint8_t*)avb_malloc(AVB_AFTL_HASH_SIZE);
-  if (tmp_hash == NULL) {
-    avb_error("Allocation failure in avb_aftl_chain_border_right.\n");
-    return false;
-  }
-  for (i = 0; i < proof_entry_count; i++) {
-    retval = avb_aftl_rfc6962_hash_children(proof + (i * AVB_AFTL_HASH_SIZE),
-                                            AVB_AFTL_HASH_SIZE,
-                                            tmp,
-                                            AVB_AFTL_HASH_SIZE,
-                                            tmp_hash);
-    if (!retval) {
-      avb_error("Failed to hash Merkle tree children.\n");
-      break;
-    }
-    tmp = tmp_hash;
-  }
-
-  if (retval) avb_memcpy(hash, tmp, AVB_AFTL_HASH_SIZE);
-
-  avb_free(tmp_hash);
-  return retval;
-}
-
-/* Computes a subtree hash on or below the tree's right border. */
-bool avb_aftl_chain_inner(uint8_t* seed,
-                          uint64_t seed_size,
-                          uint8_t* proof,
-                          uint32_t proof_entry_count,
-                          uint64_t leaf_index,
-                          uint8_t* hash) {
-  size_t i;
-  uint8_t* tmp_hash;
-  uint8_t* tmp = seed;
-  bool retval = true;
-
-  avb_assert(seed_size == AVB_AFTL_HASH_SIZE);
-  avb_assert(seed != NULL && proof != NULL && hash != NULL);
-
-  tmp_hash = (uint8_t*)avb_malloc(AVB_AFTL_HASH_SIZE);
-  if (tmp_hash == NULL) {
-    avb_error("Allocation failure in avb_aftl_chain_inner.\n");
-    return false;
-  }
-  for (i = 0; i < proof_entry_count; i++) {
-    if ((leaf_index >> i & 1) == 0) {
-      retval = avb_aftl_rfc6962_hash_children(tmp,
-                                              seed_size,
-                                              proof + (i * AVB_AFTL_HASH_SIZE),
-                                              AVB_AFTL_HASH_SIZE,
-                                              tmp_hash);
-    } else {
-      retval = avb_aftl_rfc6962_hash_children(proof + (i * AVB_AFTL_HASH_SIZE),
-                                              AVB_AFTL_HASH_SIZE,
-                                              tmp,
-                                              seed_size,
-                                              tmp_hash);
-    }
-    if (!retval) {
-      avb_error("Failed to hash Merkle tree children.\n");
-      break;
-    }
-    tmp = tmp_hash;
-  }
-  if (retval) avb_memcpy(hash, tmp, AVB_AFTL_HASH_SIZE);
-  avb_free(tmp_hash);
-  return retval;
-}
-
-/* Counts leading zeros. Used in Merkle tree hash validation. */
-unsigned int avb_aftl_count_leading_zeros(uint64_t val) {
-  int r = 0;
-  if (val == 0) return 64;
-  if (!(val & 0xffffffff00000000u)) {
-    val <<= 32;
-    r += 32;
-  }
-  if (!(val & 0xffff000000000000u)) {
-    val <<= 16;
-    r += 16;
-  }
-  if (!(val & 0xff00000000000000u)) {
-    val <<= 8;
-    r += 8;
-  }
-  if (!(val & 0xf000000000000000u)) {
-    val <<= 4;
-    r += 4;
-  }
-  if (!(val & 0xc000000000000000u)) {
-    val <<= 2;
-    r += 2;
-  }
-  if (!(val & 0x8000000000000000u)) {
-    val <<= 1;
-    r += 1;
-  }
-
-  return r;
-}
-
-/* Calculates the expected Merkle tree hash. */
-bool avb_aftl_root_from_icp(uint64_t leaf_index,
-                            uint64_t tree_size,
-                            uint8_t proof[][AVB_AFTL_HASH_SIZE],
-                            uint32_t proof_entry_count,
-                            uint8_t* leaf_hash,
-                            uint64_t leaf_hash_size,
-                            uint8_t* root_hash) {
-  uint64_t inner_proof_size;
-  uint64_t border_proof_size;
-  size_t i;
-  uint8_t hash[AVB_AFTL_HASH_SIZE];
-  uint8_t* inner_proof;
-  uint8_t* border_proof;
-  bool retval;
-
-  avb_assert(proof_entry_count != 0);
-  avb_assert(leaf_hash_size != 0);
-  avb_assert(proof != NULL && leaf_hash != NULL && root_hash != NULL);
-
-  /* This cannot overflow. */
-  inner_proof_size =
-      64 - avb_aftl_count_leading_zeros(leaf_index ^ (tree_size - 1));
-
-  /* Check for integer underflow.*/
-  if ((proof_entry_count - inner_proof_size) > proof_entry_count) {
-    avb_error("Invalid proof entry count value.\n");
-    return false;
-  }
-  border_proof_size = proof_entry_count - inner_proof_size;
-  /* Split the proof into two parts based on the calculated pivot point. */
-  inner_proof = (uint8_t*)avb_malloc(inner_proof_size * AVB_AFTL_HASH_SIZE);
-  if (inner_proof == NULL) {
-    avb_error("Allocation failure in avb_aftl_root_from_icp.\n");
-    return false;
-  }
-  border_proof = (uint8_t*)avb_malloc(border_proof_size * AVB_AFTL_HASH_SIZE);
-  if (border_proof == NULL) {
-    avb_free(inner_proof);
-    avb_error("Allocation failure in avb_aftl_root_from_icp.\n");
-    return false;
-  }
-
-  for (i = 0; i < inner_proof_size; i++) {
-    avb_memcpy(
-        inner_proof + (AVB_AFTL_HASH_SIZE * i), proof[i], AVB_AFTL_HASH_SIZE);
-  }
-  for (i = 0; i < border_proof_size; i++) {
-    avb_memcpy(border_proof + (AVB_AFTL_HASH_SIZE * i),
-               proof[inner_proof_size + i],
-               AVB_AFTL_HASH_SIZE);
-  }
-
-  /* Calculate the root hash and store it in root_hash. */
-  retval = avb_aftl_chain_inner(leaf_hash,
-                                leaf_hash_size,
-                                inner_proof,
-                                inner_proof_size,
-                                leaf_index,
-                                hash);
-  if (retval)
-    retval = avb_aftl_chain_border_right(
-        hash, AVB_AFTL_HASH_SIZE, border_proof, border_proof_size, root_hash);
-
-  if (inner_proof != NULL) avb_free(inner_proof);
-  if (border_proof != NULL) avb_free(border_proof);
-  return retval;
-}
-
-/* Defines helper functions read_u8, read_u16, read_u32 and read_u64. These
- * functions can be used to read from a |data| stream a |value| of a specific
- * size. The value endianness is converted from big-endian to host. We ensure
- * that the read does not overflow beyond |data_end|. If successful, |data| is
- * advanced by the size of the value read.
- */
-#define _read_u(fct)                                   \
-  {                                                    \
-    size_t value_size = sizeof(*value);                \
-    if ((*data + value_size) < *data) return false;    \
-    if ((*data + value_size) > data_end) return false; \
-    avb_memcpy(value, *data, value_size);              \
-    *value = fct(*value);                              \
-    *data += value_size;                               \
-    return true;                                       \
-  }
-static bool read_u8(uint8_t* value, uint8_t** data, uint8_t* data_end) {
-  _read_u();
-}
-AVB_ATTR_WARN_UNUSED_RESULT
-static bool read_u16(uint16_t* value, uint8_t** data, uint8_t* data_end) {
-  _read_u(avb_be16toh);
-}
-AVB_ATTR_WARN_UNUSED_RESULT
-static bool read_u32(uint32_t* value, uint8_t** data, uint8_t* data_end) {
-  _read_u(avb_be32toh);
-}
-AVB_ATTR_WARN_UNUSED_RESULT
-static bool read_u64(uint64_t* value, uint8_t** data, uint8_t* data_end) {
-  _read_u(avb_be64toh);
-}
-AVB_ATTR_WARN_UNUSED_RESULT
-
-/* Allocates |value_size| bytes into |value| and copies |value_size| bytes from
- * |data|. Ensures that we don't overflow beyond |data_end|. It is the caller's
- * responsibility to avb_free |value|. Advances the |data| pointer past the
- * value that has been read. Returns false if an overflow would have occurred
- * or if the allocation failed.
- */
-static bool read_mem(uint8_t** value,
-                     size_t value_size,
-                     uint8_t** data,
-                     uint8_t* data_end) {
-  if (*data + value_size < *data || *data + value_size > data_end) {
-    return false;
-  }
-  *value = (uint8_t*)avb_calloc(value_size);
-  if (!*value) {
-    return false;
-  }
-  avb_memcpy(*value, *data, value_size);
-  *data += value_size;
-  return true;
-}
-
-/* Allocates and populates a TrillianLogRootDescriptor element in an
-   AftlIcpEntry from a binary blob.
-   The blob is expected to be pointing to the beginning of a
-   serialized TrillianLogRootDescriptor element of an AftlIcpEntry.
-   The aftl_blob argument is updated to point to the area after the
-   TrillianLogRootDescriptor. aftl_blob_remaining gives the amount of the
-   aftl_blob that is left to parse. */
-static bool parse_trillian_log_root_descriptor(AftlIcpEntry* icp_entry,
-                                               uint8_t** aftl_blob,
-                                               size_t aftl_blob_remaining) {
-  avb_assert(icp_entry);
-  avb_assert(aftl_blob);
-  uint8_t* blob_end = *aftl_blob + aftl_blob_remaining;
-  if (*aftl_blob > blob_end) {
-    return false;
-  }
-
-  /* Copy in the version field from the blob. */
-  if (!read_u16(
-          &(icp_entry->log_root_descriptor.version), aftl_blob, blob_end)) {
-    avb_error("Unable to parse version.\n");
-    return false;
-  }
-
-  /* Copy in the tree size field from the blob. */
-  if (!read_u64(
-          &(icp_entry->log_root_descriptor.tree_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse tree size.\n");
-    return false;
-  }
-
-  /* Copy in the root hash size field from the blob. */
-  if (!read_u8(&(icp_entry->log_root_descriptor.root_hash_size),
-               aftl_blob,
-               blob_end)) {
-    avb_error("Unable to parse root hash size.\n");
-    return false;
-  }
-  if (icp_entry->log_root_descriptor.root_hash_size != AVB_AFTL_HASH_SIZE) {
-    avb_error("Invalid root hash size.\n");
-    return false;
-  }
-
-  /* Copy in the root hash from the blob. */
-  if (!read_mem(&(icp_entry->log_root_descriptor.root_hash),
-                icp_entry->log_root_descriptor.root_hash_size,
-                aftl_blob,
-                blob_end)) {
-    avb_error("Unable to parse root hash.\n");
-    return false;
-  }
-
-  /* Copy in the timestamp field from the blob. */
-  if (!read_u64(
-          &(icp_entry->log_root_descriptor.timestamp), aftl_blob, blob_end)) {
-    avb_error("Unable to parse timestamp.\n");
-    return false;
-  }
-
-  /* Copy in the revision field from the blob. */
-  if (!read_u64(
-          &(icp_entry->log_root_descriptor.revision), aftl_blob, blob_end)) {
-    avb_error("Unable to parse revision.\n");
-    return false;
-  }
-
-  /* Copy in the metadata size field from the blob. */
-  if (!read_u16(&(icp_entry->log_root_descriptor.metadata_size),
-                aftl_blob,
-                blob_end)) {
-    avb_error("Unable to parse metadata size.\n");
-    return false;
-  }
-
-  if (icp_entry->log_root_descriptor.metadata_size >
-      AVB_AFTL_MAX_METADATA_SIZE) {
-    avb_error("Invalid metadata size.\n");
-    return false;
-  }
-
-  /* If it exists, copy in the metadata field from the blob. */
-  if (icp_entry->log_root_descriptor.metadata_size > 0) {
-    if (!read_mem(&(icp_entry->log_root_descriptor.metadata),
-                  icp_entry->log_root_descriptor.metadata_size,
-                  aftl_blob,
-                  blob_end)) {
-      avb_error("Unable to parse metadata.\n");
-      return false;
-    }
-  } else {
-    icp_entry->log_root_descriptor.metadata = NULL;
-  }
-  return true;
-}
-
-/* Parses a Signature from |aftl_blob| into leaf->signature.
- * Returns false if an error occurred during the parsing */
-static bool parse_signature(SignedVBMetaPrimaryAnnotationLeaf* leaf,
-                            uint8_t** aftl_blob,
-                            uint8_t* blob_end) {
-  Signature* signature = (Signature*)avb_calloc(sizeof(Signature));
-  if (!signature) {
-    avb_error("Failed to allocate signature.\n");
-    return false;
-  }
-  leaf->signature = signature;
-
-  if (!read_u8(&(signature->hash_algorithm), aftl_blob, blob_end)) {
-    avb_error("Unable to parse the hash algorithm.\n");
-    return false;
-  }
-  if (signature->hash_algorithm >= _AVB_AFTL_HASH_ALGORITHM_NUM) {
-    avb_error("Unexpect hash algorithm in leaf signature.\n");
-    return false;
-  }
-
-  if (!read_u8(&(signature->signature_algorithm), aftl_blob, blob_end)) {
-    avb_error("Unable to parse the signature algorithm.\n");
-    return false;
-  }
-  if (signature->signature_algorithm >= _AVB_AFTL_SIGNATURE_ALGORITHM_NUM) {
-    avb_error("Unexpect signature algorithm in leaf signature.\n");
-    return false;
-  }
-
-  if (!read_u16(&(signature->signature_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse the signature size.\n");
-    return false;
-  }
-  if (!read_mem(&(signature->signature),
-                signature->signature_size,
-                aftl_blob,
-                blob_end)) {
-    avb_error("Unable to parse signature.\n");
-    return false;
-  }
-  return true;
-}
-
-/* Parses a VBMetaPrimaryAnnotation from |aftl_blob| into leaf->annotation.
- * Returns false if an error occurred during the parsing */
-static bool parse_annotation(SignedVBMetaPrimaryAnnotationLeaf* leaf,
-                             uint8_t** aftl_blob,
-                             uint8_t* blob_end) {
-  VBMetaPrimaryAnnotation* annotation =
-      (VBMetaPrimaryAnnotation*)avb_calloc(sizeof(VBMetaPrimaryAnnotation));
-  if (!annotation) {
-    avb_error("Failed to allocate annotation.\n");
-    return false;
-  }
-  leaf->annotation = annotation;
-
-  if (!read_u8(&(annotation->vbmeta_hash_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse VBMeta hash size.\n");
-    return false;
-  }
-  if (annotation->vbmeta_hash_size != AVB_AFTL_HASH_SIZE) {
-    avb_error("Unexpected VBMeta hash size.\n");
-    return false;
-  }
-  if (!read_mem(&(annotation->vbmeta_hash),
-                annotation->vbmeta_hash_size,
-                aftl_blob,
-                blob_end)) {
-    avb_error("Unable to parse VBMeta hash.\n");
-    return false;
-  }
-
-  if (!read_u8(&(annotation->version_incremental_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse version incremental size.\n");
-    return false;
-  }
-  if (!read_mem(&(annotation->version_incremental),
-                annotation->version_incremental_size,
-                aftl_blob,
-                blob_end)) {
-    avb_error("Unable to parse version incremental.\n");
-    return false;
-  }
-
-  if (!read_u8(
-          &(annotation->manufacturer_key_hash_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse manufacturer key hash size.\n");
-    return false;
-  }
-  if (!read_mem(&(annotation->manufacturer_key_hash),
-                annotation->manufacturer_key_hash_size,
-                aftl_blob,
-                blob_end)) {
-    avb_error("Unable to parse manufacturer key hash.\n");
-    return false;
-  }
-
-  if (!read_u16(&(annotation->description_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse description size.\n");
-    return false;
-  }
-  if (!read_mem(&(annotation->description),
-                annotation->description_size,
-                aftl_blob,
-                blob_end)) {
-    avb_error("Unable to parse description.\n");
-    return false;
-  }
-  return true;
-}
-
-/* Allocates and populates a SignedVBMetaPrimaryAnnotationLeaf element in an
-   AftlIcpEntry from a binary blob.
-   The blob is expected to be pointing to the beginning of a
-   serialized SignedVBMetaPrimaryAnnotationLeaf element of an AftlIcpEntry.
-   The aftl_blob argument is updated to point to the area after the leaf. */
-static bool parse_annotation_leaf(AftlIcpEntry* icp_entry,
-                                  uint8_t** aftl_blob) {
-  SignedVBMetaPrimaryAnnotationLeaf* leaf;
-  uint8_t* blob_end = *aftl_blob + icp_entry->annotation_leaf_size;
-  if (*aftl_blob > blob_end) {
-    return false;
-  }
-
-  leaf = (SignedVBMetaPrimaryAnnotationLeaf*)avb_calloc(
-      sizeof(SignedVBMetaPrimaryAnnotationLeaf));
-  if (!leaf) {
-    avb_error("Failed to allocate for annotation leaf.\n");
-    return false;
-  }
-  /* The leaf will be free'd within the free_aftl_icp_entry() */
-  icp_entry->annotation_leaf = leaf;
-  if (!read_u8(&(leaf->version), aftl_blob, blob_end)) {
-    avb_error("Unable to parse version.\n");
-    return false;
-  }
-  if (leaf->version != 1) {
-    avb_error("Unexpected leaf version.\n");
-    return false;
-  }
-  if (!read_u64(&(leaf->timestamp), aftl_blob, blob_end)) {
-    avb_error("Unable to parse timestamp.\n");
-    return false;
-  }
-  if (!read_u8(&(leaf->leaf_type), aftl_blob, blob_end)) {
-    avb_error("Unable to parse version.\n");
-    return false;
-  }
-  if (leaf->leaf_type != AVB_AFTL_SIGNED_VBMETA_PRIMARY_ANNOTATION_LEAF) {
-    avb_error("Unexpected leaf type.\n");
-    return false;
-  }
-  if (!parse_signature(leaf, aftl_blob, blob_end)) {
-    avb_error("Unable to parse signature.\n");
-    return false;
-  }
-  if (!parse_annotation(leaf, aftl_blob, blob_end)) {
-    avb_error("Unable to parse annotation.\n");
-    return false;
-  }
-  return true;
-}
-
-/* Allocates and populates an AftlIcpEntry from a binary blob.
-   The blob is expected to be pointing to the beginning of a
-   serialized AftlIcpEntry structure. */
-AftlIcpEntry* parse_icp_entry(uint8_t** aftl_blob, size_t* remaining_size) {
-  AftlIcpEntry* icp_entry;
-  uint8_t* blob_start = *aftl_blob;
-  uint8_t* blob_end = *aftl_blob + *remaining_size;
-  if (*aftl_blob > blob_end) {
-    return NULL;
-  }
-
-  if (*remaining_size < AVB_AFTL_MIN_AFTL_ICP_ENTRY_SIZE) {
-    avb_error("Invalid AftlImage\n");
-    return NULL;
-  }
-
-  icp_entry = (AftlIcpEntry*)avb_calloc(sizeof(AftlIcpEntry));
-  if (!icp_entry) {
-    avb_error("Failure allocating AftlIcpEntry\n");
-    return NULL;
-  }
-
-  /* Copy in the log server URL size field. */
-  if (!read_u32(&(icp_entry->log_url_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse log url size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  if (icp_entry->log_url_size > AVB_AFTL_MAX_URL_SIZE) {
-    avb_error("Invalid log URL size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  /* Copy in the leaf index field. */
-  if (!read_u64(&(icp_entry->leaf_index), aftl_blob, blob_end)) {
-    avb_error("Unable to parse leaf_index.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  /* Copy in the TrillianLogRootDescriptor size field. */
-  if (!read_u32(&(icp_entry->log_root_descriptor_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse log root descriptor size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  if (icp_entry->log_root_descriptor_size < AVB_AFTL_MIN_TLRD_SIZE ||
-      icp_entry->log_root_descriptor_size > AVB_AFTL_MAX_TLRD_SIZE) {
-    avb_error("Invalid TrillianLogRootDescriptor size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-
-  /* Copy in the annotation leaf size field. */
-  if (!read_u32(&(icp_entry->annotation_leaf_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse annotation leaf size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  if (icp_entry->annotation_leaf_size == 0 ||
-      icp_entry->annotation_leaf_size > AVB_AFTL_MAX_ANNOTATION_SIZE) {
-    avb_error("Invalid annotation leaf size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-
-  /* Copy the log root signature size field. */
-  if (!read_u16(&(icp_entry->log_root_sig_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse log root signature size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  if (icp_entry->log_root_sig_size != AVB_AFTL_SIGNATURE_SIZE) {
-    avb_error("Invalid log root signature size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  /* Copy the inclusion proof hash count field. */
-  if (!read_u8(&(icp_entry->proof_hash_count), aftl_blob, blob_end)) {
-    avb_error("Unable to parse proof hash count.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  /* Copy the inclusion proof size field. */
-  if (!read_u32(&(icp_entry->inc_proof_size), aftl_blob, blob_end)) {
-    avb_error("Unable to parse inclusion proof size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  if ((icp_entry->inc_proof_size !=
-       icp_entry->proof_hash_count * AVB_AFTL_HASH_SIZE) ||
-      (icp_entry->inc_proof_size > AVB_AFTL_MAX_PROOF_SIZE)) {
-    avb_error("Invalid inclusion proof size.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  /* Copy in the log server URL from the blob. */
-  if (*aftl_blob + icp_entry->log_url_size < *aftl_blob ||
-      *aftl_blob + icp_entry->log_url_size > blob_end) {
-    avb_error("Invalid AftlImage.\n");
-    avb_free(icp_entry);
-    return NULL;
-  }
-  icp_entry->log_url = (uint8_t*)avb_calloc(icp_entry->log_url_size);
-  if (!icp_entry->log_url) {
-    avb_error("Failure to allocate URL.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  avb_memcpy(icp_entry->log_url, *aftl_blob, icp_entry->log_url_size);
-  *aftl_blob += icp_entry->log_url_size;
-
-  /* Populate the TrillianLogRootDescriptor elements. */
-  if (*aftl_blob + icp_entry->log_root_descriptor_size < *aftl_blob ||
-      *aftl_blob + icp_entry->log_root_descriptor_size > blob_end) {
-    avb_error("Invalid AftlImage.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  icp_entry->log_root_descriptor_raw =
-      (uint8_t*)avb_calloc(icp_entry->log_root_descriptor_size);
-  if (!icp_entry->log_root_descriptor_raw) {
-    avb_error("Failure to allocate log root descriptor.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  avb_memcpy(icp_entry->log_root_descriptor_raw,
-             *aftl_blob,
-             icp_entry->log_root_descriptor_size);
-  if (!parse_trillian_log_root_descriptor(
-          icp_entry, aftl_blob, icp_entry->log_root_descriptor_size)) {
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-
-  /* Populate the annotation leaf. */
-  if (*aftl_blob + icp_entry->annotation_leaf_size < *aftl_blob ||
-      *aftl_blob + icp_entry->annotation_leaf_size > blob_end) {
-    avb_error("Invalid AftlImage.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  icp_entry->annotation_leaf_raw =
-      (uint8_t*)avb_calloc(icp_entry->annotation_leaf_size);
-  if (!icp_entry->annotation_leaf_raw) {
-    avb_error("Failure to allocate annotation leaf.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  avb_memcpy(icp_entry->annotation_leaf_raw,
-             *aftl_blob,
-             icp_entry->annotation_leaf_size);
-  if (!parse_annotation_leaf(icp_entry, aftl_blob)) {
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-
-  /* Allocate and copy the log root signature from the blob. */
-  if (*aftl_blob + icp_entry->log_root_sig_size < *aftl_blob ||
-      *aftl_blob + icp_entry->log_root_sig_size > blob_end) {
-    avb_error("Invalid AftlImage.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  icp_entry->log_root_signature =
-      (uint8_t*)avb_calloc(icp_entry->log_root_sig_size);
-  if (!icp_entry->log_root_signature) {
-    avb_error("Failure to allocate log root signature.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  avb_memcpy(
-      icp_entry->log_root_signature, *aftl_blob, icp_entry->log_root_sig_size);
-  *aftl_blob += icp_entry->log_root_sig_size;
-
-  /* Finally, copy the proof hash data from the blob to the AftlIcpEntry. */
-  if (*aftl_blob + icp_entry->inc_proof_size < *aftl_blob ||
-      *aftl_blob + icp_entry->inc_proof_size > blob_end) {
-    avb_error("Invalid AftlImage.\n");
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  icp_entry->proofs = avb_calloc(icp_entry->inc_proof_size);
-  if (!icp_entry->proofs) {
-    free_aftl_icp_entry(icp_entry);
-    return NULL;
-  }
-  avb_memcpy(icp_entry->proofs, *aftl_blob, icp_entry->inc_proof_size);
-  *aftl_blob += icp_entry->inc_proof_size;
-
-  *remaining_size -= *aftl_blob - blob_start;
-  return icp_entry;
-}
-
-/* Allocate and parse an AftlImage object out of binary data. */
-AftlImage* parse_aftl_image(uint8_t* aftl_blob, size_t aftl_blob_size) {
-  AftlImage* image;
-  AftlImageHeader* image_header;
-  AftlIcpEntry* entry;
-  size_t image_size;
-  size_t i;
-  size_t remaining_size;
-
-  /* Ensure the blob is at least large enough for an AftlImageHeader */
-  if (aftl_blob_size < sizeof(AftlImageHeader)) {
-    avb_error("Invalid image header.\n");
-    return NULL;
-  }
-  image_header = (AftlImageHeader*)aftl_blob;
-  /* Check for the magic value for an AftlImageHeader. */
-  if (image_header->magic != AVB_AFTL_MAGIC) {
-    avb_error("Invalid magic number\n");
-    return NULL;
-  }
-  /* Extract the size out of the header. */
-  image_size = avb_be32toh(image_header->image_size);
-  if (image_size < sizeof(AftlImageHeader) ||
-      image_size > AVB_AFTL_MAX_AFTL_IMAGE_SIZE) {
-    avb_error("Invalid image size.\n");
-    return NULL;
-  }
-  image = (AftlImage*)avb_calloc(sizeof(AftlImage));
-  if (!image) {
-    avb_error("Failed allocation for AftlImage.\n");
-    return NULL;
-  }
-  /* Copy the header bytes directly from the aftl_blob. */
-  avb_memcpy(&(image->header), aftl_blob, sizeof(AftlImageHeader));
-  /* Fix endianness. */
-  image->header.required_icp_version_major =
-      avb_be32toh(image->header.required_icp_version_major);
-  image->header.required_icp_version_minor =
-      avb_be32toh(image->header.required_icp_version_minor);
-  image->header.image_size = avb_be32toh(image->header.image_size);
-  image->header.icp_count = avb_be16toh(image->header.icp_count);
-  /* Allocate memory for the entry array */
-  image->entries = (AftlIcpEntry**)avb_calloc(sizeof(AftlIcpEntry*) *
-                                              image->header.icp_count);
-  if (!image->entries) {
-    avb_error("Failed allocation for AftlIcpEntry array.\n");
-    avb_free(image);
-    return NULL;
-  }
-
-  /* Jump past the header and parse out each AftlIcpEntry. */
-  aftl_blob += sizeof(AftlImageHeader);
-  remaining_size = aftl_blob_size - sizeof(AftlImageHeader);
-  for (i = 0; i < image->header.icp_count && remaining_size > 0; i++) {
-    entry = parse_icp_entry(&aftl_blob, &remaining_size);
-    if (!entry) {
-      free_aftl_image(image);
-      return NULL;
-    }
-    image->entries[i] = entry;
-  }
-
-  return image;
-}
-
-/* Free an AftlIcpEntry and each allocated sub-element. */
-void free_aftl_icp_entry(AftlIcpEntry* icp_entry) {
-  /* Ensure the AftlIcpEntry exists before attempting to free it. */
-  if (icp_entry) {
-    /* Free the log_url and log_root_signature elements if they exist. */
-    if (icp_entry->log_url) avb_free(icp_entry->log_url);
-    if (icp_entry->log_root_signature) avb_free(icp_entry->log_root_signature);
-    /* Free the annotation elements if they exist. */
-    if (icp_entry->annotation_leaf) {
-      if (icp_entry->annotation_leaf->signature) {
-        if (icp_entry->annotation_leaf->signature->signature) {
-          avb_free(icp_entry->annotation_leaf->signature->signature);
-        }
-        avb_free(icp_entry->annotation_leaf->signature);
-      }
-      if (icp_entry->annotation_leaf->annotation) {
-        if (icp_entry->annotation_leaf->annotation->vbmeta_hash)
-          avb_free(icp_entry->annotation_leaf->annotation->vbmeta_hash);
-        if (icp_entry->annotation_leaf->annotation->version_incremental)
-          avb_free(icp_entry->annotation_leaf->annotation->version_incremental);
-        if (icp_entry->annotation_leaf->annotation->manufacturer_key_hash)
-          avb_free(
-              icp_entry->annotation_leaf->annotation->manufacturer_key_hash);
-        if (icp_entry->annotation_leaf->annotation->description)
-          avb_free(icp_entry->annotation_leaf->annotation->description);
-        avb_free(icp_entry->annotation_leaf->annotation);
-      }
-      avb_free(icp_entry->annotation_leaf);
-    }
-    if (icp_entry->annotation_leaf_raw)
-      avb_free(icp_entry->annotation_leaf_raw);
-    /* Free the TrillianLogRoot elements if they exist. */
-    if (icp_entry->log_root_descriptor.metadata)
-      avb_free(icp_entry->log_root_descriptor.metadata);
-    if (icp_entry->log_root_descriptor.root_hash)
-      avb_free(icp_entry->log_root_descriptor.root_hash);
-    if (icp_entry->log_root_descriptor_raw)
-      avb_free(icp_entry->log_root_descriptor_raw);
-    if (icp_entry->proofs) avb_free(icp_entry->proofs);
-    /* Finally, free the AftlIcpEntry. */
-    avb_free(icp_entry);
-  }
-}
-
-/* Free the AftlImage and each allocated sub-element. */
-void free_aftl_image(AftlImage* image) {
-  size_t i;
-
-  /* Ensure the image exists before attempting to free it. */
-  if (!image) {
-    return;
-  }
-  /* Free the entry array. */
-  if (image->entries) {
-    /* Walk through each entry, freeing each one. */
-    for (i = 0; i < image->header.icp_count; i++) {
-      if (image->entries[i]) {
-        free_aftl_icp_entry(image->entries[i]);
-      }
-    }
-    avb_free(image->entries);
-  }
-  avb_free(image);
-}
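A minimal usage sketch for the parse/free pair defined above, assuming a caller
inside internal avb code (AVB_COMPILATION defined, mirroring the includes used
by this file) that already holds a serialized AftlImage blob; the wrapper name
and parameters are illustrative:

    #include "avb_aftl_types.h"
    #include "avb_aftl_util.h"

    /* Parse a serialized AftlImage and report how many ICP entries it holds.
     * Returns false if the blob cannot be parsed. */
    static bool count_icp_entries(uint8_t* aftl_blob,
                                  size_t aftl_blob_size,
                                  uint16_t* out_count) {
      AftlImage* image = parse_aftl_image(aftl_blob, aftl_blob_size);
      if (image == NULL) {
        return false; /* parse_aftl_image logs the specific failure. */
      }
      *out_count = image->header.icp_count;
      /* Every successful parse must be paired with free_aftl_image, which
       * releases each AftlIcpEntry and its sub-allocations. */
      free_aftl_image(image);
      return true;
    }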
diff --git a/libavb_aftl/avb_aftl_util.h b/libavb_aftl/avb_aftl_util.h
deleted file mode 100644
index db222c3..0000000
--- a/libavb_aftl/avb_aftl_util.h
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#ifdef AVB_INSIDE_LIBAVB_AFTL_H
-#error "You can't include avb_aftl_util.h in the public header libavb_aftl.h."
-#endif
-
-#ifndef AVB_COMPILATION
-#error "Never include this file, it may only be used from internal avb code."
-#endif
-
-#ifndef AVB_AFTL_UTIL_H_
-#define AVB_AFTL_UTIL_H_
-
-#include "avb_aftl_types.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#define AVB_AFTL_MAGIC 0x4c544641
-#define avb_aftl_member_size(type, member) sizeof(((type*)0)->member)
-
-/* Performs a SHA256 hash operation on data. */
-bool avb_aftl_sha256(
-    uint8_t* data,                     /* Data to be hashed. */
-    uint64_t length,                   /* Size of data. */
-    uint8_t hash[AVB_AFTL_HASH_SIZE]); /* Resulting SHA256 hash. */
-
-/* Calculates a SHA256 hash of the TrillianLogRootDescriptor in icp_entry. */
-bool avb_aftl_hash_log_root_descriptor(
-    AftlIcpEntry* icp_entry, /* The icp_entry containing the descriptor. */
-    uint8_t* hash);          /* The resulting hash of the descriptor data. */
-
-/* RFC 6962 Hashing function for leaves of a Merkle tree. */
-bool avb_aftl_rfc6962_hash_leaf(
-    uint8_t* leaf,      /* The Merkle tree leaf data to be hashed. */
-    uint64_t leaf_size, /* Size of the leaf data. */
-    uint8_t* hash);     /* Resulting RFC 6962 hash of the leaf data. */
-
-/* Computes an inner hash as detailed by https://tools.ietf.org/html/rfc6962. */
-bool avb_aftl_rfc6962_hash_children(
-    uint8_t* left_child,       /* The left child node data. */
-    uint64_t left_child_size,  /* Size of the left child node data. */
-    uint8_t* right_child,      /* The right child node data. */
-    uint64_t right_child_size, /* Size of the right child node data. */
-    uint8_t
-        hash[AVB_AFTL_HASH_SIZE]); /* Resulting RFC 6962 hash of the children.*/
-
-/* Computes a subtree hash along the left-side tree border. */
-bool avb_aftl_chain_border_right(
-    uint8_t* seed,              /* Data containing the starting hash. */
-    uint64_t seed_size,         /* Size of the starting hash data. */
-    uint8_t* proof,             /* The hashes in the inclusion proof. */
-    uint32_t proof_entry_count, /* Number of inclusion proof entries. */
-    uint8_t* hash);             /* Resulting subtree hash. */
-
-/* Computes a subtree hash on or below the tree's right border. */
-bool avb_aftl_chain_inner(
-    uint8_t* seed,              /* Data containing the starting hash. */
-    uint64_t seed_size,         /* Size of the starting hash data. */
-    uint8_t* proof,             /* The hashes in the inclusion proof. */
-    uint32_t proof_entry_count, /* Number of inclusion proof entries. */
-    uint64_t leaf_index,        /* The current Merkle tree leaf index. */
-    uint8_t* hash);             /* Resulting subtree hash. */
-
-/* Counts leading zeros. Used in Merkle tree hash validation. */
-unsigned int avb_aftl_count_leading_zeros(
-    uint64_t val); /* Value to count leading zeros of. */
-
-/* Calculates the expected Merkle tree hash. */
-bool avb_aftl_root_from_icp(
-    uint64_t leaf_index,                 /* The leaf index in the Merkle tree.*/
-    uint64_t tree_size,                  /* The size of the Merkle tree. */
-    uint8_t proof[][AVB_AFTL_HASH_SIZE], /* Inclusion proof hash data. */
-    uint32_t proof_entry_count,          /* Number of inclusion proof hashes. */
-    uint8_t* leaf_hash,      /* The leaf hash to prove inclusion of. */
-    uint64_t leaf_hash_size, /* Size of the leaf hash. */
-    uint8_t* root_hash);     /* The resulting tree root hash. */
-
-/* Allocates and populates an AftlImage from a binary blob. */
-AftlImage* parse_aftl_image(uint8_t* aftl_blob, size_t aftl_blob_size);
-
-/* Allocates and populates an AftlIcpEntry and all sub-fields from
-   a binary blob. It is assumed that the blob points to an AftlIcpEntry. */
-AftlIcpEntry* parse_icp_entry(uint8_t** aftl_blob, size_t* remaining_size);
-
-/* Frees an AftlIcpEntry and all sub-fields that were previously
-   allocated by a call to parse_icp_entry. */
-void free_aftl_icp_entry(AftlIcpEntry* aftl_icp_entry);
-
-/* Frees an AftlImage and all sub-fields that were previously
-   allocated by a call to parse_aftl_image. */
-void free_aftl_image(AftlImage* image);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* AVB_AFTL_UTIL_H_ */
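A sketch of how the hashing helpers declared above combine to check an
inclusion proof, assuming internal avb code (AVB_COMPILATION defined) where the
raw annotation leaf and the proof hashes are already laid out in a
proof[][AVB_AFTL_HASH_SIZE] array; the wrapper and variable names are
illustrative:

    #include <libavb/avb_util.h>

    #include "avb_aftl_types.h"
    #include "avb_aftl_util.h"

    /* Recompute the Merkle root for |leaf_index| and compare it with the
     * root hash published in the log root descriptor. */
    static bool check_inclusion(uint8_t* leaf_raw,
                                uint64_t leaf_size,
                                uint64_t leaf_index,
                                uint64_t tree_size,
                                uint8_t proof[][AVB_AFTL_HASH_SIZE],
                                uint32_t proof_entry_count,
                                uint8_t expected_root[AVB_AFTL_HASH_SIZE]) {
      uint8_t leaf_hash[AVB_AFTL_HASH_SIZE];
      uint8_t root_hash[AVB_AFTL_HASH_SIZE];

      /* RFC 6962 leaf hash: SHA256(0x00 || leaf). */
      if (!avb_aftl_rfc6962_hash_leaf(leaf_raw, leaf_size, leaf_hash))
        return false;
      /* Chain the proof hashes up to the root of the tree. */
      if (!avb_aftl_root_from_icp(leaf_index,
                                  tree_size,
                                  proof,
                                  proof_entry_count,
                                  leaf_hash,
                                  AVB_AFTL_HASH_SIZE,
                                  root_hash))
        return false;
      return avb_safe_memcmp(root_hash, expected_root, AVB_AFTL_HASH_SIZE) == 0;
    }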
diff --git a/libavb_aftl/avb_aftl_validate.c b/libavb_aftl/avb_aftl_validate.c
deleted file mode 100644
index 2d76b2f..0000000
--- a/libavb_aftl/avb_aftl_validate.c
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include <libavb/avb_crypto.h>
-#include <libavb/avb_rsa.h>
-#include <libavb/avb_sha.h>
-#include <libavb/avb_util.h>
-
-#include "avb_aftl_types.h"
-#include "avb_aftl_util.h"
-#include "avb_aftl_validate.h"
-
-/* Verifies that the logged VBMeta hash matches the one on device. */
-bool avb_aftl_verify_vbmeta_hash(uint8_t* vbmeta,
-                                 size_t vbmeta_size,
-                                 AftlIcpEntry* icp_entry) {
-  uint8_t vbmeta_hash[AVB_AFTL_HASH_SIZE];
-
-  avb_assert(vbmeta != NULL && icp_entry != NULL);
-  if (!avb_aftl_sha256(vbmeta, vbmeta_size, vbmeta_hash)) return false;
-
-  /* Only SHA256 hashes are currently supported. If the vbmeta hash
-     size is not AVB_AFTL_HASH_SIZE, return false. */
-  if (icp_entry->annotation_leaf->annotation->vbmeta_hash_size !=
-      AVB_AFTL_HASH_SIZE) {
-    avb_error("Invalid VBMeta hash size.\n");
-    return false;
-  }
-
-  /* Return whether the calculated VBMeta hash matches the stored one. */
-  return avb_safe_memcmp(vbmeta_hash,
-                         icp_entry->annotation_leaf->annotation->vbmeta_hash,
-                         AVB_AFTL_HASH_SIZE) == 0;
-}
-
-/* Verifies the Merkle tree root hash. */
-bool avb_aftl_verify_icp_root_hash(AftlIcpEntry* icp_entry) {
-  uint8_t leaf_hash[AVB_AFTL_HASH_SIZE];
-  uint8_t result_hash[AVB_AFTL_HASH_SIZE];
-
-  avb_assert(icp_entry != NULL);
-  /* Calculate the RFC 6962 hash of the seed entry. */
-  if (!avb_aftl_rfc6962_hash_leaf(icp_entry->annotation_leaf_raw,
-                                  icp_entry->annotation_leaf_size,
-                                  leaf_hash)) {
-    return false;
-  }
-  /* Calculate the Merkle tree's root hash. */
-  if (!avb_aftl_root_from_icp(icp_entry->leaf_index,
-                              icp_entry->log_root_descriptor.tree_size,
-                              icp_entry->proofs,
-                              icp_entry->proof_hash_count,
-                              leaf_hash,
-                              AVB_AFTL_HASH_SIZE,
-                              result_hash))
-    return false;
-
-  /* Return whether the calculated root hash matches the stored one. */
-  return (avb_safe_memcmp(result_hash,
-                          icp_entry->log_root_descriptor.root_hash,
-                          AVB_AFTL_HASH_SIZE) == 0);
-}
-
-/* Verifies the log root signature for the transparency log submission. */
-bool avb_aftl_verify_entry_signature(const uint8_t* key,
-                                     size_t key_num_bytes,
-                                     AftlIcpEntry* icp_entry) {
-  uint8_t* sig;
-  size_t sig_num_bytes;
-  uint8_t log_root_hash[AVB_AFTL_HASH_SIZE];
-  size_t log_root_hash_num_bytes;
-  const AvbAlgorithmData* algorithm_data;
-
-  avb_assert(key != NULL && icp_entry != NULL);
-
-  /* Extract the log root signature from the AftlIcpEntry. */
-  sig = icp_entry->log_root_signature;
-  if (sig == NULL) {
-    avb_error("Invalid log root signature.\n");
-    return false;
-  }
-  sig_num_bytes = icp_entry->log_root_sig_size;
-  log_root_hash_num_bytes = AVB_AFTL_HASH_SIZE;
-
-  /* Calculate the SHA256 of the TrillianLogRootDescriptor. */
-  if (!avb_aftl_sha256(icp_entry->log_root_descriptor_raw,
-                       icp_entry->log_root_descriptor_size,
-                       log_root_hash))
-    return false;
-
-  /* algorithm_data is used to calculate the padding for signature verification.
-   */
-  algorithm_data = avb_get_algorithm_data(AVB_ALGORITHM_TYPE_SHA256_RSA4096);
-  if (algorithm_data == NULL) {
-    avb_error("Failed to get algorithm data.\n");
-    return false;
-  }
-
-  return avb_rsa_verify(key,
-                        key_num_bytes,
-                        sig,
-                        sig_num_bytes,
-                        log_root_hash,
-                        log_root_hash_num_bytes,
-                        algorithm_data->padding,
-                        algorithm_data->padding_len);
-}
diff --git a/libavb_aftl/avb_aftl_validate.h b/libavb_aftl/avb_aftl_validate.h
deleted file mode 100644
index 4a1ecc8..0000000
--- a/libavb_aftl/avb_aftl_validate.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#if !defined(AVB_INSIDE_LIBAVB_AFTL_H) && !defined(AVB_COMPILATION)
-#error "Never include this file directly, include libavb_aftl/libavb_aftl.h."
-#endif
-
-#ifndef AVB_AFTL_VALIDATE_H_
-#define AVB_AFTL_VALIDATE_H_
-
-#include <libavb/libavb.h>
-#include "avb_aftl_types.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/* Verifies that the logged vbmeta hash matches the one on device. */
-bool avb_aftl_verify_vbmeta_hash(
-    uint8_t* vbmeta,          /* Buffer containing the vbmeta data. */
-    size_t vbmeta_size,       /* Size of the vbmeta buffer. */
-    AftlIcpEntry* icp_entry); /* Pointer to the AftlIcpEntry to verify. */
-
-/* Verifies the Merkle tree root hash. */
-bool avb_aftl_verify_icp_root_hash(
-    AftlIcpEntry* icp_entry); /* Pointer to the AftlIcpEntry to verify. */
-
-/* Verifies the log root signature for the transparency log submission. */
-bool avb_aftl_verify_entry_signature(
-    const uint8_t* key,       /* Transparency log public key data. */
-    size_t key_num_bytes,     /* Size of the key data. */
-    AftlIcpEntry* icp_entry); /* Pointer to the AftlIcpEntry to verify. */
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* AVB_AFTL_VALIDATE_H_ */
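A sketch of the per-entry check implied by the three declarations above,
applied to a single AftlIcpEntry taken from a parsed AftlImage. The wrapper is
illustrative and, as with the other internal headers, assumes it is built as
libavb_aftl code (AVB_COMPILATION defined):

    #include <libavb/libavb.h>

    #include "avb_aftl_types.h"
    #include "avb_aftl_validate.h"

    /* Run the three verification steps against one inclusion proof entry. */
    static bool verify_one_entry(uint8_t* vbmeta,
                                 size_t vbmeta_size,
                                 const uint8_t* log_key,
                                 size_t log_key_size,
                                 AftlIcpEntry* entry) {
      /* 1. The logged annotation must cover this exact vbmeta image. */
      if (!avb_aftl_verify_vbmeta_hash(vbmeta, vbmeta_size, entry)) {
        return false;
      }
      /* 2. The inclusion proof must chain up to the published root hash. */
      if (!avb_aftl_verify_icp_root_hash(entry)) {
        return false;
      }
      /* 3. The log root must be signed by the transparency log key. */
      return avb_aftl_verify_entry_signature(log_key, log_key_size, entry);
    }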
diff --git a/libavb_aftl/avb_aftl_verify.c b/libavb_aftl/avb_aftl_verify.c
deleted file mode 100644
index b35e417..0000000
--- a/libavb_aftl/avb_aftl_verify.c
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include "libavb_aftl/avb_aftl_verify.h"
-
-#include <libavb/avb_cmdline.h>
-#include <libavb/avb_slot_verify.h>
-#include <libavb/avb_util.h>
-
-#include "libavb_aftl/avb_aftl_types.h"
-#include "libavb_aftl/avb_aftl_util.h"
-#include "libavb_aftl/avb_aftl_validate.h"
-
-/* Read the vbmeta partition, after the AvbVBMetaImageHeader structure, to find
- * the AftlImage.
- */
-static AftlSlotVerifyResult avb_aftl_find_aftl_image(AvbOps* ops,
-                                                     const char* part_name,
-                                                     size_t vbmeta_size,
-                                                     uint8_t* out_image_buf,
-                                                     size_t* out_image_size) {
-  AvbIOResult io_ret;
-
-  avb_assert(vbmeta_size <= AVB_AFTL_MAX_AFTL_IMAGE_SIZE);
-  io_ret = ops->read_from_partition(ops,
-                                    part_name,
-                                    vbmeta_size /* offset */,
-                                    AVB_AFTL_MAX_AFTL_IMAGE_SIZE - vbmeta_size,
-                                    out_image_buf,
-                                    out_image_size);
-  switch (io_ret) {
-    case AVB_IO_RESULT_OK:
-      break;
-    case AVB_IO_RESULT_ERROR_OOM:
-      return AFTL_SLOT_VERIFY_RESULT_ERROR_OOM;
-    case AVB_IO_RESULT_ERROR_RANGE_OUTSIDE_PARTITION:
-    case AVB_IO_RESULT_ERROR_NO_SUCH_PARTITION:
-      return AFTL_SLOT_VERIFY_RESULT_ERROR_IMAGE_NOT_FOUND;
-    default:
-      avb_errorv(
-          part_name, ": Error loading AftlImage from partition.\n", NULL);
-      return AFTL_SLOT_VERIFY_RESULT_ERROR_IO;
-  }
-
-  if (*out_image_size < 4 || (out_image_buf[0] != 'A') ||
-      (out_image_buf[1] != 'F') || (out_image_buf[2] != 'T') ||
-      (out_image_buf[3] != 'L')) {
-    avb_errorv(part_name, ": Unexpected AftlImage magic.\n", NULL);
-    return AFTL_SLOT_VERIFY_RESULT_ERROR_IMAGE_NOT_FOUND;
-  }
-
-  return AFTL_SLOT_VERIFY_RESULT_OK;
-}
-
-/* Performs the three validation steps for an AFTL image:
-   1. Ensure the vbmeta image hash matches that in the image.
-   2. Ensure the root hash of the Merkle tree matches that in the image.
-   3. Verify the signature using the transparency log public key.
-*/
-static AftlSlotVerifyResult avb_aftl_verify_image(uint8_t* cur_vbmeta_data,
-                                                  size_t cur_vbmeta_size,
-                                                  uint8_t* aftl_blob,
-                                                  size_t aftl_size,
-                                                  uint8_t* key_bytes,
-                                                  size_t key_num_bytes) {
-  size_t i;
-  AftlImage* image;
-  AftlSlotVerifyResult result = AFTL_SLOT_VERIFY_RESULT_ERROR_VERIFICATION;
-
-  /* Attempt to parse the AftlImage pointed to by aftl_blob. */
-  image = parse_aftl_image(aftl_blob, aftl_size);
-  if (!image) {
-    return AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_IMAGE;
-  }
-
-  /* Now that a valid AftlImage has been parsed, attempt to verify
-     the inclusion proof(s) in three steps. */
-  for (i = 0; i < image->header.icp_count; i++) {
-    /* 1. Ensure that the vbmeta hash stored in the AftlIcpEntry matches
-       the one that represents the partition. */
-    if (!avb_aftl_verify_vbmeta_hash(
-            cur_vbmeta_data, cur_vbmeta_size, image->entries[i])) {
-      avb_error("AFTL vbmeta hash verification failed.\n");
-      result = AFTL_SLOT_VERIFY_RESULT_ERROR_VBMETA_HASH_MISMATCH;
-      break;
-    }
-    /* 2. Ensure that the root hash of the Merkle tree representing
-       the transparency log entry matches the one stored in the
-       AftlIcpEntry. */
-    if (!avb_aftl_verify_icp_root_hash(image->entries[i])) {
-      avb_error("AFTL root hash verification failed.\n");
-      result = AFTL_SLOT_VERIFY_RESULT_ERROR_TREE_HASH_MISMATCH;
-      break;
-    }
-    /* 3. Verify the signature using the transparency log public
-       key stored on device. */
-    if (!avb_aftl_verify_entry_signature(
-            key_bytes, key_num_bytes, image->entries[i])) {
-      avb_error("AFTL signature verification failed on entry.\n");
-      result = AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_PROOF_SIGNATURE;
-      break;
-    }
-    result = AFTL_SLOT_VERIFY_RESULT_OK;
-  }
-  free_aftl_image(image);
-  return result;
-}
-
-AftlSlotVerifyResult aftl_slot_verify(AvbOps* ops,
-                                      AvbSlotVerifyData* slot_verify_data,
-                                      uint8_t* key_bytes,
-                                      size_t key_size) {
-  size_t i;
-  size_t aftl_image_size;
-  size_t vbmeta_size;
-  uint8_t* current_aftl_blob;
-  char part_name[AVB_PART_NAME_MAX_SIZE];
-  char* pname;
-  AftlSlotVerifyResult ret = AFTL_SLOT_VERIFY_RESULT_ERROR_VERIFICATION;
-
-  avb_assert(slot_verify_data != NULL);
-  avb_assert(key_bytes != NULL);
-  avb_assert(key_size == AVB_AFTL_PUB_KEY_SIZE);
-  if (slot_verify_data->vbmeta_images == NULL) {
-    return AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_ARGUMENT;
-  }
-
-  current_aftl_blob = avb_malloc(AVB_AFTL_MAX_AFTL_IMAGE_SIZE);
-  if (current_aftl_blob == NULL) {
-    return AFTL_SLOT_VERIFY_RESULT_ERROR_OOM;
-  }
-
-  /* Walk through each vbmeta blob in the AvbSlotVerifyData struct. */
-  for (i = 0; i < slot_verify_data->num_vbmeta_images; i++) {
-    /* Rebuild partition name, appending the suffix */
-    pname = slot_verify_data->vbmeta_images[i].partition_name;
-    if (!avb_str_concat(part_name,
-                        sizeof part_name,
-                        (const char*)pname,
-                        avb_strlen(pname),
-                        slot_verify_data->ab_suffix,
-                        avb_strlen(slot_verify_data->ab_suffix))) {
-      avb_error("Partition name and suffix does not fit.\n");
-      ret = AFTL_SLOT_VERIFY_RESULT_ERROR_VERIFICATION;
-      break;
-    }
-
-    /* Use the partition info to find the AftlImage */
-    vbmeta_size = slot_verify_data->vbmeta_images[i].vbmeta_size;
-    ret = avb_aftl_find_aftl_image(
-        ops, part_name, vbmeta_size, current_aftl_blob, &aftl_image_size);
-    if (ret != AFTL_SLOT_VERIFY_RESULT_OK) {
-      avb_errorv(part_name, ": Unable to find the AftlImage.\n", NULL);
-      break;
-    }
-
-    /* Validate the AFTL image in the vbmeta image. */
-    ret = avb_aftl_verify_image(slot_verify_data->vbmeta_images[i].vbmeta_data,
-                                vbmeta_size,
-                                current_aftl_blob,
-                                aftl_image_size,
-                                key_bytes,
-                                key_size);
-    if (ret != AFTL_SLOT_VERIFY_RESULT_OK) break;
-  }
-
-  avb_free(current_aftl_blob);
-  return ret;
-}
diff --git a/libavb_aftl/avb_aftl_verify.h b/libavb_aftl/avb_aftl_verify.h
deleted file mode 100644
index 7c6799f..0000000
--- a/libavb_aftl/avb_aftl_verify.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#if !defined(AVB_INSIDE_LIBAVB_AFTL_H) && !defined(AVB_COMPILATION)
-#error "Never include this file directly, include libavb_aftl.h instead."
-#endif
-
-#ifndef AVB_AFTL_VERIFY_H_
-#define AVB_AFTL_VERIFY_H_
-
-#include <libavb/libavb.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-typedef enum {
-  // When the verification succeeded.
-  AFTL_SLOT_VERIFY_RESULT_OK,
-
-  // Returned if a memory allocation failed at some point during verification.
-  // This could be the case when handling a large number of log keys or
-  // inclusion proofs.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_OOM,
-
-  // Returned if some device could not be accessed during verification. This
-  // can be the case when reading the AftlImage from the partition.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_IO,
-
-  // The VBMeta hash in the inclusion proof does not match the VBMeta image
-  // hash.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_VBMETA_HASH_MISMATCH,
-
-  // The root hash of the reconstructed tree does not match the value contained
-  // in the inclusion proof.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_TREE_HASH_MISMATCH,
-
-  // The inclusion proof signature cannot be verified by the given key.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_PROOF_SIGNATURE,
-
-  // A generic error occurred during the verification.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_VERIFICATION,
-
-  // At least one of the VBMetas did not have an AftlImage attached.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_IMAGE_NOT_FOUND,
-
-  // Some content of one of the AftlImages was found to be corrupted.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_IMAGE,
-
-  // Returned if the caller passed invalid parameters, for example if the prior
-  // call to avb_slot_verify failed.
-  AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_ARGUMENT
-
-} AftlSlotVerifyResult;
-
-/* The entry point of AFTL validation. It uses the AvbSlotVerifyData structure,
- * |slot_verify_data|, generated by a prior call to the avb_slot_verify
- * function, and a transparency log key to validate the inclusion proof(s)
- * attached to each VBMeta image.
- *
- * The caller is responsible for ensuring that the previous call to
- * avb_slot_verify succeeded. If |slot_verify_data| is incomplete or NULL,
- * AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_ARGUMENT will be returned.
- *
- * The AftlImage structure is located after the VBMetaImage structure. Uses
- * |ops| to read the partition where the VBMeta was loaded from.
- *
- * For each inclusion proof found, the following three validation steps are
- * performed:
- *   1. Match the VBMeta image hash with the hash in the tree leaf.
- *   2. Match the root hash of the Merkle tree with the hash in the proof.
- *   3. Verify the signature of the proof using the transparency log public key.
- * See the definition of AftlSlotVerifyResult for all the possible return
- * values.
- */
-
-AftlSlotVerifyResult aftl_slot_verify(AvbOps* ops,
-                                      AvbSlotVerifyData* slot_verify_data,
-                                      uint8_t* key_bytes,
-                                      size_t key_size);
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* AVB_AFTL_VERIFY_H_ */
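A sketch of the caller flow described in the comment above: run avb_slot_verify
first, then pass its result to aftl_slot_verify together with the transparency
log public key. The wrapper, its parameters, and the chosen flags/error mode
are illustrative, and the key buffer is assumed to be the size the
implementation expects (AVB_AFTL_PUB_KEY_SIZE):

    #include <libavb/libavb.h>

    #include "libavb_aftl/libavb_aftl.h"

    /* Returns true only if both AVB slot verification and AFTL inclusion-proof
     * verification succeed. */
    static bool boot_verify_with_aftl(AvbOps* ops,
                                      const char* const* requested_partitions,
                                      const char* ab_suffix,
                                      uint8_t* log_key,
                                      size_t log_key_size) {
      AvbSlotVerifyData* data = NULL;
      AvbSlotVerifyResult avb_ret;
      AftlSlotVerifyResult aftl_ret;

      avb_ret = avb_slot_verify(ops,
                                requested_partitions,
                                ab_suffix,
                                AVB_SLOT_VERIFY_FLAGS_NONE,
                                AVB_HASHTREE_ERROR_MODE_RESTART_AND_INVALIDATE,
                                &data);
      if (avb_ret != AVB_SLOT_VERIFY_RESULT_OK || data == NULL) return false;

      /* aftl_slot_verify requires a successful avb_slot_verify result. */
      aftl_ret = aftl_slot_verify(ops, data, log_key, log_key_size);
      avb_slot_verify_data_free(data);
      return aftl_ret == AFTL_SLOT_VERIFY_RESULT_OK;
    }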
diff --git a/libavb_aftl/avb_ops_aftl.h b/libavb_aftl/avb_ops_aftl.h
deleted file mode 100644
index a2f5577..0000000
--- a/libavb_aftl/avb_ops_aftl.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#if !defined(AVB_INSIDE_LIBAVB_AFTL_H) && !defined(AVB_COMPILATION)
-#error "Never include this file directly, include libavb_aftl/libavb_aftl.h."
-#endif
-
-#ifndef AVB_AFTL_OPS_H_
-#define AVB_AFTL_OPS_H_
-
-#include <libavb/libavb.h>
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-struct AvbAftlOps;
-typedef struct AvbAftlOps AvbAftlOps;
-
-/* An extension to AvbOps required by the new AFTL validation flow.
-   TODO(danielaustin): update the AFTL readme link once it is submitted.  */
-struct AvbAftlOps {
-  /* Operations from libavb. */
-  AvbOps* ops;
-};
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* AVB_AFTL_OPS_H_ */
diff --git a/libavb_aftl/libavb_aftl.h b/libavb_aftl/libavb_aftl.h
deleted file mode 100644
index dbf0146..0000000
--- a/libavb_aftl/libavb_aftl.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#ifndef LIBAVB_AFTL_H_
-#define LIBAVB_AFTL_H_
-
-
-/* The AVB_INSIDE_LIBAVB_AFTL_H preprocessor symbol is used to ensure that
- * library users include only this file. All public interfaces, and only
- * public interfaces, must be included here.
- */
-
-#define AVB_INSIDE_LIBAVB_AFTL_H
-#include "avb_aftl_verify.h"
-#undef AVB_INSIDE_LIBAVB_AFTL_H
-
-#endif /* LIBAVB_AFTL_H_ */
diff --git a/proto/README.md b/proto/README.md
deleted file mode 100644
index ee64b01..0000000
--- a/proto/README.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# Android Firmware Transparency Log Proto Definitions
----
-
-This directory contains the proto definitions required to communicate with an
-AFTL server. The original repos and purpose for each proto file are as
-follows:
-
-* api.proto
-   Contains the messages to communicate with the AFTL personality.
-* crypto/keyspb/keyspb.proto
-   From https://github.com/google/trillian
-   Dependency of trillian.proto
-   Contains the PublicKey message definition used by Tree.
-* crypto/sigpb/sigpb.proto
-   From https://github.com/google/trillian
-   Dependency of trillian.proto
-   For trillian.proto, contains the DigitallySigned message used by Tree and
-   SignedEntryTimestamp.
-* trillian.proto
-   From https://github.com/google/trillian
-   Dependency of api.proto
-   For api.proto, contains message definitions for SignedLogRoot.
diff --git a/proto/__init__.py b/proto/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/proto/__init__.py
+++ /dev/null
diff --git a/proto/api.proto b/proto/api.proto
deleted file mode 100644
index e22ae47..0000000
--- a/proto/api.proto
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright 2019-2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package aftl;
-option go_package = "proto";
-
-import "trillian.proto";
-
-message InclusionProof {
-  trillian.Proof proof = 1;
-  trillian.SignedLogRoot sth = 2;
-}
-
-message AddVBMetaRequest {
-  // VBMeta structure as described in
-  // https://android.googlesource.com/platform/external/avb/+/master/README.md.
-  // In case of chained partitions, each VBMeta is added via a separate call.
-  // The default size for gRPC payload is about 4MB. We expect vbmeta to be
-  // in the order of 64kB.
-  bytes vbmeta = 1;
-
-  // Serialized SignedVBMetaPrimaryAnnotation. This annotation contains the hash
-  // of the vbmeta structure. It is signed using the manufacturer key.
-  // See types/types.go.
-  bytes signed_vbmeta_primary_annotation = 2;
-}
-
-message AddVBMetaResponse {
-  // Inclusion proof and the leaf that was added to the log, which contains
-  // the annotation on VBMeta.
-  // It is required to have the complete leaf to validate the inclusion proof.
-  // For on-device verification, only these first 2 fields are required to
-  // validate the inclusion.
-  InclusionProof annotation_proof = 1;
-  bytes          annotation_leaf = 2;
-
-  // Inclusion proof and leaf that was added to the log, which contains the full
-  // vbmeta partition.
-  // These fields are NOT required for validation but can still be recorded by a
-  // vendor to prove that the complete VBMeta was submitted.
-  InclusionProof vbmeta_proof = 3;
-  bytes          vbmeta_leaf = 4;
-}
-
-message AnnotateVBMetaWithBuildRequest {
-  // Serialized SignedVBMetaBuildAnnotation.  This annotation contains the hash
-  // of the full build image. See types/types.go.
-  bytes signed_vbmeta_build_annotation = 1;
-
-  // Bytes of the binary image. The hash value of the concatenation of these
-  // chunks is contained in SignedVBMetaBuildAnnotation.
-  // This is ignored if origin_url is set in any of the requests.
-  bytes image_chunk = 2;
-
-  // Origin location of image. It is used to get a copy of the binary image
-  // from another server (e.g., Google Cloud Storage).
-  string origin_url = 3;
-}
-
-message AnnotateVBMetaWithBuildResponse {
-  // Inclusion proof and leaf for the firmware image. The leaf contains the URL
-  // where the image was stored.
-  // It is not required for vendors to keep this information. However, this can
-  // be used for their records to ensure the correctness of the log.
-  InclusionProof  annotation_proof = 1;
-  bytes           annotation_leaf = 2;
-}
-
-service AFTLog {
-
-  // Insert a new VBMeta structure into the log.
-  // This request will effectively create 2 log entries:
-  //  - VBMeta itself
-  //  - Vendor annotations, which includes a reference to the VBMeta.
-  rpc AddVBMeta(AddVBMetaRequest) returns (AddVBMetaResponse) {}
-
-  // Upload (or copy) the complete firmware image.
-  rpc AnnotateVBMetaWithBuild(stream AnnotateVBMetaWithBuildResponse) returns (AnnotateVBMetaWithBuildResponse) {}
-
-  // TODO(tweek): GetProofByHash, GetSthConsistency, GetEntries, GetRootKeys
-}
diff --git a/proto/api_pb2.py b/proto/api_pb2.py
deleted file mode 100644
index fbe3bab..0000000
--- a/proto/api_pb2.py
+++ /dev/null
@@ -1,323 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: api.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-import trillian_pb2 as trillian__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='api.proto',
-  package='aftl',
-  syntax='proto3',
-  serialized_options=_b('Z\005proto'),
-  serialized_pb=_b('\n\tapi.proto\x12\x04\x61\x66tl\x1a\x0etrillian.proto\"V\n\x0eInclusionProof\x12\x1e\n\x05proof\x18\x01 \x01(\x0b\x32\x0f.trillian.Proof\x12$\n\x03sth\x18\x02 \x01(\x0b\x32\x17.trillian.SignedLogRoot\"L\n\x10\x41\x64\x64VBMetaRequest\x12\x0e\n\x06vbmeta\x18\x01 \x01(\x0c\x12(\n signed_vbmeta_primary_annotation\x18\x02 \x01(\x0c\"\x9d\x01\n\x11\x41\x64\x64VBMetaResponse\x12.\n\x10\x61nnotation_proof\x18\x01 \x01(\x0b\x32\x14.aftl.InclusionProof\x12\x17\n\x0f\x61nnotation_leaf\x18\x02 \x01(\x0c\x12*\n\x0cvbmeta_proof\x18\x03 \x01(\x0b\x32\x14.aftl.InclusionProof\x12\x13\n\x0bvbmeta_leaf\x18\x04 \x01(\x0c\"q\n\x1e\x41nnotateVBMetaWithBuildRequest\x12&\n\x1esigned_vbmeta_build_annotation\x18\x01 \x01(\x0c\x12\x13\n\x0bimage_chunk\x18\x02 \x01(\x0c\x12\x12\n\norigin_url\x18\x03 \x01(\t\"j\n\x1f\x41nnotateVBMetaWithBuildResponse\x12.\n\x10\x61nnotation_proof\x18\x01 \x01(\x0b\x32\x14.aftl.InclusionProof\x12\x17\n\x0f\x61nnotation_leaf\x18\x02 \x01(\x0c\x32\xb5\x01\n\x06\x41\x46TLog\x12>\n\tAddVBMeta\x12\x16.aftl.AddVBMetaRequest\x1a\x17.aftl.AddVBMetaResponse\"\x00\x12k\n\x17\x41nnotateVBMetaWithBuild\x12%.aftl.AnnotateVBMetaWithBuildResponse\x1a%.aftl.AnnotateVBMetaWithBuildResponse\"\x00(\x01\x42\x07Z\x05protob\x06proto3')
-  ,
-  dependencies=[trillian__pb2.DESCRIPTOR,])
-
-
-
-
-_INCLUSIONPROOF = _descriptor.Descriptor(
-  name='InclusionProof',
-  full_name='aftl.InclusionProof',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='proof', full_name='aftl.InclusionProof.proof', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='sth', full_name='aftl.InclusionProof.sth', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=35,
-  serialized_end=121,
-)
-
-
-_ADDVBMETAREQUEST = _descriptor.Descriptor(
-  name='AddVBMetaRequest',
-  full_name='aftl.AddVBMetaRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='vbmeta', full_name='aftl.AddVBMetaRequest.vbmeta', index=0,
-      number=1, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='signed_vbmeta_primary_annotation', full_name='aftl.AddVBMetaRequest.signed_vbmeta_primary_annotation', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=123,
-  serialized_end=199,
-)
-
-
-_ADDVBMETARESPONSE = _descriptor.Descriptor(
-  name='AddVBMetaResponse',
-  full_name='aftl.AddVBMetaResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='annotation_proof', full_name='aftl.AddVBMetaResponse.annotation_proof', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='annotation_leaf', full_name='aftl.AddVBMetaResponse.annotation_leaf', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='vbmeta_proof', full_name='aftl.AddVBMetaResponse.vbmeta_proof', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='vbmeta_leaf', full_name='aftl.AddVBMetaResponse.vbmeta_leaf', index=3,
-      number=4, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=202,
-  serialized_end=359,
-)
-
-
-_ANNOTATEVBMETAWITHBUILDREQUEST = _descriptor.Descriptor(
-  name='AnnotateVBMetaWithBuildRequest',
-  full_name='aftl.AnnotateVBMetaWithBuildRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='signed_vbmeta_build_annotation', full_name='aftl.AnnotateVBMetaWithBuildRequest.signed_vbmeta_build_annotation', index=0,
-      number=1, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='image_chunk', full_name='aftl.AnnotateVBMetaWithBuildRequest.image_chunk', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='origin_url', full_name='aftl.AnnotateVBMetaWithBuildRequest.origin_url', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=361,
-  serialized_end=474,
-)
-
-
-_ANNOTATEVBMETAWITHBUILDRESPONSE = _descriptor.Descriptor(
-  name='AnnotateVBMetaWithBuildResponse',
-  full_name='aftl.AnnotateVBMetaWithBuildResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='annotation_proof', full_name='aftl.AnnotateVBMetaWithBuildResponse.annotation_proof', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='annotation_leaf', full_name='aftl.AnnotateVBMetaWithBuildResponse.annotation_leaf', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=476,
-  serialized_end=582,
-)
-
-_INCLUSIONPROOF.fields_by_name['proof'].message_type = trillian__pb2._PROOF
-_INCLUSIONPROOF.fields_by_name['sth'].message_type = trillian__pb2._SIGNEDLOGROOT
-_ADDVBMETARESPONSE.fields_by_name['annotation_proof'].message_type = _INCLUSIONPROOF
-_ADDVBMETARESPONSE.fields_by_name['vbmeta_proof'].message_type = _INCLUSIONPROOF
-_ANNOTATEVBMETAWITHBUILDRESPONSE.fields_by_name['annotation_proof'].message_type = _INCLUSIONPROOF
-DESCRIPTOR.message_types_by_name['InclusionProof'] = _INCLUSIONPROOF
-DESCRIPTOR.message_types_by_name['AddVBMetaRequest'] = _ADDVBMETAREQUEST
-DESCRIPTOR.message_types_by_name['AddVBMetaResponse'] = _ADDVBMETARESPONSE
-DESCRIPTOR.message_types_by_name['AnnotateVBMetaWithBuildRequest'] = _ANNOTATEVBMETAWITHBUILDREQUEST
-DESCRIPTOR.message_types_by_name['AnnotateVBMetaWithBuildResponse'] = _ANNOTATEVBMETAWITHBUILDRESPONSE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-InclusionProof = _reflection.GeneratedProtocolMessageType('InclusionProof', (_message.Message,), {
-  'DESCRIPTOR' : _INCLUSIONPROOF,
-  '__module__' : 'api_pb2'
-  # @@protoc_insertion_point(class_scope:aftl.InclusionProof)
-  })
-_sym_db.RegisterMessage(InclusionProof)
-
-AddVBMetaRequest = _reflection.GeneratedProtocolMessageType('AddVBMetaRequest', (_message.Message,), {
-  'DESCRIPTOR' : _ADDVBMETAREQUEST,
-  '__module__' : 'api_pb2'
-  # @@protoc_insertion_point(class_scope:aftl.AddVBMetaRequest)
-  })
-_sym_db.RegisterMessage(AddVBMetaRequest)
-
-AddVBMetaResponse = _reflection.GeneratedProtocolMessageType('AddVBMetaResponse', (_message.Message,), {
-  'DESCRIPTOR' : _ADDVBMETARESPONSE,
-  '__module__' : 'api_pb2'
-  # @@protoc_insertion_point(class_scope:aftl.AddVBMetaResponse)
-  })
-_sym_db.RegisterMessage(AddVBMetaResponse)
-
-AnnotateVBMetaWithBuildRequest = _reflection.GeneratedProtocolMessageType('AnnotateVBMetaWithBuildRequest', (_message.Message,), {
-  'DESCRIPTOR' : _ANNOTATEVBMETAWITHBUILDREQUEST,
-  '__module__' : 'api_pb2'
-  # @@protoc_insertion_point(class_scope:aftl.AnnotateVBMetaWithBuildRequest)
-  })
-_sym_db.RegisterMessage(AnnotateVBMetaWithBuildRequest)
-
-AnnotateVBMetaWithBuildResponse = _reflection.GeneratedProtocolMessageType('AnnotateVBMetaWithBuildResponse', (_message.Message,), {
-  'DESCRIPTOR' : _ANNOTATEVBMETAWITHBUILDRESPONSE,
-  '__module__' : 'api_pb2'
-  # @@protoc_insertion_point(class_scope:aftl.AnnotateVBMetaWithBuildResponse)
-  })
-_sym_db.RegisterMessage(AnnotateVBMetaWithBuildResponse)
-
-
-DESCRIPTOR._options = None
-
-_AFTLOG = _descriptor.ServiceDescriptor(
-  name='AFTLog',
-  full_name='aftl.AFTLog',
-  file=DESCRIPTOR,
-  index=0,
-  serialized_options=None,
-  serialized_start=585,
-  serialized_end=766,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='AddVBMeta',
-    full_name='aftl.AFTLog.AddVBMeta',
-    index=0,
-    containing_service=None,
-    input_type=_ADDVBMETAREQUEST,
-    output_type=_ADDVBMETARESPONSE,
-    serialized_options=None,
-  ),
-  _descriptor.MethodDescriptor(
-    name='AnnotateVBMetaWithBuild',
-    full_name='aftl.AFTLog.AnnotateVBMetaWithBuild',
-    index=1,
-    containing_service=None,
-    input_type=_ANNOTATEVBMETAWITHBUILDRESPONSE,
-    output_type=_ANNOTATEVBMETAWITHBUILDRESPONSE,
-    serialized_options=None,
-  ),
-])
-_sym_db.RegisterServiceDescriptor(_AFTLOG)
-
-DESCRIPTOR.services_by_name['AFTLog'] = _AFTLOG
-
-# @@protoc_insertion_point(module_scope)
diff --git a/proto/api_pb2_grpc.py b/proto/api_pb2_grpc.py
deleted file mode 100644
index d487856..0000000
--- a/proto/api_pb2_grpc.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-import api_pb2 as api__pb2
-
-
-class AFTLogStub(object):
-  # missing associated documentation comment in .proto file
-  pass
-
-  def __init__(self, channel):
-    """Constructor.
-
-    Args:
-      channel: A grpc.Channel.
-    """
-    self.AddVBMeta = channel.unary_unary(
-        '/aftl.AFTLog/AddVBMeta',
-        request_serializer=api__pb2.AddVBMetaRequest.SerializeToString,
-        response_deserializer=api__pb2.AddVBMetaResponse.FromString,
-        )
-    self.AnnotateVBMetaWithBuild = channel.stream_unary(
-        '/aftl.AFTLog/AnnotateVBMetaWithBuild',
-        request_serializer=api__pb2.AnnotateVBMetaWithBuildResponse.SerializeToString,
-        response_deserializer=api__pb2.AnnotateVBMetaWithBuildResponse.FromString,
-        )
-
-
-class AFTLogServicer(object):
-  # missing associated documentation comment in .proto file
-  pass
-
-  def AddVBMeta(self, request, context):
-    """Insert a new VBMeta structure into the log.
-    This request will effectively create 2 log entries:
-    - VBMeta itself
-    - Vendor annotations, which includes a reference to the VBMeta.
-    """
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-  def AnnotateVBMetaWithBuild(self, request_iterator, context):
-    """Upload (or copy) the complete firmware image.
-    """
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-
-def add_AFTLogServicer_to_server(servicer, server):
-  rpc_method_handlers = {
-      'AddVBMeta': grpc.unary_unary_rpc_method_handler(
-          servicer.AddVBMeta,
-          request_deserializer=api__pb2.AddVBMetaRequest.FromString,
-          response_serializer=api__pb2.AddVBMetaResponse.SerializeToString,
-      ),
-      'AnnotateVBMetaWithBuild': grpc.stream_unary_rpc_method_handler(
-          servicer.AnnotateVBMetaWithBuild,
-          request_deserializer=api__pb2.AnnotateVBMetaWithBuildResponse.FromString,
-          response_serializer=api__pb2.AnnotateVBMetaWithBuildResponse.SerializeToString,
-      ),
-  }
-  generic_handler = grpc.method_handlers_generic_handler(
-      'aftl.AFTLog', rpc_method_handlers)
-  server.add_generic_rpc_handlers((generic_handler,))
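The stub removed above followed the standard grpcio pattern: wrap a channel, then invoke the unary `AddVBMeta` RPC with a request message. A minimal client sketch, assuming the deleted `proto/` modules are on the import path and an AFTL log endpoint is reachable at a hypothetical `localhost:50051`; the empty request is illustrative only, since its fields are defined earlier in `api.proto`:

    import grpc

    import api_pb2
    import api_pb2_grpc

    # Plaintext channel for illustration; a real deployment would use
    # grpc.secure_channel() with proper credentials.
    channel = grpc.insecure_channel('localhost:50051')
    stub = api_pb2_grpc.AFTLogStub(channel)

    request = api_pb2.AddVBMetaRequest()
    response = stub.AddVBMeta(request, timeout=30)

    # Field names taken from the descriptors above: AddVBMetaResponse.vbmeta_proof
    # is an InclusionProof, whose 'sth' field carries the signed log root.
    print(response.vbmeta_proof.sth)

    # AnnotateVBMetaWithBuild is generated as a client-streaming call
    # (channel.stream_unary), so it takes an iterator of request messages.

On the server side, the deleted `add_AFTLogServicer_to_server` helper wired an `AFTLogServicer` subclass into a `grpc.server` instance via a generic handler, which is the usual grpcio registration pattern.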
diff --git a/proto/crypto/__init__.py b/proto/crypto/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/proto/crypto/__init__.py
+++ /dev/null
diff --git a/proto/crypto/keyspb/__init__.py b/proto/crypto/keyspb/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/proto/crypto/keyspb/__init__.py
+++ /dev/null
diff --git a/proto/crypto/keyspb/keyspb.proto b/proto/crypto/keyspb/keyspb.proto
deleted file mode 100644
index 03a8313..0000000
--- a/proto/crypto/keyspb/keyspb.proto
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright 2017 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-option go_package = "github.com/google/trillian/crypto/keyspb";
-
-package keyspb;
-
-// Specification for a private key.
-message Specification {
-  /// ECDSA defines parameters for an ECDSA key.
-  message ECDSA {
-    // The supported elliptic curves.
-    enum Curve {
-      DEFAULT_CURVE = 0;  // Curve will be chosen by Trillian.
-      P256 = 1;
-      P384 = 2;
-      P521 = 3;
-    }
-
-    // The elliptic curve to use.
-    // Optional. If not set, the default curve will be used.
-    Curve curve = 1;
-  }
-
-  // RSA defines parameters for an RSA key.
-  message RSA {
-    // Size of the keys in bits. Must be sufficiently large to allow two primes
-    // to be generated.
-    // Optional. If not set, the key size will be chosen by Trillian.
-    int32 bits = 1;
-  }
-
-  // Ed25519 defines (empty) parameters for an Ed25519 private key.
-  message Ed25519 {
-  }
-
-  // The type of parameters provided determines the algorithm used for the key.
-  oneof params {
-    // The parameters for an ECDSA key.
-    ECDSA ecdsa_params = 1;
-
-    // The parameters for an RSA key.
-    RSA rsa_params = 2;
-
-    // The parameters for an Ed25519 key.
-    Ed25519 ed25519_params = 3;
-  }
-}
-
-// PEMKeyFile identifies a private key stored in a PEM-encoded file.
-message PEMKeyFile {
-  // File path of the private key.
-  string path = 1;
-
-  // Password for decrypting the private key.
-  // If empty, indicates that the private key is not encrypted.
-  string password = 2;
-}
-
-// PrivateKey is a private key, used for generating signatures.
-message PrivateKey {
-  // The key in DER-encoded form.
-  // The specific format (e.g. PKCS8) is not specified.
-  bytes der = 1;
-}
-
-// PublicKey is a public key, used for verifying signatures.
-message PublicKey {
-  // The key in DER-encoded PKIX form.
-  bytes der = 1;
-}
-
-// PKCS11Config identifies a private key accessed using PKCS #11.
-message PKCS11Config {
-  // The label of the PKCS#11 token.
-  string token_label = 1;
-  // The PIN for the specific token.
-  string pin = 2;
-  // The PEM public key assosciated with the private key to be used.
-  string public_key = 3;
-}
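The `params` oneof above means exactly one of the ECDSA/RSA/Ed25519 blocks is set on a `Specification`. A minimal sketch using the generated bindings (also removed in this change), assuming `proto/crypto/keyspb/keyspb_pb2.py` is importable under the `crypto.keyspb` package path used by the other generated modules:

    from crypto.keyspb import keyspb_pb2

    # Ask for an ECDSA key over P-256; setting ecdsa_params selects that arm of the oneof.
    spec = keyspb_pb2.Specification(
        ecdsa_params=keyspb_pb2.Specification.ECDSA(
            curve=keyspb_pb2.Specification.ECDSA.P256))

    assert spec.WhichOneof('params') == 'ecdsa_params'

    # Setting a different arm clears the previous one, as with any proto3 oneof.
    spec.rsa_params.bits = 3072
    assert spec.WhichOneof('params') == 'rsa_params'
    assert not spec.HasField('ecdsa_params')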
diff --git a/proto/crypto/keyspb/keyspb_pb2.py b/proto/crypto/keyspb/keyspb_pb2.py
deleted file mode 100644
index eba4099..0000000
--- a/proto/crypto/keyspb/keyspb_pb2.py
+++ /dev/null
@@ -1,421 +0,0 @@
-# pylint: skip-file
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: crypto/keyspb/keyspb.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='crypto/keyspb/keyspb.proto',
-  package='keyspb',
-  syntax='proto3',
-  serialized_options=_b('Z(github.com/google/trillian/crypto/keyspb'),
-  serialized_pb=_b('\n\x1a\x63rypto/keyspb/keyspb.proto\x12\x06keyspb\"\xcd\x02\n\rSpecification\x12\x33\n\x0c\x65\x63\x64sa_params\x18\x01 \x01(\x0b\x32\x1b.keyspb.Specification.ECDSAH\x00\x12/\n\nrsa_params\x18\x02 \x01(\x0b\x32\x19.keyspb.Specification.RSAH\x00\x12\x37\n\x0e\x65\x64\x32\x35\x35\x31\x39_params\x18\x03 \x01(\x0b\x32\x1d.keyspb.Specification.Ed25519H\x00\x1as\n\x05\x45\x43\x44SA\x12\x30\n\x05\x63urve\x18\x01 \x01(\x0e\x32!.keyspb.Specification.ECDSA.Curve\"8\n\x05\x43urve\x12\x11\n\rDEFAULT_CURVE\x10\x00\x12\x08\n\x04P256\x10\x01\x12\x08\n\x04P384\x10\x02\x12\x08\n\x04P521\x10\x03\x1a\x13\n\x03RSA\x12\x0c\n\x04\x62its\x18\x01 \x01(\x05\x1a\t\n\x07\x45\x64\x32\x35\x35\x31\x39\x42\x08\n\x06params\",\n\nPEMKeyFile\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\"\x19\n\nPrivateKey\x12\x0b\n\x03\x64\x65r\x18\x01 \x01(\x0c\"\x18\n\tPublicKey\x12\x0b\n\x03\x64\x65r\x18\x01 \x01(\x0c\"D\n\x0cPKCS11Config\x12\x13\n\x0btoken_label\x18\x01 \x01(\t\x12\x0b\n\x03pin\x18\x02 \x01(\t\x12\x12\n\npublic_key\x18\x03 \x01(\tB*Z(github.com/google/trillian/crypto/keyspbb\x06proto3')
-)
-
-
-
-_SPECIFICATION_ECDSA_CURVE = _descriptor.EnumDescriptor(
-  name='Curve',
-  full_name='keyspb.Specification.ECDSA.Curve',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='DEFAULT_CURVE', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='P256', index=1, number=1,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='P384', index=2, number=2,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='P521', index=3, number=3,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=274,
-  serialized_end=330,
-)
-_sym_db.RegisterEnumDescriptor(_SPECIFICATION_ECDSA_CURVE)
-
-
-_SPECIFICATION_ECDSA = _descriptor.Descriptor(
-  name='ECDSA',
-  full_name='keyspb.Specification.ECDSA',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='curve', full_name='keyspb.Specification.ECDSA.curve', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _SPECIFICATION_ECDSA_CURVE,
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=215,
-  serialized_end=330,
-)
-
-_SPECIFICATION_RSA = _descriptor.Descriptor(
-  name='RSA',
-  full_name='keyspb.Specification.RSA',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bits', full_name='keyspb.Specification.RSA.bits', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=332,
-  serialized_end=351,
-)
-
-_SPECIFICATION_ED25519 = _descriptor.Descriptor(
-  name='Ed25519',
-  full_name='keyspb.Specification.Ed25519',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=353,
-  serialized_end=362,
-)
-
-_SPECIFICATION = _descriptor.Descriptor(
-  name='Specification',
-  full_name='keyspb.Specification',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='ecdsa_params', full_name='keyspb.Specification.ecdsa_params', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='rsa_params', full_name='keyspb.Specification.rsa_params', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='ed25519_params', full_name='keyspb.Specification.ed25519_params', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[_SPECIFICATION_ECDSA, _SPECIFICATION_RSA, _SPECIFICATION_ED25519, ],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='params', full_name='keyspb.Specification.params',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=39,
-  serialized_end=372,
-)
-
-
-_PEMKEYFILE = _descriptor.Descriptor(
-  name='PEMKeyFile',
-  full_name='keyspb.PEMKeyFile',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='path', full_name='keyspb.PEMKeyFile.path', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='password', full_name='keyspb.PEMKeyFile.password', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=374,
-  serialized_end=418,
-)
-
-
-_PRIVATEKEY = _descriptor.Descriptor(
-  name='PrivateKey',
-  full_name='keyspb.PrivateKey',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='der', full_name='keyspb.PrivateKey.der', index=0,
-      number=1, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=420,
-  serialized_end=445,
-)
-
-
-_PUBLICKEY = _descriptor.Descriptor(
-  name='PublicKey',
-  full_name='keyspb.PublicKey',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='der', full_name='keyspb.PublicKey.der', index=0,
-      number=1, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=447,
-  serialized_end=471,
-)
-
-
-_PKCS11CONFIG = _descriptor.Descriptor(
-  name='PKCS11Config',
-  full_name='keyspb.PKCS11Config',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='token_label', full_name='keyspb.PKCS11Config.token_label', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='pin', full_name='keyspb.PKCS11Config.pin', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='public_key', full_name='keyspb.PKCS11Config.public_key', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=473,
-  serialized_end=541,
-)
-
-_SPECIFICATION_ECDSA.fields_by_name['curve'].enum_type = _SPECIFICATION_ECDSA_CURVE
-_SPECIFICATION_ECDSA.containing_type = _SPECIFICATION
-_SPECIFICATION_ECDSA_CURVE.containing_type = _SPECIFICATION_ECDSA
-_SPECIFICATION_RSA.containing_type = _SPECIFICATION
-_SPECIFICATION_ED25519.containing_type = _SPECIFICATION
-_SPECIFICATION.fields_by_name['ecdsa_params'].message_type = _SPECIFICATION_ECDSA
-_SPECIFICATION.fields_by_name['rsa_params'].message_type = _SPECIFICATION_RSA
-_SPECIFICATION.fields_by_name['ed25519_params'].message_type = _SPECIFICATION_ED25519
-_SPECIFICATION.oneofs_by_name['params'].fields.append(
-  _SPECIFICATION.fields_by_name['ecdsa_params'])
-_SPECIFICATION.fields_by_name['ecdsa_params'].containing_oneof = _SPECIFICATION.oneofs_by_name['params']
-_SPECIFICATION.oneofs_by_name['params'].fields.append(
-  _SPECIFICATION.fields_by_name['rsa_params'])
-_SPECIFICATION.fields_by_name['rsa_params'].containing_oneof = _SPECIFICATION.oneofs_by_name['params']
-_SPECIFICATION.oneofs_by_name['params'].fields.append(
-  _SPECIFICATION.fields_by_name['ed25519_params'])
-_SPECIFICATION.fields_by_name['ed25519_params'].containing_oneof = _SPECIFICATION.oneofs_by_name['params']
-DESCRIPTOR.message_types_by_name['Specification'] = _SPECIFICATION
-DESCRIPTOR.message_types_by_name['PEMKeyFile'] = _PEMKEYFILE
-DESCRIPTOR.message_types_by_name['PrivateKey'] = _PRIVATEKEY
-DESCRIPTOR.message_types_by_name['PublicKey'] = _PUBLICKEY
-DESCRIPTOR.message_types_by_name['PKCS11Config'] = _PKCS11CONFIG
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Specification = _reflection.GeneratedProtocolMessageType('Specification', (_message.Message,), {
-
-  'ECDSA' : _reflection.GeneratedProtocolMessageType('ECDSA', (_message.Message,), {
-    'DESCRIPTOR' : _SPECIFICATION_ECDSA,
-    '__module__' : 'crypto.keyspb.keyspb_pb2'
-    # @@protoc_insertion_point(class_scope:keyspb.Specification.ECDSA)
-    })
-  ,
-
-  'RSA' : _reflection.GeneratedProtocolMessageType('RSA', (_message.Message,), {
-    'DESCRIPTOR' : _SPECIFICATION_RSA,
-    '__module__' : 'crypto.keyspb.keyspb_pb2'
-    # @@protoc_insertion_point(class_scope:keyspb.Specification.RSA)
-    })
-  ,
-
-  'Ed25519' : _reflection.GeneratedProtocolMessageType('Ed25519', (_message.Message,), {
-    'DESCRIPTOR' : _SPECIFICATION_ED25519,
-    '__module__' : 'crypto.keyspb.keyspb_pb2'
-    # @@protoc_insertion_point(class_scope:keyspb.Specification.Ed25519)
-    })
-  ,
-  'DESCRIPTOR' : _SPECIFICATION,
-  '__module__' : 'crypto.keyspb.keyspb_pb2'
-  # @@protoc_insertion_point(class_scope:keyspb.Specification)
-  })
-_sym_db.RegisterMessage(Specification)
-_sym_db.RegisterMessage(Specification.ECDSA)
-_sym_db.RegisterMessage(Specification.RSA)
-_sym_db.RegisterMessage(Specification.Ed25519)
-
-PEMKeyFile = _reflection.GeneratedProtocolMessageType('PEMKeyFile', (_message.Message,), {
-  'DESCRIPTOR' : _PEMKEYFILE,
-  '__module__' : 'crypto.keyspb.keyspb_pb2'
-  # @@protoc_insertion_point(class_scope:keyspb.PEMKeyFile)
-  })
-_sym_db.RegisterMessage(PEMKeyFile)
-
-PrivateKey = _reflection.GeneratedProtocolMessageType('PrivateKey', (_message.Message,), {
-  'DESCRIPTOR' : _PRIVATEKEY,
-  '__module__' : 'crypto.keyspb.keyspb_pb2'
-  # @@protoc_insertion_point(class_scope:keyspb.PrivateKey)
-  })
-_sym_db.RegisterMessage(PrivateKey)
-
-PublicKey = _reflection.GeneratedProtocolMessageType('PublicKey', (_message.Message,), {
-  'DESCRIPTOR' : _PUBLICKEY,
-  '__module__' : 'crypto.keyspb.keyspb_pb2'
-  # @@protoc_insertion_point(class_scope:keyspb.PublicKey)
-  })
-_sym_db.RegisterMessage(PublicKey)
-
-PKCS11Config = _reflection.GeneratedProtocolMessageType('PKCS11Config', (_message.Message,), {
-  'DESCRIPTOR' : _PKCS11CONFIG,
-  '__module__' : 'crypto.keyspb.keyspb_pb2'
-  # @@protoc_insertion_point(class_scope:keyspb.PKCS11Config)
-  })
-_sym_db.RegisterMessage(PKCS11Config)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/proto/crypto/keyspb/keyspb_pb2_grpc.py b/proto/crypto/keyspb/keyspb_pb2_grpc.py
deleted file mode 100644
index 73636b2..0000000
--- a/proto/crypto/keyspb/keyspb_pb2_grpc.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# pylint: skip-file
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
diff --git a/proto/crypto/sigpb/__init__.py b/proto/crypto/sigpb/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/proto/crypto/sigpb/__init__.py
+++ /dev/null
diff --git a/proto/crypto/sigpb/sigpb.proto b/proto/crypto/sigpb/sigpb.proto
deleted file mode 100644
index 3e333d3..0000000
--- a/proto/crypto/sigpb/sigpb.proto
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright 2016 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-option go_package = "github.com/google/trillian/crypto/sigpb";
-
-package sigpb;
-
-// Protocol buffer encoding of the TLS DigitallySigned type, from RFC 5246 §4.7.
-message DigitallySigned {
-  // HashAlgorithm defines the approved methods for object hashing.
-  //
-  // Supported hash algorithms. The numbering space is the same as for TLS,
-  // given in RFC 5246 s7.4.1.4.1 and at:
-  // http://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#tls-parameters-18
-  enum HashAlgorithm {
-    // No hash algorithm is used.
-    NONE = 0;
-    // SHA256 is used.
-    SHA256 = 4;
-  }
-
-  // SignatureAlgorithm defines the algorithm used to sign the object.
-  //
-  // Supported signature algorithms. The numbering space is the same as for TLS,
-  // given in RFC 5246 s7.4.1.4.1 and at:
-  // http://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#tls-parameters-16
-  enum SignatureAlgorithm {
-    // Anonymous signature scheme.
-    ANONYMOUS = 0;
-    // RSA signature scheme.
-    RSA = 1;
-    // ECDSA signature scheme.
-    ECDSA = 3;
-    // Ed25519 signature scheme.
-    ED25519 = 7;
-  }
-
-  // hash_algorithm contains the hash algorithm used.
-  HashAlgorithm hash_algorithm = 1;
-  // sig_algorithm contains the signing algorithm used.
-  SignatureAlgorithm signature_algorithm = 2;
-  // signature contains the object signature.
-  bytes signature = 3;
-}
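`DigitallySigned` is the envelope Trillian uses to carry a signature together with the algorithms that produced it. A small sketch with the generated bindings, assuming `crypto/sigpb/sigpb_pb2.py` is importable and with a placeholder signature value (real bytes come from the signer):

    from crypto.sigpb import sigpb_pb2

    raw_signature = b'\x00' * 64  # placeholder; not a real signature

    ds = sigpb_pb2.DigitallySigned(
        hash_algorithm=sigpb_pb2.DigitallySigned.SHA256,
        signature_algorithm=sigpb_pb2.DigitallySigned.ECDSA,
        signature=raw_signature)

    # Wire-format round trip, as any verifier consuming this message would do.
    parsed = sigpb_pb2.DigitallySigned.FromString(ds.SerializeToString())
    assert parsed.signature_algorithm == sigpb_pb2.DigitallySigned.ECDSA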
diff --git a/proto/crypto/sigpb/sigpb_pb2.py b/proto/crypto/sigpb/sigpb_pb2.py
deleted file mode 100644
index b00d42a..0000000
--- a/proto/crypto/sigpb/sigpb_pb2.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# pylint: skip-file
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: crypto/sigpb/sigpb.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='crypto/sigpb/sigpb.proto',
-  package='sigpb',
-  syntax='proto3',
-  serialized_options=_b('Z\'github.com/google/trillian/crypto/sigpb'),
-  serialized_pb=_b('\n\x18\x63rypto/sigpb/sigpb.proto\x12\x05sigpb\"\x97\x02\n\x0f\x44igitallySigned\x12<\n\x0ehash_algorithm\x18\x01 \x01(\x0e\x32$.sigpb.DigitallySigned.HashAlgorithm\x12\x46\n\x13signature_algorithm\x18\x02 \x01(\x0e\x32).sigpb.DigitallySigned.SignatureAlgorithm\x12\x11\n\tsignature\x18\x03 \x01(\x0c\"%\n\rHashAlgorithm\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06SHA256\x10\x04\"D\n\x12SignatureAlgorithm\x12\r\n\tANONYMOUS\x10\x00\x12\x07\n\x03RSA\x10\x01\x12\t\n\x05\x45\x43\x44SA\x10\x03\x12\x0b\n\x07\x45\x44\x32\x35\x35\x31\x39\x10\x07\x42)Z\'github.com/google/trillian/crypto/sigpbb\x06proto3')
-)
-
-
-
-_DIGITALLYSIGNED_HASHALGORITHM = _descriptor.EnumDescriptor(
-  name='HashAlgorithm',
-  full_name='sigpb.DigitallySigned.HashAlgorithm',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='NONE', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SHA256', index=1, number=4,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=208,
-  serialized_end=245,
-)
-_sym_db.RegisterEnumDescriptor(_DIGITALLYSIGNED_HASHALGORITHM)
-
-_DIGITALLYSIGNED_SIGNATUREALGORITHM = _descriptor.EnumDescriptor(
-  name='SignatureAlgorithm',
-  full_name='sigpb.DigitallySigned.SignatureAlgorithm',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='ANONYMOUS', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='RSA', index=1, number=1,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='ECDSA', index=2, number=3,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='ED25519', index=3, number=7,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=247,
-  serialized_end=315,
-)
-_sym_db.RegisterEnumDescriptor(_DIGITALLYSIGNED_SIGNATUREALGORITHM)
-
-
-_DIGITALLYSIGNED = _descriptor.Descriptor(
-  name='DigitallySigned',
-  full_name='sigpb.DigitallySigned',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='hash_algorithm', full_name='sigpb.DigitallySigned.hash_algorithm', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='signature_algorithm', full_name='sigpb.DigitallySigned.signature_algorithm', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='signature', full_name='sigpb.DigitallySigned.signature', index=2,
-      number=3, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _DIGITALLYSIGNED_HASHALGORITHM,
-    _DIGITALLYSIGNED_SIGNATUREALGORITHM,
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=36,
-  serialized_end=315,
-)
-
-_DIGITALLYSIGNED.fields_by_name['hash_algorithm'].enum_type = _DIGITALLYSIGNED_HASHALGORITHM
-_DIGITALLYSIGNED.fields_by_name['signature_algorithm'].enum_type = _DIGITALLYSIGNED_SIGNATUREALGORITHM
-_DIGITALLYSIGNED_HASHALGORITHM.containing_type = _DIGITALLYSIGNED
-_DIGITALLYSIGNED_SIGNATUREALGORITHM.containing_type = _DIGITALLYSIGNED
-DESCRIPTOR.message_types_by_name['DigitallySigned'] = _DIGITALLYSIGNED
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-DigitallySigned = _reflection.GeneratedProtocolMessageType('DigitallySigned', (_message.Message,), {
-  'DESCRIPTOR' : _DIGITALLYSIGNED,
-  '__module__' : 'crypto.sigpb.sigpb_pb2'
-  # @@protoc_insertion_point(class_scope:sigpb.DigitallySigned)
-  })
-_sym_db.RegisterMessage(DigitallySigned)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/proto/crypto/sigpb/sigpb_pb2_grpc.py b/proto/crypto/sigpb/sigpb_pb2_grpc.py
deleted file mode 100644
index 73636b2..0000000
--- a/proto/crypto/sigpb/sigpb_pb2_grpc.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# pylint: skip-file
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
diff --git a/proto/trillian.proto b/proto/trillian.proto
deleted file mode 100644
index e14522f..0000000
--- a/proto/trillian.proto
+++ /dev/null
@@ -1,316 +0,0 @@
-// Copyright 2016 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-option java_multiple_files = true;
-option java_package = "com.google.trillian.proto";
-option java_outer_classname = "TrillianProto";
-option go_package = "github.com/google/trillian";
-
-package trillian;
-
-import "crypto/keyspb/keyspb.proto";
-import "crypto/sigpb/sigpb.proto";
-import "google/protobuf/any.proto";
-import "google/protobuf/duration.proto";
-import "google/protobuf/timestamp.proto";
-
-// LogRootFormat specifies the fields that are covered by the
-// SignedLogRoot signature, as well as their ordering and formats.
-enum LogRootFormat {
-  LOG_ROOT_FORMAT_UNKNOWN = 0;
-  LOG_ROOT_FORMAT_V1 = 1;
-}
-
-// MapRootFormat specifies the fields that are covered by the
-// SignedMapRoot signature, as well as their ordering and formats.
-enum MapRootFormat {
-  MAP_ROOT_FORMAT_UNKNOWN = 0;
-  MAP_ROOT_FORMAT_V1 = 1;
-}
-
-// What goes in here?
-// Things which are exposed through the public trillian APIs.
-
-// Defines the way empty / node / leaf hashes are constructed incorporating
-// preimage protection, which can be application specific.
-enum HashStrategy {
-  // Hash strategy cannot be determined. Included to enable detection of
-  // mismatched proto versions being used. Represents an invalid value.
-  UNKNOWN_HASH_STRATEGY = 0;
-
-  // Certificate Transparency strategy: leaf hash prefix = 0x00, node prefix =
-  // 0x01, empty hash is digest([]byte{}), as defined in the specification.
-  RFC6962_SHA256 = 1;
-
-  // Sparse Merkle Tree strategy:  leaf hash prefix = 0x00, node prefix = 0x01,
-  // empty branch is recursively computed from empty leaf nodes.
-  // NOT secure in a multi tree environment. For testing only.
-  TEST_MAP_HASHER = 2;
-
-  // Append-only log strategy where leaf nodes are defined as the ObjectHash.
-  // All other properties are equal to RFC6962_SHA256.
-  OBJECT_RFC6962_SHA256 = 3;
-
-  // The CONIKS sparse tree hasher with SHA512_256 as the hash algorithm.
-  CONIKS_SHA512_256 = 4;
-
-  // The CONIKS sparse tree hasher with SHA256 as the hash algorithm.
-  CONIKS_SHA256 = 5;
-}
-
-// State of the tree.
-enum TreeState {
-  // Tree state cannot be determined. Included to enable detection of
-  // mismatched proto versions being used. Represents an invalid value.
-  UNKNOWN_TREE_STATE = 0;
-
-  // Active trees are able to respond to both read and write requests.
-  ACTIVE = 1;
-
-  // Frozen trees are only able to respond to read requests, writing to a frozen
-  // tree is forbidden. Trees should not be frozen when there are entries
-  // in the queue that have not yet been integrated. See the DRAINING
-  // state for this case.
-  FROZEN = 2;
-
-  // Deprecated: now tracked in Tree.deleted.
-  DEPRECATED_SOFT_DELETED = 3 [deprecated = true];
-
-  // Deprecated: now tracked in Tree.deleted.
-  DEPRECATED_HARD_DELETED = 4 [deprecated = true];
-
-  // A tree that is draining will continue to integrate queued entries.
-  // No new entries should be accepted.
-  DRAINING = 5;
-}
-
-// Type of the tree.
-enum TreeType {
-  // Tree type cannot be determined. Included to enable detection of mismatched
-  // proto versions being used. Represents an invalid value.
-  UNKNOWN_TREE_TYPE = 0;
-
-  // Tree represents a verifiable log.
-  LOG = 1;
-
-  // Tree represents a verifiable map.
-  MAP = 2;
-
-  // Tree represents a verifiable pre-ordered log, i.e., a log whose entries are
-  // placed according to sequence numbers assigned outside of Trillian.
-  PREORDERED_LOG = 3;
-}
-
-// Represents a tree, which may be either a verifiable log or map.
-// Readonly attributes are assigned at tree creation, after which they may not
-// be modified.
-//
-// Note: Many APIs within the rest of the code require these objects to
-// be provided. For safety they should be obtained via Admin API calls and
-// not created dynamically.
-message Tree {
-  // ID of the tree.
-  // Readonly.
-  int64 tree_id = 1;
-
-  // State of the tree.
-  // Trees are ACTIVE after creation. At any point the tree may transition
-  // between ACTIVE, DRAINING and FROZEN states.
-  TreeState tree_state = 2;
-
-  // Type of the tree.
-  // Readonly after Tree creation. Exception: Can be switched from
-  // PREORDERED_LOG to LOG if the Tree is and remains in the FROZEN state.
-  TreeType tree_type = 3;
-
-  // Hash strategy to be used by the tree.
-  // Readonly.
-  HashStrategy hash_strategy = 4;
-
-  // Hash algorithm to be used by the tree.
-  // Readonly.
-  sigpb.DigitallySigned.HashAlgorithm hash_algorithm = 5;
-
-  // Signature algorithm to be used by the tree.
-  // Readonly.
-  sigpb.DigitallySigned.SignatureAlgorithm signature_algorithm = 6;
-
-  reserved 18;  // Signature cipher suite (removed)
-  reserved 7;   // DuplicatePolicy (removed)
-
-  // Display name of the tree.
-  // Optional.
-  string display_name = 8;
-
-  // Description of the tree,
-  // Optional.
-  string description = 9;
-
-  reserved 10;  // create_time_millis_since_epoch (removed)
-  reserved 11;  // update_time_millis_since_epoch (removed)
-
-  // Identifies the private key used for signing tree heads and entry
-  // timestamps.
-  // This can be any type of message to accommodate different key management
-  // systems, e.g. PEM files, HSMs, etc.
-  // Private keys are write-only: they're never returned by RPCs.
-  // The private_key message can be changed after a tree is created, but the
-  // underlying key must remain the same - this is to enable migrating a key
-  // from one provider to another.
-  google.protobuf.Any private_key = 12;
-
-  // Storage-specific settings.
-  // Varies according to the storage implementation backing Trillian.
-  google.protobuf.Any storage_settings = 13;
-
-  // The public key used for verifying tree heads and entry timestamps.
-  // Readonly.
-  keyspb.PublicKey public_key = 14;
-
-  // Interval after which a new signed root is produced even if there have been
-  // no submission.  If zero, this behavior is disabled.
-  google.protobuf.Duration max_root_duration = 15;
-
-  // Time of tree creation.
-  // Readonly.
-  google.protobuf.Timestamp create_time = 16;
-
-  // Time of last tree update.
-  // Readonly (automatically assigned on updates).
-  google.protobuf.Timestamp update_time = 17;
-
-  // If true, the tree has been deleted.
-  // Deleted trees may be undeleted during a certain time window, after which
-  // they're permanently deleted (and unrecoverable).
-  // Readonly.
-  bool deleted = 19;
-
-  // Time of tree deletion, if any.
-  // Readonly.
-  google.protobuf.Timestamp delete_time = 20;
-}
-
-message SignedEntryTimestamp {
-  int64 timestamp_nanos = 1;
-  int64 log_id = 2;
-  sigpb.DigitallySigned signature = 3;
-}
-
-// SignedLogRoot represents a commitment by a Log to a particular tree.
-message SignedLogRoot {
-  // Deleted: TimestampNanos moved to LogRoot.
-  reserved 1;
-  // Deleted: RootHash moved to LogRoot.
-  reserved 2;
-  // Deleted: TreeSize moved to LogRoot.
-  reserved 3;
-  // Deleted: Signature replaced by LogRootSignature.
-  reserved 4;
-  // Deleted: LogID is associated with the public key that validates signature.
-  reserved 5;
-  // Deleted: TreeRevision moved to LogRoot.
-  reserved 6;
-
-  // key_hint is a hint to identify the public key for signature verification.
-  // key_hint is not authenticated and may be incorrect or missing, in which
-  // case all known public keys may be used to verify the signature.
-  // When directly communicating with a Trillian gRPC server, the key_hint will
-  // typically contain the LogID encoded as a big-endian 64-bit integer;
-  // however, in other contexts the key_hint is likely to have different
-  // contents (e.g. it could be a GUID, a URL + TreeID, or it could be
-  // derived from the public key itself).
-  bytes key_hint = 7;
-
-  // log_root holds the TLS-serialization of the following structure (described
-  // in RFC5246 notation): Clients should validate log_root_signature with
-  // VerifySignedLogRoot before deserializing log_root.
-  // enum { v1(1), (65535)} Version;
-  // struct {
-  //   uint64 tree_size;
-  //   opaque root_hash<0..128>;
-  //   uint64 timestamp_nanos;
-  //   uint64 revision;
-  //   opaque metadata<0..65535>;
-  // } LogRootV1;
-  // struct {
-  //   Version version;
-  //   select(version) {
-  //     case v1: LogRootV1;
-  //   }
-  // } LogRoot;
-  //
-  // A serialized v1 log root will therefore be laid out as:
-  //
-  // +---+---+---+---+---+---+---+---+---+---+---+---+---+---+-....--+
-  // | ver=1 |          tree_size            |len|    root_hashlen   |
-  // +---+---+---+---+---+---+---+---+---+---+---+---+---+---+-....--+
-  //
-  // +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
-  // |        timestamp_nanos        |      revision                 |
-  // +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
-  //
-  // +---+---+---+---+---+-....---+
-  // |  len  |    metadata        |
-  // +---+---+---+---+---+-....---+
-  //
-  // (with all integers encoded big-endian).
-  bytes log_root = 8;
-
-  // log_root_signature is the raw signature over log_root.
-  bytes log_root_signature = 9;
-}
-
-// SignedMapRoot represents a commitment by a Map to a particular tree.
-message SignedMapRoot {
-  reserved 1;  // Deprecated: Was timestamp_nanos. Use map_root.
-  reserved 2;  // Deprecated: Was root_hash. Use map_root.
-  reserved 3;  // Deprecated: Was MapperMetadata. Use map_root.
-  reserved 5;  // Deprecated: Was map_id. Use signature.
-  reserved 6;  // Deprecated: Was map_revision. Use map_root.
-  reserved 7;  // Deprecated: Was metadata Any. Use map_root.
-  reserved 8;  // Deprecated: Was metadata bytes. Use map_root.
-
-  // map_root holds the TLS-serialization of the following structure (described
-  // in RFC5246 notation): Clients should validate signature with
-  // VerifySignedMapRoot before deserializing map_root.
-  // enum { v1(1), (65535)} Version;
-  // struct {
-  //   opaque root_hash<0..128>;
-  //   uint64 timestamp_nanos;
-  //   uint64 revision;
-  //   opaque metadata<0..65535>;
-  // } MapRootV1;
-  // struct {
-  //   Version version;
-  //   select(version) {
-  //     case v1: MapRootV1;
-  //   }
-  // } MapRoot;
-  bytes map_root = 9;
-  // Signature is the raw signature over MapRoot.
-  bytes signature = 4;
-}
-
-// Proof holds a consistency or inclusion proof for a Merkle tree, as returned
-// by the API.
-message Proof {
-  // leaf_index indicates the requested leaf index when this message is used for
-  // a leaf inclusion proof.  This field is set to zero when this message is
-  // used for a consistency proof.
-  int64 leaf_index = 1;
-  reserved 2; // Contained internal node details (removed)
-  repeated bytes hashes = 3;
-}
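The SignedLogRoot comment above pins down the TLS (RFC 5246) layout of a v1 `log_root`: a big-endian uint16 version, uint64 tree_size, a one-byte-length-prefixed root_hash (`opaque <0..128>`), uint64 timestamp_nanos, uint64 revision, and a two-byte-length-prefixed metadata blob. A minimal decoder sketch that follows that description; it deliberately does not check `log_root_signature`, which a real client must verify first (e.g. via Trillian's VerifySignedLogRoot):

    import struct

    def parse_log_root_v1(log_root: bytes) -> dict:
        """Decode the TLS-serialized LogRootV1 structure described above."""
        (version,) = struct.unpack_from('>H', log_root, 0)
        if version != 1:
            raise ValueError('unsupported LogRoot version: %d' % version)
        offset = 2

        (tree_size,) = struct.unpack_from('>Q', log_root, offset)
        offset += 8

        # opaque root_hash<0..128>: one length byte, then the hash itself.
        (hash_len,) = struct.unpack_from('>B', log_root, offset)
        offset += 1
        root_hash = log_root[offset:offset + hash_len]
        offset += hash_len

        timestamp_nanos, revision = struct.unpack_from('>QQ', log_root, offset)
        offset += 16

        # opaque metadata<0..65535>: two length bytes, then the metadata.
        (meta_len,) = struct.unpack_from('>H', log_root, offset)
        offset += 2
        metadata = log_root[offset:offset + meta_len]

        return {
            'tree_size': tree_size,
            'root_hash': root_hash,
            'timestamp_nanos': timestamp_nanos,
            'revision': revision,
            'metadata': metadata,
        }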
diff --git a/proto/trillian_pb2.py b/proto/trillian_pb2.py
deleted file mode 100644
index 4c7ddd6..0000000
--- a/proto/trillian_pb2.py
+++ /dev/null
@@ -1,576 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: trillian.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from crypto.keyspb import keyspb_pb2 as crypto_dot_keyspb_dot_keyspb__pb2
-from crypto.sigpb import sigpb_pb2 as crypto_dot_sigpb_dot_sigpb__pb2
-from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
-from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='trillian.proto',
-  package='trillian',
-  syntax='proto3',
-  serialized_options=_b('\n\031com.google.trillian.protoB\rTrillianProtoP\001Z\032github.com/google/trillian'),
-  serialized_pb=_b('\n\x0etrillian.proto\x12\x08trillian\x1a\x1a\x63rypto/keyspb/keyspb.proto\x1a\x18\x63rypto/sigpb/sigpb.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xbb\x05\n\x04Tree\x12\x0f\n\x07tree_id\x18\x01 \x01(\x03\x12\'\n\ntree_state\x18\x02 \x01(\x0e\x32\x13.trillian.TreeState\x12%\n\ttree_type\x18\x03 \x01(\x0e\x32\x12.trillian.TreeType\x12-\n\rhash_strategy\x18\x04 \x01(\x0e\x32\x16.trillian.HashStrategy\x12<\n\x0ehash_algorithm\x18\x05 \x01(\x0e\x32$.sigpb.DigitallySigned.HashAlgorithm\x12\x46\n\x13signature_algorithm\x18\x06 \x01(\x0e\x32).sigpb.DigitallySigned.SignatureAlgorithm\x12\x14\n\x0c\x64isplay_name\x18\x08 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\t \x01(\t\x12)\n\x0bprivate_key\x18\x0c \x01(\x0b\x32\x14.google.protobuf.Any\x12.\n\x10storage_settings\x18\r \x01(\x0b\x32\x14.google.protobuf.Any\x12%\n\npublic_key\x18\x0e \x01(\x0b\x32\x11.keyspb.PublicKey\x12\x34\n\x11max_root_duration\x18\x0f \x01(\x0b\x32\x19.google.protobuf.Duration\x12/\n\x0b\x63reate_time\x18\x10 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x64\x65leted\x18\x13 \x01(\x08\x12/\n\x0b\x64\x65lete_time\x18\x14 \x01(\x0b\x32\x1a.google.protobuf.TimestampJ\x04\x08\x12\x10\x13J\x04\x08\x07\x10\x08J\x04\x08\n\x10\x0bJ\x04\x08\x0b\x10\x0c\"j\n\x14SignedEntryTimestamp\x12\x17\n\x0ftimestamp_nanos\x18\x01 \x01(\x03\x12\x0e\n\x06log_id\x18\x02 \x01(\x03\x12)\n\tsignature\x18\x03 \x01(\x0b\x32\x16.sigpb.DigitallySigned\"s\n\rSignedLogRoot\x12\x10\n\x08key_hint\x18\x07 \x01(\x0c\x12\x10\n\x08log_root\x18\x08 \x01(\x0c\x12\x1a\n\x12log_root_signature\x18\t \x01(\x0cJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07\"^\n\rSignedMapRoot\x12\x10\n\x08map_root\x18\t \x01(\x0c\x12\x11\n\tsignature\x18\x04 \x01(\x0cJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x07\x10\x08J\x04\x08\x08\x10\t\"1\n\x05Proof\x12\x12\n\nleaf_index\x18\x01 \x01(\x03\x12\x0e\n\x06hashes\x18\x03 \x03(\x0cJ\x04\x08\x02\x10\x03*D\n\rLogRootFormat\x12\x1b\n\x17LOG_ROOT_FORMAT_UNKNOWN\x10\x00\x12\x16\n\x12LOG_ROOT_FORMAT_V1\x10\x01*D\n\rMapRootFormat\x12\x1b\n\x17MAP_ROOT_FORMAT_UNKNOWN\x10\x00\x12\x16\n\x12MAP_ROOT_FORMAT_V1\x10\x01*\x97\x01\n\x0cHashStrategy\x12\x19\n\x15UNKNOWN_HASH_STRATEGY\x10\x00\x12\x12\n\x0eRFC6962_SHA256\x10\x01\x12\x13\n\x0fTEST_MAP_HASHER\x10\x02\x12\x19\n\x15OBJECT_RFC6962_SHA256\x10\x03\x12\x15\n\x11\x43ONIKS_SHA512_256\x10\x04\x12\x11\n\rCONIKS_SHA256\x10\x05*\x8b\x01\n\tTreeState\x12\x16\n\x12UNKNOWN_TREE_STATE\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\n\n\x06\x46ROZEN\x10\x02\x12\x1f\n\x17\x44\x45PRECATED_SOFT_DELETED\x10\x03\x1a\x02\x08\x01\x12\x1f\n\x17\x44\x45PRECATED_HARD_DELETED\x10\x04\x1a\x02\x08\x01\x12\x0c\n\x08\x44RAINING\x10\x05*G\n\x08TreeType\x12\x15\n\x11UNKNOWN_TREE_TYPE\x10\x00\x12\x07\n\x03LOG\x10\x01\x12\x07\n\x03MAP\x10\x02\x12\x12\n\x0ePREORDERED_LOG\x10\x03\x42H\n\x19\x63om.google.trillian.protoB\rTrillianProtoP\x01Z\x1agithub.com/google/trillianb\x06proto3')
-  ,
-  dependencies=[crypto_dot_keyspb_dot_keyspb__pb2.DESCRIPTOR,crypto_dot_sigpb_dot_sigpb__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
-
-_LOGROOTFORMAT = _descriptor.EnumDescriptor(
-  name='LogRootFormat',
-  full_name='trillian.LogRootFormat',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='LOG_ROOT_FORMAT_UNKNOWN', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LOG_ROOT_FORMAT_V1', index=1, number=1,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=1248,
-  serialized_end=1316,
-)
-_sym_db.RegisterEnumDescriptor(_LOGROOTFORMAT)
-
-LogRootFormat = enum_type_wrapper.EnumTypeWrapper(_LOGROOTFORMAT)
-_MAPROOTFORMAT = _descriptor.EnumDescriptor(
-  name='MapRootFormat',
-  full_name='trillian.MapRootFormat',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='MAP_ROOT_FORMAT_UNKNOWN', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MAP_ROOT_FORMAT_V1', index=1, number=1,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=1318,
-  serialized_end=1386,
-)
-_sym_db.RegisterEnumDescriptor(_MAPROOTFORMAT)
-
-MapRootFormat = enum_type_wrapper.EnumTypeWrapper(_MAPROOTFORMAT)
-_HASHSTRATEGY = _descriptor.EnumDescriptor(
-  name='HashStrategy',
-  full_name='trillian.HashStrategy',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_HASH_STRATEGY', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='RFC6962_SHA256', index=1, number=1,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TEST_MAP_HASHER', index=2, number=2,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='OBJECT_RFC6962_SHA256', index=3, number=3,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CONIKS_SHA512_256', index=4, number=4,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CONIKS_SHA256', index=5, number=5,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=1389,
-  serialized_end=1540,
-)
-_sym_db.RegisterEnumDescriptor(_HASHSTRATEGY)
-
-HashStrategy = enum_type_wrapper.EnumTypeWrapper(_HASHSTRATEGY)
-_TREESTATE = _descriptor.EnumDescriptor(
-  name='TreeState',
-  full_name='trillian.TreeState',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_TREE_STATE', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='ACTIVE', index=1, number=1,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FROZEN', index=2, number=2,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DEPRECATED_SOFT_DELETED', index=3, number=3,
-      serialized_options=_b('\010\001'),
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DEPRECATED_HARD_DELETED', index=4, number=4,
-      serialized_options=_b('\010\001'),
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DRAINING', index=5, number=5,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=1543,
-  serialized_end=1682,
-)
-_sym_db.RegisterEnumDescriptor(_TREESTATE)
-
-TreeState = enum_type_wrapper.EnumTypeWrapper(_TREESTATE)
-_TREETYPE = _descriptor.EnumDescriptor(
-  name='TreeType',
-  full_name='trillian.TreeType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_TREE_TYPE', index=0, number=0,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LOG', index=1, number=1,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MAP', index=2, number=2,
-      serialized_options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PREORDERED_LOG', index=3, number=3,
-      serialized_options=None,
-      type=None),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=1684,
-  serialized_end=1755,
-)
-_sym_db.RegisterEnumDescriptor(_TREETYPE)
-
-TreeType = enum_type_wrapper.EnumTypeWrapper(_TREETYPE)
-LOG_ROOT_FORMAT_UNKNOWN = 0
-LOG_ROOT_FORMAT_V1 = 1
-MAP_ROOT_FORMAT_UNKNOWN = 0
-MAP_ROOT_FORMAT_V1 = 1
-UNKNOWN_HASH_STRATEGY = 0
-RFC6962_SHA256 = 1
-TEST_MAP_HASHER = 2
-OBJECT_RFC6962_SHA256 = 3
-CONIKS_SHA512_256 = 4
-CONIKS_SHA256 = 5
-UNKNOWN_TREE_STATE = 0
-ACTIVE = 1
-FROZEN = 2
-DEPRECATED_SOFT_DELETED = 3
-DEPRECATED_HARD_DELETED = 4
-DRAINING = 5
-UNKNOWN_TREE_TYPE = 0
-LOG = 1
-MAP = 2
-PREORDERED_LOG = 3
-
-
-
-_TREE = _descriptor.Descriptor(
-  name='Tree',
-  full_name='trillian.Tree',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='tree_id', full_name='trillian.Tree.tree_id', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='tree_state', full_name='trillian.Tree.tree_state', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='tree_type', full_name='trillian.Tree.tree_type', index=2,
-      number=3, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='hash_strategy', full_name='trillian.Tree.hash_strategy', index=3,
-      number=4, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='hash_algorithm', full_name='trillian.Tree.hash_algorithm', index=4,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='signature_algorithm', full_name='trillian.Tree.signature_algorithm', index=5,
-      number=6, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='display_name', full_name='trillian.Tree.display_name', index=6,
-      number=8, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='description', full_name='trillian.Tree.description', index=7,
-      number=9, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='private_key', full_name='trillian.Tree.private_key', index=8,
-      number=12, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='storage_settings', full_name='trillian.Tree.storage_settings', index=9,
-      number=13, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='public_key', full_name='trillian.Tree.public_key', index=10,
-      number=14, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='max_root_duration', full_name='trillian.Tree.max_root_duration', index=11,
-      number=15, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='create_time', full_name='trillian.Tree.create_time', index=12,
-      number=16, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='update_time', full_name='trillian.Tree.update_time', index=13,
-      number=17, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='deleted', full_name='trillian.Tree.deleted', index=14,
-      number=19, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='delete_time', full_name='trillian.Tree.delete_time', index=15,
-      number=20, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=175,
-  serialized_end=874,
-)
-
-
-_SIGNEDENTRYTIMESTAMP = _descriptor.Descriptor(
-  name='SignedEntryTimestamp',
-  full_name='trillian.SignedEntryTimestamp',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='timestamp_nanos', full_name='trillian.SignedEntryTimestamp.timestamp_nanos', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='log_id', full_name='trillian.SignedEntryTimestamp.log_id', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='signature', full_name='trillian.SignedEntryTimestamp.signature', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=876,
-  serialized_end=982,
-)
-
-
-_SIGNEDLOGROOT = _descriptor.Descriptor(
-  name='SignedLogRoot',
-  full_name='trillian.SignedLogRoot',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key_hint', full_name='trillian.SignedLogRoot.key_hint', index=0,
-      number=7, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='log_root', full_name='trillian.SignedLogRoot.log_root', index=1,
-      number=8, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='log_root_signature', full_name='trillian.SignedLogRoot.log_root_signature', index=2,
-      number=9, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=984,
-  serialized_end=1099,
-)
-
-
-_SIGNEDMAPROOT = _descriptor.Descriptor(
-  name='SignedMapRoot',
-  full_name='trillian.SignedMapRoot',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='map_root', full_name='trillian.SignedMapRoot.map_root', index=0,
-      number=9, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='signature', full_name='trillian.SignedMapRoot.signature', index=1,
-      number=4, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1101,
-  serialized_end=1195,
-)
-
-
-_PROOF = _descriptor.Descriptor(
-  name='Proof',
-  full_name='trillian.Proof',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='leaf_index', full_name='trillian.Proof.leaf_index', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='hashes', full_name='trillian.Proof.hashes', index=1,
-      number=3, type=12, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1197,
-  serialized_end=1246,
-)
-
-_TREE.fields_by_name['tree_state'].enum_type = _TREESTATE
-_TREE.fields_by_name['tree_type'].enum_type = _TREETYPE
-_TREE.fields_by_name['hash_strategy'].enum_type = _HASHSTRATEGY
-_TREE.fields_by_name['hash_algorithm'].enum_type = crypto_dot_sigpb_dot_sigpb__pb2._DIGITALLYSIGNED_HASHALGORITHM
-_TREE.fields_by_name['signature_algorithm'].enum_type = crypto_dot_sigpb_dot_sigpb__pb2._DIGITALLYSIGNED_SIGNATUREALGORITHM
-_TREE.fields_by_name['private_key'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-_TREE.fields_by_name['storage_settings'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-_TREE.fields_by_name['public_key'].message_type = crypto_dot_keyspb_dot_keyspb__pb2._PUBLICKEY
-_TREE.fields_by_name['max_root_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
-_TREE.fields_by_name['create_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_TREE.fields_by_name['update_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_TREE.fields_by_name['delete_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_SIGNEDENTRYTIMESTAMP.fields_by_name['signature'].message_type = crypto_dot_sigpb_dot_sigpb__pb2._DIGITALLYSIGNED
-DESCRIPTOR.message_types_by_name['Tree'] = _TREE
-DESCRIPTOR.message_types_by_name['SignedEntryTimestamp'] = _SIGNEDENTRYTIMESTAMP
-DESCRIPTOR.message_types_by_name['SignedLogRoot'] = _SIGNEDLOGROOT
-DESCRIPTOR.message_types_by_name['SignedMapRoot'] = _SIGNEDMAPROOT
-DESCRIPTOR.message_types_by_name['Proof'] = _PROOF
-DESCRIPTOR.enum_types_by_name['LogRootFormat'] = _LOGROOTFORMAT
-DESCRIPTOR.enum_types_by_name['MapRootFormat'] = _MAPROOTFORMAT
-DESCRIPTOR.enum_types_by_name['HashStrategy'] = _HASHSTRATEGY
-DESCRIPTOR.enum_types_by_name['TreeState'] = _TREESTATE
-DESCRIPTOR.enum_types_by_name['TreeType'] = _TREETYPE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Tree = _reflection.GeneratedProtocolMessageType('Tree', (_message.Message,), {
-  'DESCRIPTOR' : _TREE,
-  '__module__' : 'trillian_pb2'
-  # @@protoc_insertion_point(class_scope:trillian.Tree)
-  })
-_sym_db.RegisterMessage(Tree)
-
-SignedEntryTimestamp = _reflection.GeneratedProtocolMessageType('SignedEntryTimestamp', (_message.Message,), {
-  'DESCRIPTOR' : _SIGNEDENTRYTIMESTAMP,
-  '__module__' : 'trillian_pb2'
-  # @@protoc_insertion_point(class_scope:trillian.SignedEntryTimestamp)
-  })
-_sym_db.RegisterMessage(SignedEntryTimestamp)
-
-SignedLogRoot = _reflection.GeneratedProtocolMessageType('SignedLogRoot', (_message.Message,), {
-  'DESCRIPTOR' : _SIGNEDLOGROOT,
-  '__module__' : 'trillian_pb2'
-  # @@protoc_insertion_point(class_scope:trillian.SignedLogRoot)
-  })
-_sym_db.RegisterMessage(SignedLogRoot)
-
-SignedMapRoot = _reflection.GeneratedProtocolMessageType('SignedMapRoot', (_message.Message,), {
-  'DESCRIPTOR' : _SIGNEDMAPROOT,
-  '__module__' : 'trillian_pb2'
-  # @@protoc_insertion_point(class_scope:trillian.SignedMapRoot)
-  })
-_sym_db.RegisterMessage(SignedMapRoot)
-
-Proof = _reflection.GeneratedProtocolMessageType('Proof', (_message.Message,), {
-  'DESCRIPTOR' : _PROOF,
-  '__module__' : 'trillian_pb2'
-  # @@protoc_insertion_point(class_scope:trillian.Proof)
-  })
-_sym_db.RegisterMessage(Proof)
-
-
-DESCRIPTOR._options = None
-_TREESTATE.values_by_name["DEPRECATED_SOFT_DELETED"]._options = None
-_TREESTATE.values_by_name["DEPRECATED_HARD_DELETED"]._options = None
-# @@protoc_insertion_point(module_scope)
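For context on what is being dropped here: the generated trillian_pb2 module above exposed ordinary protobuf message classes (Tree, SignedEntryTimestamp, SignedLogRoot, SignedMapRoot, Proof) that aftltool consumed. A minimal usage sketch, assuming the module is importable as trillian_pb2 the way the removed aftl_proto library packaged it; the field names (key_hint, log_root, log_root_signature) come from the SignedLogRoot descriptor above, and the byte values are placeholders, not real log data:

import trillian_pb2  # import path is an assumption; depends on how aftl_proto was laid out

# Build a SignedLogRoot and round-trip it through serialization.
slr = trillian_pb2.SignedLogRoot(
    key_hint=b"\x00" * 8,
    log_root=b"serialized-log-root",
    log_root_signature=b"signature-bytes",
)
blob = slr.SerializeToString()
assert trillian_pb2.SignedLogRoot.FromString(blob).log_root == slr.log_root
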
diff --git a/proto/trillian_pb2_grpc.py b/proto/trillian_pb2_grpc.py
deleted file mode 100644
index a894352..0000000
--- a/proto/trillian_pb2_grpc.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
diff --git a/test/avb_aftl_fuzz.cc b/test/avb_aftl_fuzz.cc
deleted file mode 100644
index 89b59d8..0000000
--- a/test/avb_aftl_fuzz.cc
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include <stddef.h>
-#include <stdint.h>
-
-#include "libavb_aftl/avb_aftl_types.h"
-#include "libavb_aftl/avb_aftl_util.h"
-
-extern "C" int LLVMFuzzerTestOneInput(const char* data, size_t size) {
-  AftlImage* image = parse_aftl_image((uint8_t*)data, size);
-  free_aftl_image(image);
-  return 0;
-}
diff --git a/test/avb_aftl_util_unittest.cc b/test/avb_aftl_util_unittest.cc
deleted file mode 100644
index 33a598c..0000000
--- a/test/avb_aftl_util_unittest.cc
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include "libavb_aftl/avb_aftl_util.h"
-#include <libavb_aftl/libavb_aftl.h>
-#include "avb_unittest_util.h"
-#include "libavb_aftl/avb_aftl_types.h"
-
-namespace {
-
-/* TODO(b/154115873): These VBMetas are manually generated. We need to implement
- * a mock in aftltool that generates an inclusion proof and call that mock from
- * the unit tests, similarly to what is done with GenerateVBMetaImage. */
-const char kAftlImagePath[] = "test/data/aftl_output_vbmeta_with_1_icp.img";
-const uint64_t kAftlImageOffset = 0x1100;
-const char kAftlImageMultiPath[] =
-    "test/data/aftl_output_vbmeta_with_2_icp_same_log.img";
-
-}  // namespace
-
-namespace avb {
-/* Extend BaseAvbToolTest to take advantage of common checks and tooling. */
-class AvbAftlUtilTest : public BaseAvbToolTest {
- public:
-  AvbAftlUtilTest() {}
-  ~AvbAftlUtilTest() {}
-  void SetUp() override {
-    std::string content;
-
-    BaseAvbToolTest::SetUp();
-    /* Read in test data from the aftl_image binaries. */
-    ASSERT_TRUE(
-        base::ReadFileToString(base::FilePath(kAftlImagePath), &content));
-    content = content.substr(kAftlImageOffset);
-    /* Allocate and populate an AftlImage for testing. */
-    aftl_image_ = parse_aftl_image((uint8_t*)content.data(), content.size());
-
-    /* Read in test data from the aftl_image file with multiple ICPs. */
-    ASSERT_TRUE(
-        base::ReadFileToString(base::FilePath(kAftlImageMultiPath), &content));
-    content = content.substr(kAftlImageOffset);
-    /* Allocate and populate an AftlImage for testing. */
-    aftl_image_multi_ =
-        parse_aftl_image((uint8_t*)content.data(), content.size());
-  }
-
-  void TearDown() override {
-    free_aftl_image(aftl_image_);
-    free_aftl_image(aftl_image_multi_);
-    BaseAvbToolTest::TearDown();
-  }
-
-  void TestAftlImageHeader(AftlImageHeader* aftl_header, uint16_t icp_count) {
-    EXPECT_EQ(aftl_header->magic, 0x4c544641ul);
-    EXPECT_EQ(aftl_header->required_icp_version_major, 1ul);
-    EXPECT_EQ(aftl_header->required_icp_version_minor, 2ul);
-    EXPECT_EQ(aftl_header->icp_count, icp_count);
-  }
-
-  void TestAftlIcpEntry(AftlIcpEntry* icp_entry) {
-    /* Test each field in the AftlIcpEntry. */
-    EXPECT_GT(icp_entry->log_url_size, 0ul);
-    EXPECT_GT(icp_entry->leaf_index, 1ul);
-    EXPECT_GT(icp_entry->log_root_descriptor_size, 0ul);
-    EXPECT_GT(icp_entry->annotation_leaf_size, 0ul);
-    EXPECT_EQ(icp_entry->log_root_sig_size, AVB_AFTL_SIGNATURE_SIZE);
-    EXPECT_GT(icp_entry->proof_hash_count, 0ul);
-    EXPECT_LT(icp_entry->proof_hash_count, 64ul);
-    EXPECT_GT(icp_entry->inc_proof_size, 0ul);
-    EXPECT_EQ(mem_to_hexstring(icp_entry->log_url, 8), "6c6f672e656e6470");
-    /* Test the TrillianLogRootDescriptor fields. */
-    EXPECT_EQ(icp_entry->log_root_descriptor.version, 1ul);
-    EXPECT_GT(icp_entry->log_root_descriptor.tree_size, 0ull);
-    EXPECT_EQ(icp_entry->log_root_descriptor.root_hash_size,
-              AVB_AFTL_HASH_SIZE);
-    EXPECT_GT(icp_entry->log_root_descriptor.timestamp, 0ull);
-    EXPECT_GT(icp_entry->log_root_descriptor.revision, 0ull);
-    EXPECT_EQ(icp_entry->log_root_descriptor.metadata_size, 0);
-    /* Test the FirmwareInfo fields. */
-    EXPECT_EQ(icp_entry->annotation_leaf->annotation->vbmeta_hash_size,
-              AVB_AFTL_HASH_SIZE);
-    EXPECT_EQ(icp_entry->proof_hash_count * 32ul, icp_entry->inc_proof_size);
-  }
-
- protected:
-  AftlImage* aftl_image_;
-  AftlImage* aftl_image_multi_;
-};
-
-TEST_F(AvbAftlUtilTest, AftlImageHeaderStructure) {
-  AftlImageHeader* header;
-  ASSERT_NE(aftl_image_, nullptr);
-  header = &(aftl_image_->header);
-  ASSERT_NE(header, nullptr);
-  TestAftlImageHeader(header, 1);
-}
-
-TEST_F(AvbAftlUtilTest, AftlImageMultipleIcps) {
-  AftlImageHeader* header;
-  size_t i;
-
-  ASSERT_NE(aftl_image_multi_, nullptr);
-  header = &(aftl_image_multi_->header);
-  ASSERT_NE(header, nullptr);
-  TestAftlImageHeader(header, 2);
-
-  for (i = 0; i < header->icp_count; i++) {
-    ASSERT_NE(aftl_image_multi_->entries[i], nullptr)
-        << " Failed at entry " << i;
-    TestAftlIcpEntry(aftl_image_multi_->entries[i]);
-  }
-}
-
-TEST_F(AvbAftlUtilTest, AftlIcpEntryStructure) {
-  AftlIcpEntry* icp_entry;
-
-  icp_entry = aftl_image_->entries[0];
-  ASSERT_NE(icp_entry, nullptr);
-  TestAftlIcpEntry(icp_entry);
-}
-
-} /* namespace avb */
diff --git a/test/avb_aftl_validate_unittest.cc b/test/avb_aftl_validate_unittest.cc
deleted file mode 100644
index 2a78e6a..0000000
--- a/test/avb_aftl_validate_unittest.cc
+++ /dev/null
@@ -1,539 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include <gtest/gtest.h>
-
-#include <libavb_aftl/libavb_aftl.h>
-
-#include "avb_unittest_util.h"
-#include "libavb_aftl/avb_aftl_types.h"
-#include "libavb_aftl/avb_aftl_util.h"
-#include "libavb_aftl/avb_aftl_validate.h"
-
-namespace {
-
-/* Public part of testkey_rsa4096.pem, in the AvbRsaPublicKey format. Generated
- * using:
- *   $ openssl rsa -in testkey_rsa4096.pem -pubout -out testkey_rsa4096_pub.pem
- *   $ avbtool extract_public_key --key testkey_rsa4096_pub.pem --output \
- *     testkey_rsa4096_pub.bin.
- */
-const char kKeyBytesPath[] = "test/data/testkey_rsa4096_pub.bin";
-/* Example VBMeta. Its hash should match the value kVBMetaHash defined below. */
-const char kVBMetaPath[] = "test/data/aftl_input_vbmeta.img";
-
-} /* namespace */
-
-namespace avb {
-
-/* Extend BaseAvbToolTest to take advantage of common checks and tooling. */
-class AvbAftlValidateTest : public BaseAvbToolTest {
- public:
-  AvbAftlValidateTest() {}
-  ~AvbAftlValidateTest() {}
-  void SetUp() override {
-    /* Generate an artificial inclusion proof with its own annotation. The
-     * annotation matches the kVBMetaPath file. It is signed using the
-     * testkey_rsa4096.pem key. */
-    /* We define the constants below as string literals (to be able to annotate
-     * the bytes). We keep their sizes in a separate variable as sizeof will
-     * include the final null byte that is automatically appended. */
-    const uint8_t kAnnotationLeafHeader[] =
-        "\x01"                              // Version
-        "\x00\x00\x00\x00\x00\x00\x00\x00"  // Timestamp
-        "\x01";                             // Leaf Type
-    const size_t kAnnotationLeafHeaderSize = sizeof(kAnnotationLeafHeader) - 1;
-    const uint8_t kSignature[] =
-        "\x00"   // Hash Type
-        "\x00"   // Signature Type
-        "\x00";  // Signature size
-    const size_t kSignatureSize = sizeof(kSignature) - 1;
-    const uint8_t kAnnotationHeader[] = "\x20";  // VBMeta hash size
-    const size_t kAnnotationHeaderSize = sizeof(kAnnotationHeader) - 1;
-    /* This is the SHA256 hash of the image at kVBMetaPath */
-    const uint8_t kVBMetaHash[] =
-        "\x34\x1c\x6c\xf2\x4b\xc1\xe6\x4a\xb1\x03\xa0\xee\xe1\x9d\xee\x9c"
-        "\x35\x34\xdb\x07\x17\x29\xb4\xad\xd0\xce\xa0\xbd\x52\x92\x54\xec";
-    const uint8_t kAnnotationFooter[] =
-        "\x03"      // Version incremental size
-        "123"       // Version incremental
-        "\x00"      // Manufacturer key hash size
-        "\x00\x05"  // Description size
-        "abcde";    // Description
-    const size_t kAnnotationFooterSize = sizeof(kAnnotationFooter) - 1;
-    const uint8_t kLogRootDescriptorHeader[] =
-        "\x00\x01"                          // Version
-        "\x00\x00\x00\x00\x00\x00\x00\x03"  // Tree size
-        "\x20";                             // Root hash size
-    const size_t kLogRootDescriptorHeaderSize =
-        sizeof(kLogRootDescriptorHeader) - 1;
-    const uint8_t kLogRootDescriptorRootHash[] =
-        "\x40\x79\x2f\xf1\xcb\xfc\xd1\x8a\x13\x70\x90\xaf\x6a\x16\x4d\xa9"
-        "\x36\x80\x99\xb3\xf9\x7f\x99\x13\x3e\x07\xff\xbc\x73\x42\xfc\xc7";
-    const uint8_t kLogRootDescriptorFooter[] =
-        "\x00\x00\x00\x00\x13\x36\x4b\xff"  // Timestamp
-        "\x00\x00\x00\x00\x00\x00\x00\x00"  // Revision
-        "\x00\x00";                         // Metadata size
-    const size_t kLogRootDescriptorFooterSize =
-        sizeof(kLogRootDescriptorFooter) - 1;
-    /* Signature of the log root descriptor.
-     *   $ openssl dgst -sha256 -sign testkey_rsa4096.pem \
-     *   -out kLogRootHashSignature log_root_descriptor_raw
-     * log_root_descriptor_raw is defined as the concatenation:
-     * kLogRootDescriptorHeader || kLogRootDescriptorRootHash ||
-     * kLogRootDescriptorFooter */
-    const uint8_t kLogRootHashSignature[] = {
-        0x55, 0x1d, 0xd3, 0x13, 0x3c, 0x41, 0xde, 0x67, 0x79, 0xf1, 0xc6, 0xad,
-        0x72, 0x10, 0xff, 0xfb, 0x6d, 0xac, 0xc1, 0x1c, 0x06, 0x2a, 0x3e, 0xa8,
-        0xd9, 0xf3, 0x8c, 0x9c, 0x67, 0xbe, 0x1e, 0x8e, 0xe1, 0x02, 0xf6, 0xdb,
-        0xd2, 0x5c, 0x31, 0x4b, 0x26, 0xad, 0x9a, 0xd1, 0xf5, 0x7d, 0xb9, 0x6b,
-        0x4b, 0xf1, 0x7a, 0x89, 0x9d, 0xf0, 0x17, 0xb4, 0xee, 0xb2, 0x08, 0x0d,
-        0xd8, 0x99, 0xac, 0x7b, 0x34, 0x1f, 0xd1, 0x9c, 0x2e, 0x0c, 0xd1, 0xb1,
-        0x42, 0x34, 0xf2, 0x65, 0xbb, 0x79, 0x7a, 0xac, 0x23, 0x37, 0xec, 0xfc,
-        0xff, 0xbf, 0x66, 0x51, 0xed, 0x3e, 0xa7, 0x45, 0x3a, 0xf9, 0x72, 0xaa,
-        0x01, 0x3c, 0xfd, 0x59, 0x01, 0x67, 0x67, 0xb4, 0x57, 0x23, 0xb6, 0x7e,
-        0x59, 0x82, 0xb3, 0x98, 0xa2, 0x57, 0xd4, 0x64, 0x83, 0xaa, 0x02, 0x17,
-        0x87, 0xfd, 0xa2, 0xe2, 0x3b, 0xa8, 0xf5, 0xc2, 0xfb, 0xce, 0x7f, 0x59,
-        0x72, 0x10, 0xc5, 0x11, 0x81, 0x80, 0x20, 0x4a, 0x3e, 0xf9, 0x85, 0x2e,
-        0x44, 0x94, 0x87, 0xec, 0xfa, 0x2e, 0x8f, 0x75, 0x00, 0x6f, 0x52, 0x1b,
-        0x4d, 0x5c, 0xfc, 0xe4, 0x1f, 0xe2, 0x94, 0xbc, 0x8c, 0xe8, 0x7f, 0x74,
-        0x14, 0x2f, 0x66, 0x8e, 0xfb, 0x11, 0x34, 0xde, 0x80, 0x21, 0x92, 0xc3,
-        0x52, 0xa7, 0xf7, 0x5e, 0x49, 0x53, 0x21, 0x7d, 0x8b, 0xa2, 0xcb, 0x84,
-        0x80, 0x64, 0x0d, 0xd7, 0xd0, 0x6d, 0x6f, 0x2a, 0x98, 0x57, 0x3b, 0x95,
-        0xa1, 0x63, 0x39, 0x00, 0x22, 0x9e, 0x5a, 0x75, 0x07, 0x10, 0x1f, 0x7e,
-        0xdb, 0x05, 0x5d, 0x3d, 0x76, 0x75, 0x3c, 0x1a, 0xd4, 0x1e, 0x8d, 0x6e,
-        0xce, 0x57, 0xd6, 0xce, 0x23, 0xc0, 0x23, 0x4c, 0xcb, 0x10, 0xec, 0x59,
-        0x22, 0x64, 0x57, 0x33, 0x1c, 0x3f, 0xa9, 0x43, 0x97, 0xc1, 0xc0, 0x93,
-        0x5a, 0x16, 0x80, 0x51, 0x56, 0x28, 0x98, 0x33, 0xee, 0x1a, 0xf8, 0x38,
-        0x7a, 0xaa, 0xdb, 0x43, 0x39, 0x90, 0x9e, 0x74, 0xb7, 0x9f, 0xfe, 0xa5,
-        0x84, 0x69, 0xf5, 0x77, 0x80, 0x92, 0xec, 0x06, 0x06, 0xe0, 0xd2, 0x98,
-        0x34, 0x66, 0x25, 0xc3, 0x7c, 0x89, 0x78, 0x3a, 0x0b, 0x48, 0x49, 0x37,
-        0x46, 0x07, 0xc4, 0xc8, 0x04, 0x72, 0x45, 0x60, 0x36, 0x98, 0x2d, 0x47,
-        0xfe, 0xba, 0x74, 0xb9, 0xb0, 0xe4, 0xf5, 0x45, 0xa0, 0xfb, 0x4a, 0x53,
-        0xe0, 0x16, 0x6a, 0x6b, 0x82, 0xcc, 0x33, 0x1c, 0x3c, 0x64, 0xe0, 0x90,
-        0x3c, 0x59, 0xfa, 0x04, 0x51, 0xe0, 0xe8, 0xaa, 0xe9, 0x92, 0x43, 0x04,
-        0x2a, 0x49, 0xd4, 0xdf, 0xac, 0x1d, 0x46, 0x44, 0xad, 0x65, 0x62, 0xaf,
-        0x44, 0x16, 0xb0, 0x05, 0x56, 0x2b, 0xa4, 0xad, 0x4c, 0x7e, 0xbd, 0x04,
-        0x95, 0xcb, 0xce, 0x0e, 0xf6, 0xd5, 0x4b, 0x3a, 0xc0, 0xde, 0x1e, 0xf8,
-        0xfa, 0xf5, 0x73, 0x4a, 0x6d, 0xc2, 0x4a, 0xe1, 0xaf, 0xae, 0xd8, 0x31,
-        0x23, 0x16, 0x5d, 0x15, 0x41, 0xe6, 0xbf, 0x4a, 0xe0, 0xf3, 0xdd, 0x74,
-        0x32, 0x96, 0x64, 0x4c, 0x16, 0x7d, 0xd3, 0xad, 0x21, 0x47, 0x2b, 0x17,
-        0xb9, 0xf3, 0x84, 0x38, 0x80, 0x60, 0xb6, 0xcb, 0x24, 0x45, 0x24, 0x90,
-        0x74, 0xe9, 0x50, 0xea, 0x2e, 0x1f, 0xc2, 0x74, 0x36, 0xa2, 0xf5, 0xd7,
-        0x24, 0xb3, 0xa1, 0x1f, 0xd3, 0x39, 0x61, 0x67, 0x37, 0xe4, 0x2a, 0x20,
-        0x67, 0x95, 0x53, 0x9d, 0xd4, 0xdb, 0x4f, 0xa6, 0xb8, 0x7f, 0x91, 0xb2,
-        0xc5, 0x6f, 0x71, 0x3c, 0x86, 0xc8, 0x36, 0x8d, 0xa4, 0x4d, 0x53, 0x6b,
-        0x3f, 0xe6, 0xce, 0xf1, 0x7a, 0xa2, 0x2e, 0x53, 0x80, 0x4c, 0x52, 0x9d,
-        0x3e, 0xd7, 0xec, 0x47, 0x4a, 0xfa, 0x84, 0xa5, 0x9a, 0x2f, 0x7b, 0xfc,
-        0xfc, 0xe8, 0xa4, 0x09, 0xfb, 0xb5, 0xb7, 0xf2};
-    BaseAvbToolTest::SetUp();
-
-    /* Read in test data from the key and log_sig binaries. */
-    ASSERT_TRUE(
-        base::ReadFileToString(base::FilePath(kKeyBytesPath), &key_bytes_));
-
-    /* Allocate and populate the inclusion proof */
-    icp_entry_ = (AftlIcpEntry*)avb_malloc(sizeof(AftlIcpEntry));
-    if (!icp_entry_) return;
-    icp_entry_->log_root_descriptor.version = 1;
-    icp_entry_->log_root_descriptor.tree_size = 3;
-    icp_entry_->log_root_descriptor.root_hash_size = AVB_AFTL_HASH_SIZE;
-    icp_entry_->log_root_descriptor.timestamp = 322325503;
-    icp_entry_->log_root_descriptor.revision = 0;
-    icp_entry_->log_root_descriptor.metadata_size = 0;
-    icp_entry_->log_root_descriptor.metadata = NULL;
-    icp_entry_->log_root_descriptor_size = kLogRootDescriptorHeaderSize +
-                                           AVB_AFTL_HASH_SIZE +
-                                           kLogRootDescriptorFooterSize;
-    icp_entry_->log_root_descriptor_raw =
-        (uint8_t*)avb_malloc(icp_entry_->log_root_descriptor_size);
-    if (!icp_entry_->log_root_descriptor_raw) {
-      return;
-    }
-    memcpy(icp_entry_->log_root_descriptor_raw,
-           kLogRootDescriptorHeader,
-           kLogRootDescriptorHeaderSize);
-    memcpy(icp_entry_->log_root_descriptor_raw + kLogRootDescriptorHeaderSize,
-           kLogRootDescriptorRootHash,
-           AVB_AFTL_HASH_SIZE);
-    memcpy(icp_entry_->log_root_descriptor_raw + kLogRootDescriptorHeaderSize +
-               AVB_AFTL_HASH_SIZE,
-           kLogRootDescriptorFooter,
-           kLogRootDescriptorFooterSize);
-    icp_entry_->log_root_descriptor.root_hash =
-        (uint8_t*)avb_malloc(AVB_AFTL_HASH_SIZE);
-    if (!icp_entry_->log_root_descriptor.root_hash) return;
-    /* Copy the hash from within the raw version */
-    memcpy(icp_entry_->log_root_descriptor.root_hash,
-           kLogRootDescriptorRootHash,
-           AVB_AFTL_HASH_SIZE);
-    icp_entry_->log_root_sig_size = AVB_AFTL_SIGNATURE_SIZE;
-    icp_entry_->log_root_signature =
-        (uint8_t*)avb_malloc(AVB_AFTL_SIGNATURE_SIZE);
-    memcpy(icp_entry_->log_root_signature,
-           kLogRootHashSignature,
-           AVB_AFTL_SIGNATURE_SIZE);
-
-    /* Allocate the annotation leaf */
-    icp_entry_->annotation_leaf_size =
-        kAnnotationLeafHeaderSize + kSignatureSize + kAnnotationHeaderSize +
-        AVB_AFTL_HASH_SIZE + kAnnotationFooterSize;
-    icp_entry_->annotation_leaf =
-        (SignedVBMetaPrimaryAnnotationLeaf*)avb_calloc(
-            sizeof(SignedVBMetaPrimaryAnnotationLeaf));
-    if (!icp_entry_->annotation_leaf) return;
-    icp_entry_->annotation_leaf->version = 1;
-    icp_entry_->annotation_leaf->timestamp = 0;
-    icp_entry_->annotation_leaf->leaf_type =
-        AVB_AFTL_SIGNED_VBMETA_PRIMARY_ANNOTATION_LEAF;
-    icp_entry_->annotation_leaf->annotation =
-        (VBMetaPrimaryAnnotation*)avb_calloc(sizeof(VBMetaPrimaryAnnotation));
-    if (!icp_entry_->annotation_leaf->annotation) return;
-    icp_entry_->annotation_leaf->annotation->vbmeta_hash_size =
-        AVB_AFTL_HASH_SIZE;
-    icp_entry_->annotation_leaf->annotation->vbmeta_hash =
-        (uint8_t*)avb_calloc(AVB_AFTL_HASH_SIZE);
-    if (!icp_entry_->annotation_leaf->annotation->vbmeta_hash) return;
-    memcpy(icp_entry_->annotation_leaf->annotation->vbmeta_hash,
-           kVBMetaHash,
-           AVB_AFTL_HASH_SIZE);
-    icp_entry_->annotation_leaf_raw =
-        (uint8_t*)avb_calloc(icp_entry_->annotation_leaf_size);
-    if (!icp_entry_->annotation_leaf_raw) return;
-    memcpy(icp_entry_->annotation_leaf_raw,
-           kAnnotationLeafHeader,
-           kAnnotationLeafHeaderSize);
-    memcpy(icp_entry_->annotation_leaf_raw + kAnnotationLeafHeaderSize,
-           kSignature,
-           kSignatureSize);
-    memcpy(icp_entry_->annotation_leaf_raw + kAnnotationLeafHeaderSize +
-               kSignatureSize,
-           kAnnotationHeader,
-           kAnnotationHeaderSize);
-    memcpy(icp_entry_->annotation_leaf_raw + kAnnotationLeafHeaderSize +
-               kSignatureSize + kAnnotationHeaderSize,
-           kVBMetaHash,
-           AVB_AFTL_HASH_SIZE);
-    memcpy(icp_entry_->annotation_leaf_raw + kAnnotationLeafHeaderSize +
-               kSignatureSize + kAnnotationHeaderSize + AVB_AFTL_HASH_SIZE,
-           kAnnotationFooter,
-           kAnnotationFooterSize);
-
-    icp_entry_->leaf_index = 2;
-    icp_entry_->proofs =
-        (uint8_t(*)[AVB_AFTL_HASH_SIZE])avb_calloc(AVB_AFTL_HASH_SIZE);
-    memcpy(icp_entry_->proofs[0],
-           "\xfa\xc5\x42\x03\xe7\xcc\x69\x6c\xf0\xdf\xcb\x42\xc9\x2a\x1d\x9d"
-           "\xba\xf7\x0a\xd9\xe6\x21\xf4\xbd\x8d\x98\x66\x2f\x00\xe3\xc1\x25",
-           AVB_AFTL_HASH_SIZE);
-    icp_entry_->proof_hash_count = 1;
-  }
-
-  void TearDown() override {
-    if (icp_entry_) {
-      if (icp_entry_->annotation_leaf_raw)
-        avb_free(icp_entry_->annotation_leaf_raw);
-      if (icp_entry_->annotation_leaf) {
-        if (icp_entry_->annotation_leaf->annotation) {
-          if (icp_entry_->annotation_leaf->annotation->vbmeta_hash)
-            avb_free(icp_entry_->annotation_leaf->annotation->vbmeta_hash);
-          avb_free(icp_entry_->annotation_leaf->annotation);
-        }
-        avb_free(icp_entry_->annotation_leaf);
-      }
-      if (icp_entry_->log_root_descriptor.root_hash)
-        avb_free(icp_entry_->log_root_descriptor.root_hash);
-      if (icp_entry_->log_root_descriptor_raw)
-        avb_free(icp_entry_->log_root_descriptor_raw);
-      if (icp_entry_->log_root_signature)
-        avb_free(icp_entry_->log_root_signature);
-      if (icp_entry_->proofs) avb_free(icp_entry_->proofs);
-      avb_free(icp_entry_);
-    }
-    BaseAvbToolTest::TearDown();
-  }
-
- protected:
-  AftlIcpEntry* icp_entry_;
-  std::string key_bytes_;
-};
-
-TEST_F(AvbAftlValidateTest, VerifyEntrySignature) {
-  EXPECT_EQ(true,
-            avb_aftl_verify_entry_signature(
-                (uint8_t*)key_bytes_.data(), key_bytes_.size(), icp_entry_));
-}
-
-TEST_F(AvbAftlValidateTest, VerifyIcpRootHash) {
-  EXPECT_EQ(true, avb_aftl_verify_icp_root_hash(icp_entry_));
-}
-
-TEST_F(AvbAftlValidateTest, VerifyVbmetaHash) {
-  std::string vbmeta;
-  ASSERT_TRUE(base::ReadFileToString(base::FilePath(kVBMetaPath), &vbmeta));
-  EXPECT_EQ(true,
-            avb_aftl_verify_vbmeta_hash(
-                (uint8_t*)vbmeta.data(), vbmeta.size(), icp_entry_));
-}
-
-TEST_F(AvbAftlValidateTest, RootFromIcp) {
-  /* Tests from trillian root_from_icp functionality:
-     https://github.com/google/trillian/blob/master/merkle/log_verifier_test.go
-  */
-  uint64_t leaf_index;
-  uint64_t tree_size;
-  uint8_t proof[3][AVB_AFTL_HASH_SIZE];
-  uint8_t leaf_hash[AVB_AFTL_HASH_SIZE];
-  uint8_t hash[AVB_AFTL_HASH_SIZE];
-
-  leaf_index = 0;
-  tree_size = 8;
-  avb_aftl_rfc6962_hash_leaf((uint8_t*)"", 0, leaf_hash);
-  memcpy(proof[0],
-         "\x96\xa2\x96\xd2\x24\xf2\x85\xc6\x7b\xee\x93\xc3\x0f\x8a\x30\x91"
-         "\x57\xf0\xda\xa3\x5d\xc5\xb8\x7e\x41\x0b\x78\x63\x0a\x09\xcf\xc7",
-         AVB_AFTL_HASH_SIZE);
-  memcpy(proof[1],
-         "\x5f\x08\x3f\x0a\x1a\x33\xca\x07\x6a\x95\x27\x98\x32\x58\x0d\xb3"
-         "\xe0\xef\x45\x84\xbd\xff\x1f\x54\xc8\xa3\x60\xf5\x0d\xe3\x03\x1e",
-         AVB_AFTL_HASH_SIZE);
-  memcpy(proof[2],
-         "\x6b\x47\xaa\xf2\x9e\xe3\xc2\xaf\x9a\xf8\x89\xbc\x1f\xb9\x25\x4d"
-         "\xab\xd3\x11\x77\xf1\x62\x32\xdd\x6a\xab\x03\x5c\xa3\x9b\xf6\xe4",
-         AVB_AFTL_HASH_SIZE);
-  avb_aftl_root_from_icp(
-      leaf_index, tree_size, proof, 3, leaf_hash, AVB_AFTL_HASH_SIZE, hash);
-  EXPECT_EQ("5dc9da79a70659a9ad559cb701ded9a2ab9d823aad2f4960cfe370eff4604328",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on test #1";
-
-  leaf_index = 5;
-  tree_size = 8;
-  avb_aftl_rfc6962_hash_leaf((uint8_t*)"@ABC", 4, leaf_hash);
-  memcpy(proof[0],
-         "\xbc\x1a\x06\x43\xb1\x2e\x4d\x2d\x7c\x77\x91\x8f\x44\xe0\xf4\xf7"
-         "\x9a\x83\x8b\x6c\xf9\xec\x5b\x5c\x28\x3e\x1f\x4d\x88\x59\x9e\x6b",
-         AVB_AFTL_HASH_SIZE);
-  memcpy(proof[1],
-         "\xca\x85\x4e\xa1\x28\xed\x05\x0b\x41\xb3\x5f\xfc\x1b\x87\xb8\xeb"
-         "\x2b\xde\x46\x1e\x9e\x3b\x55\x96\xec\xe6\xb9\xd5\x97\x5a\x0a\xe0",
-         AVB_AFTL_HASH_SIZE);
-  memcpy(proof[2],
-         "\xd3\x7e\xe4\x18\x97\x6d\xd9\x57\x53\xc1\xc7\x38\x62\xb9\x39\x8f"
-         "\xa2\xa2\xcf\x9b\x4f\xf0\xfd\xfe\x8b\x30\xcd\x95\x20\x96\x14\xb7",
-         AVB_AFTL_HASH_SIZE);
-  avb_aftl_root_from_icp(
-      leaf_index, tree_size, proof, 3, leaf_hash, AVB_AFTL_HASH_SIZE, hash);
-  EXPECT_EQ("5dc9da79a70659a9ad559cb701ded9a2ab9d823aad2f4960cfe370eff4604328",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on test #2";
-
-  leaf_index = 2;
-  tree_size = 3;
-  avb_aftl_rfc6962_hash_leaf((uint8_t*)"\x10", 1, leaf_hash);
-  memcpy(proof[0],
-         "\xfa\xc5\x42\x03\xe7\xcc\x69\x6c\xf0\xdf\xcb\x42\xc9\x2a\x1d\x9d"
-         "\xba\xf7\x0a\xd9\xe6\x21\xf4\xbd\x8d\x98\x66\x2f\x00\xe3\xc1\x25",
-         AVB_AFTL_HASH_SIZE);
-  avb_aftl_root_from_icp(
-      leaf_index, tree_size, proof, 1, leaf_hash, AVB_AFTL_HASH_SIZE, hash);
-  EXPECT_EQ("aeb6bcfe274b70a14fb067a5e5578264db0fa9b51af5e0ba159158f329e06e77",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on test #3";
-
-  leaf_index = 1;
-  tree_size = 5;
-  avb_aftl_rfc6962_hash_leaf((uint8_t*)"\x00", 1, leaf_hash);
-  memcpy(proof[0],
-         "\x6e\x34\x0b\x9c\xff\xb3\x7a\x98\x9c\xa5\x44\xe6\xbb\x78\x0a\x2c"
-         "\x78\x90\x1d\x3f\xb3\x37\x38\x76\x85\x11\xa3\x06\x17\xaf\xa0\x1d",
-         AVB_AFTL_HASH_SIZE);
-  memcpy(proof[1],
-         "\x5f\x08\x3f\x0a\x1a\x33\xca\x07\x6a\x95\x27\x98\x32\x58\x0d\xb3"
-         "\xe0\xef\x45\x84\xbd\xff\x1f\x54\xc8\xa3\x60\xf5\x0d\xe3\x03\x1e",
-         AVB_AFTL_HASH_SIZE);
-  memcpy(proof[2],
-         "\xbc\x1a\x06\x43\xb1\x2e\x4d\x2d\x7c\x77\x91\x8f\x44\xe0\xf4\xf7"
-         "\x9a\x83\x8b\x6c\xf9\xec\x5b\x5c\x28\x3e\x1f\x4d\x88\x59\x9e\x6b",
-         AVB_AFTL_HASH_SIZE);
-  avb_aftl_root_from_icp(
-      leaf_index, tree_size, proof, 3, leaf_hash, AVB_AFTL_HASH_SIZE, hash);
-  EXPECT_EQ("4e3bbb1f7b478dcfe71fb631631519a3bca12c9aefca1612bfce4c13a86264d4",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on test #4";
-}
-
-TEST_F(AvbAftlValidateTest, ChainInner) {
-  uint8_t hash[AVB_AFTL_HASH_SIZE];
-  uint8_t seed[AVB_AFTL_HASH_SIZE];
-  uint8_t proof[4][AVB_AFTL_HASH_SIZE];
-  uint64_t i;
-
-  for (i = 0; i < AVB_AFTL_HASH_SIZE; i++) {
-    hash[i] = 0;
-  }
-
-  memcpy(seed, "1234567890abcdefghijklmnopqrstuv", AVB_AFTL_HASH_SIZE);
-  memcpy(proof[0], "abcdefghijklmnopqrstuvwxyz123456", AVB_AFTL_HASH_SIZE);
-  avb_aftl_chain_inner(seed, AVB_AFTL_HASH_SIZE, (uint8_t*)proof, 1, 0, hash);
-  EXPECT_EQ("9cb6af81b146b6a81d911d26f4c0d467265a3385d6caf926d5515e58efd161a3",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed with seed: "
-      << "\"1234567890abcdefghijklmnopqrstuv\", proof ["
-      << "\"abcdefghijklmnopqrstuvwxyz123456\"], and leaf_index 0";
-  memcpy(proof[1], "7890abcdefghijklmnopqrstuvwxyz12", AVB_AFTL_HASH_SIZE);
-  avb_aftl_chain_inner(seed, AVB_AFTL_HASH_SIZE, (uint8_t*)proof, 2, 0, hash);
-  EXPECT_EQ("368d8213cd7d62335a84b3a3d75c8a0302c0d63c93cbbd22c5396dc4c75ba019",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed with seed: "
-      << "\"1234567890abcdefghijklmnopqrstuv\", proof ["
-      << "\"abcdefghijklmnopqrstuvwxyz123456\", "
-         "\"7890abcdefghijklmnopqrstuvwxyz12\"],"
-      << " and leaf_index 0";
-  avb_aftl_chain_inner(seed, AVB_AFTL_HASH_SIZE, (uint8_t*)proof, 2, 1, hash);
-  EXPECT_EQ("78418158eb5943c50ec581b41f105ba9aecc1b9e7aba3ea2e93021cbd5bd166e",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed with seed: "
-      << "\"1234567890abcdefghijklmnopqrstuv\", proof ["
-      << "\"abcdefghijklmnopqrstuvwxyz123456\", "
-         "\"7890abcdefghijklmnopqrstuvwxyz12\"],"
-      << " and leaf_index 1";
-  memcpy(proof[2], "abcdefghijklmn0pqrstuvwxyz123456", AVB_AFTL_HASH_SIZE);
-  memcpy(proof[3], "7890abcdefgh1jklmnopqrstuvwxyz12", AVB_AFTL_HASH_SIZE);
-  avb_aftl_chain_inner(seed, AVB_AFTL_HASH_SIZE, (uint8_t*)proof, 4, 1, hash);
-  EXPECT_EQ("83309c48fb92707f5788b6dd4c9a89042dff20856ad9529b7fb8e5cdf47c04f8",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed with seed: "
-      << "\"1234567890abcdefghijklmnopqrstuv\", proof ["
-      << "\"abcdefghijklmnopqrstuvwxyz123456\", "
-         "\"7890abcdefghijklmnopqrstuvwxyz12\","
-      << "\"abcdefghijklmnopqrstuvwxyz123456\", "
-         "\"7890abcdefghijklmnopqrstuvwxyz12\"]"
-      << " and leaf_index 1";
-  avb_aftl_chain_inner(seed, AVB_AFTL_HASH_SIZE, (uint8_t*)proof, 4, 3, hash);
-  EXPECT_EQ("13e5f7e441dc4dbea659acbc989ac33222f4447546e3dac36b0e0c9977d52b97",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed with seed: "
-      << "\"1234567890abcdefghijklmnopqrstuv\", proof ["
-      << "\"abcdefghijklmnopqrstuvwxyz123456\", "
-         "\"7890abcdefghijklmnopqrstuvwxyz12\","
-      << "\"abcdefghijklmnopqrstuvwxyz123456\", "
-         "\"7890abcdefghijklmnopqrstuvwxyz12\"]"
-      << " and leaf_index 3";
-}
-
-TEST_F(AvbAftlValidateTest, ChainBorderRight) {
-  uint8_t hash[AVB_AFTL_HASH_SIZE];
-  uint8_t seed[AVB_AFTL_HASH_SIZE];
-  uint8_t proof[2][AVB_AFTL_HASH_SIZE];
-  uint64_t i;
-
-  for (i = 0; i < AVB_AFTL_HASH_SIZE; i++) {
-    hash[i] = 0;
-  }
-
-  memcpy(seed, "1234567890abcdefghijklmnopqrstuv", AVB_AFTL_HASH_SIZE);
-  memcpy(proof[0], "abcdefghijklmnopqrstuvwxyz123456", AVB_AFTL_HASH_SIZE);
-  avb_aftl_chain_border_right(
-      seed, AVB_AFTL_HASH_SIZE, (uint8_t*)proof, 1, hash);
-  EXPECT_EQ("363aa8a62b784be38392ab69ade1aac2562f8989ce8986bec685d2957d657310",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed with seed: "
-      << "\"1234567890abcdefghijklmnopqrstuv\" and proof "
-         "[\"abcdefghijklmnopqrstuvwxyz123456\"]";
-  memcpy(proof[1], "7890abcdefghijklmnopqrstuvwxyz12", AVB_AFTL_HASH_SIZE);
-  avb_aftl_chain_border_right(
-      seed, AVB_AFTL_HASH_SIZE, (uint8_t*)proof, 2, hash);
-  EXPECT_EQ("618fc58c45faea808e0bbe0f82afbe7687f4db2608824120e8ade507cbce221f",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed with seed: "
-      << "\"1234567890abcdefghijklmnopqrstuv\" and proof ["
-      << "\"abcdefghijklmnopqrstuvwxyz123456\", "
-         "\"7890abcdefghijklmnopqrstuvwxyz12\"]";
-}
-
-TEST_F(AvbAftlValidateTest, RFC6962HashChildren) {
-  uint8_t hash[AVB_AFTL_HASH_SIZE];
-
-  avb_aftl_rfc6962_hash_children((uint8_t*)"", 0, (uint8_t*)"", 0, hash);
-  EXPECT_EQ("4bf5122f344554c53bde2ebb8cd2b7e3d1600ad631c385a5d7cce23c7785459a",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on inputs \"\" and \"\"";
-
-  avb_aftl_rfc6962_hash_children((uint8_t*)"abcd", 4, (uint8_t*)"", 0, hash);
-  EXPECT_EQ("b75eb7b06e69c1c49597fba37398e0f5ba319c7164ed67bb19b41e9d576313b9",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on inputs \"abcd\" and \"\"";
-
-  avb_aftl_rfc6962_hash_children((uint8_t*)"", 0, (uint8_t*)"efgh", 4, hash);
-  EXPECT_EQ("8d65f3e92e3853cee633345caca3e035f01c2e44815371985baed2c45c10ca40",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on inputs \"\" and \"efgh\"";
-
-  avb_aftl_rfc6962_hash_children(
-      (uint8_t*)"abcd", 4, (uint8_t*)"efgh", 4, hash);
-  EXPECT_EQ("41561b1297f692dad705e28ece8bf47060fba1abeeebda0aa67c43570a36bf79",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on inputs \"abcd\" and \"efgh\"";
-}
-
-TEST_F(AvbAftlValidateTest, RFC6962HashLeaf) {
-  uint8_t hash[AVB_AFTL_HASH_SIZE];
-  avb_aftl_rfc6962_hash_leaf((uint8_t*)"", 0, hash);
-  EXPECT_EQ("6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on input \"\"";
-  avb_aftl_rfc6962_hash_leaf((uint8_t*)"abcdefg", 7, hash);
-  EXPECT_EQ("6b43f785b72386e132b275bc918c25dbc687ab8427836bef6ce4509b64f4f54d",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE))
-      << "Failed on input \"abcdefg\"";
-}
-
-TEST_F(AvbAftlValidateTest, Sha256) {
-  /* Computed with:
-   *
-   * $ echo -n foobar |sha256sum
-   * c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2
-   */
-  uint8_t hash[AVB_AFTL_HASH_SIZE];
-  avb_aftl_sha256(NULL, 0, hash);
-  EXPECT_EQ("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE));
-  avb_aftl_sha256((uint8_t*)"foobar", 6, hash);
-  EXPECT_EQ("c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2",
-            mem_to_hexstring(hash, AVB_AFTL_HASH_SIZE));
-}
-
-TEST_F(AvbAftlValidateTest, AvbAftlCountLeadingZeros) {
-  /* Spot checks to ensure aftl_count_leading_zeros is correct. */
-  EXPECT_EQ(52ull, avb_aftl_count_leading_zeros(4095))
-      << "Failed on input 4095";
-  EXPECT_EQ(12ull, avb_aftl_count_leading_zeros(0xfffffffffffff))
-      << "Failed on input 0xfffffffffffff";
-  EXPECT_EQ(64ull, avb_aftl_count_leading_zeros(0)) << "Failed on input 0";
-  EXPECT_EQ(0ull, avb_aftl_count_leading_zeros(0xffffffffffffffff))
-      << "Failed on input 0xffffffffffffffff";
-}
-
-} /* namespace avb */
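The validate tests deleted above double as executable documentation of the RFC 6962 Merkle-tree math that libavb_aftl implemented: leaves are hashed with a 0x00 domain-separation prefix, interior nodes with a 0x01 prefix, and avb_aftl_root_from_icp walks an inclusion proof back up to the log root. A short standalone sketch reusing the test vectors above (function names here are illustrative, not part of libavb_aftl; error handling is elided):

import hashlib

def hash_leaf(data):
    # RFC 6962 leaf hash: SHA-256(0x00 || data).
    return hashlib.sha256(b"\x00" + data).digest()

def hash_children(left, right):
    # RFC 6962 interior-node hash: SHA-256(0x01 || left || right).
    return hashlib.sha256(b"\x01" + left + right).digest()

def root_from_inclusion_proof(leaf_index, tree_size, proof, leaf_hash):
    # Recompute the tree root from a leaf hash and its inclusion proof
    # (RFC 6962 / RFC 9162 verification, i.e. what avb_aftl_root_from_icp did).
    fn, sn, node = leaf_index, tree_size - 1, leaf_hash
    for p in proof:
        if fn % 2 == 1 or fn == sn:
            node = hash_children(p, node)
            while fn % 2 == 0 and fn != 0:
                fn, sn = fn >> 1, sn >> 1
        else:
            node = hash_children(node, p)
        fn, sn = fn >> 1, sn >> 1
    return node

# Vectors from the deleted RFC6962HashLeaf / RFC6962HashChildren tests.
assert hash_leaf(b"").hex() == (
    "6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d")
assert hash_children(b"", b"").hex() == (
    "4bf5122f344554c53bde2ebb8cd2b7e3d1600ad631c385a5d7cce23c7785459a")

# Test #3 from the deleted RootFromIcp test: leaf 0x10 at index 2 in a 3-leaf tree.
proof = [bytes.fromhex(
    "fac54203e7cc696cf0dfcb42c92a1d9dbaf70ad9e621f4bd8d98662f00e3c125")]
root = root_from_inclusion_proof(2, 3, proof, hash_leaf(b"\x10"))
assert root.hex() == (
    "aeb6bcfe274b70a14fb067a5e5578264db0fa9b51af5e0ba159158f329e06e77")

In the original implementation, avb_aftl_chain_inner and avb_aftl_chain_border_right roughly correspond to the two halves of this walk: the inner part of the path, where the side depends on the leaf index, and the right border of the tree, where the node is always hashed as the right child.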
diff --git a/test/avb_aftl_verify_unittest.cc b/test/avb_aftl_verify_unittest.cc
deleted file mode 100644
index 1e44a4a..0000000
--- a/test/avb_aftl_verify_unittest.cc
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use, copy,
- * modify, merge, publish, distribute, sublicense, and/or sell copies
- * of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
- * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
- * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-#include <gtest/gtest.h>
-
-#include <libavb_aftl/libavb_aftl.h>
-
-#include "avb_unittest_util.h"
-#include "fake_avb_ops.h"
-#include "libavb_aftl/avb_aftl_types.h"
-#include "libavb_aftl/avb_aftl_util.h"
-#include "libavb_aftl/avb_aftl_validate.h"
-#include "libavb_aftl/avb_aftl_verify.h"
-
-namespace {
-
-/* Log transparency key */
-const char kAftlTestKey[] = "test/data/aftl_pubkey_1.bin";
-/* Full VBMeta partition which contains an AftlImage */
-/* TODO(b/154115873): These VBMetas are manually generated. We need to implement
- * a mock in aftltool that generates an inclusion proof and call that mock from
- * the unit tests, similarly to what is done with GenerateVBMetaImage. */
-const char kVbmetaWithAftlDescBin[] =
-    "test/data/aftl_output_vbmeta_with_1_icp.img";
-/* Size of the VBMetaImage in the partition */
-const uint64_t kVbmetaSize = 0x1100;
-
-} /* namespace */
-
-namespace avb {
-
-/* Extend BaseAvbToolTest to take advantage of common checks and tooling. */
-class AvbAftlVerifyTest : public BaseAvbToolTest,
-                          public FakeAvbOpsDelegateWithDefaults {
- public:
-  AvbAftlVerifyTest() {}
-  ~AvbAftlVerifyTest() {}
-  void SetUp() override {
-    BaseAvbToolTest::SetUp();
-    ops_.set_delegate(this);
-    ops_.set_partition_dir(base::FilePath("test/data"));
-    asv_test_data_ = NULL;
-
-    /* Read in the test data. */
-    ASSERT_TRUE(base::ReadFileToString(base::FilePath(kAftlTestKey), &key_));
-    ASSERT_TRUE(base::ReadFileToString(base::FilePath(kVbmetaWithAftlDescBin),
-                                       &vbmeta_icp_));
-    /* Keep a truncated version of the image without the ICP */
-    vbmeta_ = vbmeta_icp_.substr(0, kVbmetaSize);
-
-    /* Set up required parts of asv_test_data */
-    asv_test_data_ = (AvbSlotVerifyData*)avb_calloc(sizeof(AvbSlotVerifyData));
-    ASSERT_TRUE(asv_test_data_ != NULL);
-    asv_test_data_->ab_suffix = (char*)"";
-    asv_test_data_->num_vbmeta_images = 1;
-    asv_test_data_->vbmeta_images =
-        (AvbVBMetaData*)avb_calloc(sizeof(AvbVBMetaData));
-    ASSERT_TRUE(asv_test_data_->vbmeta_images != NULL);
-    asv_test_data_->vbmeta_images[0].vbmeta_size = vbmeta_.size();
-    asv_test_data_->vbmeta_images[0].vbmeta_data =
-        (uint8_t*)avb_calloc(vbmeta_.size());
-    ASSERT_TRUE(asv_test_data_->vbmeta_images[0].vbmeta_data != NULL);
-    memcpy(asv_test_data_->vbmeta_images[0].vbmeta_data,
-           vbmeta_.data(),
-           vbmeta_.size());
-    asv_test_data_->vbmeta_images[0].partition_name =
-        (char*)"aftl_output_vbmeta_with_1_icp";
-  }
-
-  void TearDown() override {
-    if (asv_test_data_ != NULL) {
-      if (asv_test_data_->vbmeta_images != NULL) {
-        if (asv_test_data_->vbmeta_images[0].vbmeta_data != NULL) {
-          avb_free(asv_test_data_->vbmeta_images[0].vbmeta_data);
-        }
-        avb_free(asv_test_data_->vbmeta_images);
-      }
-      avb_free(asv_test_data_);
-    }
-    BaseAvbToolTest::TearDown();
-  }
-
- protected:
-  AvbSlotVerifyData* asv_test_data_;
-  std::string key_;
-  std::string vbmeta_;
-  std::string vbmeta_icp_;
-};
-
-TEST_F(AvbAftlVerifyTest, Basic) {
-  AftlSlotVerifyResult result = aftl_slot_verify(
-      ops_.avb_ops(), asv_test_data_, (uint8_t*)key_.data(), key_.size());
-  EXPECT_EQ(result, AFTL_SLOT_VERIFY_RESULT_OK);
-}
-
-TEST_F(AvbAftlVerifyTest, PartitionError) {
-  asv_test_data_->vbmeta_images[0].partition_name = (char*)"do-no-exist";
-  AftlSlotVerifyResult result = aftl_slot_verify(
-      ops_.avb_ops(), asv_test_data_, (uint8_t*)key_.data(), key_.size());
-  EXPECT_EQ(result, AFTL_SLOT_VERIFY_RESULT_ERROR_IMAGE_NOT_FOUND);
-}
-
-TEST_F(AvbAftlVerifyTest, MismatchingVBMeta) {
-  asv_test_data_->vbmeta_images[0].vbmeta_data[0] = 'X';
-  AftlSlotVerifyResult result = aftl_slot_verify(
-      ops_.avb_ops(), asv_test_data_, (uint8_t*)key_.data(), key_.size());
-  EXPECT_EQ(result, AFTL_SLOT_VERIFY_RESULT_ERROR_VBMETA_HASH_MISMATCH);
-}
-
-TEST_F(AvbAftlVerifyTest, InvalidKey) {
-  // Corrupt the key in order to fail the verification: complement the last
-  // byte, we keep the key header valid.
-  key_[key_.size() - 1] = ~key_[key_.size() - 1];
-  AftlSlotVerifyResult result = aftl_slot_verify(
-      ops_.avb_ops(), asv_test_data_, (uint8_t*)key_.data(), key_.size());
-  EXPECT_EQ(result, AFTL_SLOT_VERIFY_RESULT_ERROR_INVALID_PROOF_SIGNATURE);
-}
-
-} /* namespace avb */
diff --git a/test/avbtool_unittest.cc b/test/avbtool_unittest.cc
index 7fb89c4..b8fdc0e 100644
--- a/test/avbtool_unittest.cc
+++ b/test/avbtool_unittest.cc
@@ -1935,6 +1935,62 @@
                  path.value().c_str());
 }
 
+TEST_F(AvbToolTest, AddHashtreeFooterSingleBlock) {
+  // Tests the special case where the file size is exactly one block.
+  size_t file_size = 4096;
+  base::FilePath path = GenerateImage("data.bin", file_size);
+
+  // Note how there is no --partition_size or --partition_name here.
+  EXPECT_COMMAND(0,
+                 "./avbtool add_hashtree_footer --salt d00df00d "
+                 "--image %s "
+                 "--algorithm SHA256_RSA2048 "
+                 "--key test/data/testkey_rsa2048.pem "
+                 "--internal_release_string \"\" ",
+                 path.value().c_str());
+
+  ASSERT_EQ(
+      "Footer version:           1.0\n"
+      "Image size:               20480 bytes\n"
+      "Original image size:      4096 bytes\n"
+      "VBMeta offset:            12288\n"
+      "VBMeta size:              1344 bytes\n"
+      "--\n"
+      "Minimum libavb version:   1.0\n"
+      "Header Block:             256 bytes\n"
+      "Authentication Block:     320 bytes\n"
+      "Auxiliary Block:          768 bytes\n"
+      "Public key (sha1):        cdbb77177f731920bbe0a0f94f84d9038ae0617d\n"
+      "Algorithm:                SHA256_RSA2048\n"
+      "Rollback Index:           0\n"
+      "Flags:                    0\n"
+      "Rollback Index Location:  0\n"
+      "Release String:           ''\n"
+      "Descriptors:\n"
+      "    Hashtree descriptor:\n"
+      "      Version of dm-verity:  1\n"
+      "      Image Size:            4096 bytes\n"
+      "      Tree Offset:           4096\n"
+      "      Tree Size:             0 bytes\n"
+      "      Data Block Size:       4096 bytes\n"
+      "      Hash Block Size:       4096 bytes\n"
+      "      FEC num roots:         2\n"
+      "      FEC offset:            4096\n"
+      "      FEC size:              8192 bytes\n"
+      "      Hash Algorithm:        sha1\n"
+      "      Partition Name:        \n"
+      "      Salt:                  d00df00d\n"
+      "      Root Digest:           4bd1e1f0aa1c2c793bb9f3e52de6ae7393889e61\n"
+      "      Flags:                 0\n",
+      InfoImage(path));
+
+  // Check that at least avbtool can verify the image and hashtree.
+  EXPECT_COMMAND(0,
+                 "./avbtool verify_image "
+                 "--image %s ",
+                 path.value().c_str());
+}
+
 TEST_F(AvbToolTest, AddHashtreeFooterNoSizeWrongSize) {
   // Size must be a multiple of block size (4096 bytes) and this one isn't...
   size_t file_size = 70 * 1024;
@@ -1950,6 +2006,59 @@
                  path.value().c_str());
 }
 
+TEST_F(AvbToolTest, AddHashtreeFooterWithCheckAtMostOnce) {
+  size_t partition_size = 10 * 1024 * 1024;
+  base::FilePath path = GenerateImage("digest_location", partition_size / 2);
+  EXPECT_COMMAND(0,
+                 "./avbtool add_hashtree_footer --salt d00df00d "
+                 "--hash_algorithm sha256 --image %s "
+                 "--partition_size %d --partition_name foobar "
+                 "--algorithm SHA256_RSA2048 "
+                 "--key test/data/testkey_rsa2048.pem "
+                 "--internal_release_string \"\" "
+                 "--check_at_most_once",
+                 path.value().c_str(),
+                 (int)partition_size);
+  // Two important bits we expect with --check_at_most_once:
+  //   Minimum libavb version = 1.1
+  //   Hashtree descriptor -> Flags = 2
+  ASSERT_EQ(
+      "Footer version:           1.0\n"
+      "Image size:               10485760 bytes\n"
+      "Original image size:      5242880 bytes\n"
+      "VBMeta offset:            5337088\n"
+      "VBMeta size:              1344 bytes\n"
+      "--\n"
+      "Minimum libavb version:   1.1\n"
+      "Header Block:             256 bytes\n"
+      "Authentication Block:     320 bytes\n"
+      "Auxiliary Block:          768 bytes\n"
+      "Public key (sha1):        cdbb77177f731920bbe0a0f94f84d9038ae0617d\n"
+      "Algorithm:                SHA256_RSA2048\n"
+      "Rollback Index:           0\n"
+      "Flags:                    0\n"
+      "Rollback Index Location:  0\n"
+      "Release String:           ''\n"
+      "Descriptors:\n"
+      "    Hashtree descriptor:\n"
+      "      Version of dm-verity:  1\n"
+      "      Image Size:            5242880 bytes\n"
+      "      Tree Offset:           5242880\n"
+      "      Tree Size:             45056 bytes\n"
+      "      Data Block Size:       4096 bytes\n"
+      "      Hash Block Size:       4096 bytes\n"
+      "      FEC num roots:         2\n"
+      "      FEC offset:            5287936\n"
+      "      FEC size:              49152 bytes\n"
+      "      Hash Algorithm:        sha256\n"
+      "      Partition Name:        foobar\n"
+      "      Salt:                  d00df00d\n"
+      "      Root Digest:           "
+      "d0e31526f5a3f8e3f59acf726bd31ae7861ee78f9baa9195356bf479c6f9119d\n"
+      "      Flags:                 2\n",
+      InfoImage(path));
+}
+
 TEST_F(AvbToolTest, KernelCmdlineDescriptor) {
   base::FilePath vbmeta_path =
       testdir_.Append("vbmeta_kernel_cmdline_desc.bin");
@@ -3118,7 +3227,7 @@
     std::string extra_args;
     if (target_required_minor_version == 1) {
       // The --do_not_use_ab option will require 1.1.
-      extra_args = "--do_not_use_ab";
+      extra_args = "--do_not_use_ab --check_at_most_once";
     } else if (target_required_minor_version == 2) {
       extra_args = "--rollback_index_location 2";
     }
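The two new add_hashtree_footer tests above also serve as a compact reference for the CLI they exercise. A rough standalone sketch of the single-block case outside the gtest harness; the input file and its contents are assumptions mirroring the test setup, while all flags and paths are taken verbatim from the test:

import subprocess

image = "data.bin"
with open(image, "wb") as f:
    # A single 4096-byte block, as in AddHashtreeFooterSingleBlock.
    # Contents are arbitrary here, so the resulting digest will differ.
    f.write(b"\0" * 4096)

# No --partition_size / --partition_name: avbtool grows the image just enough
# for the hashtree, FEC, and footer (20480 bytes in the test expectation above).
subprocess.check_call([
    "./avbtool", "add_hashtree_footer",
    "--salt", "d00df00d",
    "--image", image,
    "--algorithm", "SHA256_RSA2048",
    "--key", "test/data/testkey_rsa2048.pem",
    "--internal_release_string", "",
])
subprocess.check_call(["./avbtool", "verify_image", "--image", image])

AddHashtreeFooterWithCheckAtMostOnce follows the same pattern with --partition_size/--partition_name plus --check_at_most_once, and its expected InfoImage(path) output pins down the two observable effects of that flag: a minimum libavb version of 1.1 and a hashtree descriptor Flags value of 2.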
diff --git a/test/corpus/icp.bin b/test/corpus/icp.bin
deleted file mode 100644
index 19125fe..0000000
--- a/test/corpus/icp.bin
+++ /dev/null
Binary files differ
diff --git a/test/data/aftl_input_vbmeta.img b/test/data/aftl_input_vbmeta.img
deleted file mode 100644
index 4660701..0000000
--- a/test/data/aftl_input_vbmeta.img
+++ /dev/null
Binary files differ
diff --git a/test/data/aftl_output_vbmeta_with_1_icp.img b/test/data/aftl_output_vbmeta_with_1_icp.img
deleted file mode 100644
index 74b67bb..0000000
--- a/test/data/aftl_output_vbmeta_with_1_icp.img
+++ /dev/null
Binary files differ
diff --git a/test/data/aftl_output_vbmeta_with_2_icp_same_log.img b/test/data/aftl_output_vbmeta_with_2_icp_same_log.img
deleted file mode 100644
index 2a6368e..0000000
--- a/test/data/aftl_output_vbmeta_with_2_icp_same_log.img
+++ /dev/null
Binary files differ
diff --git a/test/data/aftl_pubkey_1.bin b/test/data/aftl_pubkey_1.bin
deleted file mode 100644
index 5bad4be..0000000
--- a/test/data/aftl_pubkey_1.bin
+++ /dev/null
Binary files differ
diff --git a/test/data/aftl_pubkey_1.pem b/test/data/aftl_pubkey_1.pem
deleted file mode 100644
index 8bfd816..0000000
--- a/test/data/aftl_pubkey_1.pem
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN PUBLIC KEY-----
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4ilqCNsenNA013iCdwgD
-YPxZ853nbHG9lMBp9boXiwRcqT/8bUKHIL7YX5z7s+QoRYVY3rkMKppRabclXzyx
-H59YnPMaU4uv7NqwWzjgaZo7E+vo7IF+KBjV3cJulId5Av0yIYUCsrwd7MpGtWdC
-Q3S+7Vd4zwzCKEhcvliNIhnNlp1U3wNkPCxOyCAsMEn6k8O5ar12ke5TvxDv15db
-rPDeHh8G2OYWoCkWL+lSN35L2kOJqKqVbLKWrrOd96RCYrrtbPCi580OADJRcUlG
-lgcjwmNwmypBWvQMZ6ITj0P0ksHnl1zZz1DE2rXe1goLI1doghb5KxLaezlR8c2C
-E3w/uo9KJgNmNgUVzzqZZ6FE0moyIDNOpP7KtZAL0DvEZj6jqLbB0ccPQElrg52m
-Dv2/A3nYSr0mYBKeskT4+Bg7PGgoC8p7WyLSxMyzJEDYdtrj9OFx6eZaA23oqTQx
-k3Qq5H8RfNBeeSUEeKF7pKH/7gyqZ2bNzBFMA2EBZgBozwRfaeN/HCv3qbaCnwvu
-6caacmAsK+RxiYxSL1QsJqyhCWWGxVyenmxdc1KG/u5ypi7OIioztyzR3t2tAzD3
-Nb+2t8lgHBRxbV24yiPlnvPmB1ZYEctXnlRR9Evpl1o9xA9NnybPHKr9rozN39CZ
-V/USB8K6ao1y5xPZxa8CZksCAwEAAQ==
------END PUBLIC KEY-----
-
diff --git a/test/data/find_aftl_descriptor.bin b/test/data/find_aftl_descriptor.bin
deleted file mode 100644
index 5044ab3..0000000
--- a/test/data/find_aftl_descriptor.bin
+++ /dev/null
Binary files differ
diff --git a/tools/transparency/pixel_factory_image_verify.py b/tools/transparency/pixel_factory_image_verify.py
index 8e1069d..2040ab9 100755
--- a/tools/transparency/pixel_factory_image_verify.py
+++ b/tools/transparency/pixel_factory_image_verify.py
@@ -34,10 +34,14 @@
 The list of canonical Pixel factory images can be found here:
 https://developers.google.com/android/images
 
-Supported are all factory images of Pixel 3 and later devices.
+Supported: all factory images of Pixel 6 and later devices.
 
 In order for the tool to run correctly, the following utilities need to be
-pre-installed: wget, unzip.
+pre-installed: grep, wget or curl, unzip.
+
+Additionally, make sure that the bootloader unpacker script is downloaded
+separately, made executable, symlinked as 'fbpacktool', and accessible via
+your shell $PATH.
 
 The tool also runs outside of the repository location as long as the working
 directory is writable.
@@ -45,6 +49,7 @@
 
 from __future__ import print_function
 
+import glob
 import os
 import shutil
 import subprocess
@@ -56,12 +61,17 @@
 class PixelFactoryImageVerifier(object):
   """Object for the pixel_factory_image_verify command line tool."""
 
+  ERR_TOOL_UNAVAIL_FMT_STR = 'Necessary command line tool needs to be installed first: %s'
+
   def __init__(self):
     self.working_dir = os.getcwd()
     self.script_path = os.path.realpath(__file__)
     self.script_dir = os.path.split(self.script_path)[0]
     self.avbtool_path = os.path.abspath(os.path.join(self.script_path,
                                                      '../../../avbtool'))
+    self.fw_unpacker_path = distutils.spawn.find_executable('fbpacktool')
+    self.wget_path = distutils.spawn.find_executable('wget')
+    self.curl_path = distutils.spawn.find_executable('curl')
 
   def run(self, argv):
     """Command line processor.
@@ -76,12 +86,21 @@
       sys.exit(1)
 
     # Checks if necessary commands are available.
-    for cmd in ['grep', 'unzip', 'wget']:
+    for cmd in ['grep', 'unzip']:
       if not distutils.spawn.find_executable(cmd):
-        print('Necessary command line tool needs to be installed first: %s'
-              % cmd)
+        print(PixelFactoryImageVerifier.ERR_TOOL_UNAVAIL_FMT_STR % cmd)
         sys.exit(1)
 
+    # Checks if `fbpacktool` is available.
+    if not self.fw_unpacker_path:
+      print(PixelFactoryImageVerifier.ERR_TOOL_UNAVAIL_FMT_STR % 'fbpacktool')
+      sys.exit(1)
+
+    # Checks if either `wget` or `curl` is available.
+    if not self.wget_path and not self.curl_path:
+      print(PixelFactoryImageVerifier.ERR_TOOL_UNAVAIL_FMT_STR % 'wget or curl')
+      sys.exit(1)
+
     # Downloads factory image if URL is specified; otherwise treat it as file.
     if argv[1].lower().startswith('https://'):
       factory_image_zip = self._download_factory_image(argv[1])
@@ -95,6 +114,11 @@
     if not partition_image_dir:
       sys.exit(1)
 
+    # Unpacks bootloader image into individual component images.
+    unpack_successful = self._unpack_bootloader(partition_image_dir)
+    if not unpack_successful:
+      sys.exit(1)
+
     # Validates the VBMeta of the factory image.
     verified = self._verify_vbmeta_partitions(partition_image_dir)
     if not verified:
@@ -111,6 +135,11 @@
 
     print('The build fingerprint for factory image is: %s' % fingerprint)
     print('The VBMeta Digest for factory image is: %s' % vbmeta_digest)
+
+    with open('payload.txt', 'w') as f_out:
+      f_out.write(fingerprint.strip() + '\n')
+      f_out.write(vbmeta_digest.strip() + '\n')
+    print('A corresponding "payload.txt" file has been created.')
     sys.exit(0)
 
   def _download_factory_image(self, url):
@@ -167,7 +196,12 @@
     """
     print('Fetching file from: %s' % url)
     os.chdir(download_dir)
-    args = ['wget', url]
+    args = []
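+    # Prefer wget when it is installed; otherwise fall back to curl, passing
+    # -O so the downloaded file keeps its remote name (as wget does by default).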
+    if self.wget_path:
+      args = [self.wget_path, url]
+    else:
+      args = [self.curl_path, '-O', url]
+
     result, _ = self._run_command(args,
                                   'Successfully downloaded file.',
                                   'File download failed.')
@@ -183,6 +217,27 @@
     else:
       return None
 
+  def _unpack_bootloader(self, factory_image_folder):
+    """Unpacks the bootloader to produce individual images.
+
+    Args:
+      factory_image_folder: path to the directory containing factory images.
+
+    Returns:
+      True if unpacking succeeds, False otherwise.
+    """
+    os.chdir(factory_image_folder)
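+    # The factory image ships a packed bootloader image (bootloader*.img);
+    # locate it with a glob since the exact file name is not fixed.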
+    bootloader_path = os.path.join(factory_image_folder, 'bootloader*.img')
+    glob_result = glob.glob(bootloader_path)
+    if not glob_result:
+      return False
+
+    args = [self.fw_unpacker_path, 'unpack', glob_result[0]]
+    result, _ = self._run_command(args,
+                                  'Successfully unpacked bootloader image.',
+                                  'Failed to unpack bootloader image.')
+    return result
+
   def _unpack_factory_image(self, factory_image_file):
     """Unpacks the factory image zip file.
 
@@ -259,7 +314,7 @@
      which contains a vbmeta.img partition.
 
     Returns:
-      True if the VBMeta protected parititions verify.
+      True if the VBMeta protected partitions verify.
     """
     os.chdir(image_dir)
     args = [self.avbtool_path,
@@ -281,7 +336,7 @@
     Returns:
       The build fingerprint string, e.g.
       google/blueline/blueline:9/PQ2A.190305.002/5240760:user/release-keys
-    """ 
+    """
     os.chdir(image_dir)
     args = ['grep',
             '-a',
@@ -290,7 +345,7 @@
 
     result, output = self._run_command(
         args,
-        'Successfully extracted build fingerpint.',
+        'Successfully extracted build fingerprint.',
         'Build fingerprint extraction failed.')
     os.chdir(self.working_dir)
     if result:
@@ -300,7 +355,7 @@
       return None
 
   def _calculate_vbmeta_digest(self, image_dir):
-    """Calculates the VBMeta Digest for given parititions using avbtool.
+    """Calculates the VBMeta Digest for given partitions using avbtool.
 
     Args:
       image_dir: The folder containing the unpacked factory image partitions,
@@ -325,7 +380,8 @@
   def _run_command(self, args, success_msg, fail_msg):
     """Runs command line tools."""
     p = subprocess.Popen(args, stdin=subprocess.PIPE,
-                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                         encoding='utf-8')
     pout, _ = p.communicate()
     if p.wait() == 0:
       print(success_msg)
diff --git a/tools/transparency/verify/README.md b/tools/transparency/verify/README.md
new file mode 100644
index 0000000..c69fb05
--- /dev/null
+++ b/tools/transparency/verify/README.md
@@ -0,0 +1,37 @@
+# Verifier of Binary Transparency for Pixel Factory Images
+
+This repository contains code to read the transparency log for [Binary Transparency for Pixel Factory Images](https://developers.google.com/android/binary_transparency/pixel). See the section about this tool [here](https://developers.google.com/android/binary_transparency/pixel#verifying-image-inclusion-inclusion-proof).
+
+## Files and Directories
+* `cmd/verifier/`
+  * Contains the binary to read the transparency log. It is embedded with the public key of the log to verify log identity.
+* `internal/`
+  * Internal libraries for the verifier binary.
+
+## Build
+This module requires Go 1.17. Install Go from [here](https://go.dev/doc/install), then run `go build cmd/verifier/verifier.go`.
+
+An executable named `verifier` should be produced upon successful build.
+
+## Usage
+The verifier uses the checkpoint and the log contents (found at the [tile directory](https://developers.google.com/android/binary_transparency/tile)) to check that your image payload is in the transparency log, i.e. that it is published by Google.
+
+To run the verifier after you have built it in the previous section:
+```
+$ ./verifier --payload_path=${PAYLOAD_PATH}
+```
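+
+For example, if you generated the payload with `tools/transparency/pixel_factory_image_verify.py`, which writes the build fingerprint and VBMeta digest of a factory image to a `payload.txt` file, you could run:
+```
+$ ./verifier --payload_path=payload.txt
+```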
+
+### Input
+The verifier takes a `payload_path` as input.
+
+Each Pixel Factory image corresponds to a [payload](https://developers.google.com/android/binary_transparency/pixel#log-content) stored in the transparency log, the format of which is:
+```
+<build_fingerprint>\n<vbmeta_digest>\n
+```
+See [here](https://developers.google.com/android/binary_transparency/pixel#construct-the-payload-for-verification) for a few methods detailing how to extract this payload from an image.
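+
+As an illustration only (the fingerprint below is the sample one used elsewhere in this repository and the digest is a placeholder), a payload file contains two newline-terminated lines such as:
+```
+google/blueline/blueline:9/PQ2A.190305.002/5240760:user/release-keys
+0000000000000000000000000000000000000000000000000000000000000000
+```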
+
+### Output
+The result of the command is logged to the console:
+  * `OK` if the image is included in the log, i.e. this [claim](https://developers.google.com/android/binary_transparency/pixel#claimant-model) holds,
+  * `FAILURE` otherwise.
+
diff --git a/tools/transparency/verify/cmd/verifier/log_pub_key.pem b/tools/transparency/verify/cmd/verifier/log_pub_key.pem
new file mode 100644
index 0000000..01a2127
--- /dev/null
+++ b/tools/transparency/verify/cmd/verifier/log_pub_key.pem
@@ -0,0 +1,4 @@
+-----BEGIN PUBLIC KEY-----
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEU83uXNUiTYE53c2TfdWmqpW20bBX
+y4KEf5Ff8dV8GLKlVAXKHyjw3Lp9J3E0yCRJ/39XKeuAAMF7KzSvhD248A==
+-----END PUBLIC KEY-----
diff --git a/tools/transparency/verify/cmd/verifier/verifier.go b/tools/transparency/verify/cmd/verifier/verifier.go
new file mode 100644
index 0000000..fd02241
--- /dev/null
+++ b/tools/transparency/verify/cmd/verifier/verifier.go
@@ -0,0 +1,109 @@
+// Binary `verifier` checks the inclusion of a particular Pixel Factory Image,
+// identified by its build_fingerprint and vbmeta_digest (the payload), in the
+// Transparency Log.
+//
+// Inputs to the tool are:
+//   - the path to a file containing the payload, see this page for instructions
+//     https://developers.google.com/android/binary_transparency/pixel#construct-the-payload-for-verification.
+//   - the log's base URL, if different from the default provided.
+//
+// The log leaf index of the image of interest is not passed in; it is looked
+// up in the log's image_info.txt, see:
+// https://developers.google.com/android/binary_transparency/image_info.txt
+//
+// Outputs:
+//   - "OK" if the image is included in the log,
+//   - "FAILURE" if it isn't.
+//
+// Usage: See README.md.
+// For more details on inclusion proofs, see:
+// https://developers.google.com/android/binary_transparency/pixel#verifying-image-inclusion-inclusion-proof
+package main
+
+import (
+	"bytes"
+	"flag"
+	"log"
+	"os"
+	"path/filepath"
+
+	"android.googlesource.com/platform/external/avb.git/tools/transparency/verify/internal/checkpoint"
+	"android.googlesource.com/platform/external/avb.git/tools/transparency/verify/internal/tiles"
+	"golang.org/x/mod/sumdb/tlog"
+
+	_ "embed"
+)
+
+// Domain separation prefix for Merkle tree hashing with second preimage
+// resistance similar to that used in RFC 6962.
+const (
+	LeafHashPrefix     = 0
+	KeyNameForVerifier = "pixel6_transparency_log"
+)
+
+// See https://developers.google.com/android/binary_transparency/pixel#signature-verification.
+//go:embed log_pub_key.pem
+var logPubKey []byte
+
+var (
+	payloadPath = flag.String("payload_path", "", "Path to the payload describing the image of interest.")
+	logBaseURL  = flag.String("log_base_url", "https://developers.google.com/android/binary_transparency", "Base url for the verifiable log files.")
+)
+
+func main() {
+	flag.Parse()
+
+	if *payloadPath == "" {
+		log.Fatal("must specify the payload_path for the image payload")
+	}
+	b, err := os.ReadFile(*payloadPath)
+	if err != nil {
+		log.Fatalf("unable to open file %q: %v", *payloadPath, err)
+	}
+	// Normalize the payload: strip surrounding whitespace and terminate it with
+	// exactly one newline, matching the format of entries in image_info.txt.
+	payloadBytes := bytes.TrimSpace(b)
+	payloadBytes = append(payloadBytes, '\n')
+	if string(b) != string(payloadBytes) {
+		log.Printf("Reformatted payload content from %q to %q", b, payloadBytes)
+	}
+
+	v, err := checkpoint.NewVerifier(logPubKey, KeyNameForVerifier)
+	if err != nil {
+		log.Fatalf("error creating verifier: %v", err)
+	}
+	root, err := checkpoint.FromURL(*logBaseURL, v)
+	if err != nil {
+		log.Fatalf("error reading checkpoint for log(%s): %v", *logBaseURL, err)
+	}
+
+	m, err := tiles.ImageInfosIndex(*logBaseURL)
+	if err != nil {
+		log.Fatalf("failed to load image info map to find log index: %v", err)
+	}
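+	// Entries in image_info.txt are keyed by the exact newline-terminated
+	// payload text, so the normalized payload above can be used directly.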
+	imageInfoIndex, ok := m[string(payloadBytes)]
+	if !ok {
+		log.Fatalf("failed to find payload %q in %s", string(payloadBytes), filepath.Join(*logBaseURL, "image_info.txt"))
+	}
+
+	var th tlog.Hash
+	copy(th[:], root.Hash)
+
+	logSize := int64(root.Size)
+	r := tiles.HashReader{URL: *logBaseURL}
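+	// Build an inclusion proof for the record at imageInfoIndex against a tree
+	// of logSize entries, fetching the required intermediate hashes via r.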
+	rp, err := tlog.ProveRecord(logSize, imageInfoIndex, r)
+	if err != nil {
+		log.Fatalf("error in tlog.ProveRecord: %v", err)
+	}
+
+	leafHash, err := tiles.PayloadHash(payloadBytes)
+	if err != nil {
+		log.Fatalf("error hashing payload: %v", err)
+	}
+
+	if err := tlog.CheckRecord(rp, logSize, th, imageInfoIndex, leafHash); err != nil {
+		log.Fatalf("FAILURE: inclusion check error in tlog.CheckRecord: %v", err)
+	} else {
+		log.Print("OK. inclusion check success")
+	}
+}
+
diff --git a/tools/transparency/verify/go.mod b/tools/transparency/verify/go.mod
new file mode 100644
index 0000000..9eb49c9
--- /dev/null
+++ b/tools/transparency/verify/go.mod
@@ -0,0 +1,10 @@
+module android.googlesource.com/platform/external/avb.git/tools/transparency/verify
+
+go 1.17
+
+require (
+  github.com/google/go-cmp v0.5.7
+  golang.org/x/mod v0.5.1
+)
+
+require golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 // indirect
diff --git a/tools/transparency/verify/go.sum b/tools/transparency/verify/go.sum
new file mode 100644
index 0000000..f02f24c
--- /dev/null
+++ b/tools/transparency/verify/go.sum
@@ -0,0 +1,18 @@
+github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o=
+github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38=
+golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/tools/transparency/verify/internal/checkpoint/checkpoint.go b/tools/transparency/verify/internal/checkpoint/checkpoint.go
new file mode 100644
index 0000000..600707f
--- /dev/null
+++ b/tools/transparency/verify/internal/checkpoint/checkpoint.go
@@ -0,0 +1,175 @@
+// Package checkpoint implements methods to interact with checkpoints
+// as described below.
+//
+// Root is the internal representation of the information needed to
+// commit to the contents of the tree, and contains the root hash and size.
+//
+// When a commitment needs to be sent to other processes (such as a witness or
+// other log clients), it is put in the form of a checkpoint, which also
+// includes an "ecosystem identifier". The "ecosystem identifier" defines how
+// to parse the checkpoint data. This package deals only with the DEFAULT
+// ecosystem, which has only the information from Root and no additional data.
+// Support for other ecosystems will be added as needed.
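+//
+// For example (with an illustrative size and a placeholder hash), the text of
+// a DEFAULT checkpoint before signing looks like:
+//
+//	DEFAULT
+//	42
+//	<base64-encoded root hash>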
+//
+// This checkpoint is signed in a note format (golang.org/x/mod/sumdb/note)
+// before sending out. An unsigned checkpoint is not a valid commitment and
+// must not be used.
+//
+// There is only a single signature.
+// Support for multiple signing identities will be added as needed.
+package checkpoint
+
+import (
+	"crypto/ecdsa"
+	"crypto/sha256"
+	"crypto/x509"
+	"encoding/base64"
+	"encoding/binary"
+	"encoding/pem"
+	"errors"
+	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+	"path"
+	"strconv"
+	"strings"
+
+	"golang.org/x/mod/sumdb/note"
+)
+
+const (
+	// defaultEcosystemID identifies a checkpoint in the DEFAULT ecosystem.
+	defaultEcosystemID = "DEFAULT\n"
+)
+
+type verifier interface {
+	Verify(msg []byte, sig []byte) bool
+	Name() string
+	KeyHash() uint32
+}
+
+// EcdsaVerifier verifies a message signature that was signed using ECDSA.
+type EcdsaVerifier struct {
+	PubKey *ecdsa.PublicKey
+	name   string
+	hash   uint32
+}
+
+// Verify reports whether sig is a valid signature of msg under the verifier's
+// public key.
+func (v EcdsaVerifier) Verify(msg, sig []byte) bool {
+	h := sha256.Sum256(msg)
+	return ecdsa.VerifyASN1(v.PubKey, h[:], sig)
+}
+
+// KeyHash returns a 4 byte hash of the public key to be used as a hint to the
+// verifier.
+func (v EcdsaVerifier) KeyHash() uint32 {
+	return v.hash
+}
+
+// Name returns the name of the key.
+func (v EcdsaVerifier) Name() string {
+	return v.name
+}
+
+// NewVerifier creates an EcdsaVerifier from an ECDSA public key given in PEM
+// format and the provided key name.
+func NewVerifier(pemKey []byte, name string) (EcdsaVerifier, error) {
+	b, _ := pem.Decode(pemKey)
+	if b == nil || b.Type != "PUBLIC KEY" {
+		return EcdsaVerifier{}, fmt.Errorf("failed to decode public key, must contain an ECDSA public key in PEM format")
+	}
+
+	key := b.Bytes
+	sum := sha256.Sum256(key)
+	keyHash := binary.BigEndian.Uint32(sum[:])
+
+	pub, err := x509.ParsePKIXPublicKey(key)
+	if err != nil {
+		return EcdsaVerifier{}, fmt.Errorf("can't parse key: %v", err)
+	}
+	return EcdsaVerifier{
+		PubKey: pub.(*ecdsa.PublicKey),
+		hash:   keyHash,
+		name:   name,
+	}, nil
+}
+
+// Root contains the checkpoint data for a DEFAULT ecosystem checkpoint.
+type Root struct {
+	// Size is the number of entries in the log at this point.
+	Size uint64
+	// Hash commits to the contents of the entire log.
+	Hash []byte
+}
+
+func parseCheckpoint(ckpt string) (Root, error) {
+	if !strings.HasPrefix(ckpt, defaultEcosystemID) {
+		return Root{}, errors.New("invalid checkpoint - unknown ecosystem, must be DEFAULT")
+	}
+	// Strip the ecosystem ID and parse the rest of the checkpoint.
+	body := ckpt[len(defaultEcosystemID):]
+	// The body must be exactly two newline-terminated lines (the size and the
+	// root hash), so splitting on "\n" yields three pieces with an empty tail.
+	l := strings.SplitN(body, "\n", 3)
+	if len(l) != 3 || len(l[2]) != 0 {
+		return Root{}, errors.New("invalid checkpoint - bad format: must have ecosystem id, size and root hash each followed by newline")
+	}
+	size, err := strconv.ParseUint(l[0], 10, 64)
+	if err != nil {
+		return Root{}, fmt.Errorf("invalid checkpoint - cannot read size: %w", err)
+	}
+	rh, err := base64.StdEncoding.DecodeString(l[1])
+	if err != nil {
+		return Root{}, fmt.Errorf("invalid checkpoint - invalid roothash: %w", err)
+	}
+	return Root{Size: size, Hash: rh}, nil
+}
+
+func getSignedCheckpoint(logURL string) ([]byte, error) {
+	// Sanity check the input url.
+	u, err := url.Parse(logURL)
+	if err != nil {
+		return []byte{}, fmt.Errorf("invalid URL %s: %v", logURL, err)
+	}
+
+	u.Path = path.Join(u.Path, "checkpoint.txt")
+
+	resp, err := http.Get(u.String())
+	if err != nil {
+		return []byte{}, fmt.Errorf("http.Get(%s): %v", u, err)
+	}
+	defer resp.Body.Close()
+	if code := resp.StatusCode; code != 200 {
+		return []byte{}, fmt.Errorf("http.Get(%s): %s", u, http.StatusText(code))
+	}
+
+	return io.ReadAll(resp.Body)
+}
+
+// FromURL verifies the signature and unpacks and returns a Root.
+//
+// Validates signature before reading data, using a provided verifier.
+// Data at `logURL` is the checkpoint and must be in the note format
+// (golang.org/x/mod/sumdb/note).
+//
+// The checkpoint must be in the DEFAULT ecosystem.
+//
+// Returns error if the signature fails to verify or if the checkpoint
+// does not conform to the following format:
+// 	[]byte("[ecosystem]\n[size]\n[hash]").
+func FromURL(logURL string, v verifier) (Root, error) {
+	b, err := getSignedCheckpoint(logURL)
+	if err != nil {
+		return Root{}, fmt.Errorf("failed to get signed checkpoint: %v", err)
+	}
+
+	n, err := note.Open(b, note.VerifierList(v))
+	if err != nil {
+		return Root{}, fmt.Errorf("failed to verify note signatures: %v", err)
+	}
+	return parseCheckpoint(n.Text)
+}
diff --git a/tools/transparency/verify/internal/checkpoint/checkpoint_test.go b/tools/transparency/verify/internal/checkpoint/checkpoint_test.go
new file mode 100644
index 0000000..1c81394
--- /dev/null
+++ b/tools/transparency/verify/internal/checkpoint/checkpoint_test.go
@@ -0,0 +1,133 @@
+package checkpoint
+
+import (
+	"crypto/ecdsa"
+	"crypto/elliptic"
+	"crypto/rand"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"path"
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+)
+
+// privateKeyForTest returns an ecdsa.PrivateKey for use in tests only.
+func privateKeyForTest(t *testing.T) *ecdsa.PrivateKey {
+	t.Helper()
+	privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+	if err != nil {
+		t.Fatalf("GenerateKey(): %v", err)
+	}
+
+	return privateKey
+}
+
+func TestInvalidCheckpointFormat(t *testing.T) {
+	tests := []struct {
+		desc    string
+		m       string
+		wantErr bool
+	}{
+		{
+			desc:    "unknown ecosystem",
+			m:       "UNKNOWN\n1\nbananas\n",
+			wantErr: true,
+		},
+		{
+			desc:    "bad size",
+			m:       "DEFAULT\n-1\nbananas\n",
+			wantErr: true,
+		},
+		{
+			desc:    "not enough newlines",
+			m:       "DEFAULT\n1\n",
+			wantErr: true,
+		},
+		{
+			desc:    "non-numeric size",
+			m:       "DEFAULT\nbananas\ndGhlIHZpZXcgZnJvbSB0aGUgdHJlZSB0b3BzIGlzIGdyZWF0IQ==\n",
+			wantErr: true,
+		},
+		{
+			desc:    "too many newlines",
+			m:       "DEFAULT\n1\n\n\n\n",
+			wantErr: true,
+		},
+		{
+			desc:    "does not end with newline",
+			m:       "DEFAULT\n1\ngarbage",
+			wantErr: true,
+		},
+		{
+			desc:    "invalid - empty header",
+			m:       "\n9944\ndGhlIHZpZXcgZnJvbSB0aGUgdHJlZSB0b3BzIGlzIGdyZWF0IQ==\n",
+			wantErr: true,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.desc, func(t *testing.T) {
+			if _, gotErr := parseCheckpoint(tt.m); gotErr == nil {
+				t.Fatalf("parseCheckpoint(%v): want error, got nil", tt.m)
+			}
+		})
+	}
+}
+
+// testServer serves a test envelope `e` at path "test/file" and 404 otherwise.
+// It is used to minimally test FromURL.
+func testServer(t *testing.T, e string) *httptest.Server {
+	t.Helper()
+	return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		if r.URL.String() == "/test/file/checkpoint.txt" {
+			w.Write([]byte(e))
+		} else {
+			w.WriteHeader(http.StatusNotFound)
+		}
+	}))
+}
+
+// TestGetSignedCheckpoint is a minimal test to check URL I/O.
+// Content specific tests are done in the other tests.
+func TestGetSignedCheckpoint(t *testing.T) {
+	serverContent := "testContent"
+	s := testServer(t, serverContent)
+	u, err := url.Parse(s.URL)
+	if err != nil {
+		t.Fatalf("invalid URL for testServer %s: %v", s.URL, err)
+	}
+	defer s.Close()
+
+	for _, tt := range []struct {
+		desc    string
+		path    string
+		want    string
+		wantErr bool
+	}{
+		{
+			desc:    "good_file",
+			path:    "test/file",
+			want:    serverContent,
+			wantErr: false,
+		},
+		{
+			desc:    "bad_path",
+			path:    "bad/path",
+			wantErr: true,
+		},
+	} {
+		t.Run(tt.desc, func(t *testing.T) {
+			// Copy the base URL so one subtest's path does not leak into the next.
+			fileURL := *u
+			fileURL.Path = path.Join(u.Path, tt.path)
+			b, gotErr := getSignedCheckpoint(fileURL.String())
+			got := string(b)
+			if diff := cmp.Diff(got, tt.want); diff != "" {
+				t.Errorf("bad response body: got %v, want %v", got, tt.want)
+			}
+			if gotErr != nil && !tt.wantErr {
+				t.Errorf("getSignedCheckpoint(%q): unexpected error: %v", tt.path, gotErr)
+			}
+		})
+	}
+}
diff --git a/tools/transparency/verify/internal/tiles/reader.go b/tools/transparency/verify/internal/tiles/reader.go
new file mode 100644
index 0000000..f998f54
--- /dev/null
+++ b/tools/transparency/verify/internal/tiles/reader.go
@@ -0,0 +1,124 @@
+// Package tiles contains methods to work with tlog based verifiable logs.
+package tiles
+
+import (
+	"crypto/sha256"
+	"errors"
+	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+	"path"
+	"strconv"
+	"strings"
+
+	"golang.org/x/mod/sumdb/tlog"
+)
+
+// HashReader implements tlog.HashReader, reading from tlog-based log located at
+// URL.
+type HashReader struct {
+	URL string
+}
+
+// Domain separation prefix for Merkle tree hashing with second preimage
+// resistance similar to that used in RFC 6962.
+const (
+	leafHashPrefix = 0
+)
+
+// ReadHashes implements tlog.HashReader's ReadHashes.
+// See: https://pkg.go.dev/golang.org/x/mod/sumdb/tlog#HashReader.
+func (h HashReader) ReadHashes(indices []int64) ([]tlog.Hash, error) {
+	tiles := make(map[string][]byte)
+	hashes := make([]tlog.Hash, 0, len(indices))
+	for _, index := range indices {
+		// The PixelBT log is tiled at height = 1.
+		tile := tlog.TileForIndex(1, index)
+
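+		// Fetch each tile at most once per call: if an earlier index already
+		// pulled this tile, reuse the cached content instead of re-reading it
+		// from the log URL.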
+		var content []byte
+		var exists bool
+		var err error
+		content, exists = tiles[tile.Path()]
+		if !exists {
+			content, err = readFromURL(h.URL, tile.Path())
+			if err != nil {
+				return nil, fmt.Errorf("failed to read from %s: %v", tile.Path(), err)
+			}
+			tiles[tile.Path()] = content
+		}
+
+		hash, err := tlog.HashFromTile(tile, content, index)
+		if err != nil {
+			return nil, fmt.Errorf("failed to read data from tile for index %d: %v", index, err)
+		}
+		hashes = append(hashes, hash)
+	}
+	return hashes, nil
+}
+
+// ImageInfosIndex returns a map from payload to its index in the
+// transparency log according to the image_info.txt.
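+//
+// Entries in image_info.txt are separated by a blank line; each entry is the
+// leaf index on a line of its own, followed by the payload, e.g.:
+//
+//	0
+//	<build_fingerprint>
+//	<vbmeta_digest>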
+func ImageInfosIndex(logBaseURL string) (map[string]int64, error) {
+	b, err := readFromURL(logBaseURL, "image_info.txt")
+	if err != nil {
+		return nil, err
+	}
+
+	imageInfos := string(b)
+	return parseImageInfosIndex(imageInfos)
+}
+
+func parseImageInfosIndex(imageInfos string) (map[string]int64, error) {
+	m := make(map[string]int64)
+
+	infosStr := strings.Split(imageInfos, "\n\n")
+	for _, infoStr := range infosStr {
+		pieces := strings.SplitN(infoStr, "\n", 2)
+		if len(pieces) != 2 {
+			return nil, errors.New("missing newline, malformed image_info.txt")
+		}
+
+		idx, err := strconv.ParseInt(pieces[0], 10, 64)
+		if err != nil {
+			return nil, fmt.Errorf("failed to convert %q to int64", pieces[0])
+		}
+
+		// Ensure that each log entry does not have extraneous whitespace, but
+		// also terminates with a newline.
+		logEntry := strings.TrimSpace(pieces[1]) + "\n"
+		m[logEntry] = idx
+	}
+
+	return m, nil
+}
+
+func readFromURL(base, suffix string) ([]byte, error) {
+	u, err := url.Parse(base)
+	if err != nil {
+		return nil, fmt.Errorf("invalid URL %s: %v", base, err)
+	}
+	u.Path = path.Join(u.Path, suffix)
+
+	resp, err := http.Get(u.String())
+	if err != nil {
+		return nil, fmt.Errorf("http.Get(%s): %v", u.String(), err)
+	}
+	defer resp.Body.Close()
+	if code := resp.StatusCode; code != 200 {
+		return nil, fmt.Errorf("http.Get(%s): %s", u.String(), http.StatusText(code))
+	}
+
+	return io.ReadAll(resp.Body)
+}
+
+// PayloadHash returns the hash of the payload.
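+// The hash is SHA-256 over a one-byte domain-separation prefix followed by the
+// payload bytes, matching the RFC 6962 leaf hash construction.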
+func PayloadHash(p []byte) (tlog.Hash, error) {
+	l := append([]byte{leafHashPrefix}, p...)
+	h := sha256.Sum256(l)
+
+	var hash tlog.Hash
+	copy(hash[:], h[:])
+	return hash, nil
+}
diff --git a/tools/transparency/verify/internal/tiles/reader_test.go b/tools/transparency/verify/internal/tiles/reader_test.go
new file mode 100644
index 0000000..47e26c3
--- /dev/null
+++ b/tools/transparency/verify/internal/tiles/reader_test.go
@@ -0,0 +1,182 @@
+package tiles
+
+import (
+	"bytes"
+	"context"
+	"encoding/hex"
+	"io"
+	"log"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+	"golang.org/x/mod/sumdb/tlog"
+)
+
+const (
+	tileHeight = 1
+)
+
+// mustHexDecode decodes its input string from hex and aborts the test binary
+// if decoding fails.
+func mustHexDecode(b string) []byte {
+	r, err := hex.DecodeString(b)
+	if err != nil {
+		log.Fatalf("unable to decode string %q: %v", b, err)
+	}
+	return r
+}
+
+// nodeHashes is a structured slice of node hashes for all complete subtrees
+// of a Merkle tree built from test data using the RFC 6962 hashing strategy.
+// The first index in the slice is the tree level (zero being the leaves
+// level), the second is the horizontal index within a level.
+var nodeHashes = [][][]byte{{
+	mustHexDecode("6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d"),
+	mustHexDecode("96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7"),
+	mustHexDecode("0298d122906dcfc10892cb53a73992fc5b9f493ea4c9badb27b791b4127a7fe7"),
+	mustHexDecode("07506a85fd9dd2f120eb694f86011e5bb4662e5c415a62917033d4a9624487e7"),
+	mustHexDecode("bc1a0643b12e4d2d7c77918f44e0f4f79a838b6cf9ec5b5c283e1f4d88599e6b"),
+	mustHexDecode("4271a26be0d8a84f0bd54c8c302e7cb3a3b5d1fa6780a40bcce2873477dab658"),
+	mustHexDecode("b08693ec2e721597130641e8211e7eedccb4c26413963eee6c1e2ed16ffb1a5f"),
+	mustHexDecode("46f6ffadd3d06a09ff3c5860d2755c8b9819db7df44251788c7d8e3180de8eb1"),
+}, {
+	mustHexDecode("fac54203e7cc696cf0dfcb42c92a1d9dbaf70ad9e621f4bd8d98662f00e3c125"),
+	mustHexDecode("5f083f0a1a33ca076a95279832580db3e0ef4584bdff1f54c8a360f50de3031e"),
+	mustHexDecode("0ebc5d3437fbe2db158b9f126a1d118e308181031d0a949f8dededebc558ef6a"),
+	mustHexDecode("ca854ea128ed050b41b35ffc1b87b8eb2bde461e9e3b5596ece6b9d5975a0ae0"),
+}, {
+	mustHexDecode("d37ee418976dd95753c1c73862b9398fa2a2cf9b4ff0fdfe8b30cd95209614b7"),
+	mustHexDecode("6b47aaf29ee3c2af9af889bc1fb9254dabd31177f16232dd6aab035ca39bf6e4"),
+}, {
+	mustHexDecode("5dc9da79a70659a9ad559cb701ded9a2ab9d823aad2f4960cfe370eff4604328"),
+}}
+
+// testServer serves a tile based log of height 1, using the test data in
+// nodeHashes.
+func testServer(ctx context.Context, t *testing.T) *httptest.Server {
+	t.Helper()
+	return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+
+		// Parse the tile data out of r.URL.
+		// Strip the leading `/` to get a valid tile path.
+		tile, err := tlog.ParseTilePath(r.URL.String()[1:])
+		if err != nil {
+			t.Fatalf("ParseTilePath(%s): %v", r.URL.String(), err)
+		}
+		// Fill the response with the test nodeHashes ...
+		io.Copy(w, bytes.NewReader(nodeHashes[tile.L][2*tile.N]))
+		if tile.W == 2 {
+			// ... with special handling when the width is 2
+			io.Copy(w, bytes.NewReader(nodeHashes[tile.L][2*tile.N+1]))
+		}
+	}))
+}
+
+func TestReadHashesWithReadTileData(t *testing.T) {
+	ctx := context.Background()
+	s := testServer(ctx, t)
+	defer s.Close()
+
+	for _, tc := range []struct {
+		desc string
+		size uint64
+		want [][]byte
+	}{
+		{desc: "empty-0", size: 0},
+		{
+			desc: "size-3",
+			size: 3,
+			want: [][]byte{
+				nodeHashes[0][0],
+				append(nodeHashes[0][0], nodeHashes[0][1]...),
+				nodeHashes[1][0],
+				nodeHashes[0][2],
+			},
+		},
+	} {
+		t.Run(tc.desc, func(t *testing.T) {
+			r := HashReader{URL: s.URL}
+
+			// Read hashes.
+			for i, want := range tc.want {
+				tile := tlog.TileForIndex(tileHeight, int64(i))
+				got, err := tlog.ReadTileData(tile, r)
+				if err != nil {
+					t.Fatalf("ReadTileData: %v", err)
+				}
+				if !cmp.Equal(got, want) {
+					t.Errorf("tile %+v: got %X, want %X", tile, got, want)
+				}
+			}
+		})
+	}
+}
+
+func TestReadHashesCachedTile(t *testing.T) {
+	ctx := context.Background()
+	s := testServer(ctx, t)
+	defer s.Close()
+
+	wantHash := nodeHashes[0][0]
+	r := HashReader{URL: s.URL}
+
+	// Read hash at index 0 twice, to exercise the caching of tiles.
+	// On the first pass, the read is fresh and readFromURL is called.
+	// On the second pass, the tile is cached, so we skip readFromURL.
+	// We don't explicitly check that readFromURL is only called once,
+	// but we do check ReadHashes returns the correct values.
+	indices := []int64{0, 0}
+	hashes, err := r.ReadHashes(indices)
+	if err != nil {
+		t.Fatalf("ReadHashes: %v", err)
+	}
+
+	got := make([][]byte, 0, len(indices))
+	for _, hash := range hashes {
+		got = append(got, hash[:])
+	}
+
+	if !bytes.Equal(got[0], got[1]) {
+		t.Errorf("expected the same hash: got %X, want %X", got[0], got[1])
+	}
+	if !bytes.Equal(got[0], wantHash) {
+		t.Errorf("wrong ReadHashes result: got %X, want %X", got[0], wantHash)
+	}
+}
+
+func TestParseImageInfosIndex(t *testing.T) {
+	for _, tc := range []struct {
+		desc       string
+		imageInfos string
+		want       map[string]int64
+		wantErr    bool
+	}{
+		{
+			desc:       "size 2",
+			imageInfos: "0\nbuild_fingerprint0\nimage_digest0\n\n1\nbuild_fingerprint1\nimage_digest1\n",
+			wantErr:    false,
+			want: map[string]int64{
+				"build_fingerprint0\nimage_digest0\n": 0,
+				"build_fingerprint1\nimage_digest1\n": 1,
+			},
+		},
+		{
+			desc:       "invalid log entry (no newlines)",
+			imageInfos: "0build_fingerprintimage_digest",
+			wantErr:    true,
+		},
+	} {
+		t.Run(tc.desc, func(t *testing.T) {
+			got, err := parseImageInfosIndex(tc.imageInfos)
+			if err != nil && !tc.wantErr {
+				t.Fatalf("parseImageInfosIndex(%s) received unexpected err %q", tc.imageInfos, err)
+			}
+
+			if err == nil && tc.wantErr {
+				t.Fatalf("parseImageInfosIndex(%s) did not return err, expected err", tc.imageInfos)
+			}
+
+			if diff := cmp.Diff(tc.want, got); diff != "" {
+				t.Errorf("parseImageInfosIndex returned unexpected diff (-want +got):\n%s", diff)
+			}
+		})
+	}
+}