Snap for 6435660 from a12ac7533b9e1429a42f011d7e1e647e84992bb4 to sdk-release

Change-Id: I422fd491113e8cb8936a8dcb8b38179e87112fa9
diff --git a/Android.bp b/Android.bp
index 9bae50e..aa4897a 100644
--- a/Android.bp
+++ b/Android.bp
@@ -76,55 +76,37 @@
 
 python_binary_host {
     name: "avbtool",
-    srcs: [":avbtool_py"],
+    srcs: ["avbtool.py"],
     main: "avbtool.py",
     required: ["fec"],
     version: {
         py2: {
-            enabled: true,
-            embedded_launcher: true,
-        },
-        py3: {
             enabled: false,
         },
-    },
-}
-
-python_test_host {
-    name: "avbtool_test",
-    main: "avbtool_test.py",
-    srcs: [
-        ":avbtool_py",
-        "avbtool_test.py",
-    ],
-    data: [
-        "test/data/**/*.*",
-    ],
-    test_suites: ["general-tests"],
-    version: {
-        py2: {
-            enabled: true,
-            // This is needs to be false due to b/146057182#comment5.
-            embedded_launcher: false,
-        },
         py3: {
-            enabled: false,
+            enabled: true,
         },
     },
 }
 
-genrule {
-  name: "aftltool_py",
-  srcs: ["aftltool",],
-  out: ["aftltool.py"],
-  cmd: "cp $(in) $(out)",
-}
-
 python_library_host {
     name: "aftl_proto",
     srcs: [
-        "proto/**/*.py",
+        "proto/api.proto",
+        "proto/aftl.proto",
+        "proto/crypto/sigpb/sigpb.proto",
+        "proto/crypto/keyspb/keyspb.proto",
+        "proto/trillian.proto",
     ],
+    proto: {
+        include_dirs: [
+            "external/protobuf/src",
+        ],
+        local_include_dirs: [
+            "proto",
+        ],
+        canonical_path_from_root: false,
+    },
     version: {
         py2: {
             enabled: false,
@@ -138,14 +120,13 @@
 python_binary_host {
     name: "aftltool",
     srcs: [
-        ":aftltool_py",
-        ":avbtool_py",
+        "aftltool.py",
+        "avbtool.py",
     ],
     libs: [
         "aftl_proto",
     ],
     main: "aftltool.py",
-    required: ["fec"],
     version: {
         py2: {
             enabled: false,
@@ -160,9 +141,9 @@
     name: "aftltool_test",
     main: "aftltool_test.py",
     srcs: [
-        ":aftltool_py",
-        ":avbtool_py",
+        "aftltool.py",
         "aftltool_test.py",
+        "avbtool.py",
     ],
     libs: [
         "aftl_proto",
@@ -181,13 +162,6 @@
     },
 }
 
-genrule {
-  name: "avbtool_py",
-  srcs: ["avbtool",],
-  out: ["avbtool.py"],
-  cmd: "cp $(in) $(out)",
-}
-
 // Build libavb - this is a static library that depends
 // on only libc and doesn't drag in any other dependencies.
 cc_library_static {
diff --git a/TEST_MAPPING b/TEST_MAPPING
index 3a62b6c..714bf6e 100644
--- a/TEST_MAPPING
+++ b/TEST_MAPPING
@@ -1,16 +1,14 @@
 {
-  "postsubmit": [
-    {
-      "name": "avbtool_test",
-      "host": true
-    },
-    {
-      "name": "libavb_host_unittest",
-      "host": true
-    },
+  "presubmit": [
     {
       "name": "aftltool_test",
       "host": true
     }
+  ],
+  "postsubmit": [
+    {
+      "name": "libavb_host_unittest",
+      "host": true
+    }
   ]
 }
diff --git a/aftltool b/aftltool
new file mode 120000
index 0000000..11a8ff9
--- /dev/null
+++ b/aftltool
@@ -0,0 +1 @@
+aftltool.py
\ No newline at end of file
diff --git a/aftltool.py b/aftltool.py
deleted file mode 120000
index 39f589b..0000000
--- a/aftltool.py
+++ /dev/null
@@ -1 +0,0 @@
-aftltool
\ No newline at end of file
diff --git a/aftltool b/aftltool.py
similarity index 97%
rename from aftltool
rename to aftltool.py
index ea9db8e..0fe2d07 100755
--- a/aftltool
+++ b/aftltool.py
@@ -26,7 +26,6 @@
 
 import argparse
 import base64
-import binascii
 import hashlib
 import json
 import multiprocessing
@@ -49,7 +48,7 @@
 import avbtool
 import aftl_pb2
 import api_pb2
-from crypto import sigpb
+from crypto.sigpb import sigpb_pb2
 # pylint: enable=wrong-import-position,import-error
 
 
@@ -625,7 +624,7 @@
     for i, proof_hash in enumerate(self.proofs):
       if i != 0:
         o.write(' ' * 29)
-      o.write('{}\n'.format(binascii.hexlify(proof_hash)))
+      o.write('{}\n'.format(proof_hash.hex()))
     self.log_root_descriptor.print_desc(o)
     self.fw_info_leaf.print_desc(o)
 
@@ -786,13 +785,12 @@
     o.write(fmt.format(i, 'Tree size:', self.tree_size))
     o.write(fmt.format(i, 'Root hash size:', self.root_hash_size))
     if self.root_hash_size > 0:
-      o.write(fmt.format(i, 'Root hash:', binascii.hexlify(self.root_hash)))
+      o.write(fmt.format(i, 'Root hash:', self.root_hash.hex()))
       o.write(fmt.format(i, 'Timestamp (ns):', self.timestamp))
     o.write(fmt.format(i, 'Revision:', self.revision))
     o.write(fmt.format(i, 'Metadata size:', self.metadata_size))
     if self.metadata_size > 0:
-      o.write(fmt.format(i, 'Metadata:',
-                         binascii.hexlify(self.metadata).decode('ascii')))
+      o.write(fmt.format(i, 'Metadata:', self.metadata.hex()))
 
 
 class FirmwareInfoLeaf(object):
@@ -923,14 +921,14 @@
     i = ' ' * 6
     fmt = '{}{:23}{}\n'
     if self.vbmeta_hash:
-      o.write(fmt.format(i, 'VBMeta hash:', binascii.hexlify(self.vbmeta_hash)))
+      o.write(fmt.format(i, 'VBMeta hash:', self.vbmeta_hash.hex()))
     if self.version_incremental:
       o.write(fmt.format(i, 'Version incremental:', self.version_incremental))
     if self.platform_key:
       o.write(fmt.format(i, 'Platform key:', self.platform_key))
     if self.manufacturer_key_hash:
       o.write(fmt.format(i, 'Manufacturer key hash:',
-                         binascii.hexlify(self.manufacturer_key_hash)))
+                         self.manufacturer_key_hash.hex()))
     if self.description:
       o.write(fmt.format(i, 'Description:', self.description))
 
@@ -1356,7 +1354,7 @@
     except avbtool.AvbError as e:
       raise AftlError('Failed to sign FirmwareInfo with '
                       '--manufacturer_key: {}'.format(e))
-    fw_info_sig = sigpb.sigpb_pb2.DigitallySigned(
+    fw_info_sig = sigpb_pb2.DigitallySigned(
         hash_algorithm='SHA256',
         signature_algorithm='RSA',
         signature=signed_fw_info)
@@ -1493,8 +1491,9 @@
       process_number: The number of the processes executing the function.
       submission_count: Number of total submissions to perform per
         process_count.
-      preserve_icp_images: Boolean to indicate if the generated vbmeta
-        image files with inclusion proofs should preserved.
+      preserve_icp_images: Boolean to indicate if the generated vbmeta image
+        files with inclusion proofs should be preserved in the temporary
+        directory.
       timeout: Duration in seconds before requests to the AFTL times out. A
         value of 0 or None means there will be no timeout.
       result_queue: Multiprocessing.Queue object for posting execution results.
@@ -1502,7 +1501,8 @@
     for count in range(0, submission_count):
       version_incremental = 'aftl_load_testing_{}_{}'.format(process_number,
                                                              count)
-      output_file = '{}_icp.img'.format(version_incremental)
+      output_file = os.path.join(tempfile.gettempdir(),
+                                 '{}_icp.img'.format(version_incremental))
       output = open(output_file, 'wb')
 
       # Instrumented section.
@@ -1545,6 +1545,7 @@
       submission_count: Number of total submissions to perform per
         process_count.
       stats_filename: Path to the stats file to write the raw execution data to.
+        If None, it will be written to the temp directory.
       preserve_icp_images: Boolean to indicate if the generated vbmeta
         image files with inclusion proofs should preserved.
       timeout: Duration in seconds before requests to the AFTL times out. A
@@ -1559,10 +1560,13 @@
       return False
 
     if not stats_filename:
-      stats_filename = 'load_test_p{}_s{}.csv'.format(process_count,
-                                                      submission_count)
+      stats_filename = os.path.join(
+          tempfile.gettempdir(),
+          'load_test_p{}_s{}.csv'.format(process_count, submission_count))
+
+    stats_file = None
     try:
-      stats_file = open(stats_filename, 'w')
+      stats_file = open(stats_filename, 'wt')
       stats_file.write('start_time,end_time,execution_time,version_incremental,'
                        'result\n')
     except IOError as e:
@@ -1663,20 +1667,22 @@
       The TransparencyLogConfig instance.
 
     Raises:
-      AftlError: If the format of arg is invalid.
+      argparse.ArgumentTypeError: If the format of arg is invalid.
     """
     api_key = None
     try:
       target, pub_key, *rest = arg.split(",", maxsplit=2)
-    except ValueError as e:
-      raise AftlError("Incorrect format for transparency log config: "
-                      "{}.".format(e))
+    except ValueError:
+      raise argparse.ArgumentTypeError("incorrect format for transparency log "
+                                       "server, expected "
+                                       "host:port,publickey_file.")
     if not target:
-      raise AftlError("Incorrect format for transparency log config: "
-                      "host:port cannot be empty.")
+      raise argparse.ArgumentTypeError("incorrect format for transparency log "
+                                       "server: host:port cannot be empty.")
     if not pub_key:
-      raise AftlError("Incorrect format for transparency log config: "
-                      "publickey_file cannot be empty.")
+      raise argparse.ArgumentTypeError("incorrect format for transparency log "
+                                       "server: publickey_file cannot be "
+                                       "empty.")
     if rest:
       api_key = rest[0]
     return TransparencyLogConfig(target, pub_key, api_key)
@@ -1873,7 +1879,7 @@
       print('aftltool: error: too few arguments')
       sys.exit(2)
     except AftlError as e:
-      # Signals to calling tools that an unhandled exeception occured.
+      # Signals to calling tools that an unhandled exception occurred.
       sys.stderr.write('Unhandled AftlError occured: {}\n'.format(e))
       sys.exit(2)
 
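A recurring change in the aftltool.py hunks above is dropping the binascii import and replacing binascii.hexlify(...) with the bytes.hex() method wherever hashes are formatted for print_desc output. A minimal sketch of the equivalence on Python 3, using a made-up hash value rather than anything from the patch:

    import binascii

    proof_hash = b'\xde\xad\xbe\xef'          # placeholder value, not real data
    old_style = binascii.hexlify(proof_hash).decode('ascii')
    new_style = proof_hash.hex()              # no extra import or decode step needed
    assert old_style == new_style == 'deadbeef'
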
diff --git a/aftltool_test.py b/aftltool_test.py
index a672f6e..006deb9 100755
--- a/aftltool_test.py
+++ b/aftltool_test.py
@@ -24,11 +24,14 @@
 #
 """Unit tests for aftltool."""
 
+import argparse
 import base64
 import binascii
 import io
 import os
+import struct
 import sys
+import tempfile
 import unittest
 
 import aftltool
@@ -162,7 +165,7 @@
     self.test_entry_2.log_root_signature = b'd' * 512
     self.test_entry_2.proofs = self.test_proof_hashes_2
 
-    self.test_entry_2_bytes = bytearray(
+    self.test_entry_2_bytes = (
         b'\x00\x00\x00\x1a'                   # Transparency log url size.
         b'\x00\x00\x00\x00\x00\x00\x00\x02'   # Leaf index.
         b'\x00\x00\x00\x3f'                   # Log root descriptor size.
@@ -182,11 +185,11 @@
     self.test_aftl_desc.add_icp_entry(self.test_entry_2)
 
     self.test_expected_aftl_image_bytes = (
-        b'AFTL'                               # Magic.
-        b'\x00\x00\x00\x01'                   # Major version.
-        b'\x00\x00\x00\x01'                   # Minor version.
-        b'\x00\x00\x05\xb9'                   # Image size.
-        b'\x00\x02'                           # Number of ICP entries.
+        b'AFTL'                                         # Magic.
+        + struct.pack('!L', avbtool.AVB_VERSION_MAJOR)  # Major version.
+        + struct.pack('!L', avbtool.AVB_VERSION_MINOR)  # Minor version.
+        + b'\x00\x00\x05\xb9'                           # Image size.
+        b'\x00\x02'                                     # Number of ICP entries.
         + self.test_entry_1_bytes
         + self.test_entry_2_bytes)
 
@@ -508,7 +511,7 @@
 
     # Force invalid ICP header.
     old_magic = d.image_header.magic
-    d.image_header.magic = 'YOLO'
+    d.image_header.magic = b'YOLO'
     self.assertFalse(d.is_valid())
     d.image_header.magic = old_magic
     self.assertTrue(d.is_valid())
@@ -551,9 +554,12 @@
     self.test_header_invalid = aftltool.AftlImageHeader()
     self.test_header_invalid.icp_count = -34
 
-    self.test_header_bytes = (b'\x41\x46\x54\x4c\x00\x00\x00\x01'
-                              b'\x00\x00\x00\x01\x00\x00\x00\x12'
-                              b'\x00\x01')
+    self.test_header_bytes = (
+        b'AFTL'                                         # Magic.
+        + struct.pack('!L', avbtool.AVB_VERSION_MAJOR)  # Major version.
+        + struct.pack('!L', avbtool.AVB_VERSION_MINOR)  # Minor version.
+        + b'\x00\x00\x00\x12'                           # Image size.
+        b'\x00\x01')                                    # Number of ICP entries.
 
   def test__init__(self):
     """Tests constructor."""
@@ -572,8 +578,10 @@
     # Calls constructor with data.
     header = aftltool.AftlImageHeader(self.test_header_bytes)
     self.assertEqual(header.magic, b'AFTL')
-    self.assertEqual(header.required_icp_version_major, 1)
-    self.assertEqual(header.required_icp_version_minor, 1)
+    self.assertEqual(header.required_icp_version_major,
+                     avbtool.AVB_VERSION_MAJOR)
+    self.assertEqual(header.required_icp_version_minor,
+                     avbtool.AVB_VERSION_MINOR)
     self.assertEqual(header.aftl_image_size, aftltool.AftlImageHeader.SIZE)
     self.assertTrue(header.icp_count, 1)
     self.assertTrue(header.is_valid())
@@ -721,25 +729,23 @@
 
   def test_verify_icp(self):
     """Tests verify_icp method."""
-    key_file = 'transparency_log_pub_key.pem'
-    with open(key_file, 'w') as f:
-      f.write(self.test_aftl_pub_key)
+    with tempfile.NamedTemporaryFile('wt+') as key_file:
+      key_file.write(self.test_aftl_pub_key)
+      key_file.flush()
 
-    # Valid ICP.
-    entry = aftltool.AftlIcpEntry()
-    entry.translate_response(self.test_tl_url_1, self.test_afi_resp)
-    self.assertTrue(entry.verify_icp(key_file))
+      # Valid ICP.
+      entry = aftltool.AftlIcpEntry()
+      entry.translate_response(self.test_tl_url_1, self.test_afi_resp)
+      self.assertTrue(entry.verify_icp(key_file.name))
 
-    # Invalid ICP where fw_info_leaf is not matching up with proofs.
-    # pylint: disable=protected-access
-    entry = aftltool.AftlIcpEntry()
-    entry.translate_response(self.test_tl_url_1, self.test_afi_resp)
-    fw_info_leaf_bytes = entry.fw_info_leaf._fw_info_leaf_bytes.replace(
-        b'ViNzEQS', b'1234567')
-    entry.fw_info_leaf._fw_info_leaf_bytes = fw_info_leaf_bytes
-    self.assertFalse(entry.verify_icp(key_file))
-
-    os.remove(key_file)
+      # Invalid ICP where fw_info_leaf is not matching up with proofs.
+      # pylint: disable=protected-access
+      entry = aftltool.AftlIcpEntry()
+      entry.translate_response(self.test_tl_url_1, self.test_afi_resp)
+      fw_info_leaf_bytes = entry.fw_info_leaf._fw_info_leaf_bytes.replace(
+          b'ViNzEQS', b'1234567')
+      entry.fw_info_leaf._fw_info_leaf_bytes = fw_info_leaf_bytes
+      self.assertFalse(entry.verify_icp(key_file.name))
 
   def test_verify_vbmeta_image(self):
     """Tests the verify_vbmeta_image method."""
@@ -842,11 +848,11 @@
     self.assertEqual(d.version, 1)
     self.assertEqual(d.tree_size, 0)
     self.assertEqual(d.root_hash_size, 0)
-    self.assertEqual(d.root_hash, bytearray())
+    self.assertEqual(d.root_hash, b'')
     self.assertEqual(d.timestamp, 0)
     self.assertEqual(d.revision, 0)
     self.assertEqual(d.metadata_size, 0)
-    self.assertEqual(d.metadata, bytearray())
+    self.assertEqual(d.metadata, b'')
 
     # Calls constructor with log_root w/o metadata
     d = aftltool.TrillianLogRootDescriptor(self.test_log_root_bytes_wo_metadata)
@@ -860,7 +866,7 @@
     self.assertEqual(d.timestamp, 1576762888554271289)
     self.assertEqual(d.revision, 740)
     self.assertEqual(d.metadata_size, 0)
-    self.assertEqual(d.metadata, bytearray())
+    self.assertEqual(d.metadata, b'')
 
     # Calls constructor with log_root with metadata
     d = aftltool.TrillianLogRootDescriptor(
@@ -933,9 +939,9 @@
     # Invalid/valid root_hash_size / root_hash combination.
     d = aftltool.TrillianLogRootDescriptor()
     d.root_hash_size = 4
-    d.root_hash = '123'
+    d.root_hash = b'123'
     self.assertFalse(d.is_valid())
-    d.root_hash = '1234'
+    d.root_hash = b'1234'
     self.assertTrue(d.is_valid())
 
     # Invalid timestamp.
@@ -958,9 +964,9 @@
     # Invalid/valid metadata_size / metadata combination.
     d = aftltool.TrillianLogRootDescriptor()
     d.metadata_size = 4
-    d.metadata = '123'
+    d.metadata = b'123'
     self.assertFalse(d.is_valid())
-    d.metadata = '1234'
+    d.metadata = b'1234'
     self.assertTrue(d.is_valid())
 
   def test_print_desc(self):
@@ -1149,7 +1155,6 @@
     self.manufacturer_key = None
     self.set_up_environment()
 
-    self.output_filename = 'vbmeta_icp.img'
     self.transparency_log_config = aftltool.TransparencyLogConfig(
         self.aftl_host, self.aftl_pubkey, self.aftl_apikey)
 
@@ -1166,12 +1171,12 @@
     }
 
     self.info_icp_default_params = {
-        'vbmeta_image_path': self.output_filename,
+        'vbmeta_image_path': None,
         'output': io.StringIO()
     }
 
     self.verify_icp_default_params = {
-        'vbmeta_image_path': self.output_filename,
+        'vbmeta_image_path': None,
         'transparency_log_pub_keys': [self.aftl_pubkey],
         'output': io.StringIO()
     }
@@ -1188,24 +1193,6 @@
         'timeout': None
     }
 
-    self.load_test_stats_file_p1_s1 = 'load_test_p1_s1.csv'
-    self.load_test_stats_file_p2_p2 = 'load_test_p2_s2.csv'
-
-    self.files_to_cleanup = [
-        self.output_filename,
-        self.load_test_stats_file_p1_s1,
-        self.load_test_stats_file_p2_p2
-    ]
-
-  def tearDown(self):
-    """Tears down the test bed for the unit tests."""
-    for filename in self.files_to_cleanup:
-      try:
-        os.remove(filename)
-      except OSError:
-        pass
-    super(AftlTestCase, self).tearDown()
-
   def set_up_environment(self):
     """Sets up member variables for the particular test environment.
 
@@ -1348,22 +1335,25 @@
     aftl = self.get_aftl_implementation(self.test_afi_resp)
 
     # Make a VBmeta image with ICP.
-    with open(self.output_filename, 'wb') as output_file:
+    with tempfile.NamedTemporaryFile('wb+') as output_file:
       self.make_icp_default_params['output'] = output_file
       result = aftl.make_icp_from_vbmeta(**self.make_icp_default_params)
-    self.assertTrue(result)
+      output_file.flush()
+      self.assertTrue(result)
 
-    # Checks that there is 1 ICP.
-    aftl_image = aftl.get_aftl_image(self.output_filename)
-    self.assertEqual(aftl_image.image_header.icp_count, 1)
+      # Checks that there is 1 ICP.
+      aftl_image = aftl.get_aftl_image(output_file.name)
+      self.assertEqual(aftl_image.image_header.icp_count, 1)
 
-    # Verifies the generated image.
-    result = aftl.verify_image_icp(**self.verify_icp_default_params)
-    self.assertTrue(result)
+      # Verifies the generated image.
+      self.verify_icp_default_params['vbmeta_image_path'] = output_file.name
+      result = aftl.verify_image_icp(**self.verify_icp_default_params)
+      self.assertTrue(result)
 
-    # Prints the image details.
-    result = aftl.info_image_icp(**self.info_icp_default_params)
-    self.assertTrue(result)
+      # Prints the image details.
+      self.info_icp_default_params['vbmeta_image_path'] = output_file.name
+      result = aftl.info_image_icp(**self.info_icp_default_params)
+      self.assertTrue(result)
 
   def test_make_and_verify_icp_with_2_logs(self):
     """Tests make_icp_from_vbmeta, verify_image_icp & info_image_icp."""
@@ -1374,23 +1364,26 @@
         self.transparency_log_config, self.transparency_log_config]
 
     # Make a VBmeta image with ICP.
-    with open(self.output_filename, 'wb') as output_file:
+    with tempfile.NamedTemporaryFile('wb+') as output_file:
       self.make_icp_default_params['output'] = output_file
       result = aftl.make_icp_from_vbmeta(
           **self.make_icp_default_params)
+      output_file.flush()
       self.assertTrue(result)
 
-    # Checks that there are 2 ICPs.
-    aftl_image = aftl.get_aftl_image(self.output_filename)
-    self.assertEqual(aftl_image.image_header.icp_count, 2)
+      # Checks that there are 2 ICPs.
+      aftl_image = aftl.get_aftl_image(output_file.name)
+      self.assertEqual(aftl_image.image_header.icp_count, 2)
 
-    # Verifies the generated image.
-    result = aftl.verify_image_icp(**self.verify_icp_default_params)
-    self.assertTrue(result)
+      # Verifies the generated image.
+      self.verify_icp_default_params['vbmeta_image_path'] = output_file.name
+      result = aftl.verify_image_icp(**self.verify_icp_default_params)
+      self.assertTrue(result)
 
-    # Prints the image details.
-    result = aftl.info_image_icp(**self.info_icp_default_params)
-    self.assertTrue(result)
+      # Prints the image details.
+      self.info_icp_default_params['vbmeta_image_path'] = output_file.name
+      result = aftl.info_image_icp(**self.info_icp_default_params)
+      self.assertTrue(result)
 
   def test_info_image_icp(self):
     """Tests info_image_icp with vbmeta image with 2 ICP."""
@@ -1453,7 +1446,7 @@
     aftl = self.get_aftl_implementation(aftltool.AftlError('Comms error'))
     self.make_icp_default_params[
         'transparency_log_configs'][0].target = 'www.google.com:80'
-    with open(self.output_filename, 'wb') as output_file:
+    with tempfile.NamedTemporaryFile('wb+') as output_file:
       self.make_icp_default_params['output'] = output_file
       result = aftl.make_icp_from_vbmeta(
           **self.make_icp_default_params)
@@ -1467,7 +1460,7 @@
     # time of the transparency log per load test results in b/139407814#2 where
     # it was 3.43 seconds.
     self.make_icp_default_params['timeout'] = 1
-    with open(self.output_filename, 'wb') as output_file:
+    with tempfile.NamedTemporaryFile('wb+') as output_file:
       self.make_icp_default_params['output'] = output_file
       result = aftl.make_icp_from_vbmeta(
           **self.make_icp_default_params)
@@ -1477,14 +1470,18 @@
     """Tests load_test_aftl command with 1 process which does 1 submission."""
     aftl = self.get_aftl_implementation(self.test_afi_resp)
 
-    result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
-    self.assertTrue(result)
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      self.load_test_aftl_default_params[
+          'stats_filename'] = os.path.join(tmp_dir, 'load_test.csv')
+      result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
+      self.assertTrue(result)
 
-    output = self.load_test_aftl_default_params['output'].getvalue()
-    self.assertRegex(output, 'Succeeded:.+?1\n')
-    self.assertRegex(output, 'Failed:.+?0\n')
+      output = self.load_test_aftl_default_params['output'].getvalue()
+      self.assertRegex(output, 'Succeeded:.+?1\n')
+      self.assertRegex(output, 'Failed:.+?0\n')
 
-    self.assertTrue(os.path.exists(self.load_test_stats_file_p1_s1))
+      self.assertTrue(os.path.exists(
+          self.load_test_aftl_default_params['stats_filename']))
 
   def test_load_test_multi_process_multi_submission(self):
     """Tests load_test_aftl command with 2 processes and 2 submissions each."""
@@ -1492,14 +1489,18 @@
 
     self.load_test_aftl_default_params['process_count'] = 2
     self.load_test_aftl_default_params['submission_count'] = 2
-    result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
-    self.assertTrue(result)
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      self.load_test_aftl_default_params[
+          'stats_filename'] = os.path.join(tmp_dir, 'load_test.csv')
+      result = aftl.load_test_aftl(**self.load_test_aftl_default_params)
+      self.assertTrue(result)
 
-    output = self.load_test_aftl_default_params['output'].getvalue()
-    self.assertRegex(output, 'Succeeded:.+?4\n')
-    self.assertRegex(output, 'Failed:.+?0\n')
+      output = self.load_test_aftl_default_params['output'].getvalue()
+      self.assertRegex(output, 'Succeeded:.+?4\n')
+      self.assertRegex(output, 'Failed:.+?0\n')
 
-    self.assertTrue(os.path.exists(self.load_test_stats_file_p2_p2))
+      self.assertTrue(os.path.exists(
+          self.load_test_aftl_default_params['stats_filename']))
 
   def test_load_test_invalid_grpc_service(self):
     """Tests load_test_aftl command with a host that does not support GRPC."""
@@ -1535,10 +1536,10 @@
     self.assertEqual(log.target, "example.com:8080")
     self.assertEqual(log.pub_key, "mykey.pub")
 
-    with self.assertRaises(aftltool.AftlError):
+    with self.assertRaises(argparse.ArgumentTypeError):
       aftltool.TransparencyLogConfig.from_argument("example.com:8080,")
 
-    with self.assertRaises(aftltool.AftlError):
+    with self.assertRaises(argparse.ArgumentTypeError):
       aftltool.TransparencyLogConfig.from_argument(",")
 
   def test_from_argument_with_api_key(self):
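The aftltool_test.py changes above follow one pattern: fixed output filenames in the working directory (plus the tearDown cleanup list) are replaced with scratch files from the tempfile module, so cleanup happens automatically when the context manager exits. A minimal, self-contained sketch of that pattern, with a placeholder payload standing in for a real vbmeta image:

    import os
    import tempfile

    with tempfile.NamedTemporaryFile('wb+') as output_file:
        output_file.write(b'placeholder image data')  # stand-in for make_icp_from_vbmeta output
        output_file.flush()                           # make the bytes visible via output_file.name
        assert os.path.getsize(output_file.name) > 0
    # The temporary file is deleted automatically when the with block exits.
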
diff --git a/avbtool b/avbtool
new file mode 120000
index 0000000..5f6383e
--- /dev/null
+++ b/avbtool
@@ -0,0 +1 @@
+avbtool.py
\ No newline at end of file
diff --git a/avbtool.py b/avbtool.py
deleted file mode 120000
index 16657b9..0000000
--- a/avbtool.py
+++ /dev/null
@@ -1 +0,0 @@
-avbtool
\ No newline at end of file
diff --git a/avbtool b/avbtool.py
similarity index 98%
rename from avbtool
rename to avbtool.py
index 901b89e..4b9c594 100755
--- a/avbtool
+++ b/avbtool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # Copyright 2016, The Android Open Source Project
 #
@@ -24,8 +24,6 @@
 #
 """Command-line tool for working with Android Verified Boot images."""
 
-from __future__ import print_function
-
 import argparse
 import binascii
 import bisect
@@ -1421,10 +1419,8 @@
     o.write('      FEC size:              {} bytes\n'.format(self.fec_size))
     o.write('      Hash Algorithm:        {}\n'.format(self.hash_algorithm))
     o.write('      Partition Name:        {}\n'.format(self.partition_name))
-    o.write('      Salt:                  {}\n'.format(
-        binascii.hexlify(self.salt).decode('ascii')))
-    o.write('      Root Digest:           {}\n'.format(
-        binascii.hexlify(self.root_digest).decode('ascii')))
+    o.write('      Salt:                  {}\n'.format(self.salt.hex()))
+    o.write('      Root Digest:           {}\n'.format(self.root_digest.hex()))
     o.write('      Flags:                 {}\n'.format(self.flags))
 
   def encode(self):
@@ -1594,10 +1590,8 @@
     o.write('      Image Size:            {} bytes\n'.format(self.image_size))
     o.write('      Hash Algorithm:        {}\n'.format(self.hash_algorithm))
     o.write('      Partition Name:        {}\n'.format(self.partition_name))
-    o.write('      Salt:                  {}\n'.format(
-        binascii.hexlify(self.salt).decode('ascii')))
-    o.write('      Digest:                {}\n'.format(
-        binascii.hexlify(self.digest).decode('ascii')))
+    o.write('      Salt:                  {}\n'.format(self.salt.hex()))
+    o.write('      Digest:                {}\n'.format(self.digest.hex()))
     o.write('      Flags:                 {}\n'.format(self.flags))
 
   def encode(self):
@@ -2409,7 +2403,8 @@
     o.write('Algorithm:                {}\n'.format(alg_name))
     o.write('Rollback Index:           {}\n'.format(header.rollback_index))
     o.write('Flags:                    {}\n'.format(header.flags))
-    o.write('Rollback Index Location:  {}\n'.format(header.rollback_index_location))
+    o.write('Rollback Index Location:  {}\n'.format(
+        header.rollback_index_location))
     o.write('Release String:           \'{}\'\n'.format(header.release_string))
 
     # Print descriptors.
@@ -2563,14 +2558,14 @@
 
     for desc in descriptors:
       if isinstance(desc, AvbHashDescriptor):
-        digest = binascii.hexlify(desc.digest).decode('ascii')
+        digest = desc.digest.hex()
         if json_partitions is not None:
           json_partitions.append({'name': desc.partition_name,
                                   'digest': digest})
         else:
           output.write('{}: {}\n'.format(desc.partition_name, digest))
       elif isinstance(desc, AvbHashtreeDescriptor):
-        digest = binascii.hexlify(desc.root_digest).decode('ascii')
+        digest = desc.root_digest.hex()
         if json_partitions is not None:
           json_partitions.append({'name': desc.partition_name,
                                   'digest': digest})
@@ -2624,7 +2619,7 @@
         hasher.update(ch_vbmeta_blob)
 
     digest = hasher.digest()
-    output.write('{}\n'.format(binascii.hexlify(digest).decode('ascii')))
+    output.write('{}\n'.format(digest.hex()))
 
   def calculate_kernel_cmdline(self, image_filename, hashtree_disabled, output):
     """Implements the 'calculate_kernel_cmdline' command.
@@ -2758,19 +2753,18 @@
 
     """
     c = 'dm="1 vroot none ro 1,'
-    c += '0'  # start
-    c += ' {}'.format((ht.image_size // 512))  # size (# sectors)
-    c += ' verity {}'.format(ht.dm_verity_version)  # type and version
-    c += ' PARTUUID=$(ANDROID_SYSTEM_PARTUUID)'  # data_dev
-    c += ' PARTUUID=$(ANDROID_SYSTEM_PARTUUID)'  # hash_dev
-    c += ' {}'.format(ht.data_block_size)  # data_block
-    c += ' {}'.format(ht.hash_block_size)  # hash_block
+    c += '0'                                                # start
+    c += ' {}'.format((ht.image_size // 512))               # size (# sectors)
+    c += ' verity {}'.format(ht.dm_verity_version)          # type and version
+    c += ' PARTUUID=$(ANDROID_SYSTEM_PARTUUID)'             # data_dev
+    c += ' PARTUUID=$(ANDROID_SYSTEM_PARTUUID)'             # hash_dev
+    c += ' {}'.format(ht.data_block_size)                   # data_block
+    c += ' {}'.format(ht.hash_block_size)                   # hash_block
     c += ' {}'.format(ht.image_size // ht.data_block_size)  # #blocks
     c += ' {}'.format(ht.image_size // ht.data_block_size)  # hash_offset
-    c += ' {}'.format(ht.hash_algorithm)  # hash_alg
-    c += ' {}'.format(
-        binascii.hexlify(ht.root_digest).decode('ascii'))  # root_digest
-    c += ' {}'.format(binascii.hexlify(ht.salt).decode('ascii'))  # salt
+    c += ' {}'.format(ht.hash_algorithm)                    # hash_alg
+    c += ' {}'.format(ht.root_digest.hex())                 # root_digest
+    c += ' {}'.format(ht.salt.hex())                        # salt
     if ht.fec_num_roots > 0:
       c += ' 10'  # number of optional args
       c += ' $(ANDROID_VERITY_MODE)'
@@ -3147,7 +3141,7 @@
 
       # Calculate the signature.
       rsa_key = RSAPublicKey(key_path)
-      data_to_sign = header_data_blob + aux_data_blob
+      data_to_sign = header_data_blob + bytes(aux_data_blob)
       binary_signature = rsa_key.sign(algorithm_name, data_to_sign,
                                       signing_helper, signing_helper_with_files)
 
@@ -3609,7 +3603,7 @@
         # size as the hash size. Don't populate a random salt if this
         # descriptor is being created to use a persistent digest on device.
         hash_size = digest_size
-        with open('/dev/urandom') as f:
+        with open('/dev/urandom', 'rb') as f:
           salt = f.read(hash_size)
       else:
         salt = b''
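The last avbtool.py hunk opens /dev/urandom in binary mode; on Python 3 a text-mode read would return str (and could fail to decode), while the salt must be raw bytes of the requested length. A small sketch of the same pattern (Linux/macOS only, values are illustrative):

    hash_size = 32  # e.g. a SHA-256 digest size

    with open('/dev/urandom', 'rb') as f:  # 'rb' so read() returns bytes
        salt = f.read(hash_size)

    assert isinstance(salt, bytes) and len(salt) == hash_size
    print(salt.hex())                      # hex string, as printed by print_desc
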
diff --git a/avbtool_test.py b/avbtool_test.py
deleted file mode 100755
index eccd15e..0000000
--- a/avbtool_test.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2019, The Android Open Source Project
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation
-# files (the "Software"), to deal in the Software without
-# restriction, including without limitation the rights to use, copy,
-# modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
-# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-#
-"""Unit tests for avbtool for supporting the migration from Python 2.7 to 3."""
-
-# pylint: disable=unused-import
-from __future__ import print_function
-
-import base64
-import binascii
-import io
-import os
-import sys
-import unittest
-
-import avbtool
-
-
-# Workaround for b/149307145 in order to pick up the test data from the right
-# location independent where the script is called from.
-# TODO(b/149307145): Remove workaround once the referenced bug is fixed.
-TEST_EXEC_PATH = os.path.dirname(os.path.realpath(__file__))
-
-
-class AvbtoolTestCase(unittest.TestCase):
-
-  def testX(self):
-    pass
-
-if __name__ == '__main__':
-  unittest.main(verbosity=2)
diff --git a/proto/README.md b/proto/README.md
index f40db7a..eb3966c 100644
--- a/proto/README.md
+++ b/proto/README.md
@@ -27,12 +27,3 @@
    From https://github.com/google/trillian
    Dependency of aftl.proto
    For aftl.proto, contains message definitions for SignedLogRoot.
-* aftl_google/api/annotations.proto
-   From https://github.com/googleapis/googleapis
-   Used to get access to google.api.http options.
-* aftl_google/api/http.proto
-   From https://github.com/googleapis/googleapis
-   Dependency of aftl_google/api/annotations.proto
-   Contains the HttpRule message that extends MethodOptions.
-* aftl_google/rpc/status.proto
-   From https://github.com/googleapis/googleapis
diff --git a/proto/aftl_google/__init__.py b/proto/aftl_google/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/proto/aftl_google/__init__.py
+++ /dev/null
diff --git a/proto/aftl_google/api/__init__.py b/proto/aftl_google/api/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/proto/aftl_google/api/__init__.py
+++ /dev/null
diff --git a/proto/aftl_google/api/annotations.proto b/proto/aftl_google/api/annotations.proto
deleted file mode 100644
index 8e90cc8..0000000
--- a/proto/aftl_google/api/annotations.proto
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright (c) 2015, Google Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package google.api;
-
-import "aftl_google/api/http.proto";
-import "google/protobuf/descriptor.proto";
-
-option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
-option java_multiple_files = true;
-option java_outer_classname = "AnnotationsProto";
-option java_package = "com.google.api";
-option objc_class_prefix = "GAPI";
-
-extend google.protobuf.MethodOptions {
-  // See `HttpRule`.
-  HttpRule http = 72295728;
-}
diff --git a/proto/aftl_google/api/annotations_pb2.py b/proto/aftl_google/api/annotations_pb2.py
deleted file mode 100644
index 2982db1..0000000
--- a/proto/aftl_google/api/annotations_pb2.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# pylint: skip-file
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: aftl_google/api/annotations.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from proto.aftl_google.api import http_pb2 as aftl__google_dot_api_dot_http__pb2
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='aftl_google/api/annotations.proto',
-  package='google.api',
-  syntax='proto3',
-  serialized_options=_b('\n\016com.google.apiB\020AnnotationsProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI'),
-  serialized_pb=_b('\n!aftl_google/api/annotations.proto\x12\ngoogle.api\x1a\x1a\x61\x66tl_google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc\" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3')
-  ,
-  dependencies=[aftl__google_dot_api_dot_http__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
-
-
-HTTP_FIELD_NUMBER = 72295728
-http = _descriptor.FieldDescriptor(
-  name='http', full_name='google.api.http', index=0,
-  number=72295728, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  serialized_options=None, file=DESCRIPTOR)
-
-DESCRIPTOR.extensions_by_name['http'] = http
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-http.message_type = aftl__google_dot_api_dot_http__pb2._HTTPRULE
-google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(http)
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/proto/aftl_google/api/annotations_pb2_grpc.py b/proto/aftl_google/api/annotations_pb2_grpc.py
deleted file mode 100644
index 73636b2..0000000
--- a/proto/aftl_google/api/annotations_pb2_grpc.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# pylint: skip-file
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
diff --git a/proto/aftl_google/api/http.proto b/proto/aftl_google/api/http.proto
deleted file mode 100644
index b2977f5..0000000
--- a/proto/aftl_google/api/http.proto
+++ /dev/null
@@ -1,376 +0,0 @@
-// Copyright 2019 Google LLC.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-syntax = "proto3";
-
-package google.api;
-
-option cc_enable_arenas = true;
-option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
-option java_multiple_files = true;
-option java_outer_classname = "HttpProto";
-option java_package = "com.google.api";
-option objc_class_prefix = "GAPI";
-
-// Defines the HTTP configuration for an API service. It contains a list of
-// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method
-// to one or more HTTP REST API methods.
-message Http {
-  // A list of HTTP configuration rules that apply to individual API methods.
-  //
-  // **NOTE:** All service configuration rules follow "last one wins" order.
-  repeated HttpRule rules = 1;
-
-  // When set to true, URL path parameters will be fully URI-decoded except in
-  // cases of single segment matches in reserved expansion, where "%2F" will be
-  // left encoded.
-  //
-  // The default behavior is to not decode RFC 6570 reserved characters in multi
-  // segment matches.
-  bool fully_decode_reserved_expansion = 2;
-}
-
-// # gRPC Transcoding
-//
-// gRPC Transcoding is a feature for mapping between a gRPC method and one or
-// more HTTP REST endpoints. It allows developers to build a single API service
-// that supports both gRPC APIs and REST APIs. Many systems, including [Google
-// APIs](https://github.com/googleapis/googleapis),
-// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC
-// Gateway](https://github.com/grpc-ecosystem/grpc-gateway),
-// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature
-// and use it for large scale production services.
-//
-// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies
-// how different portions of the gRPC request message are mapped to the URL
-// path, URL query parameters, and HTTP request body. It also controls how the
-// gRPC response message is mapped to the HTTP response body. `HttpRule` is
-// typically specified as an `google.api.http` annotation on the gRPC method.
-//
-// Each mapping specifies a URL path template and an HTTP method. The path
-// template may refer to one or more fields in the gRPC request message, as long
-// as each field is a non-repeated field with a primitive (non-message) type.
-// The path template controls how fields of the request message are mapped to
-// the URL path.
-//
-// Example:
-//
-//     service Messaging {
-//       rpc GetMessage(GetMessageRequest) returns (Message) {
-//         option (google.api.http) = {
-//             get: "/v1/{name=messages/*}"
-//         };
-//       }
-//     }
-//     message GetMessageRequest {
-//       string name = 1; // Mapped to URL path.
-//     }
-//     message Message {
-//       string text = 1; // The resource content.
-//     }
-//
-// This enables an HTTP REST to gRPC mapping as below:
-//
-// HTTP | gRPC
-// -----|-----
-// `GET /v1/messages/123456`  | `GetMessage(name: "messages/123456")`
-//
-// Any fields in the request message which are not bound by the path template
-// automatically become HTTP query parameters if there is no HTTP request body.
-// For example:
-//
-//     service Messaging {
-//       rpc GetMessage(GetMessageRequest) returns (Message) {
-//         option (google.api.http) = {
-//             get:"/v1/messages/{message_id}"
-//         };
-//       }
-//     }
-//     message GetMessageRequest {
-//       message SubMessage {
-//         string subfield = 1;
-//       }
-//       string message_id = 1; // Mapped to URL path.
-//       int64 revision = 2;    // Mapped to URL query parameter `revision`.
-//       SubMessage sub = 3;    // Mapped to URL query parameter `sub.subfield`.
-//     }
-//
-// This enables a HTTP JSON to RPC mapping as below:
-//
-// HTTP | gRPC
-// -----|-----
-// `GET /v1/messages/123456?revision=2&sub.subfield=foo` |
-// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield:
-// "foo"))`
-//
-// Note that fields which are mapped to URL query parameters must have a
-// primitive type or a repeated primitive type or a non-repeated message type.
-// In the case of a repeated type, the parameter can be repeated in the URL
-// as `...?param=A&param=B`. In the case of a message type, each field of the
-// message is mapped to a separate parameter, such as
-// `...?foo.a=A&foo.b=B&foo.c=C`.
-//
-// For HTTP methods that allow a request body, the `body` field
-// specifies the mapping. Consider a REST update method on the
-// message resource collection:
-//
-//     service Messaging {
-//       rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
-//         option (google.api.http) = {
-//           patch: "/v1/messages/{message_id}"
-//           body: "message"
-//         };
-//       }
-//     }
-//     message UpdateMessageRequest {
-//       string message_id = 1; // mapped to the URL
-//       Message message = 2;   // mapped to the body
-//     }
-//
-// The following HTTP JSON to RPC mapping is enabled, where the
-// representation of the JSON in the request body is determined by
-// protos JSON encoding:
-//
-// HTTP | gRPC
-// -----|-----
-// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
-// "123456" message { text: "Hi!" })`
-//
-// The special name `*` can be used in the body mapping to define that
-// every field not bound by the path template should be mapped to the
-// request body.  This enables the following alternative definition of
-// the update method:
-//
-//     service Messaging {
-//       rpc UpdateMessage(Message) returns (Message) {
-//         option (google.api.http) = {
-//           patch: "/v1/messages/{message_id}"
-//           body: "*"
-//         };
-//       }
-//     }
-//     message Message {
-//       string message_id = 1;
-//       string text = 2;
-//     }
-//
-//
-// The following HTTP JSON to RPC mapping is enabled:
-//
-// HTTP | gRPC
-// -----|-----
-// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
-// "123456" text: "Hi!")`
-//
-// Note that when using `*` in the body mapping, it is not possible to
-// have HTTP parameters, as all fields not bound by the path end in
-// the body. This makes this option more rarely used in practice when
-// defining REST APIs. The common usage of `*` is in custom methods
-// which don't use the URL at all for transferring data.
-//
-// It is possible to define multiple HTTP methods for one RPC by using
-// the `additional_bindings` option. Example:
-//
-//     service Messaging {
-//       rpc GetMessage(GetMessageRequest) returns (Message) {
-//         option (google.api.http) = {
-//           get: "/v1/messages/{message_id}"
-//           additional_bindings {
-//             get: "/v1/users/{user_id}/messages/{message_id}"
-//           }
-//         };
-//       }
-//     }
-//     message GetMessageRequest {
-//       string message_id = 1;
-//       string user_id = 2;
-//     }
-//
-// This enables the following two alternative HTTP JSON to RPC mappings:
-//
-// HTTP | gRPC
-// -----|-----
-// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")`
-// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id:
-// "123456")`
-//
-// ## Rules for HTTP mapping
-//
-// 1. Leaf request fields (recursive expansion nested messages in the request
-//    message) are classified into three categories:
-//    - Fields referred by the path template. They are passed via the URL path.
-//    - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP
-//      request body.
-//    - All other fields are passed via the URL query parameters, and the
-//      parameter name is the field path in the request message. A repeated
-//      field can be represented as multiple query parameters under the same
-//      name.
-//  2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields
-//     are passed via URL path and HTTP request body.
-//  3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all
-//     fields are passed via URL path and URL query parameters.
-//
-// ### Path template syntax
-//
-//     Template = "/" Segments [ Verb ] ;
-//     Segments = Segment { "/" Segment } ;
-//     Segment  = "*" | "**" | LITERAL | Variable ;
-//     Variable = "{" FieldPath [ "=" Segments ] "}" ;
-//     FieldPath = IDENT { "." IDENT } ;
-//     Verb     = ":" LITERAL ;
-//
-// The syntax `*` matches a single URL path segment. The syntax `**` matches
-// zero or more URL path segments, which must be the last part of the URL path
-// except the `Verb`.
-//
-// The syntax `Variable` matches part of the URL path as specified by its
-// template. A variable template must not contain other variables. If a variable
-// matches a single path segment, its template may be omitted, e.g. `{var}`
-// is equivalent to `{var=*}`.
-//
-// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL`
-// contains any reserved character, such characters should be percent-encoded
-// before the matching.
-//
-// If a variable contains exactly one path segment, such as `"{var}"` or
-// `"{var=*}"`, when such a variable is expanded into a URL path on the client
-// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The
-// server side does the reverse decoding. Such variables show up in the
-// [Discovery
-// Document](https://developers.google.com/discovery/v1/reference/apis) as
-// `{var}`.
-//
-// If a variable contains multiple path segments, such as `"{var=foo/*}"`
-// or `"{var=**}"`, when such a variable is expanded into a URL path on the
-// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded.
-// The server side does the reverse decoding, except "%2F" and "%2f" are left
-// unchanged. Such variables show up in the
-// [Discovery
-// Document](https://developers.google.com/discovery/v1/reference/apis) as
-// `{+var}`.
-//
-// ## Using gRPC API Service Configuration
-//
-// gRPC API Service Configuration (service config) is a configuration language
-// for configuring a gRPC service to become a user-facing product. The
-// service config is simply the YAML representation of the `google.api.Service`
-// proto message.
-//
-// As an alternative to annotating your proto file, you can configure gRPC
-// transcoding in your service config YAML files. You do this by specifying a
-// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same
-// effect as the proto annotation. This can be particularly useful if you
-// have a proto that is reused in multiple services. Note that any transcoding
-// specified in the service config will override any matching transcoding
-// configuration in the proto.
-//
-// Example:
-//
-//     http:
-//       rules:
-//         # Selects a gRPC method and applies HttpRule to it.
-//         - selector: example.v1.Messaging.GetMessage
-//           get: /v1/messages/{message_id}/{sub.subfield}
-//
-// ## Special notes
-//
-// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the
-// proto to JSON conversion must follow the [proto3
-// specification](https://developers.google.com/protocol-buffers/docs/proto3#json).
-//
-// While the single segment variable follows the semantics of
-// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String
-// Expansion, the multi segment variable **does not** follow RFC 6570 Section
-// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion
-// does not expand special characters like `?` and `#`, which would lead
-// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding
-// for multi segment variables.
-//
-// The path variables **must not** refer to any repeated or mapped field,
-// because client libraries are not capable of handling such variable expansion.
-//
-// The path variables **must not** capture the leading "/" character. The reason
-// is that the most common use case "{var}" does not capture the leading "/"
-// character. For consistency, all path variables must share the same behavior.
-//
-// Repeated message fields must not be mapped to URL query parameters, because
-// no client library can support such complicated mapping.
-//
-// If an API needs to use a JSON array for request or response body, it can map
-// the request or response body to a repeated field. However, some gRPC
-// Transcoding implementations may not support this feature.
-message HttpRule {
-  // Selects a method to which this rule applies.
-  //
-  // Refer to [selector][google.api.DocumentationRule.selector] for syntax details.
-  string selector = 1;
-
-  // Determines the URL pattern is matched by this rules. This pattern can be
-  // used with any of the {get|put|post|delete|patch} methods. A custom method
-  // can be defined using the 'custom' field.
-  oneof pattern {
-    // Maps to HTTP GET. Used for listing and getting information about
-    // resources.
-    string get = 2;
-
-    // Maps to HTTP PUT. Used for replacing a resource.
-    string put = 3;
-
-    // Maps to HTTP POST. Used for creating a resource or performing an action.
-    string post = 4;
-
-    // Maps to HTTP DELETE. Used for deleting a resource.
-    string delete = 5;
-
-    // Maps to HTTP PATCH. Used for updating a resource.
-    string patch = 6;
-
-    // The custom pattern is used for specifying an HTTP method that is not
-    // included in the `pattern` field, such as HEAD, or "*" to leave the
-    // HTTP method unspecified for this rule. The wild-card rule is useful
-    // for services that provide content to Web (HTML) clients.
-    CustomHttpPattern custom = 8;
-  }
-
-  // The name of the request field whose value is mapped to the HTTP request
-  // body, or `*` for mapping all request fields not captured by the path
-  // pattern to the HTTP body, or omitted for not having any HTTP request body.
-  //
-  // NOTE: the referred field must be present at the top-level of the request
-  // message type.
-  string body = 7;
-
-  // Optional. The name of the response field whose value is mapped to the HTTP
-  // response body. When omitted, the entire response message will be used
-  // as the HTTP response body.
-  //
-  // NOTE: The referred field must be present at the top-level of the response
-  // message type.
-  string response_body = 12;
-
-  // Additional HTTP bindings for the selector. Nested bindings must
-  // not contain an `additional_bindings` field themselves (that is,
-  // the nesting may only be one level deep).
-  repeated HttpRule additional_bindings = 11;
-}
-
-// A custom pattern is used for defining custom HTTP verb.
-message CustomHttpPattern {
-  // The name of this custom HTTP verb.
-  string kind = 1;
-
-  // The path matched by this custom verb.
-  string path = 2;
-}
diff --git a/proto/aftl_google/api/http_pb2.py b/proto/aftl_google/api/http_pb2.py
deleted file mode 100644
index 8cd65f3..0000000
--- a/proto/aftl_google/api/http_pb2.py
+++ /dev/null
@@ -1,251 +0,0 @@
-# pylint: skip-file
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: aftl_google/api/http.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='aftl_google/api/http.proto',
-  package='google.api',
-  syntax='proto3',
-  serialized_options=_b('\n\016com.google.apiB\tHttpProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\370\001\001\242\002\004GAPI'),
-  serialized_pb=_b('\n\x1a\x61\x66tl_google/api/http.proto\x12\ngoogle.api\"T\n\x04Http\x12#\n\x05rules\x18\x01 \x03(\x0b\x32\x14.google.api.HttpRule\x12\'\n\x1f\x66ully_decode_reserved_expansion\x18\x02 \x01(\x08\"\x81\x02\n\x08HttpRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\r\n\x03get\x18\x02 \x01(\tH\x00\x12\r\n\x03put\x18\x03 \x01(\tH\x00\x12\x0e\n\x04post\x18\x04 \x01(\tH\x00\x12\x10\n\x06\x64\x65lete\x18\x05 \x01(\tH\x00\x12\x0f\n\x05patch\x18\x06 \x01(\tH\x00\x12/\n\x06\x63ustom\x18\x08 \x01(\x0b\x32\x1d.google.api.CustomHttpPatternH\x00\x12\x0c\n\x04\x62ody\x18\x07 \x01(\t\x12\x15\n\rresponse_body\x18\x0c \x01(\t\x12\x31\n\x13\x61\x64\x64itional_bindings\x18\x0b \x03(\x0b\x32\x14.google.api.HttpRuleB\t\n\x07pattern\"/\n\x11\x43ustomHttpPattern\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\tBj\n\x0e\x63om.google.apiB\tHttpProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3')
-)
-
-
-
-
-_HTTP = _descriptor.Descriptor(
-  name='Http',
-  full_name='google.api.Http',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='rules', full_name='google.api.Http.rules', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='fully_decode_reserved_expansion', full_name='google.api.Http.fully_decode_reserved_expansion', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=42,
-  serialized_end=126,
-)
-
-
-_HTTPRULE = _descriptor.Descriptor(
-  name='HttpRule',
-  full_name='google.api.HttpRule',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='selector', full_name='google.api.HttpRule.selector', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='get', full_name='google.api.HttpRule.get', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='put', full_name='google.api.HttpRule.put', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='post', full_name='google.api.HttpRule.post', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='delete', full_name='google.api.HttpRule.delete', index=4,
-      number=5, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='patch', full_name='google.api.HttpRule.patch', index=5,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='custom', full_name='google.api.HttpRule.custom', index=6,
-      number=8, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='body', full_name='google.api.HttpRule.body', index=7,
-      number=7, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='response_body', full_name='google.api.HttpRule.response_body', index=8,
-      number=12, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='additional_bindings', full_name='google.api.HttpRule.additional_bindings', index=9,
-      number=11, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='pattern', full_name='google.api.HttpRule.pattern',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=129,
-  serialized_end=386,
-)
-
-
-_CUSTOMHTTPPATTERN = _descriptor.Descriptor(
-  name='CustomHttpPattern',
-  full_name='google.api.CustomHttpPattern',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='kind', full_name='google.api.CustomHttpPattern.kind', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='path', full_name='google.api.CustomHttpPattern.path', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=388,
-  serialized_end=435,
-)
-
-_HTTP.fields_by_name['rules'].message_type = _HTTPRULE
-_HTTPRULE.fields_by_name['custom'].message_type = _CUSTOMHTTPPATTERN
-_HTTPRULE.fields_by_name['additional_bindings'].message_type = _HTTPRULE
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
-  _HTTPRULE.fields_by_name['get'])
-_HTTPRULE.fields_by_name['get'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
-  _HTTPRULE.fields_by_name['put'])
-_HTTPRULE.fields_by_name['put'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
-  _HTTPRULE.fields_by_name['post'])
-_HTTPRULE.fields_by_name['post'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
-  _HTTPRULE.fields_by_name['delete'])
-_HTTPRULE.fields_by_name['delete'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
-  _HTTPRULE.fields_by_name['patch'])
-_HTTPRULE.fields_by_name['patch'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
-  _HTTPRULE.fields_by_name['custom'])
-_HTTPRULE.fields_by_name['custom'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-DESCRIPTOR.message_types_by_name['Http'] = _HTTP
-DESCRIPTOR.message_types_by_name['HttpRule'] = _HTTPRULE
-DESCRIPTOR.message_types_by_name['CustomHttpPattern'] = _CUSTOMHTTPPATTERN
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Http = _reflection.GeneratedProtocolMessageType('Http', (_message.Message,), {
-  'DESCRIPTOR' : _HTTP,
-  '__module__' : 'aftl_google.api.http_pb2'
-  # @@protoc_insertion_point(class_scope:google.api.Http)
-  })
-_sym_db.RegisterMessage(Http)
-
-HttpRule = _reflection.GeneratedProtocolMessageType('HttpRule', (_message.Message,), {
-  'DESCRIPTOR' : _HTTPRULE,
-  '__module__' : 'aftl_google.api.http_pb2'
-  # @@protoc_insertion_point(class_scope:google.api.HttpRule)
-  })
-_sym_db.RegisterMessage(HttpRule)
-
-CustomHttpPattern = _reflection.GeneratedProtocolMessageType('CustomHttpPattern', (_message.Message,), {
-  'DESCRIPTOR' : _CUSTOMHTTPPATTERN,
-  '__module__' : 'aftl_google.api.http_pb2'
-  # @@protoc_insertion_point(class_scope:google.api.CustomHttpPattern)
-  })
-_sym_db.RegisterMessage(CustomHttpPattern)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
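
The descriptor wiring at the end of the generated module above is what gives the verb fields their mutually exclusive "pattern" oneof semantics; a small sketch of the observable behavior, assuming the module were still importable as aftl_google.api.http_pb2 (it is removed by this change).

    from aftl_google.api import http_pb2   # import path assumed

    rule = http_pb2.HttpRule(selector="x.Y.Z", get="/v1/logs")
    rule.put = "/v1/logs"                  # assigning another verb clears "get"
    assert rule.WhichOneof("pattern") == "put"
    # Serialization round-trips through the wire format defined by the descriptors.
    assert http_pb2.HttpRule.FromString(rule.SerializeToString()) == rule
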
diff --git a/proto/aftl_google/api/http_pb2_grpc.py b/proto/aftl_google/api/http_pb2_grpc.py
deleted file mode 100644
index 73636b2..0000000
--- a/proto/aftl_google/api/http_pb2_grpc.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# pylint: skip-file
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
diff --git a/proto/aftl_google/rpc/__init__.py b/proto/aftl_google/rpc/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/proto/aftl_google/rpc/__init__.py
+++ /dev/null
diff --git a/proto/aftl_google/rpc/status.proto b/proto/aftl_google/rpc/status.proto
deleted file mode 100644
index b0daa36..0000000
--- a/proto/aftl_google/rpc/status.proto
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright 2017 Google Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package google.rpc;
-
-import "google/protobuf/any.proto";
-
-option go_package = "google.golang.org/genproto/googleapis/rpc/status;status";
-option java_multiple_files = true;
-option java_outer_classname = "StatusProto";
-option java_package = "com.google.rpc";
-option objc_class_prefix = "RPC";
-
-// The `Status` type defines a logical error model that is suitable for
-// different programming environments, including REST APIs and RPC APIs. It is
-// used by [gRPC](https://github.com/grpc). The error model is designed to be:
-//
-// - Simple to use and understand for most users
-// - Flexible enough to meet unexpected needs
-//
-// # Overview
-//
-// The `Status` message contains three pieces of data: error code, error
-// message, and error details. The error code should be an enum value of
-// [google.rpc.Code][google.rpc.Code], but it may accept additional error codes
-// if needed.  The error message should be a developer-facing English message
-// that helps developers *understand* and *resolve* the error. If a localized
-// user-facing error message is needed, put the localized message in the error
-// details or localize it in the client. The optional error details may contain
-// arbitrary information about the error. There is a predefined set of error
-// detail types in the package `google.rpc` that can be used for common error
-// conditions.
-//
-// # Language mapping
-//
-// The `Status` message is the logical representation of the error model, but it
-// is not necessarily the actual wire format. When the `Status` message is
-// exposed in different client libraries and different wire protocols, it can be
-// mapped differently. For example, it will likely be mapped to some exceptions
-// in Java, but more likely mapped to some error codes in C.
-//
-// # Other uses
-//
-// The error model and the `Status` message can be used in a variety of
-// environments, either with or without APIs, to provide a
-// consistent developer experience across different environments.
-//
-// Example uses of this error model include:
-//
-// - Partial errors. If a service needs to return partial errors to the client,
-//     it may embed the `Status` in the normal response to indicate the partial
-//     errors.
-//
-// - Workflow errors. A typical workflow has multiple steps. Each step may
-//     have a `Status` message for error reporting.
-//
-// - Batch operations. If a client uses batch request and batch response, the
-//     `Status` message should be used directly inside batch response, one for
-//     each error sub-response.
-//
-// - Asynchronous operations. If an API call embeds asynchronous operation
-//     results in its response, the status of those operations should be
-//     represented directly using the `Status` message.
-//
-// - Logging. If some API errors are stored in logs, the message `Status` could
-//     be used directly after any stripping needed for security/privacy reasons.
-message Status {
-  // The status code, which should be an enum value of
-  // [google.rpc.Code][google.rpc.Code].
-  int32 code = 1;
-
-  // A developer-facing error message, which should be in English. Any
-  // user-facing error message should be localized and sent in the
-  // [google.rpc.Status.details][google.rpc.Status.details] field, or localized
-  // by the client.
-  string message = 2;
-
-  // A list of messages that carry the error details.  There is a common set of
-  // message types for APIs to use.
-  repeated google.protobuf.Any details = 3;
-}
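
A minimal sketch of the three-part error model described above (code, developer-facing message, Any-packed details), assuming the stock google.rpc bindings from googleapis-common-protos instead of the local copy deleted here; the detail payload is purely illustrative.

    from google.protobuf import wrappers_pb2
    from google.rpc import status_pb2

    status = status_pb2.Status(code=5, message="firmware image not found")  # 5 = NOT_FOUND in google.rpc.Code
    status.details.add().Pack(wrappers_pb2.StringValue(value="vbmeta.img"))  # illustrative detail
    print(status.code, status.message, len(status.details))
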
diff --git a/proto/aftl_google/rpc/status_pb2.py b/proto/aftl_google/rpc/status_pb2.py
deleted file mode 100644
index 28e7c88..0000000
--- a/proto/aftl_google/rpc/status_pb2.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# pylint: skip-file
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: aftl_google/rpc/status.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='aftl_google/rpc/status.proto',
-  package='google.rpc',
-  syntax='proto3',
-  serialized_options=_b('\n\016com.google.rpcB\013StatusProtoP\001Z7google.golang.org/genproto/googleapis/rpc/status;status\242\002\003RPC'),
-  serialized_pb=_b('\n\x1c\x61\x66tl_google/rpc/status.proto\x12\ngoogle.rpc\x1a\x19google/protobuf/any.proto\"N\n\x06Status\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12%\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyB^\n\x0e\x63om.google.rpcB\x0bStatusProtoP\x01Z7google.golang.org/genproto/googleapis/rpc/status;status\xa2\x02\x03RPCb\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
-
-
-
-
-_STATUS = _descriptor.Descriptor(
-  name='Status',
-  full_name='google.rpc.Status',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='code', full_name='google.rpc.Status.code', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='message', full_name='google.rpc.Status.message', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-    _descriptor.FieldDescriptor(
-      name='details', full_name='google.rpc.Status.details', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=71,
-  serialized_end=149,
-)
-
-_STATUS.fields_by_name['details'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-DESCRIPTOR.message_types_by_name['Status'] = _STATUS
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), {
-  'DESCRIPTOR' : _STATUS,
-  '__module__' : 'aftl_google.rpc.status_pb2'
-  # @@protoc_insertion_point(class_scope:google.rpc.Status)
-  })
-_sym_db.RegisterMessage(Status)
-
-
-DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
diff --git a/proto/aftl_google/rpc/status_pb2_grpc.py b/proto/aftl_google/rpc/status_pb2_grpc.py
deleted file mode 100644
index 73636b2..0000000
--- a/proto/aftl_google/rpc/status_pb2_grpc.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# pylint: skip-file
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
diff --git a/test/avbtool_unittest.cc b/test/avbtool_unittest.cc
index 8e1f0dc..8749ea3 100644
--- a/test/avbtool_unittest.cc
+++ b/test/avbtool_unittest.cc
@@ -3035,11 +3035,11 @@
       "{\n"
       "  \"partitions\": [\n"
       "    {\n"
-      "      \"name\": \"system\", \n"
+      "      \"name\": \"system\",\n"
       "      \"digest\": \"d52d93c988d336a79abe1c05240ae9a79a9b7d61\"\n"
-      "    }, \n"
+      "    },\n"
       "    {\n"
-      "      \"name\": \"boot\", \n"
+      "      \"name\": \"boot\",\n"
       "      \"digest\": "
       "\"184cb36243adb8b87d2d8c4802de32125fe294ec46753d732144ee65df68a23d\"\n"
       "    }\n"