Merge branch 'snapshot' into develop
diff --git a/src/client.rs b/src/client.rs
index 57b3d75..d5d22e0 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -1,11 +1,11 @@
 //! Clients for high level interactions with TUF repositories.
 
-use std::collections::{HashSet, VecDeque};
-
 use Result;
+use crypto;
 use error::Error;
 use interchange::DataInterchange;
-use metadata::{MetadataVersion, RootMetadata, Role, MetadataPath, TargetPath};
+use metadata::{MetadataVersion, RootMetadata, Role, MetadataPath, TargetPath, TargetDescription,
+               TargetsMetadata, SnapshotMetadata};
 use repository::Repository;
 use tuf::Tuf;
 
@@ -96,23 +96,7 @@
             }
         };
 
-        let de = match Self::update_delegations(
-            &mut self.tuf,
-            &mut self.local,
-            self.config.min_bytes_per_second,
-        ) {
-            Ok(b) => b,
-            Err(e) => {
-                warn!(
-                    "Error updating delegation metadata from local sources: {:?}",
-                    e
-                );
-                // TODO this might be untrue because of a partial update
-                false
-            }
-        };
-
-        Ok(r || ts || sn || ta || de)
+        Ok(r || ts || sn || ta)
     }
 
     /// Update TUF metadata from the remote repository.
@@ -141,13 +125,8 @@
             &mut self.remote,
             self.config.min_bytes_per_second,
         )?;
-        let de = Self::update_delegations(
-            &mut self.tuf,
-            &mut self.remote,
-            self.config.min_bytes_per_second,
-        )?;
 
-        Ok(r || ts || sn || ta || de)
+        Ok(r || ts || sn || ta)
     }
 
     /// Returns `true` if an update occurred and `false` otherwise.
@@ -251,13 +230,15 @@
             return Ok(false);
         }
 
+        let (alg, value) = crypto::hash_preference(snapshot_description.hashes())?;
+
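+        // Bound the snapshot download by the advertised size and verify it against the
+        // preferred hash.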
         let snap = repo.fetch_metadata(
             &Role::Snapshot,
             &MetadataPath::from_role(&Role::Snapshot),
             &MetadataVersion::None,
-            &None,
+            &Some(snapshot_description.size()),
             min_bytes_per_second,
-            None,
+            Some((alg, value.clone())),
         )?;
         tuf.update_snapshot(snap)
     }
@@ -286,89 +267,153 @@
             return Ok(false);
         }
 
+        let (alg, value) = crypto::hash_preference(targets_description.hashes())?;
+
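+        // Likewise, bound the targets download by its advertised size and preferred hash.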
         let targets = repo.fetch_metadata(
             &Role::Targets,
             &MetadataPath::from_role(&Role::Targets),
             &MetadataVersion::None,
-            &None,
+            &Some(targets_description.size()),
             min_bytes_per_second,
-            None,
+            Some((alg, value.clone())),
         )?;
         tuf.update_targets(targets)
     }
 
-    /// Returns `true` if an update occurred and `false` otherwise.
-    fn update_delegations<T>(
-        tuf: &mut Tuf<D>,
-        repo: &mut T,
-        min_bytes_per_second: u32,
-    ) -> Result<bool>
-    where
-        T: Repository<D>,
-    {
-        let _ = match tuf.snapshot() {
-            Some(s) => s,
-            None => return Err(Error::MissingMetadata(Role::Snapshot)),
-        }.clone();
-        let targets = match tuf.targets() {
-            Some(t) => t,
-            None => return Err(Error::MissingMetadata(Role::Targets)),
-        }.clone();
-        let delegations = match targets.delegations() {
-            Some(d) => d,
-            None => return Ok(false),
-        }.clone();
-
-        let mut visited = HashSet::new();
-        let mut to_visit = VecDeque::new();
-
-        for role in delegations.roles().iter().map(|r| r.role()) {
-            let _ = to_visit.push_back(role.clone());
-        }
-
-        let mut updated = false;
-        while let Some(role) = to_visit.pop_front() {
-            if visited.contains(&role) {
-                continue;
-            }
-            let _ = visited.insert(role.clone());
-
-            let delegation = match repo.fetch_metadata(
-                &Role::Targets,
-                &role,
-                &MetadataVersion::None,
-                &None,
-                min_bytes_per_second,
-                None,
-            ) {
-                Ok(d) => d,
-                Err(e) => {
-                    warn!("Failed to fetuch delegation {:?}: {:?}", role, e);
-                    continue;
-                }
-            };
-
-            match tuf.update_delegation(&role, delegation) {
-                Ok(u) => updated |= u,
-                Err(e) => {
-                    warn!("Failed to update delegation {:?}: {:?}", role, e);
-                    continue;
-                }
-            };
-
-            if let Some(ds) = tuf.delegations().get(&role).and_then(|t| t.delegations()) {
-                for d in ds.roles() {
-                    let _ = to_visit.push_back(d.role().clone());
-                }
-            }
-        }
-
-        Ok(updated)
-    }
-
     /// Fetch a target from the remote repo and write it to the local repo.
     pub fn fetch_target(&mut self, target: &TargetPath) -> Result<()> {
-        let target_description = self.tuf.target_description(target)?;
+        fn lookup<_D, _L, _R>(
+            tuf: &mut Tuf<_D>,
+            config: &Config,
+            default_terminate: bool,
+            target: &TargetPath,
+            snapshot: &SnapshotMetadata,
+            targets: Option<&TargetsMetadata>,
+            local: &mut _L,
+            remote: &mut _R,
+        ) -> (bool, Result<TargetDescription>)
+        where
+            _D: DataInterchange,
+            _L: Repository<_D>,
+            _R: Repository<_D>,
+        {
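+            // Depth-first search over the delegation graph for a description of `target`.
+            // `default_terminate` records whether the delegation that led here was terminating,
+            // so errors propagate (or not) to the caller accordingly.
+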
+            // these clones are dumb, but we need owned values rather than references so we can
+            // update tuf in the loop below
+            let targets = match targets {
+                Some(t) => t.clone(),
+                None => {
+                    match tuf.targets() {
+                        Some(t) => t.clone(),
+                        None => {
+                            return (
+                                default_terminate,
+                                Err(Error::MissingMetadata(Role::Targets)),
+                            )
+                        }
+                    }
+                }
+            };
+
+            match targets.targets().get(target) {
+                Some(t) => return (default_terminate, Ok(t.clone())),
+                None => (),
+            }
+
+            let delegations = match targets.delegations() {
+                Some(d) => d,
+                None => return (default_terminate, Err(Error::NotFound)),
+            };
+
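+            // Walk each delegated role, descending only into delegations whose path patterns
+            // cover the requested target.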
+            for delegation in delegations.roles().iter() {
+                if !delegation.paths().iter().any(|p| target.is_child(p)) {
+                    if delegation.terminating() {
+                        return (true, Err(Error::NotFound));
+                    } else {
+                        continue;
+                    }
+                }
+
+                let role_meta = match snapshot.meta().get(delegation.role()) {
+                    Some(m) => m,
+                    None if !delegation.terminating() => continue,
+                    None => return (true, Err(Error::NotFound)),
+                };
+
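+                // Try the local repository first, then fall back to the remote one.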
+                let meta = match local
+                    .fetch_metadata::<TargetsMetadata>(
+                        &Role::Targets,
+                        delegation.role(),
+                        &MetadataVersion::None,
+                        &None, // TODO max size
+                        config.min_bytes_per_second(),
+                        None, // TODO hashes
+                    )
+                    .or_else(|_| {
+                        remote.fetch_metadata::<TargetsMetadata>(
+                            &Role::Targets,
+                            delegation.role(),
+                            &MetadataVersion::None,
+                            &None, // TODO max size
+                            config.min_bytes_per_second(),
+                            None, // TODO hashes
+                        )
+                    }) {
+                    Ok(m) => m,
+                    Err(ref e) if !delegation.terminating() => {
+                        warn!("Failed to fetch metadata {:?}: {:?}", delegation.role(), e);
+                        continue;
+                    }
+                    Err(e) => {
+                        warn!("Failed to fetch metadata {:?}: {:?}", delegation.role(), e);
+                        return (true, Err(e));
+                    }
+                };
+
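+                // Update the trusted delegation, then recurse into the newly added metadata to
+                // continue the search.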
+                match tuf.update_delegation(delegation.role(), meta) {
+                    Ok(_) => {
+                        let meta = tuf.delegations().get(delegation.role()).unwrap().clone();
+                        let (term, res) = lookup(
+                            tuf,
+                            config,
+                            delegation.terminating(),
+                            target,
+                            snapshot,
+                            Some(&meta),
+                            local,
+                            remote,
+                        );
+
+                        if term && res.is_err() {
+                            return (true, res);
+                        }
+
+                        // A successful lookup in the delegated role ends the search.
+                        if res.is_ok() {
+                            return (term, res);
+                        }
+                    }
+                    Err(_) if !delegation.terminating() => continue,
+                    Err(e) => return (true, Err(e)),
+
+                };
+            }
+
+            (default_terminate, Err(Error::NotFound))
+        }
+
+        let snapshot = self.tuf
+            .snapshot()
+            .ok_or_else(|| Error::MissingMetadata(Role::Snapshot))?
+            .clone();
+        let (_, target_description) = lookup(
+            &mut self.tuf,
+            &self.config,
+            false,
+            target,
+            &snapshot,
+            None,
+            &mut self.local,
+            &mut self.remote,
+        );
+        let target_description = target_description?;
+
         let read = self.remote.fetch_target(
             target,
             &target_description,
diff --git a/src/crypto.rs b/src/crypto.rs
index c88b5d5..c5416f1 100644
--- a/src/crypto.rs
+++ b/src/crypto.rs
@@ -3,7 +3,7 @@
 use data_encoding::BASE64URL;
 use derp::{self, Der, Tag};
 use ring;
-use ring::digest::{self, SHA256};
+use ring::digest::{self, SHA256, SHA512};
 use ring::rand::SystemRandom;
 use ring::signature::{RSAKeyPair, RSASigningState, Ed25519KeyPair, ED25519,
                       RSA_PSS_2048_8192_SHA256, RSA_PSS_2048_8192_SHA512, RSA_PSS_SHA256,
@@ -12,6 +12,7 @@
 use serde::ser::{Serialize, Serializer, Error as SerializeError};
 use std::collections::HashMap;
 use std::fmt::{self, Debug, Display};
+use std::io::Read;
 use std::str::FromStr;
 use std::sync::Arc;
 use untrusted::Input;
@@ -56,6 +57,55 @@
     Err(Error::NoSupportedHashAlgorithm)
 }
 
+/// Calculate the size and hash digests from a given `Read`.
+pub fn calculate_hashes<R: Read>(
+    mut read: R,
+    hash_algs: &[HashAlgorithm],
+) -> Result<(u64, HashMap<HashAlgorithm, HashValue>)> {
+    if hash_algs.is_empty() {
+        return Err(Error::IllegalArgument(
+            "Cannot provide empty set of hash algorithms".into(),
+        ));
+    }
+
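+    // One digest context per requested algorithm, all fed from a single pass over the reader.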
+    let mut size = 0;
+    let mut hashes = HashMap::new();
+    for alg in hash_algs {
+        let context = match alg {
+            &HashAlgorithm::Sha256 => digest::Context::new(&SHA256),
+            &HashAlgorithm::Sha512 => digest::Context::new(&SHA512),
+        };
+
+        let _ = hashes.insert(alg, context);
+    }
+
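+    // Stream the input in 1 KiB chunks, counting bytes and updating every digest context.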
+    let mut buf = vec![0; 1024];
+    loop {
+        match read.read(&mut buf) {
+            Ok(read_bytes) => {
+                if read_bytes == 0 {
+                    break;
+                }
+
+                size += read_bytes as u64;
+
+                for (_, mut context) in hashes.iter_mut() {
+                    context.update(&buf[0..read_bytes]);
+                }
+            }
+            e @ Err(_) => e.map(|_| ())?,
+        }
+    }
+
+    let hashes = hashes
+        .drain()
+        .map(|(k, v)| {
+            (k.clone(), HashValue::new(v.finish().as_ref().to_vec()))
+        })
+        .collect();
+    Ok((size, hashes))
+}
+
 /// Calculate the given key's ID.
 ///
 /// A `KeyId` is calculated as `sha256(public_key_bytes)`. The TUF spec says that it should be
diff --git a/src/interchange/mod.rs b/src/interchange/mod.rs
index a43eafd..2063c53 100644
--- a/src/interchange/mod.rs
+++ b/src/interchange/mod.rs
@@ -33,7 +33,10 @@
         T: Serialize;
 
     /// Write a struct to a stream.
-    fn to_writer<W, T: ?Sized>(writer: W, value: &T) -> Result<()>
+    ///
+    /// Note: This *MUST* write the bytes canonically for hashes to line up correctly in other
+    /// areas of the library.
+    fn to_writer<W, T: Sized>(writer: W, value: &T) -> Result<()>
     where
         W: Write,
         T: Serialize;
@@ -141,12 +144,14 @@
     /// JsonDataInterchange::to_writer(&mut buf, &arr).unwrap();
     /// assert!(&buf == b"[1, 2, 3]" || &buf == b"[1,2,3]");
     /// ```
-    fn to_writer<W, T: ?Sized>(writer: W, value: &T) -> Result<()>
+    fn to_writer<W, T: Sized>(mut writer: W, value: &T) -> Result<()>
     where
         W: Write,
         T: Serialize,
     {
-        Ok(json::to_writer(writer, value)?)
+        let bytes = Self::canonicalize(&Self::serialize(value)?)?;
+        writer.write_all(&bytes)?;
+        Ok(())
     }
 
     /// ```
diff --git a/src/metadata.rs b/src/metadata.rs
index d2386d0..03c90ca 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -2,7 +2,6 @@
 
 use chrono::DateTime;
 use chrono::offset::Utc;
-use ring::digest::{self, SHA256, SHA512};
 use serde::de::{Deserialize, DeserializeOwned, Deserializer, Error as DeserializeError};
 use serde::ser::{Serialize, Serializer, Error as SerializeError};
 use std::collections::{HashMap, HashSet};
@@ -12,7 +11,8 @@
 use std::marker::PhantomData;
 
 use Result;
-use crypto::{KeyId, PublicKey, Signature, HashAlgorithm, HashValue, SignatureScheme, PrivateKey};
+use crypto::{self, KeyId, PublicKey, Signature, HashAlgorithm, HashValue, SignatureScheme,
+             PrivateKey};
 use error::Error;
 use interchange::DataInterchange;
 use shims;
@@ -704,14 +704,47 @@
 }
 
 /// Description of a piece of metadata, used in verification.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Serialize)]
 pub struct MetadataDescription {
     version: u32,
+    size: usize,
+    hashes: HashMap<HashAlgorithm, HashValue>,
 }
 
 impl MetadataDescription {
+    /// Create a `MetadataDescription` from a given reader. Size and hashes will be calculated.
+    pub fn from_reader<R: Read>(
+        read: R,
+        version: u32,
+        hash_algs: &[HashAlgorithm],
+    ) -> Result<Self> {
+        if version < 1 {
+            return Err(Error::IllegalArgument(
+                "Version must be greater than zero".into(),
+            ));
+        }
+
+        let (size, hashes) = crypto::calculate_hashes(read, hash_algs)?;
+
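+        // `size` is a u64, but `MetadataDescription` stores a usize, so guard the cast
+        // (this can only trip on targets where usize is narrower than u64).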
+        if size > ::std::usize::MAX as u64 {
+            return Err(Error::IllegalArgument(
+                "Calculated size exceeded usize".into(),
+            ));
+        }
+
+        Ok(MetadataDescription {
+            version: version,
+            size: size as usize,
+            hashes: hashes,
+        })
+    }
+
     /// Create a new `MetadataDescription`.
-    pub fn new(version: u32) -> Result<Self> {
+    pub fn new(
+        version: u32,
+        size: usize,
+        hashes: HashMap<HashAlgorithm, HashValue>,
+    ) -> Result<Self> {
         if version < 1 {
             return Err(Error::IllegalArgument(format!(
                 "Metadata version must be greater than zero. Found: {}",
@@ -719,13 +752,42 @@
             )));
         }
 
-        Ok(MetadataDescription { version: version })
+        if hashes.is_empty() {
+            return Err(Error::IllegalArgument(
+                "Cannot have empty set of hashes".into(),
+            ));
+        }
+
+        Ok(MetadataDescription {
+            version: version,
+            size: size,
+            hashes: hashes,
+        })
     }
 
     /// The version of the described metadata.
     pub fn version(&self) -> u32 {
         self.version
     }
+
+    /// The size of the described metadata.
+    pub fn size(&self) -> usize {
+        self.size
+    }
+
+    /// An immutable reference to the hashes of the described metadata.
+    pub fn hashes(&self) -> &HashMap<HashAlgorithm, HashValue> {
+        &self.hashes
+    }
+}
+
+impl<'de> Deserialize<'de> for MetadataDescription {
+    fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+        let intermediate: shims::MetadataDescription = Deserialize::deserialize(de)?;
+        intermediate.try_into().map_err(|e| {
+            DeserializeError::custom(format!("{:?}", e))
+        })
+    }
 }
 
 /// Metadata for the snapshot role.
@@ -909,13 +971,30 @@
 }
 
 /// Description of a target, used in verification.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Serialize)]
 pub struct TargetDescription {
     size: u64,
     hashes: HashMap<HashAlgorithm, HashValue>,
 }
 
 impl TargetDescription {
+    /// Create a new `TargetDescription`.
+    ///
+    /// Note: Creating this manually could lead to errors, and the `from_reader` method is
+    /// preferred.
+    pub fn new(size: u64, hashes: HashMap<HashAlgorithm, HashValue>) -> Result<Self> {
+        if hashes.is_empty() {
+            return Err(Error::IllegalArgument(
+                "Cannot have empty set of hashes".into(),
+            ));
+        }
+
+        Ok(TargetDescription {
+            size: size,
+            hashes: hashes,
+        })
+    }
+
     /// Read from the given reader and calculate the size and hash values.
     ///
     /// ```
@@ -928,7 +1007,6 @@
     /// fn main() {
     ///     let bytes: &[u8] = b"it was a pleasure to burn";
     ///
-    ///     // $ printf 'it was a pleasure to burn' | sha256sum
     ///     let s = "Rd9zlbzrdWfeL7gnIEi05X-Yv2TCpy4qqZM1N72ZWQs=";
     ///     let sha256 = HashValue::new(BASE64URL.decode(s.as_bytes()).unwrap());
     ///
@@ -937,7 +1015,6 @@
     ///     assert_eq!(target_description.size(), bytes.len() as u64);
     ///     assert_eq!(target_description.hashes().get(&HashAlgorithm::Sha256), Some(&sha256));
     ///
-    ///     // $ printf 'it was a pleasure to burn' | sha512sum
     ///     let s ="tuIxwKybYdvJpWuUj6dubvpwhkAozWB6hMJIRzqn2jOUdtDTBg381brV4K\
     ///         BU1zKP8GShoJuXEtCf5NkDTCEJgQ==";
     ///     let sha512 = HashValue::new(BASE64URL.decode(s.as_bytes()).unwrap());
@@ -948,46 +1025,11 @@
     ///     assert_eq!(target_description.hashes().get(&HashAlgorithm::Sha512), Some(&sha512));
     /// }
     /// ```
-    pub fn from_reader<R>(mut read: R, hash_algs: &[HashAlgorithm]) -> Result<Self>
+    pub fn from_reader<R>(read: R, hash_algs: &[HashAlgorithm]) -> Result<Self>
     where
         R: Read,
     {
-        let mut size = 0;
-        let mut hashes = HashMap::new();
-        for alg in hash_algs {
-            let context = match alg {
-                &HashAlgorithm::Sha256 => digest::Context::new(&SHA256),
-                &HashAlgorithm::Sha512 => digest::Context::new(&SHA512),
-            };
-
-            let _ = hashes.insert(alg, context);
-        }
-
-        let mut buf = vec![0; 1024];
-        loop {
-            match read.read(&mut buf) {
-                Ok(read_bytes) => {
-                    if read_bytes == 0 {
-                        break;
-                    }
-
-                    size += read_bytes as u64;
-
-                    for (_, mut context) in hashes.iter_mut() {
-                        context.update(&buf[0..read_bytes]);
-                    }
-                }
-                e @ Err(_) => e.map(|_| ())?,
-            }
-        }
-
-        let hashes = hashes
-            .drain()
-            .map(|(k, v)| {
-                (k.clone(), HashValue::new(v.finish().as_ref().to_vec()))
-            })
-            .collect();
-
+        let (size, hashes) = crypto::calculate_hashes(read, hash_algs)?;
         Ok(TargetDescription {
             size: size,
             hashes: hashes,
@@ -1005,6 +1047,15 @@
     }
 }
 
+impl<'de> Deserialize<'de> for TargetDescription {
+    fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+        let intermediate: shims::TargetDescription = Deserialize::deserialize(de)?;
+        intermediate.try_into().map_err(|e| {
+            DeserializeError::custom(format!("{:?}", e))
+        })
+    }
+}
+
 /// Metadata for the targets role.
 #[derive(Debug, Clone, PartialEq)]
 pub struct TargetsMetadata {
@@ -1451,7 +1502,12 @@
             1,
             Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
             hashmap!{
-                MetadataPath::new("foo".into()).unwrap() => MetadataDescription::new(1).unwrap(),
+                MetadataPath::new("foo".into()).unwrap() =>
+                    MetadataDescription::new(
+                        1,
+                        100,
+                        hashmap! { HashAlgorithm::Sha256 => HashValue::new(vec![]) }
+                    ).unwrap(),
             },
         ).unwrap();
 
@@ -1462,6 +1518,10 @@
             "meta": {
                 "foo": {
                     "version": 1,
+                    "size": 100,
+                    "hashes": {
+                        "sha256": "",
+                    },
                 },
             },
         });
@@ -1478,7 +1538,12 @@
             1,
             Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
             hashmap! {
-                MetadataPath::new("foo".into()).unwrap() => MetadataDescription::new(1).unwrap(),
+                MetadataPath::new("foo".into()).unwrap() =>
+                    MetadataDescription::new(
+                        1,
+                        100,
+                        hashmap! { HashAlgorithm::Sha256 => HashValue::new(vec![]) },
+                    ).unwrap(),
             },
         ).unwrap();
 
@@ -1489,6 +1554,10 @@
             "meta": {
                 "foo": {
                     "version": 1,
+                    "size": 100,
+                    "hashes": {
+                        "sha256": "",
+                    },
                 },
             },
         });
@@ -1593,7 +1662,11 @@
             Utc.ymd(2017, 1, 1).and_hms(0, 0, 0),
             hashmap! {
                 MetadataPath::new("foo".into()).unwrap() =>
-                    MetadataDescription::new(1).unwrap(),
+                    MetadataDescription::new(
+                        1,
+                        100,
+                        hashmap! { HashAlgorithm::Sha256 => HashValue::new(vec![]) },
+                    ).unwrap(),
             },
         ).unwrap();
 
@@ -1610,8 +1683,8 @@
                 {
                     "key_id": "qfrfBrkB4lBBSDEBlZgaTGS_SrE6UfmON9kP4i3dJFY=",
                     "scheme": "ed25519",
-                    "value": "T2cUdVcGn08q9Cl4sKXqQni4J63TxZ48wR3jt583QuWXJ2AmxRHwEnW\
-                        IHtkCOmzohF4D0v9JspeH6samO-H6CA==",
+                    "value": "9QXO-Av15zaWEsheO9JbWdo8iAF9vEbUKVePJpGRX5s6b1G8eqH4kvAE2jZV349JvZ\
+                        -2yPGLE20V_7JwhMLYCQ==",
                 }
             ],
             "signed": {
@@ -1621,6 +1694,10 @@
                 "meta": {
                     "foo": {
                         "version": 1,
+                        "size": 100,
+                        "hashes": {
+                            "sha256": "",
+                        },
                     },
                 },
             },
diff --git a/src/repository.rs b/src/repository.rs
index 38c5864..6b2e6c9 100644
--- a/src/repository.rs
+++ b/src/repository.rs
@@ -76,20 +76,23 @@
                     if let Some((context, expected_hash)) = self.hasher.take() {
                         let generated_hash = context.finish();
                         if generated_hash.as_ref() != expected_hash.value() {
-                            return Err(io::Error::new(ErrorKind::InvalidData,
-                                "Calculated hash did not match the required hash."))
+                            return Err(io::Error::new(
+                                ErrorKind::InvalidData,
+                                "Calculated hash did not match the required hash.",
+                            ));
                         }
                     }
 
-                    return Ok(0)
+                    return Ok(0);
                 }
 
                 match self.bytes_read.checked_add(read_bytes as u64) {
                     Some(sum) if sum <= self.max_size => self.bytes_read = sum,
                     _ => {
-                        return Err(io::Error::new(ErrorKind::InvalidData, 
-                            "Read exceeded the maximum allowed bytes."),
-                        );
+                        return Err(io::Error::new(
+                            ErrorKind::InvalidData,
+                            "Read exceeded the maximum allowed bytes.",
+                        ));
                     }
                 }
 
@@ -99,8 +102,10 @@
                     if self.bytes_read as f32 / (duration.num_seconds() as f32) <
                         self.min_bytes_per_second as f32
                     {
-                        return Err(io::Error::new(ErrorKind::TimedOut,
-                                                  "Read aborted. Bitrate too low."));
+                        return Err(io::Error::new(
+                            ErrorKind::TimedOut,
+                            "Read aborted. Bitrate too low.",
+                        ));
                     }
                 }
 
@@ -129,6 +134,9 @@
     fn initialize(&mut self) -> Result<()>;
 
     /// Store signed metadata.
+    ///
+    /// Note: This *MUST* canonicalize the bytes before storing them, because a later read will
+    /// expect the stored bytes to match the metadata's hashes.
     fn store_metadata<M>(
         &mut self,
         role: &Role,
@@ -475,7 +483,7 @@
         min_bytes_per_second: u32,
     ) -> Result<SafeReader<Self::TargetRead>> {
         let resp = self.get(&None, &target_path.components())?;
-        let (alg, value) = crypto::hash_preference(target_description.hashes())?; 
+        let (alg, value) = crypto::hash_preference(target_description.hashes())?;
         Ok(SafeReader::new(
             resp,
             target_description.size(),
@@ -588,9 +596,14 @@
             Some(bytes) => {
                 let cur = Cursor::new(bytes.clone());
                 let (alg, value) = crypto::hash_preference(target_description.hashes())?;
-                let read = SafeReader::new(cur, target_description.size(), min_bytes_per_second, Some((alg, value.clone())));
+                let read = SafeReader::new(
+                    cur,
+                    target_description.size(),
+                    min_bytes_per_second,
+                    Some((alg, value.clone())),
+                );
                 Ok(read)
-            },
+            }
             None => Err(Error::NotFound),
         }
     }
@@ -608,7 +621,8 @@
         repo.initialize().unwrap();
 
         let data: &[u8] = b"like tears in the rain";
-        let target_description = TargetDescription::from_reader(data, &[HashAlgorithm::Sha256]).unwrap();
+        let target_description = TargetDescription::from_reader(data, &[HashAlgorithm::Sha256])
+            .unwrap();
         let path = TargetPath::new("batty".into()).unwrap();
         repo.store_target(data, &path).unwrap();
 
@@ -631,7 +645,8 @@
         repo.initialize().unwrap();
 
         let data: &[u8] = b"like tears in the rain";
-        let target_description = TargetDescription::from_reader(data, &[HashAlgorithm::Sha256]).unwrap();
+        let target_description = TargetDescription::from_reader(data, &[HashAlgorithm::Sha256])
+            .unwrap();
         let path = TargetPath::new("batty".into()).unwrap();
         repo.store_target(data, &path).unwrap();
         assert!(temp_dir.path().join("targets").join("batty").exists());
diff --git a/src/shims.rs b/src/shims.rs
index 5e65a22..7f8d2cc 100644
--- a/src/shims.rs
+++ b/src/shims.rs
@@ -337,3 +337,28 @@
         metadata::Delegations::new(keys, self.roles)
     }
 }
+
+#[derive(Deserialize)]
+pub struct TargetDescription {
+    size: u64,
+    hashes: HashMap<crypto::HashAlgorithm, crypto::HashValue>,
+}
+
+impl TargetDescription {
+    pub fn try_into(self) -> Result<metadata::TargetDescription> {
+        metadata::TargetDescription::new(self.size, self.hashes)
+    }
+}
+
+#[derive(Deserialize)]
+pub struct MetadataDescription {
+    version: u32,
+    size: usize,
+    hashes: HashMap<crypto::HashAlgorithm, crypto::HashValue>,
+}
+
+impl MetadataDescription {
+    pub fn try_into(self) -> Result<metadata::MetadataDescription> {
+        metadata::MetadataDescription::new(self.version, self.size, self.hashes)
+    }
+}
diff --git a/src/tuf.rs b/src/tuf.rs
index 9aef6ee..36793e9 100644
--- a/src/tuf.rs
+++ b/src/tuf.rs
@@ -81,89 +81,6 @@
         &self.delegations
     }
 
-    /// Return the list of all available targets.
-    pub fn available_targets(&self) -> Result<HashSet<TargetPath>> {
-        let _ = self.safe_root_ref()?; // ensure root still valid
-        let _ = self.safe_snapshot_ref()?;
-        let targets = self.safe_targets_ref()?;
-        let out = targets
-            .targets()
-            .keys()
-            .cloned()
-            .collect::<HashSet<TargetPath>>();
-
-        // TODO ensure meta not expired
-        fn lookup<D: DataInterchange>(
-            tuf: &Tuf<D>,
-            role: &MetadataPath,
-            parents: Vec<HashSet<TargetPath>>,
-            visited: &mut HashSet<MetadataPath>,
-        ) -> Option<HashSet<TargetPath>> {
-            if visited.contains(role) {
-                return None;
-            }
-            let _ = visited.insert(role.clone());
-
-            let targets = match tuf.delegations.get(role) {
-                Some(t) => t,
-                None => return None,
-            };
-
-            if targets.expires() <= &Utc::now() {
-                return None;
-            }
-
-            let mut result = HashSet::new();
-            for target in targets.targets().keys() {
-                if target.matches_chain(&parents) {
-                    let _ = result.insert(target.clone());
-                }
-            }
-
-            match targets.delegations() {
-                Some(d) => {
-                    for delegation in d.roles() {
-                        let mut new_parents = parents.clone();
-                        new_parents.push(delegation.paths().clone());
-                        if let Some(res) = lookup(tuf, delegation.role(), new_parents, visited) {
-
-                            for p in res.iter() {
-                                let _ = result.insert(p.clone());
-                            }
-                        }
-                    }
-                }
-                None => (),
-            }
-
-            Some(result)
-        }
-
-        let delegated_targets = match targets.delegations() {
-            Some(delegations) => {
-                let mut result = HashSet::new();
-                let mut visited = HashSet::new();
-                for delegation in delegations.roles() {
-                    if let Some(res) = lookup(
-                        self,
-                        delegation.role(),
-                        vec![delegation.paths().clone()],
-                        &mut visited,
-                    )
-                    {
-                        for p in res.iter() {
-                            let _ = result.insert(p.clone());
-                        }
-                    }
-                }
-                result
-            }
-            None => HashSet::new(),
-        };
-
-        Ok(out.union(&delegated_targets).map(|t| t.clone()).collect())
-    }
-
     /// Verify and update the root metadata.
     pub fn update_root(&mut self, signed_root: SignedMetadata<D, RootMetadata>) -> Result<bool> {
         signed_root.verify(
diff --git a/tests/integration.rs b/tests/integration.rs
index 1c57fa7..b7ae23d 100644
--- a/tests/integration.rs
+++ b/tests/integration.rs
@@ -73,7 +73,7 @@
     //// build the timestamp ////
     let mut meta_map = HashMap::new();
     let path = MetadataPath::new("snapshot".into()).unwrap();
-    let desc = MetadataDescription::new(1).unwrap();
+    let desc = MetadataDescription::from_reader(&*vec![0u8], 1, &[HashAlgorithm::Sha256]).unwrap();
     let _ = meta_map.insert(path, desc);
     let timestamp = TimestampMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)
         .unwrap();
@@ -89,10 +89,10 @@
     //// build the snapshot ////
     let mut meta_map = HashMap::new();
     let path = MetadataPath::new("targets".into()).unwrap();
-    let desc = MetadataDescription::new(1).unwrap();
+    let desc = MetadataDescription::from_reader(&*vec![0u8], 1, &[HashAlgorithm::Sha256]).unwrap();
     let _ = meta_map.insert(path, desc);
     let path = MetadataPath::new("delegation".into()).unwrap();
-    let desc = MetadataDescription::new(1).unwrap();
+    let desc = MetadataDescription::from_reader(&*vec![0u8], 1, &[HashAlgorithm::Sha256]).unwrap();
     let _ = meta_map.insert(path, desc);
     let snapshot = SnapshotMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)
         .unwrap();
@@ -159,11 +159,6 @@
     tuf.update_delegation(&MetadataPath::new("delegation".into()).unwrap(), signed)
         .unwrap();
 
-    assert_eq!(
-        tuf.available_targets().unwrap().iter().next(),
-        Some(&TargetPath::new("foo".into()).unwrap())
-    );
-
     assert!(
         tuf.target_description(&TargetPath::new("foo".into()).unwrap())
             .is_ok()
@@ -226,7 +221,7 @@
     //// build the timestamp ////
     let mut meta_map = HashMap::new();
     let path = MetadataPath::new("snapshot".into()).unwrap();
-    let desc = MetadataDescription::new(1).unwrap();
+    let desc = MetadataDescription::from_reader(&*vec![0u8], 1, &[HashAlgorithm::Sha256]).unwrap();
     let _ = meta_map.insert(path, desc);
     let timestamp = TimestampMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)
         .unwrap();
@@ -242,13 +237,13 @@
     //// build the snapshot ////
     let mut meta_map = HashMap::new();
     let path = MetadataPath::new("targets".into()).unwrap();
-    let desc = MetadataDescription::new(1).unwrap();
+    let desc = MetadataDescription::from_reader(&*vec![0u8], 1, &[HashAlgorithm::Sha256]).unwrap();
     let _ = meta_map.insert(path, desc);
     let path = MetadataPath::new("delegation-a".into()).unwrap();
-    let desc = MetadataDescription::new(1).unwrap();
+    let desc = MetadataDescription::from_reader(&*vec![0u8], 1, &[HashAlgorithm::Sha256]).unwrap();
     let _ = meta_map.insert(path, desc);
     let path = MetadataPath::new("delegation-b".into()).unwrap();
-    let desc = MetadataDescription::new(1).unwrap();
+    let desc = MetadataDescription::from_reader(&*vec![0u8], 1, &[HashAlgorithm::Sha256]).unwrap();
     let _ = meta_map.insert(path, desc);
     let snapshot = SnapshotMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)
         .unwrap();
@@ -351,11 +346,6 @@
     tuf.update_delegation(&MetadataPath::new("delegation-b".into()).unwrap(), signed)
         .unwrap();
 
-    assert_eq!(
-        tuf.available_targets().unwrap().iter().next(),
-        Some(&TargetPath::new("foo".into()).unwrap())
-    );
-
     assert!(
         tuf.target_description(&TargetPath::new("foo".into()).unwrap())
             .is_ok()
diff --git a/tests/simple_example.rs b/tests/simple_example.rs
index 688c692..4afeb81 100644
--- a/tests/simple_example.rs
+++ b/tests/simple_example.rs
@@ -7,7 +7,7 @@
 use tuf::{Tuf, Error};
 use tuf::client::{Client, Config};
 use tuf::crypto::{PrivateKey, SignatureScheme, KeyId, HashAlgorithm};
-use tuf::interchange::JsonDataInterchange;
+use tuf::interchange::{DataInterchange, JsonDataInterchange};
 use tuf::metadata::{RoleDefinition, RootMetadata, Role, MetadataVersion, MetadataPath,
                     SignedMetadata, TargetDescription, TargetPath, TargetsMetadata,
                     MetadataDescription, SnapshotMetadata, TimestampMetadata};
@@ -144,10 +144,13 @@
         &signed,
     )?;
 
+    let targets_bytes =
+        JsonDataInterchange::canonicalize(&JsonDataInterchange::serialize(&signed)?)?;
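+    // `to_writer` stores the canonical form, so hashing the canonical bytes here keeps the
+    // snapshot's description of the targets metadata consistent with what is on disk.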
+
     //// build the snapshot ////
     let mut meta_map = HashMap::new();
     let path = MetadataPath::new("targets".into())?;
-    let desc = MetadataDescription::new(1)?;
+    let desc = MetadataDescription::from_reader(&*targets_bytes, 1, &[HashAlgorithm::Sha256])?;
     let _ = meta_map.insert(path, desc);
     let snapshot = SnapshotMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)?;
 
@@ -170,10 +173,13 @@
         &signed,
     )?;
 
+    let snapshot_bytes =
+        JsonDataInterchange::canonicalize(&JsonDataInterchange::serialize(&signed)?)?;
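+    // Same reasoning: the timestamp's description of the snapshot must hash the canonical bytes.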
+
     //// build the timestamp ////
     let mut meta_map = HashMap::new();
     let path = MetadataPath::new("snapshot".into())?;
-    let desc = MetadataDescription::new(1)?;
+    let desc = MetadataDescription::from_reader(&*snapshot_bytes, 1, &[HashAlgorithm::Sha256])?;
     let _ = meta_map.insert(path, desc);
     let timestamp = TimestampMetadata::new(1, Utc.ymd(2038, 1, 1).and_hms(0, 0, 0), meta_map)?;