fetch targets
diff --git a/Cargo.toml b/Cargo.toml
index 125765c..5c80a0a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -32,10 +32,10 @@
serde = "1"
serde_derive = "1"
serde_json = "1"
+tempfile = "2.1.5"
url = "1.4"
untrusted = "0.5"
uuid = { version = "0.5", features = [ "v4" ] }
-walkdir = "1"
[dev-dependencies]
tempdir = "0.3"
diff --git a/Makefile b/Makefile
index cbe6005..c71954d 100644
--- a/Makefile
+++ b/Makefile
@@ -5,7 +5,7 @@
@find . -name '*.rs.bk' -type f -delete
dev-docs: ## Generate the documentation for all modules (dev friendly)
- @cargo rustdoc --all-features -- --no-defaults --passes "collapse-docs" --passes "unindent-comments"
+ @cargo rustdoc --all-features --open -- --no-defaults --passes "collapse-docs" --passes "unindent-comments"
help: ## Print this message
@awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%16s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST)
diff --git a/src/client.rs b/src/client.rs
index 643e176..60b1614 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -4,7 +4,7 @@
use crypto;
use error::Error;
use interchange::DataInterchange;
-use metadata::{MetadataVersion, RootMetadata, Role, MetadataPath};
+use metadata::{MetadataVersion, RootMetadata, Role, MetadataPath, TargetPath};
use repository::Repository;
use tuf::Tuf;
@@ -106,6 +106,7 @@
{
let latest_root = repo.fetch_metadata(
&Role::Root,
+ &MetadataPath::from_role(&Role::Root),
&MetadataVersion::None,
max_root_size,
None,
@@ -129,6 +130,7 @@
for i in (tuf.root().version() + 1)..latest_version {
let signed = repo.fetch_metadata(
&Role::Root,
+ &MetadataPath::from_role(&Role::Root),
&MetadataVersion::Number(i),
max_root_size,
None,
@@ -157,6 +159,7 @@
{
let ts = repo.fetch_metadata(
&Role::Timestamp,
+ &MetadataPath::from_role(&Role::Timestamp),
&MetadataVersion::None,
max_timestamp_size,
None,
@@ -191,6 +194,7 @@
let snap = repo.fetch_metadata(
&Role::Snapshot,
+ &MetadataPath::from_role(&Role::Snapshot),
&MetadataVersion::None,
&snapshot_description.length(),
hashes,
@@ -225,12 +229,20 @@
let targets = repo.fetch_metadata(
&Role::Targets,
+ &MetadataPath::from_role(&Role::Targets),
&MetadataVersion::None,
&targets_description.length(),
hashes,
)?;
tuf.update_targets(targets)
}
+
+ /// Fetch a target from the remote repo and write it to the local repo.
+ pub fn fetch_target(&mut self, target: &TargetPath) -> Result<()> {
+ let target_description = self.tuf.target_description(target)?;
+ let read = self.remote.fetch_target(target)?;
+ self.local.store_target(read, target, target_description)
+ }
}
/// Configuration for a TUF `Client`.
diff --git a/src/crypto.rs b/src/crypto.rs
index 8114e54..3e3b8b6 100644
--- a/src/crypto.rs
+++ b/src/crypto.rs
@@ -10,7 +10,7 @@
use serde::de::{Deserialize, Deserializer, Error as DeserializeError};
use serde::ser::{Serialize, Serializer, SerializeTupleStruct, Error as SerializeError};
use std::collections::HashMap;
-use std::fmt::{self, Debug};
+use std::fmt::{self, Debug, Display};
use std::str::FromStr;
use std::sync::Arc;
use untrusted::Input;
@@ -24,6 +24,20 @@
/// Given a map of hash algorithms and their values, get the prefered algorithm and the hash
/// calculated by it. Returns an `Err` if there is no match.
+///
+/// ```
+/// use std::collections::HashMap;
+/// use tuf::crypto::{hash_preference, HashValue, HashAlgorithm};
+///
+/// let mut map = HashMap::new();
+/// assert!(hash_preference(&map).is_err());
+///
+/// let _ = map.insert(HashAlgorithm::Sha512, HashValue::from_hex("abcd").unwrap());
+/// assert_eq!(hash_preference(&map).unwrap().0, &HashAlgorithm::Sha512);
+///
+/// let _ = map.insert(HashAlgorithm::Sha256, HashValue::from_hex("0123").unwrap());
+/// assert_eq!(hash_preference(&map).unwrap().0, &HashAlgorithm::Sha512);
+/// ```
pub fn hash_preference<'a>(
hashes: &'a HashMap<HashAlgorithm, HashValue>,
) -> Result<(&'static HashAlgorithm, &'a HashValue)> {
@@ -483,16 +497,43 @@
}
/// Wrapper for the value of a hash digest.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct HashValue(Vec<u8>);
impl HashValue {
+ /// Parse a hex-lower string and return a `HashValue`.
+ ///
+ /// ```
+ /// use tuf::crypto::HashValue;
+ /// assert_eq!(HashValue::from_hex("abcd").unwrap().value(), &[0xab, 0xcd]);
+ /// ```
+ pub fn from_hex(s: &str) -> Result<Self> {
+ Ok(HashValue(HEXLOWER.decode(s.as_bytes())?))
+ }
+
+ /// Create a new `HashValue` from the given digest bytes.
+ pub fn new(bytes: Vec<u8>) -> Self {
+ HashValue(bytes)
+ }
+
/// An immutable reference to the bytes of the hash value.
pub fn value(&self) -> &[u8] {
&self.0
}
}
+impl Debug for HashValue {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "HashValue {{ \"{}\" }}", HEXLOWER.encode(&self.0))
+ }
+}
+
+impl Display for HashValue {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", HEXLOWER.encode(&self.0))
+ }
+}
+
#[cfg(test)]
mod test {
use super::*;
diff --git a/src/error.rs b/src/error.rs
index 66c2079..7c2e903 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -6,6 +6,7 @@
use pem;
use std::io;
use std::path::Path;
+use tempfile;
use metadata::Role;
use rsa::der;
@@ -32,6 +33,9 @@
Opaque(String),
/// There was a library internal error. These errors are *ALWAYS* bugs and should be reported.
Programming(String),
+ /// The target is unavailable. This may mean it is either not in the metadata or the metadata
+ /// chain to the target cannot be fully verified.
+ TargetUnavailable,
/// The metadata or target failed to verify.
VerificationFailure(String),
}
@@ -84,3 +88,9 @@
Error::Opaque("Error reading/writing DER".into())
}
}
+
+impl From<tempfile::PersistError> for Error {
+ fn from(err: tempfile::PersistError) -> Error {
+ Error::Opaque(format!("Error persisting temp file: {:?}", err))
+ }
+}
diff --git a/src/interchange/mod.rs b/src/interchange/mod.rs
index 838ff61..e5bda61 100644
--- a/src/interchange/mod.rs
+++ b/src/interchange/mod.rs
@@ -1,4 +1,4 @@
-//! Contains structures and functions to aid in various TUF data interchange formats.
+//! Structures and functions to aid in various TUF data interchange formats.
mod cjson;
diff --git a/src/lib.rs b/src/lib.rs
index 83023f3..bdf4ad8 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -43,7 +43,7 @@
//!
//! let tuf = Tuf::<JsonDataInterchange>::from_root_pinned(root, &key_ids).unwrap();
//!
-//! let mut local = FileSystemRepository::new(PathBuf::from("~/.cargo/tuf"));
+//! let mut local = FileSystemRepository::new(PathBuf::from("~/.rustup"));
//!
//! let mut remote = HttpRepository::new(
//! Url::parse("https://static.rust-lang.org/").unwrap(),
@@ -78,10 +78,10 @@
extern crate serde_json as json;
#[cfg(test)]
extern crate tempdir;
+extern crate tempfile;
extern crate url;
extern crate untrusted;
extern crate uuid;
-extern crate walkdir;
pub mod error;
@@ -96,7 +96,6 @@
mod rsa;
mod shims;
pub mod tuf;
-mod util;
pub use tuf::*;
pub use error::*;
diff --git a/src/metadata.rs b/src/metadata.rs
index f230f03..4fe2863 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -2,10 +2,12 @@
use chrono::DateTime;
use chrono::offset::Utc;
+use ring::digest::{self, SHA256, SHA512};
use serde::de::{Deserialize, DeserializeOwned, Deserializer, Error as DeserializeError};
use serde::ser::{Serialize, Serializer, Error as SerializeError};
use std::collections::{HashMap, HashSet};
use std::fmt::{self, Debug, Display};
+use std::io::Read;
use std::marker::PhantomData;
use Result;
@@ -14,6 +16,117 @@
use interchange::DataInterchange;
use shims;
+static PATH_ILLEGAL_COMPONENTS: &'static [&str] = &[
+ "", // empty
+ ".", // current dir
+ "..", // parent dir
+ // TODO ? "0", // may translate to nul in windows
+];
+
+static PATH_ILLEGAL_COMPONENTS_CASE_INSENSITIVE: &'static [&str] = &[
+ // DOS device files
+ "CON",
+ "PRN",
+ "AUX",
+ "NUL",
+ "COM1",
+ "COM2",
+ "COM3",
+ "COM4",
+ "COM5",
+ "COM6",
+ "COM7",
+ "COM8",
+ "COM9",
+ "LPT1",
+ "LPT2",
+ "LPT3",
+ "LPT4",
+ "LPT5",
+ "LPT6",
+ "LPT7",
+ "LPT8",
+ "LPT9",
+ "KEYBD$",
+ "CLOCK$",
+ "SCREEN$",
+ "$IDLE$",
+ "CONFIG$",
+];
+
+static PATH_ILLEGAL_STRINGS: &'static [&str] = &[
+ "\\", // for windows compatibility
+ "<",
+ ">",
+ "\"",
+ "|",
+ "?",
+ "*",
+ // control characters, all illegal in FAT
+ "\u{000}",
+ "\u{001}",
+ "\u{002}",
+ "\u{003}",
+ "\u{004}",
+ "\u{005}",
+ "\u{006}",
+ "\u{007}",
+ "\u{008}",
+ "\u{009}",
+ "\u{00a}",
+ "\u{00b}",
+ "\u{00c}",
+ "\u{00d}",
+ "\u{00e}",
+ "\u{00f}",
+ "\u{010}",
+ "\u{011}",
+ "\u{012}",
+ "\u{013}",
+ "\u{014}",
+ "\u{015}",
+ "\u{016}",
+ "\u{017}",
+ "\u{018}",
+ "\u{019}",
+ "\u{01a}",
+ "\u{01b}",
+ "\u{01c}",
+ "\u{01d}",
+ "\u{01e}",
+ "\u{01f}",
+ "\u{07f}",
+];
+
+fn safe_path(path: &str) -> Result<()> {
+ if path.starts_with("/") {
+ return Err(Error::IllegalArgument("Cannot start with '/'".into()))
+ }
+
+ for bad_str in PATH_ILLEGAL_STRINGS {
+ if path.contains(bad_str) {
+ return Err(Error::IllegalArgument(format!("Path cannot contain {:?}", bad_str)))
+ }
+ }
+
+ for component in path.split('/') {
+ for bad_str in PATH_ILLEGAL_COMPONENTS {
+ if component == *bad_str {
+ return Err(Error::IllegalArgument(format!("Path cannot have component {:?}", component)))
+ }
+ }
+
+ let component_lower = component.to_lowercase();
+ for bad_str in PATH_ILLEGAL_COMPONENTS_CASE_INSENSITIVE {
+ if component_lower.as_str() == *bad_str {
+ return Err(Error::IllegalArgument(format!("Path cannot have component {:?}", component)))
+ }
+ }
+ }
+
+ Ok(())
+}
+
/// Trait used to represent whether a piece of data is verified or not.
pub trait VerificationStatus {}
@@ -42,6 +155,32 @@
Timestamp,
}
+impl Role {
+ /// Check if this role could be associated with a given path.
+ ///
+ /// ```
+ /// use tuf::metadata::{MetadataPath, Role};
+ ///
+ /// assert!(Role::Root.fuzzy_matches_path(&MetadataPath::from_role(&Role::Root)));
+ /// assert!(Role::Snapshot.fuzzy_matches_path(&MetadataPath::from_role(&Role::Snapshot)));
+ /// assert!(Role::Targets.fuzzy_matches_path(&MetadataPath::from_role(&Role::Targets)));
+ /// assert!(Role::Timestamp.fuzzy_matches_path(&MetadataPath::from_role(&Role::Timestamp)));
+ ///
+ /// assert!(!Role::Root.fuzzy_matches_path(&MetadataPath::from_role(&Role::Snapshot)));
+ /// assert!(!Role::Root.fuzzy_matches_path(&MetadataPath::new("wat".into()).unwrap()));
+ /// ```
+ pub fn fuzzy_matches_path(&self, path: &MetadataPath) -> bool {
+ match self {
+ &Role::Root if &path.0 == "root" => true,
+ &Role::Snapshot if &path.0 == "snapshot" => true,
+ &Role::Timestamp if &path.0 == "timestamp" => true,
+ &Role::Targets if &path.0 == "targets" => true,
+ // TODO delegation support
+ _ => false
+ }
+ }
+}
+
impl Display for Role {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
@@ -54,14 +193,14 @@
}
/// Enum used for addressing versioned TUF metadata.
-#[derive(Debug)]
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum MetadataVersion {
/// The metadata is unversioned.
None,
/// The metadata is addressed by a specific version number.
Number(u32),
/// The metadata is addressed by a hash prefix. Used with TUF's consistent snapshot feature.
- Hash(String),
+ Hash(HashValue),
}
impl MetadataVersion {
@@ -70,7 +209,7 @@
match self {
&MetadataVersion::None => String::new(),
&MetadataVersion::Number(ref x) => format!("{}.", x),
- &MetadataVersion::Hash(ref s) => format!("{}.", s),
+ &MetadataVersion::Hash(ref v) => format!("{}.", v),
}
}
}
@@ -114,7 +253,7 @@
/// An immutable reference to the unverified raw data.
///
- /// *WARNING*: This data is untrusted.
+ /// **WARNING**: This data is untrusted.
pub fn unverified_signed(&self) -> &D::RawData {
&self.signed
}
@@ -385,17 +524,83 @@
}
}
-/// Wrapper for a path to metadata.
-#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize, Deserialize)]
+/// Wrapper for a path to metadata.
+#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize)]
pub struct MetadataPath(String);
impl MetadataPath {
- /// Create a metadata path from the given role.
- pub fn from_role(role: &Role) -> Self {
- MetadataPath(role.to_string())
+ /// Create a new `MetadataPath` from a `String`.
+ ///
+ /// ```
+ /// use tuf::metadata::MetadataPath;
+ ///
+ /// assert!(MetadataPath::new("foo".into()).is_ok());
+ /// assert!(MetadataPath::new("/foo".into()).is_err());
+ /// assert!(MetadataPath::new("../foo".into()).is_err());
+ /// assert!(MetadataPath::new("foo/".into()).is_err());
+ /// assert!(MetadataPath::new("foo/..".into()).is_err());
+ /// assert!(MetadataPath::new("foo/../bar".into()).is_err());
+ /// assert!(MetadataPath::new("..foo".into()).is_ok());
+ /// assert!(MetadataPath::new("foo//bar".into()).is_err());
+ /// assert!(MetadataPath::new("foo/..bar".into()).is_ok());
+ /// assert!(MetadataPath::new("foo/bar..".into()).is_ok());
+ /// ```
+ pub fn new(path: String) -> Result<Self> {
+ safe_path(&path)?;
+ Ok(MetadataPath(path))
}
- // TODO convert to/from paths/urls/etc
+ /// Create a metadata path from the given role.
+ /// ```
+ /// use tuf::metadata::{Role, MetadataPath};
+ ///
+ /// assert_eq!(MetadataPath::from_role(&Role::Root),
+ ///            MetadataPath::new("root".into()).unwrap());
+ /// assert_eq!(MetadataPath::from_role(&Role::Snapshot),
+ ///            MetadataPath::new("snapshot".into()).unwrap());
+ /// assert_eq!(MetadataPath::from_role(&Role::Targets),
+ ///            MetadataPath::new("targets".into()).unwrap());
+ /// assert_eq!(MetadataPath::from_role(&Role::Timestamp),
+ ///            MetadataPath::new("timestamp".into()).unwrap());
+ /// ```
+ pub fn from_role(role: &Role) -> Self {
+ Self::new(format!("{}", role)).unwrap()
+ }
+
+ /// Split `MetadataPath` into components that can be joined to create URL paths, Unix paths, or
+ /// Windows paths.
+ ///
+ /// ```
+ /// use tuf::crypto::HashValue;
+ /// use tuf::interchange::JsonDataInterchange;
+ /// use tuf::metadata::{MetadataPath, MetadataVersion};
+ ///
+ /// let path = MetadataPath::new("foo/bar".into()).unwrap();
+ /// assert_eq!(path.components::<JsonDataInterchange>(&MetadataVersion::None),
+ /// ["foo".to_string(), "bar.json".to_string()]);
+ /// assert_eq!(path.components::<JsonDataInterchange>(&MetadataVersion::Number(1)),
+ /// ["foo".to_string(), "1.bar.json".to_string()]);
+ /// assert_eq!(path.components::<JsonDataInterchange>(&MetadataVersion::Hash(HashValue::from_hex("abcd").unwrap())),
+ /// ["foo".to_string(), "abcd.bar.json".to_string()]);
+ /// ```
+ pub fn components<D>(&self, version: &MetadataVersion) -> Vec<String>
+ where
+ D: DataInterchange,
+ {
+ let mut buf: Vec<String> = self.0.split('/').map(|s| s.to_string()).collect();
+ let len = buf.len();
+ buf[len - 1] = format!("{}{}.{}", version.prefix(), buf[len - 1], D::extension());
+ buf
+ }
+}
+
+impl<'de> Deserialize<'de> for MetadataPath {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let s: String = Deserialize::deserialize(de)?;
+ MetadataPath::new(s).map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
+ }
}
/// Metadata for the timestamp role.
@@ -587,9 +792,54 @@
/// Wrapper for a path to a target.
-#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Hash, Eq, Serialize)]
pub struct TargetPath(String);
+impl TargetPath {
+ /// Create a new `TargetPath` from a `String`.
+ ///
+ /// ```
+ /// use tuf::metadata::TargetPath;
+ ///
+ /// assert!(TargetPath::new("foo".into()).is_ok());
+ /// assert!(TargetPath::new("/foo".into()).is_err());
+ /// assert!(TargetPath::new("../foo".into()).is_err());
+ /// assert!(TargetPath::new("foo/".into()).is_err());
+ /// assert!(TargetPath::new("foo/..".into()).is_err());
+ /// assert!(TargetPath::new("foo/../bar".into()).is_err());
+ /// assert!(TargetPath::new("..foo".into()).is_ok());
+ /// assert!(TargetPath::new("foo//bar".into()).is_err());
+ /// assert!(TargetPath::new("foo/..bar".into()).is_ok());
+ /// assert!(TargetPath::new("foo/bar..".into()).is_ok());
+ /// ```
+ pub fn new(path: String) -> Result<Self> {
+ safe_path(&path)?;
+ Ok(TargetPath(path))
+ }
+
+ /// Split `TargetPath` into components that can be joined to create URL paths, Unix paths, or
+ /// Windows paths.
+ ///
+ /// ```
+ /// use tuf::metadata::TargetPath;
+ ///
+ /// let path = TargetPath::new("foo/bar".into()).unwrap();
+ /// assert_eq!(path.components(), ["foo".to_string(), "bar".to_string()]);
+ /// ```
+ pub fn components(&self) -> Vec<String> {
+ self.0.split('/').map(|s| s.to_string()).collect()
+ }
+}
+
+impl<'de> Deserialize<'de> for TargetPath {
+ fn deserialize<D: Deserializer<'de>>(de: D) -> ::std::result::Result<Self, D::Error> {
+ let s: String = Deserialize::deserialize(de)?;
+ TargetPath::new(s).map_err(|e| {
+ DeserializeError::custom(format!("{:?}", e))
+ })
+ }
+}
+
/// Description of a target, used in verification.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct TargetDescription {
@@ -597,6 +847,69 @@
hashes: HashMap<HashAlgorithm, HashValue>,
}
+impl TargetDescription {
+ /// Read from the given reader and calculate the length and hash values.
+ ///
+ /// ```
+ /// use tuf::crypto::{HashAlgorithm,HashValue};
+ /// use tuf::metadata::TargetDescription;
+ ///
+ /// let bytes: &[u8] = b"it was a pleasure to burn";
+ /// let target_description = TargetDescription::from_reader(bytes).unwrap();
+ ///
+ /// // $ printf 'it was a pleasure to burn' | sha256sum
+ /// let sha256 = HashValue::from_hex("45df7395bceb7567de2fb8272048b4e57f98bf64c2a72e2aa9933537bd99590b").unwrap();
+ /// // $ printf 'it was a pleasure to burn' | sha512sum
+ /// let sha512 = HashValue::from_hex("b6e231c0ac9b61dbc9a56b948fa76e6efa70864028cd607a84c248473aa7da339476d0d3060dfcd5bad5e0a054d7328ff064a1a09b9712d09fe4d9034c210981").unwrap();
+ ///
+ /// assert_eq!(target_description.length(), bytes.len() as u64);
+ /// assert_eq!(target_description.hashes().get(&HashAlgorithm::Sha256), Some(&sha256));
+ /// assert_eq!(target_description.hashes().get(&HashAlgorithm::Sha512), Some(&sha512));
+ /// ```
+ pub fn from_reader<R>(mut read: R) -> Result<Self>
+ where
+ R: Read,
+ {
+ let mut length = 0;
+ let mut sha256 = digest::Context::new(&SHA256);
+ let mut sha512 = digest::Context::new(&SHA512);
+
+ let mut buf = vec![0; 1024];
+ loop {
+ match read.read(&mut buf) {
+ Ok(read_bytes) => {
+ if read_bytes == 0 {
+ break;
+ }
+
+ length += read_bytes as u64;
+ sha256.update(&buf[0..read_bytes]);
+ sha512.update(&buf[0..read_bytes]);
+ }
+ e @ Err(_) => e.map(|_| ())?,
+ }
+ }
+
+ let mut hashes = HashMap::new();
+ let _ = hashes.insert(HashAlgorithm::Sha256, HashValue::new(sha256.finish().as_ref().to_vec()));
+ let _ = hashes.insert(HashAlgorithm::Sha512, HashValue::new(sha512.finish().as_ref().to_vec()));
+ Ok(TargetDescription {
+ length: length,
+ hashes: hashes,
+ })
+ }
+
+ /// The maximum length of the target.
+ pub fn length(&self) -> u64 {
+ self.length
+ }
+
+ /// An immutable reference to the list of calculated hashes.
+ pub fn hashes(&self) -> &HashMap<HashAlgorithm, HashValue> {
+ &self.hashes
+ }
+}
+
/// Metadata for the targets role.
#[derive(Debug, PartialEq)]
pub struct TargetsMetadata {
diff --git a/src/repository.rs b/src/repository.rs
index a155a33..715d945 100644
--- a/src/repository.rs
+++ b/src/repository.rs
@@ -3,17 +3,19 @@
use hyper::{Url, Client};
use hyper::client::response::Response;
use hyper::header::{Headers, UserAgent};
+use hyper::status::StatusCode;
use ring::digest::{self, SHA256, SHA512};
use std::collections::HashMap;
use std::fs::{self, File, DirBuilder};
-use std::io::{Read, Write};
+use std::io::{Read, Write, Cursor};
use std::marker::PhantomData;
use std::path::PathBuf;
+use tempfile::NamedTempFile;
use Result;
-use crypto::{HashAlgorithm, HashValue};
+use crypto::{self, HashAlgorithm, HashValue};
use error::Error;
-use metadata::{SignedMetadata, MetadataVersion, Unverified, Verified, Role, Metadata};
+use metadata::{SignedMetadata, MetadataVersion, Unverified, Verified, Role, Metadata, TargetPath, TargetDescription, MetadataPath};
use interchange::DataInterchange;
/// Top-level trait that represents a TUF repository and contains all the ways it can be interacted
@@ -22,6 +24,9 @@
where
D: DataInterchange,
{
+ /// The type returned when reading a target.
+ type TargetRead: Read;
+
/// Initialize the repository.
fn initialize(&mut self) -> Result<()>;
@@ -29,6 +34,7 @@
fn store_metadata<M>(
&mut self,
role: &Role,
+ meta_path: &MetadataPath,
version: &MetadataVersion,
metadata: &SignedMetadata<D, M, Verified>,
) -> Result<()>
@@ -39,6 +45,7 @@
fn fetch_metadata<M>(
&mut self,
role: &Role,
+ meta_path: &MetadataPath,
version: &MetadataVersion,
max_size: &Option<usize>,
hash_data: Option<(&HashAlgorithm, &HashValue)>,
@@ -46,17 +53,44 @@
where
M: Metadata;
- /// Get the version string that addresses the metadata.
- fn version_string(role: &Role, version: &MetadataVersion) -> String {
- // TODO this doesn't support delegations that could have `/` chars in them
- format!("{}{}{}", version.prefix(), role, D::extension())
+ /// Store the given target.
+ fn store_target<R>(&mut self, read: R, target_path: &TargetPath, target_description: &TargetDescription) -> Result<()>
+ where
+ R: Read;
+
+ /// Fetch the given target.
+ ///
+ /// **WARNING**: The target will **NOT** yet be verified.
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<Self::TargetRead>;
+
+ /// Perform a sanity check that `M`, `Role`, and `MetadataPath` all describe the same entity.
+ fn check<M>(role: &Role, meta_path: &MetadataPath) -> Result<()>
+ where
+ M: Metadata
+ {
+ if role != &M::role() {
+ return Err(Error::IllegalArgument(format!(
+ "Attempted to store {} metadata as {}.",
+ M::role(),
+ role
+ )));
+ }
+
+ if !role.fuzzy_matches_path(meta_path) {
+ return Err(Error::IllegalArgument(format!(
+ "Role {} does not match path {:?}",
+ role,
+ meta_path)))
+ }
+
+ Ok(())
}
/// Read the from given reader, optionally capped at `max_size` bytes, optionally requiring
/// hashes to match.
fn safe_read<R, W>(
- read: &mut R,
- write: &mut W,
+ mut read: R,
+ mut write: W,
max_size: Option<i64>,
hash_data: Option<(&HashAlgorithm, &HashValue)>,
) -> Result<()>
@@ -151,8 +185,10 @@
where
D: DataInterchange,
{
+ type TargetRead = File;
+
fn initialize(&mut self) -> Result<()> {
- for p in &["metadata", "targets"] {
+ for p in &["metadata", "targets", "temp"] {
DirBuilder::new().recursive(true).create(
self.local_path.join(p),
)?
@@ -164,21 +200,17 @@
fn store_metadata<M>(
&mut self,
role: &Role,
+ meta_path: &MetadataPath,
version: &MetadataVersion,
metadata: &SignedMetadata<D, M, Verified>,
) -> Result<()>
where
M: Metadata,
{
- if role != &M::role() {
- return Err(Error::IllegalArgument(format!(
- "Attempted to store {} metadata as {}.",
- M::role(),
- role
- )));
- }
- let version_str = Self::version_string(role, version);
- let path = self.local_path.join("metadata").join(&version_str);
+ Self::check::<M>(role, meta_path)?;
+
+ let mut path = self.local_path.join("metadata");
+ path.extend(meta_path.components::<D>(version));
if path.exists() {
debug!("Metadata path exists. Deleting: {:?}", path);
@@ -195,6 +227,7 @@
fn fetch_metadata<M>(
&mut self,
role: &Role,
+ meta_path: &MetadataPath,
version: &MetadataVersion,
max_size: &Option<usize>,
hash_data: Option<(&HashAlgorithm, &HashValue)>,
@@ -202,13 +235,43 @@
where
M: Metadata,
{
- let version_str = Self::version_string(role, version);
- let path = self.local_path.join("metadata").join(&version_str);
+ Self::check::<M>(role, meta_path)?;
+
+ let mut path = self.local_path.join("metadata");
+ path.extend(meta_path.components::<D>(&version));
+
let mut file = File::open(&path)?;
let mut out = Vec::new();
Self::safe_read(&mut file, &mut out, max_size.map(|x| x as i64), hash_data)?;
+
Ok(D::from_reader(&*out)?)
}
+
+ fn store_target<R>(&mut self, read: R, target_path: &TargetPath, target_description: &TargetDescription) -> Result<()>
+ where
+ R: Read
+ {
+ let mut temp_file = NamedTempFile::new_in(self.local_path.join("temp"))?;
+ let hash_data = crypto::hash_preference(target_description.hashes())?;
+ Self::safe_read(read, &mut temp_file, Some(target_description.length() as i64), Some(hash_data))?;
+
+ let mut path = self.local_path.clone().join("targets");
+ path.extend(target_path.components());
+ temp_file.persist(&path)?;
+
+ Ok(())
+ }
+
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<File> {
+ let mut path = self.local_path.join("targets");
+ path.extend(target_path.components());
+
+ if !path.exists() {
+ return Err(Error::NotFound)
+ }
+
+ Ok(File::open(&path)?)
+ }
}
@@ -244,12 +307,27 @@
}
}
- fn get(&self, path: &str) -> Result<Response> {
+ fn get(&self, components: &[String]) -> Result<Response> {
let mut headers = Headers::new();
headers.set(UserAgent(self.user_agent.clone()));
- let req = self.client.get(self.url.join(path)?).headers(headers);
- Ok(req.send()?)
+ let mut url = self.url.clone();
+ url.path_segments_mut()
+ .map_err(|_| Error::IllegalArgument(format!("URL was 'cannot-be-a-base': {:?}", self.url)))?
+ .extend(components);
+
+ let req = self.client.get(url.clone()).headers(headers);
+ let resp = req.send()?;
+
+ if !resp.status.is_success() {
+ if resp.status == StatusCode::NotFound {
+ Err(Error::NotFound)
+ } else {
+ Err(Error::Opaque(format!("Error getting {:?}: {:?}", url, resp)))
+ }
+ } else {
+ Ok(resp)
+ }
}
}
@@ -257,34 +335,32 @@
where
D: DataInterchange,
{
+ type TargetRead = Response;
+
fn initialize(&mut self) -> Result<()> {
Ok(())
}
+ /// This always returns `Err` as storing over HTTP is not yet supported.
fn store_metadata<M>(
&mut self,
- role: &Role,
+ _: &Role,
+ _: &MetadataPath,
_: &MetadataVersion,
_: &SignedMetadata<D, M, Verified>,
) -> Result<()>
where
M: Metadata,
{
- if role != &M::role() {
- return Err(Error::IllegalArgument(format!(
- "Attempted to store {} metadata as {}.",
- M::role(),
- role
- )));
- }
Err(Error::Opaque(
- "Http repo store root not implemented".to_string(),
+ "Http repo store metadata not implemented".to_string(),
))
}
fn fetch_metadata<M>(
&mut self,
role: &Role,
+ meta_path: &MetadataPath,
version: &MetadataVersion,
max_size: &Option<usize>,
hash_data: Option<(&HashAlgorithm, &HashValue)>,
@@ -292,12 +368,28 @@
where
M: Metadata,
{
- let version_str = Self::version_string(role, version);
- let mut resp = self.get(&version_str)?;
+ Self::check::<M>(role, meta_path)?;
+
+ let mut resp = self.get(&meta_path.components::<D>(&version))?;
let mut out = Vec::new();
Self::safe_read(&mut resp, &mut out, max_size.map(|x| x as i64), hash_data)?;
Ok(D::from_reader(&*out)?)
}
+
+ /// This always returns `Err` as storing over HTTP is not yet supported.
+ fn store_target<R>(&mut self, _: R, _: &TargetPath, _: &TargetDescription) -> Result<()>
+ where
+ R: Read
+ {
+ Err(Error::Opaque(
+            "Http repo store target not implemented".to_string(),
+ ))
+ }
+
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<Self::TargetRead> {
+ let resp = self.get(&target_path.components())?;
+ Ok(resp)
+ }
}
@@ -306,7 +398,8 @@
where
D: DataInterchange,
{
- metadata: HashMap<String, Vec<u8>>,
+ metadata: HashMap<(MetadataPath, MetadataVersion), Vec<u8>>,
+ targets: HashMap<TargetPath, Vec<u8>>,
_interchange: PhantomData<D>,
}
@@ -318,6 +411,7 @@
pub fn new() -> Self {
EphemeralRepository {
metadata: HashMap::new(),
+ targets: HashMap::new(),
_interchange: PhantomData,
}
}
@@ -327,6 +421,8 @@
where
D: DataInterchange,
{
+ type TargetRead = Cursor<Vec<u8>>;
+
fn initialize(&mut self) -> Result<()> {
Ok(())
}
@@ -334,41 +430,115 @@
fn store_metadata<M>(
&mut self,
role: &Role,
+ meta_path: &MetadataPath,
version: &MetadataVersion,
- root: &SignedMetadata<D, M, Verified>,
+ metadata: &SignedMetadata<D, M, Verified>,
) -> Result<()>
where
M: Metadata,
{
- if role != &M::role() {
- return Err(Error::IllegalArgument(format!(
- "Attempted to store {} metadata as {}.",
- M::role(),
- role
- )));
- }
-
- let version_str = Self::version_string(role, version);
+ Self::check::<M>(role, meta_path)?;
let mut buf = Vec::new();
- D::to_writer(&mut buf, root)?;
- let _ = self.metadata.insert(version_str, buf);
+ D::to_writer(&mut buf, metadata)?;
+ let _ = self.metadata.insert((meta_path.clone(), version.clone()), buf);
Ok(())
}
fn fetch_metadata<M>(
&mut self,
role: &Role,
+ meta_path: &MetadataPath,
version: &MetadataVersion,
- _: &Option<usize>,
- _: Option<(&HashAlgorithm, &HashValue)>,
+ max_size: &Option<usize>,
+ hash_data: Option<(&HashAlgorithm, &HashValue)>,
) -> Result<SignedMetadata<D, M, Unverified>>
where
M: Metadata,
- {
- let version_str = Self::version_string(role, version);
- match self.metadata.get(&version_str) {
- Some(bytes) => D::from_reader(&**bytes),
+ {
+ Self::check::<M>(role, meta_path)?;
+
+ match self.metadata.get(&(meta_path.clone(), version.clone())) {
+ Some(bytes) => {
+ let mut buf = Vec::new();
+ Self::safe_read(bytes.as_slice(), &mut buf, max_size.map(|x| x as i64), hash_data)?;
+ D::from_reader(&*buf)
+ },
None => Err(Error::NotFound),
}
}
+
+ fn store_target<R>(&mut self, read: R, target_path: &TargetPath, target_description: &TargetDescription) -> Result<()>
+ where
+ R: Read
+ {
+ let mut buf = Vec::new();
+ let hash_data = crypto::hash_preference(target_description.hashes())?;
+ Self::safe_read(read, &mut buf, Some(target_description.length() as i64), Some(hash_data))?;
+ let _ = self.targets.insert(target_path.clone(), buf);
+ Ok(())
+ }
+
+ fn fetch_target(&mut self, target_path: &TargetPath) -> Result<Self::TargetRead> {
+ match self.targets.get(target_path) {
+ Some(bytes) => Ok(Cursor::new(bytes.clone())),
+ None => Err(Error::NotFound)
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use tempdir::TempDir;
+ use interchange::JsonDataInterchange;
+
+ #[test]
+ fn ephemeral_repo_targets() {
+ let mut repo = EphemeralRepository::<JsonDataInterchange>::new();
+ repo.initialize().unwrap();
+
+ let data: &[u8] = b"like tears in the rain";
+ let target_description = TargetDescription::from_reader(data).unwrap();
+ let path = TargetPath::new("batty".into()).unwrap();
+ repo.store_target(data, &path, &target_description).unwrap();
+
+ let mut read = repo.fetch_target(&path).unwrap();
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).unwrap();
+ assert_eq!(buf.as_slice(), data);
+
+ let bad_data: &[u8] = b"you're in a desert";
+ assert!(repo.store_target(bad_data, &path, &target_description).is_err());
+
+ let mut read = repo.fetch_target(&path).unwrap();
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).unwrap();
+ assert_eq!(buf.as_slice(), data);
+ }
+
+ #[test]
+ fn file_system_repo_targets() {
+ let temp_dir = TempDir::new("rust-tuf").unwrap();
+ let mut repo = FileSystemRepository::<JsonDataInterchange>::new(temp_dir.path().to_path_buf());
+ repo.initialize().unwrap();
+
+ let data: &[u8] = b"like tears in the rain";
+ let target_description = TargetDescription::from_reader(data).unwrap();
+ let path = TargetPath::new("batty".into()).unwrap();
+ repo.store_target(data, &path, &target_description).unwrap();
+ assert!(temp_dir.path().join("targets").join("batty").exists());
+
+ let mut read = repo.fetch_target(&path).unwrap();
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).unwrap();
+ assert_eq!(buf.as_slice(), data);
+
+ let bad_data: &[u8] = b"you're in a desert";
+ assert!(repo.store_target(bad_data, &path, &target_description).is_err());
+
+ let mut read = repo.fetch_target(&path).unwrap();
+ let mut buf = Vec::new();
+ read.read_to_end(&mut buf).unwrap();
+ assert_eq!(buf.as_slice(), data);
+ }
}
diff --git a/src/tuf.rs b/src/tuf.rs
index 1eaf7b9..7e97611 100644
--- a/src/tuf.rs
+++ b/src/tuf.rs
@@ -9,7 +9,7 @@
use error::Error;
use interchange::DataInterchange;
use metadata::{SignedMetadata, RootMetadata, VerificationStatus, TimestampMetadata, Role,
- SnapshotMetadata, MetadataPath, TargetsMetadata, TargetPath};
+ SnapshotMetadata, MetadataPath, TargetsMetadata, TargetPath, TargetDescription};
/// Contains trusted TUF metadata and can be used to verify other metadata and targets.
#[derive(Debug)]
@@ -39,7 +39,7 @@
/// Create a new `TUF` struct from a piece of metadata that is assumed to be trusted.
///
- /// *WARNING*: This is trust-on-first-use (TOFU) and offers weaker security guarantees than the
+ /// **WARNING**: This is trust-on-first-use (TOFU) and offers weaker security guarantees than the
/// related method `from_root_pinned`.
pub fn from_root<V>(signed_root: SignedMetadata<D, RootMetadata, V>) -> Result<Self>
where
@@ -193,13 +193,24 @@
)
})?;
+ let current_version = self.snapshot.as_ref().map(|t| t.version()).unwrap_or(0);
+
+ if snapshot_description.version() < current_version {
+ return Err(Error::VerificationFailure(format!(
+ "Attempted to roll back snapshot metadata at version {} to {}.",
+ current_version,
+ snapshot_description.version()
+ )));
+ } else if snapshot_description.version() == current_version {
+ return Ok(false);
+ }
+
let signed_snapshot = signed_snapshot.verify(
root.snapshot().threshold(),
root.snapshot().key_ids(),
root.keys(),
)?;
- let current_version = self.snapshot.as_ref().map(|t| t.version()).unwrap_or(0);
let snapshot: SnapshotMetadata = D::deserialize(&signed_snapshot.signed())?;
if snapshot.version() != snapshot_description.version() {
@@ -215,17 +226,7 @@
return Err(Error::ExpiredMetadata(Role::Snapshot));
}
- if snapshot.version() < current_version {
- return Err(Error::VerificationFailure(format!(
- "Attempted to roll back snapshot metadata at version {} to {}.",
- current_version,
- snapshot.version()
- )));
- } else if snapshot.version() == current_version {
- return Ok(false);
- } else {
- snapshot
- }
+ snapshot
};
self.snapshot = Some(snapshot);
@@ -252,13 +253,24 @@
)
})?;
+ let current_version = self.targets.as_ref().map(|t| t.version()).unwrap_or(0);
+
+ if targets_description.version() < current_version {
+ return Err(Error::VerificationFailure(format!(
+ "Attempted to roll back targets metadata at version {} to {}.",
+ current_version,
+ targets_description.version()
+ )));
+ } else if targets_description.version() == current_version {
+ return Ok(false);
+ }
+
let signed_targets = signed_targets.verify(
root.targets().threshold(),
root.targets().key_ids(),
root.keys(),
)?;
- let current_version = self.targets.as_ref().map(|t| t.version()).unwrap_or(0);
let targets: TargetsMetadata = D::deserialize(&signed_targets.signed())?;
if targets.version() != targets_description.version() {
@@ -273,24 +285,28 @@
if targets.expires() <= &Utc::now() {
return Err(Error::ExpiredMetadata(Role::Snapshot));
}
-
- if targets.version() < current_version {
- return Err(Error::VerificationFailure(format!(
- "Attempted to roll back targets metadata at version {} to {}.",
- current_version,
- targets.version()
- )));
- } else if targets.version() == current_version {
- return Ok(false);
- } else {
- targets
- }
+ targets
};
self.targets = Some(targets);
Ok(true)
}
+ /// Get a reference to the description needed to verify the target defined by the given
+ /// `TargetPath`. Returns an `Error` if the target is not defined in the trusted metadata. Note
+ /// that an error does not imply the target is absent: it may exist somewhere in the metadata
+ /// while the chain of trust to it is invalid or incomplete.
+ pub fn target_description(&self, target_path: &TargetPath) -> Result<&TargetDescription> {
+ let _ = self.safe_root_ref()?;
+ let _ = self.safe_snapshot_ref()?;
+ let targets = self.safe_targets_ref()?;
+
+ targets.targets().get(target_path)
+ .ok_or(Error::TargetUnavailable)
+
+ // TODO include searching delegations
+ }
+
fn purge_metadata(&mut self) {
self.snapshot = None;
self.targets = None;
diff --git a/src/util.rs b/src/util.rs
deleted file mode 100644
index 865bcc9..0000000
--- a/src/util.rs
+++ /dev/null
@@ -1,175 +0,0 @@
-use std::fs::{self, File};
-use std::io::{self, Read, Write, Seek, SeekFrom};
-use std::path::{Path, PathBuf};
-use url::percent_encoding::percent_decode;
-use uuid::Uuid;
-
-use error::Error;
-
-/// Converts a URL string (without scheme) into an OS specific path.
-pub fn url_path_to_os_path(url_path: &str) -> Result<PathBuf, Error> {
- let url_path = if cfg!(os = "windows") {
- url_path.replace("/", r"\")
- } else {
- url_path.to_string()
- };
-
- let url_path = percent_decode(url_path.as_bytes())
- .decode_utf8()
- .map_err(|e| Error::Opaque(format!("{}", e)))?
- .into_owned();
-
- Ok(Path::new(&url_path).to_path_buf())
-}
-
-pub fn url_path_to_path_components(url_path: &str) -> Result<Vec<String>, Error> {
- let mut out = Vec::new();
- for component in url_path.split("/") {
- let component = percent_decode(component.as_bytes())
- .decode_utf8()
- .map_err(|e| {
- Error::Opaque(format!("Path component not utf-8: {:?}", e))
- })?
- .into_owned();
- out.push(component);
- }
- Ok(out)
-}
-
-
-#[derive(Debug)]
-struct TempFileInner {
- path: PathBuf,
- file: File,
-}
-
-#[derive(Debug)]
-pub struct TempFile(Option<TempFileInner>);
-
-impl TempFile {
- pub fn new(prefix: PathBuf) -> Result<Self, io::Error> {
- let path = prefix.join(Uuid::new_v4().hyphenated().to_string());
- Ok(TempFile(Some(TempFileInner {
- path: path.clone(),
- file: File::create(path)?,
- })))
- }
-
- pub fn from_existing(path: PathBuf) -> Result<Self, io::Error> {
- Ok(TempFile(Some(TempFileInner {
- path: path.clone(),
- file: File::open(path)?,
- })))
- }
-
- pub fn file_mut(&mut self) -> Result<&mut File, io::Error> {
- match self.0 {
- Some(ref mut inner) => Ok(&mut inner.file),
- None => Err(io::Error::new(
- io::ErrorKind::Other,
- "invalid TempFile reference",
- )),
- }
- }
-
- pub fn persist(mut self, dest: &Path) -> Result<(), io::Error> {
- match self.0.take() {
- Some(inner) => fs::rename(inner.path, dest),
- None => Err(io::Error::new(
- io::ErrorKind::Other,
- "invalid TempFile reference",
- )),
- }
- }
-}
-
-impl Write for TempFile {
- fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> {
- self.file_mut()?.write(buf)
- }
-
- fn flush(&mut self) -> Result<(), io::Error> {
- self.file_mut()?.flush()
- }
-}
-
-impl Read for TempFile {
- fn read(&mut self, buf: &mut [u8]) -> Result<usize, io::Error> {
- self.file_mut()?.read(buf)
- }
-}
-
-impl Seek for TempFile {
- fn seek(&mut self, pos: SeekFrom) -> Result<u64, io::Error> {
- self.file_mut()?.seek(pos)
- }
-}
-
-impl Drop for TempFile {
- fn drop(&mut self) {
- match self.0.take() {
- Some(inner) => {
- drop(inner.file);
- match fs::remove_file(inner.path) {
- Ok(()) => (),
- Err(e) => warn!("Failed to delete tempfile: {:?}", e),
- }
- }
- None => (),
- }
- }
-}
-
-#[cfg(test)]
-mod test {
- use super::*;
-
- #[test]
- #[cfg(not(target_os = "windows"))]
- fn test_url_path_to_os_path_nix() {
- let path = "/tmp/test";
- assert_eq!(url_path_to_os_path(path), Ok(PathBuf::from("/tmp/test")));
- }
-
- #[test]
- #[cfg(not(target_os = "windows"))]
- fn test_url_path_to_os_path_percent_nix() {
- let path = "/tmp/test%20stuff";
- assert_eq!(
- url_path_to_os_path(path),
- Ok(PathBuf::from("/tmp/test stuff"))
- );
- }
-
- #[test]
- #[cfg(target_os = "windows")]
- fn test_url_path_to_os_path_win() {
- let path = r"C:/tmp/test";
- assert_eq!(url_path_to_os_path(path), Ok(PathBuf::from(r"C:\tmp\test")));
- }
-
- #[test]
- #[cfg(target_os = "windows")]
- fn test_url_path_to_os_path_spaces_win() {
- let path = r"C:/tmp/test%20stuff";
- assert_eq!(
- url_path_to_os_path(path),
- Ok(PathBuf::from(r"C:\tmp\test stuff"))
- );
- }
-
- #[test]
- fn test_url_path_to_path_components() {
- let path = "test/foo";
- assert_eq!(
- url_path_to_path_components(path),
- Ok(vec!["test".into(), "foo".into()])
- );
-
- let path = "test/foo%20bar";
- assert_eq!(
- url_path_to_path_components(path),
- Ok(vec!["test".into(), "foo bar".into()])
- );
- }
-}